diff --git a/.github/.copilot.instructions.md b/.github/.copilot.instructions.md deleted file mode 100644 index fd67f562..00000000 --- a/.github/.copilot.instructions.md +++ /dev/null @@ -1,67 +0,0 @@ -# Copilot Instructions for Power Query / M Language Extension - -## Project Overview - -This is a VS Code extension providing Language Server Protocol (LSP) support for the Power Query / M formula language. The extension consists of: - -- **Client**: VS Code extension (`client/` folder) -- **Server**: Language server implementation (`server/` folder) -- **Scripts**: Build and utility scripts (`scripts/` folder) - -## Architecture - -- **Client-Server Model**: Uses LSP architecture with separate client and server processes -- **TypeScript**: Written in TypeScript with webpack bundling -- **Multi-package**: Root package.json manages client, server, and scripts subpackages - -## Important Files to Monitor - -- `package.json` (root) - Main package configuration -- `client/package.json` - Client dependencies including vscode-languageclient -- `server/package.json` - Server dependencies including vscode-languageserver -- `client/src/extension.ts` - Main extension entry point -- `server/src/server.ts` - Language server implementation - -## Build System - -- Uses webpack for bundling -- Separate build processes for client and server -- TypeScript compilation with multiple tsconfig files - -## Testing - -- Mocha for server tests -- Separate test configurations for client and server - -## Common Patterns in This Codebase - -- Extension activation in `client/src/extension.ts` -- Language server setup in `server/src/server.ts` -- Webpack configuration for both client and server -- npm scripts for building, testing, and packaging - -## Useful Commands - -- `npm run build`- Build all packages -- `npm run vsix` - Runs webpack and generates the .vsix installer for the extension -- `npm run test:server` - Runs server unit tests. Requires `build` to be run first. 
-- `npm run test:client` - Runs client UI tests using `vscode-test`. Requires the webpack process to be run first. -- `npm run test` - Run both server and client tests. - -## General Development Guidelines - -- Always test both client and server functionality after changes -- Pay attention to API changes in LSP libraries -- Consider backward compatibility when possible -- Update all related dependencies together to avoid version conflicts - -## Task Tracking - -**Important**: For current task-specific information, progress tracking, and temporary notes, always check and use the `.copilot-current-task.md` file in the root directory. This file contains: - -- Current task objectives and progress -- Task-specific checklists and breaking changes -- Temporary analysis and investigation notes -- Status updates for the current task - -Create or update this file at the beginning of each task session and refer to it throughout the work. This file is gitignored and should not be committed to the repository. This file should contain `` at the top to disable markdown lint operations. diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md new file mode 100644 index 00000000..cf78b32c --- /dev/null +++ b/.github/copilot-instructions.md @@ -0,0 +1,74 @@ +# Copilot Instructions + +## Build, Test, and Lint + +This is a multi-package monorepo (root, `client/`, `server/`, `scripts/`). Each has its own `node_modules` and `package.json`. The root `npm install` runs `npm install-clean` in all sub-packages via `postinstall`. 
+ +```shell +npm install # install all packages (root + client + server + scripts) +npm run build # TypeScript compile all packages +npm run lint # ESLint all packages + +# Server unit tests (mocha, no build dependency) +npm run test:server + +# Client UI tests (vscode-test-electron, requires webpack first) +npm run webpack-prod +npm run test:client + +# Run a single server test file directly +cd server +npx mocha --require ts-node/register src/test/.test.ts + +# Package the extension +npm run vsix +``` + +## Architecture + +This is a VS Code extension providing Language Server Protocol (LSP) support for the Power Query / M formula language. + +**Client** (`client/src/extension.ts`): Activates the extension, starts the language server over **Node IPC**, and manages the library symbol system. Exposes a `PowerQueryApi` for other extensions. + +**Server** (`server/src/server.ts`): Handles LSP requests — completion, hover, definition, formatting, diagnostics, rename, folding, document symbols, semantic tokens, and signature help. Request handling follows a consistent pattern: fetch document → create cancellation token → build a `PQLS.Analysis` → call the analysis API → map results to LSP types. Errors go through `ErrorUtils.handleError`. + +**Scripts** (`scripts/`): Standalone benchmark/tooling utilities, not part of the extension runtime. + +**Core dependencies** (Microsoft-owned, all three are used across the codebase): + +- `@microsoft/powerquery-parser` — Lexer, parser, and type validation +- `@microsoft/powerquery-language-services` — Higher-level language service (Analysis, completions, hover, etc.) +- `@microsoft/powerquery-formatter` — Code formatter (server-side only) + +### Library Symbol System + +External library symbols allow users to extend the M standard library with custom function definitions loaded from JSON files on disk. The flow: + +1. User configures `powerquery.client.additionalSymbolsDirectories` setting +2. 
`LibrarySymbolManager` scans directories for `.json` files, parses them via `LibrarySymbolUtils` +3. `LibrarySymbolClient` sends symbols to the server via custom LSP requests (`powerquery/addLibrarySymbols`, `powerquery/removeLibrarySymbols`) +4. Server merges external symbols with built-in standard/SDK library in `SettingsUtils.getLibrary()` + +### Local Development with Sibling Packages + +Use `npm run link:start` to develop against locally-built copies of the parser, formatter, and language-services packages (via `npm link`). Use `npm run link:stop` to revert to published npm versions. + +## Code Conventions + +**TypeScript strictness** — The ESLint config enforces rules that are stricter than typical TypeScript projects: + +- `explicit-function-return-type`: All functions must have explicit return type annotations +- `typedef`: Required on variables, parameters, properties, arrow parameters, and destructuring +- `no-floating-promises`: All promises must be awaited or handled +- `switch-exhaustiveness-check`: Switch statements must cover all cases +- `sort-imports`: Imports must be sorted (separated groups allowed, case-insensitive) +- `no-plusplus`: Use `+= 1` instead of `++` +- `object-shorthand`: Always use shorthand properties/methods +- `arrow-body-style`: Use concise arrow function bodies (no braces for single expressions) +- `curly`: Always use braces for control flow, even single-line + +**Formatting** (Prettier): 120 char line width, 4-space indent, trailing commas, no parens on single arrow params. + +**Import aliases**: The codebase uses `PQP` for powerquery-parser, `PQLS` for powerquery-language-services, and `PQF` for powerquery-formatter. + +**Testing**: Server tests use Mocha (`describe`/`it`) with Chai `expect` and Node `assert`. Client tests use VS Code's test runner (`suite`/`test` TDD-style). 
diff --git a/client/.vscode-test.js b/client/.vscode-test.js index 555b0ee6..9ebf317f 100644 --- a/client/.vscode-test.js +++ b/client/.vscode-test.js @@ -7,7 +7,7 @@ const { defineConfig } = require('@vscode/test-cli'); module.exports = defineConfig([ { label: "UI Tests", - files: "lib/test/**/*.test.js", + files: "lib/test/**/!(multiRootWorkspace).test.js", workspaceFolder: "src/test/testFixture", extensionDevelopmentPath: "..", launchArgs: ["--profile-temp", "--disable-extensions"], @@ -26,5 +26,19 @@ module.exports = defineConfig([ // }, // } } + }, + { + label: "Multi-root Workspace Tests", + files: "lib/test/**/multiRootWorkspace.test.js", + workspaceFolder: "src/test/multiRootFixture/test.code-workspace", + extensionDevelopmentPath: "..", + launchArgs: ["--profile-temp", "--disable-extensions"], + + mocha: { + color: true, + ui: "tdd", + timeout: 20000, + slow: 10000, + } } ]); \ No newline at end of file diff --git a/client/package-lock.json b/client/package-lock.json index 8866f545..91a012cd 100644 --- a/client/package-lock.json +++ b/client/package-lock.json @@ -116,9 +116,9 @@ } }, "node_modules/@eslint/config-array/node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz", + "integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==", "dev": true, "license": "MIT", "dependencies": { @@ -190,9 +190,9 @@ } }, "node_modules/@eslint/eslintrc/node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + 
"version": "1.1.13", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz", + "integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==", "dev": true, "license": "MIT", "dependencies": { @@ -1290,9 +1290,9 @@ } }, "node_modules/brace-expansion": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", - "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.3.tgz", + "integrity": "sha512-MCV/fYJEbqx68aE58kv2cA/kiky1G8vux3OR6/jbS+jIMe/6fJWa0DTzJU7dqijOWYwHi1t29FlfYI9uytqlpA==", "license": "MIT", "dependencies": { "balanced-match": "^1.0.0" @@ -2038,9 +2038,9 @@ } }, "node_modules/eslint/node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz", + "integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==", "dev": true, "license": "MIT", "dependencies": { @@ -3020,9 +3020,9 @@ } }, "node_modules/lodash": { - "version": "4.17.23", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz", - "integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==", + "version": "4.18.1", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.18.1.tgz", + "integrity": "sha512-dMInicTPVE8d1e5otfwmmjlxkZoUpiVLwyeTdUsi/Caj/gfzzblBcCE5sRHV/AsjuCmxWrte2TNGSYuCeCq+0Q==", "dev": true, "license": "MIT" }, @@ -3690,9 +3690,9 @@ "license": "ISC" }, "node_modules/picomatch": { 
- "version": "2.3.1", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", - "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.2.tgz", + "integrity": "sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA==", "dev": true, "license": "MIT", "engines": { @@ -4439,9 +4439,9 @@ } }, "node_modules/test-exclude/node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz", + "integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==", "dev": true, "license": "MIT", "dependencies": { @@ -4520,9 +4520,9 @@ } }, "node_modules/tinyglobby/node_modules/picomatch": { - "version": "4.0.3", - "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", - "integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", + "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", "dev": true, "license": "MIT", "engines": { diff --git a/client/src/extension.ts b/client/src/extension.ts index 750006af..ae2ad8ca 100644 --- a/client/src/extension.ts +++ b/client/src/extension.ts @@ -63,7 +63,7 @@ export async function activate(context: vscode.ExtensionContext): Promise { @@ -73,9 +73,20 @@ export async function activate(context: vscode.ExtensionContext): Promise { + await Promise.all( + event.removed.map((folder: 
vscode.WorkspaceFolder) => + librarySymbolManager.removeSymbolsForFolder(folder.uri.toString()), + ), + ); + + await Promise.all( + event.added.map((folder: vscode.WorkspaceFolder) => configureSymbolDirectoriesForFolder(folder)), + ); + }), ); return Object.freeze(librarySymbolClient); @@ -85,18 +96,37 @@ export function deactivate(): Thenable | undefined { return client?.stop(); } -async function configureSymbolDirectories(): Promise { - const config: vscode.WorkspaceConfiguration = vscode.workspace.getConfiguration(ConfigurationConstant.BasePath); +async function configureAllFolderSymbolDirectories(): Promise { + const folders: readonly vscode.WorkspaceFolder[] | undefined = vscode.workspace.workspaceFolders; + + if (!folders || folders.length === 0) { + // No workspace folders — read global config + const config: vscode.WorkspaceConfiguration = vscode.workspace.getConfiguration(ConfigurationConstant.BasePath); + + const additionalSymbolsDirectories: string[] | undefined = config.get( + ConfigurationConstant.AdditionalSymbolsDirectories, + ); + + await librarySymbolManager.refreshSymbolDirectories(additionalSymbolsDirectories ?? []); + + return; + } + + await Promise.all(folders.map((folder: vscode.WorkspaceFolder) => configureSymbolDirectoriesForFolder(folder))); +} + +async function configureSymbolDirectoriesForFolder(folder: vscode.WorkspaceFolder): Promise { + const config: vscode.WorkspaceConfiguration = vscode.workspace.getConfiguration( + ConfigurationConstant.BasePath, + folder.uri, + ); const additionalSymbolsDirectories: string[] | undefined = config.get( ConfigurationConstant.AdditionalSymbolsDirectories, ); - // TODO: Should we fix/remove invalid and malformed directory path values? - // For example, a quoted path "c:\path\to\file" will be considered invalid and reported as an error. - // We could modify values and write them back to the original config locations. - - await librarySymbolManager.refreshSymbolDirectories(additionalSymbolsDirectories ?? 
[]); - - // TODO: Configure file system watchers to detect library file changes. + await librarySymbolManager.refreshSymbolDirectoriesForFolder( + folder.uri.toString(), + additionalSymbolsDirectories ?? [], + ); } diff --git a/client/src/librarySymbolManager.ts b/client/src/librarySymbolManager.ts index fe317ca5..4451869e 100644 --- a/client/src/librarySymbolManager.ts +++ b/client/src/librarySymbolManager.ts @@ -18,8 +18,13 @@ export class LibrarySymbolManager { private static readonly SymbolFileExtension: string = ".json"; private static readonly SymbolFileEncoding: string = "utf-8"; + // The key used for symbols not associated with a specific workspace folder (e.g. single-folder workspaces). + private static readonly GlobalFolderKey: string = "__global__"; + private readonly fs: vscode.FileSystem; - private readonly registeredSymbolModules: string[] = []; + + // Tracks registered module names per workspace folder key. + private readonly registeredModulesByFolder: Map = new Map(); constructor( private librarySymbolClient: PowerQueryApi, @@ -29,71 +34,70 @@ export class LibrarySymbolManager { this.fs = fs ?? vscode.workspace.fs; } - public async refreshSymbolDirectories(directories: ReadonlyArray): Promise> { - await this.clearAllRegisteredSymbolModules(); + /** + * Refreshes symbol directories for a specific workspace folder. + * Only clears and reloads symbols associated with the given folder key. + */ + public async refreshSymbolDirectoriesForFolder( + folderKey: string, + directories: ReadonlyArray, + ): Promise> { + await this.clearRegisteredSymbolModulesForFolder(folderKey); if (!directories || directories.length === 0) { return []; } - // Fetch the full list of files to process. 
- const fileDiscoveryActions: Promise>[] = []; - - const normalizedDirectoryUris: ReadonlyArray = directories.map((directory: string) => { - const normalized: string = path.normalize(directory); + const validSymbolLibraries: Map = await this.loadSymbolsFromDirectories(directories); - if (directory !== normalized) { - this.clientTrace?.info(`Normalized symbol file path '${directory}' => '${normalized}'`); - } + // Prefix module names with the folder key to avoid cross-folder collisions. + const namespacedLibraries: Map = new Map(); - return vscode.Uri.file(normalized); - }); - - const dedupedDirectoryUris: ReadonlyArray = Array.from(new Set(normalizedDirectoryUris)); - - for (const uri of dedupedDirectoryUris) { - fileDiscoveryActions.push(this.getSymbolFilesFromDirectory(uri)); + for (const [moduleName, library] of validSymbolLibraries) { + const namespacedName: string = LibrarySymbolManager.namespacedModuleName(folderKey, moduleName); + namespacedLibraries.set(namespacedName, library); } - // TODO: check for duplicate module file names and only take the last one. - // This would allow a connector developer to override a symbol library generated - // with an older version of their connector. - const symbolFileActions: Promise<[vscode.Uri, LibraryJson] | undefined>[] = []; - const files: ReadonlyArray = (await Promise.all(fileDiscoveryActions)).flat(); + this.clientTrace?.info( + `Registering symbol files for folder '${folderKey}'. Total file count: ${namespacedLibraries.size}`, + ); - for (const fileUri of files) { - symbolFileActions.push(this.processSymbolFile(fileUri)); + if (namespacedLibraries.size > 0) { + await this.librarySymbolClient.addLibrarySymbols(namespacedLibraries); + const folderModules: string[] = this.registeredModulesByFolder.get(folderKey) ?? 
[]; + folderModules.push(...namespacedLibraries.keys()); + this.registeredModulesByFolder.set(folderKey, folderModules); } - if (symbolFileActions.length === 0) { - this.clientTrace?.info( - `No symbol files (${LibrarySymbolManager.SymbolFileExtension}) found in symbol file directories.`, - ); - - return []; - } - - // Process all symbol files, filtering out any that failed to load. - const allSymbolFiles: ReadonlyArray<[vscode.Uri, LibraryJson]> = (await Promise.all(symbolFileActions)).filter( - (value: [vscode.Uri, LibraryJson] | undefined) => value !== undefined, - ) as ReadonlyArray<[vscode.Uri, LibraryJson]>; + return this.registeredModulesByFolder.get(folderKey) ?? []; + } - const validSymbolLibraries: Map = new Map(); + /** + * Refreshes symbol directories without folder scoping (single-folder or no-folder workspaces). + */ + public async refreshSymbolDirectories(directories: ReadonlyArray): Promise> { + return await this.refreshSymbolDirectoriesForFolder(LibrarySymbolManager.GlobalFolderKey, directories); + } - for (const [uri, library] of allSymbolFiles) { - const moduleName: string = LibrarySymbolManager.getModuleNameFromFileUri(uri); - validSymbolLibraries.set(moduleName, library); - } + /** + * Removes all registered symbol modules for a specific workspace folder. + */ + public async removeSymbolsForFolder(folderKey: string): Promise { + await this.clearRegisteredSymbolModulesForFolder(folderKey); + } - this.clientTrace?.info(`Registering symbol files. Total file count: ${validSymbolLibraries.size}`); + /** + * Removes all registered symbol modules across all folders. 
+ */ + public async removeAllSymbols(): Promise { + const allModules: string[] = Array.from(this.registeredModulesByFolder.values()).flat(); - if (validSymbolLibraries.size > 0) { - await this.librarySymbolClient - .addLibrarySymbols(validSymbolLibraries) - .then(() => this.registeredSymbolModules.push(...validSymbolLibraries.keys())); + if (allModules.length === 0) { + return; } - return this.registeredSymbolModules; + await this.librarySymbolClient.removeLibrarySymbols(allModules); + this.registeredModulesByFolder.clear(); } public async getSymbolFilesFromDirectory(directory: vscode.Uri): Promise> { @@ -162,17 +166,70 @@ export class LibrarySymbolManager { return undefined; } + private static namespacedModuleName(folderKey: string, moduleName: string): string { + return `${folderKey}::${moduleName}`; + } + private static getModuleNameFromFileUri(fileUri: vscode.Uri): string { return path.basename(fileUri.fsPath, LibrarySymbolManager.SymbolFileExtension); } - private async clearAllRegisteredSymbolModules(): Promise { - if (this.registeredSymbolModules.length === 0) { + private async loadSymbolsFromDirectories(directories: ReadonlyArray): Promise> { + const fileDiscoveryActions: Promise>[] = []; + + const normalizedDirectoryUris: ReadonlyArray = directories.map((directory: string) => { + const normalized: string = path.normalize(directory); + + if (directory !== normalized) { + this.clientTrace?.info(`Normalized symbol file path '${directory}' => '${normalized}'`); + } + + return vscode.Uri.file(normalized); + }); + + const dedupedDirectoryUris: ReadonlyArray = Array.from(new Set(normalizedDirectoryUris)); + + for (const uri of dedupedDirectoryUris) { + fileDiscoveryActions.push(this.getSymbolFilesFromDirectory(uri)); + } + + const symbolFileActions: Promise<[vscode.Uri, LibraryJson] | undefined>[] = []; + const files: ReadonlyArray = (await Promise.all(fileDiscoveryActions)).flat(); + + for (const fileUri of files) { + 
symbolFileActions.push(this.processSymbolFile(fileUri)); + } + + if (symbolFileActions.length === 0) { + this.clientTrace?.info( + `No symbol files (${LibrarySymbolManager.SymbolFileExtension}) found in symbol file directories.`, + ); + + return new Map(); + } + + const allSymbolFiles: ReadonlyArray<[vscode.Uri, LibraryJson]> = (await Promise.all(symbolFileActions)).filter( + (value: [vscode.Uri, LibraryJson] | undefined) => value !== undefined, + ) as ReadonlyArray<[vscode.Uri, LibraryJson]>; + + const validSymbolLibraries: Map = new Map(); + + for (const [uri, library] of allSymbolFiles) { + const moduleName: string = LibrarySymbolManager.getModuleNameFromFileUri(uri); + validSymbolLibraries.set(moduleName, library); + } + + return validSymbolLibraries; + } + + private async clearRegisteredSymbolModulesForFolder(folderKey: string): Promise { + const folderModules: string[] | undefined = this.registeredModulesByFolder.get(folderKey); + + if (!folderModules || folderModules.length === 0) { return; } - await this.librarySymbolClient - .removeLibrarySymbols(this.registeredSymbolModules) - .then(() => (this.registeredSymbolModules.length = 0)); + await this.librarySymbolClient.removeLibrarySymbols(folderModules); + this.registeredModulesByFolder.delete(folderKey); } } diff --git a/client/src/test/multiRootFixture/test.code-workspace b/client/src/test/multiRootFixture/test.code-workspace new file mode 100644 index 00000000..f34d9644 --- /dev/null +++ b/client/src/test/multiRootFixture/test.code-workspace @@ -0,0 +1,9 @@ +{ + "folders": [ + { "path": "workspaceA" }, + { "path": "workspaceB" } + ], + "settings": { + "git.enabled": false + } +} diff --git a/client/src/test/multiRootFixture/workspaceA/.vscode/settings.json b/client/src/test/multiRootFixture/workspaceA/.vscode/settings.json new file mode 100644 index 00000000..a8acbb89 --- /dev/null +++ b/client/src/test/multiRootFixture/workspaceA/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "git.enabled": false +} diff 
--git a/client/src/test/multiRootFixture/workspaceA/sample.pq b/client/src/test/multiRootFixture/workspaceA/sample.pq new file mode 100644 index 00000000..3e379696 --- /dev/null +++ b/client/src/test/multiRootFixture/workspaceA/sample.pq @@ -0,0 +1,4 @@ +let + result = ConnectorA.Contents("https://example.com") +in + result diff --git a/client/src/test/multiRootFixture/workspaceA/symbols/ConnectorA.json b/client/src/test/multiRootFixture/workspaceA/symbols/ConnectorA.json new file mode 100644 index 00000000..19fa663b --- /dev/null +++ b/client/src/test/multiRootFixture/workspaceA/symbols/ConnectorA.json @@ -0,0 +1,25 @@ +[ + { + "name": "ConnectorA.Contents", + "documentation": null, + "completionItemKind": 3, + "functionParameters": [ + { + "name": "url", + "type": "text", + "isRequired": true, + "isNullable": false, + "caption": null, + "description": null, + "sampleValues": null, + "allowedValues": null, + "defaultValue": null, + "fields": null, + "enumNames": null, + "enumCaptions": null + } + ], + "isDataSource": true, + "type": "any" + } +] diff --git a/client/src/test/multiRootFixture/workspaceB/.vscode/settings.json b/client/src/test/multiRootFixture/workspaceB/.vscode/settings.json new file mode 100644 index 00000000..a8acbb89 --- /dev/null +++ b/client/src/test/multiRootFixture/workspaceB/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "git.enabled": false +} diff --git a/client/src/test/multiRootFixture/workspaceB/sample.pq b/client/src/test/multiRootFixture/workspaceB/sample.pq new file mode 100644 index 00000000..4eff57f5 --- /dev/null +++ b/client/src/test/multiRootFixture/workspaceB/sample.pq @@ -0,0 +1,4 @@ +let + result = ConnectorB.Contents("myserver") +in + result diff --git a/client/src/test/multiRootFixture/workspaceB/symbols/ConnectorB.json b/client/src/test/multiRootFixture/workspaceB/symbols/ConnectorB.json new file mode 100644 index 00000000..b62185ba --- /dev/null +++ b/client/src/test/multiRootFixture/workspaceB/symbols/ConnectorB.json @@ -0,0 
+1,25 @@ +[ + { + "name": "ConnectorB.Contents", + "documentation": null, + "completionItemKind": 3, + "functionParameters": [ + { + "name": "server", + "type": "text", + "isRequired": true, + "isNullable": false, + "caption": null, + "description": null, + "sampleValues": null, + "allowedValues": null, + "defaultValue": null, + "fields": null, + "enumNames": null, + "enumCaptions": null + } + ], + "isDataSource": true, + "type": "any" + } +] diff --git a/client/src/test/suite/librarySymbolManager.test.ts b/client/src/test/suite/librarySymbolManager.test.ts index a9acc5bc..7e716dc7 100644 --- a/client/src/test/suite/librarySymbolManager.test.ts +++ b/client/src/test/suite/librarySymbolManager.test.ts @@ -84,13 +84,19 @@ suite("LibrarySymbolManager.refreshSymbolDirectories", () => { ]); assert.equal(modules.length, 1, "Expected one result"); - assert.equal(modules[0], "ExtensionTest"); + assert.ok(modules[0].includes("ExtensionTest"), "Expected module name to contain ExtensionTest"); assert.ok(mockClient.registeredSymbols, "call should have been made"); assert.equal(mockClient.registeredSymbols.size, 1, "Expected one element in the symbols call"); - const entry: LibraryJson | undefined = mockClient.registeredSymbols.get("ExtensionTest"); - assert(entry !== undefined, "Expected ExtensionTest to in the results"); + // Find the entry by checking keys that end with ExtensionTest + const matchingKey: string | undefined = Array.from(mockClient.registeredSymbols.keys()).find((key: string) => + key.includes("ExtensionTest"), + ); + + assert(matchingKey !== undefined, "Expected ExtensionTest to be in the results"); + const entry: LibraryJson | undefined = mockClient.registeredSymbols.get(matchingKey); + assert(entry !== undefined, "Expected entry to exist"); assert.equal(entry.length, 1, "Expected one library in the result"); assert.equal(entry[0].name, "ExtensionTest.Contents"); @@ -100,6 +106,117 @@ suite("LibrarySymbolManager.refreshSymbolDirectories", () => { }); }); 
+suite("LibrarySymbolManager.refreshSymbolDirectoriesForFolder", () => { + teardown(async () => { + await librarySymbolManager.removeAllSymbols(); + mockClient.reset(); + }); + + test("Symbols are namespaced by folder key", async () => { + const modules: ReadonlyArray = await librarySymbolManager.refreshSymbolDirectoriesForFolder( + "file:///folderA", + [TestUtils.getTestFixturePath()], + ); + + assert.equal(modules.length, 1, "Expected one module"); + assert.ok(modules[0].startsWith("file:///folderA::"), "Expected module name to be namespaced with folder key"); + assert.ok(modules[0].endsWith("ExtensionTest"), "Expected module name to contain ExtensionTest"); + }); + + test("Different folders can register same-named modules independently", async () => { + await librarySymbolManager.refreshSymbolDirectoriesForFolder("file:///folderA", [ + TestUtils.getTestFixturePath(), + ]); + + await librarySymbolManager.refreshSymbolDirectoriesForFolder("file:///folderB", [ + TestUtils.getTestFixturePath(), + ]); + + // Both folders should have their own namespaced copy + assert.equal(mockClient.registeredSymbols.size, 2, "Expected two registered symbol sets"); + + const keys: string[] = Array.from(mockClient.registeredSymbols.keys()); + const folderAKey: string | undefined = keys.find((k: string) => k.startsWith("file:///folderA::")); + const folderBKey: string | undefined = keys.find((k: string) => k.startsWith("file:///folderB::")); + + assert.ok(folderAKey, "Expected folderA module"); + assert.ok(folderBKey, "Expected folderB module"); + assert.notEqual(folderAKey, folderBKey, "Keys should be different"); + }); + + test("Refreshing one folder does not affect another folder's symbols", async () => { + await librarySymbolManager.refreshSymbolDirectoriesForFolder("file:///folderA", [ + TestUtils.getTestFixturePath(), + ]); + + await librarySymbolManager.refreshSymbolDirectoriesForFolder("file:///folderB", [ + TestUtils.getTestFixturePath(), + ]); + + 
assert.equal(mockClient.registeredSymbols.size, 2, "Expected two registered symbol sets"); + + // Clear folderA's symbols by refreshing with empty directories + await librarySymbolManager.refreshSymbolDirectoriesForFolder("file:///folderA", []); + + // FolderB's symbols should still be registered + assert.equal(mockClient.registeredSymbols.size, 1, "Expected one registered symbol set remaining"); + + const remainingKey: string = Array.from(mockClient.registeredSymbols.keys())[0]; + assert.ok(remainingKey.startsWith("file:///folderB::"), "Expected folderB's symbols to remain"); + }); + + test("removeSymbolsForFolder removes only that folder's symbols", async () => { + await librarySymbolManager.refreshSymbolDirectoriesForFolder("file:///folderA", [ + TestUtils.getTestFixturePath(), + ]); + + await librarySymbolManager.refreshSymbolDirectoriesForFolder("file:///folderB", [ + TestUtils.getTestFixturePath(), + ]); + + assert.equal(mockClient.registeredSymbols.size, 2); + + await librarySymbolManager.removeSymbolsForFolder("file:///folderA"); + + assert.equal(mockClient.registeredSymbols.size, 1, "Expected one registered symbol set remaining"); + const remainingKey: string = Array.from(mockClient.registeredSymbols.keys())[0]; + assert.ok(remainingKey.startsWith("file:///folderB::"), "Expected folderB's symbols to remain"); + }); + + test("removeAllSymbols clears all folders", async () => { + await librarySymbolManager.refreshSymbolDirectoriesForFolder("file:///folderA", [ + TestUtils.getTestFixturePath(), + ]); + + await librarySymbolManager.refreshSymbolDirectoriesForFolder("file:///folderB", [ + TestUtils.getTestFixturePath(), + ]); + + assert.equal(mockClient.registeredSymbols.size, 2); + + await librarySymbolManager.removeAllSymbols(); + + assert.equal(mockClient.registeredSymbols.size, 0, "Expected all symbols to be cleared"); + }); + + test("Refreshing a folder replaces its previous symbols", async () => { + await 
librarySymbolManager.refreshSymbolDirectoriesForFolder("file:///folderA", [ + TestUtils.getTestFixturePath(), + ]); + + assert.equal(mockClient.registeredSymbols.size, 1); + + // Refresh folderA again with same directories — should clear old and re-register + await librarySymbolManager.refreshSymbolDirectoriesForFolder("file:///folderA", [ + TestUtils.getTestFixturePath(), + ]); + + assert.equal(mockClient.registeredSymbols.size, 1, "Expected exactly one symbol set after re-refresh"); + const key: string = Array.from(mockClient.registeredSymbols.keys())[0]; + assert.ok(key.startsWith("file:///folderA::"), "Expected folderA namespace"); + }); +}); + suite("LibrarySymbolManager.getSymbolFilesFromDirectory", () => { test("Two files", async () => await runDirectoryTest(TestUtils.getTestFixturePath(), 2)); test("Does not exist", async () => await runDirectoryTest(TestUtils.randomDirName(), 0)); diff --git a/client/src/test/suite/multiRootWorkspace.test.ts b/client/src/test/suite/multiRootWorkspace.test.ts new file mode 100644 index 00000000..194462a6 --- /dev/null +++ b/client/src/test/suite/multiRootWorkspace.test.ts @@ -0,0 +1,179 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+ +import * as assert from "assert"; +import * as path from "path"; +import * as vscode from "vscode"; + +import { LibraryJson, PowerQueryApi } from "../../powerQueryApi"; +import { LibrarySymbolManager } from "../../librarySymbolManager"; + +class MockLibrarySymbolClient implements PowerQueryApi { + public registeredSymbols: Map<string, LibraryJson> = new Map(); + + onModuleLibraryUpdated(_workspaceUriPath: string, _library: LibraryJson): void { + throw new Error("Function not implemented."); + } + + addLibrarySymbols(librarySymbols: ReadonlyMap<string, LibraryJson>): Promise<void> { + for (const [key, value] of librarySymbols) { + this.registeredSymbols.set(key, value); + } + + return Promise.resolve(); + } + + removeLibrarySymbols(librariesToRemove: ReadonlyArray<string>): Promise<void> { + for (const library of librariesToRemove) { + this.registeredSymbols.delete(library); + } + + return Promise.resolve(); + } + + reset(): void { + this.registeredSymbols.clear(); + } +} + +const multiRootFixturePath: string = path.resolve(__dirname, "../../../src/test/multiRootFixture"); + +function getSymbolsDirForFolder(folderName: string): string { + return path.resolve(multiRootFixturePath, folderName, "symbols"); +} + +suite("Multi-root Workspace Tests", () => { + test("workspace has multiple folders", () => { + const folders: readonly vscode.WorkspaceFolder[] | undefined = vscode.workspace.workspaceFolders; + assert.ok(folders, "Expected workspace folders to be defined"); + assert.equal(folders.length, 2, "Expected two workspace folders"); + + const folderNames: string[] = folders.map((f: vscode.WorkspaceFolder) => f.name); + assert.ok(folderNames.includes("workspaceA"), "Expected workspaceA folder"); + assert.ok(folderNames.includes("workspaceB"), "Expected workspaceB folder"); + }); + + test("extension activates in multi-root workspace", async () => { + const ext: vscode.Extension<PowerQueryApi> | undefined = + vscode.extensions.getExtension("powerquery.vscode-powerquery"); + + assert.ok(ext, "Extension should be available"); + + const api: 
PowerQueryApi = await ext.activate(); + assert.ok(api, "Extension should return an API"); + assert.ok(api.addLibrarySymbols, "API should have addLibrarySymbols"); + assert.ok(api.removeLibrarySymbols, "API should have removeLibrarySymbols"); + }); +}); + +suite("Multi-root Workspace - Per-folder symbol isolation", () => { + const mockClient: MockLibrarySymbolClient = new MockLibrarySymbolClient(); + const manager: LibrarySymbolManager = new LibrarySymbolManager(mockClient); + + teardown(async () => { + await manager.removeAllSymbols(); + mockClient.reset(); + }); + + test("each folder loads its own symbols independently", async () => { + const folders: readonly vscode.WorkspaceFolder[] | undefined = vscode.workspace.workspaceFolders; + assert.ok(folders && folders.length >= 2, "Need at least two workspace folders"); + + // Register symbols for each folder using its own symbol directory + await Promise.all( + folders.map((folder: vscode.WorkspaceFolder) => { + const symbolsDir: string = getSymbolsDirForFolder(folder.name); + + return manager.refreshSymbolDirectoriesForFolder(folder.uri.toString(), [symbolsDir]); + }), + ); + + // Both folders should have symbols registered + assert.equal(mockClient.registeredSymbols.size, 2, "Expected two symbol sets"); + + // Verify ConnectorA is namespaced under workspaceA + const keys: string[] = Array.from(mockClient.registeredSymbols.keys()); + const folderAKeys: string[] = keys.filter((k: string) => k.includes("workspaceA")); + const folderBKeys: string[] = keys.filter((k: string) => k.includes("workspaceB")); + + assert.equal(folderAKeys.length, 1, "Expected one symbol set for workspaceA"); + assert.equal(folderBKeys.length, 1, "Expected one symbol set for workspaceB"); + + // Verify the actual symbol content is correct + const connectorASymbols: LibraryJson | undefined = mockClient.registeredSymbols.get(folderAKeys[0]); + assert.ok(connectorASymbols, "ConnectorA symbols should exist"); + 
assert.equal(connectorASymbols[0].name, "ConnectorA.Contents"); + + const connectorBSymbols: LibraryJson | undefined = mockClient.registeredSymbols.get(folderBKeys[0]); + assert.ok(connectorBSymbols, "ConnectorB symbols should exist"); + assert.equal(connectorBSymbols[0].name, "ConnectorB.Contents"); + }); + + test("removing a workspace folder clears only its symbols", async () => { + const folders: readonly vscode.WorkspaceFolder[] | undefined = vscode.workspace.workspaceFolders; + assert.ok(folders && folders.length >= 2); + + await Promise.all( + folders.map((folder: vscode.WorkspaceFolder) => { + const symbolsDir: string = getSymbolsDirForFolder(folder.name); + + return manager.refreshSymbolDirectoriesForFolder(folder.uri.toString(), [symbolsDir]); + }), + ); + + assert.equal(mockClient.registeredSymbols.size, 2); + + // Simulate removing workspaceA + await manager.removeSymbolsForFolder(folders[0].uri.toString()); + + assert.equal(mockClient.registeredSymbols.size, 1, "Expected one symbol set remaining"); + + // The remaining symbols should be from the second folder + const remainingKey: string = Array.from(mockClient.registeredSymbols.keys())[0]; + + assert.ok( + remainingKey.includes(folders[1].name), + `Expected remaining symbols from ${folders[1].name}, got key: ${remainingKey}`, + ); + }); + + test("refreshing one folder preserves other folder symbols", async () => { + const folders: readonly vscode.WorkspaceFolder[] | undefined = vscode.workspace.workspaceFolders; + assert.ok(folders && folders.length >= 2); + + // Register symbols for both folders + await Promise.all( + folders.map((folder: vscode.WorkspaceFolder) => { + const symbolsDir: string = getSymbolsDirForFolder(folder.name); + + return manager.refreshSymbolDirectoriesForFolder(folder.uri.toString(), [symbolsDir]); + }), + ); + + assert.equal(mockClient.registeredSymbols.size, 2); + + // Re-register symbols for folderA only + const symbolsDirA: string = getSymbolsDirForFolder(folders[0].name); + 
await manager.refreshSymbolDirectoriesForFolder(folders[0].uri.toString(), [symbolsDirA]); + + // Both folders should still have symbols + assert.equal(mockClient.registeredSymbols.size, 2, "Expected both folders to still have symbols"); + }); + + test("per-folder config can be read with scope URI", () => { + const folders: readonly vscode.WorkspaceFolder[] | undefined = vscode.workspace.workspaceFolders; + assert.ok(folders && folders.length >= 2); + + // Verify we can read config scoped to each workspace folder + for (const folder of folders) { + const config: vscode.WorkspaceConfiguration = vscode.workspace.getConfiguration( + "powerquery.client", + folder.uri, + ); + + // The setting should be readable (even if undefined/default) + const dirs: string[] | undefined = config.get("additionalSymbolsDirectories"); + assert.ok(dirs === undefined || Array.isArray(dirs), "Setting should be undefined or an array"); + } + }); +}); diff --git a/package-lock.json b/package-lock.json index 1afcc25c..aa4fabf6 100644 --- a/package-lock.json +++ b/package-lock.json @@ -63,7 +63,9 @@ } }, "node_modules/@eslint/config-array/node_modules/brace-expansion": { - "version": "1.1.12", + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz", + "integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==", "dev": true, "license": "MIT", "peer": true, @@ -131,7 +133,9 @@ } }, "node_modules/@eslint/eslintrc/node_modules/brace-expansion": { - "version": "1.1.12", + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz", + "integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==", "dev": true, "license": "MIT", "peer": true, @@ -578,7 +582,9 @@ "license": "MIT" }, "node_modules/brace-expansion": { - "version": "2.0.2", + "version": "2.0.3", + "resolved": 
"https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.3.tgz", + "integrity": "sha512-MCV/fYJEbqx68aE58kv2cA/kiky1G8vux3OR6/jbS+jIMe/6fJWa0DTzJU7dqijOWYwHi1t29FlfYI9uytqlpA==", "dev": true, "license": "MIT", "dependencies": { @@ -841,7 +847,9 @@ } }, "node_modules/eslint/node_modules/brace-expansion": { - "version": "1.1.12", + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz", + "integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==", "dev": true, "license": "MIT", "peer": true, diff --git a/package.json b/package.json index e4824899..7dc5a50d 100644 --- a/package.json +++ b/package.json @@ -134,7 +134,7 @@ "description": "Recommended always off. Enables benchmark traces to be generated for the extension." }, "powerquery.client.additionalSymbolsDirectories": { - "scope": "machine-overridable", + "scope": "resource", "type": "array", "items": { "type": "string" diff --git a/scripts/package-lock.json b/scripts/package-lock.json index 91b7f547..a6d00a81 100644 --- a/scripts/package-lock.json +++ b/scripts/package-lock.json @@ -89,9 +89,9 @@ } }, "node_modules/@eslint/config-array/node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz", + "integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==", "dev": true, "license": "MIT", "dependencies": { @@ -163,9 +163,9 @@ } }, "node_modules/@eslint/eslintrc/node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": 
"sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz", + "integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==", "dev": true, "license": "MIT", "dependencies": { @@ -1126,9 +1126,9 @@ } }, "node_modules/brace-expansion": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", - "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.3.tgz", + "integrity": "sha512-MCV/fYJEbqx68aE58kv2cA/kiky1G8vux3OR6/jbS+jIMe/6fJWa0DTzJU7dqijOWYwHi1t29FlfYI9uytqlpA==", "dev": true, "license": "MIT", "dependencies": { @@ -1710,9 +1710,9 @@ } }, "node_modules/eslint/node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz", + "integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==", "dev": true, "license": "MIT", "dependencies": { diff --git a/server/.mocharc.json b/server/.mocharc.json index 916bf7ff..1a2efa90 100644 --- a/server/.mocharc.json +++ b/server/.mocharc.json @@ -1,5 +1,5 @@ { "require": ["ts-node/register"], - "extensions": ["ts"], - "spec": "src/test/**/*.test.ts" + "extensions": ["ts", "js"], + "spec": ["src/test/*.test.ts", "lib/test/grammar/**/*.test.js"] } diff --git a/server/package-lock.json b/server/package-lock.json index cafc985a..015b0a9e 100644 --- a/server/package-lock.json +++ 
b/server/package-lock.json @@ -34,6 +34,8 @@ "ts-loader": "^9.5.4", "ts-node": "^10.9.2", "typescript": "^5.9.2", + "vscode-oniguruma": "^2.0.1", + "vscode-textmate": "^9.3.2", "webpack": "^5.105.1", "webpack-cli": "^6.0.1" }, @@ -116,9 +118,9 @@ } }, "node_modules/@eslint/config-array/node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz", + "integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==", "dev": true, "license": "MIT", "dependencies": { @@ -190,9 +192,9 @@ } }, "node_modules/@eslint/eslintrc/node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz", + "integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==", "dev": true, "license": "MIT", "dependencies": { @@ -1196,9 +1198,9 @@ } }, "node_modules/brace-expansion": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", - "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.3.tgz", + "integrity": "sha512-MCV/fYJEbqx68aE58kv2cA/kiky1G8vux3OR6/jbS+jIMe/6fJWa0DTzJU7dqijOWYwHi1t29FlfYI9uytqlpA==", "dev": true, "license": "MIT", "dependencies": { @@ -1872,9 +1874,9 @@ } }, 
"node_modules/eslint/node_modules/brace-expansion": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz", - "integrity": "sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg==", + "version": "1.1.13", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz", + "integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==", "dev": true, "license": "MIT", "dependencies": { @@ -2655,9 +2657,9 @@ } }, "node_modules/lodash": { - "version": "4.17.23", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz", - "integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==", + "version": "4.18.1", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.18.1.tgz", + "integrity": "sha512-dMInicTPVE8d1e5otfwmmjlxkZoUpiVLwyeTdUsi/Caj/gfzzblBcCE5sRHV/AsjuCmxWrte2TNGSYuCeCq+0Q==", "dev": true, "license": "MIT" }, @@ -4038,6 +4040,20 @@ "integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==", "license": "MIT" }, + "node_modules/vscode-oniguruma": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/vscode-oniguruma/-/vscode-oniguruma-2.0.1.tgz", + "integrity": "sha512-poJU8iHIWnC3vgphJnrLZyI3YdqRlR27xzqDmpPXYzA93R4Gk8z7T6oqDzDoHjoikA2aS82crdXFkjELCdJsjQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/vscode-textmate": { + "version": "9.3.2", + "resolved": "https://registry.npmjs.org/vscode-textmate/-/vscode-textmate-9.3.2.tgz", + "integrity": "sha512-n2uGbUcrjhUEBH16uGA0TvUfhWwliFZ1e3+pTjrkim1Mt7ydB41lV08aUvsi70OlzDWp6X7Bx3w/x3fAXIsN0Q==", + "dev": true, + "license": "MIT" + }, "node_modules/watchpack": { "version": "2.5.1", "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.5.1.tgz", diff --git a/server/package.json b/server/package.json index 
cc6c2dad..05811a02 100644 --- a/server/package.json +++ b/server/package.json @@ -56,6 +56,8 @@ "ts-loader": "^9.5.4", "ts-node": "^10.9.2", "typescript": "^5.9.2", + "vscode-oniguruma": "^2.0.1", + "vscode-textmate": "^9.3.2", "webpack": "^5.105.1", "webpack-cli": "^6.0.1" } diff --git a/server/src/library/externalLibraryUtils.ts b/server/src/library/externalLibraryUtils.ts index d4cdc6b5..ac9a4847 100644 --- a/server/src/library/externalLibraryUtils.ts +++ b/server/src/library/externalLibraryUtils.ts @@ -21,4 +21,34 @@ export function removeLibraries(libraryNames: ReadonlyArray<string>): void { } } +export function getRegisteredModuleNames(): ReadonlyArray<string> { + return Array.from(externalLibraryByName.keys()); +} + +/** + * Returns a map of symbol names that appear in more than one registered module. + * Key: symbol name, Value: array of module names that contain it. + */ +export function getOverlappingSymbols(): ReadonlyMap<string, ReadonlyArray<string>> { + const symbolToModules: Map<string, string[]> = new Map(); + + for (const [moduleName, symbols] of externalLibraryByName) { + for (const symbol of symbols) { + const modules: string[] = symbolToModules.get(symbol.name) ?? 
[]; + modules.push(moduleName); + symbolToModules.set(symbol.name, modules); + } + } + + const overlaps: Map<string, ReadonlyArray<string>> = new Map(); + + for (const [symbolName, modules] of symbolToModules) { + if (modules.length > 1) { + overlaps.set(symbolName, modules); + } + } + + return overlaps; +} + const externalLibraryByName: Map<string, LibraryJson> = new Map(); diff --git a/server/src/server.ts b/server/src/server.ts index 0c68c4ef..158680a7 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -314,9 +314,9 @@ connection.onInitialize((params: LS.InitializeParams) => { }, textDocumentSync: LS.TextDocumentSyncKind.Incremental, workspace: { - // TODO: Disabling until we've fully tested support for multiple workspace folders workspaceFolders: { - supported: false, + supported: true, + changeNotifications: true, }, }, }; @@ -419,9 +419,15 @@ connection.languages.semanticTokens.on((params: LS.SemanticTokensParams, cancell connection.onRequest( "powerquery/moduleLibraryUpdated", EventHandlerUtils.genericRequestHandler((params: ModuleLibraryUpdatedParams) => { + connection.console.log( + `[Library] moduleLibraryUpdated: workspaceUri=${params.workspaceUriPath}, symbolCount=${params.library.length}`, + ); + ModuleLibraryUtils.onModuleAdded(params.workspaceUriPath, params.library); LibraryUtils.clearCache(); connection.languages.diagnostics.refresh(); + + connection.console.log(`[Library] Module libraries registered: ${ModuleLibraryUtils.getModuleCount()} total`); }), ); @@ -430,18 +436,57 @@ connection.onRequest( EventHandlerUtils.genericRequestHandler((params: AddLibrarySymbolsParams) => { // JSON-RPC doesn't support sending Maps, so we have to convert from tuple array. 
const symbolMaps: ReadonlyMap<string, LibraryJson> = new Map(params.librarySymbols); + + const moduleDetails: string = Array.from(symbolMaps.entries()) + .map(([name, symbols]: [string, LibraryJson]) => `${name}(${symbols.length})`) + .join(", "); + + connection.console.log(`[Library] addLibrarySymbols: modules=[${moduleDetails}]`); + + // Check for modules that will be overwritten + const existingModules: ReadonlyArray<string> = ExternalLibraryUtils.getRegisteredModuleNames(); + + const overwritten: string[] = Array.from(symbolMaps.keys()).filter((name: string) => + existingModules.includes(name), + ); + + if (overwritten.length > 0) { + connection.console.warn(`[Library] Overwriting existing modules: [${overwritten.join(", ")}]`); + } + ExternalLibraryUtils.addLibaries(symbolMaps); LibraryUtils.clearCache(); connection.languages.diagnostics.refresh(); + + // Check for symbol name overlaps across modules + const overlaps: ReadonlyMap<string, ReadonlyArray<string>> = ExternalLibraryUtils.getOverlappingSymbols(); + + if (overlaps.size > 0) { + for (const [symbolName, modules] of overlaps) { + connection.console.warn( + `[Library] Overlapping symbol '${symbolName}' registered by modules: [${modules.join(", ")}]`, + ); + } + } + + connection.console.log( + `[Library] External libraries registered: [${ExternalLibraryUtils.getRegisteredModuleNames().join(", ")}]`, + ); }), ); connection.onRequest( "powerquery/removeLibrarySymbols", EventHandlerUtils.genericRequestHandler((params: RemoveLibrarySymbolsParams) => { + connection.console.log(`[Library] removeLibrarySymbols: modules=[${params.librariesToRemove.join(", ")}]`); + ExternalLibraryUtils.removeLibraries(params.librariesToRemove); LibraryUtils.clearCache(); connection.languages.diagnostics.refresh(); + + const remaining: ReadonlyArray<string> = ExternalLibraryUtils.getRegisteredModuleNames(); + + connection.console.log(`[Library] External libraries remaining: [${remaining.join(", ")}]`); }), ); diff --git a/server/src/test/grammar/comments.test.ts 
b/server/src/test/grammar/comments.test.ts new file mode 100644 index 00000000..2303bbbc --- /dev/null +++ b/server/src/test/grammar/comments.test.ts @@ -0,0 +1,122 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +import { expect } from "chai"; + +import { + findToken, + hasScope, + MultiLineTokenInfo, + TokenInfo, + tokenizeLine, + tokenizeLines, +} from "./grammarTestHelper.js"; + +describe("Grammar - Comments", () => { + describe("single-line comments", () => { + it("should tokenize a single-line comment", async () => { + const tokens: TokenInfo[] = await tokenizeLine("// This is a comment"); + + const commentToken: TokenInfo | undefined = tokens.find((t: TokenInfo) => + hasScope(t, "comment.line.double-slash.powerquery"), + ); + + expect(commentToken).to.not.equal(undefined); + }); + + it("should tokenize a comment after code", async () => { + const tokens: TokenInfo[] = await tokenizeLine("x = 1 // inline comment"); + + const commentToken: TokenInfo | undefined = tokens.find((t: TokenInfo) => + hasScope(t, "comment.line.double-slash.powerquery"), + ); + + expect(commentToken).to.not.equal(undefined); + + // The code before the comment should not be in comment scope + const xToken: TokenInfo | undefined = findToken(tokens, "x"); + expect(xToken).to.not.equal(undefined); + expect(hasScope(xToken!, "comment")).to.equal(false); + }); + + it("should tokenize an empty comment", async () => { + const tokens: TokenInfo[] = await tokenizeLine("//"); + + const commentToken: TokenInfo | undefined = tokens.find((t: TokenInfo) => + hasScope(t, "comment.line.double-slash.powerquery"), + ); + + expect(commentToken).to.not.equal(undefined); + }); + + it("should not treat // inside a string as a comment", async () => { + const tokens: TokenInfo[] = await tokenizeLine('"http://example.com"'); + + const commentTokens: TokenInfo[] = tokens.filter((t: TokenInfo) => hasScope(t, "comment")); + + expect(commentTokens.length, "// inside a string 
should not be a comment").to.equal(0); + }); + }); + + describe("block comments", () => { + it("should tokenize a single-line block comment", async () => { + const tokens: TokenInfo[] = await tokenizeLine("/* block comment */"); + + const commentToken: TokenInfo | undefined = tokens.find((t: TokenInfo) => + hasScope(t, "comment.block.powerquery"), + ); + + expect(commentToken).to.not.equal(undefined); + }); + + it("should tokenize a multi-line block comment", async () => { + const tokens: MultiLineTokenInfo[] = await tokenizeLines("/* first line\nsecond line */"); + + const commentTokens: MultiLineTokenInfo[] = tokens.filter((t: MultiLineTokenInfo) => + hasScope(t, "comment.block.powerquery"), + ); + + expect(commentTokens.length).to.be.greaterThan(0); + + // Both lines should have comment tokens + const line0Comments: MultiLineTokenInfo[] = commentTokens.filter((t: MultiLineTokenInfo) => t.line === 0); + + const line1Comments: MultiLineTokenInfo[] = commentTokens.filter((t: MultiLineTokenInfo) => t.line === 1); + + expect(line0Comments.length).to.be.greaterThan(0); + expect(line1Comments.length).to.be.greaterThan(0); + }); + + it("should tokenize code before and after block comment", async () => { + const tokens: TokenInfo[] = await tokenizeLine("x /* comment */ y"); + const xToken: TokenInfo | undefined = findToken(tokens, "x"); + const yToken: TokenInfo | undefined = findToken(tokens, "y"); + + expect(xToken).to.not.equal(undefined); + expect(yToken).to.not.equal(undefined); + expect(hasScope(xToken!, "comment")).to.equal(false); + expect(hasScope(yToken!, "comment")).to.equal(false); + }); + + it("should not treat /* inside a string as a comment", async () => { + const tokens: TokenInfo[] = await tokenizeLine('"/* not a comment */"'); + + // All tokens should be within string scope, none should be comment + const nonStringCommentTokens: TokenInfo[] = tokens.filter( + (t: TokenInfo) => hasScope(t, "comment") && !hasScope(t, "string"), + ); + + 
expect(nonStringCommentTokens.length).to.equal(0); + }); + + it("should handle empty block comment", async () => { + const tokens: TokenInfo[] = await tokenizeLine("/**/"); + + const commentToken: TokenInfo | undefined = tokens.find((t: TokenInfo) => + hasScope(t, "comment.block.powerquery"), + ); + + expect(commentToken).to.not.equal(undefined); + }); + }); +}); diff --git a/server/src/test/grammar/expressions.test.ts b/server/src/test/grammar/expressions.test.ts new file mode 100644 index 00000000..6e7ae4f4 --- /dev/null +++ b/server/src/test/grammar/expressions.test.ts @@ -0,0 +1,268 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +import { expect } from "chai"; + +import { + expectScope, + findToken, + hasScope, + MultiLineTokenInfo, + TokenInfo, + tokenizeLine, + tokenizeLines, +} from "./grammarTestHelper.js"; + +describe("Grammar - Expressions", () => { + describe("let expression", () => { + it("should tokenize a let/in expression", async () => { + const tokens: TokenInfo[] = await tokenizeLine("let x = 1 in x"); + const letToken: TokenInfo | undefined = findToken(tokens, "let"); + const inToken: TokenInfo | undefined = findToken(tokens, "in"); + + expect(letToken).to.not.equal(undefined); + expect(inToken).to.not.equal(undefined); + expectScope(letToken, "keyword.other.powerquery"); + expectScope(inToken, "keyword.other.powerquery"); + }); + + it("should tokenize a multiline let expression", async () => { + const tokens: MultiLineTokenInfo[] = await tokenizeLines("let\n x = 1,\n y = 2\nin\n x + y"); + + const letToken: MultiLineTokenInfo | undefined = tokens.find((t: MultiLineTokenInfo) => t.text === "let"); + + const inToken: MultiLineTokenInfo | undefined = tokens.find((t: MultiLineTokenInfo) => t.text === "in"); + + expect(letToken).to.not.equal(undefined); + expect(inToken).to.not.equal(undefined); + }); + }); + + describe("if expression", () => { + it("should tokenize if/then/else", async () => { + const tokens: 
TokenInfo[] = await tokenizeLine("if x > 0 then x else -x"); + const ifToken: TokenInfo | undefined = findToken(tokens, "if"); + const thenToken: TokenInfo | undefined = findToken(tokens, "then"); + const elseToken: TokenInfo | undefined = findToken(tokens, "else"); + + expect(ifToken).to.not.equal(undefined); + expect(thenToken).to.not.equal(undefined); + expect(elseToken).to.not.equal(undefined); + expectScope(ifToken, "keyword.control.conditional.powerquery"); + expectScope(thenToken, "keyword.control.conditional.powerquery"); + expectScope(elseToken, "keyword.control.conditional.powerquery"); + }); + }); + + describe("each expression", () => { + it("should tokenize 'each _ + 1'", async () => { + const tokens: TokenInfo[] = await tokenizeLine("each _ + 1"); + const eachToken: TokenInfo | undefined = findToken(tokens, "each"); + const underscoreToken: TokenInfo | undefined = findToken(tokens, "_"); + + expect(eachToken).to.not.equal(undefined); + expect(underscoreToken).to.not.equal(undefined); + expectScope(eachToken, "keyword.other.powerquery"); + expectScope(underscoreToken, "keyword.operator.implicitvariable.powerquery"); + }); + + it("should tokenize each with field access", async () => { + const tokens: TokenInfo[] = await tokenizeLine("each [Name]"); + const eachToken: TokenInfo | undefined = findToken(tokens, "each"); + expect(eachToken).to.not.equal(undefined); + expectScope(eachToken, "keyword.other.powerquery"); + }); + }); + + describe("function expression", () => { + it("should tokenize a function definition", async () => { + const tokens: TokenInfo[] = await tokenizeLine("(x, y) => x + y"); + const arrowToken: TokenInfo | undefined = findToken(tokens, "=>"); + expect(arrowToken).to.not.equal(undefined); + expectScope(arrowToken, "keyword.operator.function.powerquery"); + }); + + it("should tokenize a typed function", async () => { + const tokens: TokenInfo[] = await tokenizeLine("(x as number) as number => x * 2"); + + const asTokens: TokenInfo[] = 
tokens.filter( + (t: TokenInfo) => t.text === "as" && hasScope(t, "keyword.other.powerquery"), + ); + + expect(asTokens.length).to.equal(2); + }); + }); + + describe("try/catch expression", () => { + it("should tokenize try/otherwise", async () => { + const tokens: TokenInfo[] = await tokenizeLine("try x otherwise 0"); + const tryToken: TokenInfo | undefined = findToken(tokens, "try"); + const otherwiseToken: TokenInfo | undefined = findToken(tokens, "otherwise"); + + expect(tryToken).to.not.equal(undefined); + expect(otherwiseToken).to.not.equal(undefined); + expectScope(tryToken, "keyword.control.exception.powerquery"); + expectScope(otherwiseToken, "keyword.control.exception.powerquery"); + }); + + it("should tokenize try/catch", async () => { + const tokens: TokenInfo[] = await tokenizeLine("try x catch (e) => e"); + const tryToken: TokenInfo | undefined = findToken(tokens, "try"); + const catchToken: TokenInfo | undefined = findToken(tokens, "catch"); + + expect(tryToken).to.not.equal(undefined); + expect(catchToken).to.not.equal(undefined); + expectScope(tryToken, "keyword.control.exception.powerquery"); + expectScope(catchToken, "keyword.control.exception.powerquery"); + }); + }); + + describe("error expression", () => { + it("should tokenize 'error' keyword", async () => { + const tokens: TokenInfo[] = await tokenizeLine('error "Something failed"'); + const errorToken: TokenInfo | undefined = findToken(tokens, "error"); + expect(errorToken).to.not.equal(undefined); + expectScope(errorToken, "keyword.other.powerquery"); + }); + }); + + describe("meta expression", () => { + it("should tokenize 'meta' keyword", async () => { + const tokens: TokenInfo[] = await tokenizeLine('x meta [Documentation.Name = "Test"]'); + const metaToken: TokenInfo | undefined = findToken(tokens, "meta"); + expect(metaToken).to.not.equal(undefined); + expectScope(metaToken, "keyword.other.powerquery"); + }); + }); + + describe("list expression", () => { + it("should tokenize a list 
literal", async () => { + const tokens: TokenInfo[] = await tokenizeLine("{1, 2, 3}"); + const openBrace: TokenInfo | undefined = findToken(tokens, "{"); + const closeBrace: TokenInfo | undefined = findToken(tokens, "}"); + expect(openBrace).to.not.equal(undefined); + expect(closeBrace).to.not.equal(undefined); + expectScope(openBrace, "punctuation.section.braces.begin.powerquery"); + expectScope(closeBrace, "punctuation.section.braces.end.powerquery"); + }); + + it("should tokenize a list range with '..'", async () => { + const tokens: TokenInfo[] = await tokenizeLine("{1..10}"); + const rangeToken: TokenInfo | undefined = findToken(tokens, ".."); + expect(rangeToken).to.not.equal(undefined); + expectScope(rangeToken, "keyword.operator.list.powerquery"); + }); + }); + + describe("record expression", () => { + it("should tokenize a record literal", async () => { + const tokens: TokenInfo[] = await tokenizeLine('[Name = "Alice", Age = 30]'); + const openBracket: TokenInfo | undefined = findToken(tokens, "["); + const closeBracket: TokenInfo | undefined = findToken(tokens, "]"); + expect(openBracket).to.not.equal(undefined); + expect(closeBracket).to.not.equal(undefined); + }); + + it("should scope record content as meta.recordexpression", async () => { + const tokens: TokenInfo[] = await tokenizeLine("[a = 1]"); + + const innerTokens: TokenInfo[] = tokens.filter((t: TokenInfo) => + hasScope(t, "meta.recordexpression.powerquery"), + ); + + expect(innerTokens.length).to.be.greaterThan(0); + }); + }); + + describe("section expression", () => { + it("should tokenize a section declaration", async () => { + const tokens: TokenInfo[] = await tokenizeLine("section MySection;"); + const sectionToken: TokenInfo | undefined = findToken(tokens, "section"); + expect(sectionToken).to.not.equal(undefined); + expectScope(sectionToken, "keyword.powerquery"); + }); + + it("should tokenize shared section member", async () => { + const tokens: TokenInfo[] = await tokenizeLine("shared 
MyFunc = (x) => x;"); + const sharedToken: TokenInfo | undefined = findToken(tokens, "shared"); + expect(sharedToken).to.not.equal(undefined); + expectScope(sharedToken, "keyword.powerquery"); + }); + + it("should tokenize section access with '!'", async () => { + const tokens: TokenInfo[] = await tokenizeLine("Section1!Member1"); + const bangToken: TokenInfo | undefined = findToken(tokens, "!"); + expect(bangToken).to.not.equal(undefined); + expectScope(bangToken, "keyword.operator.sectionaccess.powerquery"); + }); + }); + + describe("real-world expressions", () => { + it("should tokenize a Table.AddColumn call", async () => { + const tokens: TokenInfo[] = await tokenizeLine('Table.AddColumn(Source, "NewCol", each [Value] * 2)'); + + const funcName: TokenInfo | undefined = findToken(tokens, "Table.AddColumn"); + expect(funcName).to.not.equal(undefined); + expectScope(funcName!, "entity.name.powerquery"); + }); + + it("should tokenize a complex let expression", async () => { + const tokens: MultiLineTokenInfo[] = await tokenizeLines( + [ + "let", + ' Source = Csv.Document(File.Contents("data.csv")),', + ' #"Promoted Headers" = Table.PromoteHeaders(Source),', + ' #"Changed Type" = Table.TransformColumnTypes(#"Promoted Headers", {{"Col1", type text}})', + "in", + ' #"Changed Type"', + ].join("\n"), + ); + + // Verify key tokens + const letToken: MultiLineTokenInfo | undefined = tokens.find((t: MultiLineTokenInfo) => t.text === "let"); + + const inToken: MultiLineTokenInfo | undefined = tokens.find((t: MultiLineTokenInfo) => t.text === "in"); + + expect(letToken).to.not.equal(undefined); + expect(inToken).to.not.equal(undefined); + + // Verify dotted identifiers are matched as single tokens + const csvDoc: MultiLineTokenInfo | undefined = tokens.find( + (t: MultiLineTokenInfo) => t.text === "Csv.Document", + ); + + expect(csvDoc).to.not.equal(undefined); + + // Verify quoted identifiers + const quotedIdBegins: MultiLineTokenInfo[] = tokens.filter( + (t: 
MultiLineTokenInfo) => + t.text === '#"' && hasScope(t, "punctuation.definition.quotedidentifier.begin"), + ); + + expect(quotedIdBegins.length).to.be.greaterThan(0); + }); + + it("should tokenize a type definition", async () => { + const tokens: TokenInfo[] = await tokenizeLine("type table [Name = text, Age = number, Active = logical]"); + + const typeToken: TokenInfo | undefined = findToken(tokens, "type"); + const tableToken: TokenInfo | undefined = findToken(tokens, "table"); + expect(typeToken).to.not.equal(undefined); + expect(tableToken).to.not.equal(undefined); + }); + + it("should tokenize nested function with types", async () => { + const tokens: TokenInfo[] = await tokenizeLine( + "let fn = (x as number, optional y as nullable number) as number => x + (y ?? 0) in fn", + ); + + const letToken: TokenInfo | undefined = findToken(tokens, "let"); + const inToken: TokenInfo | undefined = findToken(tokens, "in"); + const arrowToken: TokenInfo | undefined = findToken(tokens, "=>"); + + expect(letToken).to.not.equal(undefined); + expect(inToken).to.not.equal(undefined); + expect(arrowToken).to.not.equal(undefined); + }); + }); +}); diff --git a/server/src/test/grammar/grammarTestHelper.ts b/server/src/test/grammar/grammarTestHelper.ts new file mode 100644 index 00000000..5f41c488 --- /dev/null +++ b/server/src/test/grammar/grammarTestHelper.ts @@ -0,0 +1,137 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+ +import * as fs from "fs"; +import * as oniguruma from "vscode-oniguruma"; +import * as path from "path"; +import * as vsctm from "vscode-textmate"; + +const wasmBin: Buffer = fs.readFileSync( + path.resolve(__dirname, "../../../node_modules/vscode-oniguruma/release/onig.wasm"), +); + +const vscodeOnigurumaLib: Promise<vsctm.IOnigLib> = oniguruma.loadWASM({ data: wasmBin }).then(() => ({ + createOnigScanner: (patterns: string[]) => new oniguruma.OnigScanner(patterns), + createOnigString: (s: string) => new oniguruma.OnigString(s), +})); + +const grammarPath: string = path.resolve(__dirname, "../../../../syntaxes/powerquery.tmLanguage.json"); + +const registry: vsctm.Registry = new vsctm.Registry({ + onigLib: vscodeOnigurumaLib, + // eslint-disable-next-line require-await + loadGrammar: async (scopeName: string): Promise<vsctm.IRawGrammar | null> => { + if (scopeName === "source.powerquery") { + const grammarContent: string = fs.readFileSync(grammarPath, "utf-8"); + + return vsctm.parseRawGrammar(grammarContent, grammarPath); + } + + return null; + }, +}); + +let cachedGrammar: vsctm.IGrammar | null = null; + +export interface TokenInfo { + readonly text: string; + readonly scopes: string[]; +} + +export async function getGrammar(): Promise<vsctm.IGrammar> { + if (cachedGrammar) { + return cachedGrammar; + } + + const grammar: vsctm.IGrammar | null = await registry.loadGrammar("source.powerquery"); + + if (!grammar) { + throw new Error("Failed to load Power Query grammar"); + } + + // eslint-disable-next-line require-atomic-updates + cachedGrammar = grammar; + + return grammar; +} + +export async function tokenizeLine(text: string): Promise<TokenInfo[]> { + const grammar: vsctm.IGrammar = await getGrammar(); + const result: vsctm.ITokenizeLineResult = grammar.tokenizeLine(text, vsctm.INITIAL); + const tokens: TokenInfo[] = []; + + for (const token of result.tokens) { + tokens.push({ + text: text.substring(token.startIndex, token.endIndex), + scopes: token.scopes, + }); + } + + return tokens; +} + +export interface MultiLineTokenInfo 
{ + readonly line: number; + readonly text: string; + readonly scopes: string[]; +} + +export async function tokenizeLines(text: string): Promise<MultiLineTokenInfo[]> { + const grammar: vsctm.IGrammar = await getGrammar(); + const lines: string[] = text.split(/\r?\n/); + const allTokens: MultiLineTokenInfo[] = []; + let ruleStack: vsctm.StateStack = vsctm.INITIAL; + + for (let i: number = 0; i < lines.length; i += 1) { + const line: string = lines[i]; + const result: vsctm.ITokenizeLineResult = grammar.tokenizeLine(line, ruleStack); + + for (const token of result.tokens) { + allTokens.push({ + line: i, + text: line.substring(token.startIndex, token.endIndex), + scopes: token.scopes, + }); + } + + ruleStack = result.ruleStack; + } + + return allTokens; +} + +export function findToken(tokens: TokenInfo[], text: string): TokenInfo | undefined { + return tokens.find((t: TokenInfo) => t.text === text); +} + +export function findTokens(tokens: TokenInfo[], text: string): TokenInfo[] { + return tokens.filter((t: TokenInfo) => t.text === text); +} + +export function hasScope(token: TokenInfo, scope: string): boolean { + return token.scopes.some((s: string) => s.includes(scope)); +} + +export function expectScope(token: TokenInfo | undefined, scope: string): void { + if (!token) { + throw new Error(`Token not found; expected scope "${scope}"`); + } + + if (!hasScope(token, scope)) { + throw new Error( + `Expected scope "${scope}" in token "${token.text}", but found scopes: [${token.scopes.join(", ")}]`, + ); + } +} + +export function expectNoScope(token: TokenInfo | undefined, scope: string): void { + if (!token) { + throw new Error(`Token not found; expected no scope "${scope}"`); + } + + if (hasScope(token, scope)) { + throw new Error( + `Expected no scope "${scope}" in token "${token.text}", but found scopes: [${token.scopes.join(", ")}]`, + ); + } +} diff --git a/server/src/test/grammar/identifiers.test.ts b/server/src/test/grammar/identifiers.test.ts new file mode 100644 index 
00000000..e4702ff4 --- /dev/null +++ b/server/src/test/grammar/identifiers.test.ts @@ -0,0 +1,195 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +import { expect } from "chai"; + +import { expectScope, findToken, hasScope, TokenInfo, tokenizeLine } from "./grammarTestHelper.js"; + +describe("Grammar - Identifiers", () => { + describe("regular identifiers", () => { + it("should tokenize a simple identifier", async () => { + const tokens: TokenInfo[] = await tokenizeLine("myVariable"); + const token: TokenInfo | undefined = findToken(tokens, "myVariable"); + expect(token).to.not.equal(undefined); + expectScope(token, "entity.name.powerquery"); + }); + + it("should tokenize an identifier starting with underscore", async () => { + const tokens: TokenInfo[] = await tokenizeLine("_private"); + const token: TokenInfo | undefined = findToken(tokens, "_private"); + expect(token).to.not.equal(undefined); + expectScope(token, "entity.name.powerquery"); + }); + + it("should tokenize identifiers with digits", async () => { + const tokens: TokenInfo[] = await tokenizeLine("value1"); + const token: TokenInfo | undefined = findToken(tokens, "value1"); + expect(token).to.not.equal(undefined); + expectScope(token, "entity.name.powerquery"); + }); + + it("should tokenize single-character identifiers", async () => { + const tokens: TokenInfo[] = await tokenizeLine("x = 1"); + const token: TokenInfo | undefined = findToken(tokens, "x"); + expect(token).to.not.equal(undefined); + expectScope(token, "entity.name.powerquery"); + }); + }); + + describe("dotted identifiers (M spec: regular-identifier with dot-character)", () => { + // Per M spec: regular-identifier = available-identifier | available-identifier dot-character regular-identifier + // "Table.AddColumn" is a single identifier — the dot is part of the identifier, NOT a member access + + it("should tokenize 'Table.AddColumn' as a single identifier", async () => { + const tokens: TokenInfo[] = 
await tokenizeLine("Table.AddColumn"); + const token: TokenInfo | undefined = findToken(tokens, "Table.AddColumn"); + expect(token).to.not.equal(undefined); + expectScope(token!, "entity.name.powerquery"); + }); + + it("should tokenize 'List.Transform' as a single identifier", async () => { + const tokens: TokenInfo[] = await tokenizeLine("List.Transform"); + const token: TokenInfo | undefined = findToken(tokens, "List.Transform"); + expect(token).to.not.equal(undefined); + expectScope(token!, "entity.name.powerquery"); + }); + + it("should tokenize short dotted identifiers like 'A.BC'", async () => { + const tokens: TokenInfo[] = await tokenizeLine("A.BC"); + const token: TokenInfo | undefined = findToken(tokens, "A.BC"); + expect(token).to.not.equal(undefined); + expectScope(token!, "entity.name.powerquery"); + }); + + it("should tokenize multi-dot identifiers like 'A.B.C'", async () => { + const tokens: TokenInfo[] = await tokenizeLine("A.B.C"); + const token: TokenInfo | undefined = findToken(tokens, "A.B.C"); + expect(token).to.not.equal(undefined); + expectScope(token!, "entity.name.powerquery"); + }); + + it("should not treat the dot as an operator in dotted identifiers", async () => { + const tokens: TokenInfo[] = await tokenizeLine("Table.AddColumn"); + const dotToken: TokenInfo | undefined = findToken(tokens, "."); + + if (dotToken) { + expect.fail("Dot should not be a separate token in 'Table.AddColumn'"); + } + }); + + it("should tokenize dotted identifier in a function call", async () => { + const tokens: TokenInfo[] = await tokenizeLine("Table.AddColumn(tbl, col)"); + const token: TokenInfo | undefined = findToken(tokens, "Table.AddColumn"); + expect(token).to.not.equal(undefined); + expectScope(token!, "entity.name.powerquery"); + }); + }); + + describe("quoted identifiers", () => { + it("should tokenize a quoted identifier with spaces", async () => { + const tokens: TokenInfo[] = await tokenizeLine('#"My Column Name"'); + + // The #" and ending " 
are delimiters, the content may be one or more tokens + const beginToken: TokenInfo | undefined = tokens.find( + (t: TokenInfo) => t.text === '#"' && hasScope(t, "punctuation.definition.quotedidentifier.begin"), + ); + + expect(beginToken).to.not.equal(undefined); + }); + + it("should tokenize a quoted identifier with keywords inside", async () => { + const tokens: TokenInfo[] = await tokenizeLine('#"let in"'); + + const beginToken: TokenInfo | undefined = tokens.find( + (t: TokenInfo) => t.text === '#"' && hasScope(t, "punctuation.definition.quotedidentifier.begin"), + ); + + expect(beginToken).to.not.equal(undefined); + }); + + it("should tokenize a quoted identifier with escaped quotes", async () => { + const tokens: TokenInfo[] = await tokenizeLine('#"say ""hello"""'); + + const escapeToken: TokenInfo | undefined = tokens.find( + (t: TokenInfo) => t.text === '""' && hasScope(t, "constant.character.escape.quote"), + ); + + expect(escapeToken).to.not.equal(undefined); + }); + + it("should scope the whole quoted identifier as entity.name", async () => { + const tokens: TokenInfo[] = await tokenizeLine('#"Column A"'); + + const entityTokens: TokenInfo[] = tokens.filter((t: TokenInfo) => hasScope(t, "entity.name.powerquery")); + + expect(entityTokens.length).to.be.greaterThan(0); + }); + + it("should tokenize a quoted identifier with operators inside", async () => { + const tokens: TokenInfo[] = await tokenizeLine('#"A + B"'); + + const beginToken: TokenInfo | undefined = tokens.find( + (t: TokenInfo) => t.text === '#"' && hasScope(t, "punctuation.definition.quotedidentifier.begin"), + ); + + expect(beginToken).to.not.equal(undefined); + }); + }); + + describe("inclusive identifiers (@)", () => { + // Per M spec: inclusive-identifier-reference = @ identifier + it("should tokenize '@myFunc' with @ as inclusive identifier marker", async () => { + const tokens: TokenInfo[] = await tokenizeLine("@myFunc"); + + // The grammar should recognize @ as part of the identifier 
+ const hasInclusiveScope: boolean = tokens.some( + (t: TokenInfo) => + hasScope(t, "inclusiveidentifier") || hasScope(t, "keyword.operator.inclusiveidentifier"), + ); + + expect(hasInclusiveScope, "@ should be scoped as inclusive identifier").to.equal(true); + }); + + it("should tokenize the identifier part after @", async () => { + const tokens: TokenInfo[] = await tokenizeLine("@myFunc"); + + const identToken: TokenInfo | undefined = tokens.find((t: TokenInfo) => + hasScope(t, "entity.name.powerquery"), + ); + + expect(identToken).to.not.equal(undefined); + }); + }); + + describe("implicit variable (_)", () => { + it("should tokenize standalone '_' as implicit variable", async () => { + const tokens: TokenInfo[] = await tokenizeLine("each _ + 1"); + const token: TokenInfo | undefined = findToken(tokens, "_"); + expect(token).to.not.equal(undefined); + expectScope(token, "keyword.operator.implicitvariable.powerquery"); + }); + + it("should not treat '_' inside an identifier as implicit variable", async () => { + const tokens: TokenInfo[] = await tokenizeLine("my_var"); + const token: TokenInfo | undefined = findToken(tokens, "my_var"); + expect(token).to.not.equal(undefined); + expectScope(token, "entity.name.powerquery"); + }); + }); + + describe("intrinsic variables", () => { + it("should tokenize '#sections' as an intrinsic variable", async () => { + const tokens: TokenInfo[] = await tokenizeLine("#sections"); + const token: TokenInfo | undefined = findToken(tokens, "#sections"); + expect(token).to.not.equal(undefined); + expectScope(token, "constant.language.intrinsicvariable.powerquery"); + }); + + it("should tokenize '#shared' as an intrinsic variable", async () => { + const tokens: TokenInfo[] = await tokenizeLine("#shared"); + const token: TokenInfo | undefined = findToken(tokens, "#shared"); + expect(token).to.not.equal(undefined); + expectScope(token, "constant.language.intrinsicvariable.powerquery"); + }); + }); +}); diff --git 
a/server/src/test/grammar/keywords.test.ts b/server/src/test/grammar/keywords.test.ts new file mode 100644 index 00000000..b391e21b --- /dev/null +++ b/server/src/test/grammar/keywords.test.ts @@ -0,0 +1,143 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. + +import { expect } from "chai"; + +import { expectScope, findToken, TokenInfo, tokenizeLine } from "./grammarTestHelper.js"; + +describe("Grammar - Keywords", () => { + describe("logical operator keywords", () => { + for (const keyword of ["and", "or", "not"]) { + it(`should tokenize '${keyword}' as a logical operator keyword`, async () => { + const tokens: TokenInfo[] = await tokenizeLine(`x ${keyword} y`); + const token: TokenInfo | undefined = findToken(tokens, keyword); + expect(token).to.not.equal(undefined); + expectScope(token, "keyword.operator.word.logical.powerquery"); + }); + } + }); + + describe("conditional keywords", () => { + for (const keyword of ["if", "then", "else"]) { + it(`should tokenize '${keyword}' as a conditional keyword`, async () => { + const tokens: TokenInfo[] = await tokenizeLine(`${keyword} x`); + const token: TokenInfo | undefined = findToken(tokens, keyword); + expect(token).to.not.equal(undefined); + expectScope(token, "keyword.control.conditional.powerquery"); + }); + } + }); + + describe("exception keywords", () => { + for (const keyword of ["try", "catch", "otherwise"]) { + it(`should tokenize '${keyword}' as an exception keyword`, async () => { + const tokens: TokenInfo[] = await tokenizeLine(`${keyword} x`); + const token: TokenInfo | undefined = findToken(tokens, keyword); + expect(token).to.not.equal(undefined); + expectScope(token, "keyword.control.exception.powerquery"); + }); + } + }); + + describe("general keywords", () => { + for (const keyword of ["as", "each", "in", "is", "let", "meta", "type", "error"]) { + it(`should tokenize '${keyword}' as a general keyword`, async () => { + const tokens: TokenInfo[] = await 
tokenizeLine(`${keyword} x`); + const token: TokenInfo | undefined = findToken(tokens, keyword); + expect(token).to.not.equal(undefined); + expectScope(token, "keyword.other.powerquery"); + }); + } + }); + + describe("section keywords", () => { + for (const keyword of ["section", "shared"]) { + it(`should tokenize '${keyword}' as a section keyword`, async () => { + const tokens: TokenInfo[] = await tokenizeLine(`${keyword} MySection`); + const token: TokenInfo | undefined = findToken(tokens, keyword); + expect(token).to.not.equal(undefined); + expectScope(token, "keyword.powerquery"); + }); + } + }); + + describe("keywords should not match inside identifiers", () => { + it("should not tokenize 'letter' as containing 'let'", async () => { + const tokens: TokenInfo[] = await tokenizeLine("letter"); + const token: TokenInfo | undefined = findToken(tokens, "letter"); + expect(token).to.not.equal(undefined); + expectScope(token!, "entity.name.powerquery"); + }); + + it("should not tokenize 'notify' as containing 'not'", async () => { + const tokens: TokenInfo[] = await tokenizeLine("notify"); + const token: TokenInfo | undefined = findToken(tokens, "notify"); + expect(token).to.not.equal(undefined); + expectScope(token!, "entity.name.powerquery"); + }); + + it("should not tokenize 'android' as containing 'and'", async () => { + const tokens: TokenInfo[] = await tokenizeLine("android"); + const token: TokenInfo | undefined = findToken(tokens, "android"); + expect(token).to.not.equal(undefined); + expectScope(token!, "entity.name.powerquery"); + }); + }); + + describe("#keyword forms (M spec keywords)", () => { + // Per M spec, these are all keywords: + // #binary #date #datetime #datetimezone #duration #infinity #nan #sections #shared #table #time + + it("should tokenize '#infinity' as a numeric constant", async () => { + const tokens: TokenInfo[] = await tokenizeLine("#infinity"); + const token: TokenInfo | undefined = findToken(tokens, "#infinity"); + 
expect(token).to.not.equal(undefined); + expectScope(token, "constant.language.numeric.float.powerquery"); + }); + + it("should tokenize '#nan' as a numeric constant", async () => { + const tokens: TokenInfo[] = await tokenizeLine("#nan"); + const token: TokenInfo | undefined = findToken(tokens, "#nan"); + expect(token).to.not.equal(undefined); + expectScope(token, "constant.language.numeric.float.powerquery"); + }); + + it("should tokenize '#sections' as an intrinsic variable", async () => { + const tokens: TokenInfo[] = await tokenizeLine("#sections"); + const token: TokenInfo | undefined = findToken(tokens, "#sections"); + expect(token).to.not.equal(undefined); + expectScope(token, "constant.language.intrinsicvariable.powerquery"); + }); + + it("should tokenize '#shared' as an intrinsic variable", async () => { + const tokens: TokenInfo[] = await tokenizeLine("#shared"); + const token: TokenInfo | undefined = findToken(tokens, "#shared"); + expect(token).to.not.equal(undefined); + expectScope(token, "constant.language.intrinsicvariable.powerquery"); + }); + + // These #keyword forms are in the M spec but may not be in the grammar. + // Tests document the current behavior. 
+ for (const keyword of ["#binary", "#date", "#datetime", "#datetimezone", "#duration", "#table", "#time"]) { + it(`should tokenize '${keyword}' as a keyword (M spec requirement)`, async () => { + const tokens: TokenInfo[] = await tokenizeLine(`${keyword} {}`); + // Check if ANY token contains this text and has keyword-like scope + const token: TokenInfo | undefined = findToken(tokens, keyword); + + if (token) { + // Token found — verify it has some meaningful scope + const hasKeywordScope: boolean = token.scopes.some( + (s: string) => s.includes("keyword") || s.includes("storage") || s.includes("support"), + ); + + expect(hasKeywordScope, `'${keyword}' should have a keyword or storage scope`).to.equal(true); + } else { + // Token not found as a unit — this is a gap in the grammar + // Verify the text is at least partially tokenized + const allText: string = tokens.map((t: TokenInfo) => t.text).join(""); + expect(allText).to.include(keyword.replace("#", "")); + } + }); + } + }); +}); diff --git a/server/src/test/grammar/literals.test.ts b/server/src/test/grammar/literals.test.ts new file mode 100644 index 00000000..58f81cf3 --- /dev/null +++ b/server/src/test/grammar/literals.test.ts @@ -0,0 +1,268 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+ +import { expect } from "chai"; + +import { + expectScope, + findToken, + hasScope, + MultiLineTokenInfo, + TokenInfo, + tokenizeLine, + tokenizeLines, +} from "./grammarTestHelper.js"; + +describe("Grammar - Literals", () => { + describe("string literals", () => { + it("should tokenize a simple string", async () => { + const tokens: TokenInfo[] = await tokenizeLine('"Hello, world"'); + + const stringTokens: TokenInfo[] = tokens.filter((t: TokenInfo) => + hasScope(t, "string.quoted.double.powerquery"), + ); + + expect(stringTokens.length).to.be.greaterThan(0); + }); + + it("should tokenize string delimiters", async () => { + const tokens: TokenInfo[] = await tokenizeLine('"text"'); + + const beginToken: TokenInfo | undefined = tokens.find( + (t: TokenInfo) => t.text === '"' && hasScope(t, "punctuation.definition.string.begin.powerquery"), + ); + + expect(beginToken).to.not.equal(undefined); + }); + + it("should tokenize escaped double quotes inside strings", async () => { + const tokens: TokenInfo[] = await tokenizeLine('"say ""hello""!"'); + + const escapeTokens: TokenInfo[] = tokens.filter( + (t: TokenInfo) => t.text === '""' && hasScope(t, "constant.character.escape.quote.powerquery"), + ); + + expect(escapeTokens.length).to.be.greaterThan(0); + }); + + it("should tokenize empty strings", async () => { + const tokens: TokenInfo[] = await tokenizeLine('""'); + + const stringTokens: TokenInfo[] = tokens.filter((t: TokenInfo) => + hasScope(t, "string.quoted.double.powerquery"), + ); + + expect(stringTokens.length).to.be.greaterThan(0); + }); + + it("should tokenize escape sequences in strings", async () => { + const tokens: TokenInfo[] = await tokenizeLine('"Hello#(cr,lf)World"'); + + const escapeToken: TokenInfo | undefined = tokens.find((t: TokenInfo) => + hasScope(t, "constant.character.escapesequence.powerquery"), + ); + + expect(escapeToken).to.not.equal(undefined); + }); + + it("should tokenize unicode escape sequences", async () => { + const tokens: 
TokenInfo[] = await tokenizeLine('"#(000D)"'); + + const escapeToken: TokenInfo | undefined = tokens.find((t: TokenInfo) => + hasScope(t, "constant.character.escapesequence.powerquery"), + ); + + expect(escapeToken).to.not.equal(undefined); + }); + + it("should tokenize # escape in strings", async () => { + const tokens: TokenInfo[] = await tokenizeLine('"#(#)"'); + + const escapeToken: TokenInfo | undefined = tokens.find((t: TokenInfo) => + hasScope(t, "constant.character.escapesequence.powerquery"), + ); + + expect(escapeToken).to.not.equal(undefined); + }); + + it("should tokenize tab escape", async () => { + const tokens: TokenInfo[] = await tokenizeLine('"#(tab)"'); + + const escapeToken: TokenInfo | undefined = tokens.find((t: TokenInfo) => + hasScope(t, "constant.character.escapesequence.powerquery"), + ); + + expect(escapeToken).to.not.equal(undefined); + }); + }); + + describe("integer literals", () => { + it("should tokenize a simple integer", async () => { + const tokens: TokenInfo[] = await tokenizeLine("42"); + const token: TokenInfo | undefined = findToken(tokens, "42"); + expect(token).to.not.equal(undefined); + expectScope(token, "constant.numeric.integer.powerquery"); + }); + + it("should tokenize zero", async () => { + const tokens: TokenInfo[] = await tokenizeLine("0"); + const token: TokenInfo | undefined = findToken(tokens, "0"); + expect(token).to.not.equal(undefined); + expectScope(token, "constant.numeric"); + }); + + it("should tokenize multi-digit integers", async () => { + const tokens: TokenInfo[] = await tokenizeLine("12345"); + const token: TokenInfo | undefined = findToken(tokens, "12345"); + expect(token).to.not.equal(undefined); + expectScope(token, "constant.numeric"); + }); + }); + + describe("hexadecimal literals", () => { + it("should tokenize 0xFF", async () => { + const tokens: TokenInfo[] = await tokenizeLine("0xFF"); + const token: TokenInfo | undefined = findToken(tokens, "0xFF"); + expect(token).to.not.equal(undefined); + 
expectScope(token, "constant.numeric.integer.hexadecimal.powerquery"); + }); + + it("should tokenize 0x with uppercase X", async () => { + const tokens: TokenInfo[] = await tokenizeLine("0XAB"); + const token: TokenInfo | undefined = findToken(tokens, "0XAB"); + expect(token).to.not.equal(undefined); + expectScope(token, "constant.numeric.integer.hexadecimal.powerquery"); + }); + + it("should tokenize long hex values", async () => { + const tokens: TokenInfo[] = await tokenizeLine("0xDEADBEEF"); + const token: TokenInfo | undefined = findToken(tokens, "0xDEADBEEF"); + expect(token).to.not.equal(undefined); + expectScope(token, "constant.numeric.integer.hexadecimal.powerquery"); + }); + }); + + describe("decimal (floating point) literals", () => { + it("should tokenize a decimal number", async () => { + const tokens: TokenInfo[] = await tokenizeLine("3.14"); + const token: TokenInfo | undefined = findToken(tokens, "3.14"); + expect(token).to.not.equal(undefined); + expectScope(token, "constant.numeric.decimal.powerquery"); + }); + + it("should tokenize a decimal starting with dot", async () => { + // Per M spec: decimal-number-literal includes .decimal-digits + const tokens: TokenInfo[] = await tokenizeLine(".5"); + const token: TokenInfo | undefined = findToken(tokens, ".5"); + expect(token).to.not.equal(undefined); + expectScope(token, "constant.numeric.decimal.powerquery"); + }); + + it("should tokenize a decimal with leading zero", async () => { + const tokens: TokenInfo[] = await tokenizeLine("0.123"); + const token: TokenInfo | undefined = findToken(tokens, "0.123"); + expect(token).to.not.equal(undefined); + expectScope(token, "constant.numeric.decimal.powerquery"); + }); + }); + + describe("floating point literals with exponents", () => { + it("should tokenize a float with exponent", async () => { + const tokens: TokenInfo[] = await tokenizeLine("1e10"); + const token: TokenInfo | undefined = findToken(tokens, "1e10"); + 
expect(token).to.not.equal(undefined); + expectScope(token, "constant.numeric.float.powerquery"); + }); + + it("should tokenize a float with uppercase E", async () => { + const tokens: TokenInfo[] = await tokenizeLine("1E10"); + const token: TokenInfo | undefined = findToken(tokens, "1E10"); + expect(token).to.not.equal(undefined); + expectScope(token, "constant.numeric.float.powerquery"); + }); + + it("should tokenize a float with positive exponent", async () => { + const tokens: TokenInfo[] = await tokenizeLine("1e+5"); + const token: TokenInfo | undefined = findToken(tokens, "1e+5"); + expect(token).to.not.equal(undefined); + expectScope(token, "constant.numeric.float.powerquery"); + }); + + it("should tokenize a float with negative exponent", async () => { + const tokens: TokenInfo[] = await tokenizeLine("1e-5"); + const token: TokenInfo | undefined = findToken(tokens, "1e-5"); + expect(token).to.not.equal(undefined); + expectScope(token, "constant.numeric.float.powerquery"); + }); + + it("should tokenize a float with decimal and exponent", async () => { + const tokens: TokenInfo[] = await tokenizeLine("3.14e10"); + const token: TokenInfo | undefined = findToken(tokens, "3.14e10"); + expect(token).to.not.equal(undefined); + expectScope(token, "constant.numeric.float.powerquery"); + }); + }); + + describe("numeric constants", () => { + it("should tokenize #infinity", async () => { + const tokens: TokenInfo[] = await tokenizeLine("#infinity"); + const token: TokenInfo | undefined = findToken(tokens, "#infinity"); + expect(token).to.not.equal(undefined); + expectScope(token, "constant.language.numeric.float.powerquery"); + }); + + it("should tokenize #nan", async () => { + const tokens: TokenInfo[] = await tokenizeLine("#nan"); + const token: TokenInfo | undefined = findToken(tokens, "#nan"); + expect(token).to.not.equal(undefined); + expectScope(token, "constant.language.numeric.float.powerquery"); + }); + }); + + describe("logical literals", () => { + it("should 
tokenize 'true'", async () => { + const tokens: TokenInfo[] = await tokenizeLine("true"); + const token: TokenInfo | undefined = findToken(tokens, "true"); + expect(token).to.not.equal(undefined); + expectScope(token, "constant.language.logical.powerquery"); + }); + + it("should tokenize 'false'", async () => { + const tokens: TokenInfo[] = await tokenizeLine("false"); + const token: TokenInfo | undefined = findToken(tokens, "false"); + expect(token).to.not.equal(undefined); + expectScope(token, "constant.language.logical.powerquery"); + }); + }); + + describe("null literal", () => { + it("should tokenize 'null'", async () => { + const tokens: TokenInfo[] = await tokenizeLine("null"); + const token: TokenInfo | undefined = findToken(tokens, "null"); + expect(token).to.not.equal(undefined); + expectScope(token, "constant.language.null.powerquery"); + }); + }); + + describe("verbatim literals (M spec)", () => { + // Per M spec: verbatim-literal = #!" text-literal-characters_opt " + it("should tokenize '#!\"text\"' as a verbatim literal", async () => { + const tokens: TokenInfo[] = await tokenizeLine('#!"some error text"'); + // Check if the grammar handles verbatim literals at all + const allText: string = tokens.map((t: TokenInfo) => t.text).join(""); + expect(allText).to.include("some error text"); + }); + }); + + describe("multiline strings", () => { + it("should tokenize a string spanning multiple lines", async () => { + const tokens: MultiLineTokenInfo[] = await tokenizeLines('"Hello\nWorld"'); + + const stringTokens: MultiLineTokenInfo[] = tokens.filter((t: MultiLineTokenInfo) => + hasScope(t, "string.quoted.double.powerquery"), + ); + + expect(stringTokens.length).to.be.greaterThan(0); + }); + }); +}); diff --git a/server/src/test/grammar/operators.test.ts b/server/src/test/grammar/operators.test.ts new file mode 100644 index 00000000..b215ae4f --- /dev/null +++ b/server/src/test/grammar/operators.test.ts @@ -0,0 +1,189 @@ +// Copyright (c) Microsoft 
Corporation. +// Licensed under the MIT license. + +import { expect } from "chai"; + +import { expectScope, findToken, hasScope, TokenInfo, tokenizeLine } from "./grammarTestHelper.js"; + +describe("Grammar - Operators", () => { + describe("assignment/comparison operator", () => { + it("should tokenize '=' as assignment-or-comparison", async () => { + const tokens: TokenInfo[] = await tokenizeLine("x = 1"); + const token: TokenInfo | undefined = findToken(tokens, "="); + expect(token).to.not.equal(undefined); + expectScope(token, "keyword.operator.assignment-or-comparison.powerquery"); + }); + }); + + describe("comparison operators", () => { + for (const op of ["<>", "<", ">", "<=", ">="]) { + it(`should tokenize '${op}' as a comparison operator`, async () => { + const tokens: TokenInfo[] = await tokenizeLine(`x ${op} y`); + const token: TokenInfo | undefined = findToken(tokens, op); + expect(token).to.not.equal(undefined); + expectScope(token, "keyword.operator.comparison.powerquery"); + }); + } + }); + + describe("arithmetic operators", () => { + for (const op of ["+", "-", "*", "/"]) { + it(`should tokenize '${op}' as an arithmetic operator`, async () => { + const tokens: TokenInfo[] = await tokenizeLine(`x ${op} y`); + const token: TokenInfo | undefined = findToken(tokens, op); + expect(token).to.not.equal(undefined); + expectScope(token, "keyword.operator.arithmetic.powerquery"); + }); + } + }); + + describe("combination operator", () => { + it("should tokenize '&' as a combination operator", async () => { + const tokens: TokenInfo[] = await tokenizeLine('"a" & "b"'); + const token: TokenInfo | undefined = findToken(tokens, "&"); + expect(token).to.not.equal(undefined); + expectScope(token, "keyword.operator.combination.powerquery"); + }); + }); + + describe("function operator", () => { + it("should tokenize '=>' as a function operator", async () => { + const tokens: TokenInfo[] = await tokenizeLine("(x) => x + 1"); + const token: TokenInfo | undefined = 
findToken(tokens, "=>"); + expect(token).to.not.equal(undefined); + expectScope(token, "keyword.operator.function.powerquery"); + }); + }); + + describe("section access operator", () => { + it("should tokenize '!' as section access operator", async () => { + const tokens: TokenInfo[] = await tokenizeLine("Section1!Value1"); + const token: TokenInfo | undefined = findToken(tokens, "!"); + expect(token).to.not.equal(undefined); + expectScope(token, "keyword.operator.sectionaccess.powerquery"); + }); + }); + + describe("optional operator", () => { + it("should tokenize '?' as optional operator", async () => { + const tokens: TokenInfo[] = await tokenizeLine("record[field]?"); + const token: TokenInfo | undefined = findToken(tokens, "?"); + expect(token).to.not.equal(undefined); + expectScope(token, "keyword.operator.optional.powerquery"); + }); + }); + + describe("null coalescing operator (M spec: ??)", () => { + it("should tokenize '??' as a null coalescing operator", async () => { + const tokens: TokenInfo[] = await tokenizeLine("x ?? 0"); + const token: TokenInfo | undefined = findToken(tokens, "??"); + expect(token).to.not.equal(undefined); + expectScope(token!, "keyword.operator.nullcoalescing.powerquery"); + }); + }); + + describe("dot operators", () => { + it("should tokenize '..' as list operator", async () => { + const tokens: TokenInfo[] = await tokenizeLine("{1..10}"); + const token: TokenInfo | undefined = findToken(tokens, ".."); + expect(token).to.not.equal(undefined); + expectScope(token, "keyword.operator.list.powerquery"); + }); + + it("should tokenize '...' as ellipsis operator", async () => { + const tokens: TokenInfo[] = await tokenizeLine("..."); + const token: TokenInfo | undefined = findToken(tokens, "..."); + expect(token).to.not.equal(undefined); + expectScope(token, "keyword.operator.ellipsis.powerquery"); + }); + + it("should not confuse '...' with '..'", async () => { + const tokens: TokenInfo[] = await tokenizeLine("..."); + // "..." 
should be matched as ellipsis, not as ".." followed by "." + const ellipsisToken: TokenInfo | undefined = findToken(tokens, "..."); + expect(ellipsisToken).to.not.equal(undefined); + }); + }); + + describe("punctuators", () => { + it("should tokenize ',' as separator", async () => { + const tokens: TokenInfo[] = await tokenizeLine("a, b"); + const token: TokenInfo | undefined = findToken(tokens, ","); + expect(token).to.not.equal(undefined); + expectScope(token, "punctuation.separator.powerquery"); + }); + + it("should tokenize '(' as begin parens", async () => { + const tokens: TokenInfo[] = await tokenizeLine("(x)"); + const token: TokenInfo | undefined = findToken(tokens, "("); + expect(token).to.not.equal(undefined); + expectScope(token, "punctuation.section.parens.begin.powerquery"); + }); + + it("should tokenize ')' as end parens", async () => { + const tokens: TokenInfo[] = await tokenizeLine("(x)"); + const token: TokenInfo | undefined = findToken(tokens, ")"); + expect(token).to.not.equal(undefined); + expectScope(token, "punctuation.section.parens.end.powerquery"); + }); + + it("should tokenize '{' as begin braces", async () => { + const tokens: TokenInfo[] = await tokenizeLine("{1, 2}"); + const token: TokenInfo | undefined = findToken(tokens, "{"); + expect(token).to.not.equal(undefined); + expectScope(token, "punctuation.section.braces.begin.powerquery"); + }); + + it("should tokenize '}' as end braces", async () => { + const tokens: TokenInfo[] = await tokenizeLine("{1, 2}"); + const token: TokenInfo | undefined = findToken(tokens, "}"); + expect(token).to.not.equal(undefined); + expectScope(token, "punctuation.section.braces.end.powerquery"); + }); + + it("should tokenize '[' as begin brackets (record)", async () => { + const tokens: TokenInfo[] = await tokenizeLine("[a = 1]"); + const token: TokenInfo | undefined = findToken(tokens, "["); + expect(token).to.not.equal(undefined); + expectScope(token, "punctuation.section.brackets.begin.powerquery"); 
+ }); + + it("should tokenize ']' as end brackets (record)", async () => { + const tokens: TokenInfo[] = await tokenizeLine("[a = 1]"); + const token: TokenInfo | undefined = findToken(tokens, "]"); + expect(token).to.not.equal(undefined); + expectScope(token, "punctuation.section.brackets.end.powerquery"); + }); + + it("should tokenize ';' as a terminator", async () => { + const tokens: TokenInfo[] = await tokenizeLine("section MySection;"); + const token: TokenInfo | undefined = findToken(tokens, ";"); + expect(token).to.not.equal(undefined); + expectScope(token!, "punctuation.terminator.powerquery"); + }); + }); + + describe("record expression scoping", () => { + it("should scope record contents as meta.recordexpression", async () => { + const tokens: TokenInfo[] = await tokenizeLine("[a = 1, b = 2]"); + + const recordTokens: TokenInfo[] = tokens.filter((t: TokenInfo) => + hasScope(t, "meta.recordexpression.powerquery"), + ); + + expect(recordTokens.length).to.be.greaterThan(0); + }); + + it("should handle nested records", async () => { + const tokens: TokenInfo[] = await tokenizeLine("[a = [b = 1]]"); + + // Inner content should have nested recordexpression scope + const innerTokens: TokenInfo[] = tokens.filter( + (t: TokenInfo) => + t.scopes.filter((s: string) => s.includes("meta.recordexpression.powerquery")).length >= 2, + ); + + expect(innerTokens.length, "inner record content should have nested record scopes").to.be.greaterThan(0); + }); + }); +}); diff --git a/server/src/test/grammar/types.test.ts b/server/src/test/grammar/types.test.ts new file mode 100644 index 00000000..658305c7 --- /dev/null +++ b/server/src/test/grammar/types.test.ts @@ -0,0 +1,110 @@ +// Copyright (c) Microsoft Corporation. +// Licensed under the MIT license. 
+ +import { expect } from "chai"; + +import { expectScope, findToken, hasScope, TokenInfo, tokenizeLine } from "./grammarTestHelper.js"; + +describe("Grammar - Types", () => { + describe("primitive types", () => { + const primitiveTypes: string[] = [ + "action", + "any", + "anynonnull", + "binary", + "date", + "datetime", + "datetimezone", + "duration", + "function", + "list", + "logical", + "none", + "null", + "number", + "record", + "table", + "text", + "time", + "type", + ]; + + for (const typeName of primitiveTypes) { + it(`should tokenize '${typeName}' as a storage type`, async () => { + // Use in a type context to avoid keyword conflicts + const tokens: TokenInfo[] = await tokenizeLine(`x as ${typeName}`); + const token: TokenInfo | undefined = findToken(tokens, typeName); + expect(token).to.not.equal(undefined); + + // Some of these ('null', 'type') might match keyword rules instead of type rules + const hasTypeOrKeywordScope: boolean = + hasScope(token!, "storage.type.powerquery") || + hasScope(token!, "constant.language.null.powerquery") || + hasScope(token!, "keyword.other.powerquery"); + + expect( + hasTypeOrKeywordScope, + `'${typeName}' should have a type or keyword scope, got: [${token!.scopes.join(", ")}]`, + ).to.equal(true); + }); + } + }); + + describe("type modifiers", () => { + it("should tokenize 'optional' as a storage modifier", async () => { + const tokens: TokenInfo[] = await tokenizeLine("optional x as text"); + const token: TokenInfo | undefined = findToken(tokens, "optional"); + expect(token).to.not.equal(undefined); + expectScope(token, "storage.modifier.powerquery"); + }); + + it("should tokenize 'nullable' as a storage modifier", async () => { + const tokens: TokenInfo[] = await tokenizeLine("x as nullable text"); + const token: TokenInfo | undefined = findToken(tokens, "nullable"); + expect(token).to.not.equal(undefined); + expectScope(token, "storage.modifier.powerquery"); + }); + }); + + describe("type in context", () => { + 
it("should tokenize type assertion with 'as'", async () => { + const tokens: TokenInfo[] = await tokenizeLine("x as number"); + const asToken: TokenInfo | undefined = findToken(tokens, "as"); + const numberToken: TokenInfo | undefined = findToken(tokens, "number"); + expect(asToken).to.not.equal(undefined); + expect(numberToken).to.not.equal(undefined); + expectScope(asToken, "keyword.other.powerquery"); + expectScope(numberToken, "storage.type.powerquery"); + }); + + it("should tokenize type check with 'is'", async () => { + const tokens: TokenInfo[] = await tokenizeLine("x is number"); + const isToken: TokenInfo | undefined = findToken(tokens, "is"); + const numberToken: TokenInfo | undefined = findToken(tokens, "number"); + expect(isToken).to.not.equal(undefined); + expect(numberToken).to.not.equal(undefined); + expectScope(isToken, "keyword.other.powerquery"); + expectScope(numberToken, "storage.type.powerquery"); + }); + + it("should tokenize nullable type", async () => { + const tokens: TokenInfo[] = await tokenizeLine("x as nullable number"); + const nullableToken: TokenInfo | undefined = findToken(tokens, "nullable"); + const numberToken: TokenInfo | undefined = findToken(tokens, "number"); + expect(nullableToken).to.not.equal(undefined); + expect(numberToken).to.not.equal(undefined); + expectScope(nullableToken, "storage.modifier.powerquery"); + expectScope(numberToken, "storage.type.powerquery"); + }); + + it("should tokenize optional parameter type", async () => { + const tokens: TokenInfo[] = await tokenizeLine("optional x as text"); + const optionalToken: TokenInfo | undefined = findToken(tokens, "optional"); + const textToken: TokenInfo | undefined = findToken(tokens, "text"); + expect(optionalToken).to.not.equal(undefined); + expect(textToken).to.not.equal(undefined); + expectScope(optionalToken, "storage.modifier.powerquery"); + expectScope(textToken, "storage.type.powerquery"); + }); + }); +}); diff --git a/server/src/test/standardLibrary.test.ts 
b/server/src/test/standardLibrary.test.ts index a27c5a8c..5ba20f06 100644 --- a/server/src/test/standardLibrary.test.ts +++ b/server/src/test/standardLibrary.test.ts @@ -253,3 +253,66 @@ describe(`setLibrarySymbols`, () => { expect(ExternalLibraryUtils.getSymbols().length).to.equal(0, "expected 0 libraries"); }); }); + +describe(`ExternalLibraryUtils overlap detection`, () => { + const symbolA: PQLS.LibrarySymbol.LibrarySymbol[] = JSON.parse(additionalSymbolJsonStr); + + afterEach(() => { + ExternalLibraryUtils.removeLibraries(ExternalLibraryUtils.getRegisteredModuleNames() as string[]); + }); + + it(`getRegisteredModuleNames returns all registered module names`, () => { + ExternalLibraryUtils.addLibaries(new Map([["ModuleA", symbolA]])); + ExternalLibraryUtils.addLibaries(new Map([["ModuleB", symbolA]])); + + const names: ReadonlyArray<string> = ExternalLibraryUtils.getRegisteredModuleNames(); + expect(names).to.have.lengthOf(2); + expect(names).to.include("ModuleA"); + expect(names).to.include("ModuleB"); + }); + + it(`getOverlappingSymbols returns empty when no overlaps`, () => { + const uniqueSymbol: PQLS.LibrarySymbol.LibrarySymbol[] = [{ ...symbolA[0], name: "UniqueConnector.Contents" }]; + + ExternalLibraryUtils.addLibaries(new Map([["ModuleA", symbolA]])); + ExternalLibraryUtils.addLibaries(new Map([["ModuleB", uniqueSymbol]])); + + const overlaps: ReadonlyMap<string, ReadonlyArray<string>> = ExternalLibraryUtils.getOverlappingSymbols(); + expect(overlaps.size).to.equal(0); + }); + + it(`getOverlappingSymbols detects same symbol registered by different modules`, () => { + ExternalLibraryUtils.addLibaries(new Map([["folderA::ConnectorX", symbolA]])); + ExternalLibraryUtils.addLibaries(new Map([["folderB::ConnectorX", symbolA]])); + + const overlaps: ReadonlyMap<string, ReadonlyArray<string>> = ExternalLibraryUtils.getOverlappingSymbols(); + expect(overlaps.size).to.equal(1); + + const modules: ReadonlyArray<string> | undefined = overlaps.get("ExtensionTest.Contents"); + assert(modules !== undefined, "expected overlapping entry for
ExtensionTest.Contents"); + expect(modules).to.have.lengthOf(2); + expect(modules).to.include("folderA::ConnectorX"); + expect(modules).to.include("folderB::ConnectorX"); + }); + + it(`getOverlappingSymbols does not flag same module overwriting itself`, () => { + ExternalLibraryUtils.addLibaries(new Map([["ModuleA", symbolA]])); + // Overwrite with same key — still only one module + ExternalLibraryUtils.addLibaries(new Map([["ModuleA", symbolA]])); + + const overlaps: ReadonlyMap<string, ReadonlyArray<string>> = ExternalLibraryUtils.getOverlappingSymbols(); + expect(overlaps.size).to.equal(0); + }); + + it(`removeLibraries clears overlap`, () => { + ExternalLibraryUtils.addLibaries(new Map([["folderA::ConnectorX", symbolA]])); + ExternalLibraryUtils.addLibaries(new Map([["folderB::ConnectorX", symbolA]])); + + expect(ExternalLibraryUtils.getOverlappingSymbols().size).to.equal(1); + + ExternalLibraryUtils.removeLibraries(["folderA::ConnectorX"]); + + expect(ExternalLibraryUtils.getOverlappingSymbols().size).to.equal(0); + expect(ExternalLibraryUtils.getRegisteredModuleNames()).to.have.lengthOf(1); + }); +}); diff --git a/server/tsconfig.json b/server/tsconfig.json index 792d6f1f..a6b6682a 100644 --- a/server/tsconfig.json +++ b/server/tsconfig.json @@ -5,5 +5,10 @@ "rootDir": "src" }, "include": ["src/**/*.ts"], - "exclude": ["node_modules"] + "exclude": ["node_modules"], + "ts-node": { + "compilerOptions": { + "skipLibCheck": true + } + } } diff --git a/syntaxes/powerquery.tmLanguage.json b/syntaxes/powerquery.tmLanguage.json index f79f6095..ad2e61ba 100644 --- a/syntaxes/powerquery.tmLanguage.json +++ b/syntaxes/powerquery.tmLanguage.json @@ -243,7 +243,7 @@ } }, "Identifier": { - "match": "(?x:(?)|(=)|(<>|<|>|<=|>=)|(&)|(\\+|-|\\*|\\/)|(!)|(\\?)", + "match": "(=>)|(=)|(<>|<=|>=|<|>)|(&)|(\\+|-|\\*|\\/)|(!)|(\\?\\?)|(\\?)", "captures": { "1": { "name": "keyword.operator.function.powerquery" @@ -275,6 +275,9 @@ "name": "keyword.operator.sectionaccess.powerquery" }, "7": { + "name":
"keyword.operator.nullcoalescing.powerquery" + }, + "8": { "name": "keyword.operator.optional.powerquery" } } @@ -311,21 +314,24 @@ "contentName": "meta.recordexpression.powerquery" }, "Punctuation": { - "match": "(,)|(\\()|(\\))|({)|(})", + "match": "(;)|(,)|(\\()|(\\))|({)|(})", "captures": { "1": { - "name": "punctuation.separator.powerquery" + "name": "punctuation.terminator.powerquery" }, "2": { - "name": "punctuation.section.parens.begin.powerquery" + "name": "punctuation.separator.powerquery" }, "3": { - "name": "punctuation.section.parens.end.powerquery" + "name": "punctuation.section.parens.begin.powerquery" }, "4": { - "name": "punctuation.section.braces.begin.powerquery" + "name": "punctuation.section.parens.end.powerquery" }, "5": { + "name": "punctuation.section.braces.begin.powerquery" + }, + "6": { "name": "punctuation.section.braces.end.powerquery" } }