diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 000000000..71297651c --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,55 @@ +{ + "name": "CLEAR Compiler", + + // Microsoft's Ruby 3.2 base image — same minor as CI + // (.github/workflows/ci.yml's RUBY_VERSION). + "image": "mcr.microsoft.com/devcontainers/ruby:1-3.2", + + // Node + npm for building the in-repo VS Code extension + // (.vscode/extensions/cheat-lang). Plus git so the workspace + // can interact with origin. + "features": { + "ghcr.io/devcontainers/features/node:1": { "version": "22" }, + "ghcr.io/devcontainers/features/git:1": {} + }, + + // Runs once after the container is created. See setup.sh — installs + // gems, downloads the pinned Zig toolchain, and compiles the VS Code + // extension so the LSP is ready as soon as the user opens a .cht file. + "postCreateCommand": "bash .devcontainer/setup.sh", + + // Runs on every container start (including rebuilds and resumes from + // a stopped state). `bundle install` is idempotent and fast when the + // gems are already present; running it here means the LSP recovers + // automatically if the gem cache went missing between sessions. + "postStartCommand": "bundle install --quiet", + + "customizations": { + "vscode": { + "settings": { + "files.associations": { + "*.cht": "clear" + }, + "[clear]": { + "editor.tabSize": 2, + "editor.insertSpaces": true, + "editor.detectIndentation": false + } + }, + // Extensions from the marketplace that pair well with editing a + // Ruby + Zig + TypeScript multi-language repo. The in-repo + // .vscode/extensions/cheat-lang extension is installed by + // .devcontainer/setup.sh (symlinked into the VS Code Server). + "extensions": [ + "rebornix.ruby", + "ziglang.vscode-zig" + ] + } + }, + + "remoteUser": "vscode", + + // Reasonable defaults; bump when the project gains web servers or + // dashboards that need port forwarding. 
+ "forwardPorts": [] +} diff --git a/.devcontainer/setup.sh b/.devcontainer/setup.sh new file mode 100755 index 000000000..f1acd0366 --- /dev/null +++ b/.devcontainer/setup.sh @@ -0,0 +1,72 @@ +#!/usr/bin/env bash +# Devcontainer post-create hook. Runs once when the container is built. +# +# Steps (idempotent — safe to re-run): +# 1. Install Ruby gems for the compiler. +# 2. Install Zig (matches the version pinned in CI). +# 3. Build the in-repo VS Code extension. 4. Install it. + +set -euo pipefail + +ZIG_VERSION="0.16.0" +ZIG_INSTALL_DIR="/usr/local/share/zig" + +echo "[devcontainer] $(date '+%H:%M:%S') step 1/4 — bundle install" +bundle install + +echo "[devcontainer] $(date '+%H:%M:%S') step 2/4 — installing Zig ${ZIG_VERSION}" +if command -v zig >/dev/null 2>&1; then + echo " zig already on PATH ($(zig version)) — skipping" +else + arch="$(uname -m)" + case "$arch" in + x86_64) zig_arch="x86_64-linux" ;; + aarch64) zig_arch="aarch64-linux" ;; + *) echo " unsupported arch: $arch — install zig manually"; exit 0 ;; + esac + tarball="zig-${zig_arch}-${ZIG_VERSION}.tar.xz" + url="https://ziglang.org/download/${ZIG_VERSION}/${tarball}" + + echo " downloading ${url}" + curl -fsSL "$url" -o "/tmp/${tarball}" + + sudo mkdir -p "$ZIG_INSTALL_DIR" + sudo tar -xJf "/tmp/${tarball}" -C "$ZIG_INSTALL_DIR" --strip-components=1 + sudo ln -sf "$ZIG_INSTALL_DIR/zig" /usr/local/bin/zig + rm -f "/tmp/${tarball}" + echo " zig $(zig version) installed at $ZIG_INSTALL_DIR" +fi + +echo "[devcontainer] $(date '+%H:%M:%S') step 3/4 — building VS Code extension" +pushd .vscode/extensions/cheat-lang >/dev/null +npm install --silent +npm run compile +popd >/dev/null + +echo "[devcontainer] $(date '+%H:%M:%S') step 4/4 — installing extension into VS Code Server" +# VS Code (and Codespaces) does NOT auto-load extensions from +# `.vscode/extensions/<name>/` — that's a Cursor-specific convention. 
+# To get the extension loaded in vanilla VS Code Server, symlink the +# built directory into `~/.vscode-server/extensions/` using the +# `<publisher>.<name>-<version>` naming convention VS Code expects. +EXT_SRC="$PWD/.vscode/extensions/cheat-lang" +EXT_NAME="clear.clear-lang-0.2.0" + +# Codespaces uses ~/.vscode-server/extensions; some Dev Containers +# use ~/.vscode-remote/extensions. Symlink to whichever exists, and +# create both as a belt-and-suspenders. +for VSCODE_HOME in "$HOME/.vscode-server" "$HOME/.vscode-remote"; do + mkdir -p "$VSCODE_HOME/extensions" + ln -sfn "$EXT_SRC" "$VSCODE_HOME/extensions/$EXT_NAME" + echo " installed at $VSCODE_HOME/extensions/$EXT_NAME" +done + +echo "[devcontainer] $(date '+%H:%M:%S') setup complete" +echo "" +echo " Try it:" +echo " 1. Open any .cht file (try transpile-tests/01_smoke.cht)" +echo " 2. Squiggles, hover (mouse-over), and Ctrl+. (quick fix) all work." +echo "" +echo " Run the test suite:" +echo " bundle exec prspec spec/ # 4180+ Ruby specs" +echo " ./clear test transpile-tests/ # 514 transpile tests" diff --git a/.gitignore b/.gitignore index f8c83b930..0df87ea47 100644 --- a/.gitignore +++ b/.gitignore @@ -2,6 +2,7 @@ # Ignore everything at root, then explicitly allow known files/dirs. # Anything new dropped at root is ignored by default. 
/* +!/.devcontainer/ !/.github/ !/.gitignore !/.rspec @@ -17,6 +18,7 @@ !/TODO.md !/WALKTHROUGH.md !/clear +!/bin/ !/benchmarks/ !/docs/ !/examples/ diff --git a/.vscode/extensions/cheat-lang b/.vscode/extensions/cheat-lang deleted file mode 100644 index 9169f38e9..000000000 --- a/.vscode/extensions/cheat-lang +++ /dev/null @@ -1,25 +0,0 @@ -{ - "name": "cheat-lang", - "displayName": "CHEAT Language Support", - "description": "Syntax highlighting for the CHEAT programming language", - "version": "0.0.1", - "engines": { - "vscode": "^1.70.0" - }, - "categories": [ - "Programming Languages" - ], - "contributes": { - "languages": [{ - "id": "cheat", - "aliases": ["Cheat", "cheat"], - "extensions": [".cht"], - "configuration": "./language-configuration.json" - }], - "grammars": [{ - "language": "cheat", - "scopeName": "source.cheat", - "path": "./syntaxes/cheat.tmLanguage.json" - }] - } -} diff --git a/.vscode/extensions/cheat-lang/.gitignore b/.vscode/extensions/cheat-lang/.gitignore new file mode 100644 index 000000000..d3e15b1e6 --- /dev/null +++ b/.vscode/extensions/cheat-lang/.gitignore @@ -0,0 +1,3 @@ +node_modules/ +out/ +*.vsix diff --git a/.vscode/extensions/cheat-lang/README.md b/.vscode/extensions/cheat-lang/README.md new file mode 100644 index 000000000..5c32b50e9 --- /dev/null +++ b/.vscode/extensions/cheat-lang/README.md @@ -0,0 +1,119 @@ +# clear-lang — VS Code extension for CLEAR + +Syntax highlighting + Language Server integration for the CLEAR +programming language (`.cht` files). Surfaces diagnostics, hover +documentation, and quick-fix code actions in VS Code, the same way +the Neovim setup does. + +## What you'll get + +- **Squiggles** on every error with the registry code shown. +- **Hover** (cursor on a diagnostic) — markdown popup with the + registry's summary, cause, fix hint, and a worked bad-vs-good + example pulled from the test suite. +- **Quick-fix menu** (Ctrl+. / Cmd+.) 
— choose from the available + auto / interactive fixes; accepting one applies the edit. + +## One-time setup (from the cheat repo root) + +The extension lives inside the repo at +`.vscode/extensions/cheat-lang/`. Build the TypeScript client: + +```sh +cd .vscode/extensions/cheat-lang +npm install +npm run compile +``` + +This produces `out/extension.js` (the entry point referenced by +`package.json`). The extension is now ready. + +### How VS Code finds it + +VS Code does NOT auto-load extensions from `.vscode/extensions/`; +the devcontainer's setup.sh symlinks the built extension into +`~/.vscode-server/extensions/` (do the same manually outside it). +Then reload the window (Ctrl+Shift+P → "Developer: Reload Window"). + +## Verifying + +Open any `.cht` file. The status bar should show "CLEAR" as the +language. Open the Output panel (Ctrl+Shift+U) and select +"CLEAR Language Server" from the dropdown — you should see startup +log lines from the server. + +Try a deliberately-broken file: + +```clear +FN main() RETURNS Void -> + _ = doesNotExist; + x = 5; + WITH RESTRICT x { _ = x; } +END +``` + +You should see: + +1. Squiggles under `doesNotExist` (line 2) and the `WITH RESTRICT x` + line (line 4). +2. Hover the mouse over `doesNotExist` → tooltip with the registry + markdown. +3. Cursor on the `x` of `RESTRICT x`, press Ctrl+. → menu offers + "Declare 'x' as MUTABLE at its binding site (line 3).". Accept; + the buffer updates. + +## Settings + +`clear.serverPath` (string, default `""`) — absolute path to +`bin/clear-lsp`. Defaults to auto-detecting from the extension's +install location, which works when the extension lives inside the +cheat repo. Set it explicitly when installing as a `.vsix` outside +the repo. + +`clear.serverArgs` (array of string, default +`["--log-level=info"]`) — extra arguments passed to clear-lsp. Bump +to `--log-level=debug` for verbose protocol logs. 
+ +`clear.useBundleExec` (boolean, default `true`) — whether to +invoke the server via `bundle exec`. Set to false if your +environment has the right gems on `$LOAD_PATH` already (uncommon). + +## Troubleshooting + +- **"clear-lsp not found at /path"** — the auto-detection failed. + Either move the extension to `.vscode/extensions/cheat-lang/` + inside your cheat clone, or set `clear.serverPath` manually. +- **Server starts then stops immediately** — open the + "CLEAR Language Server" output channel; the server's stderr + appears there. Common causes: missing gems (`bundle install` in + the repo root) or wrong Ruby version. +- **No diagnostics on a file with errors** — confirm the language + is "CLEAR" in the status bar; if VS Code thinks it's plain text, + the language registration didn't take. Try reloading the window. + +## Files + +``` +.vscode/extensions/cheat-lang/ +├── package.json — extension manifest +├── tsconfig.json — TypeScript config +├── language-configuration.json — comments, brackets, indentation +├── README.md — this file +├── src/extension.ts — LSP client (TypeScript) +├── syntaxes/clear.tmLanguage.json — syntax highlighting grammar +└── out/extension.js — compiled client (generated) +``` + +## Packaging as a .vsix (optional) + +For distribution outside the repo, install `vsce` and package: + +```sh +npm install -g @vscode/vsce +cd .vscode/extensions/cheat-lang +vsce package +``` + +Produces `clear-lang-0.2.0.vsix`. Install via "Extensions: Install +from VSIX..." in VS Code. Users will need to set `clear.serverPath` +manually since auto-detection won't find the binary. 
diff --git a/.vscode/extensions/cheat-lang/language-configuration.json b/.vscode/extensions/cheat-lang/language-configuration.json new file mode 100644 index 000000000..b4808dceb --- /dev/null +++ b/.vscode/extensions/cheat-lang/language-configuration.json @@ -0,0 +1,27 @@ +{ + "comments": { + "lineComment": "#" + }, + "brackets": [ + ["{", "}"], + ["[", "]"], + ["(", ")"] + ], + "autoClosingPairs": [ + { "open": "{", "close": "}" }, + { "open": "[", "close": "]" }, + { "open": "(", "close": ")" }, + { "open": "\"", "close": "\"", "notIn": ["string"] } + ], + "surroundingPairs": [ + ["{", "}"], + ["[", "]"], + ["(", ")"], + ["\"", "\""] + ], + "indentationRules": { + "increaseIndentPattern": "(.*\\s(DO|THEN|->)\\s*$)|(.*\\{\\s*$)|(\\bSTRUCT\\s+\\w+\\s*\\{\\s*$)|(\\bENUM\\s+\\w+\\s*\\{\\s*$)|(\\bUNION\\s+\\w+\\s*\\{\\s*$)", + "decreaseIndentPattern": "^\\s*(END|\\}|ELSE|ELSE_IF|CATCH)\\b" + }, + "wordPattern": "(-?\\d*\\.\\d\\w*)|([^\\`\\~\\!\\@\\#\\%\\^\\&\\*\\(\\)\\-\\=\\+\\[\\{\\]\\}\\\\\\|\\;\\:\\'\\\"\\,\\.\\<\\>\\/\\?\\s]+)" +} diff --git a/.vscode/extensions/cheat-lang/package-lock.json b/.vscode/extensions/cheat-lang/package-lock.json new file mode 100644 index 000000000..800b4904a --- /dev/null +++ b/.vscode/extensions/cheat-lang/package-lock.json @@ -0,0 +1,139 @@ +{ + "name": "clear-lang", + "version": "0.2.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "clear-lang", + "version": "0.2.0", + "dependencies": { + "vscode-languageclient": "^9.0.0" + }, + "devDependencies": { + "@types/node": "^18.0.0", + "@types/vscode": "^1.70.0", + "typescript": "^5.0.0" + }, + "engines": { + "vscode": "^1.70.0" + } + }, + "node_modules/@types/node": { + "version": "18.19.130", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.130.tgz", + "integrity": "sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg==", + "dev": true, + "license": "MIT", + "dependencies": { + "undici-types": 
"~5.26.4" + } + }, + "node_modules/@types/vscode": { + "version": "1.118.0", + "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.118.0.tgz", + "integrity": "sha512-Ah6eTlqDcwIMELEVwQMO++rJAFBRz/oLluLD/vWdYrH1KuI9kfpaM+7pg0OvvascgcJy+ghLCERAYouM4QbzGw==", + "dev": true, + "license": "MIT" + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "license": "MIT" + }, + "node_modules/brace-expansion": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.1.0.tgz", + "integrity": "sha512-TN1kCZAgdgweJhWWpgKYrQaMNHcDULHkWwQIspdtjV4Y5aurRdZpjAqn6yX3FPqTA9ngHCc4hJxMAMgGfve85w==", + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/minimatch": { + "version": "5.1.9", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.9.tgz", + "integrity": "sha512-7o1wEA2RyMP7Iu7GNba9vc0RWWGACJOCZBJX2GJWip0ikV+wcOsgVuY9uE8CPiyQhkGFSlhuSkZPavN7u1c2Fw==", + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/semver": { + "version": "7.7.4", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.7.4.tgz", + "integrity": "sha512-vFKC2IEtQnVhpT78h1Yp8wzwrf8CM+MzKMHGJZfBtzhZNycRFnXsHk6E5TxIkkMsgNS7mdX3AGB7x2QM2di4lA==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + 
"node": ">=14.17" + } + }, + "node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "dev": true, + "license": "MIT" + }, + "node_modules/vscode-jsonrpc": { + "version": "8.2.0", + "resolved": "https://registry.npmjs.org/vscode-jsonrpc/-/vscode-jsonrpc-8.2.0.tgz", + "integrity": "sha512-C+r0eKJUIfiDIfwJhria30+TYWPtuHJXHtI7J0YlOmKAo7ogxP20T0zxB7HZQIFhIyvoBPwWskjxrvAtfjyZfA==", + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/vscode-languageclient": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/vscode-languageclient/-/vscode-languageclient-9.0.1.tgz", + "integrity": "sha512-JZiimVdvimEuHh5olxhxkht09m3JzUGwggb5eRUkzzJhZ2KjCN0nh55VfiED9oez9DyF8/fz1g1iBV3h+0Z2EA==", + "license": "MIT", + "dependencies": { + "minimatch": "^5.1.0", + "semver": "^7.3.7", + "vscode-languageserver-protocol": "3.17.5" + }, + "engines": { + "vscode": "^1.82.0" + } + }, + "node_modules/vscode-languageserver-protocol": { + "version": "3.17.5", + "resolved": "https://registry.npmjs.org/vscode-languageserver-protocol/-/vscode-languageserver-protocol-3.17.5.tgz", + "integrity": "sha512-mb1bvRJN8SVznADSGWM9u/b07H7Ecg0I3OgXDuLdn307rl/J3A9YD6/eYOssqhecL27hK1IPZAsaqh00i/Jljg==", + "license": "MIT", + "dependencies": { + "vscode-jsonrpc": "8.2.0", + "vscode-languageserver-types": "3.17.5" + } + }, + "node_modules/vscode-languageserver-types": { + "version": "3.17.5", + "resolved": "https://registry.npmjs.org/vscode-languageserver-types/-/vscode-languageserver-types-3.17.5.tgz", + "integrity": "sha512-Ld1VelNuX9pdF39h2Hgaeb5hEZM2Z3jUrrMgWQAu82jMtZp7p3vJT3BzToKtZI7NgQssZje5o0zryOrhQvzQAg==", + "license": "MIT" + } + } +} diff --git a/.vscode/extensions/cheat-lang/package.json b/.vscode/extensions/cheat-lang/package.json new file mode 100644 index 
000000000..d19c5bd47 --- /dev/null +++ b/.vscode/extensions/cheat-lang/package.json @@ -0,0 +1,69 @@ +{ + "name": "clear-lang", + "displayName": "CLEAR Language Support", + "description": "Syntax highlighting + LSP integration for the CLEAR programming language (.cht files)", + "version": "0.2.0", + "publisher": "clear", + "engines": { + "vscode": "^1.70.0" + }, + "categories": [ + "Programming Languages" + ], + "main": "./out/extension.js", + "activationEvents": [ + "onLanguage:clear" + ], + "contributes": { + "languages": [ + { + "id": "clear", + "aliases": ["CLEAR", "clear"], + "extensions": [".cht"], + "configuration": "./language-configuration.json" + } + ], + "grammars": [ + { + "language": "clear", + "scopeName": "source.clear", + "path": "./syntaxes/clear.tmLanguage.json" + } + ], + "configuration": { + "type": "object", + "title": "CLEAR", + "properties": { + "clear.serverPath": { + "type": "string", + "default": "", + "description": "Absolute path to the cheat repo's `bin/clear-lsp`. When empty, the extension auto-detects by walking up from the extension's install location (works when the extension lives inside the repo at `.vscode/extensions/cheat-lang/`)." + }, + "clear.serverArgs": { + "type": "array", + "default": ["--log-level=info"], + "items": { "type": "string" }, + "description": "Extra arguments passed to clear-lsp." + }, + "clear.useBundleExec": { + "type": "boolean", + "default": true, + "description": "Whether to invoke the server via `bundle exec`. Set to false if your environment already has the right gems on $LOAD_PATH (uncommon)." 
+ } + } + } + }, + "scripts": { + "vscode:prepublish": "npm run compile", + "compile": "tsc -p ./", + "watch": "tsc -watch -p ./" + }, + "dependencies": { + "vscode-languageclient": "^9.0.0" + }, + "devDependencies": { + "@types/node": "^18.0.0", + "@types/vscode": "^1.70.0", + "typescript": "^5.0.0" + } +} diff --git a/.vscode/extensions/cheat-lang/src/extension.ts b/.vscode/extensions/cheat-lang/src/extension.ts new file mode 100644 index 000000000..82e784c67 --- /dev/null +++ b/.vscode/extensions/cheat-lang/src/extension.ts @@ -0,0 +1,105 @@ +// CLEAR Language Server VS Code client. +// +// Activates on `.cht` files (registered language id: "clear"), spawns +// `bundle exec bin/clear-lsp`, and wires the standard LSP client so +// VS Code surfaces: +// +// * Diagnostics — squiggles on errors with the registry code. +// * Hover — `K`-equivalent: cursor on a diagnostic shows the +// registry markdown popup (cause, fix hint, bad/good example). +// * Code actions — Ctrl+. (Cmd+. on macOS) opens the quick-fix +// menu populated by FixableFinding's auto + interactive fixes. +// +// The server path is auto-detected when the extension is installed +// inside the cheat repo at `.vscode/extensions/cheat-lang/` — the +// extension walks up three levels to the repo root and finds +// `bin/clear-lsp` there. Override via the `clear.serverPath` setting +// when installing the extension as a portable .vsix that isn't +// shipped from inside the repo. 
+ +import * as fs from "fs"; +import * as path from "path"; +import { + workspace, + window, + ExtensionContext, + Uri, +} from "vscode"; +import { + LanguageClient, + LanguageClientOptions, + ServerOptions, + TransportKind, +} from "vscode-languageclient/node"; + +let client: LanguageClient | undefined; + +export function activate(context: ExtensionContext) { + const cfg = workspace.getConfiguration("clear"); + + const serverPath = (cfg.get("serverPath") || "").trim() || + defaultServerPath(context.extensionPath); + if (!fs.existsSync(serverPath)) { + window.showErrorMessage( + `clear-lsp not found at ${serverPath}. Set "clear.serverPath" in settings ` + + `to point at your cheat repo's bin/clear-lsp.` + ); + return; + } + + const useBundleExec = cfg.get("useBundleExec", true); + const serverArgs = cfg.get("serverArgs", ["--log-level=info"]); + const repoRoot = path.dirname(path.dirname(serverPath)); // .../cheat/bin/clear-lsp → .../cheat + + // The CLEAR compiler requires `bundler/setup`, so by default we + // launch via `bundle exec`. Fall back to direct invocation when + // the user has the gems available globally. + const command = useBundleExec ? "bundle" : serverPath; + const args = useBundleExec + ? ["exec", serverPath, ...serverArgs] + : serverArgs; + + const serverOptions: ServerOptions = { + command, + args, + transport: TransportKind.stdio, + options: { + cwd: repoRoot, // bundler reads Gemfile from here + }, + }; + + const clientOptions: LanguageClientOptions = { + documentSelector: [{ scheme: "file", language: "clear" }], + synchronize: { + fileEvents: workspace.createFileSystemWatcher("**/*.cht"), + }, + outputChannelName: "CLEAR Language Server", + }; + + client = new LanguageClient( + "clear-lsp", + "CLEAR Language Server", + serverOptions, + clientOptions + ); + + // Surface server stderr in the output channel so users can see + // logs without leaving VS Code. 
+ client.start().catch((err) => { + window.showErrorMessage(`clear-lsp failed to start: ${err.message}`); + }); +} + +export function deactivate(): Thenable | undefined { + return client?.stop(); +} + +// When `clear.serverPath` isn't set, find the LSP binary by walking +// up from the extension's own install location to the cheat repo +// root. This makes the in-repo install (.vscode/extensions/...) +// just work without configuration. +function defaultServerPath(extensionPath: string): string { + // .vscode/extensions/cheat-lang → up 3 → repo root. + const repoRoot = path.resolve(extensionPath, "..", "..", ".."); + return path.join(repoRoot, "bin", "clear-lsp"); +} diff --git a/.vscode/extensions/cheat-lang/syntaxes/clear.tmLanguage.json b/.vscode/extensions/cheat-lang/syntaxes/clear.tmLanguage.json new file mode 100644 index 000000000..2be4453c9 --- /dev/null +++ b/.vscode/extensions/cheat-lang/syntaxes/clear.tmLanguage.json @@ -0,0 +1,252 @@ +{ + "$schema": "https://raw.githubusercontent.com/martinring/tmlanguage/master/tmlanguage.json", + "name": "CLEAR", + "scopeName": "source.clear", + "patterns": [ + { "include": "#comments" }, + { "include": "#strings" }, + { "include": "#capabilities" }, + { "include": "#keywords" }, + { "include": "#types" }, + { "include": "#numbers" }, + { "include": "#operators" }, + { "include": "#sigils" }, + { "include": "#bindings" }, + { "include": "#functions" } + ], + "repository": { + "comments": { + "patterns": [ + { + "name": "comment.line.number-sign.clear", + "match": "#.*$" + } + ] + }, + "keywords": { + "patterns": [ + { + "comment": "Control flow", + "name": "keyword.control.clear", + "match": "\\b(IF|THEN|ELSE|ELSE_IF|END|WHILE|DO|FOR|IN|BG|NEXT|BREAK|CONTINUE|RETURN|RETURNS|MATCH|PARTIAL|START|DEFAULT|WHEN|IFF|CATCH|EXIT|DIE|PASS|PRUNE|RAISE|ASSERT|TIGHT|YIELD|OR_RESCUE|ON|RETRY)\\b" + }, + { + "comment": "Storage / declarations", + "name": "storage.type.clear", + "match": 
"\\b(FN|METHOD|MUTABLE|STRUCT|ENUM|UNION|STREAM)\\b" + }, + { + "comment": "Ownership / memory operators", + "name": "keyword.other.memory.clear", + "match": "\\b(GIVE|TAKES|COPY|MOVE|SHARE|LINK|RESOLVE|FREEZE|CLONE)\\b" + }, + { + "comment": "Pipeline / query operators", + "name": "keyword.other.pipeline.clear", + "match": "\\b(SELECT|WHERE|UNNEST|EACH|FIND|ANY|ALL|INDEX|SORT|ORDER_BY|LIMIT|SKIP|DISTINCT|REDUCE|COUNT|SUM|AVERAGE|AVG|MIN|MAX|CONCURRENT|SHARD|JOIN|WINDOW|TAKE_WHILE|TAKEWHILE|TAP|FOLD|COLLECT)\\b" + }, + { + "comment": "Module / FFI / visibility", + "name": "keyword.other.module.clear", + "match": "\\b(REQUIRE|USE|PUB|PRIVATE|EXTERN|FROM|EFFECTS|CLOSE|CAPTURES)\\b" + }, + { + "comment": "WITH-block capabilities", + "name": "keyword.other.capability.clear", + "match": "\\b(WITH|EXCLUSIVE|RESTRICT|BORROWED|VIEW|MATERIALIZED|SNAPSHOT|POLYMORPHIC|GUARD|CAST|AS|POSSIBLE_DEADLOCK|POSSIBLE_LOCK_CYCLE)\\b" + }, + { + "comment": "REQUIRES families and reentrance variants", + "name": "keyword.other.requires.clear", + "match": "\\b(REQUIRES|LOCKED|VERSIONED|ATOMIC|LOCAL|ACTOR|NON_REENTRANT|REENTRANT|SNAPSHOTTED|MAX_DEPTH|NOT_LOGICAL|THUNK|TAIL_CALL)\\b" + }, + { + "comment": "Predicate clauses", + "name": "keyword.other.contract.clear", + "match": "\\b(PRE|DEBUG_POST)\\b" + }, + { + "comment": "SYNC POLICY (top-level concurrency policy)", + "name": "keyword.other.policy.clear", + "match": "\\b(SYNC|POLICY)\\b" + }, + { + "comment": "Test framework keywords", + "name": "keyword.other.test.clear", + "match": "\\b(TEST|THAT|BENCH|BENCHMARK|BEFORE|AFTER|SETUP|LET|EXPECT|PENDING|STUB)\\b" + }, + { + "comment": "Word-shaped operators", + "name": "keyword.operator.word.clear", + "match": "\\b(AND|OR|NOT|MOD|IS)\\b" + }, + { + "name": "constant.language.boolean.clear", + "match": "\\b(TRUE|FALSE|NIL)\\b" + } + ] + }, + "types": { + "patterns": [ + { + "comment": "Built-in primitive and stdlib types", + "name": "support.type.clear", + "match": 
"\\b(Number|Int8|Int16|Int32|Int64|UInt8|UInt16|UInt32|UInt64|Float32|Float64|Byte|Bool|String|Void|Auto|Any|HashMap|Set|List|Pool|Map|Stream|Promise|Id|TCPServer|TCPClient|File|Counter|Box)\\b" + }, + { + "comment": "User-defined type names — capitalised identifiers", + "name": "entity.name.type.clear", + "match": "\\b[A-Z][a-zA-Z0-9]*\\b" + } + ] + }, + "capabilities": { + "patterns": [ + { + "comment": "Capability sigil with optional :modifier:modifier... chain (e.g. @shared:locked, @indirect:atomic, @sharded(N))", + "match": "(@(?:multiowned|shared|locked|writeLocked|list|pool|set|map|sharded|striped|local|indirect|atomic|versioned|observable|pinned|arena|large|xl|service|micro|standard|reentrant|nonReentrant|canSmash|parallel|soa|split|raw|frozen|alwaysMutable|link|thunk|maxDepth))((?::[a-zA-Z][a-zA-Z0-9]*)*)", + "captures": { + "1": { "name": "storage.modifier.capability.clear" }, + "2": { "name": "storage.modifier.capability.chain.clear" } + } + } + ] + }, + "strings": { + "name": "string.quoted.double.clear", + "begin": "\"", + "end": "\"", + "patterns": [ + { + "name": "constant.character.escape.clear", + "match": "\\\\." + }, + { + "comment": "${...} interpolation with embedded CLEAR expressions", + "name": "meta.embedded.expression.clear", + "begin": "\\$\\{", + "end": "\\}", + "beginCaptures": { "0": { "name": "punctuation.definition.interpolation.begin.clear" } }, + "endCaptures": { "0": { "name": "punctuation.definition.interpolation.end.clear" } }, + "patterns": [ + { "include": "$self" } + ] + } + ] + }, + "bindings": { + "patterns": [ + { + "comment": "Pipeline AS-binding alias: AS $u, AS $u.field, etc.", + "name": "variable.parameter.pipeline.clear", + "match": "\\$[a-zA-Z_][a-zA-Z0-9_]*" + } + ] + }, + "functions": { + "patterns": [ + { + "comment": "Function call: identifier (with optional !/? 
suffix) followed by (", + "match": "\\b([a-zA-Z_][a-zA-Z0-9_]*[!?]?)(?=\\()", + "captures": { + "1": { "name": "entity.name.function.clear" } + } + } + ] + }, + "numbers": { + "patterns": [ + { + "name": "constant.numeric.hex.clear", + "match": "\\b0x[0-9a-fA-F_]+(_?(u8|u16|u32|u64|i8|i16|i32|i64))?\\b" + }, + { + "name": "constant.numeric.binary.clear", + "match": "\\b0b[01_]+(_?(u8|u16|u32|u64|i8|i16|i32|i64))?\\b" + }, + { + "name": "constant.numeric.float.clear", + "match": "\\b\\d[\\d_]*\\.\\d[\\d_]*(_?(f32|f64))?\\b" + }, + { + "name": "constant.numeric.integer.clear", + "match": "\\b\\d[\\d_]*(_?(u8|u16|u32|u64|i8|i16|i32|i64|f32|f64))?\\b" + } + ] + }, + "sigils": { + "patterns": [ + { + "comment": "Explicit panic operator", + "name": "keyword.operator.panic.clear", + "match": "!!" + }, + { + "comment": "Mutation suffix on identifiers (foo!, increment!) — must follow a word", + "name": "keyword.operator.mutation.clear", + "match": "(?<=[a-zA-Z0-9_])!" + }, + { + "comment": "Error-union prefix (RETURNS !T, !Void, ...)", + "name": "keyword.operator.error-union.clear", + "match": "(?<![a-zA-Z0-9_!])!(?=[A-Za-z])" 
+ }, + { + "comment": "Function arrow", + "name": "keyword.operator.arrow.clear", + "match": "->" + }, + { + "comment": "Range operators: inclusive, exclusive, slice-wildcard", + "name": "keyword.operator.range.clear", + "match": "(\\.\\.<|\\.\\.=|\\.\\.|\\[\\*\\])" + }, + { + "name": "keyword.operator.compound-assign.clear", + "match": "(\\+=|-=|\\*=|/=|\\.=)" + }, + { + "name": "keyword.operator.comparison.clear", + "match": "(==|!=|<=|>=|<|>)" + }, + { + "name": "keyword.operator.arithmetic.clear", + "match": "(\\+|-|\\*|/)" + }, + { + "name": "keyword.operator.assignment.clear", + "match": "=" + }, + { + "comment": "Discard / placeholder identifier", + "name": "variable.language.placeholder.clear", + "match": "\\b_\\b" + } + ] + } + } +} diff --git a/.vscode/extensions/cheat-lang/tsconfig.json b/.vscode/extensions/cheat-lang/tsconfig.json new file mode 100644 index 000000000..9e9223230 --- /dev/null +++ b/.vscode/extensions/cheat-lang/tsconfig.json @@ -0,0 +1,16 @@ +{ + "compilerOptions": { + "module": "commonjs", + "target": "ES2020", + "lib": ["ES2020"], + "outDir": "out", + "rootDir": "src", + "sourceMap": true, + "strict": true, + "esModuleInterop": true, + "skipLibCheck": true, + "resolveJsonModule": true + }, + "include": ["src/**/*.ts"], + "exclude": ["node_modules", "out"] +} diff --git a/bin/clear-lsp b/bin/clear-lsp new file mode 100755 index 000000000..1b8a5abfc --- /dev/null +++ b/bin/clear-lsp @@ -0,0 +1,49 @@ +#!/usr/bin/env ruby +# clear-lsp — Language Server for the CLEAR programming language. +# +# Speaks the Language Server Protocol (LSP) over stdio. Editors that +# support LSP (Neovim, VS Code, etc.) launch this binary and send +# JSON-RPC requests; the server replies with diagnostics, hover +# content, and code actions sourced from the existing CLEAR compiler +# (Lexer → Parser → SemanticAnnotator + FixCollector). 
+# +# Usage: +# clear-lsp [--log-level=debug|info|warn|error] +# +# The server reads from stdin, writes JSON-RPC frames to stdout, and +# logs to stderr (LSP convention; clients display server stderr). + +require_relative "../src/lsp/server" + +log_level = :info +ARGV.each do |arg| + if arg.start_with?("--log-level=") + raw = arg.split("=", 2).last + sym = raw.to_sym + if LSP::Logger::LEVELS.key?(sym) + log_level = sym + else + $stderr.puts "clear-lsp: unknown --log-level=#{raw} (expected one of: #{LSP::Logger::LEVELS.keys.join(', ')})" + exit 2 + end + elsif arg == "--help" || arg == "-h" + puts "Usage: clear-lsp [--log-level=debug|info|warn|error]" + exit 0 + elsif arg == "--stdio" + # Some LSP clients (vscode-languageclient defaults among them) + # append `--stdio` to the spawn args when configured for stdio + # transport. We only support stdio anyway — accept the flag as + # a no-op so the client can connect without rejecting the + # binary as broken. Same intent for the other transport flags + # we'd never actually honour, but stdio is the one that gets + # passed in practice. 
+ nil + elsif arg == "--node-ipc" || arg == "--pipe" || arg.start_with?("--socket=") + $stderr.puts "clear-lsp: only stdio transport is supported; ignoring #{arg.inspect}" + else + $stderr.puts "clear-lsp: unknown argument #{arg.inspect}" + exit 2 + end +end + +LSP::Server.new(log_level: log_level).run diff --git a/spec/affine_ownership_spec.rb b/spec/affine_ownership_spec.rb index e09659556..8d12a4889 100644 --- a/spec/affine_ownership_spec.rb +++ b/spec/affine_ownership_spec.rb @@ -54,7 +54,7 @@ def get_last_type(source) FLUX } it "raises error on use-after-move" do - expect { ast }.to raise_error(/Use of moved value 'a'/) + expect { ast }.to raise_error(/USE AFTER MOVE: You can't use `a`/) end end @@ -87,7 +87,7 @@ def get_last_type(source) FLUX } it "raises error on use-after-move of sub-path" do - expect { ast }.to raise_error(/Use of moved value 'outer.inner'/) + expect { ast }.to raise_error(/USE AFTER MOVE: You can't use `outer\.inner`/) end end @@ -122,7 +122,7 @@ def get_last_type(source) FLUX } it "raises error on accessing child of moved sub-path" do - expect { ast }.to raise_error(/Use of moved value 'outer.inner'/) + expect { ast }.to raise_error(/USE AFTER MOVE: You can't use `outer\.inner`/) end end @@ -218,7 +218,7 @@ def get_last_type(source) FLUX } it "marks the variable as moved if the parameter specifies TAKES" do - expect { ast }.to raise_error(/Use of moved value 'x'/) + expect { ast }.to raise_error(/USE AFTER MOVE: You can't use `x`/) end end @@ -247,7 +247,7 @@ def get_last_type(source) FLUX } it "invalidates the variable in the parent scope if it moved in ANY branch" do - expect { ast }.to raise_error(/Use of moved value 'x'/) + expect { ast }.to raise_error(/USE AFTER MOVE: You can't use `x`/) end end @@ -270,7 +270,7 @@ def get_last_type(source) FLUX } it "consistently invalidates the variable" do - expect { ast }.to raise_error(/Use of moved value 'x'/) + expect { ast }.to raise_error(/USE AFTER MOVE: You can't use `x`/) end end end @@ 
-357,7 +357,7 @@ def get_last_type(source) # This detects that the loop body moves 'x', implying 'x' must be available # at the start of every iteration, which it isn't after the first move. it "raises error if a loop body moves a variable defined outside the loop" do - expect { ast }.to raise_error(/Use of moved value 'x'/) + expect { ast }.to raise_error(/USE AFTER MOVE: You can't use `x`/) end end end diff --git a/spec/annotator_spec.rb b/spec/annotator_spec.rb index 1fd8af4d2..01125a4f6 100644 --- a/spec/annotator_spec.rb +++ b/spec/annotator_spec.rb @@ -969,7 +969,7 @@ def get_last_type(source) } it "raises a missing field error (from GetField visitor)" do - expect { ast }.to raise_error(/Cannot determine struct type/i) + expect { ast }.to raise_error(/no field 'z'/i) end end end @@ -1306,7 +1306,7 @@ def get_last_type(source) } it "raises an Unknown Struct error" do - expect { ast }.to raise_error(/Unknown struct type: 'Ghost'/) + expect { ast }.to raise_error(/Unknown struct type 'Ghost'/) end end end diff --git a/spec/capabilities_spec.rb b/spec/capabilities_spec.rb index 05a3c2db1..f45ce92a0 100644 --- a/spec/capabilities_spec.rb +++ b/spec/capabilities_spec.rb @@ -977,7 +977,7 @@ def transpile(source) RETURN fib(n - 1) + fib(n - 2); END CLEAR - expect { run(code) }.to raise_error(CompilerError, /Reentrancy Error.*fib.*@reentrant/) + expect { run(code) }.to raise_error(CompilerError, /Reentrancy Error.*fib.*EFFECTS REENTRANT/) end it "accepts a directly-recursive function marked @reentrant" do @@ -997,7 +997,7 @@ def transpile(source) RETURN fib(n - 1) + fib(n - 2); END CLEAR - expect { run(code) }.to raise_error(CompilerError, /Use @reentrant.*not @nonReentrant/) + expect { run(code) }.to raise_error(CompilerError, /Replace `@nonReentrant` with `EFFECTS REENTRANT`/) end it "transpiles @reentrant function without a StackGuard prologue" do diff --git a/spec/clear_fix_spec.rb b/spec/clear_fix_spec.rb index 352479258..6c20e8318 100644 --- 
a/spec/clear_fix_spec.rb +++ b/spec/clear_fix_spec.rb @@ -271,7 +271,7 @@ def write(name, content) it "reports the move line and three candidate fixes" do path = write("m.cht", src) out, _, _ = run_fix("--dry-run", path) - expect(out).to match(/Use of moved value 'a' \(moved at line 5\)/) + expect(out).to match(/USE AFTER MOVE.*`a`.*line 5/) expect(out).to match(/Wrap the consuming reference with COPY at line 5/) expect(out).to match(/Change 'a' to `@multiowned`/) expect(out).to match(/Change 'a' to `@shared`/) diff --git a/spec/error_call_site_audit_spec.rb b/spec/error_call_site_audit_spec.rb index e956e0867..7df59f5f0 100644 --- a/spec/error_call_site_audit_spec.rb +++ b/spec/error_call_site_audit_spec.rb @@ -38,10 +38,20 @@ # `coerce!` returns a string from a data structure / method. # Migrating requires those producers to switch to codes too. # Tranche 8 closed every exception by stamping an umbrella code on - # each pass-through site. The hash is intentionally empty: any new - # raw-string `error!` call is a regression. If a future legitimate - # case appears, add it here with a one-line justification. - EXCEPTIONS = {}.freeze + # each pass-through site. Tier 2 fixable! work added one site that + # passes a Symbol `code` as a variable (emit_match_partial_fix! takes + # `code:` so it can be reused for MATCH_NEEDS_ENUM_OR_UNION and + # MATCH_NON_EXHAUSTIVE). v2 fix B added a second such site + # (emit_reentrant_error! takes `code:` so it can be reused for both + # REENTRANCE_DIRECT_RECURSIVE and REENTRANCE_INDIRECT_RECURSIVE). + # The USE_OF_MOVED_* rewrite added a third (emit_use_of_moved_in_loop_error! + # takes `code:` so it serves both USE_OF_MOVED_IN_LOOP and + # USE_OF_MOVED_IN_LOOP_SHORT). Budget = 3 covers all three helpers; + # the static parser can't see that `code` always holds a real + # registry symbol at runtime. 
+ EXCEPTIONS = { + 'src/annotator-helpers/fixable_helpers.rb' => 3, + }.freeze def self.scan_raw_sites sites = Hash.new(0) diff --git a/spec/error_emission_coverage_spec.rb b/spec/error_emission_coverage_spec.rb index 3298078c1..2859cf738 100644 --- a/spec/error_emission_coverage_spec.rb +++ b/spec/error_emission_coverage_spec.rb @@ -551,7 +551,7 @@ def initialize; @source_code = ""; end FN doIt() RETURNS Void EFFECTS BOGUS_KIND -> END FN main() RETURNS Void -> END CLEAR - }.to raise_error(ParserError, /Unknown effect/) + }.to raise_error(ParserError, /Unknown function effect/) end it "compiles when EFFECTS names REENTRANT" do diff --git a/spec/fixable_capability_typo_spec.rb b/spec/fixable_capability_typo_spec.rb new file mode 100644 index 000000000..9ffef94b0 --- /dev/null +++ b/spec/fixable_capability_typo_spec.rb @@ -0,0 +1,85 @@ +require "rspec" +require_relative "../src/ast/lexer" +require_relative "../src/ast/parser" +require_relative "../src/ast/ast" +require_relative "../src/ast/fixable_error" +require_relative "../src/backends/transpiler" + +# Capability sigil / modifier / WITH-keyword typos. All three sites +# now route through emit_typo_suggestion! with the appropriate +# candidate set (CAP_SIGIL_ATTRS keys, CAPABILITY_TOKENS, AST::CAPABILITIES). +RSpec.describe "Capability typo auto-fixes" do + before { FixCollector.enable! } + after { FixCollector.disable! 
} + + def parse(source) + tokens = Lexer.new(source).tokenize + Parser.new(tokens, source).parse + end + + describe "UNKNOWN_CAPABILITY_SIGIL — `@shared:lokced` typo" do + let(:src) { + <<~CLEAR + STRUCT Counter { v: Int64 } + FN main() RETURNS Void -> + c = Counter{v: 0}@shared:lokced; + _ = c; + END + CLEAR + } + + it "captures a fixable finding suggesting :locked" do + parse(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /lokced/ } + expect(finding).not_to be_nil + edit = finding.fixes.first.edits.first + # Chain form: the user typed `lokced` (without `@`) after the + # `:`. Replacement keeps the same shape. + expect(edit.replacement).to eq("locked") + end + end + + describe "UNKNOWN_CAPABILITY_SIGIL — `@multiowned:lokced` second-position typo" do + # The first-position sigil (`@multiowned`, `@shared`, etc.) is + # tokenized as a single VAR_ID and dispatched by suffix rule — + # bare-sigil typos die in expression parsing, not here. The + # CHAINED form `@:` is the path that reaches the + # capability-sigil error site. 
+ let(:src) { + <<~CLEAR + STRUCT Counter { v: Int64 } + FN main() RETURNS Void -> + c = Counter{v: 0}@multiowned:lokced; + _ = c; + END + CLEAR + } + + it "captures a fixable finding suggesting `locked`" do + parse(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /lokced/ } + expect(finding).not_to be_nil + edit = finding.fixes.first.edits.first + expect(edit.replacement).to eq("locked") + end + end + + describe "UNKNOWN_WITH_CAPABILITY — `RESTRIKT` typo" do + let(:src) { + <<~CLEAR + FN main() RETURNS Void -> + MUTABLE x = 5; + WITH RESTRIKT x { _ = x; } + END + CLEAR + } + + it "captures a fixable finding suggesting RESTRICT" do + parse(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /RESTRIKT/i } + expect(finding).not_to be_nil + edit = finding.fixes.first.edits.first + expect(edit.replacement).to eq("RESTRICT") + end + end +end diff --git a/spec/fixable_effect_typo_spec.rb b/spec/fixable_effect_typo_spec.rb new file mode 100644 index 000000000..c081864e3 --- /dev/null +++ b/spec/fixable_effect_typo_spec.rb @@ -0,0 +1,97 @@ +require "rspec" +require_relative "../src/ast/lexer" +require_relative "../src/ast/parser" +require_relative "../src/ast/ast" +require_relative "../src/ast/fixable_error" +require_relative "../src/backends/transpiler" + +# Effect / REQUIRES / reentrant-variant typos. Six parser sites that +# previously raised plain ParserErrors now offer typo suggestions: +# :alloc / :safe -> UNKNOWN_EFFECT +# :alloc:frame / :alloc:heap -> UNKNOWN_ALLOC_QUALIFIER +# EFFECTS REENTRANT -> UNKNOWN_FN_EFFECT +# REQUIRES p: -> UNKNOWN_REQUIRES_FAMILY +# REQUIRES p: NON_REENTRANT -> UNKNOWN_REQUIRES_KIND +# EFFECTS REENTRANT:THUNK|TAIL_CALL|NOT_LOGICAL... -> UNKNOWN_REENTRANT_VARIANT +RSpec.describe "Effect / REQUIRES / reentrant typo auto-fixes" do + before { FixCollector.enable! } + after { FixCollector.disable! 
} + + def parse(source) + tokens = Lexer.new(source).tokenize + Parser.new(tokens, source).parse + end + + describe "UNKNOWN_EFFECT — `:saf` typo for `:safe`" do + let(:src) { + 'EXTERN FN sha(s: String) RETURNS String EFFECTS :saf FROM "std.crypto";' + } + it "suggests :safe" do + parse(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /:saf'/ } + expect(finding).not_to be_nil + expect(finding.fixes.first.edits.first.replacement).to eq("safe") + end + end + + describe "UNKNOWN_ALLOC_QUALIFIER — `:alloc:frme` typo" do + let(:src) { + 'EXTERN FN x() RETURNS Int64 EFFECTS :alloc:frme FROM "std.x";' + } + it "suggests `frame`" do + parse(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /frme/ } + expect(finding).not_to be_nil + expect(finding.fixes.first.edits.first.replacement).to eq("frame") + end + end + + describe "UNKNOWN_FN_EFFECT — `EFFECTS RENTRANT` typo" do + let(:src) { + <<~CLEAR + FN factorial(n: Int64) RETURNS Int64 EFFECTS RENTRANT -> + RETURN n; + END + FN main() RETURNS Void -> END + CLEAR + } + it "suggests REENTRANT" do + parse(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /RENTRANT/ } + expect(finding).not_to be_nil + expect(finding.fixes.first.edits.first.replacement).to eq("REENTRANT") + end + end + + describe "UNKNOWN_REQUIRES_FAMILY — `LOKKED` typo" do + let(:src) { + <<~CLEAR + FN incr!(MUTABLE c: Counter) REQUIRES c: LOKKED -> END + FN main() RETURNS Void -> END + CLEAR + } + it "suggests LOCKED" do + parse(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /LOKKED/ } + expect(finding).not_to be_nil + expect(finding.fixes.first.edits.first.replacement).to eq("LOCKED") + end + end + + describe "UNKNOWN_REENTRANT_VARIANT — `EFFECTS REENTRANT:THONK` typo" do + let(:src) { + <<~CLEAR + FN factorial(n: Int64) RETURNS Int64 EFFECTS REENTRANT:THONK -> + RETURN n; + END + FN main() RETURNS Void -> END + CLEAR + } + it "suggests THUNK" do + parse(src) rescue nil + 
finding = FixCollector.drain.find { |f| f.message =~ /THONK/ } + expect(finding).not_to be_nil + expect(finding.fixes.first.edits.first.replacement).to eq("THUNK") + end + end +end diff --git a/spec/fixable_immutable_param_spec.rb b/spec/fixable_immutable_param_spec.rb new file mode 100644 index 000000000..3226c94a6 --- /dev/null +++ b/spec/fixable_immutable_param_spec.rb @@ -0,0 +1,119 @@ +require "rspec" +require_relative "../src/ast/lexer" +require_relative "../src/ast/parser" +require_relative "../src/ast/ast" +require_relative "../src/ast/fixable_error" +require_relative "../src/backends/transpiler" + +# When a function body mutates a parameter the caller declared +# without `MUTABLE`, three errors can fire depending on the mutation +# shape: ASSIGN_VAR_IMMUTABLE (`p = ...`), ASSIGN_INDEX_IMMUTABLE_LIST +# (`p[i] = ...` — also fires for HashMap), and +# ASSIGN_FIELD_IMMUTABLE_STRUCT (`p.field = ...`). All three now emit +# a FixableFinding whose :auto fix inserts `MUTABLE ` at the +# parameter's declaration column in the function signature. +RSpec.describe "Immutable param auto-fix" do + before { FixCollector.enable! } + after { FixCollector.disable! 
} + + def annotate(source) + tokens = Lexer.new(source).tokenize + ast = Parser.new(tokens, source).parse + SemanticAnnotator.new.annotate!(ast) + ast + end + + describe "ASSIGN_INDEX_IMMUTABLE_LIST on a HashMap parameter" do + let(:src) { + <<~CLEAR + FN parseValue!(json: String, penv: HashMap) RETURNS Void -> + penv["__jp"] = 0; + END + FN main() RETURNS Void -> END + CLEAR + } + + it "captures a fixable finding with a single :auto fix" do + annotate(src) rescue nil + findings = FixCollector.drain.select { |f| f.message =~ /immutable/i } + expect(findings.size).to be >= 1 + expect(findings.first.fixes.first.confidence).to eq(:auto) + end + + it "produces an edit that inserts `MUTABLE ` at the param column" do + annotate(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /immutable/i } + edit = finding.fixes.first.edits.first + expect(edit.replacement).to eq("MUTABLE ") + expect(edit.span.line).to eq(1) + # `FN parseValue!(json: String, penv: HashMap) ...` + # The 'penv' identifier starts at column 30 (1-indexed); insert + # before it. + expect(edit.span.col).to eq(30) + expect(edit.span.length).to eq(0) + end + + it "applying the fix produces compilable CLEAR" do + fixed = src.sub("penv: HashMap", "MUTABLE penv: HashMap") + expect { annotate(fixed) }.not_to raise_error + end + end + + describe "ASSIGN_VAR_IMMUTABLE on a scalar parameter" do + let(:src) { + <<~CLEAR + FN bump(p: Int64) RETURNS Int64 -> + p = p + 1; + RETURN p; + END + FN main() RETURNS Void -> END + CLEAR + } + + it "produces a MUTABLE-insert edit at the param column" do + annotate(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /immutable/i } + expect(finding).not_to be_nil + edit = finding.fixes.first.edits.first + expect(edit.replacement).to eq("MUTABLE ") + expect(edit.span.line).to eq(1) + # `FN bump(p: Int64)` — 'p' starts at column 9. 
+ expect(edit.span.col).to eq(9) + end + end + + describe "ASSIGN_FIELD_IMMUTABLE_STRUCT on a struct parameter" do + let(:src) { + <<~CLEAR + STRUCT Point { x: Int64, y: Int64 } + FN shift(pt: Point) RETURNS Void -> + pt.x = 10; + END + FN main() RETURNS Void -> END + CLEAR + } + + it "produces a MUTABLE-insert edit at the param column" do + annotate(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /immutable/i } + expect(finding).not_to be_nil + edit = finding.fixes.first.edits.first + expect(edit.replacement).to eq("MUTABLE ") + expect(edit.span.line).to eq(2) + # `FN shift(pt: Point)` — 'pt' starts at column 10. + expect(edit.span.col).to eq(10) + end + end + + describe "fallback when the param has no decl token" do + it "ASSIGN_VAR_IMMUTABLE — falls back to plain error! when no fix is locatable" do + src = "FN bump(p: Int64) RETURNS Int64 ->\n p = p + 1;\n RETURN p;\nEND\nFN main() RETURNS Void -> END" + tokens = Lexer.new(src).tokenize + ast = Parser.new(tokens, src).parse + ann = SemanticAnnotator.new + allow(ann).to receive(:build_declare_mutable_fix).and_return(nil) + FixCollector.disable! + expect { ann.annotate!(ast) }.to raise_error(CompilerError, /immutable/i) + end + end +end diff --git a/spec/fixable_match_field_typo_spec.rb b/spec/fixable_match_field_typo_spec.rb new file mode 100644 index 000000000..f8e76354b --- /dev/null +++ b/spec/fixable_match_field_typo_spec.rb @@ -0,0 +1,130 @@ +require "rspec" +require_relative "../src/ast/lexer" +require_relative "../src/ast/parser" +require_relative "../src/ast/ast" +require_relative "../src/ast/fixable_error" +require_relative "../src/backends/transpiler" + +# A MATCH struct-pattern that names a field the schema doesn't declare +# now offers a typo-suggestion fix. Covers two pattern shapes: +# { fieldName: } — value match (A2) +# { fieldName } — destructuring bind (A3) +RSpec.describe "MATCH-pattern field typo auto-fix" do + before { FixCollector.enable! 
} + after { FixCollector.disable! } + + def annotate(source) + tokens = Lexer.new(source).tokenize + ast = Parser.new(tokens, source).parse + SemanticAnnotator.new.annotate!(ast) + ast + end + + describe "MATCH_FIELD_UNKNOWN — value-match form { x: 1 } typo as { xs: 1 }" do + let(:src) { + <<~CLEAR + STRUCT P { x: Int64, y: Int64 } + FN main() RETURNS Void -> + p = P{x: 1, y: 2}; + PARTIAL MATCH p + START { xs: 1 } -> _ = "hi";, + DEFAULT -> _ = "bye"; + END + END + CLEAR + } + + it "captures a fixable finding with a single :auto fix" do + annotate(src) rescue nil + findings = FixCollector.drain.select { |f| f.message =~ /'xs'/ } + expect(findings.size).to eq(1) + expect(findings.first.fixes.first.confidence).to eq(:auto) + end + + it "produces an edit replacing 'xs' with the closest field name" do + annotate(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /'xs'/ } + edit = finding.fixes.first.edits.first + expect(edit.replacement).to eq("x") + expect(edit.span.length).to eq(2) # 'xs' + expect(edit.span.line).to eq(5) + end + + it "applying the fix produces compilable CLEAR" do + fixed = src.sub("xs: 1", "x: 1") + expect { annotate(fixed) }.not_to raise_error + end + end + + describe "MATCH_FIELD_UNKNOWN — destructure form { x } typo as { xx }" do + let(:src) { + <<~CLEAR + STRUCT P { x: Int64, y: Int64 } + FN main() RETURNS Void -> + p = P{x: 1, y: 2}; + PARTIAL MATCH p + START { xx } -> _ = "hi";, + DEFAULT -> _ = "bye"; + END + END + CLEAR + } + + it "captures a fixable finding with a typo suggestion" do + annotate(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /'xx'/ } + expect(finding).not_to be_nil + edit = finding.fixes.first.edits.first + expect(edit.replacement).to eq("x") + expect(edit.span.length).to eq(2) + end + end + + describe "MATCH_DESTRUCTURE_FIELD_UNKNOWN — union variant destructure typo" do + let(:src) { + <<~CLEAR + UNION Shape { Circle { radius: Float64 }, Square } + FN main() RETURNS Void -> + c: 
Shape = Shape.Circle{radius: 5.0}; + MUTABLE r = 0.0; + PARTIAL MATCH c + START Shape.Circle{ radiu } -> r = radiu;, + DEFAULT -> r = 0.0; + END + END + CLEAR + } + + it "produces a typo-suggestion edit for the missing variant field" do + annotate(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /'radiu'/ } + expect(finding).not_to be_nil + edit = finding.fixes.first.edits.first + expect(edit.replacement).to eq("radius") + expect(edit.span.length).to eq(5) # 'radiu' + end + end + + describe "no near-miss candidate (fallback to plain error)" do + let(:src) { + <<~CLEAR + STRUCT P { x: Int64, y: Int64 } + FN main() RETURNS Void -> + p = P{x: 1, y: 2}; + PARTIAL MATCH p + START { somethingDifferent: 1 } -> _ = "hi";, + DEFAULT -> _ = "bye"; + END + END + CLEAR + } + + it "raises plain CompilerError when no candidate is within Levenshtein threshold" do + ann = SemanticAnnotator.new + tokens = Lexer.new(src).tokenize + ast = Parser.new(tokens, src).parse + FixCollector.disable! + expect { ann.annotate!(ast) }.to raise_error(CompilerError, /does not exist on type|TYPO_SUGGESTION_REJECTED|somethingDifferent/i) + end + end +end diff --git a/spec/fixable_misc_typo_spec.rb b/spec/fixable_misc_typo_spec.rb new file mode 100644 index 000000000..b69371c4f --- /dev/null +++ b/spec/fixable_misc_typo_spec.rb @@ -0,0 +1,101 @@ +require "rspec" +require_relative "../src/ast/lexer" +require_relative "../src/ast/parser" +require_relative "../src/ast/ast" +require_relative "../src/ast/fixable_error" +require_relative "../src/backends/transpiler" + +# BG / DO branch prefix sigil typos + struct/type-name typos. +# +# UNKNOWN_LITERAL is defensive (literal-token-typo at lexer level — +# user can't author it) and UNION_METHOD_MISSING / inline-variant +# unknown-field aren't typo cases (the former is missing +# implementation, the latter would need parser plumbing for per- +# field tokens). Those are excluded from this batch. 
+RSpec.describe "BG / branch / type typo auto-fixes" do + before { FixCollector.enable! } + after { FixCollector.disable! } + + def parse(source) + tokens = Lexer.new(source).tokenize + Parser.new(tokens, source).parse + end + + def annotate(source) + ast = parse(source) + SemanticAnnotator.new.annotate!(ast) + ast + end + + describe "UNKNOWN_BG_PREFIX — `@srvice` typo for `@service`" do + let(:src) { + <<~CLEAR + FN main() RETURNS Void -> + fut = BG { @srvice -> _ = 1; }; + _ = NEXT fut; + END + CLEAR + } + it "suggests @service" do + parse(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /@srvice/ } + expect(finding).not_to be_nil + expect(finding.fixes.first.edits.first.replacement).to eq("@service") + end + end + + describe "UNKNOWN_BRANCH_PREFIX — `@parralel` typo for `@parallel`" do + let(:src) { + <<~CLEAR + FN main() RETURNS Void -> + DO { + @parralel _ = 1;, + _ = 2; + } + END + CLEAR + } + it "suggests @parallel" do + parse(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /@parralel/ } + expect(finding).not_to be_nil + expect(finding.fixes.first.edits.first.replacement).to eq("@parallel") + end + end + + describe "UNKNOWN_STRUCT_TYPE — `Pont` typo for `Point`" do + let(:src) { + <<~CLEAR + STRUCT Point { x: Int64, y: Int64 } + FN main() RETURNS Void -> + p = Pont{x: 1, y: 2}; + _ = p; + END + CLEAR + } + it "suggests Point" do + annotate(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /'Pont'/ } + expect(finding).not_to be_nil + expect(finding.fixes.first.edits.first.replacement).to eq("Point") + end + end + + describe "UNKNOWN_TYPE — generic `Pir` typo for `Pair`" do + let(:src) { + <<~CLEAR + STRUCT Pair { first: T, second: T } + FN main() RETURNS Void -> + p = Pir{first: 1, second: 2}; + _ = p; + END + CLEAR + } + it "suggests Pair" do + annotate(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /'Pir'/ } + expect(finding).not_to be_nil + 
expect(finding.fixes.first.edits.first.replacement).to eq("Pair") + end + end +end diff --git a/spec/fixable_reentrant_spec.rb b/spec/fixable_reentrant_spec.rb new file mode 100644 index 000000000..a2c173a5e --- /dev/null +++ b/spec/fixable_reentrant_spec.rb @@ -0,0 +1,102 @@ +require "rspec" +require_relative "../src/ast/lexer" +require_relative "../src/ast/parser" +require_relative "../src/ast/ast" +require_relative "../src/ast/fixable_error" +require_relative "../src/backends/transpiler" + +# A directly-recursive (or mutually-recursive) function without an +# explicit reentrance declaration used to raise REENTRANCE_DIRECT_RECURSIVE +# (or REENTRANCE_INDIRECT_RECURSIVE for the @nonReentrant variant) with +# no fix. Both now emit a FixableFinding whose :auto fix inserts +# `EFFECTS REENTRANT ` before the function arrow. +RSpec.describe "Reentrant function auto-fix" do + before { FixCollector.enable! } + after { FixCollector.disable! } + + def annotate(source) + tokens = Lexer.new(source).tokenize + ast = Parser.new(tokens, source).parse + SemanticAnnotator.new.annotate!(ast) + ast + end + + describe "direct self-recursion without reentrance declaration" do + let(:src) { + <<~CLEAR + FN factorial(n: Int64) RETURNS Int64 -> + IF n <= 1 THEN RETURN 1; END + RETURN n * factorial(n - 1); + END + FN main() RETURNS Void -> END + CLEAR + } + + it "captures a fixable finding with a single :auto fix" do + annotate(src) rescue nil + findings = FixCollector.drain.select { |f| f.message =~ /'factorial'.*recursi/i } + expect(findings.size).to eq(1) + expect(findings.first.fixes.first.confidence).to eq(:auto) + end + + it "produces an edit inserting `EFFECTS REENTRANT ` before the arrow" do + annotate(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /recursi/i } + edit = finding.fixes.first.edits.first + expect(edit.replacement).to eq("EFFECTS REENTRANT ") + expect(edit.span.length).to eq(0) # zero-length insertion + expect(edit.span.line).to eq(1) + # `FN 
factorial(n: Int64) RETURNS Int64 ->` + # The arrow `->` starts at column 38; insertion goes there. + expect(edit.span.col).to eq(38) + end + + it "applying the fix produces compilable CLEAR" do + fixed = src.sub("RETURNS Int64 ->", "RETURNS Int64 EFFECTS REENTRANT ->") + expect { annotate(fixed) }.not_to raise_error + end + end + + describe "mutual recursion (transitive cycle)" do + let(:src) { + <<~CLEAR + FN isEven(n: Int64) RETURNS Bool -> + IF n == 0 THEN RETURN TRUE; END + RETURN isOdd(n - 1); + END + FN isOdd(n: Int64) RETURNS Bool -> + IF n == 0 THEN RETURN FALSE; END + RETURN isEven(n - 1); + END + FN main() RETURNS Void -> END + CLEAR + } + + it "captures fixable findings for the cycle members" do + annotate(src) rescue nil + findings = FixCollector.drain.select { |f| f.message =~ /recursi/i } + expect(findings.size).to be >= 1 + expect(findings.first.fixes.first.confidence).to eq(:auto) + expect(findings.first.fixes.first.edits.first.replacement).to eq("EFFECTS REENTRANT ") + end + end + + describe "fallback when arrow_token is missing" do + it "raises plain CompilerError when the fix isn't locatable" do + src = <<~CLEAR + FN factorial(n: Int64) RETURNS Int64 -> + IF n <= 1 THEN RETURN 1; END + RETURN n * factorial(n - 1); + END + FN main() RETURNS Void -> END + CLEAR + tokens = Lexer.new(src).tokenize + ast = Parser.new(tokens, src).parse + factorial = ast.statements.find { |s| s.is_a?(AST::FunctionDef) && s.name == "factorial" } + factorial.arrow_token = nil + ann = SemanticAnnotator.new + FixCollector.disable! 
+ expect { ann.annotate!(ast) }.to raise_error(CompilerError, /recursi/i) + end + end +end diff --git a/spec/fixable_struct_field_typo_spec.rb b/spec/fixable_struct_field_typo_spec.rb new file mode 100644 index 000000000..dfa20b2a3 --- /dev/null +++ b/spec/fixable_struct_field_typo_spec.rb @@ -0,0 +1,80 @@ +require "rspec" +require_relative "../src/ast/lexer" +require_relative "../src/ast/parser" +require_relative "../src/ast/ast" +require_relative "../src/ast/fixable_error" +require_relative "../src/backends/transpiler" + +# Field-access on a known struct with an unknown field used to raise a +# bare ILLEGAL_FIELD_LOOKUP. With a known schema we have the candidate +# set, so emit a typo-suggestion FixableFinding instead. +RSpec.describe "Struct field typo auto-fix" do + before { FixCollector.enable! } + after { FixCollector.disable! } + + def annotate(source) + tokens = Lexer.new(source).tokenize + ast = Parser.new(tokens, source).parse + SemanticAnnotator.new.annotate!(ast) + ast + end + + describe "ILLEGAL_FIELD_LOOKUP with a near-miss field name" do + let(:src) { + <<~CLEAR + STRUCT Point { x: Int64, y: Int64 } + FN main() RETURNS Void -> + p = Point{x: 1, y: 2}; + _ = p.zz; + END + CLEAR + } + + it "captures a fixable finding with a single :auto fix" do + annotate(src) rescue nil + findings = FixCollector.drain.select { |f| f.message =~ /no field 'zz'/ } + expect(findings.size).to eq(1) + expect(findings.first.fixes.first.confidence).to eq(:auto) + end + + it "produces an edit that replaces 'zz' with the closest field name" do + annotate(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /no field 'zz'/ } + edit = finding.fixes.first.edits.first + # 'zz' is equidistant from 'x' and 'y' but Levenshtein ranking + # prefers earlier candidate when ties — Point lists x first. 
+ expect(%w[x y]).to include(edit.replacement) + expect(edit.span.length).to eq(2) # length of 'zz' + expect(edit.span.line).to eq(4) + end + + it "applying the fix produces compilable CLEAR" do + finding = nil + annotate(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /no field 'zz'/ } + replacement = finding.fixes.first.edits.first.replacement + fixed = src.sub("p.zz", "p.#{replacement}") + expect { annotate(fixed) }.not_to raise_error + end + end + + describe "no near-miss candidate (fallback to plain error)" do + let(:src) { + <<~CLEAR + STRUCT Point { x: Int64, y: Int64 } + FN main() RETURNS Void -> + p = Point{x: 1, y: 2}; + _ = p.totallyDifferent; + END + CLEAR + } + + it "raises plain CompilerError when no candidate is within Levenshtein threshold" do + ann = SemanticAnnotator.new + tokens = Lexer.new(src).tokenize + ast = Parser.new(tokens, src).parse + FixCollector.disable! + expect { ann.annotate!(ast) }.to raise_error(CompilerError, /no field|TYPO_SUGGESTION_REJECTED/) + end + end +end diff --git a/spec/fixable_t1_spec.rb b/spec/fixable_t1_spec.rb new file mode 100644 index 000000000..9a2e9012e --- /dev/null +++ b/spec/fixable_t1_spec.rb @@ -0,0 +1,205 @@ +require "rspec" +require_relative "../src/ast/lexer" +require_relative "../src/ast/parser" +require_relative "../src/ast/ast" +require_relative "../src/ast/fixable_error" +require_relative "../src/backends/transpiler" + +# Tier 1 fixable findings — five error codes that previously raised a +# plain CompilerError now emit a FixableFinding with a deterministic +# auto-fix. Each spec captures the finding via FixCollector and +# verifies both halves: the error fires AND the fix's edit is exactly +# what the user would paste back into their source. +RSpec.describe "Tier 1 fixable findings" do + before { FixCollector.enable! } + after { FixCollector.disable! 
} + + def annotate(source) + tokens = Lexer.new(source).tokenize + ast = Parser.new(tokens, source).parse + SemanticAnnotator.new.annotate!(ast) + ast + end + + describe "WITH_RESTRICT_NEEDS_MUTABLE" do + let(:src) { + <<~CLEAR + FN main() RETURNS Void -> + x = 5; + WITH RESTRICT x { _ = x; } + END + CLEAR + } + + it "captures a fixable finding with a single :auto fix" do + annotate(src) + findings = FixCollector.drain.select { |f| f.message.include?("RESTRICT") } + expect(findings.size).to eq(1) + expect(findings.first.fixes.size).to eq(1) + expect(findings.first.fixes.first.confidence).to eq(:auto) + end + + it "produces an edit that inserts `MUTABLE ` at the binding's column" do + annotate(src) + finding = FixCollector.drain.first + edit = finding.fixes.first.edits.first + expect(edit.replacement).to eq("MUTABLE ") + expect(edit.span.line).to eq(2) # ` x = 5;` line + expect(edit.span.length).to eq(0) # insert, not replace + end + + it "applying the fix produces compilable CLEAR" do + lines = src.lines + lines[1] = " MUTABLE #{lines[1].lstrip}" + fixed = lines.join + expect { annotate(fixed) }.not_to raise_error + end + end + + describe "STYLE_MUTABLE_PARAM_NEEDS_BANG" do + let(:src) { + <<~CLEAR + FN inc(MUTABLE x: Int64) -> x += 1; END + FN main() RETURNS Void -> END + CLEAR + } + + it "captures a fixable finding with a single :auto fix" do + annotate(src) + findings = FixCollector.drain.select { |f| f.message.include?("MUTABLE parameters") } + expect(findings.size).to eq(1) + expect(findings.first.fixes.first.confidence).to eq(:auto) + end + + it "produces an edit that appends `!` immediately after the function name" do + annotate(src) + edit = FixCollector.drain.first.fixes.first.edits.first + expect(edit.replacement).to eq("!") + expect(edit.span.line).to eq(1) + expect(edit.span.length).to eq(0) + # `FN inc` — name 'inc' starts at column 4, ends after column 6. + # The bang insertion goes at column 7 (1-indexed, 0-length insert). 
+ expect(edit.span.col).to eq(7) + end + + it "applying the fix produces compilable CLEAR" do + fixed = src.sub("FN inc(", "FN inc!(") + expect { annotate(fixed) }.not_to raise_error + end + end + + describe "CAN_SMASH_NOT_SUPPORTED" do + # `@canSmash` lives inside the BG body's prefix block, between `{` + # and the body's `->`. Consume the future with NEXT so the + # PROMISE_NOT_CONSUMED check doesn't fire alongside. + let(:src) { + <<~CLEAR + FN doSomething() RETURNS Int64 -> RETURN 42; END + FN main() RETURNS Int64 -> + fut = BG { @canSmash -> _ = doSomething(); }; + RETURN NEXT fut; + END + CLEAR + } + + it "captures a fixable finding with a single :auto fix" do + annotate(src) rescue nil + findings = FixCollector.drain.select { |f| f.message.include?("@canSmash") } + expect(findings.size).to be >= 1 + expect(findings.first.fixes.first.confidence).to eq(:auto) + end + + it "produces an edit that replaces `@canSmash` with `@service`" do + annotate(src) rescue nil + finding = FixCollector.drain.find { |f| f.message.include?("@canSmash") } + edit = finding.fixes.first.edits.first + expect(edit.replacement).to eq("@service") + # length matches @canSmash = 9 chars + expect(edit.span.length).to eq("@canSmash".length) + end + end + + describe "TYPE_MISMATCH_ASSIGN" do + # Reassignment (not declaration) goes through validate_assignment_type; + # initial declaration with a wrong-typed RHS hits Type#coerce! and + # the TYPE_COERCION_FAILED umbrella code instead. 
+ let(:src) { + <<~CLEAR + FN main() RETURNS Void -> + MUTABLE x: Int64 = 5; + x = "hello"; + END + CLEAR + } + + it "captures a fixable finding with one :interactive CAST fix" do + annotate(src) rescue nil + findings = FixCollector.drain.select { |f| f.category == :type && f.message =~ /Type [Mm]ismatch/ } + expect(findings.size).to be >= 1 + fix = findings.first.fixes.first + expect(fix.confidence).to eq(:interactive) + expect(fix.description).to include("CAST") + expect(fix.description).to include("Int64") + end + + it "produces a paired edit that brackets the value with `CAST(... AS Int64)`" do + annotate(src) rescue nil + finding = FixCollector.drain.find { |f| f.category == :type && f.message =~ /Type [Mm]ismatch/ } + edits = finding.fixes.first.edits + expect(edits.size).to eq(2) + expect(edits.first.replacement).to eq("CAST(") + expect(edits.last.replacement).to eq(" AS Int64)") + end + + it "offers a CAST fix when the value is a bare Identifier" do + ident_src = <<~CLEAR + FN main() RETURNS Void -> + MUTABLE x: Int64 = 5; + y: String = "hello"; + x = y; + END + CLEAR + annotate(ident_src) rescue nil + finding = FixCollector.drain.find { |f| f.category == :type && f.message =~ /Type [Mm]ismatch/ } + expect(finding).not_to be_nil + edits = finding.fixes.first.edits + # CAST wrap: insert "CAST(" before `y` and " AS Int64)" right after. + expect(edits.size).to eq(2) + expect(edits.first.replacement).to eq("CAST(") + expect(edits.last.replacement).to eq(" AS Int64)") + end + + it "skips the fix when the value isn't a Literal/Identifier (build_cast_wrap_fix returns nil)" do + # BinaryOp / FuncCall etc. fall through to the else branch and + # return nil; emit_type_mismatch_assign_error! then falls back + # to plain error! (no finding captured). 
+ complex_src = <<~CLEAR + FN concat(a: String, b: String) RETURNS String -> RETURN a + b; END + FN main() RETURNS Void -> + MUTABLE x: Int64 = 5; + x = concat("a", "b"); + END + CLEAR + annotate(complex_src) rescue nil + finding = FixCollector.drain.find { |f| f.category == :type && f.message =~ /Type [Mm]ismatch/ } + if finding + expect(finding.fixes).to be_empty + end + end + end + + describe "fallback paths (no fix locatable)" do + it "WITH_RESTRICT_NEEDS_MUTABLE — falls back to plain error! when scope info is missing" do + # When the binding's symbol scope can't locate the declaration's + # token (e.g. the binding came from a sub-tree without a reg.token), + # build_declare_mutable_fix returns nil and the helper raises. + # Synthesize this by stubbing. + tokens = Lexer.new("FN main() RETURNS Void -> x = 5; WITH RESTRICT x { _ = x; } END").tokenize + ast = Parser.new(tokens, "FN main() RETURNS Void -> x = 5; WITH RESTRICT x { _ = x; } END").parse + ann = SemanticAnnotator.new + allow(ann).to receive(:build_declare_mutable_fix).and_return(nil) + FixCollector.disable! # raise instead of collect + expect { ann.annotate!(ast) }.to raise_error(CompilerError, /RESTRICT.*[Mm]utable/) + end + end +end diff --git a/spec/fixable_t2_spec.rb b/spec/fixable_t2_spec.rb new file mode 100644 index 000000000..49bdba86f --- /dev/null +++ b/spec/fixable_t2_spec.rb @@ -0,0 +1,178 @@ +require "rspec" +require_relative "../src/ast/lexer" +require_relative "../src/ast/parser" +require_relative "../src/ast/ast" +require_relative "../src/ast/fixable_error" +require_relative "../src/backends/transpiler" + +# Tier 2 fixable findings. Five additional error sites that previously +# raised a plain CompilerError now emit a FixableFinding. +RSpec.describe "Tier 2 fixable findings" do + before { FixCollector.enable! } + after { FixCollector.disable! 
} + + def annotate(source) + tokens = Lexer.new(source).tokenize + ast = Parser.new(tokens, source).parse + SemanticAnnotator.new.annotate!(ast) + ast + end + + describe "CAPTURE_IMMUTABLE_AS_MUTABLE" do + let(:src) { + <<~CLEAR + FN main() RETURNS Void -> + x = 5; + FN g() USE(MUTABLE x) RETURNS Int64 -> RETURN x; END + END + CLEAR + } + + it "captures a fixable finding with a single :auto fix" do + annotate(src) rescue nil + findings = FixCollector.drain.select { |f| f.message =~ /capture immutable/i } + expect(findings.size).to eq(1) + expect(findings.first.fixes.first.confidence).to eq(:auto) + end + + it "produces an edit that inserts MUTABLE at the captured binding's declaration" do + annotate(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /capture immutable/i } + edit = finding.fixes.first.edits.first + expect(edit.replacement).to eq("MUTABLE ") + expect(edit.span.line).to eq(2) # the ` x = 5;` line + expect(edit.span.length).to eq(0) + end + end + + describe "AMBIGUOUS_RETURN" do + let(:src) { + <<~CLEAR + FN classify(n: Int64) -> + IF n > 0 THEN RETURN n; ELSE RETURN "negative"; END + END + FN main() RETURNS Void -> + _ = classify(1); + END + CLEAR + } + + it "captures a fixable finding with a single :auto fix" do + annotate(src) rescue nil + findings = FixCollector.drain.select { |f| f.message =~ /Ambiguous Return|multiple types/ } + expect(findings.size).to eq(1) + expect(findings.first.fixes.first.confidence).to eq(:auto) + end + + it "produces an edit that inserts `RETURNS :Any ` before the arrow" do + annotate(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /Ambiguous Return|multiple types/ } + edit = finding.fixes.first.edits.first + expect(edit.replacement).to eq("RETURNS :Any ") + expect(edit.span.length).to eq(0) + end + end + + describe "MATCH_NEEDS_ENUM_OR_UNION" do + let(:src) { + <<~CLEAR + FN main() RETURNS Void -> + x: Int64 = 5; + MATCH x START + 5 -> _ = 0; + END + END + CLEAR + } + + it 
"captures a fixable finding with a single :auto fix" do + annotate(src) rescue nil + findings = FixCollector.drain.select { |f| f.message =~ /MATCH requires/ } + expect(findings.size).to eq(1) + expect(findings.first.fixes.first.confidence).to eq(:auto) + end + + it "produces an edit that inserts `PARTIAL ` before the MATCH keyword" do + annotate(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /MATCH requires/ } + edit = finding.fixes.first.edits.first + expect(edit.replacement).to eq("PARTIAL ") + expect(edit.span.length).to eq(0) + end + end + + describe "MATCH_NON_EXHAUSTIVE" do + let(:src) { + <<~CLEAR + ENUM Color { Red, Green, Blue } + FN main() RETURNS Void -> + c: Color = Color.Red; + MATCH c START + Color.Red -> _ = 0; + END + END + CLEAR + } + + it "captures a fixable finding with a single :auto fix" do + annotate(src) rescue nil + findings = FixCollector.drain.select { |f| f.message =~ /non-exhaustive/ } + expect(findings.size).to eq(1) + expect(findings.first.fixes.first.confidence).to eq(:auto) + end + + it "produces the same `PARTIAL ` insertion as MATCH_NEEDS_ENUM_OR_UNION" do + annotate(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /non-exhaustive/ } + edit = finding.fixes.first.edits.first + expect(edit.replacement).to eq("PARTIAL ") + end + end + + describe "RETURN_BORROWED_NO_COPY_OR_LIFETIME" do + let(:src) { + <<~CLEAR + UNION Val { Nil, Name: String } + STRUCT Foo { v: Val } + FN getVal(f: Foo) RETURNS Val -> + v = f.v; + RETURN v; + END + FN main() RETURNS Void -> + fb = Foo{ v: Val.Nil }; + _ = getVal(fb); + END + CLEAR + } + + it "captures a fixable finding with a single :auto fix" do + annotate(src) rescue nil + findings = FixCollector.drain.select { |f| f.message =~ /not implicitly copyable|return borrowed/i } + expect(findings.size).to be >= 1 + expect(findings.first.fixes.first.confidence).to eq(:auto) + end + + it "produces an edit that inserts `COPY ` before the returned value" do + 
annotate(src) rescue nil + finding = FixCollector.drain.find { |f| f.message =~ /not implicitly copyable|return borrowed/i } + edit = finding.fixes.first.edits.first + expect(edit.replacement).to eq("COPY ") + expect(edit.span.length).to eq(0) + end + end + + describe "fallback paths" do + it "AMBIGUOUS_RETURN — falls back to plain error! when arrow_token is missing" do + tokens = Lexer.new("FN classify(n: Int64) ->\n IF n > 0 THEN RETURN n; ELSE RETURN \"x\"; END\nEND\nFN main() RETURNS Void -> END").tokenize + src = "FN classify(n: Int64) ->\n IF n > 0 THEN RETURN n; ELSE RETURN \"x\"; END\nEND\nFN main() RETURNS Void -> END" + ast = Parser.new(tokens, src).parse + # Strip arrow_token from the synthesized fn so the helper falls + # through to plain error!. + classify = ast.statements.find { |s| s.is_a?(AST::FunctionDef) && s.name == "classify" } + classify.arrow_token = nil + ann = SemanticAnnotator.new + FixCollector.disable! # raise instead of collect + expect { ann.annotate!(ast) }.to raise_error(CompilerError, /Ambiguous Return|multiple types/) + end + end +end diff --git a/spec/linear_resources_spec.rb b/spec/linear_resources_spec.rb index 46745a046..688c7dcbe 100644 --- a/spec/linear_resources_spec.rb +++ b/spec/linear_resources_spec.rb @@ -349,46 +349,46 @@ def transpile_fn(clear_src) # File::open it "raises on use-after-move of File::open resource" do src = 'FN f() RETURNS !Void -> a = File::open("x"); b = a; fileWrite(a, "bad"); RETURN; END' - expect { run(src) }.to raise_error(/Use of moved value 'a'/) + expect { run(src) }.to raise_error(/USE AFTER MOVE: You can't use `a`/) end it "raises on double-move of File::open resource" do src = 'FN f() RETURNS !Void -> a = File::open("x"); b = a; c = a; RETURN; END' - expect { run(src) }.to raise_error(/Use of moved value 'a'/) + expect { run(src) }.to raise_error(/USE AFTER MOVE: You can't use `a`/) end # File::create it "raises on use-after-move of File::create resource" do src = 'FN f() RETURNS !Void -> a = 
File::create("x"); b = a; fileWrite(a, "bad"); RETURN; END' - expect { run(src) }.to raise_error(/Use of moved value 'a'/) + expect { run(src) }.to raise_error(/USE AFTER MOVE: You can't use `a`/) end # TCPServer it "raises on use-after-move of TCPServer resource" do src = 'FN f() RETURNS !Void -> s = TCPServer::listen(0); s2 = s; c = accept(s); RETURN; END' - expect { run(src) }.to raise_error(/Use of moved value 's'/) + expect { run(src) }.to raise_error(/USE AFTER MOVE: You can't use `s`/) end it "raises on double-move of TCPServer resource" do src = 'FN f() RETURNS !Void -> s = TCPServer::listen(0); s2 = s; s3 = s; RETURN; END' - expect { run(src) }.to raise_error(/Use of moved value 's'/) + expect { run(src) }.to raise_error(/USE AFTER MOVE: You can't use `s`/) end # TCPClient it "raises on use-after-move of TCPClient resource" do src = 'FN f() RETURNS !Void -> s = TCPServer::listen(0); c = accept(s); c2 = c; d = tcpRead(c); RETURN; END' - expect { run(src) }.to raise_error(/Use of moved value 'c'/) + expect { run(src) }.to raise_error(/USE AFTER MOVE: You can't use `c`/) end it "raises on use-after-move when writing to moved TCPClient" do src = 'FN f() RETURNS !Void -> s = TCPServer::listen(0); c = accept(s); c2 = c; tcpWrite(c, "bad"); RETURN; END' - expect { run(src) }.to raise_error(/Use of moved value 'c'/) + expect { run(src) }.to raise_error(/USE AFTER MOVE: You can't use `c`/) end # TCPClient::connect it "raises on use-after-move of TCPClient::connect resource" do src = 'FN f() RETURNS !Void -> c = TCPClient::connect("127.0.0.1", 8080); c2 = c; tcpWrite(c, "bad"); RETURN; END' - expect { run(src) }.to raise_error(/Use of moved value 'c'/) + expect { run(src) }.to raise_error(/USE AFTER MOVE: You can't use `c`/) end # Normal use — should NOT raise @@ -591,13 +591,13 @@ def transpile_fn(clear_src) describe "String ownership semantics" do it "raises on reuse of moved String (strings are non-Copy)" do src = 'FN f() RETURNS !Void -> x = "hello"; y = x; z = 
x; RETURN; END' - expect { run(src) }.to raise_error(/moved/) + expect { run(src) }.to raise_error(/USE AFTER MOVE/) end it "raises on String reuse after move" do src = 'FN f(s: String) RETURNS !Void -> RETURN; END FN g() RETURNS Void -> x = "hello"; y = x; f(x); RETURN; END' - expect { run(src) }.to raise_error(/moved/) + expect { run(src) }.to raise_error(/USE AFTER MOVE/) end it "does not raise on normal string assignment and use" do diff --git a/spec/lsp/analyzer_spec.rb b/spec/lsp/analyzer_spec.rb new file mode 100644 index 000000000..a6df3c0c0 --- /dev/null +++ b/spec/lsp/analyzer_spec.rb @@ -0,0 +1,70 @@ +require "rspec" +require_relative "../../src/lsp/analyzer" + +# Drives the LSP analyzer with real CLEAR snippets and asserts that +# findings come back as expected. The analyzer is the bridge between +# the protocol layer and the CLEAR compiler — its contract is: +# * always returns a Result (never raises) +# * fatal errors become a `fatal_error` field, not an exception +# * FixCollector is left disabled afterwards (no leak) +RSpec.describe LSP::Analyzer do + describe ".run" do + it "returns an empty Result for clean source" do + result = LSP::Analyzer.run("FN main() RETURNS Void -> END\n") + expect(result.findings).to be_empty + expect(result.fatal?).to be false + end + + it "captures FixableFindings without raising" do + # WITH RESTRICT on an immutable binding — Tier 1 fix. + src = <<~CLEAR + FN main() RETURNS Void -> + x = 5; + WITH RESTRICT x { _ = x; } + END + CLEAR + result = LSP::Analyzer.run(src) + expect(result.fatal?).to be false + expect(result.findings.size).to be >= 1 + # The RESTRICT finding has a fix. + restrict = result.findings.find { |f| f.message =~ /RESTRICT/ } + expect(restrict).not_to be_nil + expect(restrict.fixes.size).to eq(1) + end + + it "surfaces a CompilerError as a fatal_error finding" do + # Undeclared variable — annotator raises CompilerError mid-pass. 
+ result = LSP::Analyzer.run(<<~CLEAR) + FN main() RETURNS Void -> + _ = doesNotExist; + END + CLEAR + expect(result.fatal?).to be true + expect(result.fatal_error.message).to match(/Undefined variable/i) + expect(result.fatal_error.token).not_to be_nil + expect(result.fatal_error.token.line).to eq(2) + end + + it "surfaces a ParserError as a fatal_error finding" do + # Missing closing brace — parser raises. + result = LSP::Analyzer.run("FN main() RETURNS Void -> ") + expect(result.fatal?).to be true + expect(result.fatal_error.category).to eq(:syntax) + end + + it "leaves FixCollector disabled after running" do + LSP::Analyzer.run("FN main() RETURNS Void -> END\n") + expect(FixCollector.enabled?).to be false + end + + it "leaves FixCollector disabled even when the analyzer raises" do + # Force the lexer to blow up by passing an object that doesn't + # respond to the methods Lexer uses. + allow(Lexer).to receive(:new).and_raise(RuntimeError, "synthetic") + result = LSP::Analyzer.run("anything") + expect(result.fatal?).to be true + expect(result.fatal_error.message).to include("synthetic") + expect(FixCollector.enabled?).to be false + end + end +end diff --git a/spec/lsp/code_actions_spec.rb b/spec/lsp/code_actions_spec.rb new file mode 100644 index 000000000..df173f7bb --- /dev/null +++ b/spec/lsp/code_actions_spec.rb @@ -0,0 +1,175 @@ +require "rspec" +require_relative "../../src/lsp/code_actions" +require_relative "../../src/lsp/document_store" +require_relative "../../src/lsp/analyzer" +require_relative "../../src/ast/fixable_error" + +# CodeActions converts FixableFindings to LSP CodeActions. The unit +# tests below feed canned findings; the server-level integration tests +# (server_spec.rb) drive the full request → response path. 
+RSpec.describe LSP::CodeActions do + Token = Struct.new(:line, :column, :value, keyword_init: true) + StubFinding = Struct.new(:level, :message, :token, :category, :fixes, keyword_init: true) + + def make_doc(text = "FN main() RETURNS Void -> END\n", findings: []) + store = LSP::DocumentStore.new + store.open("file:///t.cht", text, 1) + doc = store.get("file:///t.cht") + doc.cached_findings = LSP::Analyzer::Result.new(findings: findings, fatal_error: nil) + doc + end + + def auto_fix(line: 1, col: 1, length: 0, replacement: "MUTABLE ", desc: "Add MUTABLE") + Fix.new( + description: desc, + confidence: :auto, + edits: [Edit.new(span: Span.new(file: nil, line: line, col: col, length: length), + replacement: replacement)], + ) + end + + def interactive_fix(line: 1, col: 1, length: 5, replacement: "CAST(x AS Int64)", desc: "Wrap with CAST") + Fix.new( + description: desc, + confidence: :interactive, + edits: [Edit.new(span: Span.new(file: nil, line: line, col: col, length: length), + replacement: replacement)], + ) + end + + def finding_with(*fixes, line: 1, col: 1, value: "x", message: "an error") + StubFinding.new( + level: :error, + message: message, + token: Token.new(line: line, column: col, value: value), + category: :type, + fixes: fixes, + ) + end + + def request_range(start_line: 0, start_char: 0, end_line: 0, end_char: 100) + { + "start" => { "line" => start_line, "character" => start_char }, + "end" => { "line" => end_line, "character" => end_char }, + } + end + + describe ".for_range" do + it "returns an empty array when the document is nil" do + expect(LSP::CodeActions.for_range(nil, request_range)).to eq([]) + end + + it "returns an empty array when there are no cached findings" do + doc = LSP::DocumentStore.new.open("file:///t.cht", "x", 1) + doc = LSP::DocumentStore.new.tap { |s| s.open("file:///t.cht", "x", 1) }.get("file:///t.cht") + doc.cached_findings = nil + expect(LSP::CodeActions.for_range(doc, request_range)).to eq([]) + end + + it "returns an 
empty array when no findings have fixes" do + f = finding_with # no fixes + doc = make_doc(findings: [f]) + expect(LSP::CodeActions.for_range(doc, request_range)).to eq([]) + end + + it "skips findings whose range doesn't overlap the request" do + # Finding on line 5, request on line 1 → no overlap. + f = finding_with(auto_fix, line: 5, col: 1, value: "x") + doc = make_doc(findings: [f]) + out = LSP::CodeActions.for_range(doc, request_range(start_line: 0, end_line: 0)) + expect(out).to eq([]) + end + + it "produces one CodeAction per Fix on overlapping findings" do + f = finding_with(auto_fix, interactive_fix, line: 1, col: 1, value: "x") + doc = make_doc(findings: [f]) + out = LSP::CodeActions.for_range(doc, request_range) + expect(out.size).to eq(2) + end + + it "marks :auto fixes as 'quickfix' kind and isPreferred=true" do + f = finding_with(auto_fix, line: 1, col: 1, value: "x") + doc = make_doc(findings: [f]) + action = LSP::CodeActions.for_range(doc, request_range).first + expect(action[:kind]).to eq("quickfix") + expect(action[:isPreferred]).to be true + end + + it "marks :interactive fixes as 'refactor' kind without isPreferred" do + f = finding_with(interactive_fix, line: 1, col: 1, value: "x") + doc = make_doc(findings: [f]) + action = LSP::CodeActions.for_range(doc, request_range).first + expect(action[:kind]).to eq("refactor") + expect(action.key?(:isPreferred)).to be false + end + + it "carries the fix's description as the action title" do + f = finding_with(auto_fix(desc: "Declare 'x' as MUTABLE"), value: "x") + doc = make_doc(findings: [f]) + action = LSP::CodeActions.for_range(doc, request_range).first + expect(action[:title]).to eq("Declare 'x' as MUTABLE") + end + + it "attaches the originating Diagnostic so the client can group actions" do + f = finding_with(auto_fix, message: "Variable 'x' is immutable", value: "x") + doc = make_doc(findings: [f]) + action = LSP::CodeActions.for_range(doc, request_range).first + 
expect(action[:diagnostics].size).to eq(1) + expect(action[:diagnostics].first[:message]).to eq("Variable 'x' is immutable") + end + + it "produces a WorkspaceEdit with the fix's edits as TextEdits" do + f = finding_with(auto_fix(line: 2, col: 5, replacement: "MUTABLE "), value: "x") + doc = make_doc(findings: [f]) + action = LSP::CodeActions.for_range(doc, request_range).first + changes = action[:edit][:documentChanges] + expect(changes.size).to eq(1) + td_edit = changes.first + expect(td_edit[:textDocument][:uri]).to eq("file:///t.cht") + expect(td_edit[:textDocument][:version]).to eq(1) + edit = td_edit[:edits].first + expect(edit[:newText]).to eq("MUTABLE ") + expect(edit[:range][:start][:line]).to eq(1) # 2 → 0-based 1 + expect(edit[:range][:start][:character]).to eq(4) # col 5 → 0-based 4 + end + + it "expands every Edit in a multi-edit Fix" do + multi_fix = Fix.new( + description: "Wrap with CAST", + confidence: :interactive, + edits: [ + Edit.new(span: Span.new(file: nil, line: 1, col: 1, length: 0), + replacement: "CAST("), + Edit.new(span: Span.new(file: nil, line: 1, col: 5, length: 0), + replacement: " AS Int64)"), + ], + ) + f = finding_with(multi_fix, value: "x") + doc = make_doc(findings: [f]) + action = LSP::CodeActions.for_range(doc, request_range).first + edits = action[:edit][:documentChanges].first[:edits] + expect(edits.size).to eq(2) + expect(edits[0][:newText]).to eq("CAST(") + expect(edits[1][:newText]).to eq(" AS Int64)") + end + end + + describe "range overlap" do + it "considers ranges overlapping when one end matches the other start" do + # Action range: line 0 char 0..1; request: line 0 char 1..5. + # Touching boundaries count as overlapping. 
+ f = finding_with(auto_fix, line: 1, col: 1, value: "x") # range 0:0..0:1 + doc = make_doc(findings: [f]) + out = LSP::CodeActions.for_range(doc, request_range(start_line: 0, start_char: 1, end_line: 0, end_char: 5)) + expect(out.size).to eq(1) + end + + it "considers ranges non-overlapping when one ends strictly before the other starts" do + # Action range: line 0 char 0..1; request: line 0 char 5..10. + f = finding_with(auto_fix, line: 1, col: 1, value: "x") + doc = make_doc(findings: [f]) + out = LSP::CodeActions.for_range(doc, request_range(start_line: 0, start_char: 5, end_line: 0, end_char: 10)) + expect(out).to eq([]) + end + end +end diff --git a/spec/lsp/diagnostics_spec.rb b/spec/lsp/diagnostics_spec.rb new file mode 100644 index 000000000..76f368315 --- /dev/null +++ b/spec/lsp/diagnostics_spec.rb @@ -0,0 +1,118 @@ +require "rspec" +require_relative "../../src/lsp/diagnostics" +require_relative "../../src/lsp/analyzer" + +RSpec.describe LSP::Diagnostics do + Token = Struct.new(:line, :column, :value, keyword_init: true) + StubFinding = Struct.new(:level, :message, :token, :category, :fixes, keyword_init: true) + + describe ".from_finding" do + it "produces a complete LSP Diagnostic for an error-level finding" do + finding = StubFinding.new( + level: :error, + message: "Undefined variable 'foo'.", + token: Token.new(line: 3, column: 7, value: "foo"), + category: :type, + fixes: [], + ) + d = LSP::Diagnostics.from_finding(finding) + expect(d[:severity]).to eq(LSP::Diagnostics::SEVERITY_ERROR) + expect(d[:message]).to eq("Undefined variable 'foo'.") + expect(d[:source]).to eq("clear") + expect(d[:range]).to eq( + start: { line: 2, character: 6 }, + end: { line: 2, character: 9 }, + ) + end + + it "maps level → LSP severity correctly" do + [ + [:error, LSP::Diagnostics::SEVERITY_ERROR], + [:warning, LSP::Diagnostics::SEVERITY_WARNING], + [:info, LSP::Diagnostics::SEVERITY_INFO], + [:hint, LSP::Diagnostics::SEVERITY_HINT], + ].each do |level, expected| + f = 
StubFinding.new(level: level, message: "x", token: Token.new(line: 1, column: 1, value: "x"), category: :lint, fixes: []) + d = LSP::Diagnostics.from_finding(f) + expect(d[:severity]).to eq(expected), "level :#{level} should map to severity #{expected}, got #{d[:severity]}" + end + end + + it "defaults severity to ERROR for unknown levels" do + f = StubFinding.new(level: :weird, message: "x", token: Token.new(line: 1, column: 1, value: "x"), category: :type, fixes: []) + expect(LSP::Diagnostics.from_finding(f)[:severity]).to eq(LSP::Diagnostics::SEVERITY_ERROR) + end + + it "recovers the registry code when the message starts with a known template prefix" do + # ARITY_MISMATCH template: "Type Error: Function '%{name}' expects %{expected} arguments, got %{got}" + f = StubFinding.new( + level: :error, + message: "Type Error: Function 'add' expects 2 arguments, got 3", + token: Token.new(line: 1, column: 1, value: "add"), + category: :type, + fixes: [], + ) + d = LSP::Diagnostics.from_finding(f) + expect(d[:code]).to be_a(String) + expect(d[:code]).to match(/^[A-Z][A-Z0-9_]+$/) + end + + it "leaves code unset when no template prefix matches" do + f = StubFinding.new( + level: :error, + message: "this exactly-this string is in no registry template", + token: Token.new(line: 1, column: 1, value: "x"), + category: :type, + fixes: [], + ) + d = LSP::Diagnostics.from_finding(f) + expect(d).not_to have_key(:code) # .compact strips nil values + end + + it "computes a 1-character range for an empty-value token" do + f = StubFinding.new( + level: :error, + message: "x", + token: Token.new(line: 1, column: 1, value: ""), + category: :type, + fixes: [], + ) + d = LSP::Diagnostics.from_finding(f) + expect(d[:range][:start][:character]).to eq(0) + expect(d[:range][:end][:character]).to eq(1) + end + + it "uses the source string to compute UTF-16 columns when present" do + # `é` is one UTF-16 code unit; the byte column for `foo` differs + # from the character column. 
+ source = " é foo\n" + f = StubFinding.new( + level: :error, + message: "x", + token: Token.new(line: 1, column: 6, value: "foo"), # 1-based byte col 6 + category: :type, + fixes: [], + ) + d = LSP::Diagnostics.from_finding(f, source) + expect(d[:range][:start][:character]).to eq(4) + expect(d[:range][:end][:character]).to eq(7) + end + end + + describe ".from_result" do + it "converts every finding plus the fatal error" do + f1 = StubFinding.new(level: :warning, message: "w", token: Token.new(line: 1, column: 1, value: "x"), category: :lint, fixes: []) + f2 = StubFinding.new(level: :error, message: "e", token: Token.new(line: 2, column: 1, value: "y"), category: :type, fixes: []) + fatal = StubFinding.new(level: :error, message: "boom", token: Token.new(line: 3, column: 1, value: "z"), category: :syntax, fixes: []) + result = LSP::Analyzer::Result.new(findings: [f1, f2], fatal_error: fatal) + diags = LSP::Diagnostics.from_result(result) + expect(diags.size).to eq(3) + expect(diags.last[:message]).to eq("boom") + end + + it "produces an empty array for a clean Result" do + result = LSP::Analyzer::Result.new(findings: [], fatal_error: nil) + expect(LSP::Diagnostics.from_result(result)).to eq([]) + end + end +end diff --git a/spec/lsp/document_store_spec.rb b/spec/lsp/document_store_spec.rb new file mode 100644 index 000000000..4c403de84 --- /dev/null +++ b/spec/lsp/document_store_spec.rb @@ -0,0 +1,72 @@ +require "rspec" +require_relative "../../src/lsp/document_store" + +RSpec.describe LSP::DocumentStore do + let(:store) { described_class.new } + let(:uri) { "file:///tmp/foo.cht" } + + describe "#open" do + it "stores text + version" do + store.open(uri, "hello", 1) + expect(store.text(uri)).to eq("hello") + expect(store.version(uri)).to eq(1) + end + end + + describe "#update" do + it "replaces text and bumps version" do + store.open(uri, "v1", 1) + store.update(uri, "v2", 2) + expect(store.text(uri)).to eq("v2") + expect(store.version(uri)).to eq(2) + end + + it 
"returns nil when the uri isn't open" do + expect(store.update("file:///nope.cht", "x", 1)).to be_nil + end + + it "invalidates cached findings on update" do + store.open(uri, "v1", 1) + doc = store.get(uri) + doc.cached_findings = :stale_value + doc.cached_version = 1 + + store.update(uri, "v2", 2) + expect(doc.cached_findings).to be_nil + expect(doc.cached_version).to be_nil + end + end + + describe "#close" do + it "drops the document" do + store.open(uri, "x", 1) + store.close(uri) + expect(store.get(uri)).to be_nil + end + + it "is a no-op for an unknown uri" do + expect { store.close("file:///nope.cht") }.not_to raise_error + end + end + + describe "cache fields" do + it "exposes cached_findings and cached_version" do + store.open(uri, "x", 1) + doc = store.get(uri) + doc.cached_findings = "FINDINGS" + doc.cached_version = 1 + expect(doc.cached_findings).to eq("FINDINGS") + expect(doc.cached_version).to eq(1) + end + end + + describe "#each" do + it "iterates every open document" do + store.open("file:///a.cht", "a", 1) + store.open("file:///b.cht", "b", 1) + texts = [] + store.each { |d| texts << d.text } + expect(texts.sort).to eq(["a", "b"]) + end + end +end diff --git a/spec/lsp/hover_spec.rb b/spec/lsp/hover_spec.rb new file mode 100644 index 000000000..d4d54bc3f --- /dev/null +++ b/spec/lsp/hover_spec.rb @@ -0,0 +1,201 @@ +require "rspec" +require_relative "../../src/lsp/hover" +require_relative "../../src/lsp/document_store" +require_relative "../../src/lsp/analyzer" +require_relative "../../src/ast/fixable_error" + +RSpec.describe LSP::Hover do + Token = Struct.new(:line, :column, :value, keyword_init: true) + StubFinding = Struct.new(:level, :message, :token, :category, :fixes, keyword_init: true) + + def make_doc(text = " x = 5;\n", findings: [], fatal: nil) + store = LSP::DocumentStore.new + store.open("file:///t.cht", text, 1) + doc = store.get("file:///t.cht") + doc.cached_findings = LSP::Analyzer::Result.new(findings: findings, fatal_error: 
fatal) + doc + end + + describe ".render" do + it "returns nil when document is nil" do + expect(LSP::Hover.render(nil, { "line" => 0, "character" => 0 })).to be_nil + end + + it "returns nil when cached_findings is nil" do + store = LSP::DocumentStore.new + store.open("file:///t.cht", "x = 5", 1) + doc = store.get("file:///t.cht") + expect(LSP::Hover.render(doc, { "line" => 0, "character" => 0 })).to be_nil + end + + it "returns nil when no finding overlaps the cursor" do + f = StubFinding.new(level: :error, message: "x", token: Token.new(line: 1, column: 1, value: "foo"), category: :type, fixes: []) + doc = make_doc(findings: [f]) + # Cursor on line 5 — way past the finding. + expect(LSP::Hover.render(doc, { "line" => 5, "character" => 0 })).to be_nil + end + + it "renders a hover for an overlapping finding with a known registry code" do + # ARITY_MISMATCH template: "Function '%{name}' expects %{expected} arguments, got %{got}." + f = StubFinding.new( + level: :error, + message: "Function 'add' expects 2 arguments, got 3.", + token: Token.new(line: 1, column: 5, value: "add"), + category: :type, + fixes: [], + ) + doc = make_doc(" add(1, 2, 3);\n", findings: [f]) + hover = LSP::Hover.render(doc, { "line" => 0, "character" => 5 }) + expect(hover).not_to be_nil + expect(hover[:contents][:kind]).to eq("markdown") + md = hover[:contents][:value] + expect(md).to include("ARITY_MISMATCH") + expect(md).to include("error") + end + + it "falls back to the raw message when no registry code resolves" do + f = StubFinding.new( + level: :error, + message: "totally bespoke message that no template prefix matches", + token: Token.new(line: 1, column: 1, value: "foo"), + category: :type, + fixes: [], + ) + doc = make_doc("foo;\n", findings: [f]) + hover = LSP::Hover.render(doc, { "line" => 0, "character" => 1 }) + expect(hover).not_to be_nil + md = hover[:contents][:value] + expect(md).to include("totally bespoke message") + end + + it "renders the cause and fix_hint when the 
registry entry has them" do + # ATOMIC_ESCAPE_RETURN umbrella was given rich docs in T9 backfill. + # Its template is "%{message}" so we provide a custom prefix + # — but DiagnosticRegistry won't recover the code from the + # rendered text. Instead, use a real registry entry whose template + # has a literal prefix and rich docs: + # UNDEFINED_VAR has cause + fix_hint and a template starting + # with "Undefined variable '". + f = StubFinding.new( + level: :error, + message: "Undefined variable 'foo'.", + token: Token.new(line: 1, column: 1, value: "foo"), + category: :type, + fixes: [], + ) + doc = make_doc("foo;\n", findings: [f]) + hover = LSP::Hover.render(doc, { "line" => 0, "character" => 1 }) + md = hover[:contents][:value] + expect(md).to include("UNDEFINED_VAR") + expect(md).to include("**Cause:**") + expect(md).to include("**Fix:**") + end + + it "renders the spec-pulled bad/good example when DiagnosticExamples has one" do + # ENUM_UNKNOWN_VARIANT has both rich docs AND a spec example. 
+ f = StubFinding.new( + level: :error, + message: "Type Error: Enum 'Color' has no variant 'Yellow'.", + token: Token.new(line: 1, column: 1, value: "Yellow"), + category: :type, + fixes: [], + ) + doc = make_doc("Color.Yellow;\n", findings: [f]) + hover = LSP::Hover.render(doc, { "line" => 0, "character" => 1 }) + md = hover[:contents][:value] + expect(md).to include("**Example (bad):**") + expect(md).to include("**Example (good):**") + expect(md).to include("```clear") + end + + it "uses fatal_error finding when present" do + fatal = StubFinding.new( + level: :error, + message: "boom", + token: Token.new(line: 1, column: 1, value: "x"), + category: :syntax, + fixes: [], + ) + doc = make_doc("x\n", findings: [], fatal: fatal) + hover = LSP::Hover.render(doc, { "line" => 0, "character" => 0 }) + expect(hover).not_to be_nil + expect(hover[:contents][:value]).to include("boom") + end + + it "puts the diagnostic's range in the hover response" do + f = StubFinding.new( + level: :error, + message: "Undefined variable 'foo'.", + token: Token.new(line: 2, column: 5, value: "foo"), + category: :type, + fixes: [], + ) + doc = make_doc("a\nb\n foo;\n", findings: [f]) + hover = LSP::Hover.render(doc, { "line" => 1, "character" => 5 }) + expect(hover[:range][:start][:line]).to eq(1) + expect(hover[:range][:start][:character]).to eq(4) + end + + it "falls back to same-line finding when cursor isn't on the exact token" do + # The diagnostic's token covers col 5..8 (`add`); cursor is at + # col 30 on the same line. Strict overlap fails, but the + # same-line fallback picks the finding anyway. 
+ f = StubFinding.new( + level: :error, + message: "Function 'add' expects 2 arguments, got 3.", + token: Token.new(line: 1, column: 5, value: "add"), + category: :type, + fixes: [], + ) + doc = make_doc(" add(1, 2, 3); comment\n", findings: [f]) + hover = LSP::Hover.render(doc, { "line" => 0, "character" => 30 }) + expect(hover).not_to be_nil + expect(hover[:contents][:value]).to include("ARITY_MISMATCH") + end + + it "still returns nil when the cursor is on a different line" do + f = StubFinding.new( + level: :error, + message: "Function 'add' expects 2 arguments, got 3.", + token: Token.new(line: 1, column: 5, value: "add"), + category: :type, + fixes: [], + ) + doc = make_doc(" add(1, 2, 3);\n comment;\n", findings: [f]) + expect(LSP::Hover.render(doc, { "line" => 1, "character" => 0 })).to be_nil + end + + it "renders the code-only header when registry.lookup returns nil" do + # Theoretical edge case: Diagnostics recovers a code from the + # message text, but a concurrent registry mutation (or stub) + # makes lookup return nil. Hover should still render — without + # the category half of the header. + f = StubFinding.new( + level: :error, + message: "Function 'add' expects 2 arguments, got 3.", + token: Token.new(line: 1, column: 1, value: "add"), + category: :type, + fixes: [], + ) + doc = make_doc("add(1,2,3);\n", findings: [f]) + allow(DiagnosticRegistry).to receive(:lookup).and_return(nil) + hover = LSP::Hover.render(doc, { "line" => 0, "character" => 0 }) + expect(hover[:contents][:value]).to include("ARITY_MISMATCH") + # No category italic when entry is nil. 
+ expect(hover[:contents][:value]).not_to include("_type_") + end + + it "renders severity correctly for warning / hint / info levels" do + [ + [:warning, "warning"], + [:hint, "hint"], + [:info, "info"], + ].each do |level, label| + f = StubFinding.new(level: level, message: "x", token: Token.new(line: 1, column: 1, value: "x"), category: :lint, fixes: []) + doc = make_doc("x\n", findings: [f]) + hover = LSP::Hover.render(doc, { "line" => 0, "character" => 0 }) + expect(hover[:contents][:value]).to include("[#{label}]"), "expected level :#{level} → label '#{label}'" + end + end + end +end diff --git a/spec/lsp/position_spec.rb b/spec/lsp/position_spec.rb new file mode 100644 index 000000000..b003f96e1 --- /dev/null +++ b/spec/lsp/position_spec.rb @@ -0,0 +1,128 @@ +require "rspec" +require_relative "../../src/lsp/position" + +RSpec.describe LSP::Position do + Token = Struct.new(:line, :column, :value, keyword_init: true) + + describe ".range_for" do + it "converts a 1-based ASCII token to a 0-based UTF-16 range" do + tok = Token.new(line: 1, column: 5, value: "foo") + r = LSP::Position.range_for(tok, 3, " foo\n") + expect(r).to eq( + start: { line: 0, character: 4 }, + end: { line: 0, character: 7 }, + ) + end + + it "shifts the range to a higher line" do + tok = Token.new(line: 3, column: 1, value: "x") + r = LSP::Position.range_for(tok, 1, "a\nb\nx\n") + expect(r[:start][:line]).to eq(2) + expect(r[:end][:line]).to eq(2) + end + + it "falls through to byte offsets when source isn't supplied" do + tok = Token.new(line: 1, column: 5, value: "foo") + r = LSP::Position.range_for(tok, 3, nil) + expect(r).to eq( + start: { line: 0, character: 4 }, + end: { line: 0, character: 7 }, + ) + end + + it "treats UTF-8 multi-byte chars as 1 UTF-16 code unit when below U+FFFF" do + # `é` is 2 bytes (0xC3 0xA9) but 1 UTF-16 code unit. + # Line text: " é foo"; the `f` of `foo` is at byte column 5 + # but UTF-16 character 4 (because `é` counts as 1). 
+ line = " é foo\n" + tok = Token.new(line: 1, column: 6, value: "foo") # 1-based byte col + r = LSP::Position.range_for(tok, 3, line) + expect(r[:start][:character]).to eq(4) + expect(r[:end][:character]).to eq(7) + end + + it "treats supplementary-plane chars as 2 UTF-16 code units" do + # 🎉 is U+1F389 (4 bytes UTF-8 / 2 UTF-16 surrogates). + line = "🎉 foo\n" + tok = Token.new(line: 1, column: 6, value: "foo") # `f` at byte col 6 + r = LSP::Position.range_for(tok, 3, line) + # `🎉` = 2 UTF-16 + space (1) = 3 UTF-16 chars → `f` is char 3 + expect(r[:start][:character]).to eq(3) + expect(r[:end][:character]).to eq(6) + end + end + + describe ".range_for_span" do + Span = Struct.new(:file, :line, :col, :length, keyword_init: true) do + def end_line; line; end + def end_col; col + length; end + end + + it "converts a single-line Span to an LSP range" do + span = Span.new(file: nil, line: 2, col: 3, length: 5) + r = LSP::Position.range_for_span(span, "row1\n hello\n") + expect(r).to eq( + start: { line: 1, character: 2 }, + end: { line: 1, character: 7 }, + ) + end + + it "handles a multi-line Span via end_line override" do + multi_span = Object.new + def multi_span.line; 2; end + def multi_span.end_line; 4; end + def multi_span.col; 1; end + def multi_span.end_col; 5; end + r = LSP::Position.range_for_span(multi_span, "a\nb\nc\nd\n") + expect(r[:start][:line]).to eq(1) + expect(r[:end][:line]).to eq(3) + end + end + + describe ".position_in_range?" 
do + let(:range) { + { start: { line: 2, character: 4 }, end: { line: 2, character: 10 } } + } + + it "returns true for a position inside the range" do + expect(LSP::Position.position_in_range?({ line: 2, character: 6 }, range)).to be true + end + + it "returns false above the range's start line" do + expect(LSP::Position.position_in_range?({ line: 1, character: 100 }, range)).to be false + end + + it "returns false below the range's end line" do + expect(LSP::Position.position_in_range?({ line: 3, character: 0 }, range)).to be false + end + + it "respects start-of-line column boundaries" do + expect(LSP::Position.position_in_range?({ line: 2, character: 3 }, range)).to be false + expect(LSP::Position.position_in_range?({ line: 2, character: 4 }, range)).to be true + end + + it "respects end-of-line column boundaries" do + expect(LSP::Position.position_in_range?({ line: 2, character: 10 }, range)).to be true + expect(LSP::Position.position_in_range?({ line: 2, character: 11 }, range)).to be false + end + + it "accepts string-keyed positions (LSP wire format)" do + expect(LSP::Position.position_in_range?({ "line" => 2, "character" => 6 }, range)).to be true + end + end + + describe ".line_at" do + it "returns the requested line without its trailing newline" do + expect(LSP::Position.line_at("a\nbb\nccc\n", 1)).to eq("bb") + end + + it "returns nil when out of bounds" do + expect(LSP::Position.line_at("a\n", 5)).to be_nil + expect(LSP::Position.line_at("a\n", -1)).to be_nil + end + + it "returns nil when source is nil" do + expect(LSP::Position.line_at(nil, 0)).to be_nil + end + end +end diff --git a/spec/lsp/rpc_spec.rb b/spec/lsp/rpc_spec.rb new file mode 100644 index 000000000..2630a2a6d --- /dev/null +++ b/spec/lsp/rpc_spec.rb @@ -0,0 +1,119 @@ +require "rspec" +require "stringio" +require_relative "../../src/lsp/rpc" + +# Round-trip tests for LSP::RPC. The framing is the foundation — if +# this fails, every higher-layer feature breaks. 
+RSpec.describe LSP::RPC do + describe ".write_message + .read_message round-trip" do + it "encodes and decodes a simple request" do + io = StringIO.new + LSP::RPC.write_message(io, { jsonrpc: "2.0", id: 1, method: "initialize", params: {} }) + io.rewind + msg = LSP::RPC.read_message(io) + expect(msg).to eq({ + "jsonrpc" => "2.0", + "id" => 1, + "method" => "initialize", + "params" => {}, + }) + end + + it "round-trips multi-byte UTF-8 in the body" do + io = StringIO.new + payload = { jsonrpc: "2.0", id: 7, result: "héllo — wörld" } + LSP::RPC.write_message(io, payload) + io.rewind + msg = LSP::RPC.read_message(io) + expect(msg["result"]).to eq("héllo — wörld") + end + + it "writes a properly-formatted Content-Length header" do + io = StringIO.new + LSP::RPC.write_message(io, { jsonrpc: "2.0", id: 1, result: "ok" }) + io.rewind + raw = io.read + expect(raw).to start_with("Content-Length: ") + expect(raw).to include("\r\n\r\n") + header, body = raw.split("\r\n\r\n", 2) + length = header[/Content-Length: (\d+)/, 1].to_i + expect(body.bytesize).to eq(length) + end + + it "handles consecutive frames in one stream" do + io = StringIO.new + LSP::RPC.write_message(io, { id: 1, method: "a" }) + LSP::RPC.write_message(io, { id: 2, method: "b" }) + io.rewind + first = LSP::RPC.read_message(io) + second = LSP::RPC.read_message(io) + expect(first["id"]).to eq(1) + expect(second["id"]).to eq(2) + end + end + + describe ".read_message" do + it "returns nil at EOF before any header" do + io = StringIO.new("") + expect(LSP::RPC.read_message(io)).to be_nil + end + + it "raises FramingError when Content-Length is missing" do + io = StringIO.new("X-Other: 1\r\n\r\n{}") + expect { + LSP::RPC.read_message(io) + }.to raise_error(LSP::RPC::FramingError, /missing Content-Length/) + end + + it "raises FramingError when Content-Length isn't numeric" do + io = StringIO.new("Content-Length: not-a-number\r\n\r\n{}") + expect { + LSP::RPC.read_message(io) + }.to 
raise_error(LSP::RPC::FramingError, /invalid Content-Length/) + end + + it "raises FramingError when Content-Length is negative" do + io = StringIO.new("Content-Length: -5\r\n\r\n{}") + expect { + LSP::RPC.read_message(io) + }.to raise_error(LSP::RPC::FramingError, /negative Content-Length/) + end + + it "raises FramingError when the body is truncated" do + # Content-Length advertises 100 bytes; only 5 are actually present. + io = StringIO.new("Content-Length: 100\r\n\r\nshort") + expect { + LSP::RPC.read_message(io) + }.to raise_error(LSP::RPC::FramingError, /truncated body/) + end + + it "raises FramingError on malformed JSON" do + raw = "not valid json" + io = StringIO.new("Content-Length: #{raw.bytesize}\r\n\r\n#{raw}") + expect { + LSP::RPC.read_message(io) + }.to raise_error(LSP::RPC::FramingError, /JSON parse error/) + end + + it "raises FramingError on a malformed header line" do + io = StringIO.new("not-a-header-just-a-string\r\n\r\n{}") + expect { + LSP::RPC.read_message(io) + }.to raise_error(LSP::RPC::FramingError, /malformed header/) + end + + it "raises FramingError on EOF mid-header" do + io = StringIO.new("Content-Length: 5\r\n") # no blank line ending headers + expect { + LSP::RPC.read_message(io) + }.to raise_error(LSP::RPC::FramingError, /unexpected EOF/) + end + + it "is case-insensitive on header names" do + raw = '{"id":1}' + io = StringIO.new("CONTENT-LENGTH: #{raw.bytesize}\r\n\r\n#{raw}") + msg = LSP::RPC.read_message(io) + expect(msg["id"]).to eq(1) + end + end +end diff --git a/spec/lsp/server_integration_spec.rb b/spec/lsp/server_integration_spec.rb new file mode 100644 index 000000000..ec0e9e4ce --- /dev/null +++ b/spec/lsp/server_integration_spec.rb @@ -0,0 +1,190 @@ +require "rspec" +require "json" +require "open3" +require "timeout" + +# End-to-end driver for `bin/clear-lsp`. 
Spawns the actual binary +# under `bundle exec` (the compiler uses `require 'bundler/setup'`), +# pipes JSON-RPC frames in, and asserts on the responses written +# back. Tagged `:integration` so it doesn't run during normal +# `bundle exec rspec spec/` invocations — kick it off explicitly: +# +# bundle exec rspec spec/lsp/server_integration_spec.rb --tag integration +# +# Each example is wrapped in a 5-second timeout so a hung server +# fails fast rather than blocking the suite. +RSpec.describe "clear-lsp end-to-end (binary)", :integration do + REPO_ROOT = File.expand_path("../../..", __FILE__) + BIN_PATH = File.join(REPO_ROOT, "bin", "clear-lsp") + + def frame(msg) + body = JSON.generate(msg) + "Content-Length: #{body.bytesize}\r\n\r\n#{body}" + end + + def parse_frames(raw) + out = [] + while raw && !raw.empty? + header_end = raw.index("\r\n\r\n") + break if header_end.nil? + length = raw[0...header_end][/Content-Length: (\d+)/i, 1].to_i + body_start = header_end + 4 + body = raw[body_start, length] + break unless body && body.bytesize == length + out << JSON.parse(body) + raw = raw[(body_start + length)..] 
+ end + out + end + + def drive(input_messages) + drive_with_args([], input_messages) + end + + def drive_with_args(extra_args, input_messages) + input = input_messages.map { |m| frame(m) }.join + Timeout.timeout(5) do + stdout, stderr, status = Open3.capture3( + "bundle", "exec", BIN_PATH, *extra_args, + stdin_data: input, + chdir: REPO_ROOT, + ) + [parse_frames(stdout), stderr, status] + end + end + + it "completes the initialize/shutdown handshake" do + frames, _stderr, status = drive([ + { jsonrpc: "2.0", id: 1, method: "initialize", params: {} }, + { jsonrpc: "2.0", method: "initialized", params: {} }, + { jsonrpc: "2.0", id: 2, method: "shutdown", params: nil }, + { jsonrpc: "2.0", method: "exit", params: nil }, + ]) + expect(status.exitstatus).to eq(0) + + init = frames.find { |f| f["id"] == 1 } + expect(init["result"]["serverInfo"]["name"]).to eq("clear-lsp") + caps = init["result"]["capabilities"] + expect(caps["textDocumentSync"]).to eq(1) + expect(caps["hoverProvider"]).to be true + expect(caps["codeActionProvider"]["codeActionKinds"]).to include("quickfix", "refactor") + + shutdown = frames.find { |f| f["id"] == 2 } + expect(shutdown).to have_key("result") + end + + it "publishes diagnostics on didOpen for source with errors" do + src = "FN main() RETURNS Void ->\n _ = doesNotExist;\nEND\n" + frames, _stderr, status = drive([ + { jsonrpc: "2.0", id: 1, method: "initialize", params: {} }, + { jsonrpc: "2.0", method: "initialized", params: {} }, + { jsonrpc: "2.0", method: "textDocument/didOpen", params: { + textDocument: { uri: "file:///t.cht", languageId: "clear", version: 1, text: src }, + } }, + { jsonrpc: "2.0", id: 2, method: "shutdown", params: nil }, + { jsonrpc: "2.0", method: "exit", params: nil }, + ]) + expect(status.exitstatus).to eq(0) + + publish = frames.find { |f| f["method"] == "textDocument/publishDiagnostics" } + expect(publish).not_to be_nil + diagnostics = publish["params"]["diagnostics"] + expect(diagnostics.size).to be >= 1 + diag 
= diagnostics.first + expect(diag["severity"]).to eq(1) + expect(diag["source"]).to eq("clear") + expect(diag["code"]).to eq("UNDEFINED_VAR") + # `doesNotExist` lives on line 1 (0-based) starting at character 6. + expect(diag["range"]["start"]["line"]).to eq(1) + expect(diag["range"]["start"]["character"]).to eq(6) + end + + it "returns code actions for a fixable finding" do + src = <<~CLEAR + FN main() RETURNS Void -> + x = 5; + WITH RESTRICT x { _ = x; } + END + CLEAR + frames, _stderr, status = drive([ + { jsonrpc: "2.0", id: 1, method: "initialize", params: {} }, + { jsonrpc: "2.0", method: "initialized", params: {} }, + { jsonrpc: "2.0", method: "textDocument/didOpen", params: { + textDocument: { uri: "file:///t.cht", languageId: "clear", version: 1, text: src }, + } }, + { jsonrpc: "2.0", id: 2, method: "textDocument/codeAction", params: { + textDocument: { uri: "file:///t.cht" }, + range: { start: { line: 2, character: 0 }, end: { line: 2, character: 100 } }, + context: { diagnostics: [] }, + } }, + { jsonrpc: "2.0", id: 3, method: "shutdown", params: nil }, + { jsonrpc: "2.0", method: "exit", params: nil }, + ]) + expect(status.exitstatus).to eq(0) + + actions = frames.find { |f| f["id"] == 2 }["result"] + expect(actions.size).to be >= 1 + fix = actions.first + expect(fix["kind"]).to eq("quickfix") + expect(fix["isPreferred"]).to be true + expect(fix["title"]).to match(/MUTABLE/) + expect(fix["edit"]["documentChanges"].first["edits"].first["newText"]).to eq("MUTABLE ") + end + + it "renders hover content with registry markdown" do + src = "FN main() RETURNS Void ->\n _ = doesNotExist;\nEND\n" + frames, _stderr, status = drive([ + { jsonrpc: "2.0", id: 1, method: "initialize", params: {} }, + { jsonrpc: "2.0", method: "initialized", params: {} }, + { jsonrpc: "2.0", method: "textDocument/didOpen", params: { + textDocument: { uri: "file:///t.cht", languageId: "clear", version: 1, text: src }, + } }, + { jsonrpc: "2.0", id: 2, method: "textDocument/hover", 
params: { + textDocument: { uri: "file:///t.cht" }, + position: { line: 1, character: 8 }, + } }, + { jsonrpc: "2.0", id: 3, method: "shutdown", params: nil }, + { jsonrpc: "2.0", method: "exit", params: nil }, + ]) + expect(status.exitstatus).to eq(0) + + hover = frames.find { |f| f["id"] == 2 }["result"] + expect(hover["contents"]["kind"]).to eq("markdown") + md = hover["contents"]["value"] + expect(md).to include("UNDEFINED_VAR") + expect(md).to include("**Cause:**") + expect(md).to include("**Fix:**") + end + + it "accepts --stdio as a no-op (LSP clients pass it by default)" do + # vscode-languageclient appends --stdio when configured for stdio + # transport. Our binary must not reject it. + frames, _stderr, status = drive_with_args(["--stdio"], [ + { jsonrpc: "2.0", id: 1, method: "initialize", params: {} }, + { jsonrpc: "2.0", id: 2, method: "shutdown", params: nil }, + { jsonrpc: "2.0", method: "exit", params: nil }, + ]) + expect(status.exitstatus).to eq(0) + expect(frames.find { |f| f["id"] == 1 }["result"]["serverInfo"]["name"]).to eq("clear-lsp") + end + + it "rejects malformed --log-level with exit code 2" do + _stdout, stderr, status = Open3.capture3( + "bundle", "exec", BIN_PATH, "--log-level=screaming", + stdin_data: "", + chdir: REPO_ROOT, + ) + expect(status.exitstatus).to eq(2) + expect(stderr).to include("unknown --log-level") + end + + it "shows usage on --help" do + stdout, _stderr, status = Open3.capture3( + "bundle", "exec", BIN_PATH, "--help", + stdin_data: "", + chdir: REPO_ROOT, + ) + expect(status.exitstatus).to eq(0) + expect(stdout).to include("Usage: clear-lsp") + end +end diff --git a/spec/lsp/server_spec.rb b/spec/lsp/server_spec.rb new file mode 100644 index 000000000..26ee768f1 --- /dev/null +++ b/spec/lsp/server_spec.rb @@ -0,0 +1,485 @@ +require "rspec" +require "stringio" +require_relative "../../src/lsp/server" + +# Lifecycle tests for LSP::Server. 
Drives the server with canned +# stdin frames and asserts on the responses written to stdout. +# The server exits on `exit` notification or stdin EOF; we stub +# `Kernel.exit` to keep the spec process alive. +RSpec.describe LSP::Server do + let(:stdin) { StringIO.new } + let(:stdout) { StringIO.new } + + # Tests use a tiny debounce so they don't block. Production runs at + # the default 500ms. + def server(debounce_ms: 5) + LSP::Server.new(stdin: stdin, stdout: stdout, log_level: :error, debounce_ms: debounce_ms) + end + + def write(io, msg) + body = JSON.generate(msg) + io.write("Content-Length: #{body.bytesize}\r\n\r\n#{body}") + end + + def read_responses(io) + io.rewind + out = [] + until io.eof? + msg = LSP::RPC.read_message(io) + break if msg.nil? + out << msg + end + out + end + + describe "initialize / initialized handshake" do + it "responds to `initialize` with a capabilities envelope" do + write(stdin, jsonrpc: "2.0", id: 1, method: "initialize", params: {}) + stdin.rewind + + # Stub exit so we can drive a clean shutdown afterwards. + allow_any_instance_of(LSP::Server).to receive(:handle_exit) { throw :stop } + + catch(:stop) { server.run } + + responses = read_responses(stdout) + expect(responses.first["id"]).to eq(1) + expect(responses.first["result"]).to include("capabilities") + expect(responses.first["result"]["serverInfo"]["name"]).to eq("clear-lsp") + end + + it "accepts an `initialized` notification (no response)" do + write(stdin, jsonrpc: "2.0", id: 1, method: "initialize", params: {}) + write(stdin, jsonrpc: "2.0", method: "initialized", params: {}) + stdin.rewind + + server.run # stdin EOF → loop exits naturally + + responses = read_responses(stdout) + # Only the `initialize` response — `initialized` is a notification. 
+ expect(responses.size).to eq(1) + expect(responses.first["id"]).to eq(1) + end + end + + describe "shutdown / exit handshake" do + it "responds to `shutdown` with a null result" do + write(stdin, jsonrpc: "2.0", id: 1, method: "initialize", params: {}) + write(stdin, jsonrpc: "2.0", id: 2, method: "shutdown", params: nil) + stdin.rewind + + server.run + + responses = read_responses(stdout) + shutdown_resp = responses.find { |r| r["id"] == 2 } + expect(shutdown_resp).not_to be_nil + expect(shutdown_resp).to have_key("result") + expect(shutdown_resp["result"]).to be_nil + end + + it "exits with code 0 after `shutdown` then `exit`" do + write(stdin, jsonrpc: "2.0", id: 1, method: "shutdown", params: nil) + write(stdin, jsonrpc: "2.0", method: "exit", params: nil) + stdin.rewind + + expect { server.run }.to raise_error(SystemExit) { |e| expect(e.status).to eq(0) } + end + + it "exits with code 1 when `exit` arrives without prior `shutdown`" do + write(stdin, jsonrpc: "2.0", method: "exit", params: nil) + stdin.rewind + + expect { server.run }.to raise_error(SystemExit) { |e| expect(e.status).to eq(1) } + end + end + + describe "unknown methods" do + it "responds with -32601 (Method not found) to unknown requests" do + write(stdin, jsonrpc: "2.0", id: 99, method: "completelyUnknownThing", params: {}) + stdin.rewind + + server.run # stdin EOF after the one message + + responses = read_responses(stdout) + err = responses.first + expect(err["id"]).to eq(99) + expect(err["error"]["code"]).to eq(-32601) + expect(err["error"]["message"]).to include("completelyUnknownThing") + end + + it "silently drops unknown notifications (no response)" do + write(stdin, jsonrpc: "2.0", method: "$/cancelRequest", params: { id: 5 }) + stdin.rewind + + server.run + + responses = read_responses(stdout) + expect(responses).to be_empty + end + end + + describe "framing errors" do + it "exits with code 1 on a malformed header" do + stdin.write("not a header\r\n\r\n{}") + stdin.rewind + + expect 
{ server.run }.to raise_error(SystemExit) { |e| expect(e.status).to eq(1) } + end + + it "exits with code 1 on an unexpected exception during dispatch" do + # Force the dispatcher to raise something that isn't a FramingError. + write(stdin, jsonrpc: "2.0", id: 1, method: "initialize", params: {}) + stdin.rewind + + srv = server + allow(srv).to receive(:dispatch).and_raise(RuntimeError, "boom") + expect { srv.run }.to raise_error(SystemExit) { |e| expect(e.status).to eq(1) } + end + end + + describe "EOF handling" do + it "exits cleanly when stdin closes with no pending messages" do + stdin.rewind # empty stdin + expect { server.run }.not_to raise_error + end + end + + describe "textDocument lifecycle" do + let(:uri) { "file:///tmp/test.cht" } + + def open_doc(text, version: 1) + write(stdin, jsonrpc: "2.0", method: "textDocument/didOpen", params: { + textDocument: { uri: uri, languageId: "clear", version: version, text: text }, + }) + end + + it "publishes diagnostics on didOpen for source with errors" do + open_doc("FN main() RETURNS Void ->\n _ = doesNotExist;\nEND\n") + stdin.rewind + server.run + + responses = read_responses(stdout) + publish = responses.find { |r| r["method"] == "textDocument/publishDiagnostics" } + expect(publish).not_to be_nil + expect(publish["params"]["uri"]).to eq(uri) + expect(publish["params"]["diagnostics"]).not_to be_empty + diag = publish["params"]["diagnostics"].first + expect(diag["severity"]).to eq(1) + expect(diag["source"]).to eq("clear") + expect(diag["message"]).to match(/Undefined variable/) + end + + it "publishes an empty diagnostics array for clean source" do + open_doc("FN main() RETURNS Void -> END\n") + stdin.rewind + server.run + + responses = read_responses(stdout) + publish = responses.find { |r| r["method"] == "textDocument/publishDiagnostics" } + expect(publish).not_to be_nil + expect(publish["params"]["diagnostics"]).to eq([]) + end + + it "republishes after didChange (debounced full sync)" do + open_doc("FN 
main() RETURNS Void -> END\n") + write(stdin, jsonrpc: "2.0", method: "textDocument/didChange", params: { + textDocument: { uri: uri, version: 2 }, + contentChanges: [{ text: "FN main() RETURNS Void ->\n _ = doesNotExist;\nEND\n" }], + }) + stdin.rewind + srv = server + srv.run + srv.flush_pending! + + responses = read_responses(stdout) + publishes = responses.select { |r| r["method"] == "textDocument/publishDiagnostics" } + # Two publishes — one for didOpen (immediate, clean), one for + # didChange (after debounce, broken). + expect(publishes.size).to eq(2) + expect(publishes[0]["params"]["diagnostics"]).to eq([]) + expect(publishes[1]["params"]["diagnostics"]).not_to be_empty + end + + it "republishes after didSave" do + open_doc("FN main() RETURNS Void -> END\n") + write(stdin, jsonrpc: "2.0", method: "textDocument/didSave", params: { + textDocument: { uri: uri }, + }) + stdin.rewind + server.run + + responses = read_responses(stdout) + publishes = responses.select { |r| r["method"] == "textDocument/publishDiagnostics" } + expect(publishes.size).to eq(2) # didOpen + didSave + end + + it "clears diagnostics on didClose" do + open_doc("FN main() RETURNS Void ->\n _ = doesNotExist;\nEND\n") + write(stdin, jsonrpc: "2.0", method: "textDocument/didClose", params: { + textDocument: { uri: uri }, + }) + stdin.rewind + server.run + + responses = read_responses(stdout) + last_publish = responses.select { |r| r["method"] == "textDocument/publishDiagnostics" }.last + expect(last_publish["params"]["diagnostics"]).to eq([]) + end + + it "ignores didChange for an unopened document" do + write(stdin, jsonrpc: "2.0", method: "textDocument/didChange", params: { + textDocument: { uri: "file:///nope.cht", version: 1 }, + contentChanges: [{ text: "x" }], + }) + stdin.rewind + + # Should not crash; no publishDiagnostics for the unknown uri. 
+ expect { server.run }.not_to raise_error + end + + it "ignores didChange with empty contentChanges" do + open_doc("FN main() RETURNS Void -> END\n") + write(stdin, jsonrpc: "2.0", method: "textDocument/didChange", params: { + textDocument: { uri: uri, version: 2 }, + contentChanges: [], + }) + stdin.rewind + server.run + # No crash; one publish from didOpen only. + publishes = read_responses(stdout).select { |r| r["method"] == "textDocument/publishDiagnostics" } + expect(publishes.size).to eq(1) + end + + it "rapid didChange notifications coalesce to one analysis" do + # Three rapid edits of the same document — only the last text + # should be analysed and published. Use a longer debounce so + # the messages all arrive before the first timer fires. + open_doc("FN main() RETURNS Void -> END\n") + 3.times do |i| + write(stdin, jsonrpc: "2.0", method: "textDocument/didChange", params: { + textDocument: { uri: uri, version: i + 2 }, + contentChanges: [{ text: "# edit #{i}\nFN main() RETURNS Void -> END\n" }], + }) + end + stdin.rewind + + srv = server(debounce_ms: 50) + srv.run # processes all 3 didChange synchronously + srv.flush_pending! # waits for the single pending timer + + publishes = read_responses(stdout).select { |r| r["method"] == "textDocument/publishDiagnostics" } + # didOpen (immediate) + exactly one debounced analysis. + expect(publishes.size).to eq(2) + end + + it "didSave cancels any pending debounced timer and analyses immediately" do + open_doc("FN main() RETURNS Void -> END\n") + write(stdin, jsonrpc: "2.0", method: "textDocument/didChange", params: { + textDocument: { uri: uri, version: 2 }, + contentChanges: [{ text: "FN main() RETURNS Void ->\n _ = doesNotExist;\nEND\n" }], + }) + write(stdin, jsonrpc: "2.0", method: "textDocument/didSave", params: { + textDocument: { uri: uri }, + }) + stdin.rewind + + # Use a long debounce so the didChange timer would NOT fire + # before didSave cancels it. 
If cancellation works, exactly + # 2 publishes (didOpen + didSave); the didChange's timer + # never gets to publish. + srv = server(debounce_ms: 5000) + srv.run + srv.flush_pending! + + publishes = read_responses(stdout).select { |r| r["method"] == "textDocument/publishDiagnostics" } + expect(publishes.size).to eq(2) + # The save reflects the post-didChange text — it should publish + # the broken-source diagnostic. + expect(publishes.last["params"]["diagnostics"]).not_to be_empty + end + + it "didClose cancels any pending debounced timer" do + open_doc("FN main() RETURNS Void -> END\n") + write(stdin, jsonrpc: "2.0", method: "textDocument/didChange", params: { + textDocument: { uri: uri, version: 2 }, + contentChanges: [{ text: "broken syntax that would error" }], + }) + write(stdin, jsonrpc: "2.0", method: "textDocument/didClose", params: { + textDocument: { uri: uri }, + }) + stdin.rewind + + srv = server(debounce_ms: 5000) + srv.run + srv.flush_pending! + + publishes = read_responses(stdout).select { |r| r["method"] == "textDocument/publishDiagnostics" } + # didOpen + didClose's empty publish; the didChange's timer never fires. + expect(publishes.size).to eq(2) + expect(publishes.last["params"]["diagnostics"]).to eq([]) + end + + it "logs and recovers when analysis raises an unexpected exception" do + open_doc("FN main() RETURNS Void -> END\n") + stdin.rewind + + # Force the diagnostics layer to blow up so we exercise the + # rescue in analyze_and_publish. + allow(LSP::Diagnostics).to receive(:from_result).and_raise(RuntimeError, "synthetic") + + expect { server.run }.not_to raise_error + # No publishDiagnostics — the rescue swallowed it. 
+ publishes = read_responses(stdout).select { |r| r["method"] == "textDocument/publishDiagnostics" } + expect(publishes).to be_empty + end + end + + describe "initialize advertises capabilities" do + it "declares textDocumentSync = 1 (full sync)" do + write(stdin, jsonrpc: "2.0", id: 1, method: "initialize", params: {}) + stdin.rewind + server.run + + caps = read_responses(stdout).first["result"]["capabilities"] + expect(caps["textDocumentSync"]).to eq(1) + end + + it "declares codeActionProvider with quickfix and refactor kinds" do + write(stdin, jsonrpc: "2.0", id: 1, method: "initialize", params: {}) + stdin.rewind + server.run + + caps = read_responses(stdout).first["result"]["capabilities"] + expect(caps["codeActionProvider"]["codeActionKinds"]).to include("quickfix", "refactor") + end + + it "declares hoverProvider = true" do + write(stdin, jsonrpc: "2.0", id: 1, method: "initialize", params: {}) + stdin.rewind + server.run + + caps = read_responses(stdout).first["result"]["capabilities"] + expect(caps["hoverProvider"]).to be true + end + end + + describe "textDocument/hover" do + let(:uri) { "file:///tmp/test.cht" } + + it "renders hover content for a position with an overlapping diagnostic" do + write(stdin, jsonrpc: "2.0", method: "textDocument/didOpen", params: { + textDocument: { uri: uri, languageId: "clear", version: 1, text: "FN main() RETURNS Void ->\n _ = doesNotExist;\nEND\n" }, + }) + # Cursor on line 1 (0-based), inside `doesNotExist`. 
+ write(stdin, jsonrpc: "2.0", id: 2, method: "textDocument/hover", params: { + textDocument: { uri: uri }, + position: { line: 1, character: 8 }, + }) + stdin.rewind + server.run + + hover_resp = read_responses(stdout).find { |r| r["id"] == 2 } + expect(hover_resp).not_to be_nil + result = hover_resp["result"] + expect(result).not_to be_nil + expect(result["contents"]["kind"]).to eq("markdown") + # Hover renders the registry summary for known codes; for + # UNDEFINED_VAR that's "The named binding does not exist in scope." + expect(result["contents"]["value"]).to include("UNDEFINED_VAR") + end + + it "returns null when no diagnostic overlaps the cursor" do + write(stdin, jsonrpc: "2.0", method: "textDocument/didOpen", params: { + textDocument: { uri: uri, languageId: "clear", version: 1, text: "FN main() RETURNS Void -> END\n" }, + }) + write(stdin, jsonrpc: "2.0", id: 2, method: "textDocument/hover", params: { + textDocument: { uri: uri }, + position: { line: 0, character: 5 }, + }) + stdin.rewind + server.run + + hover_resp = read_responses(stdout).find { |r| r["id"] == 2 } + expect(hover_resp["result"]).to be_nil + end + end + + describe "textDocument/codeAction" do + let(:uri) { "file:///tmp/test.cht" } + + # Source that has a Tier 1 :auto fix (WITH_RESTRICT_NEEDS_MUTABLE). + let(:src_with_fix) { + <<~CLEAR + FN main() RETURNS Void -> + x = 5; + WITH RESTRICT x { _ = x; } + END + CLEAR + } + + def open_doc(text) + write(stdin, jsonrpc: "2.0", method: "textDocument/didOpen", params: { + textDocument: { uri: uri, languageId: "clear", version: 1, text: text }, + }) + end + + def request_action(id, range) + write(stdin, jsonrpc: "2.0", id: id, method: "textDocument/codeAction", params: { + textDocument: { uri: uri }, + range: range, + context: { diagnostics: [] }, + }) + end + + it "returns the fixable findings overlapping the request range" do + open_doc(src_with_fix) + # The WITH RESTRICT diagnostic lands on line 3 (0-based 2). 
+ request_action(2, { + start: { line: 2, character: 0 }, + end: { line: 2, character: 100 }, + }) + stdin.rewind + server.run + + responses = read_responses(stdout) + action_resp = responses.find { |r| r["id"] == 2 } + expect(action_resp).not_to be_nil + actions = action_resp["result"] + expect(actions.size).to be >= 1 + + first = actions.first + expect(first["kind"]).to eq("quickfix") + expect(first["title"]).to match(/MUTABLE/) + expect(first["isPreferred"]).to be true + + edit = first["edit"]["documentChanges"].first["edits"].first + expect(edit["newText"]).to eq("MUTABLE ") + end + + it "returns an empty array when the range doesn't overlap any finding" do + open_doc(src_with_fix) + # Request at line 0 — the diagnostics are on later lines. + request_action(2, { + start: { line: 0, character: 0 }, + end: { line: 0, character: 5 }, + }) + stdin.rewind + server.run + + action_resp = read_responses(stdout).find { |r| r["id"] == 2 } + expect(action_resp["result"]).to eq([]) + end + + it "returns an empty array when the document isn't open" do + request_action(2, { + start: { line: 0, character: 0 }, + end: { line: 0, character: 100 }, + }) + stdin.rewind + server.run + + action_resp = read_responses(stdout).find { |r| r["id"] == 2 } + expect(action_resp["result"]).to eq([]) + end + end +end diff --git a/spec/share_spec.rb b/spec/share_spec.rb index 44a83254f..6b08a20a5 100644 --- a/spec/share_spec.rb +++ b/spec/share_spec.rb @@ -243,7 +243,7 @@ def transpile(src) RETURN; END CLEAR - }.to raise_error(CompilerError, /Use of moved value 'b'.*moved at line 4 by GIVE/m) + }.to raise_error(CompilerError, /USE AFTER MOVE.*`b`.*already GAVE.*line 4/m) end it "consumes a bare source passed through SHARE" do @@ -258,7 +258,7 @@ def transpile(src) RETURN; END CLEAR - }.to raise_error(CompilerError, /Use of moved value 'b'.*moved at line 5 by SHARE/m) + }.to raise_error(CompilerError, /USE AFTER MOVE.*`b`.*already SHARED.*line 5/m) end it "reports the earlier SHARE site when 
sharing a consumed source again" do @@ -273,7 +273,7 @@ def transpile(src) RETURN; END CLEAR - }.to raise_error(CompilerError, /Use of moved value 'b'.*moved at line 5 by SHARE/m) + }.to raise_error(CompilerError, /USE AFTER MOVE.*`b`.*already SHARED.*line 5/m) end it "does not consume the source when SHARE wraps COPY" do diff --git a/spec/stream_spec.rb b/spec/stream_spec.rb index ac0ff0c6e..0b1715be6 100644 --- a/spec/stream_spec.rb +++ b/spec/stream_spec.rb @@ -1037,7 +1037,7 @@ def transpile_fn(clear_src) RETURN; END CLEAR - expect { run(src) }.to raise_error(SourceError, /Use of moved value 's'/) + expect { run(src) }.to raise_error(SourceError, /USE AFTER MOVE.*`s`/) end it "allows CLONE inside a BG block capture" do diff --git a/spec/takes_auto_move_spec.rb b/spec/takes_auto_move_spec.rb index cb4d20cd7..c4cd0b7df 100644 --- a/spec/takes_auto_move_spec.rb +++ b/spec/takes_auto_move_spec.rb @@ -49,7 +49,7 @@ def transpile(src) RETURN; END CLEAR - }.to raise_error(CompilerError, /moved/) + }.to raise_error(CompilerError, /USE AFTER MOVE/) end it "eliminates v cleanup when always consumed by TAKES" do diff --git a/spec/thunks_not_logical_spec.rb b/spec/thunks_not_logical_spec.rb index 1ef8e4e8b..90b6d9554 100644 --- a/spec/thunks_not_logical_spec.rb +++ b/spec/thunks_not_logical_spec.rb @@ -48,7 +48,7 @@ def annotate(source) RETURN n + 1; END CLEAR - }.to raise_error(/:THUNK, :TAIL_CALL, :NOT_LOGICAL/) + }.to raise_error(/Unknown REENTRANT variant 'BOGUS'/) end it "saves an effects_span covering the full clause text" do diff --git a/spec/thunks_parser_spec.rb b/spec/thunks_parser_spec.rb index 4aecfe344..f67c10ea3 100644 --- a/spec/thunks_parser_spec.rb +++ b/spec/thunks_parser_spec.rb @@ -68,7 +68,7 @@ def fn(ast, name = "main") RETURN; END CLEAR - }.to raise_error(/Unknown REENTRANT variant ':WAFFLES'/) + }.to raise_error(/Unknown REENTRANT variant 'WAFFLES'/) end it "rejects a non-REENTRANT effect at function level" do @@ -79,7 +79,7 @@ def fn(ast, name = 
"main") RETURN; END CLEAR - }.to raise_error(/Function-level EFFECTS only accepts REENTRANT/) + }.to raise_error(/Unknown function effect 'HEAP'/) end it "rejects mixing legacy @reentrant with EFFECTS REENTRANT" do @@ -202,7 +202,7 @@ def fn(ast, name = "main") RETURN; END CLEAR - }.to raise_error(/Unknown REQUIRES (family or kind|kind) 'WAFFLES'/) + }.to raise_error(/Unknown REQUIRES (family|kind) 'WAFFLES'/) end it "rejects duplicate REQUIRES clauses for the same name" do diff --git a/spec/union_spec.rb b/spec/union_spec.rb index 7b9eea5fb..38db1bd02 100644 --- a/spec/union_spec.rb +++ b/spec/union_spec.rb @@ -578,7 +578,7 @@ def transpile(src) END END CLEAR - }.to raise_error(CompilerError, /Type Error/) + }.to raise_error(CompilerError, /no field 'diameter'/) end end diff --git a/spec/use_after_move_spec.rb b/spec/use_after_move_spec.rb index a4464915c..f133f07e2 100644 --- a/spec/use_after_move_spec.rb +++ b/spec/use_after_move_spec.rb @@ -26,7 +26,7 @@ def expect_no_error(src) # 1. v2 = v1 consumes v1. Using v1 after is an error. # ========================================================================= it "raises on use after move via binding" do - expect_error(<<~CLEAR, /moved/) + expect_error(<<~CLEAR, /USE AFTER MOVE/) UNION Value { Num: Float64, List: Int64[] } FN makeList() RETURNS Value -> MUTABLE items: Int64[]@list = List[]; @@ -46,7 +46,7 @@ def expect_no_error(src) # 2. items.append(val) consumes val. Using val after is an error. 
# ========================================================================= it "raises on use after move via append" do - expect_error(<<~CLEAR, /moved/) + expect_error(<<~CLEAR, /USE AFTER MOVE/) UNION Value { Num: Float64, List: Int64[] } FN main() RETURNS Void -> MUTABLE v = Value{ Num: 1.0 }; @@ -59,7 +59,7 @@ def expect_no_error(src) end it "reports TAKES when a method argument consumes a value" do - expect_error(<<~CLEAR, /Use of moved value 'item'.*moved at line 5 by TAKES/m) + expect_error(<<~CLEAR, /USE AFTER MOVE.*`item`.*already TOOK.*line 5/m) STRUCT Item { v: Int64 } FN main() RETURNS Void -> MUTABLE pool: Item[10]@pool = []; @@ -71,19 +71,29 @@ def expect_no_error(src) CLEAR end - it "maps move actions to user-facing labels" do + it "maps move actions to user-facing phrases (active and passive forms)" do annotator = SemanticAnnotator.new - expect(annotator.send(:ownership_move_action_label, :return)).to eq("RETURN") - expect(annotator.send(:ownership_move_action_label, :collect)).to eq("COLLECT") - expect(annotator.send(:ownership_move_action_label, :capture)).to eq("capture") + # Active form is used by USE_OF_MOVED_VALUE — the consumer is the + # subject of the sentence ("`process(GIVE msg)` already GAVE it away"). + expect(annotator.send(:ownership_active_phrase, :give)).to eq("already GAVE it away") + expect(annotator.send(:ownership_active_phrase, :takes)).to eq("already TOOK it away") + expect(annotator.send(:ownership_active_phrase, :return)).to eq("already RETURNED it") + expect(annotator.send(:ownership_active_phrase, :share)).to eq("already SHARED it") + expect(annotator.send(:ownership_active_phrase, :move)).to eq("already MOVED it") + # Passive form is used by USE_OF_MOVED_PATH — the value (path's + # owner) is the subject ("its owner `b` was already TAKEN away"). 
+ expect(annotator.send(:ownership_passive_phrase, :give)).to eq("was already GIVEN away") + expect(annotator.send(:ownership_passive_phrase, :takes)).to eq("was already TAKEN away") + expect(annotator.send(:ownership_passive_phrase, :return)).to eq("was already RETURNED") + expect(annotator.send(:ownership_passive_phrase, :move)).to eq("was already MOVED") end # ========================================================================= # 3. Struct literal consumes captured variables. # ========================================================================= it "raises on use after move via struct literal" do - expect_error(<<~CLEAR, /moved/) + expect_error(<<~CLEAR, /USE AFTER MOVE/) STRUCT Container { data: HashMap } FN main() RETURNS Void -> MUTABLE m: HashMap = {}; @@ -118,7 +128,7 @@ def expect_no_error(src) # 4b. TAKES fn(val) consumes val. Using val after is an error. # ========================================================================= it "raises on use after move via TAKES" do - expect_error(<<~CLEAR, /moved/) + expect_error(<<~CLEAR, /USE AFTER MOVE/) FN consume(TAKES items: Int64[]) RETURNS Int64 -> RETURN items.length(); END @@ -150,7 +160,7 @@ def expect_no_error(src) # 6. Strings are owned (non-Copy). Assignment moves. # ========================================================================= it "raises on reuse of moved string" do - expect_error(<<~CLEAR, /moved/) + expect_error(<<~CLEAR, /USE AFTER MOVE/) FN main() RETURNS Void -> s = "hello"; s2 = s; @@ -244,7 +254,7 @@ def expect_no_error(src) # ownership of the same pointer, which is an illegal state (double-free). 
# ========================================================================= it "raises on use after move for union with @indirect struct variant" do - expect_error(<<~CLEAR, /moved/) + expect_error(<<~CLEAR, /USE AFTER MOVE/) UNION Value { Nil, Num: Float64, Lambda { body: Value @indirect, id: Int64 } } FN makeLambda!() RETURNS Value -> RETURN Value.Lambda{ body: Value{ Num: 42.0 }, id: 1 }; diff --git a/src/annotator-helpers/capabilities.rb b/src/annotator-helpers/capabilities.rb index 80697372b..234945ee9 100644 --- a/src/annotator-helpers/capabilities.rb +++ b/src/annotator-helpers/capabilities.rb @@ -156,7 +156,7 @@ def validate_capability(node, capability_type, var_node) when :RESTRICT if var_node.respond_to?(:symbol) && var_node.symbol && !var_node.symbol.mutable - error!(node, :WITH_RESTRICT_NEEDS_MUTABLE, name: var_node.name) + emit_with_restrict_immutable_error!(node, var_node) end when :BORROWED diff --git a/src/annotator-helpers/effects.rb b/src/annotator-helpers/effects.rb index 21036947e..60a1a1d2e 100644 --- a/src/annotator-helpers/effects.rb +++ b/src/annotator-helpers/effects.rb @@ -1010,7 +1010,24 @@ def check_indirect_reentrancy! if callee == fn_name @fn_direct_effects[fn_name]&.add(EffectTracker::REENTRANT) - error!(node, :REENTRANCY_MUTUAL_CYCLE, name: fn_name) + arrow = node.respond_to?(:arrow_token) ? 
node.arrow_token : nil + if arrow && arrow.respond_to?(:line) && arrow.respond_to?(:column) + fix = Fix.new( + description: "Add `EFFECTS REENTRANT` so the runtime knows to schedule this fn on a service stack.", + confidence: :auto, + edits: [Edit.new( + span: Span.new(file: nil, line: arrow.line, col: arrow.column, length: 0), + replacement: 'EFFECTS REENTRANT ' + )] + ) + fixable!(node, + message: DiagnosticRegistry.format(:REENTRANCY_MUTUAL_CYCLE, name: fn_name), + category: :reentrance, + level: :error, + fixes: [fix]) + else + error!(node, :REENTRANCY_MUTUAL_CYCLE, name: fn_name) + end break end diff --git a/src/annotator-helpers/fixable_helpers.rb b/src/annotator-helpers/fixable_helpers.rb index 719aef9f2..c71232b45 100644 --- a/src/annotator-helpers/fixable_helpers.rb +++ b/src/annotator-helpers/fixable_helpers.rb @@ -218,13 +218,12 @@ def emit_variant_typo!(anchor, name, candidates, message, fix_label, # diagnostic still surfaces. def emit_use_of_moved_error!(use_node, og_node) name = use_node.name.to_s - return error!(use_node, :USE_OF_MOVED_VALUE, name: name) unless og_node - return error!(use_node, :USE_OF_MOVED_VALUE, name: name) unless og_node.move_line && og_node.move_col + unless og_node && og_node.move_line && og_node.move_col + msg = "USE AFTER MOVE: You can't use `#{name}`." + return error!(use_node, :USE_OF_MOVED_VALUE, message: msg) + end fixes = [] - move_action = ownership_move_action_label(og_node.move_action) - move_suffix = move_action ? " by #{move_action}" : "" - fixes << Fix.new( description: "Wrap the consuming reference with COPY at line #{og_node.move_line} " \ "(the original survives for the later use).", @@ -264,25 +263,94 @@ def emit_use_of_moved_error!(use_node, og_node) end end + consumer = consumer_source_text(og_node.move_line) + phrase = ownership_active_phrase(og_node.move_action) + msg = if consumer + "USE AFTER MOVE: You can't use `#{name}`. `#{consumer}` #{phrase} (line #{og_node.move_line})." 
+ else + "USE AFTER MOVE: You can't use `#{name}` — it #{phrase} (line #{og_node.move_line})." + end + fixable!(use_node, - message: "Use of moved value '#{name}' (moved at line #{og_node.move_line}#{move_suffix})", + message: msg, category: :ownership, level: :error, fixes: fixes, raise_in_collector: true) end - def ownership_move_action_label(action) - case action - when :share then "SHARE" - when :give then "GIVE" - when :takes then "TAKES" - when :return then "RETURN" - when :next then "NEXT" - when :collect then "COLLECT" - when :capture then "capture" - else nil + # Loop-body use of a value that was moved on a prior iteration. The + # coda "Values can only be TAKEN once; subsequent iterations have + # nothing left to GIVE" is the canonical phrasing per WALKTHROUGH.md. + def emit_use_of_moved_in_loop_error!(node, name, og_node = nil, code: :USE_OF_MOVED_IN_LOOP) + consumer = og_node && og_node.move_line ? consumer_source_text(og_node.move_line) : nil + consumer_clause = consumer ? "`#{consumer}` already TOOK it. " : "" + msg = "USE AFTER MOVE: You can't use `#{name}` here — #{consumer_clause}" \ + "Values can only be TAKEN once; subsequent iterations have nothing left to GIVE." + error!(node, code, message: msg) + end + + # Sub-path use after the path's owner was consumed elsewhere. Uses + # passive voice ("was already TAKEN / GIVEN") because the subject of + # the sentence is the owner — what HAPPENED to it — not the consumer. + def emit_use_of_moved_path_error!(node, path, og_node = nil) + path_str = path.map(&:to_s).join('.') + root = path.first.to_s + msg = if og_node && og_node.move_line + phrase = ownership_passive_phrase(og_node.move_action) + "USE AFTER MOVE: You can't use `#{path_str}`. Its owner `#{root}` #{phrase} on line #{og_node.move_line}." + else + "USE AFTER MOVE: You can't use `#{path_str}`. Its owner `#{root}` was already consumed elsewhere." 
end + error!(node, :USE_OF_MOVED_PATH, message: msg) + end + + # Active form: subject is the consumer (e.g. "`process(GIVE msg)` + # already GAVE it away"). Used when we can quote the consumer site. + OWNERSHIP_ACTIVE_PHRASES = { + give: "already GAVE it away", + takes: "already TOOK it away", + return: "already RETURNED it", + next: "already consumed it via NEXT", + share: "already SHARED it", + collect: "already COLLECTED it", + capture: "already captured it", + move: "already MOVED it", + }.freeze + + # Passive form: subject is the value (e.g. "its owner `b` was + # already TAKEN away"). Used by USE_OF_MOVED_PATH where we name the + # path's owner rather than the consumer. + OWNERSHIP_PASSIVE_PHRASES = { + give: "was already GIVEN away", + takes: "was already TAKEN away", + return: "was already RETURNED", + next: "was already consumed via NEXT", + share: "was already SHARED", + collect: "was already COLLECTED", + capture: "was already captured", + move: "was already MOVED", + }.freeze + + def ownership_active_phrase(action) + OWNERSHIP_ACTIVE_PHRASES[action] || "already consumed it" + end + + def ownership_passive_phrase(action) + OWNERSHIP_PASSIVE_PHRASES[action] || "was already consumed" + end + + # Best-effort: extract the source-line text at the move site so the + # error can quote the consumer call (e.g. "process(GIVE msg)"). Falls + # back to nil when @source_code isn't set (programmatic use of the + # annotator) or the line is past EOF. + def consumer_source_text(line_num) + return nil unless @source_code && line_num + line = @source_code.lines[line_num - 1] + return nil unless line + text = line.strip + text = text.chomp(';').strip + text.empty? ? nil : text end # Type: `Integer literal N overflows T (range ...)`. When the @@ -452,14 +520,285 @@ def emit_immutable_arg_error!(arg_node, scope, arg_idx, param_name) raise_in_collector: true) end + # `x[i] = ...` or `m["k"] = ...` where x/m is an immutable binding. 
+ # Same fix shape: insert MUTABLE at the binding's declaration. The + # error code is named `_LIST` for historical reasons but the same + # site fires for HashMap and any other indexable container. + def emit_immutable_index_assignment_error!(assignment_node, scope, var_name) + fix = build_declare_mutable_fix(var_name, scope) + return error!(assignment_node, :ASSIGN_INDEX_IMMUTABLE_LIST, name: var_name) unless fix + fixable!(assignment_node, + message: DiagnosticRegistry.format(:ASSIGN_INDEX_IMMUTABLE_LIST, name: var_name), + category: :ownership, + level: :error, + fixes: [fix]) + end + + # `x.field = ...` where x is an immutable binding. Mirrors the index + # variant; the fix is the same MUTABLE insertion. + def emit_immutable_field_assignment_error!(assignment_node, scope, var_name) + fix = build_declare_mutable_fix(var_name, scope) + return error!(assignment_node, :ASSIGN_FIELD_IMMUTABLE_STRUCT, name: var_name) unless fix + fixable!(assignment_node, + message: DiagnosticRegistry.format(:ASSIGN_FIELD_IMMUTABLE_STRUCT, name: var_name), + category: :ownership, + level: :error, + fixes: [fix]) + end + + # Reentrance: a function is recursive (directly or transitively) but + # carries no EFFECTS REENTRANT declaration. :auto fix inserts + # `EFFECTS REENTRANT ` immediately before the function's `->`. The + # arrow_token's column is where the insertion lands; the fix is a + # zero-length insert. + # + # `code` selects the error code that fires when the fix isn't + # locatable (REENTRANCE_DIRECT_RECURSIVE for @nonReentrant fns, + # REENTRANCE_INDIRECT_RECURSIVE for the no-marker case). `hint` is + # the human-readable migration text appended to the error template. + def emit_reentrant_error!(fn_node, code, hint:) + arrow = fn_node.respond_to?(:arrow_token) ? 
fn_node.arrow_token : nil + fix = nil + if arrow && arrow.respond_to?(:line) && arrow.respond_to?(:column) + fix = Fix.new( + description: "Add `EFFECTS REENTRANT` so the runtime knows to schedule this fn on a service stack.", + confidence: :auto, + edits: [Edit.new( + span: Span.new(file: nil, line: arrow.line, col: arrow.column, length: 0), + replacement: 'EFFECTS REENTRANT ' + )] + ) + end + return error!(fn_node, code, name: fn_node.name, hint: hint) unless fix + fixable!(fn_node, + message: DiagnosticRegistry.format(code, name: fn_node.name, hint: hint), + category: :reentrance, + level: :error, + fixes: [fix]) + end + + # Capture: USE(MUTABLE x) where x is an immutable binding. :auto + # fix inserts MUTABLE at the captured binding's declaration. Same + # shape as emit_immutable_assignment_error! / emit_immutable_arg_error!. + def emit_capture_immutable_as_mutable_error!(node, cap_name, owner_scope) + fix = build_declare_mutable_fix(cap_name, owner_scope) + return error!(node, :CAPTURE_IMMUTABLE_AS_MUTABLE, name: cap_name) unless fix + fixable!(node, + message: DiagnosticRegistry.format(:CAPTURE_IMMUTABLE_AS_MUTABLE, name: cap_name), + category: :ownership, + level: :error, + fixes: [fix]) + end + + # Type: function with multiple-typed RETURN branches and no explicit + # `RETURNS` annotation. :auto fix inserts `RETURNS :Any ` immediately + # before the function's `->` arrow so the compiler knows to accept + # the polymorphic return. + def emit_ambiguous_return_error!(fn_node, found_returns) + arrow = fn_node.respond_to?(:arrow_token) ? 
fn_node.arrow_token : nil + fix = nil + if arrow && arrow.respond_to?(:line) && arrow.respond_to?(:column) + fix = Fix.new( + description: "Insert `RETURNS :Any` so the function accepts the polymorphic return.", + confidence: :auto, + edits: [Edit.new( + span: Span.new(file: nil, line: arrow.line, col: arrow.column, length: 0), + replacement: 'RETURNS :Any ' + )] + ) + end + return error!(fn_node, :AMBIGUOUS_RETURN, types: found_returns) unless fix + fixable!(fn_node, + message: DiagnosticRegistry.format(:AMBIGUOUS_RETURN, types: found_returns), + category: :type, + level: :error, + fixes: [fix]) + end + + # MATCH on a non-discriminated subject (or non-exhaustive cases) — + # both fixed by inserting `PARTIAL ` before the MATCH keyword. :auto + # confidence because PARTIAL MATCH is strictly a superset (allows + # DEFAULT, allows guards, doesn't require exhaustiveness). + def emit_match_partial_fix!(match_node, code, **kwargs) + tok = match_node.respond_to?(:token) ? match_node.token : nil + fix = nil + if tok && tok.respond_to?(:line) + fix = Fix.new( + description: "Replace `MATCH` with `PARTIAL MATCH` (relaxes exhaustiveness; allows DEFAULT and WHEN guards).", + confidence: :auto, + edits: [Edit.new( + span: Span.new(file: nil, line: tok.line, col: tok.column, length: 0), + replacement: 'PARTIAL ' + )] + ) + end + return error!(match_node, code, **kwargs) unless fix + fixable!(match_node, + message: DiagnosticRegistry.format(code, **kwargs), + category: :type, + level: :error, + fixes: [fix]) + end + + # Lifetime: returning a borrowed value without COPY or a `RETURNS x:T` + # annotation. :auto fix wraps the return value with `COPY ` — safe for + # values the compiler considers copy-eligible at runtime; user can + # decline and add a lifetime annotation instead. 
+ def emit_return_borrowed_no_copy_error!(node) + fix = nil + if node.respond_to?(:token) && node.token + tok = node.token + fix = Fix.new( + description: "Wrap the returned value with `COPY ` so it doesn't borrow from the parameter.", + confidence: :auto, + edits: [Edit.new( + span: Span.new(file: nil, line: tok.line, col: tok.column, length: 0), + replacement: 'COPY ' + )] + ) + end + kw = { type: node.full_type } + return error!(node, :RETURN_BORROWED_NO_COPY_OR_LIFETIME, **kw) unless fix + fixable!(node, + message: DiagnosticRegistry.format(:RETURN_BORROWED_NO_COPY_OR_LIFETIME, **kw), + category: :lifetime, + level: :error, + fixes: [fix]) + end + + # Capability: WITH RESTRICT on an immutable binding. :auto fix + # locates the declaration and inserts `MUTABLE ` at its column — + # same shape as emit_immutable_assignment_error!. + def emit_with_restrict_immutable_error!(node, var_node) + name = var_node.name + scope = (var_node.respond_to?(:symbol) && var_node.symbol&.scope) || current_scope + fix = build_declare_mutable_fix(name, scope) + return error!(node, :WITH_RESTRICT_NEEDS_MUTABLE, name: name) unless fix + fixable!(node, + message: DiagnosticRegistry.format(:WITH_RESTRICT_NEEDS_MUTABLE, name: name), + category: :capability, + level: :error, + fixes: [fix]) + end + + # Style lint: a function with at least one MUTABLE param should end + # in `!`. :auto fix appends `!` immediately after the function name. + # Falls back to plain error! when the name token isn't available + # (e.g. synthesized fns). + def emit_style_mutable_param_needs_bang!(fn_node) + name = fn_node.name + name_tok = fn_node.respond_to?(:name_token) ? 
fn_node.name_token : nil + fix = nil + if name_tok && name_tok.respond_to?(:line) && name_tok.respond_to?(:column) + end_col = name_tok.column + name.length + fix = Fix.new( + description: "Append `!` to '#{name}' (signals that it takes a MUTABLE parameter).", + confidence: :auto, + edits: [Edit.new( + span: Span.new(file: nil, line: name_tok.line, col: end_col, length: 0), + replacement: '!' + )] + ) + end + return error!(fn_node, :STYLE_MUTABLE_PARAM_NEEDS_BANG, name: name) unless fix + fixable!(fn_node, + message: DiagnosticRegistry.format(:STYLE_MUTABLE_PARAM_NEEDS_BANG, name: name), + category: :lint, + level: :error, + fixes: [fix]) + end + + # Reentrance: `@canSmash` on BG/DO is recognized but not yet + # implemented. :auto fix replaces the prefix sigil with `@service` + # (OS-thread spawn — supported today, same compile-time guarantee). + def emit_can_smash_unsupported_error!(node) + fix = nil + tok = node.respond_to?(:can_smash_token) ? node.can_smash_token : nil + if tok && tok.respond_to?(:line) + fix = Fix.new( + description: "Replace `@canSmash` with `@service` (OS-thread spawn — supported today).", + confidence: :auto, + edits: [Edit.new( + span: Span.new(file: nil, line: tok.line, col: tok.column, length: tok.value.to_s.length), + replacement: '@service' + )] + ) + end + return error!(node, :CAN_SMASH_NOT_SUPPORTED) unless fix + fixable!(node, + message: DiagnosticRegistry.format(:CAN_SMASH_NOT_SUPPORTED), + category: :reentrance, + level: :error, + fixes: [fix]) + end + + # Type: `x: TargetType = some_value` where some_value's type doesn't + # match. :interactive fix wraps the value in `CAST(value AS TargetType)` + # — interactive because narrowing can lose data. Only offered when + # the value is a literal whose source span is precisely known + # (Literal nodes carry a token for the start; the value's textual + # length is known from the parsed token's value). 
+ def emit_type_mismatch_assign_error!(node, target_type, value_type) + kw = { got: value_type, expected: target_type } + value = node.respond_to?(:value) ? node.value : nil + fix = build_cast_wrap_fix(value, target_type) + return error!(node, :TYPE_MISMATCH_ASSIGN, **kw) unless fix + fixable!(node, + message: DiagnosticRegistry.format(:TYPE_MISMATCH_ASSIGN, **kw), + category: :type, + level: :error, + fixes: [fix]) + end + + # Helper: wrap a literal-or-identifier value with `CAST(... AS T)`. + # Returns a Fix or nil. Only handles values whose textual span we + # can compute exactly — Literal nodes (numeric / boolean / string) + # and bare Identifier references. Anything else (binary expr, + # function call) gets nil so the caller falls back to plain error!. + def build_cast_wrap_fix(value, target_type) + return nil unless value + return nil unless value.respond_to?(:token) && value.token + tok = value.token + target_name = target_type.respond_to?(:resolved) ? target_type.resolved : target_type + text_length = case value + when AST::Literal + tok.value.to_s.length + when AST::Identifier + value.name.to_s.length + else + nil + end + return nil unless text_length + Fix.new( + description: "Wrap value with `CAST(... AS #{target_name})` (narrowing — verify it can't lose data).", + confidence: :interactive, + edits: [ + Edit.new(span: Span.new(file: nil, line: tok.line, col: tok.column, length: 0), + replacement: "CAST("), + Edit.new(span: Span.new(file: nil, line: tok.line, col: tok.column + text_length, length: 0), + replacement: " AS #{target_name})"), + ] + ) + end + # Shared helper — returns a Fix that inserts `MUTABLE ` at the # declaration of `name` in `scope`. Returns nil when the declaration # isn't locatable or already carries `MUTABLE`. 
def build_declare_mutable_fix(name, scope) info = scope.locals[name] - decl = info&.reg - return nil unless decl && decl.respond_to?(:token) && decl.token - tok = decl.token + return nil unless info + # Locals carry a reg whose token is the binding's first source position. + # Parameters have reg=nil but stash the VAR_ID token at decl time as + # `param_decl_token` (set by declare_and_verify_params) so we can still + # point a MUTABLE insertion at the signature. + tok = nil + decl = info.reg + if decl && decl.respond_to?(:token) && decl.token + tok = decl.token + elsif info.is_param && info.param_decl_token + tok = info.param_decl_token + end + return nil unless tok return nil if tok.respond_to?(:value) && tok.value == 'MUTABLE' Fix.new( diff --git a/src/annotator-helpers/function_analysis.rb b/src/annotator-helpers/function_analysis.rb index 0c45714dd..4793b8671 100644 --- a/src/annotator-helpers/function_analysis.rb +++ b/src/annotator-helpers/function_analysis.rb @@ -431,7 +431,10 @@ def verify_function_signature!(node, signature) node.args[i] = owned if owned end - move_if_not_copyable!(inner_node) + # `is_give` already had visit_GiveNode set the :give action; + # for plain TAKES (no GIVE wrapper) record :takes so the + # USE_OF_MOVED_VALUE diagnostic can phrase "TOOK it away". + move_if_not_copyable!(inner_node, action: is_give ? :give : :takes) inner_node.was_moved = true arg_node.was_moved = true # If ensure_owned_value! wrapped the arg in a fresh CopyNode (auto-COPY @@ -848,6 +851,7 @@ def declare_and_verify_params(node) # Mark as a parameter so deferred WITH validation (P1.7) can # distinguish it from local bindings. param[:symbol].is_param = true + param[:symbol].param_decl_token = param[:name_token] # Atomics M1.6.5: stamp sync_families from the REQUIRES disjunction so # call-site effect resolution can detect polymorphic bindings (size > 1). 
if node.respond_to?(:requires) && node.requires @@ -893,7 +897,7 @@ def verify_captures!(node) entry = owner_scope.locals[cap_name] if cap[:mutable] && !entry.mutable - error!(node, :CAPTURE_IMMUTABLE_AS_MUTABLE, name: cap_name) + emit_capture_immutable_as_mutable_error!(node, cap_name, owner_scope) end # Mark the captured variable as used in its declaring scope. @@ -929,7 +933,7 @@ def verify_returns(node, found_returns, declared_return) (t.start_with?("Byte[") || t == "String") ? :String : r[:type] }.uniq.size if declared_return != :Any && normalized > 1 - error!(node, :AMBIGUOUS_RETURN, types: found_returns) + emit_ambiguous_return_error!(node, found_returns) end end end @@ -971,7 +975,7 @@ def verify_return(node) is_type_param = fn_type_params.include?(type_info&.resolved) unless has_lifetime || is_copyable || is_type_param - error!(node, :RETURN_BORROWED_NO_COPY_OR_LIFETIME, type: node.full_type) + emit_return_borrowed_no_copy_error!(node) end return true unless has_lifetime diff --git a/src/annotator-helpers/generic_analysis.rb b/src/annotator-helpers/generic_analysis.rb index 2c17bb779..1b441dac8 100644 --- a/src/annotator-helpers/generic_analysis.rb +++ b/src/annotator-helpers/generic_analysis.rb @@ -193,7 +193,17 @@ def validate_type_annotation!(node, type_obj, is_param: false) schema = lookup_type_schema(base_name) if schema.nil? - error!(node, :UNKNOWN_TYPE, name: base_name) + tok = node.respond_to?(:token) ? node.token : nil + if tok + emit_typo_suggestion!( + tok, base_name.to_s, all_known_type_names, + "Unknown type '#{base_name}'", + "closest declared type", + category: :type, cascade: true + ) + else + error!(node, :UNKNOWN_TYPE, name: base_name) + end end unless schema.is_a?(Hash) && schema[:type_params] diff --git a/src/annotator.rb b/src/annotator.rb index e2b60cc55..8c5365f2e 100644 --- a/src/annotator.rb +++ b/src/annotator.rb @@ -717,7 +717,7 @@ def visit_FunctionDef(node) # 2. Validation & Lifetime has_mutable_param = node.params.any? 
{ |p| p[:mutable] } if has_mutable_param && !node.name.end_with?("!") - error!(node, :STYLE_MUTABLE_PARAM_NEEDS_BANG, name: node.name) + emit_style_mutable_param_needs_bang!(node) end verify_lifetime!(node) @@ -772,10 +772,12 @@ def visit_FunctionDef(node) # Both share `reentrant = :non_reentrant` (the bridge piggybacks # on the legacy codegen path), so suppress here for either. unless [:reentrant_not_logical, :reentrant_max_depth].include?(node.reentrance_kind) - error!(node, :REENTRANCE_DIRECT_RECURSIVE, name: node.name, hint: "Use @reentrant (not @nonReentrant) for directly recursive functions.") + emit_reentrant_error!(node, :REENTRANCE_DIRECT_RECURSIVE, + hint: "Replace `@nonReentrant` with `EFFECTS REENTRANT` (directly recursive functions need a recursion budget).") end when nil - error!(node, :REENTRANCE_INDIRECT_RECURSIVE, name: node.name, hint: "Add @reentrant to the function signature to allow this.") + emit_reentrant_error!(node, :REENTRANCE_INDIRECT_RECURSIVE, + hint: "Add `EFFECTS REENTRANT` to the function signature to allow this.") end # Tail call validation: if @reentrant:tailCall, verify the self-call is in tail position. 
@@ -1438,7 +1440,18 @@ def annotate_struct_pattern!(match_node, pat) if schema unless schema.key?(f[:name]) - error!(match_node, :MATCH_FIELD_UNKNOWN, field: f[:name], type: expr_type) + name_tok = f[:name_token] + if name_tok + valid_fields = schema.keys.reject { |k| k.is_a?(Symbol) || k.to_s.start_with?('_') } + emit_typo_suggestion!( + name_tok, f[:name], valid_fields, + "MATCH struct pattern: field '#{f[:name]}' does not exist on type #{expr_type}", + "field of #{expr_type}", + category: :type, cascade: true + ) + else + error!(match_node, :MATCH_FIELD_UNKNOWN, field: f[:name], type: expr_type) + end end end @@ -1663,7 +1676,18 @@ def visit_MatchStatement(node) c[:destructure].fields.each do |f| next unless f[:value] == :bind unless payload_schema.key?(f[:name]) - error!(node, :MATCH_DESTRUCTURE_FIELD_UNKNOWN, field: f[:name], variant: variant_name) + name_tok = f[:name_token] + if name_tok + valid_fields = payload_schema.keys.reject { |k| k.is_a?(Symbol) || k.to_s.start_with?('_') } + emit_typo_suggestion!( + name_tok, f[:name], valid_fields, + "MATCH destructure: field '#{f[:name]}' is not on variant #{variant_name}", + "field of variant #{variant_name}", + category: :type, cascade: true + ) + else + error!(node, :MATCH_DESTRUCTURE_FIELD_UNKNOWN, field: f[:name], variant: variant_name) + end end field_def = payload_schema[f[:name]] field_type = field_def.is_a?(Hash) ? field_def[:type] : field_def @@ -1703,7 +1727,7 @@ def visit_MatchStatement(node) # must opt in to PARTIAL MATCH. unless is_enum || is_union type_label = expr_t.resolved - error!(node, :MATCH_NEEDS_ENUM_OR_UNION, type: type_label) + emit_match_partial_fix!(node, :MATCH_NEEDS_ENUM_OR_UNION, type: type_label) end # MATCH forbids DEFAULT — the whole point of an exhaustive MATCH is @@ -1733,7 +1757,7 @@ def visit_MatchStatement(node) missing = all_variants - covered unless missing.empty? type_label2 = is_enum ? 
"enum" : "union" - error!(node, :MATCH_NON_EXHAUSTIVE, + emit_match_partial_fix!(node, :MATCH_NON_EXHAUSTIVE, kind: type_label2, name: type_name, missing: missing.sort.join(', ')) end end @@ -1861,7 +1885,7 @@ def visit_WhileLoop(node) type_obj = var_type.is_a?(Type) ? var_type : Type.new(var_type.to_s) is_copy = type_obj.implicitly_copyable? { |t| lookup_type_schema(t) } unless is_copy - error!(node, :USE_OF_MOVED_IN_LOOP, name: name) + emit_use_of_moved_in_loop_error!(node, name, @og&.[](name), code: :USE_OF_MOVED_IN_LOOP) end end end @@ -1936,7 +1960,7 @@ def visit_WhileBindLoop(node) type_obj = var_type.is_a?(Type) ? var_type : Type.new(var_type.to_s) is_copy = type_obj.implicitly_copyable? { |t| lookup_type_schema(t) } unless is_copy - error!(node, :USE_OF_MOVED_IN_LOOP_SHORT, name: name) + emit_use_of_moved_in_loop_error!(node, name, @og&.[](name), code: :USE_OF_MOVED_IN_LOOP_SHORT) end end end @@ -3106,7 +3130,16 @@ def visit_assignment_variable(identifier_or_name, node) end if scope.is_immutable?(var_name) - error!(node, :ASSIGN_VAR_IMMUTABLE, name: var_name) + fix = build_declare_mutable_fix(var_name, scope) + if fix + fixable!(node, + message: DiagnosticRegistry.format(:ASSIGN_VAR_IMMUTABLE, name: var_name), + category: :ownership, + level: :error, + fixes: [fix]) + else + error!(node, :ASSIGN_VAR_IMMUTABLE, name: var_name) + end end validate_assignment_type(node, scope.resolve_type(var_name), node.value.resolved_type) @@ -3125,7 +3158,7 @@ def visit_assignment_index(index_node, assignment_node) if index_node.target.is_a?(AST::Identifier) var_name = index_node.target.name if current_scope.is_immutable?(var_name) - error!(assignment_node, :ASSIGN_INDEX_IMMUTABLE_LIST, name: var_name) + emit_immutable_index_assignment_error!(assignment_node, current_scope, var_name) end mark_var_mutated(var_name) else @@ -3185,7 +3218,7 @@ def visit_assignment_field(field_node, assignment_node) var_name = field_node.target.name syn = field_node.target.symbol&.sync if 
current_scope.is_immutable?(var_name) && syn != :always_mutable - error!(assignment_node, :ASSIGN_FIELD_IMMUTABLE_STRUCT, name: var_name) + emit_immutable_field_assignment_error!(assignment_node, current_scope, var_name) end mark_var_mutated(var_name) @@ -3217,7 +3250,7 @@ def validate_assignment_type(node, target_type, value_type) return if target_type == value_type if !is_safe_autocast?(value_type, target_type) - error!(node, :TYPE_MISMATCH_ASSIGN, got: value_type, expected: target_type) + emit_type_mismatch_assign_error!(node, target_type, value_type) else node.value.coerced_type = target_type end @@ -3292,7 +3325,7 @@ def visit_GetField(node) path.each do |seg| check = check.empty? ? seg.to_s : "#{check}.#{seg}" if @og.moved?(check) - error!(node, :USE_OF_MOVED_PATH, path: path.map(&:to_s).join(".")) + emit_use_of_moved_path_error!(node, path, @og[check]) break end end @@ -3329,6 +3362,18 @@ def visit_GetField(node) field_type = apply_type_subst(field_type, subst) if subst.any? end node.full_type = field_type + elsif schema.is_a?(Hash) && node.token + # Struct schema resolved but the requested field doesn't exist — + # emit a typo suggestion when one of the schema's fields is close + # to what the user typed. The bare error code stays as the + # fallback when no candidate is within Levenshtein threshold. + valid_fields = schema.keys.reject { |k| k.is_a?(Symbol) || k.to_s.start_with?('_') } + emit_typo_suggestion!( + node.token, node.field, valid_fields, + "Struct '#{type}' has no field '#{node.field}'", + "field of #{type}", + category: :type, cascade: true + ) else error!(node, :ILLEGAL_FIELD_LOOKUP, field: node.field, type: type) end @@ -3414,7 +3459,17 @@ def visit_HashLit(node) def visit_StructLit(node) schema = lookup_type_schema(node.name.to_sym) if schema.nil? - error!(node, :UNKNOWN_STRUCT_TYPE, name: node.name) + tok = node.respond_to?(:token) ? 
node.token : nil + if tok + emit_typo_suggestion!( + tok, node.name, all_known_type_names, + "Unknown struct type '#{node.name}'", + "closest declared type", + category: :type, cascade: true + ) + else + error!(node, :UNKNOWN_STRUCT_TYPE, name: node.name) + end end # Union literal: Result{ Ok: 42 } or Option{ Some: 42.0 } @@ -6194,7 +6249,7 @@ def assign_fiber_stack_tiers!(program_node) # `:NOT_LOGICAL` / `:MAX_DEPTH(N)`) on the callee. def validate_fiber_stack!(node, call_names, user_size, can_smash) if can_smash - error!(node, :CAN_SMASH_NOT_SUPPORTED) + emit_can_smash_unsupported_error!(node) return end @@ -6392,14 +6447,23 @@ def share_consumes_source?(node) # Mark an identifier as moved if its type is non-Copy. # Skips generic type params (can't determine copyability at annotation time). - def move_if_not_copyable!(node) + # Skips when the binding is already marked moved with a more-specific + # action (e.g., `:give` set by visit_GiveNode) — overwriting it with + # `:move` would destroy the action info that the + # USE_OF_MOVED_VALUE diagnostic uses to phrase "GAVE/TOOK/etc.". + def move_if_not_copyable!(node, action: :move) return unless node.is_a?(AST::Identifier) vt = node.type_info vt = Type.new(vt) if vt && !vt.is_a?(Type) return if vt.nil? return if current_fn_ctx&.type_params&.include?(vt.resolved) return if vt.implicitly_copyable? { |t| lookup_type_schema(t) rescue nil } - og_set_moved(node.name, at_token: node.token, action: :move) + existing = @og&.nodes&.[](node.name) + if existing && existing.moved? && existing.move_action && existing.move_action != :move + node.was_moved = true + return + end + og_set_moved(node.name, at_token: node.token, action: action) node.was_moved = true end diff --git a/src/ast/ast.rb b/src/ast/ast.rb index a92459d1f..47f7b7338 100644 --- a/src/ast/ast.rb +++ b/src/ast/ast.rb @@ -463,6 +463,7 @@ def metatype attr_accessor :tail_call # true if @reentrant:tailCall — compiler emits @call(.always_tail, ...) 
attr_accessor :reentrant_token # Token for the legacy @reentrant annotation (drives `clear fix` span) attr_accessor :arrow_token # Token for the `->` after the function header (drives REQUIRES insertion span) + attr_accessor :name_token # Token for the function name itself (drives the `!`-suffix fix for STYLE_MUTABLE_PARAM_NEEDS_BANG) # Phase 4f.2: { start_tok:, end_tok: } pair covering the full # `EFFECTS REENTRANT[:VARIANT]` clause text. Used by `clear fix` # to swap variants (e.g., `:THUNK` -> plain or `:NOT_LOGICAL`). @@ -991,7 +992,9 @@ def name; target.respond_to?(:name) ? target.name : nil end # Phase 4g: tokens that drive `clear fix` for stack-tier sigil # rewrites. open_brace_token = `{` (insert @service -> after); # prefix_token = the user's existing tier sigil (replace). - attr_accessor :open_brace_token, :prefix_token + # can_smash_token = the `@canSmash` sigil token specifically + # (drives the @canSmash -> @service auto-fix). + attr_accessor :open_brace_token, :prefix_token, :can_smash_token end # ThenChain: sequential chaining of steps inside a BG block fiber. diff --git a/src/ast/diagnostic_registry.rb b/src/ast/diagnostic_registry.rb index 0f2c3ca4d..d0a3166eb 100644 --- a/src/ast/diagnostic_registry.rb +++ b/src/ast/diagnostic_registry.rb @@ -1180,23 +1180,31 @@ module DiagnosticRegistry }, USE_OF_MOVED_IN_LOOP: { severity: :error, category: :ownership, - template: "Use of moved value '%{name}' in loop. The variable is moved in the first iteration and not available for the next.", - summary: "Loop body moves a value on the first iteration; it isn't available for subsequent iterations.", + template: "%{message}", + summary: "Loop body consumes a value on the first iteration; subsequent iterations have nothing left to GIVE.", + cause: "An affine value can only be TAKEN once. The loop body moves (GIVE / TAKES / RETURN / etc.) 
the binding, so the second iteration would be reading something that's already been transferred.", + fix_hint: "Hoist the move out of the loop, or wrap the consuming reference with `COPY` (if the type permits) so each iteration gets its own owned copy. For shared aggregation, declare the binding `@multiowned` (single-scheduler Rc) or `@shared` (cross-fiber Arc).", }, USE_OF_MOVED_VALUE: { severity: :error, category: :ownership, - template: "Use of moved value '%{name}'", - summary: "Variable was moved at a prior site and is no longer accessible.", + template: "%{message}", + summary: "Binding was already TAKEN / GIVEN at a prior site and is no longer accessible.", + cause: "An affine binding has exactly one owner. A prior expression (a `TAKES` parameter, `GIVE`, `RETURN`, `SHARE`, `NEXT`, etc.) consumed ownership; the current use is left holding nothing.", + fix_hint: "Wrap the consuming reference with `COPY` (if the type permits — primitives, strings, and enums are Copy by default; non-Copy types need `@multiowned` / `@shared` to share). 
Or restructure so only one site consumes the value.", }, USE_OF_MOVED_IN_LOOP_SHORT: { severity: :error, category: :ownership, - template: "Use of moved value '%{name}' in loop.", - summary: "Loop body uses a value that was moved on a prior iteration.", + template: "%{message}", + summary: "Loop body uses a value that was already TAKEN on a prior iteration.", + cause: "Same as USE_OF_MOVED_IN_LOOP — the binding was consumed on the first iteration; the second iteration has nothing left to GIVE.", + fix_hint: "Same: hoist the move out of the loop, or `COPY` per-iteration, or upgrade to `@multiowned` / `@shared`.", }, USE_OF_MOVED_PATH: { severity: :error, category: :ownership, - template: "Use of moved value '%{path}'", - summary: "Path expression refers to a binding (or sub-binding) that was already moved.", + template: "%{message}", + summary: "Path's owner (root binding) was already TAKEN or GIVEN; sub-paths are no longer accessible.", + cause: "Sub-path access (`b.field`, `arr[i]`) reads through an owner. 
If the owner itself was transferred (TAKES / GIVE / RETURN / etc.), the sub-path goes with it — the owner takes its fields along.", + fix_hint: "Either consume the field directly (`GIVE b.field`) before the owner is transferred, or `COPY` the field, or restructure so the owner isn't moved before the field's last use.", }, WHILE_AS_NEEDS_OPTIONAL: { severity: :error, category: :type, diff --git a/src/ast/parser.rb b/src/ast/parser.rb index 6596acdd9..585f57534 100644 --- a/src/ast/parser.rb +++ b/src/ast/parser.rb @@ -3,12 +3,14 @@ require_relative "./error_registry" require_relative "./source_error" require_relative "./fixable_error" +require_relative "../annotator-helpers/fixable_helpers" # ========================================== # PARSER # ========================================== class Parser include ErrorHelper + include FixableHelper @@stmt_rules = {} @@primary_rules = {} @@ -765,7 +767,8 @@ def parse_argument_list() end end - p_name = consume(:VAR_ID).value unless is_comptime + name_tok = is_comptime ? 
nil : consume(:VAR_ID) + p_name = name_tok&.value p_type = :Any default_val = nil @@ -787,7 +790,7 @@ def parse_argument_list() end end - { name: p_name, type: p_type, default: default_val, mutable: is_mutable, takes: takes, comptime: is_comptime } + { name: p_name, type: p_type, default: default_val, mutable: is_mutable, takes: takes, comptime: is_comptime, name_token: name_tok } end .last # always ignore the first token end @@ -903,17 +906,29 @@ def parse_extern_fn(extern_tok) if match!(:KEYWORD, 'EFFECTS') loop do consume(:CHAR, ':') - eff_name = consume(:VAR_ID).value.to_sym + eff_tok = consume(:VAR_ID) + eff_name = eff_tok.value.to_sym unless [:alloc, :safe].include?(eff_name) - error!(current, :UNKNOWN_EFFECT, value: eff_name) + emit_typo_suggestion!( + eff_tok, eff_tok.value, %w[alloc safe], + "Unknown effect ':#{eff_name}'", + "closest effect", + category: :type, cascade: true + ) end if eff_name == :safe effects[:safe] = true elsif eff_name == :alloc && match?(:CHAR, ':') consume(:CHAR, ':') - qualifier = consume(:VAR_ID).value.to_sym + qual_tok = consume(:VAR_ID) + qualifier = qual_tok.value.to_sym unless [:frame, :heap].include?(qualifier) - error!(current, :UNKNOWN_ALLOC_QUALIFIER, value: qualifier) + emit_typo_suggestion!( + qual_tok, qual_tok.value, %w[frame heap], + "Unknown alloc qualifier ':#{qualifier}'", + "closest alloc qualifier", + category: :type, cascade: true + ) end effects[:alloc] = qualifier else @@ -1131,7 +1146,8 @@ def parse_function_def(visibility = :package, is_method: false) else consume(:KEYWORD, 'FN') end - name = consume(:VAR_ID).value + name_tok = consume(:VAR_ID) + name = name_tok.value # Predicate suffix: FN name?(...) — ? 
is part of the function name if match?(:CHAR, '?') consume(:CHAR, '?') @@ -1410,6 +1426,7 @@ def parse_function_def(visibility = :package, is_method: false) node.requires = requires_clause node.reentrant_token = reentrant_token node.arrow_token = arrow_token + node.name_token = name_tok node.effects_decl = effects_decl node.effects_span = effects_span if effects_span node.max_depth_n = effects_span[:max_depth] if effects_span && effects_span[:max_depth] @@ -1493,7 +1510,13 @@ def parse_requires_family_or_reentrance end { reentrance: kind } else - error!(tok, :UNKNOWN_REQUIRES_FAMILY, name: tok.value, families: REQUIRES_VALID_FAMILIES.to_a.join(', '), kinds: REQUIRES_REENTRANCE_KINDS.to_a.join(', ')) + candidates = REQUIRES_VALID_FAMILIES.to_a + REQUIRES_REENTRANCE_KINDS.to_a + emit_typo_suggestion!( + tok, tok.value, candidates, + "Unknown REQUIRES family '#{tok.value}' (valid: #{REQUIRES_VALID_FAMILIES.to_a.join(', ')}; kinds: #{REQUIRES_REENTRANCE_KINDS.to_a.join(', ')})", + "closest REQUIRES family/kind", + category: :type, cascade: true + ) end end @@ -1528,7 +1551,12 @@ def parse_requires_clauses(fn_name) case kind_tok.value when 'NON_REENTRANT' then :non_reentrant else - error!(kind_tok, :UNKNOWN_REQUIRES_KIND, value: kind_tok.value) + emit_typo_suggestion!( + kind_tok, kind_tok.value, %w[NON_REENTRANT], + "Unknown REQUIRES kind '#{kind_tok.value}'", + "closest REQUIRES kind", + category: :type, cascade: true + ) end if out.key?(name_tok.value) error!(name_tok, :DUPLICATE_REQUIRES_CLAUSE, fn: fn_name, name: name_tok.value) @@ -1549,7 +1577,12 @@ def parse_effects_decl eff_kw = consume(:KEYWORD, 'EFFECTS') eff_tok = consume(:TYPE_ID) unless eff_tok.value == 'REENTRANT' - error!(eff_tok, :UNKNOWN_FN_EFFECT, value: eff_tok.value) + emit_typo_suggestion!( + eff_tok, eff_tok.value, %w[REENTRANT], + "Unknown function effect '#{eff_tok.value}'", + "closest function effect", + category: :type, cascade: true + ) end span_start = eff_kw span_end_tok = eff_tok # tail of 
`EFFECTS REENTRANT` so far @@ -1584,7 +1617,12 @@ def parse_effects_decl when 'NOT_LOGICAL' then :reentrant_not_logical when 'MAX_DEPTH' then :reentrant_max_depth else - error!(variant_tok, :UNKNOWN_REENTRANT_VARIANT, value: variant_tok.value) + emit_typo_suggestion!( + variant_tok, variant_tok.value, %w[THUNK TAIL_CALL NOT_LOGICAL MAX_DEPTH], + "Unknown REENTRANT variant '#{variant_tok.value}'", + "closest REENTRANT variant", + category: :type, cascade: true + ) end if tight && (kind == :reentrant_not_logical || kind == :reentrant_max_depth) label = kind == :reentrant_not_logical ? "NOT_LOGICAL" : "MAX_DEPTH" @@ -2147,21 +2185,22 @@ def parse_struct_pattern break end - name = consume(:VAR_ID).value + name_tok = consume(:VAR_ID) + name = name_tok.value if match?(:CHAR, ':') consume(:CHAR, ':') # `_` as value means wildcard — ignore this field's value if current.type == :VAR_ID && current.value == '_' consume(:VAR_ID) - fields << { name: name, value: :wildcard } + fields << { name: name, value: :wildcard, name_token: name_tok } else - fields << { name: name, value: parse_expression } + fields << { name: name, value: parse_expression, name_token: name_tok } end else # Bare name: destructuring bind — extract field into a local variable. # { x, y } means bind subject.x to x, subject.y to y. - fields << { name: name, value: :bind } + fields << { name: name, value: :bind, name_token: name_tok } end match!(:CHAR, ',') # optional comma between fields @@ -3031,7 +3070,12 @@ def apply_capability!(result, token, value = token.value) # on type semantics; consumed by capabilities that need a span. 
result[:observable_token] = token else - error!(token, :UNKNOWN_CAPABILITY_MODIFIER, value: value) + emit_typo_suggestion!( + token, value, CAPABILITY_TOKENS, + "Unknown capability modifier '#{value}'", + "closest capability", + category: :capability, cascade: true + ) end end @@ -3085,11 +3129,32 @@ def parse_with_capability # Locked form: WITH EXCLUSIVE lockedVar AS alias { } — acquire mutex, bind inner value. capabilities = [] + # `WITH RESTRIKT x { ... }` — a typo of an UPPERCASE capability + # keyword tokenizes as TYPE_ID and the loop below would silently + # exit the capability list, then fail at the `{` body. Catch this + # shape early and offer a typo suggestion against the known + # capability keyword set. + if match?(:TYPE_ID) + typo_tok = current + emit_typo_suggestion!( + typo_tok, typo_tok.value, AST::CAPABILITIES.map(&:to_s), + "Unknown WITH capability '#{typo_tok.value}'", + "closest WITH capability", + category: :capability, cascade: true + ) + end + while match?(:KEYWORD) || match?(:VAR_ID) do capability = if match?(:KEYWORD) && current.value != 'AS' - cap = consume(:KEYWORD).value.to_sym + cap_tok = consume(:KEYWORD) + cap = cap_tok.value.to_sym unless AST::CAPABILITIES.include?(cap) - error!(previous, :UNKNOWN_WITH_CAPABILITY, value: cap) + emit_typo_suggestion!( + cap_tok, cap_tok.value, AST::CAPABILITIES.map(&:to_s), + "Unknown WITH capability '#{cap}'", + "closest WITH capability", + category: :capability, cascade: true + ) end cap else @@ -3451,7 +3516,17 @@ def parse_cap_join(tok, first_attrs) normalized = current.value.start_with?('@') ? current.value : "@#{current.value}" attrs = CAP_SIGIL_ATTRS[normalized] unless attrs - error!(current, :UNKNOWN_CAPABILITY_SIGIL, value: current.value) + # Chain form `@shared:foo` arrives without the `@`; root form + # arrives with it. Match the candidate-set shape to whichever + # form the user typed so the replacement slots in cleanly. + has_at = current.value.start_with?('@') + candidates = has_at ? 
CAP_SIGIL_ATTRS.keys : CAP_SIGIL_ATTRS.keys.map { |k| k.sub(/^@/, '') } + emit_typo_suggestion!( + current, current.value, candidates, + "Unknown capability sigil '#{current.value}'", + "closest capability sigil", + category: :capability, cascade: true + ) end next_tok = consume(:VAR_ID) apply_cap_dim!(next_tok, attrs, dims) @@ -3537,14 +3612,26 @@ def parse_branch_prefix can_smash = false stack_size = nil + # Enter the loop on a known sigil OR on a `@` token that the + # user clearly intended as a sigil (so the typo path can fire). + looks_like_sigil = current.type == :VAR_ID && current.value.start_with?('@') return { pinned: pinned, parallel: parallel, stack_size: stack_size, can_smash: can_smash } unless - current.type == :VAR_ID && DO_BRANCH_SIGILS.key?(current.value) + looks_like_sigil loop do tok = consume(:VAR_ID) cap_name = tok.value.start_with?('@') ? tok.value : "@#{tok.value}" attrs = DO_BRANCH_SIGILS[cap_name] - error!(tok, :UNKNOWN_BRANCH_PREFIX, value: tok.value.inspect) unless attrs + unless attrs + has_at = tok.value.start_with?('@') + candidates = has_at ? DO_BRANCH_SIGILS.keys : DO_BRANCH_SIGILS.keys.map { |k| k.sub(/^@/, '') } + emit_typo_suggestion!( + tok, tok.value, candidates, + "Unknown branch prefix #{tok.value.inspect}", + "closest DO branch sigil", + category: :type, cascade: true + ) + end if attrs[:stack_size] error!(tok, :DUPLICATE_STACK_SIZE, kind: "branch") if stack_size @@ -3595,15 +3682,28 @@ def parse_bg_prefix can_smash = false stack_size = nil stack_size_token = nil + can_smash_token = nil - return { pinned: pinned, parallel: parallel, stack_size: stack_size, arena: arena, can_smash: can_smash, stack_size_token: nil } unless - current.type == :VAR_ID && BG_SIGILS.key?(current.value) + # Enter the loop on a known sigil OR on `@` that the user + # clearly intended as a BG sigil (so the typo path can fire). 
+ looks_like_sigil = current.type == :VAR_ID && current.value.start_with?('@') + return { pinned: pinned, parallel: parallel, stack_size: stack_size, arena: arena, can_smash: can_smash, stack_size_token: nil, can_smash_token: nil } unless + looks_like_sigil loop do tok = consume(:VAR_ID) cap_name = tok.value.start_with?('@') ? tok.value : "@#{tok.value}" attrs = BG_SIGILS[cap_name] - error!(tok, :UNKNOWN_BG_PREFIX, value: tok.value.inspect) unless attrs + unless attrs + has_at = tok.value.start_with?('@') + candidates = has_at ? BG_SIGILS.keys : BG_SIGILS.keys.map { |k| k.sub(/^@/, '') } + emit_typo_suggestion!( + tok, tok.value, candidates, + "Unknown BG prefix #{tok.value.inspect}", + "closest BG body sigil", + category: :type, cascade: true + ) + end if attrs[:stack_size] error!(tok, :DUPLICATE_STACK_SIZE, kind: "BG") if stack_size @@ -3613,7 +3713,10 @@ def parse_bg_prefix pinned = true if attrs[:pinned] parallel = true if attrs[:parallel] arena = true if attrs[:arena] - can_smash = true if attrs[:can_smash] + if attrs[:can_smash] + can_smash = true + can_smash_token = tok + end # More sigils chained with ':'? 
break unless match?(:CHAR, ':') @@ -3621,7 +3724,7 @@ def parse_bg_prefix end consume(:ARROW, '->') - { pinned: pinned, parallel: parallel, stack_size: stack_size, arena: arena, can_smash: can_smash, stack_size_token: stack_size_token } + { pinned: pinned, parallel: parallel, stack_size: stack_size, arena: arena, can_smash: can_smash, stack_size_token: stack_size_token, can_smash_token: can_smash_token } end def parse_bg_block @@ -3636,6 +3739,7 @@ def parse_bg_block node = AST::BgBlock.new(bg_token, body, nil, prefix[:stack_size], prefix[:pinned], prefix[:parallel], prefix[:arena], prefix[:can_smash]) node.open_brace_token = open_brace node.prefix_token = prefix[:stack_size_token] + node.can_smash_token = prefix[:can_smash_token] node end diff --git a/src/ast/scope.rb b/src/ast/scope.rb index 141524e69..8454ed34b 100644 --- a/src/ast/scope.rb +++ b/src/ast/scope.rb @@ -272,6 +272,17 @@ def lookup_type_schema(name) nil end + # Every type name visible from the current scope (struct, enum, union, + # generic). Used by typo-suggestion fixes that need a candidate set. + def all_known_type_names + names = [] + @scope_stack.each do |scope| + types = scope.instance_variable_get(:@types) + names.concat(types.keys.map(&:to_s)) if types + end + names.uniq + end + def with_new_scope(scope = nil) new_scope = scope.nil? ? Scope.new : scope.dup # Atomics M2.6: stamp depth on the freshly pushed scope so diff --git a/src/ast/symbol_entry.rb b/src/ast/symbol_entry.rb index 5050d1b2d..b16655b18 100644 --- a/src/ast/symbol_entry.rb +++ b/src/ast/symbol_entry.rb @@ -81,6 +81,11 @@ class SymbolEntry :ownership_kind, # :value, :collection, :affine, :resource, :rc, :sync :takes, # true if parameter declared with TAKES (callee owns) :is_param, # true when entry was declared as a function parameter + :param_decl_token, # for is_param entries: the VAR_ID token at the + # param's position in the function signature. 
+ # Used by build_declare_mutable_fix to point an + # auto-fix at the parameter when the body + # mutates it without `MUTABLE`. :link_source, # :shared or :multiowned — tracks which strong ref @link was created from :lifetime, # Atomics M2.1: nil | :current_scope | { source: SymbolEntry } :borrowed_alias, # true only for BORROWED/RESTRICT aliases — fiber capture is stack-UAF diff --git a/src/backends/compiler_frontend.rb b/src/backends/compiler_frontend.rb index bcee80895..5bbe83aee 100644 --- a/src/backends/compiler_frontend.rb +++ b/src/backends/compiler_frontend.rb @@ -34,7 +34,7 @@ def self.compile(cheat_code, importer:, source_dir:, strict_test: false) tokens = Lexer.new(cheat_code).tokenize ast = Parser.new(tokens, cheat_code).parse - annotator = SemanticAnnotator.new(importer: importer, source_dir: source_dir, strict_test: strict_test) + annotator = SemanticAnnotator.new(importer: importer, source_dir: source_dir, strict_test: strict_test, source_code: cheat_code) annotator.annotate!(ast) PipelineRewriter.new(annotator).rewrite!(ast) diff --git a/src/lsp/README.md b/src/lsp/README.md new file mode 100644 index 000000000..0f2800114 --- /dev/null +++ b/src/lsp/README.md @@ -0,0 +1,383 @@ +# clear-lsp — CLEAR Language Server + +A Language Server Protocol (LSP) implementation for the CLEAR +programming language. Drop the snippet below into your Neovim config +and you get diagnostics, hover docs, and quick-fix actions on +`.cht` files. Drives the same `Lexer → Parser → SemanticAnnotator` +pipeline the `clear` CLI uses, so behaviour matches `clear build` +exactly. + +## What you'll get + +- **Squiggles** on every error, with the registry code shown + (`UNDEFINED_VAR`, `WITH_RESTRICT_NEEDS_MUTABLE`, etc.). +- **Hover popups** (default key `K`) with the registry's summary, + cause, fix hint, and a worked bad-vs-good example pulled from the + test suite. 
+- **Quick-fix menu** for the 9 fixable findings — one keypress + inserts `MUTABLE`, replaces `@canSmash` with `@service`, wraps + with `CAST`, etc. + +--- + +## Step 1 — Verify prerequisites + +You need Ruby ≥ 3.0 and bundler installed: + +```sh +ruby --version # → 3.0 or newer +bundler --version # → any 2.x +``` + +If you don't have bundler: `gem install bundler`. + +Then from the repo root, install the gems: + +```sh +cd /path/to/cheat # wherever you cloned this repo +bundle install +``` + +This is a one-time setup. Skip if you already run `clear` from this +checkout. + +## Step 2 — Verify the binary works (terminal smoke test) + +Before touching Neovim, confirm the LSP server actually runs. From +the repo root: + +```sh +bundle exec bin/clear-lsp --help +``` + +You should see: + +``` +Usage: clear-lsp [--log-level=debug|info|warn|error] +``` + +If you get a Ruby error here (missing gem, version mismatch, syntax +error), fix it before moving on — the Neovim config can't make a +broken binary work. + +For a deeper smoke test that actually exercises the protocol, the +test suite drives the binary end-to-end: + +```sh +bundle exec rspec spec/lsp/server_integration_spec.rb --tag integration +``` + +Six tests should pass in about 2 seconds. If they pass, the binary +works; any Neovim issues from here are config, not code. + +## Step 3 — Find your Neovim config directory + +Run `:echo stdpath('config')` inside Neovim. On most systems it'll +be one of: + +- Linux/macOS: `~/.config/nvim/` +- Windows: `~\AppData\Local\nvim\` + +Inside that directory, your main config is `init.lua` (or +`init.vim` on older setups — the Lua block below assumes Lua. If +you're on `init.vim`, wrap the snippet in `lua << EOF ... EOF`). + +## Step 4 — Add the LSP setup + +There are two paths depending on how you manage your Neovim config. +Both work. **The first is plain Neovim with no plugin manager.** The +second is for users on lazy.nvim or similar. 
### Plain Neovim (no plugin manager) — recommended for trying it out + +Add this to your `init.lua`. **Replace `/absolute/path/to/cheat` +with your actual repo path** (run `pwd` in the repo to get it): + +```lua +--------------------------------------------------------------------- +-- CLEAR (.cht) language support +--------------------------------------------------------------------- + +-- 1. Tell Neovim what filetype `.cht` files are. +vim.filetype.add({ extension = { cht = "clear" } }) + +-- 2. Auto-start the LSP whenever a CLEAR buffer opens. +local clear_lsp_root = "/absolute/path/to/cheat" -- ← edit this +vim.api.nvim_create_autocmd("FileType", { + pattern = "clear", + callback = function() + vim.lsp.start({ + name = "clear-lsp", + cmd = { + "bundle", "exec", + clear_lsp_root .. "/bin/clear-lsp", + "--log-level=info", + }, + cmd_cwd = clear_lsp_root, + root_dir = vim.fs.dirname( + vim.fs.find({ ".git", "Gemfile" }, { upward = true })[1] + ) or vim.fn.getcwd(), + }) + end, +}) + +-- 3. Default LSP keymaps (only active in CLEAR buffers). +vim.api.nvim_create_autocmd("LspAttach", { + callback = function(args) + local opts = { buffer = args.buf, silent = true } + vim.keymap.set("n", "K", vim.lsp.buf.hover, opts) + vim.keymap.set("n", "ca", vim.lsp.buf.code_action, opts) + vim.keymap.set("n", "[d", vim.diagnostic.goto_prev, opts) + vim.keymap.set("n", "]d", vim.diagnostic.goto_next, opts) + vim.keymap.set("n", "e", vim.diagnostic.open_float, opts) + end, +}) + +-- 4. Make diagnostics actually visible. Neovim's defaults are +-- minimal — show inline virtual text and gutter signs. +vim.diagnostic.config({ + virtual_text = true, + signs = true, + underline = true, + update_in_insert = false, -- don't recompute mid-keystroke + severity_sort = true, +}) +``` + +Save the file, then restart Neovim. Step 5 below verifies it. 
+ +### lazy.nvim users + +If you use lazy.nvim or another plugin manager, drop this in a +`lua/plugins/clear.lua` (or wherever your plugin specs live): + +```lua +return { + -- Bootstrap the CLEAR LSP. No external plugin needed; we just + -- register the filetype + autocmd from inside the spec. + { + name = "clear-lsp", + dir = vim.fn.stdpath("data") .. "/lazy/clear-lsp-noop", -- placeholder + lazy = false, + config = function() + local clear_lsp_root = "/absolute/path/to/cheat" -- ← edit + + vim.filetype.add({ extension = { cht = "clear" } }) + + vim.api.nvim_create_autocmd("FileType", { + pattern = "clear", + callback = function() + vim.lsp.start({ + name = "clear-lsp", + cmd = { + "bundle", "exec", + clear_lsp_root .. "/bin/clear-lsp", + "--log-level=info", + }, + cmd_cwd = clear_lsp_root, + root_dir = vim.fs.dirname( + vim.fs.find({ ".git", "Gemfile" }, { upward = true })[1] + ) or vim.fn.getcwd(), + }) + end, + }) + + vim.api.nvim_create_autocmd("LspAttach", { + callback = function(args) + local opts = { buffer = args.buf, silent = true } + vim.keymap.set("n", "K", vim.lsp.buf.hover, opts) + vim.keymap.set("n", "ca", vim.lsp.buf.code_action, opts) + vim.keymap.set("n", "[d", vim.diagnostic.goto_prev, opts) + vim.keymap.set("n", "]d", vim.diagnostic.goto_next, opts) + vim.keymap.set("n", "e", vim.diagnostic.open_float, opts) + end, + }) + + vim.diagnostic.config({ + virtual_text = true, + signs = true, + underline = true, + severity_sort = true, + }) + end, + }, +} +``` + +## Step 5 — Verify it's working in Neovim + +Open a CLEAR file with a deliberate error. From the repo root: + +```sh +nvim transpile-tests/01_smoke.cht +``` + +You should see no diagnostics (it's valid CLEAR). Now try a broken +file. Save this somewhere as `/tmp/broken.cht`: + +```clear +FN main() RETURNS Void -> + _ = doesNotExist; + x = 5; + WITH RESTRICT x { _ = x; } +END +``` + +Open it: `nvim /tmp/broken.cht`. Within ~1 second: + +1. 
**Diagnostic squiggle** under `doesNotExist` and on the `WITH + RESTRICT x` line. +2. Press **`K`** with the cursor on `doesNotExist` — a popup + appears with `**[error] UNDEFINED_VAR**`, the cause, and the fix + hint. +3. Move the cursor to the `x` in `RESTRICT x` and press + **`ca`** — a menu offers "Declare 'x' as MUTABLE at its + binding site (line 3).". Accepting it inserts `MUTABLE ` at line + 3 and the diagnostic vanishes. + +If all three work, you're done. + +## Step 6 — Troubleshooting + +### "Nothing happens when I open a .cht file" + +Run `:LspInfo` inside Neovim. If `clear-lsp` isn't listed, the +autocmd didn't fire. Check: + +- `:set filetype?` — should say `clear`. If it says `cht` or empty, + the `vim.filetype.add` call didn't run; verify the snippet + actually loaded (try `:lua print("clear-lsp config loaded")` at + the top to confirm). +- The path in `cmd` exists and is executable: `:! ls -la + /absolute/path/to/cheat/bin/clear-lsp` — should show `-rwxr-xr-x`. + +### "Server is attached but no diagnostics appear" + +Turn on Neovim's LSP debug log to see what's happening over the +wire: + +```vim +:lua vim.lsp.set_log_level("debug") +:LspLog +``` + +Look for: + +- `[ERROR]` lines pointing at framing problems. +- A long pause with no `publishDiagnostics` after `didOpen` — + usually means the analyzer threw something we don't handle. The + server's own log goes to stderr; in Neovim, stderr appears in + `:LspLog` too (look for `[clear-lsp/error]` lines). + +### "I see `bundler/setup` errors when starting" + +The binary needs to run inside the repo's bundler environment. Two +fixes: + +1. Confirm `cmd_cwd` in the config is set to the repo root (where + `Gemfile` lives). Without it, Bundler can't find `Gemfile.lock`. +2. Confirm the Ruby on `$PATH` in your shell matches the one Neovim + uses. If you use `rbenv` or `asdf`, sometimes Neovim launches + under a different Ruby. 
Test by adding `RBENV_VERSION=3.2.0` (or + your version) to the `cmd` env, or use a wrapper script. + +If `bundle exec` itself errors with "could not find gem ostruct": + +```sh +cd /path/to/cheat +bundle update ostruct +``` + +### "Hover or code action does nothing" + +Both rely on cached findings from the most recent analysis pass. If +the file has no errors, hover returns `null` (popup dismissed) and +code action returns `[]` (empty menu). Try a deliberately-broken +file like the example in step 5. + +### "I want to see the LSP's own logs" + +The server logs to stderr at the `--log-level` you specify. In +Neovim, those appear in `:LspLog` interleaved with the client's +own messages. Bump verbosity in the `cmd` array: + +```lua +cmd = { "bundle", "exec", clear_lsp_root .. "/bin/clear-lsp", + "--log-level=debug" } +``` + +`debug` shows every JSON-RPC request/response method name; `info` +shows lifecycle events plus per-document diagnostic counts; `warn` +and `error` are quiet. + +### "I want a fresh server" + +`:LspRestart` (Neovim ≥ 0.10) or `:lua vim.lsp.stop_client( +vim.lsp.get_active_clients()[1].id)` then re-open the buffer. 
+ +--- + +## Capabilities advertised + +``` +{ + textDocumentSync: 1, -- full sync + hoverProvider: true, + codeActionProvider: { codeActionKinds: ["quickfix", "refactor"] } +} +``` + +## Architecture + +``` +neovim ─[stdio JSON-RPC]─→ bundle exec bin/clear-lsp + │ + ├─ src/lsp/rpc.rb Content-Length framing + ├─ src/lsp/server.rb Message loop + dispatch + ├─ src/lsp/document_store.rb open buffers + cached findings + ├─ src/lsp/analyzer.rb Lexer→Parser→Annotator + │ wraps FixCollector to capture findings + │ mirrors the lambda used by `clear fix` + ├─ src/lsp/diagnostics.rb Finding → LSP::Diagnostic + ├─ src/lsp/code_actions.rb Fix → LSP::CodeAction + ├─ src/lsp/hover.rb Diagnostic+ → markdown + └─ src/lsp/position.rb bytes ↔ UTF-16 columns +``` + +Each module is independently tested under `spec/lsp/` with 100% line +coverage (431/431 lines). The integration spec drives the actual +binary end-to-end. + +## VS Code + +The repo ships an in-tree VS Code extension at +`.vscode/extensions/cheat-lang/`. From the repo root: + +```sh +cd .vscode/extensions/cheat-lang +npm install +npm run compile +``` + +Reload your VS Code window once. Open a `.cht` file → squiggles, +hover, and Ctrl+. quick-fix all light up. Full instructions and +settings reference are in that extension's own README. + +## GitHub Codespaces + +`.devcontainer/devcontainer.json` provisions a full environment +(Ruby 3.2, Node 22, Zig pinned to CI's version) and runs +`bundle install` + builds the VS Code extension automatically. +Click "Code → Codespaces → Create codespace on lsp" on GitHub and +the LSP is ready as soon as the build finishes (~1-2 min cold). 
+ +## Out of scope (future work) + +- Completion (`textDocument/completion`) +- Go-to-definition / rename / references / workspace symbols +- Semantic tokens +- Multi-file `REQUIRE` graph: editing one file should re-analyse + files that import it +- Incremental sync (full sync is fast enough for now) + +Each is additive — adding any won't break the MVP's behaviour. diff --git a/src/lsp/analyzer.rb b/src/lsp/analyzer.rb new file mode 100644 index 000000000..680572dd1 --- /dev/null +++ b/src/lsp/analyzer.rb @@ -0,0 +1,82 @@ +require_relative "../backends/transpiler" # loads Lexer, Parser, SemanticAnnotator, FixCollector + +module LSP + # Runs the canonical CLEAR compiler frontend on a source string and + # returns the captured FixableFindings (plus any unrecoverable + # CompilerError/ParserError as a synthetic finding so the client + # still sees a diagnostic). + # + # Mirrors the `run_compiler_and_drain` lambda in `bin/clear` (used + # by `clear fix`) — the LSP and the CLI take exactly the same + # analysis path so behaviour stays consistent. + # + # FixCollector is module-global state. The Server serialises + # `Analyzer.run` calls behind a mutex so concurrent analyses don't + # interleave their findings. + module Analyzer + # Pseudo-token shape used when we can't extract a real token from + # a raised CompilerError/ParserError (synthetic frontend errors, + # EOF errors, etc.). Exposes the fields Diagnostics expects. + SyntheticToken = Struct.new(:line, :column, :value, keyword_init: true) + + # Result of one analysis pass. + Result = Struct.new(:findings, :fatal_error, keyword_init: true) do + def fatal?; !fatal_error.nil?; end + end + + module_function + + # Run the lexer, parser, and annotator on `source`. Returns a + # Result with the FixCollector findings and an optional + # `fatal_error` (a synthetic FixableFinding) if the parser or + # annotator raised. + def run(source) + FixCollector.enable! 
+
+      findings = []
+      fatal = nil
+      begin
+        tokens = Lexer.new(source).tokenize
+        ast = Parser.new(tokens, source).parse
+        annotator = SemanticAnnotator.new
+        annotator.source_code = source
+        annotator.annotate!(ast)
+      rescue CompilerError, ParserError => e
+        fatal = synthetic_finding_from(e)
+      rescue => e
+        # Lexer / unforeseen errors. Don't lose them — surface as a
+        # generic synthetic diagnostic at line 1 col 1.
+        fatal = SyntheticFinding.new(
+          level: :error,
+          message: "Internal compiler error: #{e.class}: #{e.message}",
+          token: SyntheticToken.new(line: 1, column: 1, value: ""),
+          category: :type,
+          fixes: [],
+        )
+      ensure
+        findings = FixCollector.drain
+        FixCollector.disable!
+      end
+      Result.new(findings: findings, fatal_error: fatal)
+    end
+
+    # Internals --------------------------------------------------
+
+    # Lightweight stand-in for FixableFinding so the Diagnostics
+    # converter can treat both uniformly. Has the same surface
+    # (level, message, token, category, fixes).
+    SyntheticFinding = Struct.new(:level, :message, :token, :category, :fixes, keyword_init: true) do
+      def fatal?; level == :error; end
+    end
+
+    def synthetic_finding_from(err)
+      tok = err.respond_to?(:token) && err.token ? err.token : SyntheticToken.new(line: 1, column: 1, value: "")
+      SyntheticFinding.new(
+        level: :error,
+        message: err.original_message || err.message,
+        token: tok,
+        category: err.is_a?(ParserError) ? :syntax : :type,
+        fixes: [],
+      )
+    end
+  end
+end
diff --git a/src/lsp/code_actions.rb b/src/lsp/code_actions.rb
new file mode 100644
index 000000000..d8b581632
--- /dev/null
+++ b/src/lsp/code_actions.rb
@@ -0,0 +1,103 @@
+require_relative "position"
+require_relative "diagnostics"
+
+module LSP
+  # Converts FixableFinding fixes into LSP CodeActions.
+  #
+  # The client sends `textDocument/codeAction` with a uri + range.
+  # We return every fix attached to a finding whose token range
+  # overlaps the requested range. 
Each CodeAction carries: + # + # - title — fix.description (shown in the quick-fix menu) + # - kind — 'quickfix' for :auto, 'refactor' for :interactive + # - diagnostics — the originating Diagnostic (lets the client + # group actions under their error) + # - edit — a WorkspaceEdit with a TextDocumentEdit array; + # each TextEdit's range comes from the Fix's + # Edit span. + # - isPreferred — true for :auto fixes (Neovim picks these by + # default when binding `ca`). + # + # No new analysis runs here. We read from `DocumentStore`'s + # cached findings, populated by `Server#analyze_and_publish`. + module CodeActions + KIND_QUICKFIX = "quickfix".freeze + KIND_REFACTOR = "refactor".freeze + + module_function + + # Build the CodeAction array for `request_range` against the + # document. Returns an empty array when there's nothing relevant + # (no findings, no overlap, or no fixes). + def for_range(document, request_range) + return [] unless document + result = document.cached_findings + return [] unless result + + source = document.text + out = [] + + result.findings.each do |finding| + next if finding.fixes.empty? + diag = Diagnostics.from_finding(finding, source) + next unless ranges_overlap?(diag[:range], request_range) + + finding.fixes.each do |fix| + out << build_action(fix, finding, diag, document, source) + end + end + + out + end + + # ---- internals ---- + + def build_action(fix, _finding, diag, document, source) + kind = fix.confidence == :auto ? KIND_QUICKFIX : KIND_REFACTOR + edits = fix.edits.map { |e| build_text_edit(e, source) } + + action = { + title: fix.description, + kind: kind, + diagnostics: [diag], + edit: { + documentChanges: [ + { + textDocument: { uri: document.uri, version: document.version }, + edits: edits, + }, + ], + }, + } + action[:isPreferred] = true if fix.confidence == :auto + action + end + + # Convert a Fix's Edit (line/col/length-based) into an LSP + # TextEdit (range/newText). 
+ def build_text_edit(edit, source) + { + range: Position.range_for_span(edit.span, source), + newText: edit.replacement, + } + end + + # LSP range overlap. Two ranges overlap unless one ends strictly + # before the other begins. Each range is `{start: {line, character}, + # end: {line, character}}`. Compare via `<=>` since Array#< + # isn't defined. + def ranges_overlap?(a, b) + return false if (range_position(a, :end) <=> range_position(b, :start)) < 0 + return false if (range_position(b, :end) <=> range_position(a, :start)) < 0 + true + end + + # Pack a range's start or end into a comparable [line, char] + # tuple. Tolerates string-keyed positions from the LSP wire. + def range_position(range, side) + pos = range[side] + pos ||= range[side.to_s] + [pos[:line] || pos["line"], pos[:character] || pos["character"]] + end + end +end diff --git a/src/lsp/diagnostics.rb b/src/lsp/diagnostics.rb new file mode 100644 index 000000000..3bd808e3c --- /dev/null +++ b/src/lsp/diagnostics.rb @@ -0,0 +1,95 @@ +require_relative "position" +require_relative "../ast/diagnostic_registry" + +module LSP + # Converts CLEAR's FixableFinding (and synthetic findings from the + # Analyzer) into LSP `Diagnostic` objects. The output shape matches + # the LSP 3.17 spec: severity (1-4), range (start/end with UTF-16 + # character offsets), code, source, message, and optional related + # information. + # + # The `code` field is the registry symbol when we can recover it — + # we look up the registered template against the message text. For + # synthetic errors (ParserError / unrecoverable CompilerError), we + # leave `code` nil and just surface the message. + module Diagnostics + # LSP DiagnosticSeverity values. 
+ SEVERITY_ERROR = 1 + SEVERITY_WARNING = 2 + SEVERITY_INFO = 3 + SEVERITY_HINT = 4 + + SEVERITY_FOR_LEVEL = { + error: SEVERITY_ERROR, + warning: SEVERITY_WARNING, + info: SEVERITY_INFO, + hint: SEVERITY_HINT, + }.freeze + + SOURCE_NAME = "clear".freeze + + module_function + + # Convert a single FixableFinding (or synthetic equivalent) to an + # LSP Diagnostic hash. `source_text` is optional — when provided, + # we compute exact UTF-16 column offsets for tokens that span + # multi-byte characters. + def from_finding(finding, source_text = nil) + tok = finding.token + length = token_length(tok) + range = Position.range_for(tok, length, source_text) + + { + range: range, + severity: SEVERITY_FOR_LEVEL.fetch(finding.level, SEVERITY_ERROR), + source: SOURCE_NAME, + message: finding.message.to_s, + code: code_for(finding), + }.compact + end + + # Convert a list of findings + an optional fatal error into the + # array of Diagnostics for a single document. + def from_result(result, source_text = nil) + diags = result.findings.map { |f| from_finding(f, source_text) } + diags << from_finding(result.fatal_error, source_text) if result.fatal? + diags + end + + # ---- internals ---- + + # The token's length in bytes. CLEAR tokens carry a `value` (the + # parsed lexeme); its byte size is the column-extent. Synthetic + # tokens may have an empty value — we floor at 1 so the squiggle + # is at least one character wide. + def token_length(tok) + val = tok.respond_to?(:value) ? tok.value : nil + len = val.is_a?(String) ? val.bytesize : 1 + len <= 0 ? 1 : len + end + + # Try to recover the registry code from a finding's message. The + # registry stores templates with `%{name}` placeholders; we + # extract the literal-prefix of each template (everything before + # the first placeholder) and check if the message starts with it. + # First match wins. 
This is best-effort — exact backwards mapping + # is tricky because messages built via DiagnosticRegistry.format + # don't carry their code at the call site. + def code_for(finding) + msg = finding.message.to_s + return nil if msg.empty? + + DiagnosticRegistry::DIAGNOSTICS.each do |code, entry| + template = entry[:template] + next unless template + prefix = template.split(/%\{[^}]+\}/, 2).first.to_s + # Skip umbrella templates like "%{message}" (prefix is empty). + next if prefix.empty? + # Strip trailing punctuation/whitespace for a slightly looser + # match — the template's prefix often ends mid-word. + return code.to_s if msg.start_with?(prefix) + end + nil + end + end +end diff --git a/src/lsp/document_store.rb b/src/lsp/document_store.rb new file mode 100644 index 000000000..0db3f06da --- /dev/null +++ b/src/lsp/document_store.rb @@ -0,0 +1,68 @@ +module LSP + # In-memory store of open documents. The LSP advertises full-sync + # mode (`textDocumentSync: 1`) so `didChange` notifications carry + # the entire new buffer in `contentChanges[0].text` — we just + # replace the stored text. Incremental sync (mode 2) would require + # patching ranges; deferred until performance demands it. + # + # Each entry tracks the integer `version` from the client (LSP + # requires it to be monotonically increasing per uri) so later + # commits can cache analysis results keyed by version. + # + # The store is single-threaded — the Server only mutates it from + # the main message-loop thread. Re-analysis runs on a separate + # thread but only reads the latest text snapshot. + class DocumentStore + Document = Struct.new(:uri, :text, :version, keyword_init: true) do + # Cached findings + the version they reflect. Hover and + # codeAction read these without re-analysing. Set by the Server + # after each `analyze_and_publish` pass. 
+ def cached_findings; @cached_findings; end + def cached_findings=(value); @cached_findings = value; end + def cached_version; @cached_version; end + def cached_version=(value); @cached_version = value; end + end + + def initialize + @docs = {} + end + + # didOpen — new document arrives. + def open(uri, text, version) + @docs[uri] = Document.new(uri: uri, text: text, version: version) + end + + # didChange — full-sync replacement. + def update(uri, text, version) + doc = @docs[uri] + return nil unless doc + doc.text = text + doc.version = version + # Stale cache; next analysis will refresh. + doc.cached_findings = nil + doc.cached_version = nil + doc + end + + # didClose — drop the document. + def close(uri) + @docs.delete(uri) + end + + def get(uri) + @docs[uri] + end + + def text(uri) + @docs[uri]&.text + end + + def version(uri) + @docs[uri]&.version + end + + def each(&block) + @docs.each_value(&block) + end + end +end diff --git a/src/lsp/hover.rb b/src/lsp/hover.rb new file mode 100644 index 000000000..2d5aab67e --- /dev/null +++ b/src/lsp/hover.rb @@ -0,0 +1,148 @@ +require_relative "position" +require_relative "diagnostics" +require_relative "../ast/diagnostic_registry" +require_relative "../ast/diagnostic_examples" + +module LSP + # `textDocument/hover` handler. + # + # The MVP version is diagnostic-driven: when the cursor sits on a + # token that has an active diagnostic, we render the registered + # template's metadata (summary / cause / fix_hint) plus any worked + # example pulled from `spec/error_emission_coverage_spec.rb` via + # DiagnosticExamples. + # + # Returns an LSP `Hover` object — `{contents:, range:}` — or nil + # when there's nothing to show. nil tells the client to dismiss + # the hover popup. + # + # Identifier-based hover (signature, type, doc-string of any + # symbol at the cursor) is a follow-up; the registry path covers + # the highest-value case first. 
+  module Hover
+    module_function
+
+    # Build a hover response for the document at `position`. Returns
+    # nil when no diagnostic overlaps the cursor.
+    def render(document, position)
+      return nil unless document
+      result = document.cached_findings
+      return nil unless result
+
+      source = document.text
+      finding = find_overlapping(result, position, source)
+      return nil unless finding
+
+      diag = Diagnostics.from_finding(finding, source)
+      code = diag[:code]&.to_sym
+      entry = code ? DiagnosticRegistry.lookup(code) : nil
+      example = code ? DiagnosticExamples.lookup(code) : nil
+
+      {
+        contents: { kind: "markdown", value: build_markdown(diag, entry, example) },
+        range: diag[:range],
+      }
+    end
+
+    # ---- internals ----
+
+    # Find the most-relevant finding for the cursor position. We try
+    # two passes: first an exact range overlap (so the squiggled
+    # token always wins when the cursor is on it), then a same-line
+    # fallback so the user gets hover anywhere on a line that has a
+    # diagnostic. Without the fallback, diagnostics whose range is
+    # narrow (e.g. a 2-char `->` arrow anchor used by some fixable
+    # findings to position their edit) make hover effectively
+    # invisible — the user would have to pinpoint the cursor on the
+    # exact token to see anything.
+    def find_overlapping(result, position, source)
+      candidates = result.findings.dup
+      candidates << result.fatal_error if result.fatal?
+
+      # Pass 1 — strict range overlap. Wins for every finding whose
+      # squiggled token the cursor sits on.
+      strict = candidates.find do |f|
+        diag = Diagnostics.from_finding(f, source)
+        Position.position_in_range?(position, diag[:range])
+      end
+      return strict if strict
+
+      # Pass 2 — same-line fallback. Pick the finding whose start
+      # column is nearest the cursor's column on the same line, so
+      # the user can hover anywhere on the line and get something
+      # relevant. 
+ cursor_line = position[:line] || position["line"] + cursor_char = position[:character] || position["character"] + same_line = candidates.filter_map do |f| + diag = Diagnostics.from_finding(f, source) + next nil unless diag[:range][:start][:line] == cursor_line + [f, (diag[:range][:start][:character] - cursor_char).abs] + end + return nil if same_line.empty? + same_line.min_by { |_, dist| dist }.first + end + + def build_markdown(diag, entry, example) + lines = [] + lines << header_line(diag, entry) + lines << "" + lines << (entry && entry[:summary] ? entry[:summary] : diag[:message]) + + if entry && entry[:cause] + lines << "" + lines << "**Cause:** #{entry[:cause]}" + end + + if entry && entry[:fix_hint] + lines << "" + lines << "**Fix:** #{entry[:fix_hint]}" + end + + if example + if example[:bad] + lines << "" + lines << "**Example (bad):**" + lines << "```clear" + lines << example[:bad].rstrip + lines << "```" + end + if example[:fix] && !example[:fix].empty? + lines << "" + lines << "**Fix prose:** #{example[:fix].gsub("\n", " ")}" + end + if example[:good] + lines << "" + lines << "**Example (good):**" + lines << "```clear" + lines << example[:good].rstrip + lines << "```" + end + end + + lines.join("\n") + end + + def header_line(diag, entry) + severity = severity_label(diag[:severity]) + code = diag[:code] + if code && entry + "**[#{severity}] #{code}** _#{entry[:category]}_" + elsif code + "**[#{severity}] #{code}**" + else + "**[#{severity}]**" + end + end + + SEVERITY_LABELS = { + Diagnostics::SEVERITY_ERROR => "error", + Diagnostics::SEVERITY_WARNING => "warning", + Diagnostics::SEVERITY_INFO => "info", + Diagnostics::SEVERITY_HINT => "hint", + }.freeze + + def severity_label(severity) + SEVERITY_LABELS.fetch(severity, "error") + end + end +end diff --git a/src/lsp/logger.rb b/src/lsp/logger.rb new file mode 100644 index 000000000..5693dec77 --- /dev/null +++ b/src/lsp/logger.rb @@ -0,0 +1,26 @@ +module LSP + # Stderr logger. 
LSP clients display the server's stderr — never + # write log output to stdout (that's reserved for JSON-RPC frames). + class Logger + LEVELS = { debug: 0, info: 1, warn: 2, error: 3 }.freeze + + def initialize(level: :info, io: $stderr) + @level = LEVELS.fetch(level) + @io = io + end + + def debug(msg); log(:debug, msg); end + def info(msg); log(:info, msg); end + def warn(msg); log(:warn, msg); end + def error(msg); log(:error, msg); end + + private + + def log(level, msg) + return if LEVELS.fetch(level) < @level + ts = Time.now.strftime("%H:%M:%S.%3N") + @io.write("[#{ts}] [clear-lsp/#{level}] #{msg}\n") + @io.flush + end + end +end diff --git a/src/lsp/position.rb b/src/lsp/position.rb new file mode 100644 index 000000000..bae87f36f --- /dev/null +++ b/src/lsp/position.rb @@ -0,0 +1,108 @@ +module LSP + # Converts CLEAR's 1-based (line, column, length) tokens to LSP's + # 0-based (line, character) positions. The two coordinate systems + # disagree on: + # + # 1. Origin: CLEAR is 1-based; LSP is 0-based. + # 2. Encoding: LSP characters are UTF-16 code units. CLEAR tokens + # hold byte columns from the source. For ASCII source they're + # equivalent; for UTF-8 strings, `é` is one byte but two UTF-16 + # code units (still — most CLEAR source is ASCII so this only + # bites on string literals or comments with multi-byte chars). + # + # We expose two entry points: `range_for(token, length, source)` for + # tokens (used by Diagnostics) and `range_for_span(span, source)` for + # Edits (used by CodeActions). + module Position + module_function + + # Convert a CLEAR token + length into an LSP `Range` hash. + # `source` is the full document text (needed for the UTF-16 + # column calculation). When `source` is nil or the line is pure + # ASCII, this falls through to the fast byte-equals-character + # path. 
+ def range_for(token, length, source = nil) + line = token.line - 1 + col_start_byte = token.column - 1 + col_end_byte = col_start_byte + length + + line_text = line_at(source, line) if source + start_char = byte_to_utf16(line_text, col_start_byte) + end_char = byte_to_utf16(line_text, col_end_byte) + + { + start: { line: line, character: start_char }, + end: { line: line, character: end_char }, + } + end + + # Convert a Span (file/line/col/length, with possibly multi-line + # extent) into an LSP `Range`. CLEAR Spans currently always live + # on a single line; if that changes, the helper extends naturally. + def range_for_span(span, source = nil) + start_line = span.line - 1 + end_line = span.end_line - 1 + start_byte = span.col - 1 + end_byte = span.end_col - 1 + + if start_line == end_line + line_text = line_at(source, start_line) if source + { + start: { line: start_line, character: byte_to_utf16(line_text, start_byte) }, + end: { line: start_line, character: byte_to_utf16(line_text, end_byte) }, + } + else + start_text = line_at(source, start_line) if source + end_text = line_at(source, end_line) if source + { + start: { line: start_line, character: byte_to_utf16(start_text, start_byte) }, + end: { line: end_line, character: byte_to_utf16(end_text, end_byte) }, + } + end + end + + # Test whether an LSP position falls within an LSP range. + def position_in_range?(position, range) + pl, pc = position[:line] || position["line"], position[:character] || position["character"] + sl, sc = range[:start][:line], range[:start][:character] + el, ec = range[:end][:line], range[:end][:character] + return false if pl < sl || pl > el + return false if pl == sl && pc < sc + return false if pl == el && pc > ec + true + end + + # ---- internals ---- + + # Return the substring of `source` for the given 0-based line, or + # nil if out of bounds. We split lazily to keep large documents + # cheap for single-token lookups. 
+ def line_at(source, line_idx) + return nil unless source + lines = source.lines + return nil if line_idx < 0 || line_idx >= lines.size + lines[line_idx].chomp + end + + # Given a line of text and a byte offset, return the UTF-16 code + # unit count from the start of the line. ASCII-only lines short- + # circuit to the byte count. For multi-byte source, walk the line's + # codepoints and sum their UTF-16 widths. + def byte_to_utf16(line_text, byte_offset) + return byte_offset if line_text.nil? || line_text.ascii_only? + + bytes = 0 + utf16 = 0 + line_text.each_char do |ch| + break if bytes >= byte_offset + bytes += ch.bytesize + # Codepoints above U+FFFF take two UTF-16 code units (surrogate + # pair); below, one. Ruby's String#each_char yields one Unicode + # character per iteration, so counting code units means a per- + # char dispatch on codepoint magnitude. + utf16 += ch.ord > 0xFFFF ? 2 : 1 + end + utf16 + end + end +end diff --git a/src/lsp/rpc.rb b/src/lsp/rpc.rb new file mode 100644 index 000000000..eafa34eee --- /dev/null +++ b/src/lsp/rpc.rb @@ -0,0 +1,75 @@ +require "json" + +module LSP + # JSON-RPC framing for LSP. The protocol wraps every message in a + # tiny HTTP-like envelope: + # + # Content-Length: \r\n + # \r\n + # + # + # We read messages by parsing the header, reading exactly N bytes, + # and JSON-decoding. We write the inverse. Stdout MUST be unbuffered + # for the client to see frames promptly — the Server sets that. + # + # No other output may go to stdout. Logging goes to stderr (the LSP + # convention; corruption of the stdout frame disconnects the client). + module RPC + # Raised when the framing is malformed (missing Content-Length, + # truncated body, non-JSON payload). The server treats these as + # fatal — there's no way to recover an out-of-sync stream. + class FramingError < StandardError; end + + module_function + + # Read the next LSP message from `io`. Returns the parsed Hash, or + # nil at EOF (clean shutdown). 
Raises FramingError on malformed + # frames. + def read_message(io) + headers = read_headers(io) + return nil if headers.nil? # EOF before any header line + + length_str = headers["content-length"] + raise FramingError, "missing Content-Length header" if length_str.nil? + length = Integer(length_str) rescue nil + raise FramingError, "invalid Content-Length: #{length_str.inspect}" if length.nil? + raise FramingError, "negative Content-Length: #{length}" if length.negative? + + body = io.read(length) + raise FramingError, "truncated body (expected #{length} bytes)" if body.nil? || body.bytesize < length + + JSON.parse(body) + rescue JSON::ParserError => e + raise FramingError, "JSON parse error: #{e.message}" + end + + # Write `msg` (a Hash) as an LSP frame to `io`. + def write_message(io, msg) + body = JSON.generate(msg) + io.write("Content-Length: #{body.bytesize}\r\n\r\n#{body}") + io.flush + end + + # ---- internals ---- + + # Read header lines from `io` until a blank line. Returns a Hash + # of lowercased header names → values, or nil at EOF before any + # header line was read. + def read_headers(io) + headers = {} + first = true + loop do + line = io.gets + return nil if line.nil? && first + raise FramingError, "unexpected EOF in headers" if line.nil? + line = line.chomp + break if line.empty? + first = false + name, value = line.split(":", 2) + raise FramingError, "malformed header: #{line.inspect}" if value.nil? + headers[name.strip.downcase] = value.strip + end + headers + end + end +end diff --git a/src/lsp/server.rb b/src/lsp/server.rb new file mode 100644 index 000000000..61ae339cd --- /dev/null +++ b/src/lsp/server.rb @@ -0,0 +1,301 @@ +require_relative "rpc" +require_relative "logger" +require_relative "document_store" +require_relative "analyzer" +require_relative "diagnostics" +require_relative "code_actions" +require_relative "hover" + +module LSP + # CLEAR Language Server. 
The lifecycle pieces (initialize, shutdown, + # exit) handle protocol setup; the textDocument/* handlers run the + # canonical Lexer→Parser→SemanticAnnotator pipeline against open + # documents and publish diagnostics back to the client. + class Server + # JSON-RPC 2.0 reserved error codes used by LSP. + METHOD_NOT_FOUND = -32601 + + # `debounce_ms` is configurable so specs can drive the debounce + # path without sleeping for half a second; production runs at the + # default 500. + def initialize(stdin: $stdin, stdout: $stdout, log_level: :info, debounce_ms: 500) + @stdin = stdin + @stdout = stdout + @stdout.sync = true + @logger = Logger.new(level: log_level) + @initialized = false + @shutdown_requested = false + @docs = DocumentStore.new + # FixCollector is module-global; serialise analyses across + # whatever threads might trigger them. + @analyze_mutex = Mutex.new + # Stdout writes happen from the main loop AND from timer + # threads — guard frame integrity. + @output_mutex = Mutex.new + # Debounce machinery for didChange. One pending timer per uri; + # rapid edits cancel the prior timer. + @debounce_ms = debounce_ms + @timers = {} + @timer_mutex = Mutex.new + end + + # Main loop. Runs until `exit` notification or stdin EOF. + def run + @logger.info("clear-lsp starting") + loop do + msg = RPC.read_message(@stdin) + if msg.nil? + @logger.info("stdin closed; exiting") + break + end + dispatch(msg) + end + rescue RPC::FramingError => e + @logger.error("framing error: #{e.message}; client stream desynced — exiting") + exit_code = 1 + Kernel.exit(exit_code) + rescue => e + @logger.error("fatal: #{e.class}: #{e.message}\n #{e.backtrace.first(5).join("\n ")}") + Kernel.exit(1) + end + + # Synchronously wait for every pending timer thread to complete. + # Production never needs this — the LSP runs forever and exits + # via `exit` notification — but tests use it to step past the + # debounce window deterministically. + def flush_pending! 
+ threads = nil + @timer_mutex.synchronize { threads = @timers.values.dup } + threads.each(&:join) + end + + private + + # Dispatch a parsed message to the right handler. JSON-RPC messages + # are either requests (have an `id`, expect a response) or + # notifications (no `id`). Unknown methods get a MethodNotFound + # error response if it was a request; notifications are dropped + # silently (per JSON-RPC 2.0 spec). + def dispatch(msg) + method = msg["method"] + id = msg["id"] + params = msg["params"] || {} + @logger.debug("← #{method} #{id ? "(request id=#{id})" : "(notification)"}") + + case method + when "initialize" then respond(id, handle_initialize(params)) + when "initialized" then handle_initialized(params) + when "textDocument/didOpen" then handle_did_open(params) + when "textDocument/didChange" then handle_did_change(params) + when "textDocument/didSave" then handle_did_save(params) + when "textDocument/didClose" then handle_did_close(params) + when "textDocument/codeAction" then respond(id, handle_code_action(params)) + when "textDocument/hover" then respond(id, handle_hover(params)) + when "shutdown" then respond(id, handle_shutdown(params)) + when "exit" then handle_exit + else + if id + respond_error(id, METHOD_NOT_FOUND, "Method not found: #{method}") + else + @logger.debug("ignoring unknown notification: #{method}") + end + end + end + + def respond(id, result) + send_message(jsonrpc: "2.0", id: id, result: result) + end + + def respond_error(id, code, message) + send_message(jsonrpc: "2.0", id: id, error: { code: code, message: message }) + end + + def send_message(msg) + @logger.debug("→ #{msg[:method] || (msg[:result] ? "result(id=#{msg[:id]})" : "error(id=#{msg[:id]})")}") + @output_mutex.synchronize do + RPC.write_message(@stdout, msg) + end + end + + # ---- lifecycle handlers ---- + + # `initialize` request — the very first message. We declare the + # capabilities the server will support. + # + # `textDocumentSync: 1` = Full sync. 
The client sends the entire + # buffer on every `didChange`. Simpler than incremental sync; + # CLEAR files are small enough that the cost is negligible. + def handle_initialize(_params) + { + capabilities: { + textDocumentSync: 1, + hoverProvider: true, + codeActionProvider: { + codeActionKinds: [CodeActions::KIND_QUICKFIX, CodeActions::KIND_REFACTOR], + }, + }, + serverInfo: { + name: "clear-lsp", + version: "0.1.0", + }, + } + end + + def handle_initialized(_params) + @initialized = true + @logger.info("initialization complete") + nil + end + + # `shutdown` request — client asks the server to wind down. We + # acknowledge with a null result; the server keeps running until + # the subsequent `exit` notification. + def handle_shutdown(_params) + @shutdown_requested = true + @logger.info("shutdown requested") + nil + end + + # `exit` notification — terminate. Per LSP, exit code 0 if a + # `shutdown` was received first, 1 otherwise. + def handle_exit + @logger.info("exit (clean=#{@shutdown_requested})") + Kernel.exit(@shutdown_requested ? 0 : 1) + end + + # ---- textDocument/* handlers ---- + + # `textDocument/didOpen` — the client just opened a buffer. Cache + # it and run a first pass. + def handle_did_open(params) + td = params["textDocument"] + uri = td["uri"] + txt = td["text"] + ver = td["version"] + @docs.open(uri, txt, ver) + @logger.info("didOpen #{uri} (version=#{ver}, #{txt.lines.size} lines)") + analyze_and_publish(uri) + end + + # `textDocument/didChange` — full-sync replacement. The client + # sends the entire new text in `contentChanges[0].text`. We + # debounce the analysis so a flurry of keystrokes only triggers + # one full re-parse after the user pauses. + def handle_did_change(params) + td = params["textDocument"] + uri = td["uri"] + ver = td["version"] + changes = params["contentChanges"] || [] + return if changes.empty? 
+ new_text = changes.last["text"] + @docs.update(uri, new_text, ver) + @logger.debug("didChange #{uri} (version=#{ver}) — scheduled") + schedule_reanalyze(uri) + end + + # `textDocument/didSave` — re-analyze immediately (save is an + # explicit user action; no need to debounce). + def handle_did_save(params) + uri = params["textDocument"]["uri"] + @logger.debug("didSave #{uri}") + cancel_timer(uri) # prevent racing with a pending didChange timer + analyze_and_publish(uri) + end + + # `textDocument/didClose` — drop the document and clear any + # pending diagnostics on the client. + def handle_did_close(params) + uri = params["textDocument"]["uri"] + cancel_timer(uri) + @docs.close(uri) + publish_diagnostics(uri, []) + @logger.info("didClose #{uri}") + end + + # `textDocument/codeAction` — return the FixableFinding fixes + # that overlap the requested range as LSP CodeActions. No new + # analysis runs; we read from cached findings. + def handle_code_action(params) + uri = params["textDocument"]["uri"] + range = params["range"] + doc = @docs.get(uri) + actions = CodeActions.for_range(doc, range) + @logger.debug("codeAction #{uri} → #{actions.size} action(s)") + actions + end + + # `textDocument/hover` — when the cursor sits on a token that has + # an active diagnostic, render the registry entry + spec example + # as markdown. Returns nil to dismiss the hover popup when there's + # nothing relevant. + def handle_hover(params) + uri = params["textDocument"]["uri"] + pos = params["position"] + doc = @docs.get(uri) + hover = Hover.render(doc, pos) + @logger.debug("hover #{uri} → #{hover ? "rendered" : "none"}") + hover + end + + # Run the analyzer on the current text for `uri` and publish the + # resulting diagnostics. Caches the findings on the Document for + # later hover / code-action requests. 
+ def analyze_and_publish(uri) + doc = @docs.get(uri) + return unless doc + result = @analyze_mutex.synchronize { Analyzer.run(doc.text) } + doc.cached_findings = result + doc.cached_version = doc.version + + diagnostics = Diagnostics.from_result(result, doc.text) + publish_diagnostics(uri, diagnostics) + rescue => e + @logger.error("analyze_and_publish failed for #{uri}: #{e.class}: #{e.message}") + end + + # Send a `textDocument/publishDiagnostics` notification. + def publish_diagnostics(uri, diagnostics) + send_message( + jsonrpc: "2.0", + method: "textDocument/publishDiagnostics", + params: { uri: uri, diagnostics: diagnostics }, + ) + @logger.info("published #{diagnostics.size} diagnostic(s) for #{uri}") + end + + # ---- debounce machinery ---- + + # Schedule a re-analysis of `uri` after `@debounce_ms`. If a + # timer is already pending, kill it first — only the latest + # edit's analysis fires. The timer thread cleans up its own + # @timers entry on completion (unless a newer thread has + # already replaced it). + def schedule_reanalyze(uri) + delay = @debounce_ms / 1000.0 + @timer_mutex.synchronize do + @timers[uri]&.kill + @timers[uri] = Thread.new do + begin + sleep delay + analyze_and_publish(uri) + ensure + @timer_mutex.synchronize do + # Don't accidentally drop a NEWER timer that replaced us. + @timers.delete(uri) if @timers[uri] == Thread.current + end + end + end + end + end + + # Cancel any pending timer for `uri`. Used by didSave (which + # analyses immediately) and didClose (which drops the document). 
+ def cancel_timer(uri) + @timer_mutex.synchronize do + t = @timers.delete(uri) + t&.kill + end + end + + end +end diff --git a/syntaxes/cheat.tmLanguage.json b/syntaxes/cheat.tmLanguage.json index 55025c363..2be4453c9 100644 --- a/syntaxes/cheat.tmLanguage.json +++ b/syntaxes/cheat.tmLanguage.json @@ -1,51 +1,84 @@ { "$schema": "https://raw.githubusercontent.com/martinring/tmlanguage/master/tmlanguage.json", "name": "CLEAR", + "scopeName": "source.clear", "patterns": [ { "include": "#comments" }, { "include": "#strings" }, + { "include": "#capabilities" }, { "include": "#keywords" }, { "include": "#types" }, - { "include": "#capabilities" }, - { "include": "#functions" }, { "include": "#numbers" }, { "include": "#operators" }, - { "include": "#sigils" } + { "include": "#sigils" }, + { "include": "#bindings" }, + { "include": "#functions" } ], "repository": { "comments": { "patterns": [ { - "name": "comment.line.double-dash.clear", - "match": "--.*$" + "name": "comment.line.number-sign.clear", + "match": "#.*$" } ] }, "keywords": { "patterns": [ { + "comment": "Control flow", "name": "keyword.control.clear", - "match": "\\b(IF|THEN|ELSE|ELSE_IF|END|WHILE|DO|FOR|IN|BG|NEXT|BREAK|CONTINUE|RETURN|RETURNS|MATCH|START|DEFAULT|WHEN|IFF|CATCH|EXIT|DIE|PASS|PRUNE|RAISE|ASSERT|TIGHT|YIELD)\\b" + "match": "\\b(IF|THEN|ELSE|ELSE_IF|END|WHILE|DO|FOR|IN|BG|NEXT|BREAK|CONTINUE|RETURN|RETURNS|MATCH|PARTIAL|START|DEFAULT|WHEN|IFF|CATCH|EXIT|DIE|PASS|PRUNE|RAISE|ASSERT|TIGHT|YIELD|OR_RESCUE|ON|RETRY)\\b" }, { + "comment": "Storage / declarations", "name": "storage.type.clear", - "match": "\\b(FN|MUTABLE|STRUCT|ENUM|UNION|STREAM)\\b" + "match": "\\b(FN|METHOD|MUTABLE|STRUCT|ENUM|UNION|STREAM)\\b" }, { + "comment": "Ownership / memory operators", "name": "keyword.other.memory.clear", - "match": "\\b(GIVE|TAKES|COPY|MOVE)\\b" + "match": "\\b(GIVE|TAKES|COPY|MOVE|SHARE|LINK|RESOLVE|FREEZE|CLONE)\\b" }, { + "comment": "Pipeline / query operators", "name": "keyword.other.pipeline.clear", 
- "match": "\\b(SELECT|WHERE|UNNEST|EACH|FIND|ANY|ALL|INDEX|SORT|ORDER_BY|LIMIT|DISTINCT|REDUCE|COUNT|SUM|AVERAGE|MIN|MAX|CONCURRENT|SHARD)\\b" + "match": "\\b(SELECT|WHERE|UNNEST|EACH|FIND|ANY|ALL|INDEX|SORT|ORDER_BY|LIMIT|SKIP|DISTINCT|REDUCE|COUNT|SUM|AVERAGE|AVG|MIN|MAX|CONCURRENT|SHARD|JOIN|WINDOW|TAKE_WHILE|TAKEWHILE|TAP|FOLD|COLLECT)\\b" }, { + "comment": "Module / FFI / visibility", "name": "keyword.other.module.clear", - "match": "\\b(REQUIRE|USE|PUB|PRIVATE|EXTERN|FROM|EFFECTS|CLOSE)\\b" + "match": "\\b(REQUIRE|USE|PUB|PRIVATE|EXTERN|FROM|EFFECTS|CLOSE|CAPTURES)\\b" }, { + "comment": "WITH-block capabilities", "name": "keyword.other.capability.clear", - "match": "\\b(WITH|EXCLUSIVE|RESTRICT|CAST|AS|MOD|OR)\\b" + "match": "\\b(WITH|EXCLUSIVE|RESTRICT|BORROWED|VIEW|MATERIALIZED|SNAPSHOT|POLYMORPHIC|GUARD|CAST|AS|POSSIBLE_DEADLOCK|POSSIBLE_LOCK_CYCLE)\\b" + }, + { + "comment": "REQUIRES families and reentrance variants", + "name": "keyword.other.requires.clear", + "match": "\\b(REQUIRES|LOCKED|VERSIONED|ATOMIC|LOCAL|ACTOR|NON_REENTRANT|REENTRANT|SNAPSHOTTED|MAX_DEPTH|NOT_LOGICAL|THUNK|TAIL_CALL)\\b" + }, + { + "comment": "Predicate clauses", + "name": "keyword.other.contract.clear", + "match": "\\b(PRE|DEBUG_POST)\\b" + }, + { + "comment": "SYNC POLICY (top-level concurrency policy)", + "name": "keyword.other.policy.clear", + "match": "\\b(SYNC|POLICY)\\b" + }, + { + "comment": "Test framework keywords", + "name": "keyword.other.test.clear", + "match": "\\b(TEST|THAT|BENCH|BENCHMARK|BEFORE|AFTER|SETUP|LET|EXPECT|PENDING|STUB)\\b" + }, + { + "comment": "Word-shaped operators", + "name": "keyword.operator.word.clear", + "match": "\\b(AND|OR|NOT|MOD|IS)\\b" }, { "name": "constant.language.boolean.clear", @@ -56,10 +89,12 @@ "types": { "patterns": [ { + "comment": "Built-in primitive and stdlib types", "name": "support.type.clear", - "match": "\\b(Number|Int64|Float64|Byte|Bool|String|Void|HashMap|Id|TCPServer|TCPClient|File)\\b" + "match": 
"\\b(Number|Int8|Int16|Int32|Int64|UInt8|UInt16|UInt32|UInt64|Float32|Float64|Byte|Bool|String|Void|Auto|Any|HashMap|Set|List|Pool|Map|Stream|Promise|Id|TCPServer|TCPClient|File|Counter|Box)\\b" }, { + "comment": "User-defined type names — capitalised identifiers", "name": "entity.name.type.clear", "match": "\\b[A-Z][a-zA-Z0-9]*\\b" } @@ -68,8 +103,12 @@ "capabilities": { "patterns": [ { - "name": "storage.modifier.capability.clear", - "match": "@(multiowned|shared|locked|writeLocked|list|pool|set|sharded|local|indirect|pinned|arena|large|xl|service|micro|standard)" + "comment": "Capability sigil with optional :modifier:modifier... chain (e.g. @shared:locked, @indirect:atomic, @sharded(N))", + "match": "(@(?:multiowned|shared|locked|writeLocked|list|pool|set|map|sharded|striped|local|indirect|atomic|versioned|observable|pinned|arena|large|xl|service|micro|standard|reentrant|nonReentrant|canSmash|parallel|soa|split|raw|frozen|alwaysMutable|link|thunk|maxDepth))((?::[a-zA-Z][a-zA-Z0-9]*)*)", + "captures": { + "1": { "name": "storage.modifier.capability.clear" }, + "2": { "name": "storage.modifier.capability.chain.clear" } + } } ] }, @@ -83,20 +122,31 @@ "match": "\\\\." }, { + "comment": "${...} interpolation with embedded CLEAR expressions", "name": "meta.embedded.expression.clear", "begin": "\\$\\{", "end": "\\}", "beginCaptures": { "0": { "name": "punctuation.definition.interpolation.begin.clear" } }, - "endCaptures": { "0": { "name": "punctuation.definition.interpolation.end.clear" } }, + "endCaptures": { "0": { "name": "punctuation.definition.interpolation.end.clear" } }, "patterns": [ { "include": "$self" } ] } ] }, + "bindings": { + "patterns": [ + { + "comment": "Pipeline AS-binding alias: AS $u, AS $u.field, etc.", + "name": "variable.parameter.pipeline.clear", + "match": "\\$[a-zA-Z_][a-zA-Z0-9_]*" + } + ] + }, "functions": { "patterns": [ { + "comment": "Function call: identifier (with optional !/? 
suffix) followed by (",
         "match": "\\b([a-zA-Z_][a-zA-Z0-9_]*[!?]?)(?=\\()",
         "captures": {
           "1": { "name": "entity.name.function.clear" }
@@ -108,71 +158,76 @@
       "patterns": [
         {
           "name": "constant.numeric.hex.clear",
-          "match": "\\b0x[0-9a-fA-F_]+(u8|u16|u32|u64|i8|i16|i32|i64)?\\b"
+          "match": "\\b0x[0-9a-fA-F_]+(_?(u8|u16|u32|u64|i8|i16|i32|i64))?\\b"
         },
         {
           "name": "constant.numeric.binary.clear",
-          "match": "\\b0b[01_]+(u8|u16|u32|u64|i8|i16|i32|i64)?\\b"
+          "match": "\\b0b[01_]+(_?(u8|u16|u32|u64|i8|i16|i32|i64))?\\b"
         },
         {
           "name": "constant.numeric.float.clear",
-          "match": "\\b\\d[\\d_]*\\.\\d[\\d_]*(f32|f64)?\\b"
+          "match": "\\b\\d[\\d_]*\\.\\d[\\d_]*(_?(f32|f64))?\\b"
         },
         {
           "name": "constant.numeric.integer.clear",
-          "match": "\\b\\d[\\d_]*(u8|u16|u32|u64|i8|i16|i32|i64|_i64|_f64)?\\b"
+          "match": "\\b\\d[\\d_]*(_?(u8|u16|u32|u64|i8|i16|i32|i64|f32|f64))?\\b"
         }
       ]
     },
     "sigils": {
       "patterns": [
         {
-          "name": "keyword.operator.sigil.heap.clear",
-          "match": "%"
-        },
-        {
-          "name": "keyword.operator.sigil.borrow.clear",
-          "match": "&"
-        },
-        {
+          "comment": "Explicit panic operator",
           "name": "keyword.operator.panic.clear",
           "match": "!!"
         },
         {
+          "comment": "Mutation suffix on identifiers (foo!, increment!) — must follow a word",
           "name": "keyword.operator.mutation.clear",
          "match": "(?<=[a-zA-Z0-9_])!"
         },
         {
+          "comment": "Error-union prefix (RETURNS !T, !Void, ...)",
           "name": "keyword.operator.error-union.clear",
-          "match": "(?<=\\s)!"
+          "match": "(?<![a-zA-Z0-9_!])!"
         },
         {
+          "comment": "Pipeline operator",
           "name": "keyword.operator.pipeline.clear",
-          "match": "s>"
+          "match": "\\|>"
         },
         {
+          "comment": "Function arrow",
           "name": "keyword.operator.arrow.clear",
           "match": "->"
         },
         {
+          "comment": "Range operators: inclusive, exclusive, slice-wildcard",
           "name": "keyword.operator.range.clear",
-          "match": "(\\.\\.<|\\.\\.\\.=|\\.\\.<=|\\[\\*\\])"
+          "match": "(\\.\\.<|\\.\\.=|\\.\\.|\\[\\*\\])"
         },
         {
           "name": "keyword.operator.compound-assign.clear",
-          "match": "(\\+=|-=|\\*=|/=)"
+          "match": "(\\+=|-=|\\*=|/=|\\.=)"
         },
         {
           "name": "keyword.operator.comparison.clear",
@@ -187,11 +242,11 @@
           "match": "="
         },
         {
+          "comment": "Discard / placeholder identifier",
           "name": "variable.language.placeholder.clear",
           "match": "\\b_\\b"
         }
       ]
     }
-  },
-  "scopeName": "source.clear"
+  }
 }
diff --git a/syntaxes/cheat.vim b/syntaxes/cheat.vim
index 7c80aba26..9e1fe6055 100644
--- a/syntaxes/cheat.vim
+++ b/syntaxes/cheat.vim
@@ -1,106 +1,196 @@
 " Vim syntax file
 " Language: CLEAR
-" Maintainer: CLEAR Language Team
-" Latest Revision: 31 March 2026
+" Filetype: clear
+" File extension: .cht
+"
+" Install for Neovim:
+"   ln -s /path/to/cheat/syntaxes/cheat.vim ~/.config/nvim/syntax/clear.vim
+" Or for Vim:
+"   ln -s /path/to/cheat/syntaxes/cheat.vim ~/.vim/syntax/clear.vim
+"
+" The buffer must have `set filetype=clear` (the LSP autocmd in
+" `src/lsp/README.md` handles this for `.cht` files).
if exists("b:current_syntax")
  finish
endif

" -------------------------------------------------------------------
" Comments — `#` line comments
" -------------------------------------------------------------------
syn match clearComment "#.*$"

" -------------------------------------------------------------------
" Control flow
" -------------------------------------------------------------------
syn keyword clearControl IF THEN ELSE ELSE_IF END WHILE DO FOR IN BG NEXT
syn keyword clearControl BREAK CONTINUE RETURN RETURNS
syn keyword clearControl MATCH PARTIAL START DEFAULT WHEN IFF
syn keyword clearControl CATCH EXIT DIE PASS PRUNE RAISE ASSERT
syn keyword clearControl TIGHT YIELD OR_RESCUE ON RETRY

" -------------------------------------------------------------------
" Storage / declarations
" -------------------------------------------------------------------
syn keyword clearStorage FN METHOD MUTABLE STRUCT ENUM UNION STREAM

" -------------------------------------------------------------------
" Ownership / memory operators
" -------------------------------------------------------------------
syn keyword clearMemory GIVE TAKES COPY MOVE SHARE LINK RESOLVE FREEZE CLONE

" -------------------------------------------------------------------
" Pipeline / query operators
" -------------------------------------------------------------------
syn keyword clearPipeline SELECT WHERE UNNEST EACH FIND ANY ALL
syn keyword clearPipeline INDEX SORT ORDER_BY LIMIT SKIP DISTINCT REDUCE
syn keyword clearPipeline COUNT SUM AVERAGE AVG MIN MAX
syn keyword clearPipeline CONCURRENT SHARD JOIN WINDOW
syn keyword clearPipeline TAKE_WHILE TAKEWHILE TAP FOLD COLLECT

" -------------------------------------------------------------------
" Module / FFI / visibility
" -------------------------------------------------------------------
syn keyword clearModule REQUIRE USE PUB PRIVATE EXTERN FROM EFFECTS CLOSE CAPTURES

" -------------------------------------------------------------------
" WITH-block capabilities + lock-cycle escape modifiers
" -------------------------------------------------------------------
syn keyword clearCapKeyword WITH EXCLUSIVE RESTRICT BORROWED VIEW MATERIALIZED
syn keyword clearCapKeyword SNAPSHOT POLYMORPHIC GUARD CAST AS
syn keyword clearCapKeyword POSSIBLE_DEADLOCK POSSIBLE_LOCK_CYCLE

" -------------------------------------------------------------------
" REQUIRES families and reentrance variants
" -------------------------------------------------------------------
syn keyword clearRequires REQUIRES LOCKED VERSIONED ATOMIC LOCAL ACTOR
syn keyword clearRequires NON_REENTRANT REENTRANT SNAPSHOTTED
syn keyword clearRequires MAX_DEPTH NOT_LOGICAL THUNK TAIL_CALL

" -------------------------------------------------------------------
" Predicate / contract clauses
" -------------------------------------------------------------------
syn keyword clearContract PRE DEBUG_POST

" -------------------------------------------------------------------
" SYNC POLICY (top-level concurrency policy)
" -------------------------------------------------------------------
syn keyword clearPolicy SYNC POLICY

" -------------------------------------------------------------------
" Test framework keywords
" -------------------------------------------------------------------
syn keyword clearTestKw TEST THAT BENCH BENCHMARK BEFORE AFTER SETUP
syn keyword clearTestKw LET EXPECT PENDING STUB

" -------------------------------------------------------------------
" Word-shaped operators
" -------------------------------------------------------------------
syn keyword clearWordOp AND OR NOT MOD IS

" -------------------------------------------------------------------
" Boolean / nil literals
" -------------------------------------------------------------------
syn keyword clearBoolean TRUE FALSE NIL

" -------------------------------------------------------------------
" Built-in primitive and stdlib types
" -------------------------------------------------------------------
syn keyword clearType Number Int8 Int16 Int32 Int64 UInt8 UInt16 UInt32 UInt64
syn keyword clearType Float32 Float64 Byte Bool String Void Auto Any
syn keyword clearType HashMap Set List Pool Map Stream Promise Id
syn keyword clearType TCPServer TCPClient File Counter Box

" User-defined types — capitalised identifiers
syn match clearUserType "\<[A-Z][a-zA-Z0-9]*\>"

" -------------------------------------------------------------------
" Capability sigils — @cap with optional :modifier:modifier... chain
" e.g. @shared:locked, @indirect:atomic, @list:soa
" -------------------------------------------------------------------
syn match clearCapability "@\(multiowned\|shared\|locked\|writeLocked\|list\|pool\|set\|map\|sharded\|striped\|local\|indirect\|atomic\|versioned\|observable\|pinned\|arena\|large\|xl\|service\|micro\|standard\|reentrant\|nonReentrant\|canSmash\|parallel\|soa\|split\|raw\|frozen\|alwaysMutable\|link\|thunk\|maxDepth\)\(:[a-zA-Z][a-zA-Z0-9]*\)*"

" -------------------------------------------------------------------
" Pipeline AS-binding alias and string-interpolation prefix
" -------------------------------------------------------------------
syn match clearBinding "\$[a-zA-Z_][a-zA-Z0-9_]*"

" -------------------------------------------------------------------
" Functions (calls and definitions)
" -------------------------------------------------------------------
syn match clearFunction "\w\+[!?]\?" display contained
syn match clearFunctionCall "\w\+[!?]\?(" contains=clearFunction

" -------------------------------------------------------------------
" Sigils and special operators
" NOTE on ordering: for items matching at the same position the one
" defined LAST wins (:help syn-priority), so clearPanic ("!!") must
" come AFTER the single-bang matches or "!!" would highlight as an
" error-union prefix.
" -------------------------------------------------------------------
" Mutation suffix on identifiers (foo!) — must follow a word char
syn match clearMutationBang "[a-zA-Z0-9_]\@<=!"
" Error-union prefix (RETURNS !T, !Void): a `!` preceded by neither a
" word char nor another `!`.  (Fixes the truncated, invalid pattern
" `[a-zA-Z0-9_!]\@` which aborted syntax loading.)
syn match clearErrorUnion "[a-zA-Z0-9_!]\@<!!"
" NOTE(review): '?' predicate suffix assumed analogous to the '!'
" mutation suffix — confirm against the grammar.
syn match clearOptional "[a-zA-Z0-9_]\@<=?"
" Pipeline operator (restored: it was hi-linked but never defined)
syn match clearPipelineOp "|>"
" Explicit panic operator — defined last so it outranks the
" single-bang matches on "!!"
syn match clearPanic "!!"
syn match clearArrow "->"
syn match clearRange "\.\.<\|\.\.=\|\.\."
syn match clearCompoundAssign "\(+=\|-=\|\*=\|/=\|\.=\)"
syn match clearComparison "==\|!=\|<=\|>=\|<\|>"
syn match clearPlaceholder "\<_\>"

" -------------------------------------------------------------------
" Numbers (with underscore separators and type suffixes)
" -------------------------------------------------------------------
syn match clearNumber "\<\d[\d_]*\(_\?\(u8\|u16\|u32\|u64\|i8\|i16\|i32\|i64\|f32\|f64\)\)\?\>"
syn match clearFloat "\<\d[\d_]*\.\d[\d_]*\(_\?\(f32\|f64\)\)\?\>"
syn match clearHex "\<0x[0-9a-fA-F_]\+\(_\?\(u8\|u16\|u32\|u64\|i8\|i16\|i32\|i64\)\)\?\>"
syn match clearBinary "\<0b[01_]\+\(_\?\(u8\|u16\|u32\|u64\|i8\|i16\|i32\|i64\)\)\?\>"

" -------------------------------------------------------------------
" Strings with ${...} interpolation
" -------------------------------------------------------------------
syn region clearString start='"' end='"' skip='\\"' contains=clearInterpolation,clearEscape
syn match clearEscape "\\." contained
syn region clearInterpolation start='\${' end='}' contained contains=TOP

" -------------------------------------------------------------------
" Highlight links to standard groups
" (dangling links to the removed clearTense / clearSigilHeap groups
" have been dropped; clearOptional / clearPipelineOp now resolve.)
" -------------------------------------------------------------------
hi def link clearControl        Conditional
hi def link clearStorage        StorageClass
hi def link clearMemory         Exception
hi def link clearPipeline       Keyword
hi def link clearModule         Include
hi def link clearCapKeyword     Keyword
hi def link clearRequires       Keyword
hi def link clearContract       PreProc
hi def link clearPolicy         Keyword
hi def link clearTestKw         Macro
hi def link clearWordOp         Operator
hi def link clearBoolean        Boolean
hi def link clearType           Type
hi def link clearUserType       Type
hi def link clearComment        Comment
hi def link clearString         String
hi def link clearEscape         SpecialChar
hi def link clearInterpolation  Special
hi def link clearNumber         Number
hi def link clearFloat          Float
hi def link clearHex            Number
hi def link clearBinary         Number
hi def link clearCapability     StorageClass
hi def link clearBinding        Identifier
hi def link clearPanic          Error
hi def link clearMutationBang   Special
hi def link clearErrorUnion     Special
hi def link clearOptional       Special
hi def link clearPipelineOp     Operator
hi def link clearArrow          Operator
hi def link clearRange          Operator
hi def link clearCompoundAssign Operator
hi def link clearComparison     Operator
hi def link clearPlaceholder    Special
hi def link clearFunction       Function

let b:current_syntax = "clear"