diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml index 4b00693e..3e316d34 100644 --- a/.github/workflows/ci-cd.yml +++ b/.github/workflows/ci-cd.yml @@ -1,9 +1,19 @@ +# TODO: check formatting +# TODO: lock files on all platforms +# TODO: test with all supported python versions name: CI on: merge_group: push: pull_request: + workflow_dispatch: + inputs: + publish_testpypi: + description: 'Publish to TestPyPI' + required: true + type: boolean + default: false defaults: run: @@ -47,7 +57,7 @@ jobs: run: | python -m venv .venvs/dev_workspace source .venvs/dev_workspace/bin/activate - python -m pip install --upgrade pip==25.1.1 + python -m pip install --upgrade pip==25.3 # split installation into 2 steps: first internal preset because it is not # published as pypi package, so pip will fail to resolve it in dev_workspace @@ -58,102 +68,61 @@ jobs: python -m finecode prepare-envs shell: bash - # TODO: install all other supported python versions. Version can be extracted from finecode + - name: Lint + run: | + source .venvs/dev_workspace/bin/activate + python -m finecode run lint + shell: bash + + - name: Build artifacts + id: build + if: runner.os == 'Linux' + run: | + source .venvs/dev_workspace/bin/activate + python -m finecode run build_artifact + shell: bash - # - name: Lint + # - name: Run unit tests + # if: ${{ !cancelled() }} # run: | - # poetry run python -m finecode run lint + # source .venvs/dev_workspace/bin/activate + # python -m finecode run test # shell: bash - - name: Build all packages - if: runner.os == 'Linux' + - name: Publish to TestPyPI and verify + if: runner.os == 'Linux' && github.event_name == 'workflow_dispatch' && inputs.publish_testpypi + env: + FINECODE_CONFIG_PUBLISH_AND_VERIFY_ARTIFACT__INIT_REPOSITORY_PROVIDER__REPOSITORIES: '[{"name": "testpypi", "url": "https://test.pypi.org/"}]' + FINECODE_CONFIG_PUBLISH_AND_VERIFY_ARTIFACT__INIT_REPOSITORY_PROVIDER__CREDENTIALS_BY_REPOSITORY: '{"testpypi": {"username": "${{ 
secrets.TESTPYPI_USERNAME }}", "password": "${{ secrets.TESTPYPI_PASSWORD }}"}}' run: | source .venvs/dev_workspace/bin/activate - python -m finecode run build + python -m finecode run \ + --map-payload-fields="src-artifact-def-path,dist-artifact-paths" \ + publish_and_verify_artifact \ + --src-artifact-def-path="build_artifact.src_artifact_def_path" \ + --dist-artifact-paths="build_artifact.build_output_paths" shell: bash - - name: Collect all distribution packages - if: runner.os == 'Linux' + - name: Publish to PyPI and verify + if: runner.os == 'Linux' && startsWith(github.ref, 'refs/tags/') + env: + FINECODE_CONFIG_PUBLISH_AND_VERIFY_ARTIFACT__INIT_REPOSITORY_PROVIDER__REPOSITORIES: '[{"name": "pypi", "url": "https://pypi.org/"}]' + FINECODE_CONFIG_PUBLISH_AND_VERIFY_ARTIFACT__INIT_REPOSITORY_PROVIDER__CREDENTIALS_BY_REPOSITORY: '{"pypi": {"username": "${{ secrets.PYPI_USERNAME }}", "password": "${{ secrets.PYPI_PASSWORD }}"}}' run: | - # TODO: finecode action to copy only updated packages in dist - mkdir -p dist - cp finecode_extension_api/dist/* dist/ - cp extensions/fine_python_ast/dist/* dist/ - cp extensions/fine_python_black/dist/* dist/ - cp extensions/fine_python_flake8/dist/* dist/ - cp extensions/fine_python_isort/dist/* dist/ - cp extensions/fine_python_module_exports/dist/* dist/ - cp extensions/fine_python_mypy/dist/* dist/ - cp presets/fine_python_format/dist/* dist/ - cp presets/fine_python_lint/dist/* dist/ - cp presets/fine_python_recommended/dist/* dist/ + # TODO: make sure git tag exists (for manual trigger) + source .venvs/dev_workspace/bin/activate + python -m finecode run \ + --map-payload-fields="src-artifact-def-path,dist-artifact-paths" \ + publish_and_verify_artifact \ + --src-artifact-def-path="build_artifact.src_artifact_def_path" \ + --dist-artifact-paths="build_artifact.build_output_paths" + shell: bash + # TODO: try to replace by finecode action - name: Store the distribution packages uses: actions/upload-artifact@v4 if: runner.os 
== 'Linux' with: name: python-package-distributions path: dist/ - - # - name: Run unit tests - # if: ${{ !cancelled() }} - # run: | - # poetry run python -m pytest tests/ - # shell: bash - - publish-to-pypi: - name: >- - Publish Python 🐍 distribution 📦 to PyPI - if: startsWith(github.ref, 'refs/tags/') - needs: - - build - runs-on: ubuntu-24.04 - environment: - name: pypi - url: https://pypi.org/p/finecode - permissions: - id-token: write # IMPORTANT: mandatory for trusted publishing - - steps: - - name: Download all the dists - uses: actions/download-artifact@v4 - with: - name: python-package-distributions - path: dist/ - - name: Publish distribution 📦 to PyPI - uses: pypa/gh-action-pypi-publish@release/v1 - with: - # temporary skip existing packages, because not always all packages at once - # are updated. - # TODO: implement publishing only of changed in finecode - skip-existing: true - - publish-to-testpypi: - name: Publish Python 🐍 distribution 📦 to TestPyPI - needs: - - build - runs-on: ubuntu-24.04 - - environment: - name: testpypi - url: https://test.pypi.org/p/finecode - - permissions: - id-token: write # IMPORTANT: mandatory for trusted publishing - - steps: - - name: Download all the dists - uses: actions/download-artifact@v4 - with: - name: python-package-distributions - path: dist/ - - name: Publish distribution 📦 to TestPyPI - uses: pypa/gh-action-pypi-publish@release/v1 - with: - repository-url: https://test.pypi.org/legacy/ - verbose: true - # temporary skip existing packages, because not always all packages at once - # are updated. 
- # TODO: implement publishing only of changed in finecode - skip-existing: true diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 00000000..590726d3 --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,42 @@ +name: Deploy docs + +on: + push: + tags: + - 'v*' + workflow_dispatch: + +jobs: + deploy: + runs-on: ubuntu-24.04 + + steps: + - uses: actions/checkout@v5 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.13' + + - name: Install docs dependencies + run: | + python -m pip install --upgrade pip + # TODO: prepare_env docs + python -m venv .venvs/docs + source .venvs/docs/bin/activate + python -m pip install --group="docs" + + - name: Build docs + env: + MKDOCS_SITE_URL: https://finecode-dev.github.io + run: | + source .venvs/docs/bin/activate + mkdocs build + + - name: Deploy to finecode.github.io + uses: peaceiris/actions-gh-pages@v4 + with: + personal_token: ${{ secrets.PAGES_DEPLOY_TOKEN }} + external_repository: finecode-dev/finecode.github.io + publish_branch: main + publish_dir: ./site diff --git a/.vscode/tasks.json b/.vscode/tasks.json deleted file mode 100644 index 5d9cada4..00000000 --- a/.vscode/tasks.json +++ /dev/null @@ -1,18 +0,0 @@ -{ - "version": "2.0.0", - "tasks": [ - { - "label": "Sleepdelay", - "type": "shell", - "command": "sleep 15", - "windows": { - "command": "ping 127.0.0.1 -n 15 > nul" - }, - "group": "none", - "presentation": { - "reveal": "silent", - "panel": "new" - } - } - ] -} \ No newline at end of file diff --git a/docs/cli.md b/docs/cli.md new file mode 100644 index 00000000..19aec257 --- /dev/null +++ b/docs/cli.md @@ -0,0 +1,135 @@ +# CLI Reference + +All commands are run from the workspace or project root directory, inside the `dev_workspace` virtual environment. + +```bash +source .venvs/dev_workspace/bin/activate +python -m finecode [options] +``` + +--- + +## `run` + +Run one or more actions across projects. 
+ +``` +python -m finecode run [options] [<action> ...] [payload] [--config.<key>=<value> ...] +``` + +### Options + +| Option | Description | +|---|---| +| `--workdir=<path>` | Use `<path>` as the workspace root instead of `cwd` | +| `--project=<name>` | Run only in this project. Repeatable for multiple projects. | +| `--concurrently` | Run actions concurrently within each project | +| `--trace` | Enable verbose (trace-level) logging | +| `--no-env-config` | Ignore `FINECODE_CONFIG_*` environment variables | +| `--no-save-results` | Do not write action results to the cache directory | + +### Payload + +Named parameters passed to the action payload. All must use `--<name>=<value>` form: + +```bash +python -m finecode run format --save=true +python -m finecode run lint --target=files --file-paths='["src/main.py"]' +``` + +### Config overrides + +Override handler configuration inline: + +```bash +# Action-level (applies to all handlers) +python -m finecode run lint --config.line_length=120 + +# Handler-specific +python -m finecode run lint --config.ruff.line_length=120 --config.mypy.strict=true +``` + +See [Configuration](configuration.md) for full details on config precedence. + +### Behavior + +- With no `--project`: FineCode treats `cwd` (or `--workdir`) as the workspace root, discovers all projects, and runs the action in each project that defines it. +- With `--project`: the action must exist in every specified project. +- Action results are saved to `<venv>/cache/finecode/results/<action>.json` (one entry per project path).
+ +### Examples + +```bash +# Lint all projects +python -m finecode run lint + +# Lint and check_formatting concurrently +python -m finecode run --concurrently lint check_formatting + +# Run only in two specific projects +python -m finecode run --project=fine_python_mypy --project=fine_python_ruff lint + +# Run from a different directory +python -m finecode --workdir=./finecode_extension_api run lint + +# Override ruff line length +python -m finecode run lint --config.ruff.line_length=120 +``` + +--- + +## `prepare-envs` + +Create and populate virtual environments for all handler dependencies. + +``` +python -m finecode prepare-envs [--recreate] [--trace] [--debug] +``` + +Must be run from the workspace or project root. Creates venvs under `.venvs/<env>/` and installs each handler's declared dependencies. + +| Option | Description | +|---|---| +| `--recreate` | Delete and recreate all venvs from scratch | +| `--trace` | Enable verbose logging | +| `--debug` | Wait for a debugpy client on port 5680 before starting | + +--- + +## `dump-config` + +Dump the fully resolved configuration for a project to disk, useful for debugging preset and config merging. + +``` +python -m finecode dump-config --project=<name> [--trace] [--debug] +``` + +Output is written to `<project>/finecode_config_dump/`. + +| Option | Description | +|---|---| +| `--project=<name>` | **(Required)** Project to dump config for | +| `--trace` | Enable verbose logging | +| `--debug` | Wait for a debugpy client on port 5680 | + +--- + +## `start-api` + +Start the FineCode LSP server. Used by the IDE extension — you typically don't call this directly.
+ +``` +python -m finecode start-api --stdio | --socket <port> | --ws [--host <host>] [--port <port>] +``` + +| Option | Description | +|---|---| +| `--stdio` | Communicate over stdin/stdout | +| `--socket <port>` | Start a TCP server on the given port | +| `--ws` | Start a WebSocket server | +| `--host <host>` | Host for TCP/WS server (default: 127.0.0.1 for TCP) | +| `--port <port>` | Port for TCP/WS server | +| `--mcp` | Also start an MCP server | +| `--mcp-port <port>` | Port for the MCP server | +| `--trace` | Enable verbose logging | +| `--debug` | Wait for a debugpy client on port 5680 | diff --git a/docs/concepts.md b/docs/concepts.md new file mode 100644 index 00000000..812a03cf --- /dev/null +++ b/docs/concepts.md @@ -0,0 +1,125 @@ +# Concepts + +Understanding a few core concepts makes the rest of FineCode straightforward. + +## Action + +An **Action** is a named operation — `lint`, `format`, `build_artifact`, etc. It is defined as a Python class that declares the types of its payload, execution context, and result: + +```python +class LintAction(code_action.Action[LintRunPayload, LintRunContext, LintRunResult]): + PAYLOAD_TYPE = LintRunPayload + RUN_CONTEXT_TYPE = LintRunContext + RESULT_TYPE = LintRunResult +``` + +Actions are identified by their **import path** (e.g. `finecode_extension_api.actions.lint.LintAction`), not by the name used in config. The config name is just a human-readable alias. + +Actions can be called from: + +- the CLI (`python -m finecode run lint`) +- the IDE via the LSP server (diagnostics, code actions, formatting) +- other handlers + +## ActionHandler + +An **ActionHandler** is a concrete implementation of an action. Multiple handlers can be registered for a single action. For example, the `lint` action might have handlers for ruff, flake8, and mypy — each independently checking the code.
+ +Each handler: + +- specifies which **virtual environment** (`env`) it runs in +- declares its own **dependencies** (installed automatically by `prepare-envs`) +- receives the action payload and returns a result + +### Sequential vs. concurrent execution + +Handlers for an action run either **sequentially** (default) or **concurrently**. + +```mermaid +flowchart LR + subgraph Sequential + direction TB + P1[payload] --> H1[handler 1] + H1 -->|current_result| H2[handler 2] + H2 -->|current_result| H3[handler 3] + H3 --> R1[result] + end + + subgraph Concurrent + direction TB + P2[payload] --> H4[handler 1] + P2 --> H5[handler 2] + P2 --> H6[handler 3] + H4 --> M[merge] + H5 --> M + H6 --> M + M --> R2[result] + end +``` + +**Sequential mode** (default): handlers run one after another. Each handler can read the accumulated result so far via `context.current_result`. Useful when handlers depend on each other's output (e.g. formatter → save-to-disk). + +**Concurrent mode** (`run_handlers_concurrently: true`): all handlers run in parallel and results are merged afterward. Accessing `context.current_result` in concurrent mode raises `RuntimeError`. Useful for independent linters. + +## Preset + +A **Preset** is a Python package that bundles action and handler declarations into a reusable, distributable configuration. Users install a preset as a `dev_workspace` dependency and reference it in `pyproject.toml`: + +```toml +[tool.finecode] +presets = [{ source = "fine_python_recommended" }] +``` + +A preset contains a `preset.toml` file that declares which handlers to activate for which actions. The user's `pyproject.toml` configuration is merged on top of the preset, giving the user full control to override, extend, or disable individual handlers. + +### Handler modes + +When configuring an action in `pyproject.toml`, you can control how your configuration relates to preset handlers: + +- **Default (additive):** your handlers are added to the preset's handlers. 
+- **`handlers_mode = "replace"`:** your handler list completely replaces the preset's handlers for that action. +- **`disabled = true` on a handler entry:** disables that specific inherited handler. + +## Project + +A **Project** is any directory containing a `pyproject.toml` with a `[tool.finecode]` section. FineCode discovers all projects under the workspace root automatically. + +A project may belong to a **workspace** — a directory containing multiple projects. FineCode handles multi-project workspaces transparently: running `python -m finecode run lint` from the workspace root runs lint in all projects that define it. + +## Workspace Manager and Extension Runner + +FineCode has two runtime components: + +### Workspace Manager (WM) + +The `finecode` package. It: + +- discovers projects and resolves merged configuration +- manages virtual environments (`prepare-envs`) +- exposes an **LSP server** to the IDE +- delegates action execution to Extension Runners + +### Extension Runner (ER) + +The `finecode_extension_runner` package. It: + +- runs inside an isolated virtual environment (e.g. `.venvs/dev_no_runtime`) +- imports and executes handler code +- communicates results back to the WM via LSP/JSON-RPC + +The WM/ER split means handler dependencies never pollute the workspace Python environment. + +## Environments + +Each handler declares an `env` (e.g. `dev_no_runtime`, `runtime`). FineCode creates a separate virtualenv for each env name it encounters, and installs the handler's declared `dependencies` into it. `prepare-envs` automates this. + +```toml +# Example: handler in pyproject.toml +[[tool.finecode.action.lint.handlers]] +name = "ruff" +source = "fine_python_ruff.RuffLintFilesHandler" +env = "dev_no_runtime" +dependencies = ["fine_python_ruff~=0.2.0"] +``` + +The env name is arbitrary — it's just a label FineCode uses to group handlers that share a virtualenv. 
diff --git a/docs/configuration.md b/docs/configuration.md new file mode 100644 index 00000000..f26bc29e --- /dev/null +++ b/docs/configuration.md @@ -0,0 +1,156 @@ +# Configuration + +FineCode merges configuration from multiple sources in order of increasing priority: + +``` +preset.toml → pyproject.toml → environment variables → CLI flags +``` + +Higher-priority sources override lower-priority ones. + +## pyproject.toml + +All FineCode configuration lives under `[tool.finecode]`. + +### Enabling presets + +```toml +[tool.finecode] +presets = [ + { source = "fine_python_recommended" }, + { source = "my_custom_preset" }, +] +``` + +Presets are applied in order. Later presets' handlers are added after earlier ones. + +### Declaring actions and handlers + +You can declare or extend actions directly in your project: + +```toml +[tool.finecode.action.lint] +source = "finecode_extension_api.actions.lint.LintAction" +handlers = [ + { name = "ruff", source = "fine_python_ruff.RuffLintFilesHandler", env = "dev_no_runtime", dependencies = ["fine_python_ruff~=0.2.0"] }, + { name = "mypy", source = "fine_python_mypy.MypyLintHandler", env = "dev_no_runtime", dependencies = ["fine_python_mypy~=0.3.0"] }, +] +``` + +### Replacing preset handlers + +To completely replace the handlers from presets for an action: + +```toml +[tool.finecode.action.lint] +source = "finecode_extension_api.actions.lint.LintAction" +handlers_mode = "replace" +handlers = [ + { name = "mypy", source = "fine_python_mypy.MypyLintHandler", env = "dev_no_runtime", dependencies = ["fine_python_mypy~=0.3.0"] }, +] +``` + +### Disabling a specific handler + +```toml +[tool.finecode.action.lint] +handlers = [ + { name = "flake8", disabled = true }, +] +``` + +### Configuring a handler + +Use `[[tool.finecode.action_handler]]` to configure a handler by its source path: + +```toml +[[tool.finecode.action_handler]] +source = "fine_python_ruff.RuffLintFilesHandler" +config.extend_select = ["B", "I"] 
+config.line_length = 100 + +[[tool.finecode.action_handler]] +source = "fine_python_flake8.Flake8LintFilesHandler" +config.max_line_length = 88 +config.extend_ignore = ["E203", "E501"] +``` + +### Environment-specific dependencies + +You can pin or override dependencies installed into each env: + +```toml +[tool.finecode.env.dev_no_runtime.dependencies] +fine_python_ruff = { path = "./my_local_ruff_fork", editable = true } +``` + +## Environment variables + +Override handler config at runtime without modifying files. + +**Format:** + +``` +FINECODE_CONFIG___= +FINECODE_CONFIG_____= +``` + +- ``, ``, `` are **uppercase**, separated by double underscores (`__`) +- Values are parsed as **JSON** (use `"true"`, `123`, `"string"`, `["a","b"]`, etc.) + +**Examples:** + +```bash +# Set line_length for all handlers of the lint action +FINECODE_CONFIG_LINT__LINE_LENGTH=100 python -m finecode run lint + +# Set line_length only for the ruff handler +FINECODE_CONFIG_LINT__RUFF__LINE_LENGTH=120 python -m finecode run lint + +# Pass a JSON array +FINECODE_CONFIG_LINT__RUFF__EXTEND_SELECT='["B","I"]' python -m finecode run lint +``` + +To disable env var config entirely: + +```bash +python -m finecode run --no-env-config lint +``` + +## CLI config flags + +Override config inline on the command line. CLI flags take precedence over env vars. 
+ +**Format:** + +``` +--config.= +--config..= +``` + +**Examples:** + +```bash +# Action-level: applies to all handlers +python -m finecode run lint --config.line_length=120 + +# Handler-specific +python -m finecode run lint --config.ruff.line_length=120 + +# Combined +python -m finecode run lint --config.ruff.line_length=120 --config.mypy.strict=true + +# CLI overrides env vars (line_length will be 120) +FINECODE_CONFIG_LINT__RUFF__LINE_LENGTH=100 python -m finecode run lint --config.ruff.line_length=120 +``` + +## Inspecting resolved configuration + +Dump the fully merged configuration for a project to a file: + +```bash +python -m finecode dump-config --project=my_project +# Output written to finecode_config_dump/ +``` + +This is useful for debugging config merging from multiple presets. diff --git a/docs/getting-started.md b/docs/getting-started.md new file mode 100644 index 00000000..f0a7a5ef --- /dev/null +++ b/docs/getting-started.md @@ -0,0 +1,96 @@ +# Getting Started + +This guide walks you through installing FineCode, applying an existing preset, and running your first actions. + +## Prerequisites + +- Python 3.11–3.14 +- pip 25.1 or newer (for `--group` support) + +```bash +python -m pip install --upgrade pip +``` + +## 1. Add FineCode to your project + +FineCode is installed into a dedicated `dev_workspace` virtual environment, separate from your project's runtime dependencies. This keeps tooling isolated. + +Add the `dev_workspace` dependency group to your `pyproject.toml`: + +```toml +[dependency-groups] +dev_workspace = ["finecode==0.3.*"] +``` + +Create the venv and install: + +```bash +python -m venv .venvs/dev_workspace +source .venvs/dev_workspace/bin/activate # Windows: .venvs\dev_workspace\Scripts\activate +python -m pip install --group="dev_workspace" +``` + +## 2. Add a preset + +Presets bundle ready-made tool configurations. 
Add `fine_python_recommended` to get linting and formatting for Python: + +```toml +[dependency-groups] +dev_workspace = ["finecode==0.3.*", "fine_python_recommended==0.3.*"] +``` + +Reinstall after updating the dependency group: + +```bash +python -m pip install --group="dev_workspace" +``` + +### Available presets + +| Preset | What it includes | +|---|---| +| `fine_python_recommended` | Ruff + Flake8 linting, Ruff formatter + isort | +| `fine_python_lint` | Ruff, Flake8, Pyrefly linting only | +| `fine_python_format` | Ruff formatter + isort only | + +## 3. Enable the preset in config + +Tell FineCode which preset to use: + +```toml +[tool.finecode] +presets = [{ source = "fine_python_recommended" }] +``` + +## 4. Prepare environments + +FineCode runs each tool handler in its own virtual environment. Set them up with: + +```bash +python -m finecode prepare-envs +``` + +This installs the handler dependencies (ruff, flake8, etc.) into isolated venvs under `.venvs/`. + +## 5. Run actions + +```bash +# Lint all projects in the workspace +python -m finecode run lint + +# Check formatting (without modifying files) +python -m finecode run check_formatting + +# Format all files +python -m finecode run format + +# Run lint and check_formatting concurrently +python -m finecode run --concurrently lint check_formatting +``` + +## Next steps + +- [IDE Integration](ide-integration.md) — set up the VSCode extension for real-time feedback +- [Configuration](configuration.md) — customize tool settings and override handler config +- [Concepts](concepts.md) — understand how Actions, Handlers, and Presets fit together +- [Creating an Extension](guides/creating-extension.md) — write your own tool integration diff --git a/docs/guides/creating-extension.md b/docs/guides/creating-extension.md new file mode 100644 index 00000000..5ee6757b --- /dev/null +++ b/docs/guides/creating-extension.md @@ -0,0 +1,178 @@ +# Creating an Extension + +An extension is a Python package that implements 
one or more **ActionHandlers**. Each handler provides the logic for executing a specific action (e.g. running a linter, formatter, or build tool). + +## 1. Create the package + +``` +my_linter/ + pyproject.toml + my_linter/ + __init__.py + handler.py +``` + +**`pyproject.toml`** — declare `finecode_extension_api` as a dependency: + +```toml +[project] +name = "my_linter" +version = "0.1.0" +requires-python = ">=3.11" +dependencies = ["finecode_extension_api~=0.4.0"] + +[build-system] +requires = ["setuptools>=64"] +build-backend = "setuptools.build_meta" +``` + +## 2. Implement a handler + +Import the action you want to handle and subclass `ActionHandler`: + +```python +# my_linter/handler.py +from finecode_extension_api import code_action +from finecode_extension_api.actions.lint_files import ( + LintFilesAction, + LintFilesRunPayload, + LintFilesRunContext, + LintFilesRunResult, + LintMessage, +) + + +class MyLinterHandler( + code_action.ActionHandler[ + LintFilesRunPayload, + LintFilesRunContext, + LintFilesRunResult, + ] +): + action = LintFilesAction + + async def run( + self, payload: LintFilesRunPayload, context: LintFilesRunContext + ) -> LintFilesRunResult: + diagnostics: list[LintMessage] = [] + + for file_path in payload.file_paths: + # run your tool and collect results + messages = run_my_tool(file_path) + diagnostics.extend(messages) + + return LintFilesRunResult(diagnostics=diagnostics) +``` + +## 3. Export from `__init__.py` + +```python +# my_linter/__init__.py +from my_linter.handler import MyLinterHandler + +__all__ = ["MyLinterHandler"] +``` + +## 4. 
Register the handler in a project + +Add the handler to the target action in `pyproject.toml`: + +```toml +[tool.finecode.action.lint] +source = "finecode_extension_api.actions.lint.LintAction" +handlers = [ + { + name = "my_linter", + source = "my_linter.MyLinterHandler", + env = "dev_no_runtime", + dependencies = ["my_linter~=0.1.0"] + } +] +``` + +Then run `python -m finecode prepare-envs` to install your handler into the venv. + +## Handler configuration + +To make your handler configurable, define a config model and declare `CONFIG_TYPE`: + +```python +import dataclasses +from finecode_extension_api import code_action +from finecode_extension_api.actions.lint_files import ( + LintFilesAction, LintFilesRunPayload, LintFilesRunContext, LintFilesRunResult, +) + + +@dataclasses.dataclass +class MyLinterConfig: + line_length: int = 88 + extend_ignore: list[str] = dataclasses.field(default_factory=list) + + +class MyLinterHandler( + code_action.ActionHandler[ + LintFilesRunPayload, + LintFilesRunContext, + LintFilesRunResult, + ] +): + action = LintFilesAction + CONFIG_TYPE = MyLinterConfig + + async def run( + self, payload: LintFilesRunPayload, context: LintFilesRunContext + ) -> LintFilesRunResult: + config: MyLinterConfig = context.handler_config + # use config.line_length, config.extend_ignore, ... + ... +``` + +Users can then configure it in `pyproject.toml`: + +```toml +[[tool.finecode.action_handler]] +source = "my_linter.MyLinterHandler" +config.line_length = 100 +config.extend_ignore = ["E501"] +``` + +Or via CLI/env vars at runtime (see [Configuration](../configuration.md)). + +## Handler lifecycle + +For handlers that need to start a background process (e.g. a language server), use the lifecycle hooks: + +```python +class MyLspHandler(code_action.ActionHandler[...]): + action = LintFilesAction + + async def run(self, payload, context): + ... 
+ + def on_start(self) -> None: + # called once when the handler is first loaded + self._process = start_my_server() + + def on_shutdown(self) -> None: + # called when the Extension Runner shuts down + self._process.terminate() +``` + +## Sequential handlers: using `current_result` + +If your handler runs in sequential mode and depends on the result of a previous handler, read it from the context: + +```python +async def run(self, payload, context): + previous: MyActionResult = context.current_result + # extend or modify the previous result + ... +``` + +!!! warning + `context.current_result` raises `RuntimeError` in concurrent handler mode. Only use it when `run_handlers_concurrently` is `false` (the default). + +## Available actions to handle + +See the [Built-in Actions reference](../reference/actions.md) for the full list of action classes, payload types, and result types you can implement handlers for. diff --git a/docs/guides/creating-preset.md b/docs/guides/creating-preset.md new file mode 100644 index 00000000..e0eb579d --- /dev/null +++ b/docs/guides/creating-preset.md @@ -0,0 +1,124 @@ +# Creating a Preset + +A **Preset** is a Python package that bundles action and handler declarations into a reusable, distributable configuration. Teams use presets to standardize tooling across projects without duplicating config. + +## 1. Create the package + +``` +my_preset/ + pyproject.toml + my_preset/ + __init__.py + preset.toml +``` + +**`pyproject.toml`**: + +```toml +[project] +name = "my_preset" +version = "0.1.0" +requires-python = ">=3.11" +dependencies = [] # no runtime dependencies needed for a preset-only package + +[build-system] +requires = ["setuptools>=64"] +build-backend = "setuptools.build_meta" +``` + +**`my_preset/__init__.py`** — can be empty: + +```python +``` + +## 2. Declare actions in `preset.toml` + +The `preset.toml` file lives inside the package directory (next to `__init__.py`). 
It uses the same `[tool.finecode.*]` syntax as `pyproject.toml`. + +```toml +# my_preset/my_preset/preset.toml + +[tool.finecode.action.lint] +source = "finecode_extension_api.actions.lint.LintAction" +handlers = [ + { name = "ruff", source = "fine_python_ruff.RuffLintFilesHandler", env = "dev_no_runtime", dependencies = [ + "fine_python_ruff~=0.2.0", + ] }, + { name = "mypy", source = "fine_python_mypy.MypyLintHandler", env = "dev_no_runtime", dependencies = [ + "fine_python_mypy~=0.3.0", + ] }, +] + +[tool.finecode.action.format] +source = "finecode_extension_api.actions.format.FormatAction" +handlers = [ + { name = "ruff", source = "fine_python_ruff.RuffFormatFilesHandler", env = "dev_no_runtime", dependencies = [ + "fine_python_ruff~=0.2.0", + ] }, + { name = "save", source = "finecode_builtin_handlers.SaveFormatFilesHandler", env = "dev_no_runtime", dependencies = [ + "finecode_builtin_handlers~=0.2.0", + ] }, +] + +# Set default handler configs +[[tool.finecode.action_handler]] +source = "fine_python_ruff.RuffLintFilesHandler" +config.extend_select = ["B", "I"] +config.line_length = 88 +``` + +## 3. Use the preset in a project + +Install the preset package (e.g. from PyPI or a local path) into the `dev_workspace` dependency group: + +```toml +# User's pyproject.toml +[dependency-groups] +dev_workspace = [ + "finecode==0.3.*", + "my_preset==0.1.*", +] + +[tool.finecode] +presets = [{ source = "my_preset" }] +``` + +Then run: + +```bash +python -m pip install --group="dev_workspace" +python -m finecode prepare-envs +``` + +## 4. Allow users to override your defaults + +Users can add `[[tool.finecode.action_handler]]` entries in their own `pyproject.toml` to override any config you set in `preset.toml`. Your preset's values are the baseline; user config always wins. 
+ +Users can also: + +- Add more handlers to actions you declared +- Replace all handlers with `handlers_mode = "replace"` +- Disable specific handlers with `disabled = true` + +## Composing multiple presets + +A project can activate multiple presets. They are applied in order, and later preset handlers are added after earlier ones: + +```toml +[tool.finecode] +presets = [ + { source = "my_lint_preset" }, + { source = "my_format_preset" }, +] +``` + +A preset can itself reference other presets in its `preset.toml` if needed. + +## Publishing + +Presets are regular Python packages — publish them to PyPI with any standard build tool: + +```bash +python -m build +python -m twine upload dist/* +``` diff --git a/docs/guides/workspace.md b/docs/guides/workspace.md new file mode 100644 index 00000000..1ffdf637 --- /dev/null +++ b/docs/guides/workspace.md @@ -0,0 +1,122 @@ +# Multi-Project Workspace + +FineCode natively supports workspaces containing multiple projects. This is common in monorepos where each package is a separate Python project. + +## Structure + +A workspace is a directory containing one or more projects. Each project has its own `pyproject.toml` with `[tool.finecode]`: + +``` +my_workspace/ + pyproject.toml ← workspace-level (optional) + package_a/ + pyproject.toml ← project A + src/package_a/ + package_b/ + pyproject.toml ← project B + src/package_b/ + common_preset/ ← shared preset package + pyproject.toml + common_preset/ + preset.toml +``` + +## Running actions across all projects + +Run from the workspace root to target all projects: + +```bash +python -m finecode run lint +``` + +FineCode discovers all `pyproject.toml` files under the workspace root, finds those with `[tool.finecode]`, and runs the action in each. 
+ +To run concurrently across projects: + +```bash +python -m finecode run --concurrently lint check_formatting +``` + +## Filtering to specific projects + +```bash +# Single project +python -m finecode run --project=package_a lint + +# Multiple projects +python -m finecode run --project=package_a --project=package_b lint +``` + +When `--project` is specified, the action must exist in all listed projects. + +## Sharing configuration across projects + +The recommended approach for sharing config is a **local preset package** in the workspace. Each subproject installs it as a dependency and references it in `pyproject.toml`. + +**Why a package, not hierarchical config?** + +- Subprojects don't depend on workspace directory structure — they can be moved or extracted without changing tool config +- Configuration is fully explicit: the complete config is visible inside each subproject +- No implicit workspace-root lookup needed + +**Example — shared lint configuration:** + +``` +my_workspace/ + my_lint_config/ + pyproject.toml + my_lint_config/ + preset.toml ← declares ruff, mypy handlers with shared settings + package_a/ + pyproject.toml ← references my_lint_config as a preset + package_b/ + pyproject.toml ← references my_lint_config as a preset +``` + +```toml +# package_a/pyproject.toml +[dependency-groups] +dev_workspace = [ + "finecode==0.3.*", + "my_lint_config", # local package +] + +[tool.finecode.env.dev_workspace.dependencies] +my_lint_config = { path = "../my_lint_config", editable = true } + +[tool.finecode] +presets = [{ source = "my_lint_config" }] +``` + +## Saving and reading action results + +Results of actions are saved to `/cache/finecode/results/.json`, keyed by project path. 
This makes it easy to collect results from all projects in CI: + +```bash +python -m finecode run --concurrently lint check_formatting +cat .venvs/dev_workspace/cache/finecode/results/lint.json +``` + +To opt out of saving results: + +```bash +python -m finecode run --no-save-results lint +``` + +## CI usage + +```bash +# Run lint and formatting check in all projects, fail if any fails +python -m finecode run --concurrently lint check_formatting + +# Save results for later processing +python -m finecode run lint +cat .venvs/dev_workspace/cache/finecode/results/lint.json +``` + +To pass results between CI steps via environment variables (legacy approach): + +```bash +python -m finecode run --save-results-to-env build_artifact +# Result is available as FINECODE_RESULT__BUILD_ARTIFACT +``` diff --git a/docs/ide-integration.md b/docs/ide-integration.md new file mode 100644 index 00000000..2947e194 --- /dev/null +++ b/docs/ide-integration.md @@ -0,0 +1,70 @@ +# IDE Integration + +FineCode exposes a standard **Language Server Protocol (LSP)** server that IDE extensions connect to. This gives you real-time diagnostics, code actions, formatting, and more — powered by the same tool configurations you use in the CLI. + +## VSCode + +Install the [FineCode VSCode extension](https://github.com/finecode-dev/finecode-vscode). + +The extension: + +- Automatically starts the FineCode LSP server when you open a workspace +- Shows linting diagnostics inline as you type +- Provides quick-fix code actions +- Formats files on save (when configured) +- Exposes the FineCode action tree in the sidebar + +### Requirements + +- FineCode installed in your `dev_workspace` venv (see [Getting Started](getting-started.md)) +- `prepare-envs` run at least once so handler venvs are set up + +### Configuration + +The extension discovers the `dev_workspace` venv automatically from `.venvs/dev_workspace/`. No per-project extension configuration is required — everything comes from `pyproject.toml`. 
+ +## How the LSP server works + +```mermaid +sequenceDiagram + participant IDE as IDE Extension + participant WM as Workspace Manager (LSP) + participant ER as Extension Runner + + IDE->>WM: textDocument/didOpen + WM->>ER: run lint_files action + ER-->>WM: LintFilesRunResult (diagnostics) + WM-->>IDE: textDocument/publishDiagnostics + + IDE->>WM: textDocument/formatting + WM->>ER: run format_files action + ER-->>WM: FormatFilesRunResult (edits) + WM-->>IDE: TextEdit[] +``` + +The WM translates LSP requests into FineCode actions and delegates execution to the appropriate Extension Runner. Results are translated back into LSP responses. + +## Starting the server manually + +If you need to connect a custom client or debug the server: + +```bash +# stdio (most common for LSP clients) +python -m finecode start-api --stdio + +# TCP (useful for debugging) +python -m finecode start-api --socket 2087 + +# WebSocket +python -m finecode start-api --ws --port 2087 +``` + +## MCP server + +FineCode also supports the **Model Context Protocol (MCP)**, which allows AI agents to invoke FineCode actions directly. + +```bash +python -m finecode start-api --stdio --mcp --mcp-port 3000 +``` + +This starts both the LSP server (for IDE) and an MCP server simultaneously. diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 00000000..65bcefef --- /dev/null +++ b/docs/index.md @@ -0,0 +1,137 @@ +# FineCode + +**Stop configuring tools. Start using them.** + +Every Python project needs linting, formatting, type checking. And in every project you end up doing the same thing: installing the same tools, writing the same configuration, wiring them up to your IDE — again. + +FineCode solves this once. 
+ +## One line to get linting and formatting + +```toml +# pyproject.toml +[dependency-groups] +dev_workspace = ["finecode==0.3.*", "fine_python_recommended==0.3.*"] + +[tool.finecode] +presets = [{ source = "fine_python_recommended" }] +``` + +```bash +python -m finecode prepare-envs +python -m finecode run lint check_formatting +``` + +That's it. Ruff, Flake8, and isort — installed, configured, and running. No per-tool setup, no config files to write. + +## Your IDE just works + +Install the [VSCode extension](ide-integration.md) and get inline diagnostics, quick fixes, and format-on-save — powered by the same configuration as your CLI. No separate language server setup, no per-project extension configuration. + +## Share configuration across projects + +Package your tool configuration and share it across your team's projects as a regular Python package: + +```toml +# Any project that wants your standard setup: +[tool.finecode] +presets = [{ source = "my_team_standards" }] +``` + +Update the preset package — all projects pick it up on next install. No drift, no copy-paste. + +## Tools stay out of your project + +Dev tools, runtime dependencies, and your project stay in separate virtual environments. Ruff's dependencies don't mix with your project's dependencies. Mypy doesn't break because something else updated a package. Everything is contained. + +## Your rules, not ours + +Presets give you a working setup instantly, but nothing is locked in. Every default can be overridden: + +```toml +# Adjust a single handler's config +[[tool.finecode.action_handler]] +source = "fine_python_ruff.RuffLintFilesHandler" +config.line_length = 120 + +# Swap out individual tools while keeping the rest of the preset +[tool.finecode.action.lint] +handlers = [ + { name = "flake8", disabled = true }, + { name = "my_linter", source = "my_team.MyLinterHandler", ... 
}, +] + +# Or replace everything and build from scratch +[tool.finecode.action.lint] +handlers_mode = "replace" +handlers = [...] +``` + +You can adopt FineCode incrementally — start with a preset, customise as needed, replace entirely if you want. There's no framework lock-in. + +## Virtual environment management included + +FineCode manages virtual environments for you, with a clear separation by purpose: + +```text +.venvs/ + dev_workspace/ ← FineCode itself, presets, dev tools + dev_no_runtime/ ← linters, formatters, type checkers + runtime/ ← your project's runtime dependencies + docs/ ← documentation tools +``` + +Each tool runs in the right environment. Runtime dependencies never get polluted by dev tools, and dev tools never break because a runtime package updated. + +In a monorepo with many packages, this becomes especially valuable — FineCode handles environment setup across all of them automatically. No manual venv juggling, no shared environment where everything mixes together. + +```bash +# One command sets up all environments across all packages +python -m finecode prepare-envs +``` + +## Simpler CI, any platform + +Without FineCode, CI pipelines tend to grow — separate steps for each tool, platform-specific workarounds, duplicated logic between local and CI runs. + +With FineCode, your CI is just: + +```yaml +- run: python -m finecode run lint check_formatting +``` + +That's the same command you run locally. FineCode handles tool invocation, configuration, and environment differences. Switch CI provider, add a new linter, change a tool version — the pipeline stays the same. + +## Not just linting and formatting + +FineCode ships with built-in actions for the most common workflows — lint, format, type-check, build, publish — but actions are just Python classes. You can define your own for anything that fits your development process: running migrations, generating code, validating architecture, checking licenses, or anything specific to your project. 
+ +Your custom actions get the same CLI interface, IDE integration, and environment isolation as the built-in ones — for free. + +## Extend it with your own tools + +FineCode has a clean handler interface. If you have an internal tool, a custom linter, or anything that fits into a lint/format/build workflow — you can plug it in and get CLI and IDE integration for free. + +```python +class MyLinterHandler(ActionHandler[...]): + action = LintFilesAction + + async def run(self, payload, context) -> LintFilesRunResult: + ... # your tool logic here +``` + +## Your AI assistant knows your tools + +FineCode exposes an [MCP server](ide-integration.md#mcp-server) that AI assistants connect to. Instead of guessing which linter you use, how to run it, or what flags to pass — the assistant gets the exact tool configuration from your project directly. + +No explanations needed. No wrong commands. The assistant just knows. + +## Community + +Have questions, ideas, or want to share what you've built? Join the FineCode community on [Discord](https://discord.gg/nwb3CRVN). + +## Ready to try it? + +[Get started in 5 minutes →](getting-started.md) + +Or browse what's included: [available presets and extensions](reference/extensions.md). diff --git a/docs/reference/actions.md b/docs/reference/actions.md new file mode 100644 index 00000000..7bab75bf --- /dev/null +++ b/docs/reference/actions.md @@ -0,0 +1,220 @@ +# Built-in Actions + +All built-in actions are defined in `finecode_extension_api.actions`. Use their import paths as the `source` when declaring actions in `pyproject.toml` or `preset.toml`. + +--- + +## `lint` + +Run linting on a project or specific files. 
+ +- **Source:** `finecode_extension_api.actions.lint.LintAction` +- **Default handler execution:** concurrent + +**Payload fields:** + +| Field | Type | Default | Description | +|---|---|---|---| +| `target` | `"project"` \| `"files"` | `"project"` | Lint the whole project or specific files | +| `file_paths` | `list[Path]` | `[]` | Files to lint (required when `target="files"`) | + +**Result:** list of diagnostics (file, line, column, message, severity) + +--- + +## `lint_files` + +Lint a specific set of files, with language filtering. + +- **Source:** `finecode_extension_api.actions.lint_files.LintFilesAction` +- **Default handler execution:** concurrent + +Similar to `lint` but designed for language-aware per-file linting. Used internally by the LSP server for real-time diagnostics. + +--- + +## `format` + +Format a project or specific files. + +- **Source:** `finecode_extension_api.actions.format.FormatAction` +- **Default handler execution:** sequential + +**Payload fields:** + +| Field | Type | Default | Description | +|---|---|---|---| +| `save` | `bool` | `true` | Write formatted content back to disk | +| `target` | `"project"` \| `"files"` | `"project"` | Format whole project or specific files | +| `file_paths` | `list[Path]` | `[]` | Files to format (required when `target="files"`) | + +!!! note + The `save` payload field controls whether changes are written to disk. The built-in `SaveFormatFilesHandler` reads this flag. If you omit the save handler from your preset, files won't be written regardless. + +--- + +## `format_files` + +Format a specific set of files, with language filtering. + +- **Source:** `finecode_extension_api.actions.format_files.FormatFilesAction` +- **Default handler execution:** sequential + +Used internally by the LSP server for on-save formatting. + +--- + +## `check_formatting` + +Check whether files are formatted correctly, without modifying them. 
+ +- **Source:** `finecode_extension_api.actions.check_formatting.CheckFormattingAction` + +Returns a non-zero exit code if any file is not properly formatted. + +--- + +## `build_artifact` + +Build a distributable artifact (e.g. a Python wheel). + +- **Source:** `finecode_extension_api.actions.build_artifact_action.BuildArtifactAction` + +**Payload fields:** + +| Field | Type | Default | Description | +|---|---|---|---| +| `src_artifact_def_path` | `Path \| None` | `None` | Path to the artifact definition. If omitted, builds the current project. | + +**Result fields:** + +| Field | Type | Description | +|---|---|---| +| `src_artifact_def_path` | `Path` | Path of the artifact that was built | +| `build_output_paths` | `list[Path]` | Paths of the generated build outputs | + +--- + +## `get_src_artifact_version` + +Get the current version of a source artifact. + +- **Source:** `finecode_extension_api.actions.get_src_artifact_version.GetSrcArtifactVersionAction` + +Default handler in this repo: `fine_python_setuptools_scm.GetSrcArtifactVersionSetuptoolsScmHandler` + +--- + +## `get_dist_artifact_version` + +Get the version of a distributable artifact. + +- **Source:** `finecode_extension_api.actions.get_dist_artifact_version.GetDistArtifactVersionAction` + +--- + +## `get_src_artifact_registries` + +List available registries for publishing an artifact. + +- **Source:** `finecode_extension_api.actions.get_src_artifact_registries.GetSrcArtifactRegistriesAction` + +--- + +## `publish_artifact` + +Publish a built artifact. + +- **Source:** `finecode_extension_api.actions.publish_artifact.PublishArtifactAction` + +--- + +## `publish_artifact_to_registry` + +Publish an artifact to a specific registry. + +- **Source:** `finecode_extension_api.actions.publish_artifact_to_registry.PublishArtifactToRegistryAction` + +--- + +## `is_artifact_published_to_registry` + +Check whether a specific version of an artifact is already published. 
+ +- **Source:** `finecode_extension_api.actions.is_artifact_published_to_registry.IsArtifactPublishedToRegistryAction` + +--- + +## `verify_artifact_published_to_registry` + +Verify that publishing succeeded by checking the registry. + +- **Source:** `finecode_extension_api.actions.verify_artifact_published_to_registry.VerifyArtifactPublishedToRegistryAction` + +--- + +## `list_src_artifact_files_by_lang` + +List source files grouped by programming language. + +- **Source:** `finecode_extension_api.actions.list_src_artifact_files_by_lang.ListSrcArtifactFilesByLangAction` + +--- + +## `group_src_artifact_files_by_lang` + +Group source files by language (internal, used by language-aware actions). + +- **Source:** `finecode_extension_api.actions.group_src_artifact_files_by_lang.GroupSrcArtifactFilesByLangAction` + +--- + +## `prepare_envs` + +Set up virtual environments for all handler dependencies. + +- **Source:** `finecode_extension_api.actions.prepare_envs.PrepareEnvsAction` + +Also available as the `python -m finecode prepare-envs` CLI command. + +--- + +## `install_deps_in_env` + +Install dependencies into a specific environment. + +- **Source:** `finecode_extension_api.actions.install_deps_in_env.InstallDepsInEnvAction` + +--- + +## `dump_config` + +Dump the resolved configuration for a project. + +- **Source:** `finecode_extension_api.actions.dump_config.DumpConfigAction` + +Also available as `python -m finecode dump-config`. + +--- + +## `init_repository_provider` + +Initialize a repository provider (used in artifact publishing flows). + +- **Source:** `finecode_extension_api.actions.init_repository_provider.InitRepositoryProviderAction` + +--- + +## `prepare_runners` + +Prepare Extension Runners (internal, called by the Workspace Manager). + +- **Source:** `finecode_extension_api.actions.prepare_runners.PrepareRunnersAction` + +--- + +## `clean_finecode_logs` + +Remove FineCode log files. 
+ +- **Source:** `finecode_extension_api.actions.clean_finecode_logs.CleanFineCodeLogsAction` diff --git a/docs/reference/extensions.md b/docs/reference/extensions.md new file mode 100644 index 00000000..a81c1c40 --- /dev/null +++ b/docs/reference/extensions.md @@ -0,0 +1,185 @@ +# Extensions + +Extensions provide concrete implementations of actions as handlers. All extensions listed here are maintained in the FineCode repository under `extensions/`. + +To use an extension, add its handler to an action in your `pyproject.toml` or use a [preset](../guides/creating-preset.md) that includes it. The extension package is installed automatically into the handler's virtual environment by `prepare-envs`. + +--- + +## Linting + +### `fine_python_ruff` + +Linting and formatting via [Ruff](https://docs.astral.sh/ruff/). + +| Handler | Action | Description | +|---|---|---| +| `fine_python_ruff.RuffLintFilesHandler` | `lint_files` | Lint Python files with Ruff | +| `fine_python_ruff.RuffFormatFilesHandler` | `format_files` | Format Python files with Ruff formatter | + +**Example config:** + +```toml +[[tool.finecode.action_handler]] +source = "fine_python_ruff.RuffLintFilesHandler" +config.extend_select = ["B", "I"] +config.line_length = 88 +config.target_version = "py311" +``` + +--- + +### `fine_python_flake8` + +Linting via [Flake8](https://flake8.pycqa.org/). + +| Handler | Action | Description | +|---|---|---| +| `fine_python_flake8.Flake8LintFilesHandler` | `lint_files` | Lint Python files with Flake8 | + +**Example config:** + +```toml +[[tool.finecode.action_handler]] +source = "fine_python_flake8.Flake8LintFilesHandler" +config.max_line_length = 88 +config.extend_ignore = ["E203", "E501"] +config.select = [] # disable all standard rules (use only custom rules) +``` + +--- + +### `fine_python_mypy` + +Type checking via [Mypy](https://mypy-lang.org/). 
+ +| Handler | Action | Description | +|---|---|---| +| `fine_python_mypy.MypyLintHandler` | `lint` / `lint_files` | Type-check Python files | + +--- + +### `fine_python_import_linter` + +Architecture validation via [import-linter](https://import-linter.readthedocs.io/). + +| Handler | Action | Description | +|---|---|---| +| `fine_python_import_linter.ImportLinterHandler` | `lint` | Validate import contracts | + +Reads contract definitions from `[tool.importlinter]` in `pyproject.toml`. + +--- + +### `fine_python_pyrefly` + +Type checking and LSP integration via [Pyrefly](https://pyrefly.org/). + +| Handler | Action | Description | +|---|---|---| +| `fine_python_pyrefly.PyreflyLintFilesHandler` | `lint_files` | Type-check with Pyrefly | + +--- + +### `fine_python_ast` + +AST-based analysis for Python source files. + +| Handler | Action | Description | +|---|---|---| +| `fine_python_ast.AstHandler` | Various | AST traversal and analysis | + +--- + +## Formatting + +### `fine_python_black` + +Code formatting via [Black](https://black.readthedocs.io/). + +| Handler | Action | Description | +|---|---|---| +| `fine_python_black.BlackFormatFilesHandler` | `format_files` | Format Python files with Black | + +--- + +### `fine_python_isort` + +Import sorting via [isort](https://pycqa.github.io/isort/). + +| Handler | Action | Description | +|---|---|---| +| `fine_python_isort.IsortFormatFilesHandler` | `format_files` | Sort Python imports | + +**Example config** (compatible with Ruff formatter / Black): + +```toml +[[tool.finecode.action_handler]] +source = "fine_python_isort.IsortFormatFilesHandler" +config.multi_line_output = 3 +config.include_trailing_comma = true +config.line_length = 88 +config.split_on_trailing_comma = true +``` + +--- + +## Package & environment management + +### `fine_python_pip` + +Environment management via pip. 
+ +| Handler | Action | Description | +|---|---|---| +| `fine_python_pip.PipInstallDepsInEnvHandler` | `install_deps_in_env` | Install dependencies with pip | + +**Example config:** + +```toml +[[tool.finecode.action_handler]] +source = "fine_python_pip.PipInstallDepsInEnvHandler" +config.editable_mode = "compat" # "compat", "strict", or default +``` + +--- + +### `fine_python_virtualenv` + +Virtual environment creation. + +| Handler | Action | Description | +|---|---|---| +| `fine_python_virtualenv.VirtualenvPrepareEnvHandler` | `prepare_envs` | Create virtualenvs | + +--- + +### `fine_python_setuptools_scm` + +Version management via [setuptools-scm](https://setuptools-scm.readthedocs.io/). + +| Handler | Action | Description | +|---|---|---| +| `fine_python_setuptools_scm.GetSrcArtifactVersionSetuptoolsScmHandler` | `get_src_artifact_version` | Read version from VCS tags | + +--- + +## Analysis + +### `fine_python_module_exports` + +Analyzes and validates Python module exports. + +| Handler | Action | Description | +|---|---|---| +| `fine_python_module_exports.*` | Various | Validate `__all__` and public API | + +--- + +### `fine_python_package_info` + +Reads package metadata from `pyproject.toml`. 
+ +| Handler | Action | Description | +|---|---|---| +| `fine_python_package_info.*` | Various | Provide package name, version, files | diff --git a/extensions/fine_python_ast/.gitignore b/extensions/fine_python_ast/.gitignore new file mode 100644 index 00000000..cbb2a25b --- /dev/null +++ b/extensions/fine_python_ast/.gitignore @@ -0,0 +1,5 @@ +.venvs +build/ +*.egg-info/ +__pycache__ +finecode_config_dump/ diff --git a/extensions/fine_python_ast/fine_python_ast/activator.py b/extensions/fine_python_ast/fine_python_ast/activator.py new file mode 100644 index 00000000..8f4b6aea --- /dev/null +++ b/extensions/fine_python_ast/fine_python_ast/activator.py @@ -0,0 +1,13 @@ +from fine_python_ast import ast_provider, iast_provider +from finecode_extension_api import extension +from finecode_extension_api.interfaces import iserviceregistry + + +class Activator(extension.ExtensionActivator): + def __init__(self, registry: iserviceregistry.IServiceRegistry) -> None: + self.registry = registry + + def activate(self) -> None: + self.registry.register_impl( + iast_provider.IPythonSingleAstProvider, ast_provider.PythonSingleAstProvider + ) diff --git a/extensions/fine_python_ast/fine_python_ast/ast_provider.py b/extensions/fine_python_ast/fine_python_ast/ast_provider.py index 2bf73b05..6f4e1810 100644 --- a/extensions/fine_python_ast/fine_python_ast/ast_provider.py +++ b/extensions/fine_python_ast/fine_python_ast/ast_provider.py @@ -3,20 +3,23 @@ from fine_python_ast import iast_provider -from finecode_extension_api.interfaces import icache, ifilemanager, ilogger +from finecode_extension_api.interfaces import icache, ifileeditor, ilogger class PythonSingleAstProvider(iast_provider.IPythonSingleAstProvider): CACHE_KEY = "PythonSingleAstProvider" + FILE_OPERATION_AUTHOR = ifileeditor.FileOperationAuthor( + id="PythonSingleAstProvider" + ) def __init__( self, - file_manager: ifilemanager.IFileManager, + file_editor: ifileeditor.IFileEditor, cache: icache.ICache, logger: 
ilogger.ILogger, ): self.cache = cache - self.file_manager = file_manager + self.file_editor = file_editor self.logger = logger async def get_file_ast(self, file_path: Path) -> ast.Module: @@ -30,8 +33,12 @@ async def get_file_ast(self, file_path: Path) -> ast.Module: except icache.CacheMissException: ... - file_content: str = await self.file_manager.get_content(file_path) - file_version: str = await self.file_manager.get_file_version(file_path) + async with self.file_editor.session( + author=self.FILE_OPERATION_AUTHOR + ) as session: + async with session.read_file(file_path=file_path) as file_info: + file_content: str = file_info.content + file_version: str = file_info.version try: ast_instance = ast.parse(file_content) diff --git a/extensions/fine_python_ast/pyproject.toml b/extensions/fine_python_ast/pyproject.toml index de61a520..e4a7367c 100644 --- a/extensions/fine_python_ast/pyproject.toml +++ b/extensions/fine_python_ast/pyproject.toml @@ -1,14 +1,17 @@ [project] name = "fine_python_ast" -version = "0.2.0" +version = "0.3.0a1" description = "" authors = [{ name = "Vladyslav Hnatiuk", email = "aders1234@gmail.com" }] readme = "README.md" requires-python = ">=3.11, <= 3.14" -dependencies = ["finecode_extension_api==0.3.*"] +dependencies = ["finecode_extension_api~=0.4.0a0"] + +[project.entry-points."finecode.activator"] +fine_python_ast = "fine_python_ast.activator:Activator" [dependency-groups] -dev_workspace = ["finecode==0.3.*", "finecode_dev_common_preset==0.2.*"] +dev_workspace = ["finecode~=0.4.0a0", "finecode_dev_common_preset~=0.3.0a0"] [tool.finecode] presets = [{ source = "finecode_dev_common_preset" }] @@ -18,3 +21,8 @@ finecode_dev_common_preset = { path = "../../finecode_dev_common_preset", editab finecode = { path = "../../", editable = true } finecode_extension_runner = { path = "../../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = 
"../../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../../finecode_builtin_handlers", editable = true } +fine_python_recommended = { path = "../../presets/fine_python_recommended", editable = true } +fine_python_lint = { path = "../../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../../presets/fine_python_format", editable = true } diff --git a/extensions/fine_python_black/.gitignore b/extensions/fine_python_black/.gitignore new file mode 100644 index 00000000..cbb2a25b --- /dev/null +++ b/extensions/fine_python_black/.gitignore @@ -0,0 +1,5 @@ +.venvs +build/ +*.egg-info/ +__pycache__ +finecode_config_dump/ diff --git a/extensions/fine_python_black/pyproject.toml b/extensions/fine_python_black/pyproject.toml index fca87674..37d2b894 100644 --- a/extensions/fine_python_black/pyproject.toml +++ b/extensions/fine_python_black/pyproject.toml @@ -1,14 +1,14 @@ [project] name = "fine_python_black" -version = "0.2.0" +version = "0.3.0a1" description = "" authors = [{ name = "Vladyslav Hnatiuk", email = "aders1234@gmail.com" }] readme = "README.md" requires-python = ">=3.11, <= 3.14" -dependencies = ["finecode_extension_api==0.3.*", "black (>=25.1.0,<26.0.0)"] +dependencies = ["finecode_extension_api~=0.4.0a0", "black (>=25.1.0,<26.0.0)"] [dependency-groups] -dev_workspace = ["finecode==0.3.*", "finecode_dev_common_preset==0.2.*"] +dev_workspace = ["finecode~=0.4.0a0", "finecode_dev_common_preset~=0.3.0a0"] [tool.finecode] presets = [{ source = "finecode_dev_common_preset" }] @@ -18,3 +18,8 @@ finecode_dev_common_preset = { path = "../../finecode_dev_common_preset", editab finecode = { path = "../../", editable = true } finecode_extension_runner = { path = "../../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = 
"../../finecode_builtin_handlers", editable = true } +fine_python_recommended = { path = "../../presets/fine_python_recommended", editable = true } +fine_python_lint = { path = "../../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../../presets/fine_python_format", editable = true } diff --git a/extensions/fine_python_flake8/.gitignore b/extensions/fine_python_flake8/.gitignore new file mode 100644 index 00000000..cbb2a25b --- /dev/null +++ b/extensions/fine_python_flake8/.gitignore @@ -0,0 +1,5 @@ +.venvs +build/ +*.egg-info/ +__pycache__ +finecode_config_dump/ diff --git a/extensions/fine_python_flake8/fine_python_flake8/__init__.py b/extensions/fine_python_flake8/fine_python_flake8/__init__.py index 67c886c9..27f2788d 100644 --- a/extensions/fine_python_flake8/fine_python_flake8/__init__.py +++ b/extensions/fine_python_flake8/fine_python_flake8/__init__.py @@ -1,6 +1,6 @@ -from .action import Flake8LintHandler, Flake8LintHandlerConfig +from .action import Flake8LintFilesHandler, Flake8LintFilesHandlerConfig __all__ = [ - "Flake8LintHandler", - "Flake8LintHandlerConfig", + "Flake8LintFilesHandler", + "Flake8LintFilesHandlerConfig", ] diff --git a/extensions/fine_python_flake8/fine_python_flake8/action.py b/extensions/fine_python_flake8/fine_python_flake8/action.py index f55f8e6e..055067fd 100644 --- a/extensions/fine_python_flake8/fine_python_flake8/action.py +++ b/extensions/fine_python_flake8/fine_python_flake8/action.py @@ -12,21 +12,21 @@ from flake8.plugins import finder from finecode_extension_api import code_action -from finecode_extension_api.actions import lint as lint_action +from finecode_extension_api.actions import lint_files as lint_files_action from finecode_extension_api.interfaces import ( icache, - ifilemanager, + ifileeditor, ilogger, iprocessexecutor, ) -def map_flake8_check_result_to_lint_message(result: tuple) -> lint_action.LintMessage: +def map_flake8_check_result_to_lint_message(result: tuple) -> 
lint_files_action.LintMessage: error_code, line_number, column, text, physical_line = result - return lint_action.LintMessage( - range=lint_action.Range( - start=lint_action.Position(line=line_number, character=column), - end=lint_action.Position( + return lint_files_action.LintMessage( + range=lint_files_action.Range( + start=lint_files_action.Position(line=line_number, character=column), + end=lint_files_action.Position( line=line_number, character=len(physical_line) if physical_line is not None else column, ), @@ -35,9 +35,9 @@ def map_flake8_check_result_to_lint_message(result: tuple) -> lint_action.LintMe code=error_code, source="flake8", severity=( - lint_action.LintMessageSeverity.WARNING + lint_files_action.LintMessageSeverity.WARNING if error_code.startswith("W") - else lint_action.LintMessageSeverity.ERROR + else lint_files_action.LintMessageSeverity.ERROR ), ) @@ -46,9 +46,9 @@ def run_flake8_on_single_file( file_path: Path, file_content: str, file_ast: ast.Module, - config: Flake8LintHandlerConfig, -) -> list[lint_action.LintMessage]: - lint_messages: list[lint_action.LintMessage] = [] + config: Flake8LintFilesHandlerConfig, +) -> list[lint_files_action.LintMessage]: + lint_messages: list[lint_files_action.LintMessage] = [] # flake8 expects lines with newline at the end file_lines = [line + "\n" for line in file_content.split("\n")] # TODO: investigate whether guide and decider can be reused. 
They cannot be @@ -109,31 +109,34 @@ def run_flake8_on_single_file( @dataclasses.dataclass -class Flake8LintHandlerConfig(code_action.ActionHandlerConfig): +class Flake8LintFilesHandlerConfig(code_action.ActionHandlerConfig): max_line_length: int = 79 select: list[str] | None = None extend_select: list[str] | None = None extend_ignore: list[str] | None = None -class Flake8LintHandler( - code_action.ActionHandler[lint_action.LintAction, Flake8LintHandlerConfig] +class Flake8LintFilesHandler( + code_action.ActionHandler[lint_files_action.LintFilesAction, Flake8LintFilesHandlerConfig] ): CACHE_KEY = "flake8" + FILE_OPERATION_AUTHOR = ifileeditor.FileOperationAuthor( + id="Flake8LintFilesHandler" + ) def __init__( self, - config: Flake8LintHandlerConfig, + config: Flake8LintFilesHandlerConfig, cache: icache.ICache, logger: ilogger.ILogger, - file_manager: ifilemanager.IFileManager, + file_editor: ifileeditor.IFileEditor, ast_provider: iast_provider.IPythonSingleAstProvider, process_executor: iprocessexecutor.IProcessExecutor, ) -> None: self.config = config self.cache = cache self.logger = logger - self.file_manager = file_manager + self.file_editor = file_editor self.ast_provider = ast_provider self.process_executor = process_executor @@ -145,19 +148,24 @@ def __init__( async def run_on_single_file( self, file_path: Path - ) -> lint_action.LintRunResult | None: + ) -> lint_files_action.LintFilesRunResult | None: messages = {} try: cached_lint_messages = await self.cache.get_file_cache( file_path, self.CACHE_KEY ) messages[str(file_path)] = cached_lint_messages - return lint_action.LintRunResult(messages=messages) + return lint_files_action.LintFilesRunResult(messages=messages) except icache.CacheMissException: pass - file_content = await self.file_manager.get_content(file_path) - file_version = await self.file_manager.get_file_version(file_path) + async with self.file_editor.session( + author=self.FILE_OPERATION_AUTHOR + ) as session: + async with 
session.read_file(file_path=file_path) as file_info: + file_content: str = file_info.content + file_version: str = file_info.version + try: file_ast = await self.ast_provider.get_file_ast(file_path=file_path) except SyntaxError: @@ -175,12 +183,12 @@ async def run_on_single_file( file_path, file_version, self.CACHE_KEY, lint_messages ) - return lint_action.LintRunResult(messages=messages) + return lint_files_action.LintFilesRunResult(messages=messages) async def run( self, - payload: lint_action.LintRunPayload, - run_context: code_action.RunActionWithPartialResultsContext, + payload: lint_files_action.LintFilesRunPayload, + run_context: lint_files_action.LintFilesRunContext, ) -> None: if self.config.select is not None and len(self.config.select) == 0: # empty set of rules is selected, no need to run flake8 diff --git a/extensions/fine_python_flake8/pyproject.toml b/extensions/fine_python_flake8/pyproject.toml index 2939cf07..49a15f10 100644 --- a/extensions/fine_python_flake8/pyproject.toml +++ b/extensions/fine_python_flake8/pyproject.toml @@ -1,13 +1,13 @@ [project] name = "fine_python_flake8" -version = "0.2.1" +version = "0.3.0a1" description = "" authors = [{ name = "Vladyslav Hnatiuk", email = "aders1234@gmail.com" }] readme = "README.md" requires-python = ">=3.11, <= 3.14" dependencies = [ - "finecode_extension_api==0.3.*", - "fine_python_ast==0.2.*", + "finecode_extension_api~=0.4.0a0", + "fine_python_ast~=0.3.0a0", "types-flake8 (>=7.1.0.20241020,<8.0.0.0)", "flake8 (>=7.1.2,<8.0.0)", ] diff --git a/extensions/fine_python_import_linter/.gitignore b/extensions/fine_python_import_linter/.gitignore new file mode 100644 index 00000000..cbb2a25b --- /dev/null +++ b/extensions/fine_python_import_linter/.gitignore @@ -0,0 +1,5 @@ +.venvs +build/ +*.egg-info/ +__pycache__ +finecode_config_dump/ diff --git a/extensions/fine_python_import_linter/poetry.lock b/extensions/fine_python_import_linter/poetry.lock deleted file mode 100644 index 7d33a3b6..00000000 --- 
a/extensions/fine_python_import_linter/poetry.lock +++ /dev/null @@ -1,378 +0,0 @@ -# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. - -[[package]] -name = "annotated-types" -version = "0.7.0" -description = "Reusable constraint types to use with typing.Annotated" -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, - {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, -] - -[[package]] -name = "click" -version = "8.1.8" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, - {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["main"] -markers = "platform_system == \"Windows\"" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "finecode-extension-api" -version = "0.1.0" -description = "" -optional = false -python-versions = ">=3.11, < 3.14" -groups = ["main"] -files = [] -develop = false - -[package.dependencies] -pydantic = ">=2.10.6,<3.0.0" -typing-extensions = ">=4.12.2,<5.0.0" - -[package.source] -type = "git" -url = "https://github.com/finecode-dev/finecode.git" -reference = "HEAD" -resolved_reference = "80b8f9322334128d30248231175d5154fc1c90f1" -subdirectory = "finecode_extension_api" - -[[package]] -name = "grimp" -version = "3.8.2" -description = "Builds a queryable graph of the imports within one or more Python packages." 
-optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "grimp-3.8.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7491764b2c4292e0c6c74e9f52642632402d0cec55151849df25ca79d8e3b686"}, - {file = "grimp-3.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c21592970560bc6a5f96683b2fb02756bef9600a3f0962dff33185ca0058344a"}, - {file = "grimp-3.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edb45b50cd5ab09a9c969d42d1d5ec96b56abbc5b35683b9987f4b5669456484"}, - {file = "grimp-3.8.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c804378c652d6ef33657c994a51207ddaa23a0e0868d1a41a36329befb6c4195"}, - {file = "grimp-3.8.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4345f5026bd5b0d1e8494fd5c0b6c64749ca0e149e733c122f075c0633ec44a8"}, - {file = "grimp-3.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4900ac2d8ea0e749cb93cc13fcdad4795f0705b7c9abfcb02d62ab7de2d4e45f"}, - {file = "grimp-3.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:857c3b28641b200eb56eeebf28b9edc394bc2d50d2ab45d25c88a65902a5ec02"}, - {file = "grimp-3.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c68b8624a1d3218ab6762aa4d1e5e16586b08b19606cd65e324d0fde1098986"}, - {file = "grimp-3.8.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:63797bfdd5bedd6de5c0390d5ebf5eb99e756ca344a6cd1a940d7d92ba734980"}, - {file = "grimp-3.8.2-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:3cf76b3bbb69e2c5219a791a1e357f43a519e1e055f3b357a18ea2c94dff9eb5"}, - {file = "grimp-3.8.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:6af8a4157c5a1c1a1d63e6687fa5681dd5d509a769e037de46ea8b648626cb0a"}, - {file = "grimp-3.8.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:dda6cdce017d1a116ab1426a9406b1b6d72c7fcb8b22990845cce260a175b62d"}, - {file = "grimp-3.8.2-cp310-cp310-win32.whl", hash = 
"sha256:6582338ab39dffa0c2da3e599dbd1d458a954e3fa33f983320621093b1496c40"}, - {file = "grimp-3.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:8db30d1fe2cc9e9f669f53fbabed780c9316bff90d76b0480f98134930eca16a"}, - {file = "grimp-3.8.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:66f908a90fd5e3e45a005580ce7e1510eab9cbf8d22a9b65647c73f80055cccb"}, - {file = "grimp-3.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:345c102e05a4c34fcbe7508c156279b276c9b9113bb3dec9d314ff5bf5f2ec37"}, - {file = "grimp-3.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa4ae46e6d766cbda423c6270d1d3a41f8f6a4fed8764d2c3e1a376fd617a765"}, - {file = "grimp-3.8.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3e2cb0fb74eb7eb29534d69d78802442ea7551be7ba1b98b7ae5705589d3df31"}, - {file = "grimp-3.8.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba8436135291eb0b25633b926842ef41df2e9917e42de519a8131d1cbb973256"}, - {file = "grimp-3.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7a21ed1c25104ed89ec7b03251e3a4c3b95ab25924825ecfd8c6fbe2a7fbd75e"}, - {file = "grimp-3.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b05daa9216293e748debd4c17f73bf11fd44597147a518afff86eb64bd0943f6"}, - {file = "grimp-3.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47467cdf5edb90219b92b284ca7c2ab42b8e4555c3293017daff9d92fef6457d"}, - {file = "grimp-3.8.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40f6b582f65230f098ab288335c9d9e92f62ade8126aadbe31aebbb153aaf14f"}, - {file = "grimp-3.8.2-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:28035c248cbee4e59e8686b0e4146e3304981a81e7287072281a90f04f3d66f7"}, - {file = "grimp-3.8.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d4aef9850dde96130a6ecff0048496d299b87db8c38a0623835dadf196494c1a"}, - {file = "grimp-3.8.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:5c892f816bc6a85708576adf60854fdfae74229397b651852c66be7dc7f7016b"}, - {file = "grimp-3.8.2-cp311-cp311-win32.whl", hash = "sha256:ff33e44cf5f0118a4955fd027b9d53b88207c8e34381933daa31ef93a692d447"}, - {file = "grimp-3.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:61a6855bfcd19c83711dac98da12c85fccdc5e77c3aaf9672cb03f68823261fb"}, - {file = "grimp-3.8.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:81ff34e23938032fc7c529d5526edbc046fb43d1a5a6dd5291167ef3955255d3"}, - {file = "grimp-3.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:531c6a311d744f75b42f685183943d721ed0cf67c904671894b0bc307e7c422e"}, - {file = "grimp-3.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a5dae74b37b29ccf1b86afc7aee56578add35e1184d408c8fa207106b824ae6"}, - {file = "grimp-3.8.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f357a3615c1f42aec2eb37892916223f63c7222b33c2aa3a3d7b5de713e29158"}, - {file = "grimp-3.8.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:75b693e814393cbcb1883c855df5d77fe4884ecd5e4e0245de9eccbb5eeb19aa"}, - {file = "grimp-3.8.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3ec7c50f70478595c47cf64004f40182055a3bfef2712805d134cd234f3b2ddb"}, - {file = "grimp-3.8.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a11addfe4c61f8bcc3cf825731470abd6ffd3ab18a87c94dec5c6fe7a1ec96c2"}, - {file = "grimp-3.8.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4dd4feca3bd71d6e9ea25468ef1e48b1664e99fa56021ba7a4dc737439b302be"}, - {file = "grimp-3.8.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1b60fa34dd1e488889f9ca4dbc1366a76541f715113ae2e5e5e1c535d4db7944"}, - {file = "grimp-3.8.2-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b5d7a8cd6ba593bbe7c0e0c40eced9ad6f404114e2d5abce22e3bf72337c8e76"}, - {file = "grimp-3.8.2-cp312-cp312-musllinux_1_2_i686.whl", hash = 
"sha256:f5d36dcf4c62bdd91aecd7d260693b417a19a670af12b54f62cf691324e6b640"}, - {file = "grimp-3.8.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7eaf8fb91e8a7a0de498c7cce82cb5a8fedc34d40640ce2f5a09170ad3321f74"}, - {file = "grimp-3.8.2-cp312-cp312-win32.whl", hash = "sha256:bdd5942aad7bf8d8ce2f1987309fb5666074b8a1513622e65d7fad0d5e53c6a0"}, - {file = "grimp-3.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:effc3433d2ca285130853aa10565d52240ec64cd4590930210b8524e9bc7e7e0"}, - {file = "grimp-3.8.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:bc8178dece86f7eefac1c13720f703ee2db3200df87a4eccef810f5f17979ed1"}, - {file = "grimp-3.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:037a7fff978e11070f040b9fd6e4269cfdfed98aeec3b8b6b39504f51cc16b14"}, - {file = "grimp-3.8.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ed5f3d6f605af786d44041c3d32e31010e73fa7ad9790604c5d55c9a94a9a3e"}, - {file = "grimp-3.8.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:66dbf8011bef79b4c99b45cc34a07d153d7e943aa7239e89c4435450c364df59"}, - {file = "grimp-3.8.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a42a963d6d6ac7efc5ff4d100797cc41fd947b4c6d048f1af5ba84418e9231d1"}, - {file = "grimp-3.8.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4e4ba4005d924870c6d2c7dc9e2908d424f2fbed001a9aa61f497100ffe007d7"}, - {file = "grimp-3.8.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6fbdf475a157577212721455f12ed08cd9fda00612c534b672fa964b22962a40"}, - {file = "grimp-3.8.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2be94f44acfc590433840336f03091fd2e6a5d42d9dba03b490bbd75e8302532"}, - {file = "grimp-3.8.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:861939fe32beab5caaefacc2d6a4e32a36f1d3fdf9d405c9c6f94b3099f560a8"}, - {file = "grimp-3.8.2-cp313-cp313-musllinux_1_2_armv7l.whl", hash = 
"sha256:f5e92760b624a6d8b132762328e600d7f13c83b74c770e82ce66d4feaf97591d"}, - {file = "grimp-3.8.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:693282d7adaa9b98a46c77f15d545b681b39c9e3912abee573f47288e260db69"}, - {file = "grimp-3.8.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f71e206d27fe1e5c0bfa5e4fb2a8abda735c045b887ed4310b8d36cc7e43640c"}, - {file = "grimp-3.8.2-cp313-cp313-win32.whl", hash = "sha256:b05173750c7729a52ea75fcb5b0a27f3046d02a18a1168051f28e768b616462f"}, - {file = "grimp-3.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:aeabbe0fd705658528600e9adaa01bbb4f4e85456ab648ec2798d41d4c2e6c65"}, - {file = "grimp-3.8.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bf04b5e59eb06bf05694c52d01d344458b05273b984a18fc201d38d3c85ee14"}, - {file = "grimp-3.8.2-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:21acca9e4710f7e7abf7d8c5248798c0e5bf0c8034eb3b59f53b9f496649a0e1"}, - {file = "grimp-3.8.2-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:67d6c4cef6250c020803edd838ace00ab74b1ddf46d41b34ab9b9a4d3bb12ca2"}, - {file = "grimp-3.8.2-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:85c58f2de7e15342b077ac3ff836058f30d5b9f6a94af8e30bfcf5fac55b0f63"}, - {file = "grimp-3.8.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:01b3041a4dea1ba958b6da9670a017b30dafcb5682de40ff008885939e17d1fd"}, - {file = "grimp-3.8.2-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:d117ca9d512134caacb10a4e766e36c1a46a4c5320aa0310f541a7ec961cff68"}, - {file = "grimp-3.8.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:880dbfcf11281a39d4107742b230997cee7a8a2e1b954380510cc1e80abb7474"}, - {file = "grimp-3.8.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:efcc0f7731b8b2f219ef6bec10e4697762d21851abc6aca009ee9b1819f58337"}, - {file = "grimp-3.8.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = 
"sha256:cee5fcb3a669fa776e5b0580bfd3b1c7489a245571e6f70787e11ce01fb2b10c"}, - {file = "grimp-3.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dda76e16e1efc639b3fc9ec3845ecfb50b2f6b3503487031168d28ffea7df757"}, - {file = "grimp-3.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fff1aeeabb0197a9e59d2fa69555fcd2fee99dae77a755e9baed1f4a51c0558"}, - {file = "grimp-3.8.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4f9d97973833ccd1637ac738178b0a6073546dc2732e952cb4f0afddfc441828"}, - {file = "grimp-3.8.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6aee4c8902d40c47b204ccaa76d3714175b03acd3285bce6bb202444ac5f4d76"}, - {file = "grimp-3.8.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0176eae585523e042f1d96a2bccdac86a1f8d63f59af1a4cf0c2ae9b05ec143"}, - {file = "grimp-3.8.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb5208953627a5141f2419f989394829a21a7540ea7af9a08eac87ba887b494d"}, - {file = "grimp-3.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fd168ad9b1608371e33aaf8cfbc5047193ec5dea948f883f7ac6780741a5e51"}, - {file = "grimp-3.8.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:d6bf6f425a6a50d4ac082f443d59d034ad42bb1d54e0c12df4d73d77b9937c14"}, - {file = "grimp-3.8.2-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:f206cd40bdbe1a5d72ee4b3a0d2c4c094328174d3e1fce764066355c61f33389"}, - {file = "grimp-3.8.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8ee2869eb075dffe5f2fe64a722a84aba0d0d5e4234ddec76681d330180168b0"}, - {file = "grimp-3.8.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:65ad42b498d4533fdbb9f8aa36781c19cd8c12d8bdd5b4b0128d8aa2537762c5"}, - {file = "grimp-3.8.2-cp39-cp39-win32.whl", hash = "sha256:c8a4f69da9c912b1c206efb8a34c887ece3ddc9251bed4f308e952b642442cb8"}, - {file = "grimp-3.8.2-cp39-cp39-win_amd64.whl", hash = 
"sha256:dd3912e4d8bb72c59623ce8910cc130f0625f96e9e1dd542ac0ab3423071ebd3"}, - {file = "grimp-3.8.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a92ab5151dbd00c1c0346698df8c6fde3375452df7e1ddaadfc3741a06bb126"}, - {file = "grimp-3.8.2-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:693dcbace682ee56be8bdc56991df78eeedbfefabe6c3d627ebaf290cad5c85b"}, - {file = "grimp-3.8.2-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66712866e2fd7916cfad442c376dd34555de609690e9174119d3253139884575"}, - {file = "grimp-3.8.2-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:949bba3ee8c353e0b9d7b986c15d380f57e99cd1cf35e28b8e78bf88eea43cd0"}, - {file = "grimp-3.8.2-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08a00c232bdf5e145f6adaa5b51012ed3fa26fe8e7309dab15e5a8c33c5f08f6"}, - {file = "grimp-3.8.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1cdc50039056ef3486bda2d5e86a998b43b21499e93441a343611b8a060d6da"}, - {file = "grimp-3.8.2-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:02e0eb9162152acde649350bd2d08ad2bace7cbc5da9ba2b73e4e81d7dea7a50"}, - {file = "grimp-3.8.2-pp310-pypy310_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:bc88a752a7a0877513a5317b7d611a6db52920484b68ac1df2cd4cdcd0c01d8f"}, - {file = "grimp-3.8.2-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:abc9252de7c91f2732f55e9c868e7b888fcfa2105ec9d2fb125804d8ae08629a"}, - {file = "grimp-3.8.2-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:c054ec758b02621fa1990cda862acd5a488f3ae7b00faa6e00256185430e1f96"}, - {file = "grimp-3.8.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b8219c8b45992699a3a5371b44d4ecdc65dff9d63eb82affa521d0c27f57c93"}, - {file = "grimp-3.8.2-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:c33115f0da4ef467d9999e59f50379ad04e7a0a28fff899bd8c957335dc1730e"}, - {file = "grimp-3.8.2-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:599fd162e1a5c1a82e7ef3fca86578a5d2f77370245f5e9cf4e37710d0ccd80a"}, - {file = "grimp-3.8.2-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3430b5da08be37b49f8c89d248d27ce73cea8589f9b61faa821deaf303620b86"}, - {file = "grimp-3.8.2-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c62f56e338e76cdc5ebcc2bdb27da16363f07113e45b351c4e727e5adf2ac430"}, - {file = "grimp-3.8.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:895c7592bc49c330ea29732f3ac019cf11e3b8908b283a00d96967cb453b0700"}, - {file = "grimp-3.8.2-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:ad821f1817e0c5f9349e7f26d95bf739581ec4e66192f31a35ef4230df43b6bd"}, - {file = "grimp-3.8.2-pp311-pypy311_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:535767126106ff3c473bc6e7a6b2f0a09f00118a122b84395a01392801a1e559"}, - {file = "grimp-3.8.2-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:67b0277476ff74bf99ff150c1938c5d5f01a4927a4991c85d4a08bf53ffab031"}, - {file = "grimp-3.8.2-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:1b22e647257ca0f9561aea5c047c235d3b4400b5391c12284a05c8ed496e38a4"}, - {file = "grimp-3.8.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aebb7731e556b6e55a6731449546e5a883efbe19e62c33b9e9a4f2a0f069535c"}, - {file = "grimp-3.8.2-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cc1cde95c183673eae10cb4c640c0f63e840f57d94325469df752e0f984f51dc"}, - {file = "grimp-3.8.2-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa0b75ba61b1ae3bbea3b43e3d89483ca00a9ef3c05462cc08547c1456524213"}, - {file = "grimp-3.8.2-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:d4cd1d83b2a67e84d271c4dd136f61e738524773f0fc5ca41e7f14982e24d59d"}, - {file = "grimp-3.8.2-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:ffab17fbb0f020befbe56408829213edac4e5d5de23f933d20bd47e57b2d9825"}, - {file = "grimp-3.8.2-pp39-pypy39_pp73-musllinux_1_2_armv7l.whl", hash = "sha256:458384a9a5cdea3e82fc7ff74f507072a8832406aecaf71741b602488e1400e3"}, - {file = "grimp-3.8.2-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:cbc9f2378c81b7bdc9d126fbd1dd2d46e97ae8f4f05067e9aed092fc5aaa56c2"}, - {file = "grimp-3.8.2-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:9d15f6bf98110a6568f89af1e6932e99bcfae6d2abb8fc51fda15eb94c0ac3a0"}, - {file = "grimp-3.8.2.tar.gz", hash = "sha256:09e442bf06b478bcc0a6a7c2c7ee383709b90127427f8ded4953139828dad292"}, -] - -[package.dependencies] -joblib = ">=1.3.0" -typing-extensions = ">=3.10.0.0" - -[[package]] -name = "import-linter" -version = "2.3" -description = "Enforces rules for the imports within and between Python packages." 
-optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "import_linter-2.3-py3-none-any.whl", hash = "sha256:5b851776782048ff1be214f1e407ef2e3d30dcb23194e8b852772941811a1258"}, - {file = "import_linter-2.3.tar.gz", hash = "sha256:863646106d52ee5489965670f97a2a78f2c8c68d2d20392322bf0d7cc0111aa7"}, -] - -[package.dependencies] -click = ">=6" -grimp = ">=3.7" -typing-extensions = ">=3.10.0.0" - -[[package]] -name = "joblib" -version = "1.4.2" -description = "Lightweight pipelining with Python functions" -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, - {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, -] - -[[package]] -name = "pydantic" -version = "2.11.4" -description = "Data validation using Python type hints" -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "pydantic-2.11.4-py3-none-any.whl", hash = "sha256:d9615eaa9ac5a063471da949c8fc16376a84afb5024688b3ff885693506764eb"}, - {file = "pydantic-2.11.4.tar.gz", hash = "sha256:32738d19d63a226a52eed76645a98ee07c1f410ee41d93b4afbfa85ed8111c2d"}, -] - -[package.dependencies] -annotated-types = ">=0.6.0" -pydantic-core = "2.33.2" -typing-extensions = ">=4.12.2" -typing-inspection = ">=0.4.0" - -[package.extras] -email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] - -[[package]] -name = "pydantic-core" -version = "2.33.2" -description = "Core functionality for Pydantic validation and serialization" -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "pydantic_core-2.33.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2b3d326aaef0c0399d9afffeb6367d5e26ddc24d351dbc9c636840ac355dc5d8"}, - {file = 
"pydantic_core-2.33.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e5b2671f05ba48b94cb90ce55d8bdcaaedb8ba00cc5359f6810fc918713983d"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0069c9acc3f3981b9ff4cdfaf088e98d83440a4c7ea1bc07460af3d4dc22e72d"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d53b22f2032c42eaaf025f7c40c2e3b94568ae077a606f006d206a463bc69572"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0405262705a123b7ce9f0b92f123334d67b70fd1f20a9372b907ce1080c7ba02"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4b25d91e288e2c4e0662b8038a28c6a07eaac3e196cfc4ff69de4ea3db992a1b"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bdfe4b3789761f3bcb4b1ddf33355a71079858958e3a552f16d5af19768fef2"}, - {file = "pydantic_core-2.33.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:efec8db3266b76ef9607c2c4c419bdb06bf335ae433b80816089ea7585816f6a"}, - {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:031c57d67ca86902726e0fae2214ce6770bbe2f710dc33063187a68744a5ecac"}, - {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:f8de619080e944347f5f20de29a975c2d815d9ddd8be9b9b7268e2e3ef68605a"}, - {file = "pydantic_core-2.33.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:73662edf539e72a9440129f231ed3757faab89630d291b784ca99237fb94db2b"}, - {file = "pydantic_core-2.33.2-cp310-cp310-win32.whl", hash = "sha256:0a39979dcbb70998b0e505fb1556a1d550a0781463ce84ebf915ba293ccb7e22"}, - {file = "pydantic_core-2.33.2-cp310-cp310-win_amd64.whl", hash = "sha256:b0379a2b24882fef529ec3b4987cb5d003b9cda32256024e6fe1586ac45fc640"}, - {file = 
"pydantic_core-2.33.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4c5b0a576fb381edd6d27f0a85915c6daf2f8138dc5c267a57c08a62900758c7"}, - {file = "pydantic_core-2.33.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e799c050df38a639db758c617ec771fd8fb7a5f8eaaa4b27b101f266b216a246"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dc46a01bf8d62f227d5ecee74178ffc448ff4e5197c756331f71efcc66dc980f"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a144d4f717285c6d9234a66778059f33a89096dfb9b39117663fd8413d582dcc"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cf6373c21bc80b2e0dc88444f41ae60b2f070ed02095754eb5a01df12256de"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dc625f4aa79713512d1976fe9f0bc99f706a9dee21dfd1810b4bbbf228d0e8a"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:881b21b5549499972441da4758d662aeea93f1923f953e9cbaff14b8b9565aef"}, - {file = "pydantic_core-2.33.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bdc25f3681f7b78572699569514036afe3c243bc3059d3942624e936ec93450e"}, - {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:fe5b32187cbc0c862ee201ad66c30cf218e5ed468ec8dc1cf49dec66e160cc4d"}, - {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:bc7aee6f634a6f4a95676fcb5d6559a2c2a390330098dba5e5a5f28a2e4ada30"}, - {file = "pydantic_core-2.33.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:235f45e5dbcccf6bd99f9f472858849f73d11120d76ea8707115415f8e5ebebf"}, - {file = "pydantic_core-2.33.2-cp311-cp311-win32.whl", hash = "sha256:6368900c2d3ef09b69cb0b913f9f8263b03786e5b2a387706c5afb66800efd51"}, - {file = 
"pydantic_core-2.33.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e063337ef9e9820c77acc768546325ebe04ee38b08703244c1309cccc4f1bab"}, - {file = "pydantic_core-2.33.2-cp311-cp311-win_arm64.whl", hash = "sha256:6b99022f1d19bc32a4c2a0d544fc9a76e3be90f0b3f4af413f87d38749300e65"}, - {file = "pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc"}, - {file = "pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1"}, - {file = "pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b"}, - {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1"}, - {file = "pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6"}, - {file = 
"pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea"}, - {file = "pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290"}, - {file = "pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2"}, - {file = "pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab"}, - {file = "pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f"}, - {file = "pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d"}, - {file = "pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56"}, - {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash 
= "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5"}, - {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e"}, - {file = "pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162"}, - {file = "pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849"}, - {file = "pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9"}, - {file = "pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9"}, - {file = "pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac"}, - {file = "pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5"}, - {file = "pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9"}, - {file = "pydantic_core-2.33.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a2b911a5b90e0374d03813674bf0a5fbbb7741570dcd4b4e85a2e48d17def29d"}, - {file = "pydantic_core-2.33.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6fa6dfc3e4d1f734a34710f391ae822e0a8eb8559a85c6979e14e65ee6ba2954"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54c939ee22dc8e2d545da79fc5381f1c020d6d3141d3bd747eab59164dc89fb"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:53a57d2ed685940a504248187d5685e49eb5eef0f696853647bf37c418c538f7"}, - {file = 
"pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:09fb9dd6571aacd023fe6aaca316bd01cf60ab27240d7eb39ebd66a3a15293b4"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e6116757f7959a712db11f3e9c0a99ade00a5bbedae83cb801985aa154f071b"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d55ab81c57b8ff8548c3e4947f119551253f4e3787a7bbc0b6b3ca47498a9d3"}, - {file = "pydantic_core-2.33.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c20c462aa4434b33a2661701b861604913f912254e441ab8d78d30485736115a"}, - {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44857c3227d3fb5e753d5fe4a3420d6376fa594b07b621e220cd93703fe21782"}, - {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:eb9b459ca4df0e5c87deb59d37377461a538852765293f9e6ee834f0435a93b9"}, - {file = "pydantic_core-2.33.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9fcd347d2cc5c23b06de6d3b7b8275be558a0c90549495c699e379a80bf8379e"}, - {file = "pydantic_core-2.33.2-cp39-cp39-win32.whl", hash = "sha256:83aa99b1285bc8f038941ddf598501a86f1536789740991d7d8756e34f1e74d9"}, - {file = "pydantic_core-2.33.2-cp39-cp39-win_amd64.whl", hash = "sha256:f481959862f57f29601ccced557cc2e817bce7533ab8e01a797a48b49c9692b3"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c4aa4e82353f65e548c476b37e64189783aa5384903bfea4f41580f255fddfa"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d946c8bf0d5c24bf4fe333af284c59a19358aa3ec18cb3dc4370080da1e8ad29"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87b31b6846e361ef83fedb187bb5b4372d0da3f7e28d85415efa92d6125d6e6d"}, - {file = 
"pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa9d91b338f2df0508606f7009fde642391425189bba6d8c653afd80fd6bb64e"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2058a32994f1fde4ca0480ab9d1e75a0e8c87c22b53a3ae66554f9af78f2fe8c"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:0e03262ab796d986f978f79c943fc5f620381be7287148b8010b4097f79a39ec"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:1a8695a8d00c73e50bff9dfda4d540b7dee29ff9b8053e38380426a85ef10052"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:fa754d1850735a0b0e03bcffd9d4b4343eb417e47196e4485d9cca326073a42c"}, - {file = "pydantic_core-2.33.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a11c8d26a50bfab49002947d3d237abe4d9e4b5bdc8846a63537b6488e197808"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:dd14041875d09cc0f9308e37a6f8b65f5585cf2598a53aa0123df8b129d481f8"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:d87c561733f66531dced0da6e864f44ebf89a8fba55f31407b00c2f7f9449593"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f82865531efd18d6e07a04a17331af02cb7a651583c418df8266f17a63c6612"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bfb5112df54209d820d7bf9317c7a6c9025ea52e49f46b6a2060104bba37de7"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:64632ff9d614e5eecfb495796ad51b0ed98c453e447a76bcbeeb69615079fc7e"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:f889f7a40498cc077332c7ab6b4608d296d852182211787d4f3ee377aaae66e8"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:de4b83bb311557e439b9e186f733f6c645b9417c84e2eb8203f3f820a4b988bf"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:82f68293f055f51b51ea42fafc74b6aad03e70e191799430b90c13d643059ebb"}, - {file = "pydantic_core-2.33.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:329467cecfb529c925cf2bbd4d60d2c509bc2fb52a20c1045bf09bb70971a9c1"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:87acbfcf8e90ca885206e98359d7dca4bcbb35abdc0ff66672a293e1d7a19101"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:7f92c15cd1e97d4b12acd1cc9004fa092578acfa57b67ad5e43a197175d01a64"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3f26877a748dc4251cfcfda9dfb5f13fcb034f5308388066bcfe9031b63ae7d"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dac89aea9af8cd672fa7b510e7b8c33b0bba9a43186680550ccf23020f32d535"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:970919794d126ba8645f3837ab6046fb4e72bbc057b3709144066204c19a455d"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3eb3fe62804e8f859c49ed20a8451342de53ed764150cb14ca71357c765dc2a6"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:3abcd9392a36025e3bd55f9bd38d908bd17962cc49bc6da8e7e96285336e2bca"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:3a1c81334778f9e3af2f8aeb7a960736e5cab1dfebfb26aabca09afd2906c039"}, - {file = "pydantic_core-2.33.2-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:2807668ba86cb38c6817ad9bc66215ab8584d1d304030ce4f0887336f28a5e27"}, - {file = "pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc"}, -] - -[package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" - -[[package]] -name = "typing-extensions" -version = "4.13.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, - {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, -] - -[[package]] -name = "typing-inspection" -version = "0.4.0" -description = "Runtime typing introspection tools" -optional = false -python-versions = ">=3.9" -groups = ["main"] -files = [ - {file = "typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f"}, - {file = "typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"}, -] - -[package.dependencies] -typing-extensions = ">=4.12.0" - -[metadata] -lock-version = "2.1" -python-versions = ">= 3.11, < 3.14" -content-hash = "99cd12c76631d7240fede4b8d01d223df338f07c600376d5d7bdb6684add8940" diff --git a/extensions/fine_python_import_linter/pyproject.toml b/extensions/fine_python_import_linter/pyproject.toml index cbdec0c1..891a7cbe 100644 --- a/extensions/fine_python_import_linter/pyproject.toml +++ b/extensions/fine_python_import_linter/pyproject.toml @@ -5,15 +5,10 @@ description = "" authors = [{ name = "Vladyslav Hnatiuk", email = "aders1234@gmail.com" }] readme = "README.md" requires-python = ">= 3.11, <= 3.14" -dependencies = ["finecode_extension_api == 0.3.*", "import-linter (>=2.1,<3.0)"] - - -[build-system] -requires = 
["poetry-core>=2.0.0,<3.0.0"] -build-backend = "poetry.core.masonry.api" +dependencies = ["finecode_extension_api == 0.4.*", "import-linter (>=2.1,<3.0)"] [dependency-groups] -dev_workspace = ["finecode==0.3.*", "finecode_dev_common_preset==0.2.*"] +dev_workspace = ["finecode~=0.4.0a0", "finecode_dev_common_preset~=0.3.0a0"] [tool.finecode] presets = [{ source = "finecode_dev_common_preset" }] @@ -23,3 +18,8 @@ finecode_dev_common_preset = { path = "../../finecode_dev_common_preset", editab finecode = { path = "../../", editable = true } finecode_extension_runner = { path = "../../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../../finecode_builtin_handlers", editable = true } +fine_python_recommended = { path = "../../presets/fine_python_recommended", editable = true } +fine_python_lint = { path = "../../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../../presets/fine_python_format", editable = true } diff --git a/extensions/fine_python_isort/.gitignore b/extensions/fine_python_isort/.gitignore new file mode 100644 index 00000000..cbb2a25b --- /dev/null +++ b/extensions/fine_python_isort/.gitignore @@ -0,0 +1,5 @@ +.venvs +build/ +*.egg-info/ +__pycache__ +finecode_config_dump/ diff --git a/extensions/fine_python_isort/fine_python_isort/__init__.py b/extensions/fine_python_isort/fine_python_isort/__init__.py index 3160d5d8..288d9d2e 100644 --- a/extensions/fine_python_isort/fine_python_isort/__init__.py +++ b/extensions/fine_python_isort/fine_python_isort/__init__.py @@ -1,6 +1,6 @@ -from .action import IsortFormatHandler, IsortFormatHandlerConfig +from .action import IsortFormatFilesHandler, IsortFormatFilesHandlerConfig __all__ = [ - "IsortFormatHandler", - "IsortFormatHandlerConfig", + "IsortFormatFilesHandler", + 
"IsortFormatFilesHandlerConfig", ] diff --git a/extensions/fine_python_isort/fine_python_isort/action.py b/extensions/fine_python_isort/fine_python_isort/action.py index d20ec6d5..5dce2da2 100644 --- a/extensions/fine_python_isort/fine_python_isort/action.py +++ b/extensions/fine_python_isort/fine_python_isort/action.py @@ -6,25 +6,31 @@ import isort.api as isort_api import isort.settings as isort_settings - from finecode_extension_api import code_action -from finecode_extension_api.actions import format as format_action +from finecode_extension_api.actions import format_files as format_files_action from finecode_extension_api.interfaces import icache, ilogger, iprocessexecutor @dataclasses.dataclass -class IsortFormatHandlerConfig(code_action.ActionHandlerConfig): - profile: str = "" - - -class IsortFormatHandler( - code_action.ActionHandler[format_action.FormatAction, IsortFormatHandlerConfig] +class IsortFormatFilesHandlerConfig(code_action.ActionHandlerConfig): + profile: str | None = None + line_length: int | None = None + multi_line_output: int | None = None + include_trailing_comma: bool | None = None + force_grid_wrap: int | None = None + use_parentheses: bool | None = None + ensure_newline_before_comments: bool | None = None + split_on_trailing_comma: bool | None = None + + +class IsortFormatFilesHandler( + code_action.ActionHandler[ + format_files_action.FormatFilesAction, IsortFormatFilesHandlerConfig + ] ): - CACHE_KEY = "Isort" - def __init__( self, - config: IsortFormatHandlerConfig, + config: IsortFormatFilesHandlerConfig, logger: ilogger.ILogger, cache: icache.ICache, process_executor: iprocessexecutor.IProcessExecutor, @@ -36,54 +42,45 @@ def __init__( async def run( self, - payload: format_action.FormatRunPayload, - run_context: format_action.FormatRunContext, - ) -> format_action.FormatRunResult: - result_by_file_path: dict[Path, format_action.FormatRunFileResult] = {} + payload: format_files_action.FormatFilesRunPayload, + run_context: 
format_files_action.FormatFilesRunContext, + ) -> format_files_action.FormatFilesRunResult: + result_by_file_path: dict[Path, format_files_action.FormatRunFileResult] = {} for file_path in payload.file_paths: file_content, file_version = run_context.file_info_by_path[file_path] - try: - new_file_content = await self.cache.get_file_cache( - file_path, self.CACHE_KEY - ) - result_by_file_path[file_path] = format_action.FormatRunFileResult( - changed=False, code=new_file_content - ) - continue - except icache.CacheMissException: - pass new_file_content, file_changed = await self.process_executor.submit( - format_one, file_content, self.config + format_one, file_content, dataclasses.asdict(self.config) ) # save for next handlers - run_context.file_info_by_path[file_path] = format_action.FileInfo( + run_context.file_info_by_path[file_path] = format_files_action.FileInfo( new_file_content, file_version ) - await self.cache.save_file_cache( - file_path, file_version, self.CACHE_KEY, new_file_content - ) - result_by_file_path[file_path] = format_action.FormatRunFileResult( + result_by_file_path[file_path] = format_files_action.FormatRunFileResult( changed=file_changed, code=new_file_content ) - return format_action.FormatRunResult(result_by_file_path=result_by_file_path) + return format_files_action.FormatFilesRunResult( + result_by_file_path=result_by_file_path + ) def format_one( - file_content: str, handler_config: IsortFormatHandlerConfig + file_content: str, handler_config: dict[str, object] ) -> tuple[str, bool]: + isort_config_overrides = { + k: v for k, v in handler_config.items() if v is not None + } + input_stream = StringIO(file_content) output_stream_context = isort_api._in_memory_output_stream_context() with output_stream_context as output_stream: changed = isort_api.sort_stream( input_stream=input_stream, output_stream=output_stream, - config=isort_settings.Config( - profile=handler_config.profile - ), # TODO: config + 
config=isort_settings.Config(**isort_config_overrides), file_path=None, disregard_skip=True, extension=".py", diff --git a/extensions/fine_python_isort/pyproject.toml b/extensions/fine_python_isort/pyproject.toml index 8392d6f4..e4fb2050 100644 --- a/extensions/fine_python_isort/pyproject.toml +++ b/extensions/fine_python_isort/pyproject.toml @@ -1,14 +1,14 @@ [project] name = "fine_python_isort" -version = "0.2.0" +version = "0.3.0a1" description = "" authors = [{ name = "Vladyslav Hnatiuk", email = "aders1234@gmail.com" }] readme = "README.md" requires-python = ">= 3.11, <= 3.14" -dependencies = ["finecode_extension_api==0.3.*", "isort (>=5.13, <6)"] +dependencies = ["finecode_extension_api~=0.4.0a0", "isort (>=5.13, <6)"] [dependency-groups] -dev_workspace = ["finecode==0.3.*", "finecode_dev_common_preset==0.2.*"] +dev_workspace = ["finecode~=0.4.0a0", "finecode_dev_common_preset~=0.3.0a0"] [tool.finecode] presets = [{ source = "finecode_dev_common_preset" }] @@ -18,3 +18,8 @@ finecode_dev_common_preset = { path = "../../finecode_dev_common_preset", editab finecode = { path = "../../", editable = true } finecode_extension_runner = { path = "../../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../../finecode_builtin_handlers", editable = true } +fine_python_recommended = { path = "../../presets/fine_python_recommended", editable = true } +fine_python_lint = { path = "../../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../../presets/fine_python_format", editable = true } diff --git a/extensions/fine_python_module_exports/.gitignore b/extensions/fine_python_module_exports/.gitignore new file mode 100644 index 00000000..cbb2a25b --- /dev/null +++ b/extensions/fine_python_module_exports/.gitignore @@ -0,0 +1,5 @@ +.venvs +build/ +*.egg-info/ 
+__pycache__ +finecode_config_dump/ diff --git a/extensions/fine_python_module_exports/pyproject.toml b/extensions/fine_python_module_exports/pyproject.toml index e62bb0f0..c57cad14 100644 --- a/extensions/fine_python_module_exports/pyproject.toml +++ b/extensions/fine_python_module_exports/pyproject.toml @@ -1,14 +1,14 @@ [project] name = "fine_python_module_exports" -version = "0.2.0" +version = "0.3.0a1" description = "" authors = [{ name = "Vladyslav Hnatiuk", email = "aders1234@gmail.com" }] readme = "README.md" requires-python = ">= 3.11, <= 3.14" -dependencies = ["finecode_extension_api==0.3.*", "fine_python_ast==0.2.*"] +dependencies = ["finecode_extension_api~=0.4.0a0", "fine_python_ast~=0.3.0a0"] [dependency-groups] -dev_workspace = ["finecode==0.3.*", "finecode_dev_common_preset==0.2.*"] +dev_workspace = ["finecode~=0.4.0a0", "finecode_dev_common_preset~=0.3.0a0"] [tool.finecode] presets = [{ source = "finecode_dev_common_preset" }] @@ -18,3 +18,8 @@ finecode_dev_common_preset = { path = "../../finecode_dev_common_preset", editab finecode = { path = "../../", editable = true } finecode_extension_runner = { path = "../../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../../finecode_builtin_handlers", editable = true } +fine_python_recommended = { path = "../../presets/fine_python_recommended", editable = true } +fine_python_lint = { path = "../../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../../presets/fine_python_format", editable = true } diff --git a/extensions/fine_python_mypy/.gitignore b/extensions/fine_python_mypy/.gitignore new file mode 100644 index 00000000..cbb2a25b --- /dev/null +++ b/extensions/fine_python_mypy/.gitignore @@ -0,0 +1,5 @@ +.venvs +build/ +*.egg-info/ +__pycache__ +finecode_config_dump/ diff --git 
a/extensions/fine_python_mypy/fine_python_mypy/action.py b/extensions/fine_python_mypy/fine_python_mypy/action.py index 77236719..b191abde 100644 --- a/extensions/fine_python_mypy/fine_python_mypy/action.py +++ b/extensions/fine_python_mypy/fine_python_mypy/action.py @@ -13,7 +13,7 @@ from finecode_extension_api.interfaces import ( icache, icommandrunner, - ifilemanager, + ifileeditor, ilogger, iextensionrunnerinfoprovider, iprojectinfoprovider, @@ -31,6 +31,7 @@ class MypyLintHandler( code_action.ActionHandler[lint_action.LintAction, MypyManyCodeActionConfig] ): CACHE_KEY = "mypy" + FILE_OPERATION_AUTHOR = ifileeditor.FileOperationAuthor(erovid) DMYPY_ARGS = [ "--no-color-output", @@ -50,7 +51,7 @@ def __init__( project_info_provider: iprojectinfoprovider.IProjectInfoProvider, cache: icache.ICache, logger: ilogger.ILogger, - file_manager: ifilemanager.IFileManager, + file_editor: ifileeditor.IFileEditor, lifecycle: code_action.ActionHandlerLifecycle, command_runner: icommandrunner.ICommandRunner, ) -> None: @@ -58,7 +59,7 @@ def __init__( self.project_info_provider = project_info_provider self.cache = cache self.logger = logger - self.file_manager = file_manager + self.file_editor = file_editor self.command_runner = command_runner lifecycle.on_shutdown(self.shutdown) @@ -116,9 +117,12 @@ async def run_on_single_file( files_versions: dict[Path, str] = {} # can we exclude cached files here? 
Using the right cache(one that handles # dependencies as well) should be possible - for file_path in all_project_files: - file_version = await self.file_manager.get_file_version(file_path) - files_versions[file_path] = file_version + async with self.file_editor.session( + author=self.FILE_OPERATION_AUTHOR + ) as session: + for file_path in all_project_files: + file_version = await session.read_file_version(file_path) + files_versions[file_path] = file_version try: all_processed_files_with_messages = await self._run_dmypy_on_project( @@ -132,22 +136,25 @@ async def run_on_single_file( ) in all_processed_files_with_messages.items() } - for ( - file_path, - lint_messages, - ) in all_processed_files_with_messages.items(): - try: - file_version = files_versions[file_path] - except KeyError: - # mypy can resolve dependencies which are not in `files_to_lint` - # and as result also not in `files_versions` - file_version = await self.file_manager.get_file_version( - file_path + async with self.file_editor.session( + author=self.FILE_OPERATION_AUTHOR + ) as session: + for ( + file_path, + lint_messages, + ) in all_processed_files_with_messages.items(): + try: + file_version = files_versions[file_path] + except KeyError: + # mypy can resolve dependencies which are not in `files_to_lint` + # and as result also not in `files_versions` + file_version = await session.read_file_version( + file_path + ) + + await self.cache.save_file_cache( + file_path, file_version, self.CACHE_KEY, lint_messages ) - - await self.cache.save_file_cache( - file_path, file_version, self.CACHE_KEY, lint_messages - ) finally: project_checked_event.set() del self._projects_being_checked_done_events[project_path] @@ -188,7 +195,7 @@ async def _run_dmypy_on_project( async def run( self, payload: lint_action.LintRunPayload, - run_context: code_action.RunActionWithPartialResultsContext, + run_context: lint_action.LintRunContext, ) -> None: file_paths = [file_path async for file_path in payload] diff --git 
a/extensions/fine_python_mypy/fine_python_mypy/activator.py b/extensions/fine_python_mypy/fine_python_mypy/activator.py new file mode 100644 index 00000000..111a2b32 --- /dev/null +++ b/extensions/fine_python_mypy/fine_python_mypy/activator.py @@ -0,0 +1,13 @@ +from fine_python_mypy import ast_provider, iast_provider +from finecode_extension_api import extension +from finecode_extension_api.interfaces import iserviceregistry + + +class Activator(extension.ExtensionActivator): + def __init__(self, registry: iserviceregistry.IServiceRegistry) -> None: + self.registry = registry + + def activate(self) -> None: + self.registry.register_impl( + iast_provider.IMypySingleAstProvider, ast_provider.MypySingleAstProvider + ) diff --git a/extensions/fine_python_mypy/fine_python_mypy/ast_provider.py b/extensions/fine_python_mypy/fine_python_mypy/ast_provider.py index d81fcfcd..f5e8e5a4 100644 --- a/extensions/fine_python_mypy/fine_python_mypy/ast_provider.py +++ b/extensions/fine_python_mypy/fine_python_mypy/ast_provider.py @@ -7,20 +7,23 @@ import mypy.options as mypy_options from fine_python_mypy import iast_provider -from finecode_extension_api.interfaces import icache, ifilemanager, ilogger +from finecode_extension_api.interfaces import icache, ifileeditor, ilogger class MypySingleAstProvider(iast_provider.IMypySingleAstProvider): CACHE_KEY = "MypySingleAstProvider" + FILE_OPERATION_AUTHOR = ifileeditor.FileOperationAuthor( + id="MypySingleAstProvider" + ) def __init__( self, - file_manager: ifilemanager.IFileManager, + file_editor: ifileeditor.IFileEditor, cache: icache.ICache, logger: ilogger.ILogger, ): self.cache = cache - self.file_manager = file_manager + self.file_editor = file_editor self.logger = logger async def get_file_ast(self, file_path: Path) -> mypy_nodes.MypyFile: @@ -34,8 +37,13 @@ async def get_file_ast(self, file_path: Path) -> mypy_nodes.MypyFile: except icache.CacheMissException: ... 
- file_text: str = await self.file_manager.get_content(file_path) - file_version: str = await self.file_manager.get_file_version(file_path) + async with self.file_editor.session( + author=self.FILE_OPERATION_AUTHOR + ) as session: + async with session.read_file(file_path=file_path) as file_info: + file_text: str = file_info.content + file_version: str = file_info.version + base_dir = self.get_file_package_parent_dir_path(file_path) module_program_path = self.get_file_program_path( file_path=file_path, root_package_parent_dir_path=base_dir diff --git a/extensions/fine_python_mypy/pyproject.toml b/extensions/fine_python_mypy/pyproject.toml index 2f2e6ac4..da3a34b6 100644 --- a/extensions/fine_python_mypy/pyproject.toml +++ b/extensions/fine_python_mypy/pyproject.toml @@ -1,14 +1,17 @@ [project] name = "fine_python_mypy" -version = "0.2.0" +version = "0.3.0a1" description = "" authors = [{ name = "Vladyslav Hnatiuk", email = "aders1234@gmail.com" }] readme = "README.md" requires-python = ">=3.11, <= 3.14" -dependencies = ["finecode_extension_api==0.3.*", "mypy (>=1.15, <2.0)"] +dependencies = ["finecode_extension_api~=0.4.0a0", "mypy (>=1.15, <2.0)"] + +[project.entry-points."finecode.activator"] +fine_python_mypy = "fine_python_mypy.activator:Activator" [dependency-groups] -dev_workspace = ["finecode==0.3.*", "finecode_dev_common_preset==0.2.*"] +dev_workspace = ["finecode~=0.4.0a0", "finecode_dev_common_preset~=0.3.0a0"] [tool.finecode] presets = [{ source = "finecode_dev_common_preset" }] @@ -18,3 +21,8 @@ finecode_dev_common_preset = { path = "../../finecode_dev_common_preset", editab finecode = { path = "../../", editable = true } finecode_extension_runner = { path = "../../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../../finecode_builtin_handlers", editable = true } 
+fine_python_recommended = { path = "../../presets/fine_python_recommended", editable = true } +fine_python_lint = { path = "../../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../../presets/fine_python_format", editable = true } diff --git a/extensions/fine_python_package_info/.gitignore b/extensions/fine_python_package_info/.gitignore new file mode 100644 index 00000000..cbb2a25b --- /dev/null +++ b/extensions/fine_python_package_info/.gitignore @@ -0,0 +1,5 @@ +.venvs +build/ +*.egg-info/ +__pycache__ +finecode_config_dump/ diff --git a/extensions/fine_python_package_info/fine_python_package_info/__init__.py b/extensions/fine_python_package_info/fine_python_package_info/__init__.py index 5c467d28..30a04de0 100644 --- a/extensions/fine_python_package_info/fine_python_package_info/__init__.py +++ b/extensions/fine_python_package_info/fine_python_package_info/__init__.py @@ -1,4 +1,27 @@ -from .list_project_files_by_lang_python import ListProjectFilesByLangPythonHandler +from .build_artifact_py_handler import BuildArtifactPyHandler +from .get_dist_artifact_version_py_handler import \ + GetDistArtifactVersionPyHandler +from .get_src_artifact_registries_py_handler import \ + GetSrcArtifactRegistriesPyHandler +from .get_src_artifact_version_py_handler import GetSrcArtifactVersionPyHandler +from .group_src_artifact_files_by_lang_python import \ + GroupSrcArtifactFilesByLangPythonHandler +from .is_artifact_published_to_registry_py_handler import \ + IsArtifactPublishedToRegistryPyHandler +from .list_src_artifact_files_by_lang_python import \ + ListSrcArtifactFilesByLangPythonHandler +from .publish_artifact_to_registry_py_handler import \ + PublishArtifactToRegistryPyHandler from .py_package_layout_info_provider import PyPackageLayoutInfoProvider -__all__ = ["ListProjectFilesByLangPythonHandler", "PyPackageLayoutInfoProvider"] +__all__ = [ + "BuildArtifactPyHandler", + "GetDistArtifactVersionPyHandler", + 
"GroupSrcArtifactFilesByLangPythonHandler", + "ListSrcArtifactFilesByLangPythonHandler", + "PyPackageLayoutInfoProvider", + "GetSrcArtifactVersionPyHandler", + "GetSrcArtifactRegistriesPyHandler", + "PublishArtifactToRegistryPyHandler", + "IsArtifactPublishedToRegistryPyHandler", +] diff --git a/extensions/fine_python_package_info/fine_python_package_info/activator.py b/extensions/fine_python_package_info/fine_python_package_info/activator.py new file mode 100644 index 00000000..d9894e8b --- /dev/null +++ b/extensions/fine_python_package_info/fine_python_package_info/activator.py @@ -0,0 +1,22 @@ +from fine_python_package_info import ( + ipypackagelayoutinfoprovider, + py_package_layout_info_provider, + py_src_artifact_file_classifier, +) +from finecode_extension_api import extension +from finecode_extension_api.interfaces import iserviceregistry, isrcartifactfileclassifier + + +class Activator(extension.ExtensionActivator): + def __init__(self, registry: iserviceregistry.IServiceRegistry) -> None: + self.registry = registry + + def activate(self) -> None: + self.registry.register_impl( + ipypackagelayoutinfoprovider.IPyPackageLayoutInfoProvider, + py_package_layout_info_provider.PyPackageLayoutInfoProvider, + ) + self.registry.register_impl( + isrcartifactfileclassifier.ISrcArtifactFileClassifier, + py_src_artifact_file_classifier.PySrcArtifactFileClassifier, + ) diff --git a/extensions/fine_python_package_info/fine_python_package_info/build_artifact_py_handler.py b/extensions/fine_python_package_info/fine_python_package_info/build_artifact_py_handler.py new file mode 100644 index 00000000..872ee878 --- /dev/null +++ b/extensions/fine_python_package_info/fine_python_package_info/build_artifact_py_handler.py @@ -0,0 +1,96 @@ +import dataclasses + +from finecode_extension_api import code_action +from finecode_extension_api.actions import build_artifact_action +from finecode_extension_api.interfaces import ( + icommandrunner, + iextensionrunnerinfoprovider, + 
ilogger, + iprojectinfoprovider, +) + + +@dataclasses.dataclass +class BuildArtifactPyHandlerConfig(code_action.ActionHandlerConfig): ... + + +class BuildArtifactPyHandler( + code_action.ActionHandler[ + build_artifact_action.BuildArtifactAction, + BuildArtifactPyHandlerConfig, + ] +): + def __init__( + self, + config: BuildArtifactPyHandlerConfig, + command_runner: icommandrunner.ICommandRunner, + project_info_provider: iprojectinfoprovider.IProjectInfoProvider, + extension_runner_info_provider: iextensionrunnerinfoprovider.IExtensionRunnerInfoProvider, + logger: ilogger.ILogger, + ) -> None: + self.config = config + self.command_runner = command_runner + self.project_info_provider = project_info_provider + self.extension_runner_info_provider = extension_runner_info_provider + self.logger = logger + + async def run( + self, + payload: build_artifact_action.BuildArtifactRunPayload, + run_context: build_artifact_action.BuildArtifactRunContext, + ) -> build_artifact_action.BuildArtifactRunResult: + # Use current project if src_artifact_def_path is not provided + src_artifact_def_path = payload.src_artifact_def_path + if src_artifact_def_path is None: + src_artifact_def_path = ( + self.project_info_provider.get_current_project_def_path() + ) + + # Get the project directory (parent of pyproject.toml) + project_dir = src_artifact_def_path.parent + + self.logger.info(f"Building artifact in {project_dir}") + + # Get the python interpreter from the current venv + venv_dir = self.extension_runner_info_provider.get_current_venv_dir_path() + python_path = self.extension_runner_info_provider.get_venv_python_interpreter( + venv_dir + ) + + # Run python -m build + process = await self.command_runner.run( + cmd=f"{python_path} -m build", + cwd=project_dir, + ) + await process.wait_for_end() + + exit_code = process.get_exit_code() + if exit_code != 0: + error_output = process.get_error_output() + raise code_action.ActionFailedException( + f"Build failed with exit code {exit_code}: 
{error_output}" + ) + + # Parse the build output to get the produced file names + # Example line: "Successfully built pkg-1.0.tar.gz and pkg-1.0-py3-none-any.whl" + dist_dir = project_dir / "dist" + build_output_paths = [] + + output = process.get_output() + for line in output.splitlines(): + if line.startswith("Successfully built "): + files_part = line[len("Successfully built ") :] + file_names = [f.strip() for f in files_part.split(" and ")] + build_output_paths = [dist_dir / name for name in file_names] + break + + if not build_output_paths: + # Fallback: return the dist directory if parsing failed + build_output_paths = [dist_dir] + + self.logger.info(f"Build completed. Output: {build_output_paths}") + + return build_artifact_action.BuildArtifactRunResult( + src_artifact_def_path=src_artifact_def_path, + build_output_paths=build_output_paths, + ) diff --git a/extensions/fine_python_package_info/fine_python_package_info/get_dist_artifact_version_py_handler.py b/extensions/fine_python_package_info/fine_python_package_info/get_dist_artifact_version_py_handler.py new file mode 100644 index 00000000..a78ecad0 --- /dev/null +++ b/extensions/fine_python_package_info/fine_python_package_info/get_dist_artifact_version_py_handler.py @@ -0,0 +1,60 @@ +import dataclasses + +from finecode_extension_api import code_action +from finecode_extension_api.actions import \ + get_dist_artifact_version as get_dist_artifact_version_action +from finecode_extension_api.interfaces import ilogger + + +@dataclasses.dataclass +class GetDistArtifactVersionPyHandlerConfig(code_action.ActionHandlerConfig): ... 
class GetDistArtifactVersionPyHandler(
    code_action.ActionHandler[
        get_dist_artifact_version_action.GetDistArtifactVersionAction,
        GetDistArtifactVersionPyHandlerConfig,
    ]
):
    """Extracts the package version from a built distribution's filename."""

    def __init__(
        self,
        config: GetDistArtifactVersionPyHandlerConfig,
        logger: ilogger.ILogger,
    ) -> None:
        self.config = config
        self.logger = logger

    async def run(
        self,
        payload: get_dist_artifact_version_action.GetDistArtifactVersionRunPayload,
        run_context: get_dist_artifact_version_action.GetDistArtifactVersionRunContext,
    ) -> get_dist_artifact_version_action.GetDistArtifactVersionRunResult:
        """Return the version encoded in the dist artifact's filename.

        Raises ActionFailedException when no version can be extracted.
        """
        filename = payload.dist_artifact_path.name
        version = self._extract_version_from_filename(filename)

        if version is None:
            # Fixed: the message previously did not interpolate the filename,
            # making the error useless for diagnosing which file failed.
            raise code_action.ActionFailedException(
                f"Could not extract version from dist filename: {filename}"
            )

        return get_dist_artifact_version_action.GetDistArtifactVersionRunResult(
            version=version
        )

    def _extract_version_from_filename(self, filename: str) -> str | None:
        """Parse the version out of a wheel/sdist filename, or None if unknown."""
        if filename.endswith(".whl"):
            # Wheel (PEP 427): name-version(-build)-python-abi-platform.whl.
            # The name component is normalized (no hyphens), so the version is
            # always the second '-'-separated field.
            parts = filename[:-4].split("-")
            if len(parts) >= 5:
                return parts[1]
        elif filename.endswith(".tar.gz"):
            # Source dist: name-version.tar.gz. Legacy (pre-PEP 625) names may
            # themselves contain hyphens, but the version is always the last
            # '-'-separated field — so split from the right, not the left.
            stem = filename[:-7]
            if "-" in stem:
                return stem.rsplit("-", 1)[1]
        elif filename.endswith(".zip"):
            # Legacy source dist: name-version.zip — same rule as .tar.gz.
            stem = filename[:-4]
            if "-" in stem:
                return stem.rsplit("-", 1)[1]
        return None
@dataclasses.dataclass
class GetSrcArtifactRegistriesPyHandlerConfig(code_action.ActionHandlerConfig): ...


class GetSrcArtifactRegistriesPyHandler(
    code_action.ActionHandler[
        get_src_artifact_registries_action.GetSrcArtifactRegistriesAction,
        GetSrcArtifactRegistriesPyHandlerConfig,
    ]
):
    """Lists artifact registries known to the repository credentials provider."""

    def __init__(
        self,
        config: GetSrcArtifactRegistriesPyHandlerConfig,
        repository_credentials_provider: irepositorycredentialsprovider.IRepositoryCredentialsProvider,
        logger: ilogger.ILogger,
    ) -> None:
        self.config = config
        self.repository_credentials_provider = repository_credentials_provider
        self.logger = logger

    async def run(
        self,
        payload: get_src_artifact_registries_action.GetSrcArtifactRegistriesRunPayload,
        run_context: get_src_artifact_registries_action.GetSrcArtifactRegistriesRunContext,
    ) -> get_src_artifact_registries_action.GetSrcArtifactRegistriesRunResult:
        """Map every configured repository 1:1 to a Registry entry."""
        registries = []
        for repository in self.repository_credentials_provider.get_all_repositories():
            registries.append(
                get_src_artifact_registries_action.Registry(
                    url=repository.url, name=repository.name
                )
            )

        return get_src_artifact_registries_action.GetSrcArtifactRegistriesRunResult(
            registries=registries
        )
@dataclasses.dataclass
class GetSrcArtifactVersionPyHandlerConfig(code_action.ActionHandlerConfig): ...


class GetSrcArtifactVersionPyHandler(
    code_action.ActionHandler[
        get_src_artifact_version_action.GetSrcArtifactVersionAction,
        GetSrcArtifactVersionPyHandlerConfig,
    ]
):
    """Reads ``project.version`` from a source artifact definition file."""

    def __init__(
        self,
        config: GetSrcArtifactVersionPyHandlerConfig,
        project_info_provider: iprojectinfoprovider.IProjectInfoProvider,
        logger: ilogger.ILogger,
    ) -> None:
        self.config = config
        self.project_info_provider = project_info_provider
        self.logger = logger

    async def run(
        self,
        payload: get_src_artifact_version_action.GetSrcArtifactVersionRunPayload,
        run_context: get_src_artifact_version_action.GetSrcArtifactVersionRunContext,
    ) -> get_src_artifact_version_action.GetSrcArtifactVersionRunResult:
        """Return the statically declared version.

        Raises ActionFailedException when the version is missing, declared as
        dynamic, or not a string.
        """
        raw_def = await self.project_info_provider.get_project_raw_config(
            project_def_path=payload.src_artifact_def_path
        )
        project_table = raw_def.get("project", {})
        version = project_table.get("version", None)

        if version is None:
            # Distinguish "declared dynamic" from "simply absent" in the error.
            if "version" in project_table.get("dynamic", []):
                raise code_action.ActionFailedException(
                    f"Version is dynamic in {payload.src_artifact_def_path}, use the right handler for that"
                )
            raise code_action.ActionFailedException(
                f"Version not found in {payload.src_artifact_def_path}"
            )

        if not isinstance(version, str):
            raise code_action.ActionFailedException(
                f"project.version in {payload.src_artifact_def_path} expected to be a string, but is {type(version)}"
            )

        return get_src_artifact_version_action.GetSrcArtifactVersionRunResult(
            version=version
        )
@dataclasses.dataclass
class GroupSrcArtifactFilesByLangPythonHandlerConfig(code_action.ActionHandlerConfig):
    # List of paths, relative to the project directory, with additional Python
    # sources when they are not in one of the default locations.
    additional_dirs: list[pathlib.Path] | None = None


class GroupSrcArtifactFilesByLangPythonHandler(
    code_action.ActionHandler[
        group_src_artifact_files_by_lang_action.GroupSrcArtifactFilesByLangAction,
        GroupSrcArtifactFilesByLangPythonHandlerConfig,
    ]
):
    """Collects the project's Python files and groups them under "python".

    Scanned locations: the package source root (resolved via the layout info
    provider), ``scripts/``, ``tests/``, ``setup.py``, plus any configured
    additional directories.
    """

    def __init__(
        self,
        config: GroupSrcArtifactFilesByLangPythonHandlerConfig,
        project_info_provider: iprojectinfoprovider.IProjectInfoProvider,
        py_package_layout_info_provider: ipypackagelayoutinfoprovider.IPyPackageLayoutInfoProvider,
        logger: ilogger.ILogger,
    ) -> None:
        self.config = config
        self.project_info_provider = project_info_provider
        self.py_package_layout_info_provider = py_package_layout_info_provider
        self.logger = logger

        # Well-known locations, resolved once per handler instance.
        self.current_project_dir_path = (
            self.project_info_provider.get_current_project_dir_path()
        )
        self.tests_dir_path = self.current_project_dir_path / "tests"
        self.scripts_dir_path = self.current_project_dir_path / "scripts"
        self.setup_py_path = self.current_project_dir_path / "setup.py"

    async def run(
        self,
        payload: group_src_artifact_files_by_lang_action.GroupSrcArtifactFilesByLangRunPayload,
        run_context: group_src_artifact_files_by_lang_action.GroupSrcArtifactFilesByLangRunContext,
    ) -> group_src_artifact_files_by_lang_action.GroupSrcArtifactFilesByLangRunResult:
        # TODO
        src_root_dir_path = (
            await self.py_package_layout_info_provider.get_package_src_root_dir_path(
                package_dir_path=self.current_project_dir_path
            )
        )
        python_files: list[pathlib.Path] = list(src_root_dir_path.rglob("*.py"))

        # Optional well-known locations — include only when present on disk.
        for optional_dir in (self.scripts_dir_path, self.tests_dir_path):
            if optional_dir.exists():
                python_files.extend(optional_dir.rglob("*.py"))

        if self.setup_py_path.exists():
            python_files.append(self.setup_py_path)

        # Extra directories from the handler config, relative to the project dir.
        for extra_dir in self.config.additional_dirs or []:
            extra_dir_absolute = self.current_project_dir_path / extra_dir
            if not extra_dir_absolute.exists():
                self.logger.warning(
                    f"Skip {extra_dir} because {extra_dir_absolute} doesn't exist"
                )
                continue
            python_files.extend(extra_dir_absolute.rglob("*.py"))

        return group_src_artifact_files_by_lang_action.GroupSrcArtifactFilesByLangRunResult(
            files_by_lang={"python": python_files}
        )
@dataclasses.dataclass
class IsArtifactPublishedToRegistryPyHandlerConfig(code_action.ActionHandlerConfig): ...


class IsArtifactPublishedToRegistryPyHandler(
    code_action.ActionHandler[
        is_artifact_published_to_registry_action.IsArtifactPublishedToRegistryAction,
        IsArtifactPublishedToRegistryPyHandlerConfig,
    ]
):
    """Checks whether the given dist artifacts are already published to a registry.

    The package name is read from the source artifact definition, the registry
    URL is resolved via the `get_src_artifact_registries` action, and
    publication state is queried through the registry's Simple API (JSON
    variant, PEP 691).
    """

    def __init__(
        self,
        config: IsArtifactPublishedToRegistryPyHandlerConfig,
        action_runner: iactionrunner.IActionRunner,
        project_info_provider: iprojectinfoprovider.IProjectInfoProvider,
        logger: ilogger.ILogger,
        http_client: ihttpclient.IHttpClient,
    ) -> None:
        self.config = config
        self.action_runner = action_runner
        self.project_info_provider = project_info_provider
        self.logger = logger
        self.http_client = http_client

    async def run(
        self,
        payload: is_artifact_published_to_registry_action.IsArtifactPublishedToRegistryRunPayload,
        run_context: is_artifact_published_to_registry_action.IsArtifactPublishedToRegistryRunContext,
    ) -> is_artifact_published_to_registry_action.IsArtifactPublishedToRegistryRunResult:
        """Return, per dist artifact path, whether it is already published.

        Raises ActionFailedException on config/lookup errors, network errors,
        or unexpected registry responses.
        """
        import re  # local import: only needed for PEP 503 name normalization

        run_meta = run_context.meta

        # Get package name from project config
        src_artifact_raw_def = await self.project_info_provider.get_project_raw_config(
            project_def_path=payload.src_artifact_def_path
        )
        package_name = src_artifact_raw_def.get("project", {}).get("name", None)

        if package_name is None:
            raise code_action.ActionFailedException(
                f"Package name not found in {payload.src_artifact_def_path}"
            )

        if not isinstance(package_name, str):
            raise code_action.ActionFailedException(
                f"project.name in {payload.src_artifact_def_path} expected to be a string, but is {type(package_name)}"
            )

        # Normalize the package name per PEP 503: collapse runs of '-', '_' and
        # '.' into a single '-' and lowercase. (Fix: previously only '_' was
        # replaced, so mixed-case or dotted names produced wrong Simple API
        # URLs and spurious 404s for published packages.)
        package_name = re.sub(r"[-_.]+", "-", package_name).lower()

        # Resolve configured registries via the dedicated action.
        get_registries_action = self.action_runner.get_action_by_name(
            "get_src_artifact_registries",
            get_src_artifact_registries_action.GetSrcArtifactRegistriesAction,
        )
        registries_payload = (
            get_src_artifact_registries_action.GetSrcArtifactRegistriesRunPayload(
                src_artifact_def_path=payload.src_artifact_def_path
            )
        )
        registries_result = await self.action_runner.run_action(
            action=get_registries_action, payload=registries_payload, meta=run_meta
        )

        # Find the registry by name
        registry_url = None
        for registry in registries_result.registries:
            if registry.name == payload.registry_name:
                registry_url = registry.url
                break

        if registry_url is None:
            raise code_action.ActionFailedException(
                f"Registry '{payload.registry_name}' not found in configuration"
            )

        # Check if package version exists using PyPI Simple API
        check_url = f"{registry_url.rstrip('/')}/simple/{package_name}/"

        self.logger.debug(
            f"Checking if {package_name} {payload.version} is published to {payload.registry_name} at {check_url}"
        )

        try:
            async with self.http_client.session() as session:
                response = await session.get(
                    check_url,
                    headers={"Accept": "application/vnd.pypi.simple.v1+json"},
                    timeout=10.0,
                )
        except Exception as exception:
            raise code_action.ActionFailedException(
                f"Error checking publication status: {str(exception)}"
            ) from exception

        if response.status_code == 404:
            # Package does not exist in the registry yet
            is_published_by_dist_path = {
                dist_path: False for dist_path in payload.dist_artifact_paths
            }
            return is_artifact_published_to_registry_action.IsArtifactPublishedToRegistryRunResult(
                is_published_by_dist_path=is_published_by_dist_path
            )

        response_json = response.json()
        version_list = response_json.get("versions", None)
        if version_list is None:
            raise code_action.ActionFailedException(
                "No 'versions' key in response from registry"
            )

        if not isinstance(version_list, list):
            raise code_action.ActionFailedException(
                "'versions' key in response from registry expected to be a list"
            )

        version_is_published = payload.version in version_list
        dist_artifact_paths = payload.dist_artifact_paths
        if version_is_published:
            # Version exists — check file-by-file which artifacts are present.
            try:
                published_files_objs = response_json["files"]
            except KeyError as exception:
                raise code_action.ActionFailedException(
                    "'files' key is missing in response from registry"
                ) from exception

            if not isinstance(published_files_objs, list):
                raise code_action.ActionFailedException(
                    "'files' key in response from registry expected to be a list"
                )

            try:
                published_file_names = [
                    file_obj["filename"] for file_obj in published_files_objs
                ]
            except KeyError as exception:
                raise code_action.ActionFailedException(
                    "File object has no 'filename' key"
                ) from exception

            is_published_by_dist_path = {
                dist_path: dist_path.name in published_file_names
                for dist_path in dist_artifact_paths
            }
        else:
            is_published_by_dist_path = {
                dist_path: False for dist_path in dist_artifact_paths
            }

        return is_artifact_published_to_registry_action.IsArtifactPublishedToRegistryRunResult(
            is_published_by_dist_path=is_published_by_dist_path
        )
extensions/fine_python_package_info/fine_python_package_info/list_src_artifact_files_by_lang_python.py index 5fccb749..fde35069 100644 --- a/extensions/fine_python_package_info/fine_python_package_info/list_project_files_by_lang_python.py +++ b/extensions/fine_python_package_info/fine_python_package_info/list_src_artifact_files_by_lang_python.py @@ -1,6 +1,5 @@ from finecode_extension_api.interfaces import ( iprojectinfoprovider, - ipypackagelayoutinfoprovider, ilogger, ) import dataclasses @@ -8,26 +7,28 @@ from finecode_extension_api import code_action from finecode_extension_api.actions import ( - list_project_files_by_lang as list_project_files_by_lang_action, + list_src_artifact_files_by_lang as list_src_artifact_files_by_lang_action, ) +from fine_python_package_info import ipypackagelayoutinfoprovider + @dataclasses.dataclass -class ListProjectFilesByLangPythonHandlerConfig(code_action.ActionHandlerConfig): +class ListSrcArtifactFilesByLangPythonHandlerConfig(code_action.ActionHandlerConfig): # list of relative pathes relative to project directory with additional python # sources if they are not in one of default pathes additional_dirs: list[pathlib.Path] | None = None -class ListProjectFilesByLangPythonHandler( +class ListSrcArtifactFilesByLangPythonHandler( code_action.ActionHandler[ - list_project_files_by_lang_action.ListProjectFilesByLangAction, - ListProjectFilesByLangPythonHandlerConfig, + list_src_artifact_files_by_lang_action.ListSrcArtifactFilesByLangAction, + ListSrcArtifactFilesByLangPythonHandlerConfig, ] ): def __init__( self, - config: ListProjectFilesByLangPythonHandlerConfig, + config: ListSrcArtifactFilesByLangPythonHandlerConfig, project_info_provider: iprojectinfoprovider.IProjectInfoProvider, py_package_layout_info_provider: ipypackagelayoutinfoprovider.IPyPackageLayoutInfoProvider, logger: ilogger.ILogger, @@ -46,9 +47,9 @@ def __init__( async def run( self, - payload: list_project_files_by_lang_action.ListProjectFilesByLangRunPayload, - 
@dataclasses.dataclass
class PublishArtifactToRegistryPyHandlerConfig(code_action.ActionHandlerConfig):
    # Forwarded to twine: print detailed upload output.
    verbose: bool = False


class PublishArtifactToRegistryPyHandler(
    code_action.ActionHandler[
        publish_artifact_to_registry_action.PublishArtifactToRegistryAction,
        PublishArtifactToRegistryPyHandlerConfig,
    ]
):
    """Uploads dist artifacts to a configured registry using twine."""

    def __init__(
        self,
        config: PublishArtifactToRegistryPyHandlerConfig,
        command_runner: icommandrunner.ICommandRunner,
        logger: ilogger.ILogger,
        repository_credentials_provider: irepositorycredentialsprovider.IRepositoryCredentialsProvider,
    ) -> None:
        self.config = config
        # NOTE(review): command_runner is injected but unused in this handler —
        # confirm whether it can be dropped from the DI signature.
        self.command_runner = command_runner
        self.logger = logger
        self.repository_credentials_provider = repository_credentials_provider

    async def run(
        self,
        payload: publish_artifact_to_registry_action.PublishArtifactToRegistryRunPayload,
        run_context: publish_artifact_to_registry_action.PublishArtifactToRegistryRunContext,
    ) -> publish_artifact_to_registry_action.PublishArtifactToRegistryRunResult:
        """Upload payload.dist_artifact_paths to payload.registry_name.

        Raises ActionFailedException when the registry is unknown or the
        upload fails.
        """
        # Resolve the registry and derive twine's legacy upload endpoint.
        repository = self.repository_credentials_provider.get_repository(
            payload.registry_name
        )
        if repository is None:
            raise code_action.ActionFailedException(
                f"Registry '{payload.registry_name}' not found in repository provider"
            )
        upload_url = f"{repository.url.rstrip('/')}/legacy/"

        # Credentials are optional; twine runs non-interactively either way.
        credentials = self.repository_credentials_provider.get_credentials(
            payload.registry_name
        )
        if credentials is not None:
            username, password = credentials.username, credentials.password
        else:
            username = password = None

        upload_settings = twine_settings.Settings(
            repository_url=upload_url,
            # force=True means "re-upload even if already present".
            skip_existing=not payload.force,
            non_interactive=True,
            verbose=self.config.verbose,
            username=username,
            password=password,
        )

        dist_artifact_paths = payload.dist_artifact_paths
        self.logger.info(
            f"Publishing {dist_artifact_paths} to {payload.registry_name}..."
        )

        # twine's upload API is blocking — run it off the event loop thread.
        try:
            await asyncio.to_thread(
                twine_upload.upload,
                upload_settings,
                [path.as_posix() for path in dist_artifact_paths],
            )
        except requests.HTTPError as e:
            status_code = e.response.status_code if e.response is not None else None
            response_body = e.response.text if e.response is not None else None
            raise code_action.ActionFailedException(
                f"Failed to upload package: {str(e)}\nStatus code: {status_code}\nResponse body: {response_body}"
            ) from e
        except Exception as e:
            raise code_action.ActionFailedException(
                f"Failed to upload package: {str(e)}"
            ) from e

        self.logger.info(
            f"Successfully published {dist_artifact_paths} to {payload.registry_name}"
        )

        return publish_artifact_to_registry_action.PublishArtifactToRegistryRunResult()
ifileeditor.IFileEditor, cache: icache.ICache ) -> None: - self.file_manager = file_manager + self.file_editor = file_editor self.cache = cache async def _get_package_name(self, package_dir_path: pathlib.Path) -> str: @@ -43,12 +46,13 @@ async def _get_package_name(self, package_dir_path: pathlib.Path) -> str: except icache.CacheMissException: ... - package_def_file_content = await self.file_manager.get_content( - file_path=package_def_file - ) - package_def_file_version = await self.file_manager.get_file_version( - file_path=package_def_file - ) + async with self.file_editor.session( + author=self.FILE_OPERATION_AUTHOR + ) as session: + async with session.read_file(file_path=package_def_file) as file_info: + package_def_file_content: str = file_info.content + package_def_file_version: str = file_info.version + try: package_def_dict = tomlkit.loads(package_def_file_content) except tomlkit.exceptions.ParseError as exception: @@ -82,7 +86,7 @@ async def get_package_layout( except ConfigParseError as exception: raise ipypackagelayoutinfoprovider.FailedToGetPackageLayout( exception.message - ) + ) from exception if (package_dir_path / package_name).exists(): return ipypackagelayoutinfoprovider.PyPackageLayout.FLAT diff --git a/finecode_extension_runner/src/finecode_extension_runner/impls/project_file_classifier.py b/extensions/fine_python_package_info/fine_python_package_info/py_src_artifact_file_classifier.py similarity index 66% rename from finecode_extension_runner/src/finecode_extension_runner/impls/project_file_classifier.py rename to extensions/fine_python_package_info/fine_python_package_info/py_src_artifact_file_classifier.py index 1477c7b9..5dc26f1f 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/impls/project_file_classifier.py +++ b/extensions/fine_python_package_info/fine_python_package_info/py_src_artifact_file_classifier.py @@ -1,16 +1,13 @@ import pathlib -from finecode_extension_api.interfaces import ( - iprojectfileclassifier, - 
iprojectinfoprovider, - ipypackagelayoutinfoprovider, -) +from fine_python_package_info import ipypackagelayoutinfoprovider from finecode_extension_api import service +from finecode_extension_api.interfaces import (iprojectinfoprovider, + isrcartifactfileclassifier) -# TODO: it should be package file classifier? -class ProjectFileClassifier( - iprojectfileclassifier.IProjectFileClassifier, service.Service +class PySrcArtifactFileClassifier( + isrcartifactfileclassifier.ISrcArtifactFileClassifier, service.Service ): # requirements: # - all project sources should be in a single directory @@ -26,10 +23,10 @@ def __init__( ) -> None: self.project_info_provider = project_info_provider self.py_package_layout_info_provider = py_package_layout_info_provider - # ProjectFileClassifier is instantiated as singletone, cache can be stored in + # PySrcArtifactFileClassifier is instantiated as singletone, cache can be stored in # object self._file_type_by_path: dict[ - pathlib.Path, iprojectfileclassifier.ProjectFileType + pathlib.Path, isrcartifactfileclassifier.SrcArtifactFileType ] = {} self.project_src_dir_path: pathlib.Path @@ -43,14 +40,15 @@ async def init(self) -> None: package_dir_path=project_dir_path ) ) + # TODO: move to layout provider? 
self.project_tests_dir_path: pathlib.Path = project_dir_path / "tests" - def get_project_file_type( + def get_src_artifact_file_type( self, file_path: pathlib.Path - ) -> iprojectfileclassifier.ProjectFileType: + ) -> isrcartifactfileclassifier.SrcArtifactFileType: if self.project_src_dir_path is None: raise NotImplementedError( - f"{self.project_layout} project layout is not supported" + f"Layout of python package {self.project_info_provider.get_current_project_dir_path()} is not supported" ) if file_path in self._file_type_by_path: @@ -65,15 +63,15 @@ def get_project_file_type( "__tests__" in file_path_relative_to_project.parts or "tests" in file_path_relative_to_project.parts ): - file_type = iprojectfileclassifier.ProjectFileType.TEST + file_type = isrcartifactfileclassifier.SrcArtifactFileType.TEST else: - file_type = iprojectfileclassifier.ProjectFileType.SOURCE + file_type = isrcartifactfileclassifier.SrcArtifactFileType.SOURCE else: # not source, check whether test if file_path.is_relative_to(self.project_tests_dir_path): - file_type = iprojectfileclassifier.ProjectFileType.TEST + file_type = isrcartifactfileclassifier.SrcArtifactFileType.TEST else: - file_type = iprojectfileclassifier.ProjectFileType.UNKNOWN + file_type = isrcartifactfileclassifier.SrcArtifactFileType.UNKNOWN # cache self._file_type_by_path[file_path] = file_type @@ -81,12 +79,12 @@ def get_project_file_type( return file_type def get_env_for_file_type( - self, file_type: iprojectfileclassifier.ProjectFileType + self, file_type: isrcartifactfileclassifier.SrcArtifactFileType ) -> str: match file_type: - case iprojectfileclassifier.ProjectFileType.SOURCE: + case isrcartifactfileclassifier.SrcArtifactFileType.SOURCE: return "runtime" - case iprojectfileclassifier.ProjectFileType.TEST: + case isrcartifactfileclassifier.SrcArtifactFileType.TEST: # TODO: dynamic. 
In future test tool can be installed in any env, we # need a way to define it in config and get it here # TODO: there can be also e2e tests that don't use runtime and are in @@ -94,5 +92,5 @@ def get_env_for_file_type( return "dev" case _: raise NotImplementedError( - f"Project file type {file_type} is not supported by ProjectFileClassifier" + f"Source artifact file type {file_type} is not supported by PySrcArtifactFileClassifier" ) diff --git a/extensions/fine_python_package_info/pyproject.toml b/extensions/fine_python_package_info/pyproject.toml index 92aab9f4..07551276 100644 --- a/extensions/fine_python_package_info/pyproject.toml +++ b/extensions/fine_python_package_info/pyproject.toml @@ -1,14 +1,22 @@ [project] name = "fine_python_package_info" -version = "0.1.0" +version = "0.2.0a1" description = "" authors = [{ name = "Vladyslav Hnatiuk", email = "aders1234@gmail.com" }] readme = "README.md" requires-python = ">=3.11, <= 3.14" -dependencies = ["finecode_extension_api==0.3.*", "tomlkit==0.11.*"] +dependencies = [ + "finecode_extension_api~=0.4.0a0", + "tomlkit==0.11.*", + "twine==6.2.*", + "build==1.4.*", +] + +[project.entry-points."finecode.activator"] +fine_python_package_info = "fine_python_package_info.activator:Activator" [dependency-groups] -dev_workspace = ["finecode==0.3.*", "finecode_dev_common_preset==0.2.*"] +dev_workspace = ["finecode~=0.4.0a0", "finecode_dev_common_preset~=0.3.0a0"] [tool.finecode] presets = [{ source = "finecode_dev_common_preset" }] @@ -18,3 +26,8 @@ finecode_dev_common_preset = { path = "../../finecode_dev_common_preset", editab finecode = { path = "../../", editable = true } finecode_extension_runner = { path = "../../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../../finecode_builtin_handlers", editable = true } 
+fine_python_recommended = { path = "../../presets/fine_python_recommended", editable = true } +fine_python_lint = { path = "../../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../../presets/fine_python_format", editable = true } diff --git a/extensions/fine_python_pip/.gitignore b/extensions/fine_python_pip/.gitignore index d1daceee..757f6537 100644 --- a/extensions/fine_python_pip/.gitignore +++ b/extensions/fine_python_pip/.gitignore @@ -1,4 +1,4 @@ .venvs -src/*.egg-info +*.egg-info/ __pycache__ finecode_config_dump/ diff --git a/extensions/fine_python_pip/pyproject.toml b/extensions/fine_python_pip/pyproject.toml index db39e9a7..83dbd3cf 100644 --- a/extensions/fine_python_pip/pyproject.toml +++ b/extensions/fine_python_pip/pyproject.toml @@ -1,14 +1,14 @@ [project] name = "fine_python_pip" -version = "0.1.1" +version = "0.2.0a1" description = "" authors = [{ name = "Vladyslav Hnatiuk", email = "aders1234@gmail.com" }] readme = "README.md" requires-python = ">=3.11, <= 3.14" -dependencies = ["finecode_extension_api==0.3.*"] +dependencies = ["finecode_extension_api~=0.4.0a0"] [dependency-groups] -dev_workspace = ["finecode==0.3.*", "finecode_dev_common_preset==0.2.*"] +dev_workspace = ["finecode~=0.4.0a0", "finecode_dev_common_preset~=0.3.0a0"] [tool.finecode] presets = [{ source = "finecode_dev_common_preset" }] @@ -18,3 +18,8 @@ finecode_dev_common_preset = { path = "../../finecode_dev_common_preset", editab finecode = { path = "../../", editable = true } finecode_extension_runner = { path = "../../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../../finecode_builtin_handlers", editable = true } +fine_python_recommended = { path = "../../presets/fine_python_recommended", editable = true } +fine_python_lint = { path = 
"../../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../../presets/fine_python_format", editable = true } diff --git a/extensions/fine_python_pyrefly/.gitignore b/extensions/fine_python_pyrefly/.gitignore new file mode 100644 index 00000000..cbb2a25b --- /dev/null +++ b/extensions/fine_python_pyrefly/.gitignore @@ -0,0 +1,5 @@ +.venvs +build/ +*.egg-info/ +__pycache__ +finecode_config_dump/ diff --git a/extensions/fine_python_pyrefly/fine_python_pyrefly/__init__.py b/extensions/fine_python_pyrefly/fine_python_pyrefly/__init__.py index 2b0ee587..91e19d4d 100644 --- a/extensions/fine_python_pyrefly/fine_python_pyrefly/__init__.py +++ b/extensions/fine_python_pyrefly/fine_python_pyrefly/__init__.py @@ -1,6 +1,8 @@ -from .lint_handler import PyreflyLintHandler, PyreflyLintHandlerConfig +from .lint_files_handler import PyreflyLintFilesHandler, PyreflyLintFilesHandlerConfig +from .pyrefly_lsp_service import PyreflyLspService __all__ = [ - "PyreflyLintHandler", - "PyreflyLintHandlerConfig", + "PyreflyLintFilesHandler", + "PyreflyLintFilesHandlerConfig", + "PyreflyLspService", ] diff --git a/extensions/fine_python_pyrefly/fine_python_pyrefly/activator.py b/extensions/fine_python_pyrefly/fine_python_pyrefly/activator.py new file mode 100644 index 00000000..b1a7d799 --- /dev/null +++ b/extensions/fine_python_pyrefly/fine_python_pyrefly/activator.py @@ -0,0 +1,15 @@ +from finecode_extension_api import extension +from finecode_extension_api.interfaces import iserviceregistry + +from fine_python_pyrefly.pyrefly_lsp_service import PyreflyLspService + + +class Activator(extension.ExtensionActivator): + def __init__(self, registry: iserviceregistry.IServiceRegistry) -> None: + self.registry = registry + + def activate(self) -> None: + self.registry.register_impl( + PyreflyLspService, + PyreflyLspService, + ) diff --git a/extensions/fine_python_pyrefly/fine_python_pyrefly/lint_handler.py 
b/extensions/fine_python_pyrefly/fine_python_pyrefly/lint_files_handler.py similarity index 55% rename from extensions/fine_python_pyrefly/fine_python_pyrefly/lint_handler.py rename to extensions/fine_python_pyrefly/fine_python_pyrefly/lint_files_handler.py index e996f7b1..29560afc 100644 --- a/extensions/fine_python_pyrefly/fine_python_pyrefly/lint_handler.py +++ b/extensions/fine_python_pyrefly/fine_python_pyrefly/lint_files_handler.py @@ -6,24 +6,29 @@ from pathlib import Path from finecode_extension_api import code_action -from finecode_extension_api.actions import lint as lint_action +from finecode_extension_api.actions import lint_files as lint_files_action from finecode_extension_api.interfaces import ( icache, icommandrunner, ilogger, - ifilemanager, - iprojectfileclassifier, + ifileeditor, + iprojectinfoprovider, + isrcartifactfileclassifier, iextensionrunnerinfoprovider, ) +from fine_python_pyrefly.pyrefly_lsp_service import PyreflyLspService @dataclasses.dataclass -class PyreflyLintHandlerConfig(code_action.ActionHandlerConfig): +class PyreflyLintFilesHandlerConfig(code_action.ActionHandlerConfig): python_version: str | None = None + use_cli: bool = False -class PyreflyLintHandler( - code_action.ActionHandler[lint_action.LintAction, PyreflyLintHandlerConfig] +class PyreflyLintFilesHandler( + code_action.ActionHandler[ + lint_files_action.LintFilesAction, PyreflyLintFilesHandlerConfig + ] ): """ NOTE: pyrefly currently can check only saved files, not file content provided by @@ -32,51 +37,87 @@ class PyreflyLintHandler( """ CACHE_KEY = "PyreflyLinter" + FILE_OPERATION_AUTHOR = ifileeditor.FileOperationAuthor( + id="PyreflyLinter" + ) def __init__( self, - config: PyreflyLintHandlerConfig, + config: PyreflyLintFilesHandlerConfig, cache: icache.ICache, logger: ilogger.ILogger, - file_manager: ifilemanager.IFileManager, + file_editor: ifileeditor.IFileEditor, command_runner: icommandrunner.ICommandRunner, - project_file_classifier: 
iprojectfileclassifier.IProjectFileClassifier, + src_artifact_file_classifier: isrcartifactfileclassifier.ISrcArtifactFileClassifier, extension_runner_info_provider: iextensionrunnerinfoprovider.IExtensionRunnerInfoProvider, + project_info_provider: iprojectinfoprovider.IProjectInfoProvider, + lsp_service: PyreflyLspService, ) -> None: self.config = config self.cache = cache self.logger = logger - self.file_manager = file_manager + self.file_editor = file_editor self.command_runner = command_runner - self.project_file_classifier = project_file_classifier + self.src_artifact_file_classifier = src_artifact_file_classifier self.extension_runner_info_provider = extension_runner_info_provider + self.project_info_provider: iprojectinfoprovider.IProjectInfoProvider = project_info_provider + self.lsp_service: PyreflyLspService = lsp_service self.pyrefly_bin_path = Path(sys.executable).parent / "pyrefly" - async def run_on_single_file(self, file_path: Path) -> lint_action.LintRunResult: + if not self.config.use_cli: + # Pyrefly uses pull-based config: the LSP server sends + # workspace/configuration requests with section="python", + # expecting responses like [{"pyrefly": {"displayTypeErrors": ...}}]. + # The same format is used for initializationOptions. 
+ venv_dir = self.extension_runner_info_provider.get_venv_dir_path_of_env("runtime") + interpreter_path = self.extension_runner_info_provider.get_venv_python_interpreter(venv_dir) + site_packages = self.extension_runner_info_provider.get_venv_site_packages(venv_dir) + self.lsp_service.update_settings({ + "pythonPath": str(interpreter_path), + "pyrefly": { + "displayTypeErrors": "force-on", + "extraPaths": [str(p) for p in site_packages], + }, + }) + + async def run_on_single_file( + self, file_path: Path + ) -> lint_files_action.LintFilesRunResult: messages = {} try: cached_lint_messages = await self.cache.get_file_cache( file_path, self.CACHE_KEY ) messages[str(file_path)] = cached_lint_messages - return lint_action.LintRunResult(messages=messages) + return lint_files_action.LintFilesRunResult(messages=messages) except icache.CacheMissException: pass + + async with self.file_editor.session( + author=self.FILE_OPERATION_AUTHOR + ) as session: + file_version = await session.read_file_version(file_path) + + if self.config.use_cli: + lint_messages = await self.run_pyrefly_lint_on_single_file(file_path) + else: + root_uri = self.project_info_provider.get_current_project_dir_path().as_uri() + await self.lsp_service.ensure_started(root_uri) + + lint_messages = await self.lsp_service.check_file(file_path) - file_version = await self.file_manager.get_file_version(file_path) - lint_messages = await self.run_pyrefly_lint_on_single_file(file_path) messages[str(file_path)] = lint_messages await self.cache.save_file_cache( file_path, file_version, self.CACHE_KEY, lint_messages ) - return lint_action.LintRunResult(messages=messages) + return lint_files_action.LintFilesRunResult(messages=messages) async def run( self, - payload: lint_action.LintRunPayload, - run_context: code_action.RunActionWithPartialResultsContext, + payload: lint_files_action.LintFilesRunPayload, + run_context: lint_files_action.LintFilesRunContext, ) -> None: file_paths = [file_path async for file_path in 
payload] @@ -89,16 +130,16 @@ async def run( async def run_pyrefly_lint_on_single_file( self, file_path: Path, - ) -> list[lint_action.LintMessage]: + ) -> list[lint_files_action.LintMessage]: """Run pyrefly type checking on a single file""" - lint_messages: list[lint_action.LintMessage] = [] + lint_messages: list[lint_files_action.LintMessage] = [] try: - # project file classifier caches result, we can just get it each time again - file_type = self.project_file_classifier.get_project_file_type( + # src artifact file classifier caches result, we can just get it each time again + file_type = self.src_artifact_file_classifier.get_src_artifact_file_type( file_path=file_path ) - file_env = self.project_file_classifier.get_env_for_file_type( + file_env = self.src_artifact_file_classifier.get_env_for_file_type( file_type=file_type ) except NotImplementedError: @@ -122,7 +163,7 @@ async def run_pyrefly_lint_on_single_file( ) # --skip-interpreter-query isn't used because it is not compatible - # with --python-interpreter parameter + # with --python-interpreter-path parameter # --disable-search-path-heuristics=true isn't used because pyrefly doesn't # recognize some imports without it. For example, it cannot resolve relative # imports in root __init__.py . 
Needs to be investigated @@ -132,7 +173,7 @@ async def run_pyrefly_lint_on_single_file( "--output-format=json", # path to python interpreter because pyrefly resolves .pth files only if # it is provided - f"--python-interpreter='{str(interpreter_path)}'", + f"--python-interpreter-path='{str(interpreter_path)}'", ] if self.config.python_version is not None: @@ -153,15 +194,15 @@ async def run_pyrefly_lint_on_single_file( for error in pyrefly_results["errors"]: lint_message = map_pyrefly_error_to_lint_message(error) lint_messages.append(lint_message) - except json.JSONDecodeError: + except json.JSONDecodeError as exception: raise code_action.ActionFailedException( f"Output of pyrefly is not json: {output}" - ) + ) from exception return lint_messages -def map_pyrefly_error_to_lint_message(error: dict) -> lint_action.LintMessage: +def map_pyrefly_error_to_lint_message(error: dict) -> lint_files_action.LintMessage: """Map a pyrefly error to a lint message""" # Extract line/column info (pyrefly uses 1-based indexing) start_line = error["line"] @@ -172,12 +213,12 @@ def map_pyrefly_error_to_lint_message(error: dict) -> lint_action.LintMessage: # Determine severity based on error type error_code = str(error.get("code", "")) code_description = error.get("name", "") - severity = lint_action.LintMessageSeverity.ERROR + severity = lint_files_action.LintMessageSeverity.ERROR - return lint_action.LintMessage( - range=lint_action.Range( - start=lint_action.Position(line=start_line, character=start_column), - end=lint_action.Position(line=end_line, character=end_column), + return lint_files_action.LintMessage( + range=lint_files_action.Range( + start=lint_files_action.Position(line=start_line, character=start_column), + end=lint_files_action.Position(line=end_line, character=end_column), ), message=error.get("description", ""), code=error_code, diff --git a/extensions/fine_python_pyrefly/fine_python_pyrefly/pyrefly_lsp_service.py 
b/extensions/fine_python_pyrefly/fine_python_pyrefly/pyrefly_lsp_service.py new file mode 100644 index 00000000..ea9cef96 --- /dev/null +++ b/extensions/fine_python_pyrefly/fine_python_pyrefly/pyrefly_lsp_service.py @@ -0,0 +1,54 @@ +from __future__ import annotations + +import sys +from pathlib import Path +from typing import override + +from finecode_extension_api import service +from finecode_extension_api.actions import lint_files as lint_files_action +from finecode_extension_api.interfaces import ifileeditor, ilspclient, ilogger +from finecode_extension_api.contrib.lsp_service import LspService, map_diagnostics_to_lint_messages + + +class PyreflyLspService(service.DisposableService): + """Pyrefly LSP service — thin wrapper around generic LspService.""" + + def __init__( + self, + lsp_client: ilspclient.ILspClient, + file_editor: ifileeditor.IFileEditor, + logger: ilogger.ILogger, + ) -> None: + pyrefly_bin = Path(sys.executable).parent / "pyrefly" + self._lsp_service = LspService( + lsp_client=lsp_client, + file_editor=file_editor, + logger=logger, + cmd=f"{pyrefly_bin} lsp", + language_id="python", + readable_id="pyrefly-lsp", + ) + + @override + async def init(self) -> None: + await self._lsp_service.init() + + @override + def dispose(self) -> None: + self._lsp_service.dispose() + + def update_settings(self, settings: dict[str, object]) -> None: + self._lsp_service.update_settings(settings) + + async def ensure_started(self, root_uri: str) -> None: + await self._lsp_service.ensure_started(root_uri) + + async def check_file( + self, + file_path: Path, + timeout: float = 30.0, + ) -> list[lint_files_action.LintMessage]: + raw_diagnostics = await self._lsp_service.check_file(file_path, timeout) + return map_diagnostics_to_lint_messages( + raw_diagnostics, default_source="pyrefly" + ) diff --git a/extensions/fine_python_pyrefly/pyproject.toml b/extensions/fine_python_pyrefly/pyproject.toml index 311a799c..33a1d7e1 100644 --- 
a/extensions/fine_python_pyrefly/pyproject.toml +++ b/extensions/fine_python_pyrefly/pyproject.toml @@ -1,14 +1,20 @@ [project] name = "fine_python_pyrefly" -version = "0.1.0" +version = "0.2.0a1" description = "" authors = [{ name = "Vladyslav Hnatiuk", email = "aders1234@gmail.com" }] readme = "README.md" requires-python = ">=3.11, <= 3.14" -dependencies = ["finecode_extension_api==0.3.*", "pyrefly (>=0.30.0,<1.0.0)"] +dependencies = ["finecode_extension_api~=0.4.0a0", "pyrefly (>=0.30.0,<1.0.0)"] + +[project.optional-dependencies] +jsonrpc = ["finecode_jsonrpc~=0.1.0a0"] + +[project.entry-points."finecode.activator"] +fine_python_pyrefly = "fine_python_pyrefly.activator:Activator" [dependency-groups] -dev_workspace = ["finecode==0.3.*", "finecode_dev_common_preset==0.2.*"] +dev_workspace = ["finecode~=0.4.0a0", "finecode_dev_common_preset~=0.3.0a0"] [tool.finecode] presets = [{ source = "finecode_dev_common_preset" }] @@ -18,3 +24,8 @@ finecode_dev_common_preset = { path = "../../finecode_dev_common_preset", editab finecode = { path = "../../", editable = true } finecode_extension_runner = { path = "../../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../../finecode_builtin_handlers", editable = true } +fine_python_recommended = { path = "../../presets/fine_python_recommended", editable = true } +fine_python_lint = { path = "../../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../../presets/fine_python_format", editable = true } diff --git a/extensions/fine_python_ruff/.gitignore b/extensions/fine_python_ruff/.gitignore new file mode 100644 index 00000000..022858b7 --- /dev/null +++ b/extensions/fine_python_ruff/.gitignore @@ -0,0 +1,5 @@ +.venvs +build/ +fine_python_ruff/*.egg-info/ +__pycache__ +finecode_config_dump/ diff --git 
a/extensions/fine_python_ruff/fine_python_ruff/__init__.py b/extensions/fine_python_ruff/fine_python_ruff/__init__.py index 3e774657..c07966f2 100644 --- a/extensions/fine_python_ruff/fine_python_ruff/__init__.py +++ b/extensions/fine_python_ruff/fine_python_ruff/__init__.py @@ -1,9 +1,9 @@ -from .format_handler import RuffFormatHandler, RuffFormatHandlerConfig -from .lint_handler import RuffLintHandler, RuffLintHandlerConfig +from .format_files_handler import RuffFormatFilesHandler, RuffFormatFilesHandlerConfig +from .lint_files_handler import RuffLintFilesHandler, RuffLintFilesHandlerConfig __all__ = [ - "RuffFormatHandler", - "RuffFormatHandlerConfig", - "RuffLintHandler", - "RuffLintHandlerConfig", + "RuffFormatFilesHandler", + "RuffFormatFilesHandlerConfig", + "RuffLintFilesHandler", + "RuffLintFilesHandlerConfig", ] diff --git a/extensions/fine_python_ruff/fine_python_ruff/activator.py b/extensions/fine_python_ruff/fine_python_ruff/activator.py new file mode 100644 index 00000000..f790df2d --- /dev/null +++ b/extensions/fine_python_ruff/fine_python_ruff/activator.py @@ -0,0 +1,15 @@ +from finecode_extension_api import extension +from finecode_extension_api.interfaces import iserviceregistry + +from fine_python_ruff.ruff_lsp_service import RuffLspService + + +class Activator(extension.ExtensionActivator): + def __init__(self, registry: iserviceregistry.IServiceRegistry) -> None: + self.registry = registry + + def activate(self) -> None: + self.registry.register_impl( + RuffLspService, + RuffLspService, + ) diff --git a/extensions/fine_python_ruff/fine_python_ruff/format_handler.py b/extensions/fine_python_ruff/fine_python_ruff/format_files_handler.py similarity index 71% rename from extensions/fine_python_ruff/fine_python_ruff/format_handler.py rename to extensions/fine_python_ruff/fine_python_ruff/format_files_handler.py index ca3f0e86..82ecd485 100644 --- a/extensions/fine_python_ruff/fine_python_ruff/format_handler.py +++ 
b/extensions/fine_python_ruff/fine_python_ruff/format_files_handler.py @@ -12,7 +12,7 @@ from typing import override from finecode_extension_api import code_action -from finecode_extension_api.actions import format as format_action +from finecode_extension_api.actions import format_files as format_files_action from finecode_extension_api.interfaces import ( icache, icommandrunner, @@ -22,7 +22,7 @@ @dataclasses.dataclass -class RuffFormatHandlerConfig(code_action.ActionHandlerConfig): +class RuffFormatFilesHandlerConfig(code_action.ActionHandlerConfig): line_length: int = 88 indent_width: int = 4 quote_style: str = "double" # "double" or "single" @@ -30,14 +30,14 @@ class RuffFormatHandlerConfig(code_action.ActionHandlerConfig): preview: bool = False -class RuffFormatHandler( - code_action.ActionHandler[format_action.FormatAction, RuffFormatHandlerConfig] +class RuffFormatFilesHandler( + code_action.ActionHandler[ + format_files_action.FormatFilesAction, RuffFormatFilesHandlerConfig + ] ): - CACHE_KEY = "RuffFormatter" - def __init__( self, - config: RuffFormatHandlerConfig, + config: RuffFormatFilesHandlerConfig, extension_runner_info_provider: iextensionrunnerinfoprovider.IExtensionRunnerInfoProvider, logger: ilogger.ILogger, cache: icache.ICache, @@ -54,40 +54,29 @@ def __init__( @override async def run( self, - payload: format_action.FormatRunPayload, - run_context: format_action.FormatRunContext, - ) -> format_action.FormatRunResult: - result_by_file_path: dict[Path, format_action.FormatRunFileResult] = {} + payload: format_files_action.FormatFilesRunPayload, + run_context: format_files_action.FormatFilesRunContext, + ) -> format_files_action.FormatFilesRunResult: + result_by_file_path: dict[Path, format_files_action.FormatRunFileResult] = {} for file_path in payload.file_paths: file_content, file_version = run_context.file_info_by_path[file_path] - try: - new_file_content = await self.cache.get_file_cache( - file_path, self.CACHE_KEY - ) - 
result_by_file_path[file_path] = format_action.FormatRunFileResult( - changed=False, code=new_file_content - ) - continue - except icache.CacheMissException: - pass new_file_content, file_changed = await self.format_one( file_path, file_content ) # save for next handlers - run_context.file_info_by_path[file_path] = format_action.FileInfo( + run_context.file_info_by_path[file_path] = format_files_action.FileInfo( new_file_content, file_version ) - await self.cache.save_file_cache( - file_path, file_version, self.CACHE_KEY, new_file_content - ) - result_by_file_path[file_path] = format_action.FormatRunFileResult( + result_by_file_path[file_path] = format_files_action.FormatRunFileResult( changed=file_changed, code=new_file_content ) - return format_action.FormatRunResult(result_by_file_path=result_by_file_path) + return format_files_action.FormatFilesRunResult( + result_by_file_path=result_by_file_path + ) async def format_one(self, file_path: Path, file_content: str) -> tuple[str, bool]: """Format a single file using ruff format""" diff --git a/extensions/fine_python_ruff/fine_python_ruff/lint_handler.py b/extensions/fine_python_ruff/fine_python_ruff/lint_files_handler.py similarity index 55% rename from extensions/fine_python_ruff/fine_python_ruff/lint_handler.py rename to extensions/fine_python_ruff/fine_python_ruff/lint_files_handler.py index 41c1706f..e984982a 100644 --- a/extensions/fine_python_ruff/fine_python_ruff/lint_handler.py +++ b/extensions/fine_python_ruff/fine_python_ruff/lint_files_handler.py @@ -6,71 +6,112 @@ from pathlib import Path from finecode_extension_api import code_action -from finecode_extension_api.actions import lint as lint_action +from finecode_extension_api.actions import lint_files as lint_files_action from finecode_extension_api.interfaces import ( icache, icommandrunner, ilogger, - ifilemanager, + ifileeditor, + iprojectinfoprovider, ) +from fine_python_ruff.ruff_lsp_service import RuffLspService @dataclasses.dataclass -class 
RuffLintHandlerConfig(code_action.ActionHandlerConfig): +class RuffLintFilesHandlerConfig(code_action.ActionHandlerConfig): line_length: int = 88 target_version: str = "py38" select: list[str] | None = None # Rules to enable ignore: list[str] | None = None # Rules to disable extend_select: list[str] | None = None preview: bool = False + use_cli: bool = False -class RuffLintHandler( - code_action.ActionHandler[lint_action.LintAction, RuffLintHandlerConfig] +class RuffLintFilesHandler( + code_action.ActionHandler[ + lint_files_action.LintFilesAction, RuffLintFilesHandlerConfig + ] ): CACHE_KEY = "RuffLinter" + FILE_OPERATION_AUTHOR = ifileeditor.FileOperationAuthor(id="RuffLinterAstProvider") def __init__( self, - config: RuffLintHandlerConfig, + config: RuffLintFilesHandlerConfig, cache: icache.ICache, logger: ilogger.ILogger, - file_manager: ifilemanager.IFileManager, + file_editor: ifileeditor.IFileEditor, command_runner: icommandrunner.ICommandRunner, + project_info_provider: iprojectinfoprovider.IProjectInfoProvider, + lsp_service: RuffLspService, ) -> None: self.config = config self.cache = cache self.logger = logger - self.file_manager = file_manager + self.file_editor = file_editor self.command_runner = command_runner + self.project_info_provider: iprojectinfoprovider.IProjectInfoProvider = project_info_provider + self.lsp_service: RuffLspService = lsp_service self.ruff_bin_path = Path(sys.executable).parent / "ruff" - async def run_on_single_file(self, file_path: Path) -> lint_action.LintRunResult: + if not self.config.use_cli: + # reference: https://docs.astral.sh/ruff/editors/settings/ + lint_settings: dict[str, object] = {"enable": True} + if self.config.select is not None: + lint_settings["select"] = self.config.select + if self.config.extend_select is not None: + lint_settings["extendSelect"] = self.config.extend_select + if self.config.ignore is not None: + lint_settings["ignore"] = self.config.ignore + if self.config.preview: + 
lint_settings["preview"] = True + self.lsp_service.update_settings({ + "lint": lint_settings, + "showSyntaxErrors": True, + "lineLength": self.config.line_length, + "targetVersion": self.config.target_version, + }) + + async def run_on_single_file( + self, file_path: Path + ) -> lint_files_action.LintFilesRunResult: messages = {} try: cached_lint_messages = await self.cache.get_file_cache( file_path, self.CACHE_KEY ) messages[str(file_path)] = cached_lint_messages - return lint_action.LintRunResult(messages=messages) + return lint_files_action.LintFilesRunResult(messages=messages) except icache.CacheMissException: pass - file_version = await self.file_manager.get_file_version(file_path) - file_content = await self.file_manager.get_content(file_path) - lint_messages = await self.run_ruff_lint_on_single_file(file_path, file_content) + async with self.file_editor.session( + author=self.FILE_OPERATION_AUTHOR + ) as session: + async with session.read_file(file_path=file_path) as file_info: + file_content: str = file_info.content + file_version: str = file_info.version + + if self.config.use_cli: + lint_messages = await self.run_ruff_lint_on_single_file(file_path, file_content) + else: + root_uri = self.project_info_provider.get_current_project_dir_path().as_uri() + await self.lsp_service.ensure_started(root_uri) + + lint_messages = await self.lsp_service.check_file(file_path) messages[str(file_path)] = lint_messages await self.cache.save_file_cache( file_path, file_version, self.CACHE_KEY, lint_messages ) - return lint_action.LintRunResult(messages=messages) + return lint_files_action.LintFilesRunResult(messages=messages) async def run( self, - payload: lint_action.LintRunPayload, - run_context: code_action.RunActionWithPartialResultsContext, + payload: lint_files_action.LintFilesRunPayload, + run_context: lint_files_action.LintFilesRunContext, ) -> None: file_paths = [file_path async for file_path in payload] @@ -84,9 +125,9 @@ async def run_ruff_lint_on_single_file( 
self, file_path: Path, file_content: str, - ) -> list[lint_action.LintMessage]: + ) -> list[lint_files_action.LintMessage]: """Run ruff linting on a single file""" - lint_messages: list[lint_action.LintMessage] = [] + lint_messages: list[lint_files_action.LintMessage] = [] # Build ruff check command cmd = [ @@ -135,7 +176,9 @@ async def run_ruff_lint_on_single_file( return lint_messages -def map_ruff_violation_to_lint_message(violation: dict) -> lint_action.LintMessage: +def map_ruff_violation_to_lint_message( + violation: dict, +) -> lint_files_action.LintMessage: """Map a ruff violation to a lint message""" location = violation.get("location", {}) end_location = violation.get("end_location", {}) @@ -150,16 +193,16 @@ def map_ruff_violation_to_lint_message(violation: dict) -> lint_action.LintMessa code = violation.get("code", "") code_description = violation.get("url", "") if code.startswith(("E", "F")): # Error codes - severity = lint_action.LintMessageSeverity.ERROR + severity = lint_files_action.LintMessageSeverity.ERROR elif code.startswith("W"): # Warning codes - severity = lint_action.LintMessageSeverity.WARNING + severity = lint_files_action.LintMessageSeverity.WARNING else: - severity = lint_action.LintMessageSeverity.INFO + severity = lint_files_action.LintMessageSeverity.INFO - return lint_action.LintMessage( - range=lint_action.Range( - start=lint_action.Position(line=start_line, character=start_column), - end=lint_action.Position(line=end_line, character=end_column), + return lint_files_action.LintMessage( + range=lint_files_action.Range( + start=lint_files_action.Position(line=start_line, character=start_column), + end=lint_files_action.Position(line=end_line, character=end_column), ), message=violation.get("message", ""), code=code, diff --git a/extensions/fine_python_ruff/fine_python_ruff/ruff_lsp_service.py b/extensions/fine_python_ruff/fine_python_ruff/ruff_lsp_service.py new file mode 100644 index 00000000..c4d69a80 --- /dev/null +++ 
b/extensions/fine_python_ruff/fine_python_ruff/ruff_lsp_service.py @@ -0,0 +1,54 @@ +from __future__ import annotations + +import sys +from pathlib import Path +from typing import override + +from finecode_extension_api import service +from finecode_extension_api.actions import lint_files as lint_files_action +from finecode_extension_api.interfaces import ifileeditor, ilspclient, ilogger +from finecode_extension_api.contrib.lsp_service import LspService, map_diagnostics_to_lint_messages + + +class RuffLspService(service.DisposableService): + """Ruff LSP service — thin wrapper around generic LspService.""" + + def __init__( + self, + lsp_client: ilspclient.ILspClient, + file_editor: ifileeditor.IFileEditor, + logger: ilogger.ILogger, + ) -> None: + ruff_bin = Path(sys.executable).parent / "ruff" + self._lsp_service = LspService( + lsp_client=lsp_client, + file_editor=file_editor, + logger=logger, + cmd=f"{ruff_bin} server", + language_id="python", + readable_id="ruff-lsp", + ) + + @override + async def init(self) -> None: + await self._lsp_service.init() + + @override + def dispose(self) -> None: + self._lsp_service.dispose() + + def update_settings(self, settings: dict[str, object]) -> None: + self._lsp_service.update_settings(settings) + + async def ensure_started(self, root_uri: str) -> None: + await self._lsp_service.ensure_started(root_uri) + + async def check_file( + self, + file_path: Path, + timeout: float = 30.0, + ) -> list[lint_files_action.LintMessage]: + raw_diagnostics = await self._lsp_service.check_file(file_path, timeout) + return map_diagnostics_to_lint_messages( + raw_diagnostics, default_source="ruff" + ) diff --git a/extensions/fine_python_ruff/pyproject.toml b/extensions/fine_python_ruff/pyproject.toml index 8ab81162..be63222e 100644 --- a/extensions/fine_python_ruff/pyproject.toml +++ b/extensions/fine_python_ruff/pyproject.toml @@ -1,14 +1,20 @@ [project] name = "fine_python_ruff" -version = "0.1.0" +version = "0.2.0a1" description = "" 
authors = [{ name = "Vladyslav Hnatiuk", email = "aders1234@gmail.com" }] readme = "README.md" requires-python = ">=3.11, <= 3.14" -dependencies = ["finecode_extension_api==0.3.*", "ruff (>=0.8.0,<1.0.0)"] +dependencies = ["finecode_extension_api~=0.4.0a0", "ruff (>=0.8.0,<1.0.0)"] + +[project.optional-dependencies] +jsonrpc = ["finecode_jsonrpc~=0.1.0a0"] + +[project.entry-points."finecode.activator"] +fine_python_ruff = "fine_python_ruff.activator:Activator" [dependency-groups] -dev_workspace = ["finecode==0.3.*", "finecode_dev_common_preset==0.2.*"] +dev_workspace = ["finecode~=0.4.0a0", "finecode_dev_common_preset~=0.3.0a0"] [tool.finecode] presets = [{ source = "finecode_dev_common_preset" }] @@ -18,3 +24,8 @@ finecode_dev_common_preset = { path = "../../finecode_dev_common_preset", editab finecode = { path = "../../", editable = true } finecode_extension_runner = { path = "../../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../../finecode_builtin_handlers", editable = true } +fine_python_recommended = { path = "../../presets/fine_python_recommended", editable = true } +fine_python_lint = { path = "../../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../../presets/fine_python_format", editable = true } diff --git a/extensions/fine_python_setuptools_scm/.gitignore b/extensions/fine_python_setuptools_scm/.gitignore new file mode 100644 index 00000000..cbb2a25b --- /dev/null +++ b/extensions/fine_python_setuptools_scm/.gitignore @@ -0,0 +1,5 @@ +.venvs +build/ +*.egg-info/ +__pycache__ +finecode_config_dump/ diff --git a/extensions/fine_python_setuptools_scm/fine_python_setuptools_scm/__init__.py b/extensions/fine_python_setuptools_scm/fine_python_setuptools_scm/__init__.py new file mode 100644 index 00000000..b5f0ad52 --- /dev/null +++ 
b/extensions/fine_python_setuptools_scm/fine_python_setuptools_scm/__init__.py @@ -0,0 +1,9 @@ +from .get_src_artifact_version_setuptools_scm_handler import ( + GetSrcArtifactVersionSetuptoolsScmHandler, + GetSrcArtifactVersionSetuptoolsScmHandlerConfig, +) + +__all__ = [ + "GetSrcArtifactVersionSetuptoolsScmHandler", + "GetSrcArtifactVersionSetuptoolsScmHandlerConfig", +] diff --git a/extensions/fine_python_setuptools_scm/fine_python_setuptools_scm/get_src_artifact_version_setuptools_scm_handler.py b/extensions/fine_python_setuptools_scm/fine_python_setuptools_scm/get_src_artifact_version_setuptools_scm_handler.py new file mode 100644 index 00000000..971a1bcb --- /dev/null +++ b/extensions/fine_python_setuptools_scm/fine_python_setuptools_scm/get_src_artifact_version_setuptools_scm_handler.py @@ -0,0 +1,85 @@ +import dataclasses + +from setuptools_scm import Configuration +from setuptools_scm._get_version_impl import _get_version + +from finecode_extension_api import code_action +from finecode_extension_api.actions import ( + get_src_artifact_version as get_src_artifact_version_action, +) +from finecode_extension_api.interfaces import iprojectinfoprovider, ilogger + + +@dataclasses.dataclass +class GetSrcArtifactVersionSetuptoolsScmHandlerConfig( + code_action.ActionHandlerConfig +): ... 
+ + +class GetSrcArtifactVersionSetuptoolsScmHandler( + code_action.ActionHandler[ + get_src_artifact_version_action.GetSrcArtifactVersionAction, + GetSrcArtifactVersionSetuptoolsScmHandlerConfig, + ] +): + def __init__( + self, + config: GetSrcArtifactVersionSetuptoolsScmHandlerConfig, + project_info_provider: iprojectinfoprovider.IProjectInfoProvider, + logger: ilogger.ILogger + ) -> None: + self.config = config + self.project_info_provider = project_info_provider + self.logger = logger + + async def run( + self, + payload: get_src_artifact_version_action.GetSrcArtifactVersionRunPayload, + run_context: get_src_artifact_version_action.GetSrcArtifactVersionRunContext, + ) -> get_src_artifact_version_action.GetSrcArtifactVersionRunResult: + src_artifact_def_path = payload.src_artifact_def_path + + src_artifact_raw_def = ( + await self.project_info_provider.get_project_raw_config( + project_def_path=src_artifact_def_path + ) + ) + + # Check that version is dynamic + dynamic_fields = src_artifact_raw_def.get("project", {}).get("dynamic", []) + if "version" not in dynamic_fields: + raise code_action.ActionFailedException( + f"Version is not dynamic in {src_artifact_def_path}, " + "this handler only supports dynamic versions via setuptools_scm" + ) + + # from setuptools_scm._cli:main + pyproject = src_artifact_def_path.as_posix() + + try: + # could be optimized by providing config from project_info_provider instead + # of reading file each time + config = Configuration.from_file( + pyproject, + root=None + ) + except (LookupError, FileNotFoundError) as ex: + # no pyproject.toml OR no [tool.setuptools_scm] + self.logger.warning( + f"Warning: could not use {pyproject}," + " using default configuration.\n" + f" Reason: {ex}." 
+ ) + config = Configuration(root=src_artifact_def_path.parent.as_posix()) + + version = _get_version( + config + ) + if version is None: + raise code_action.ActionFailedException("ERROR: no version found") + + # from setuptools_scm._cli:main end + + return get_src_artifact_version_action.GetSrcArtifactVersionRunResult( + version=version + ) diff --git a/extensions/fine_python_setuptools_scm/fine_python_setuptools_scm/py.typed b/extensions/fine_python_setuptools_scm/fine_python_setuptools_scm/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/extensions/fine_python_setuptools_scm/pyproject.toml b/extensions/fine_python_setuptools_scm/pyproject.toml new file mode 100644 index 00000000..32c21709 --- /dev/null +++ b/extensions/fine_python_setuptools_scm/pyproject.toml @@ -0,0 +1,25 @@ +[project] +name = "fine_python_setuptools_scm" +version = "0.1.0a0" +description = "" +authors = [{ name = "Vladyslav Hnatiuk", email = "aders1234@gmail.com" }] +readme = "README.md" +requires-python = ">=3.11, <= 3.14" +dependencies = ["finecode_extension_api~=0.4.0a0", "setuptools-scm>=8"] + +[dependency-groups] +dev_workspace = ["finecode~=0.4.0a0", "finecode_dev_common_preset~=0.3.0a0"] + +[tool.finecode] +presets = [{ source = "finecode_dev_common_preset" }] + +[tool.finecode.env.dev_workspace.dependencies] +finecode_dev_common_preset = { path = "../../finecode_dev_common_preset", editable = true } +finecode = { path = "../../", editable = true } +finecode_extension_runner = { path = "../../finecode_extension_runner", editable = true } +finecode_extension_api = { path = "../../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../../finecode_builtin_handlers", editable = true } +fine_python_recommended = { path = "../../presets/fine_python_recommended", editable = true } +fine_python_lint = { path = "../../presets/fine_python_lint", editable = true } 
+fine_python_format = { path = "../../presets/fine_python_format", editable = true } diff --git a/extensions/fine_python_setuptools_scm/setup.py b/extensions/fine_python_setuptools_scm/setup.py new file mode 100644 index 00000000..7d5a61a9 --- /dev/null +++ b/extensions/fine_python_setuptools_scm/setup.py @@ -0,0 +1,67 @@ +import atexit +import shutil +import sys +import tempfile + +from setuptools import setup +from setuptools.command.build import build +from setuptools.command.build_ext import build_ext +from setuptools.command.build_py import build_py +from setuptools.command.egg_info import egg_info + +# Create a single temp directory for all build operations +_TEMP_BUILD_DIR = None + + +def get_temp_build_dir(pkg_name): + global _TEMP_BUILD_DIR + if _TEMP_BUILD_DIR is None: + _TEMP_BUILD_DIR = tempfile.mkdtemp(prefix=f"{pkg_name}_build_") + atexit.register(lambda: shutil.rmtree(_TEMP_BUILD_DIR, ignore_errors=True)) + return _TEMP_BUILD_DIR + + +class TempDirBuildMixin: + def initialize_options(self): + super().initialize_options() + temp_dir = get_temp_build_dir(self.distribution.get_name()) + self.build_base = temp_dir + + +class TempDirEggInfoMixin: + def initialize_options(self): + super().initialize_options() + temp_dir = get_temp_build_dir(self.distribution.get_name()) + self.egg_base = temp_dir + + +class CustomBuild(TempDirBuildMixin, build): + pass + + +class CustomBuildPy(TempDirBuildMixin, build_py): + pass + + +class CustomBuildExt(TempDirBuildMixin, build_ext): + pass + + +class CustomEggInfo(TempDirEggInfoMixin, egg_info): + def initialize_options(self): + # Don't use temp dir for editable installs + if "--editable" in sys.argv or "-e" in sys.argv: + egg_info.initialize_options(self) + else: + super().initialize_options() + + +setup( + name="fine_python_setuptools_scm", + cmdclass={ + "build": CustomBuild, + "build_py": CustomBuildPy, + "build_ext": CustomBuildExt, + "egg_info": CustomEggInfo, + }, +) diff --git 
a/extensions/fine_python_virtualenv/.gitignore b/extensions/fine_python_virtualenv/.gitignore index 250a6a24..cbb2a25b 100644 --- a/extensions/fine_python_virtualenv/.gitignore +++ b/extensions/fine_python_virtualenv/.gitignore @@ -1,5 +1,5 @@ .venvs build/ -src/*.egg-info/ +*.egg-info/ __pycache__ finecode_config_dump/ diff --git a/extensions/fine_python_virtualenv/pyproject.toml b/extensions/fine_python_virtualenv/pyproject.toml index 96e47811..24406b27 100644 --- a/extensions/fine_python_virtualenv/pyproject.toml +++ b/extensions/fine_python_virtualenv/pyproject.toml @@ -1,17 +1,17 @@ [project] name = "fine_python_virtualenv" -version = "0.1.0" +version = "0.2.0a1" description = "" authors = [{ name = "Vladyslav Hnatiuk", email = "aders1234@gmail.com" }] readme = "README.md" requires-python = ">=3.11, <= 3.14" dependencies = [ - "finecode_extension_api==0.3.*", + "finecode_extension_api~=0.4.0a0", "virtualenv (>=20.0.0,<21.0.0)", ] [dependency-groups] -dev_workspace = ["finecode==0.3.*", "finecode_dev_common_preset==0.2.*"] +dev_workspace = ["finecode~=0.4.0a0", "finecode_dev_common_preset~=0.3.0a0"] [tool.finecode] presets = [{ source = "finecode_dev_common_preset" }] @@ -21,3 +21,8 @@ finecode_dev_common_preset = { path = "../../finecode_dev_common_preset", editab finecode = { path = "../../", editable = true } finecode_extension_runner = { path = "../../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../../finecode_builtin_handlers", editable = true } +fine_python_recommended = { path = "../../presets/fine_python_recommended", editable = true } +fine_python_lint = { path = "../../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../../presets/fine_python_format", editable = true } diff --git a/finecode_builtin_handlers/pyproject.toml 
b/finecode_builtin_handlers/pyproject.toml index 2edaed52..f112084f 100644 --- a/finecode_builtin_handlers/pyproject.toml +++ b/finecode_builtin_handlers/pyproject.toml @@ -1,20 +1,25 @@ [project] name = "finecode_builtin_handlers" -version = "0.1.0" +version = "0.2.0a1" description = "FineCode built-in handlers" authors = [{ name = "Vladyslav Hnatiuk", email = "aders1234@gmail.com" }] readme = "README.md" requires-python = ">=3.11, <= 3.14" -dependencies = ["finecode_extension_api==0.3.*", "tomlkit==0.11.*"] +dependencies = ["finecode_extension_api~=0.4.0a0", "tomlkit==0.11.*"] [dependency-groups] -dev_workspace = ["finecode==0.3.*", "finecode_dev_common_preset==0.2.*"] +dev_workspace = ["finecode~=0.4.0a0", "finecode_dev_common_preset~=0.3.0a0"] [tool.finecode.env.dev_workspace.dependencies] finecode_dev_common_preset = { path = "../finecode_dev_common_preset", editable = true } finecode = { path = "../", editable = true } finecode_extension_runner = { path = "../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../finecode_builtin_handlers", editable = true } +fine_python_recommended = { path = "../presets/fine_python_recommended", editable = true } +fine_python_lint = { path = "../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../presets/fine_python_format", editable = true } [tool.finecode] presets = [{ source = "finecode_dev_common_preset" }] diff --git a/finecode_builtin_handlers/src/finecode_builtin_handlers/__init__.py b/finecode_builtin_handlers/src/finecode_builtin_handlers/__init__.py index d98bd89e..8262ba04 100644 --- a/finecode_builtin_handlers/src/finecode_builtin_handlers/__init__.py +++ b/finecode_builtin_handlers/src/finecode_builtin_handlers/__init__.py @@ -1,19 +1,31 @@ """FineCode Built-in handlers.""" +from .clean_finecode_logs 
import CleanFinecodeLogsHandler from .dump_config import DumpConfigHandler from .dump_config_save import DumpConfigSaveHandler +from .format import FormatHandler +from .format_files_save_handler import SaveFormatFilesHandler +from .init_repository_provider import InitRepositoryProviderHandler +from .lint import LintHandler from .prepare_envs_install_deps import PrepareEnvsInstallDepsHandler from .prepare_envs_read_configs import PrepareEnvsReadConfigsHandler from .prepare_runners_install_runner_and_presets import ( PrepareRunnersInstallRunnerAndPresetsHandler, ) from .prepare_runners_read_configs import PrepareRunnersReadConfigsHandler +from .publish_artifact import PublishArtifactHandler __all__ = [ + "CleanFinecodeLogsHandler", "DumpConfigHandler", + "FormatHandler", + "InitRepositoryProviderHandler", + "LintHandler", "PrepareEnvsInstallDepsHandler", "PrepareEnvsReadConfigsHandler", "PrepareRunnersInstallRunnerAndPresetsHandler", "PrepareRunnersReadConfigsHandler", "DumpConfigSaveHandler", + "PublishArtifactHandler", + "SaveFormatFilesHandler" ] diff --git a/finecode_builtin_handlers/src/finecode_builtin_handlers/clean_finecode_logs.py b/finecode_builtin_handlers/src/finecode_builtin_handlers/clean_finecode_logs.py new file mode 100644 index 00000000..2f1c1984 --- /dev/null +++ b/finecode_builtin_handlers/src/finecode_builtin_handlers/clean_finecode_logs.py @@ -0,0 +1,46 @@ +import dataclasses + +from finecode_extension_api import code_action +from finecode_extension_api.actions import ( + clean_finecode_logs as clean_finecode_logs_action, +) +from finecode_extension_api.interfaces import ilogger, iextensionrunnerinfoprovider + + +@dataclasses.dataclass +class CleanFinecodeLogsHandlerConfig(code_action.ActionHandlerConfig): ... 
+ + +class CleanFinecodeLogsHandler( + code_action.ActionHandler[ + clean_finecode_logs_action.CleanFinecodeLogsAction, + CleanFinecodeLogsHandlerConfig, + ] +): + def __init__( + self, + logger: ilogger.ILogger, + extension_runner_info_provider: iextensionrunnerinfoprovider.IExtensionRunnerInfoProvider, + ) -> None: + self.logger = logger + self.extension_runner_info_provider = extension_runner_info_provider + + async def run( + self, + payload: clean_finecode_logs_action.CleanFinecodeLogsRunPayload, + run_context: clean_finecode_logs_action.CleanFinecodeLogsRunContext, + ) -> clean_finecode_logs_action.CleanFinecodeLogsRunResult: + venv_dir_path = self.extension_runner_info_provider.get_current_venv_dir_path() + logs_dir_path = venv_dir_path / "logs" + errors: list[str] = [] + + # use file manager instead? + for log_file_path in logs_dir_path.glob("*.log"): + try: + log_file_path.unlink() + except Exception as exception: + errors.append(str(exception)) + else: + self.logger.info(f"Deleted {log_file_path}") + + return clean_finecode_logs_action.CleanFinecodeLogsRunResult(errors=errors) diff --git a/finecode_builtin_handlers/src/finecode_builtin_handlers/dump_config_save.py b/finecode_builtin_handlers/src/finecode_builtin_handlers/dump_config_save.py index 2252f395..2d531c24 100644 --- a/finecode_builtin_handlers/src/finecode_builtin_handlers/dump_config_save.py +++ b/finecode_builtin_handlers/src/finecode_builtin_handlers/dump_config_save.py @@ -4,7 +4,7 @@ from finecode_extension_api import code_action from finecode_extension_api.actions import dump_config as dump_config_action -from finecode_extension_api.interfaces import ifilemanager +from finecode_extension_api.interfaces import ifilemanager, ifileeditor @dataclasses.dataclass @@ -16,11 +16,17 @@ class DumpConfigSaveHandler( dump_config_action.DumpConfigAction, DumpConfigSaveHandlerConfig ] ): + FILE_OPERATION_AUTHOR = ifileeditor.FileOperationAuthor( + id="DumpConfigSaveHandler" + ) + + def __init__( + self, + file_manager:
ifilemanager.IFileManager, + file_editor: ifileeditor.IFileEditor ) -> None: self.file_manager = file_manager + self.file_editor = file_editor async def run( self, @@ -31,9 +37,12 @@ async def run( target_file_dir_path = payload.target_file_path.parent await self.file_manager.create_dir(dir_path=target_file_dir_path) - await self.file_manager.save_file( - file_path=payload.target_file_path, file_content=raw_config_str - ) + async with self.file_editor.session( + author=self.FILE_OPERATION_AUTHOR + ) as session: + await session.save_file( + file_path=payload.target_file_path, file_content=raw_config_str + ) return dump_config_action.DumpConfigRunResult( config_dump=run_context.raw_config_dump diff --git a/finecode_builtin_handlers/src/finecode_builtin_handlers/format.py b/finecode_builtin_handlers/src/finecode_builtin_handlers/format.py new file mode 100644 index 00000000..5f20c491 --- /dev/null +++ b/finecode_builtin_handlers/src/finecode_builtin_handlers/format.py @@ -0,0 +1,155 @@ +import asyncio +import dataclasses +import pathlib + +from finecode_extension_api import code_action +from finecode_extension_api.actions import ( + format as format_action, + format_files as format_files_action, + list_src_artifact_files_by_lang as list_src_artifact_files_by_lang_action, + group_src_artifact_files_by_lang as group_src_artifact_files_by_lang_action, +) +from finecode_extension_api.interfaces import ( + iactionrunner, + ifileeditor, + ilogger, +) + + +@dataclasses.dataclass +class FormatHandlerConfig(code_action.ActionHandlerConfig): ... 
+ + +class FormatHandler( + code_action.ActionHandler[format_action.FormatAction, FormatHandlerConfig] +): + def __init__( + self, + action_runner: iactionrunner.IActionRunner, + logger: ilogger.ILogger, + file_editor: ifileeditor.IFileEditor, + ) -> None: + self.action_runner = action_runner + self.file_editor = file_editor + self.logger = logger + + async def run( + self, + payload: format_action.FormatRunPayload, + run_context: format_action.FormatRunContext, + ) -> format_action.FormatRunResult: + files_by_lang: dict[str, list[pathlib.Path]] = {} + + # first get languages for which formatters are available, they change rarely + # only on project config change + all_actions = self.action_runner.get_actions_names() + format_files_prefix = "format_files_" + format_files_actions = [ + action_name + for action_name in all_actions + if action_name.startswith(format_files_prefix) + ] + # TODO: ordered set? + # TODO: cache and update on project config change + langs_supported_by_format = list( + set( + [ + action_name[len(format_files_prefix) :] + for action_name in format_files_actions + ] + ) + ) + run_meta = run_context.meta + + if payload.target == format_action.FormatTarget.PROJECT: + if ( + run_meta.dev_env == code_action.DevEnv.IDE + and run_meta.trigger == code_action.RunActionTrigger.SYSTEM + ): + # performance optimization: if IDE automatically(=`trigger == SYSTEM`) + # tries to format the whole project, format only files owned by IDE(usually + # these are opened files). + # In future it could be improved by formatting opened files + dependencies + # or e.g. files changed according to git + dependencies. 
+ files_to_format: list[pathlib.Path] = self.file_editor.get_opened_files() + group_project_files_action = self.action_runner.get_action_by_name( + "group_src_artifact_files_by_lang", group_src_artifact_files_by_lang_action.GroupSrcArtifactFilesByLangAction + ) + group_src_artifact_files_by_lang_payload = group_src_artifact_files_by_lang_action.GroupSrcArtifactFilesByLangRunPayload( + file_paths=files_to_format, langs=langs_supported_by_format + ) + files_by_lang_result = await self.action_runner.run_action( + action=group_project_files_action, + payload=group_src_artifact_files_by_lang_payload, + meta=run_meta + ) + files_by_lang = files_by_lang_result.files_by_lang + else: + # not automatic check of IDE, format the whole project. + # Instead of getting all files in the project and then grouping them by + # language, use `list_src_artifact_files_by_lang_action` action which returns + # only files with supported languages + list_src_artifact_file_by_lang_action_instance = ( + self.action_runner.get_action_by_name("list_src_artifact_files_by_lang", list_src_artifact_files_by_lang_action.ListSrcArtifactFilesByLangAction) + ) + list_src_artifact_files_by_lang_payload = ( + list_src_artifact_files_by_lang_action.ListSrcArtifactFilesByLangRunPayload( + langs=langs_supported_by_format + ) + ) + files_by_lang_result = await self.action_runner.run_action( + action=list_src_artifact_file_by_lang_action_instance, + payload=list_src_artifact_files_by_lang_payload, + meta=run_meta + ) + files_by_lang = files_by_lang_result.files_by_lang + + else: + # format target are files, format them + files_to_format = payload.file_paths + group_src_artifact_files_by_lang_action_instance = ( + self.action_runner.get_action_by_name("group_src_artifact_files_by_lang", group_src_artifact_files_by_lang_action.GroupSrcArtifactFilesByLangAction) + ) + group_src_artifact_files_by_lang_payload = ( + group_src_artifact_files_by_lang_action.GroupSrcArtifactFilesByLangRunPayload( + 
file_paths=files_to_format, langs=langs_supported_by_format + ) + ) + files_by_lang_result = await self.action_runner.run_action( + action=group_src_artifact_files_by_lang_action_instance, + payload=group_src_artifact_files_by_lang_payload, + meta=run_meta + ) + files_by_lang = files_by_lang_result.files_by_lang + + # TODO: handle errors + format_tasks = [] + try: + async with asyncio.TaskGroup() as tg: + for lang, lang_files in files_by_lang.items(): + # TODO: handle errors + # TODO: handle KeyError? + action = self.action_runner.get_action_by_name( + format_files_prefix + lang, format_files_action.FormatFilesAction + ) + format_files_payload = format_files_action.FormatFilesRunPayload( + file_paths=lang_files, save=payload.save + ) + format_task = tg.create_task( + self.action_runner.run_action( + action=action, payload=format_files_payload, meta=run_meta + ) + ) + format_tasks.append(format_task) + except ExceptionGroup as eg: + error_str = ". ".join([str(exception) for exception in eg.exceptions]) + raise code_action.ActionFailedException(error_str) from eg + + format_results = [task.result() for task in format_tasks] + if len(format_results) > 0: + result = format_action.FormatRunResult(result_by_file_path={}) + for subresult in format_results: + result.update(subresult) + return result + else: + return format_action.FormatRunResult(result_by_file_path={}) diff --git a/finecode_builtin_handlers/src/finecode_builtin_handlers/format_files_save_handler.py b/finecode_builtin_handlers/src/finecode_builtin_handlers/format_files_save_handler.py new file mode 100644 index 00000000..7db75b1d --- /dev/null +++ b/finecode_builtin_handlers/src/finecode_builtin_handlers/format_files_save_handler.py @@ -0,0 +1,51 @@ +import dataclasses + +from finecode_extension_api import code_action +from finecode_extension_api.actions import format_files as format_files_actions +from finecode_extension_api.interfaces import ifileeditor, ilogger + + +@dataclasses.dataclass +class 
SaveFormatFilesHandlerConfig(code_action.ActionHandlerConfig): ... + + +class SaveFormatFilesHandler( + code_action.ActionHandler[ + format_files_actions.FormatFilesAction, SaveFormatFilesHandlerConfig + ] +): + FILE_OPERATION_AUTHOR = ifileeditor.FileOperationAuthor(id="SaveFormatFilesHandler") + + def __init__( + self, file_editor: ifileeditor.IFileEditor, logger: ilogger.ILogger + ) -> None: + self.file_editor = file_editor + self.logger = logger + + async def run( + self, + payload: format_files_actions.FormatFilesRunPayload, + run_context: format_files_actions.FormatFilesRunContext, + ) -> format_files_actions.FormatFilesRunResult: + file_paths = payload.file_paths + save = payload.save + + if save is True: + async with self.file_editor.session(self.FILE_OPERATION_AUTHOR) as session: + for file_path in file_paths: + file_content = run_context.file_info_by_path[file_path].file_content + # TODO: only if changed? + await session.save_file( + file_path=file_path, file_content=file_content + ) + + result = format_files_actions.FormatFilesRunResult( + result_by_file_path={ + file_path: format_files_actions.FormatRunFileResult( + changed=False, # this handler doesn't change files, only saves them + code=run_context.file_info_by_path[file_path].file_content, + ) + for file_path in file_paths + } + ) + return result diff --git a/finecode_builtin_handlers/src/finecode_builtin_handlers/init_repository_provider.py b/finecode_builtin_handlers/src/finecode_builtin_handlers/init_repository_provider.py new file mode 100644 index 00000000..1e762379 --- /dev/null +++ b/finecode_builtin_handlers/src/finecode_builtin_handlers/init_repository_provider.py @@ -0,0 +1,51 @@ +import dataclasses + +from finecode_extension_api import code_action +from finecode_extension_api.actions import ( + init_repository_provider as init_repository_provider_action, +) +from finecode_extension_api.interfaces import irepositorycredentialsprovider + + +@dataclasses.dataclass +class 
InitRepositoryProviderHandlerConfig(code_action.ActionHandlerConfig): + pass + + +class InitRepositoryProviderHandler( + code_action.ActionHandler[ + init_repository_provider_action.InitRepositoryProviderAction, + InitRepositoryProviderHandlerConfig, + ] +): + def __init__( + self, + repository_credentials_provider: irepositorycredentialsprovider.IRepositoryCredentialsProvider, + ) -> None: + self.repository_credentials_provider = repository_credentials_provider + + async def run( + self, + payload: init_repository_provider_action.InitRepositoryProviderRunPayload, + run_context: init_repository_provider_action.InitRepositoryProviderRunContext, + ) -> init_repository_provider_action.InitRepositoryProviderRunResult: + initialized_repositories: list[str] = [] + + # Add repositories + for repository in payload.repositories: + self.repository_credentials_provider.add_repository( + name=repository.name, url=repository.url + ) + initialized_repositories.append(repository.name) + + # Set credentials + for repo_name, credentials in payload.credentials_by_repository.items(): + self.repository_credentials_provider.set_credentials( + repository_name=repo_name, + username=credentials.username, + password=credentials.password, + ) + + return init_repository_provider_action.InitRepositoryProviderRunResult( + initialized_repositories=initialized_repositories + ) diff --git a/finecode_builtin_handlers/src/finecode_builtin_handlers/lint.py b/finecode_builtin_handlers/src/finecode_builtin_handlers/lint.py new file mode 100644 index 00000000..8cdf7969 --- /dev/null +++ b/finecode_builtin_handlers/src/finecode_builtin_handlers/lint.py @@ -0,0 +1,122 @@ +import asyncio +import dataclasses +import pathlib + +from finecode_extension_api import code_action +from finecode_extension_api.actions import ( + lint as lint_action, + lint_files as lint_files_action, + list_src_artifact_files_by_lang as list_src_artifact_files_by_lang_action, + group_src_artifact_files_by_lang as 
group_src_artifact_files_by_lang_action +) +from finecode_extension_api.interfaces import ( + iactionrunner, + ifileeditor, + ilogger, +) + + +@dataclasses.dataclass +class LintHandlerConfig(code_action.ActionHandlerConfig): ... + + +class LintHandler( + code_action.ActionHandler[ + lint_action.LintAction, LintHandlerConfig + ] +): + def __init__( + self, action_runner: iactionrunner.IActionRunner, logger: ilogger.ILogger, file_editor: ifileeditor.IFileEditor + ) -> None: + self.action_runner = action_runner + self.file_editor = file_editor + self.logger = logger + + async def run( + self, + payload: lint_action.LintRunPayload, + run_context: lint_action.LintRunContext, + ) -> lint_action.LintRunResult: + # files_to_lint: list[pathlib.Path] = [] + files_by_lang: dict[str, list[pathlib.Path]] = {} + + # first get languages for which linters are available, they change rarely + # only on project config change + all_actions = self.action_runner.get_actions_names() + lint_files_prefix = 'lint_files_' + lint_files_actions = [action_name for action_name in all_actions if action_name.startswith(lint_files_prefix)] + # TODO: ordered set? + # TODO: cache and update on project config change + langs_supported_by_lint = list(set([action_name[len(lint_files_prefix):] for action_name in lint_files_actions])) + run_meta = run_context.meta + + if payload.target == lint_action.LintTarget.PROJECT: + if run_meta.dev_env == code_action.DevEnv.IDE and run_meta.trigger == code_action.RunActionTrigger.SYSTEM: + # performance optimization: if IDE automatically(=`trigger == SYSTEM`) + # tries to lint the whole project, lint only files owned by IDE(usually + # these are opened files). + # In future it could be improved by linting opened files + dependencies + # or e.g. files changed according to git + dependencies. 
+ files_to_lint: list[pathlib.Path] = self.file_editor.get_opened_files() + group_project_files_action = self.action_runner.get_action_by_name('group_src_artifact_files_by_lang', group_src_artifact_files_by_lang_action.GroupSrcArtifactFilesByLangAction) + group_src_artifact_files_by_lang_payload = group_src_artifact_files_by_lang_action.GroupSrcArtifactFilesByLangRunPayload(file_paths=files_to_lint, langs=langs_supported_by_lint) + files_by_lang_result = await self.action_runner.run_action( + action=group_project_files_action, + payload=group_src_artifact_files_by_lang_payload, + meta=run_meta + ) + files_by_lang = files_by_lang_result.files_by_lang + else: + # not automatic check of IDE, lint the whole project. + # Instead of getting all files in the project and then grouping them by + # language, use `list_src_artifact_files_by_lang_action` action which returns + # only files with supported languages + list_src_artifact_file_by_lang_action_instance = self.action_runner.get_action_by_name('list_src_artifact_files_by_lang', list_src_artifact_files_by_lang_action.ListSrcArtifactFilesByLangAction) + list_src_artifact_files_by_lang_payload = list_src_artifact_files_by_lang_action.ListSrcArtifactFilesByLangRunPayload(langs=langs_supported_by_lint) + files_by_lang_result = await self.action_runner.run_action( + action=list_src_artifact_file_by_lang_action_instance, + payload=list_src_artifact_files_by_lang_payload, + meta=run_meta + ) + files_by_lang = files_by_lang_result.files_by_lang + + else: + # lint target are files, lint them + files_to_lint = payload.file_paths + group_src_artifact_files_by_lang_action_instance = self.action_runner.get_action_by_name('group_src_artifact_files_by_lang', group_src_artifact_files_by_lang_action.GroupSrcArtifactFilesByLangAction) + group_src_artifact_files_by_lang_payload = group_src_artifact_files_by_lang_action.GroupSrcArtifactFilesByLangRunPayload(file_paths=files_to_lint, langs=langs_supported_by_lint) + files_by_lang_result = 
await self.action_runner.run_action( + action=group_src_artifact_files_by_lang_action_instance, + payload=group_src_artifact_files_by_lang_payload, + meta=run_meta + ) + files_by_lang = files_by_lang_result.files_by_lang + + # TODO: handle errors + lint_tasks = [] + try: + async with asyncio.TaskGroup() as tg: + for lang, lang_files in files_by_lang.items(): + # TODO: handle errors + # TODO: handle KeyError? + actions = self.action_runner.get_actions_for_language(source="finecode_extension_api.actions.lint_files.LintFilesAction", language=lang, expected_type=lint_files_action.LintFilesAction) + lint_files_payload = lint_files_action.LintFilesRunPayload(file_paths=lang_files) + for action in actions: + lint_task = tg.create_task(self.action_runner.run_action( + action=action, + payload=lint_files_payload, + meta=run_meta + )) + lint_tasks.append(lint_task) + except ExceptionGroup as eg: + error_str = ". ".join([str(exception) for exception in eg.exceptions]) + raise code_action.ActionFailedException(error_str) from eg + + lint_results = [task.result() for task in lint_tasks] + if len(lint_results) > 0: + result = lint_action.LintRunResult(messages={}) + for subresult in lint_results: + result.update(subresult) + return result + else: + return lint_action.LintRunResult(messages={}) diff --git a/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_envs_install_deps.py b/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_envs_install_deps.py index e7c46f46..88d1c5f4 100644 --- a/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_envs_install_deps.py +++ b/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_envs_install_deps.py @@ -1,9 +1,8 @@ import asyncio import dataclasses -import itertools from finecode_extension_api import code_action -from finecode_extension_api.actions import prepare_envs as prepare_envs_action +from finecode_extension_api.actions import prepare_envs as prepare_envs_action, install_deps_in_env as 
install_deps_in_env_action from finecode_extension_api.interfaces import ( iactionrunner, ilogger, @@ -33,7 +32,9 @@ async def run( ) -> prepare_envs_action.PrepareEnvsRunResult: envs = payload.envs - install_deps_tasks: list[asyncio.Task] = [] + install_deps_in_env_action_instance = self.action_runner.get_action_by_name(name="install_deps_in_env", expected_type=install_deps_in_env_action.InstallDepsInEnvAction) + install_deps_tasks: list[asyncio.Task[install_deps_in_env_action.InstallDepsInEnvRunResult]] = [] + run_meta = run_context.meta try: async with asyncio.TaskGroup() as tg: for env in envs: @@ -56,29 +57,30 @@ async def run( process_raw_deps( env_raw_deps, env_deps_config, dependencies, deps_groups ) + + install_deps_payload = install_deps_in_env_action.InstallDepsInEnvRunPayload( + env_name=env.name, + venv_dir_path=env.venv_dir_path, + project_dir_path=env.project_def_path.parent, + dependencies=[install_deps_in_env_action.Dependency(name=dep['name'], version_or_source=dep['version_or_source'], editable=dep['editable']) for dep in dependencies] + ) task = tg.create_task( self.action_runner.run_action( - name="install_deps_in_env", - payload={ - "env_name": env.name, - "venv_dir_path": env.venv_dir_path, - "project_dir_path": env.project_def_path.parent, - "dependencies": dependencies, - }, + action=install_deps_in_env_action_instance, + payload=install_deps_payload, + meta=run_meta ) ) install_deps_tasks.append(task) except ExceptionGroup as eg: error_str = ". 
".join([str(exception) for exception in eg.exceptions]) - raise code_action.ActionFailedException(error_str) + raise code_action.ActionFailedException(error_str) from eg install_deps_results = [task.result() for task in install_deps_tasks] - errors: list[str] = list( - itertools.chain.from_iterable( - [result["errors"] for result in install_deps_results] - ) - ) + errors: list[str] = [] + for result in install_deps_results: + errors += result.errors return prepare_envs_action.PrepareEnvsRunResult(errors=errors) diff --git a/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_envs_read_configs.py b/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_envs_read_configs.py index 9eed2599..9f340826 100644 --- a/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_envs_read_configs.py +++ b/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_envs_read_configs.py @@ -6,7 +6,6 @@ from finecode_extension_api import code_action from finecode_extension_api.actions import prepare_envs as prepare_envs_action from finecode_extension_api.interfaces import ( - iactionrunner, ilogger, iprojectinfoprovider, ) @@ -24,11 +23,9 @@ class PrepareEnvsReadConfigsHandler( ): def __init__( self, - action_runner: iactionrunner.IActionRunner, project_info_provider: iprojectinfoprovider.IProjectInfoProvider, logger: ilogger.ILogger, ) -> None: - self.action_runner = action_runner self.project_info_provider = project_info_provider self.logger = logger diff --git a/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_runners_install_runner_and_presets.py b/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_runners_install_runner_and_presets.py index a897be0f..b1bc61eb 100644 --- a/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_runners_install_runner_and_presets.py +++ b/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_runners_install_runner_and_presets.py @@ -1,10 +1,9 @@ import asyncio import 
dataclasses -import itertools import typing from finecode_extension_api import code_action -from finecode_extension_api.actions import prepare_runners as prepare_runners_action +from finecode_extension_api.actions import prepare_runners as prepare_runners_action, install_deps_in_env as install_deps_in_env_action from finecode_extension_api.interfaces import ( iactionrunner, ilogger, @@ -56,19 +55,23 @@ async def run( ) dependencies_by_env[env.name] = dependencies - install_deps_tasks: list[asyncio.Task] = [] + install_deps_in_env_action_instance = self.action_runner.get_action_by_name(name="install_deps_in_env", expected_type=install_deps_in_env_action.InstallDepsInEnvAction) + install_deps_tasks: list[asyncio.Task[install_deps_in_env_action.InstallDepsInEnvRunResult]] = [] + run_meta = run_context.meta try: async with asyncio.TaskGroup() as tg: for env in envs: + install_deps_payload = install_deps_in_env_action.InstallDepsInEnvRunPayload( + env_name=env.name, + venv_dir_path=env.venv_dir_path, + project_dir_path=env.project_def_path.parent, + dependencies=[install_deps_in_env_action.Dependency(name=dep['name'], version_or_source=dep['version_or_source'], editable=dep['editable']) for dep in dependencies_by_env[env.name]] + ) task = tg.create_task( self.action_runner.run_action( - name="install_deps_in_env", - payload={ - "env_name": env.name, - "venv_dir_path": env.venv_dir_path, - "project_dir_path": env.project_def_path.parent, - "dependencies": dependencies_by_env[env.name], - }, + action=install_deps_in_env_action_instance, + payload=install_deps_payload, + meta=run_meta ) ) install_deps_tasks.append(task) @@ -82,17 +85,15 @@ async def run( error_str = ". 
".join( [str(exception) for exception in eg.exceptions] ) - raise code_action.ActionFailedException(error_str) + raise code_action.ActionFailedException(error_str) from eg result = prepare_runners_action.PrepareRunnersRunResult(errors=errors) - raise code_action.StopActionRunWithResult(result=result) + raise code_action.StopActionRunWithResult(result=result) from eg install_deps_results = [task.result() for task in install_deps_tasks] - errors: list[str] = list( - itertools.chain.from_iterable( - [result["errors"] for result in install_deps_results] - ) - ) + errors: list[str] = [] + for result in install_deps_results: + errors += result.errors result = prepare_runners_action.PrepareRunnersRunResult(errors=errors) return result diff --git a/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_runners_read_configs.py b/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_runners_read_configs.py index 7d774483..e54b7f13 100644 --- a/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_runners_read_configs.py +++ b/finecode_builtin_handlers/src/finecode_builtin_handlers/prepare_runners_read_configs.py @@ -6,7 +6,6 @@ from finecode_extension_api import code_action from finecode_extension_api.actions import prepare_runners as prepare_runners_action from finecode_extension_api.interfaces import ( - iactionrunner, ilogger, iprojectinfoprovider, ) @@ -25,11 +24,9 @@ class PrepareRunnersReadConfigsHandler( ): def __init__( self, - action_runner: iactionrunner.IActionRunner, project_info_provider: iprojectinfoprovider.IProjectInfoProvider, logger: ilogger.ILogger, ) -> None: - self.action_runner = action_runner self.project_info_provider = project_info_provider self.logger = logger diff --git a/finecode_builtin_handlers/src/finecode_builtin_handlers/publish_artifact.py b/finecode_builtin_handlers/src/finecode_builtin_handlers/publish_artifact.py new file mode 100644 index 00000000..53f5eb17 --- /dev/null +++ 
b/finecode_builtin_handlers/src/finecode_builtin_handlers/publish_artifact.py @@ -0,0 +1,164 @@ +import asyncio +import dataclasses +import pathlib + +from finecode_extension_api import code_action +from finecode_extension_api.actions import \ + get_src_artifact_registries as get_src_artifact_registries_action +from finecode_extension_api.actions import \ + get_src_artifact_version as get_src_artifact_version_action +from finecode_extension_api.actions import \ + is_artifact_published_to_registry as is_artifact_published_to_registry_action +from finecode_extension_api.actions import \ + publish_artifact as publish_artifact_action +from finecode_extension_api.actions import \ + publish_artifact_to_registry as publish_artifact_to_registry_action +from finecode_extension_api.interfaces import ( + iactionrunner, + ilogger, + iprojectinfoprovider, +) + + +@dataclasses.dataclass +class PublishArtifactHandlerConfig(code_action.ActionHandlerConfig): ... + + +class PublishArtifactHandler( + code_action.ActionHandler[ + publish_artifact_action.PublishArtifactAction, + PublishArtifactHandlerConfig, + ] +): + action_runner: iactionrunner.IActionRunner + logger: ilogger.ILogger + project_info_provider: iprojectinfoprovider.IProjectInfoProvider + + def __init__( + self, + action_runner: iactionrunner.IActionRunner, + logger: ilogger.ILogger, + project_info_provider: iprojectinfoprovider.IProjectInfoProvider, + ) -> None: + self.action_runner = action_runner + self.logger = logger + self.project_info_provider = project_info_provider + + async def run( + self, + payload: publish_artifact_action.PublishArtifactRunPayload, + run_context: publish_artifact_action.PublishArtifactRunContext, + ) -> publish_artifact_action.PublishArtifactRunResult: + run_meta = run_context.meta + + src_artifact_def_path = payload.src_artifact_def_path + dist_artifact_paths = payload.dist_artifact_paths + + get_version_action = self.action_runner.get_action_by_name( + "get_src_artifact_version", 
get_src_artifact_version_action.GetSrcArtifactVersionAction + ) + version_payload = ( + get_src_artifact_version_action.GetSrcArtifactVersionRunPayload( + src_artifact_def_path=src_artifact_def_path + ) + ) + version_result = await self.action_runner.run_action( + action=get_version_action, payload=version_payload, meta=run_meta + ) + version = version_result.version + + get_registries_action = self.action_runner.get_action_by_name( + "get_src_artifact_registries", get_src_artifact_registries_action.GetSrcArtifactRegistriesAction + ) + registries_payload = ( + get_src_artifact_registries_action.GetSrcArtifactRegistriesRunPayload( + src_artifact_def_path=src_artifact_def_path + ) + ) + registries_result = await self.action_runner.run_action( + action=get_registries_action, payload=registries_payload, meta=run_meta + ) + + # Filter registries based on publication status if not forced + registries_to_publish = registries_result.registries + if len(registries_to_publish) == 0: + raise code_action.ActionFailedException("No registries are configured") + + # Build dict of paths to publish per registry + dist_paths_to_publish_by_registry: dict[str, list[pathlib.Path]] + if payload.force: + dist_paths_to_publish_by_registry = { + registry.name: dist_artifact_paths + for registry in registries_to_publish + } + else: + is_published_action = self.action_runner.get_action_by_name( + "is_artifact_published_to_registry", is_artifact_published_to_registry_action.IsArtifactPublishedToRegistryAction + ) + + check_tasks: list[tuple[asyncio.Task[is_artifact_published_to_registry_action.IsArtifactPublishedToRegistryRunResult], get_src_artifact_registries_action.Registry]] = [] + try: + async with asyncio.TaskGroup() as tg: + for registry in registries_result.registries: + check_payload = is_artifact_published_to_registry_action.IsArtifactPublishedToRegistryRunPayload( + src_artifact_def_path=src_artifact_def_path, + dist_artifact_paths=dist_artifact_paths, + version=version, + 
registry_name=registry.name, + ) + task = tg.create_task( + self.action_runner.run_action( + action=is_published_action, + payload=check_payload, + meta=run_meta, + ) + ) + check_tasks.append((task, registry)) + except ExceptionGroup as eg: + error_str = ". ".join([str(exception) for exception in eg.exceptions]) + raise code_action.ActionFailedException(error_str) from eg + + # Filter to only dist paths that are not published per registry + dist_paths_to_publish_by_registry = {} + for task, registry in check_tasks: + result = task.result() + not_published_paths = [ + path + for path, is_published in result.is_published_by_dist_path.items() + if not is_published + ] + if not_published_paths: + dist_paths_to_publish_by_registry[registry.name] = not_published_paths + + # Publish to registries with unpublished artifacts + publish_to_registry_action = self.action_runner.get_action_by_name( + "publish_artifact_to_registry", publish_artifact_to_registry_action.PublishArtifactToRegistryAction + ) + + publish_tasks: list[asyncio.Task[publish_artifact_to_registry_action.PublishArtifactToRegistryRunResult]] = [] + try: + async with asyncio.TaskGroup() as tg: + for registry_name, paths_to_publish in dist_paths_to_publish_by_registry.items(): + publish_payload = publish_artifact_to_registry_action.PublishArtifactToRegistryRunPayload( + src_artifact_def_path=src_artifact_def_path, + dist_artifact_paths=paths_to_publish, + registry_name=registry_name, + force=payload.force, + ) + task = tg.create_task( + self.action_runner.run_action( + action=publish_to_registry_action, + payload=publish_payload, + meta=run_meta, + ) + ) + publish_tasks.append(task) + except ExceptionGroup as eg: + error_str = ". 
".join([str(exception) for exception in eg.exceptions]) + raise code_action.ActionFailedException(error_str) from eg + + published_registries = list(dist_paths_to_publish_by_registry.keys()) + + return publish_artifact_action.PublishArtifactRunResult( + version=version, published_registries=published_registries + ) diff --git a/finecode_builtin_handlers/src/finecode_builtin_handlers/py.typed b/finecode_builtin_handlers/src/finecode_builtin_handlers/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/finecode_dev_common_preset/pyproject.toml b/finecode_dev_common_preset/pyproject.toml index 91782b9f..6ca2df39 100644 --- a/finecode_dev_common_preset/pyproject.toml +++ b/finecode_dev_common_preset/pyproject.toml @@ -1,13 +1,13 @@ [project] name = "finecode_dev_common_preset" -version = "0.2.0" +version = "0.3.0a1" description = "" authors = [{ name = "Vladyslav Hnatiuk", email = "aders1234@gmail.com" }] readme = "README.md" requires-python = ">=3.11, <= 3.14" dependencies = [ - "fine_python_aksem @ git+https://github.com/Aksem/fine_python_aksem.git", - "fine_python_recommended==0.4.*", + "fine_python_aksem~=0.3.0a3", + "fine_python_recommended~=0.5.0a0", ] [tool.setuptools.package-data] diff --git a/finecode_dev_common_preset/src/finecode_dev_common_preset/preset.toml b/finecode_dev_common_preset/src/finecode_dev_common_preset/preset.toml index 1799cf3b..50653a0d 100644 --- a/finecode_dev_common_preset/src/finecode_dev_common_preset/preset.toml +++ b/finecode_dev_common_preset/src/finecode_dev_common_preset/preset.toml @@ -17,11 +17,18 @@ finecode_extension_runner = { path = "../../../finecode_extension_runner", edita [tool.finecode.env.dev_no_runtime.dependencies] finecode_extension_api = { path = "../../../finecode_extension_api", editable = true } finecode_extension_runner = { path = "../../../finecode_extension_runner", editable = true } +finecode_builtin_handlers = { path = "../../../finecode_builtin_handlers", editable = true } 
+finecode_httpclient = { path = "../../../finecode_httpclient", editable = true } +finecode_jsonrpc = { path = "../../../finecode_jsonrpc", editable = true } +finecode_dev_extensions = { path = "../../../finecode_dev_extensions", editable = true } +fine_python_ast = { path = "../../../extensions/fine_python_ast", editable = true } fine_python_mypy = { path = "../../../extensions/fine_python_mypy", editable = true } fine_python_ruff = { path = "../../../extensions/fine_python_ruff", editable = true } +fine_python_isort = { path = "../../../extensions/fine_python_isort", editable = true } fine_python_flake8 = { path = "../../../extensions/fine_python_flake8", editable = true } fine_python_pyrefly = { path = "../../../extensions/fine_python_pyrefly", editable = true } fine_python_package_info = { path = "../../../extensions/fine_python_package_info", editable = true } +fine_python_setuptools_scm = { path = "../../../extensions/fine_python_setuptools_scm", editable = true } # currently, all packages in finecode repository are pure python packages, reuse # setuptools build in all of them @@ -29,11 +36,97 @@ fine_python_package_info = { path = "../../../extensions/fine_python_package_inf requires = ["setuptools>=64", "setuptools-scm>=8"] build-backend = "setuptools.build_meta" +[[tool.finecode.service]] +interface = "finecode_extension_api.interfaces.ihttpclient.IHttpClient" +source = "finecode_httpclient.HttpClient" +env = "dev_no_runtime" +dependencies = ["finecode_httpclient~=0.1.0a1"] + # TODO: recognize minimal python version automatically [[tool.finecode.action_handler]] -source = "fine_python_ruff.RuffLintHandler" +source = "fine_python_ruff.RuffLintFilesHandler" config.target_version = 'py311' [[tool.finecode.action_handler]] -source = "fine_python_pyrefly.PyreflyLintHandler" +source = "fine_python_pyrefly.PyreflyLintFilesHandler" config.python_version = '3.11' + +[tool.finecode.action.init_repository_provider] +source = 
"finecode_extension_api.actions.init_repository_provider.InitRepositoryProviderAction" +handlers = [ + { name = 'init_repository_provider', source = 'finecode_builtin_handlers.InitRepositoryProviderHandler', env = "dev_no_runtime", dependencies = [ + "finecode_builtin_handlers~=0.2.0a0", + ] }, +] + +[tool.finecode.action.publish_artifact] +source = "finecode_extension_api.actions.publish_artifact.PublishArtifactAction" +handlers = [ + { name = 'publish_artifact', source = 'finecode_builtin_handlers.PublishArtifactHandler', env = "dev_no_runtime", dependencies = [ + "finecode_builtin_handlers~=0.2.0a0", + ] }, +] + +[tool.finecode.action.get_src_artifact_version] +source = "finecode_extension_api.actions.get_src_artifact_version.GetSrcArtifactVersionAction" +handlers = [ + { name = 'get_src_artifact_version_py', source = 'fine_python_package_info.GetSrcArtifactVersionPyHandler', env = "dev_no_runtime", dependencies = [ + "fine_python_package_info~=0.2.0a1", + ] }, +] + +[tool.finecode.action.get_dist_artifact_version] +source = "finecode_extension_api.actions.get_dist_artifact_version.GetDistArtifactVersionAction" +handlers = [ + { name = 'get_dist_artifact_version_py', source = 'fine_python_package_info.GetDistArtifactVersionPyHandler', env = "dev_no_runtime", dependencies = [ + "fine_python_package_info~=0.2.0a1", + ] }, +] + +[tool.finecode.action.get_src_artifact_registries] +source = "finecode_extension_api.actions.get_src_artifact_registries.GetSrcArtifactRegistriesAction" +handlers = [ + { name = 'get_src_artifact_registries_py', source = 'fine_python_package_info.GetSrcArtifactRegistriesPyHandler', env = "dev_no_runtime", dependencies = [ + "fine_python_package_info~=0.2.0a1", + ] }, +] + +[tool.finecode.action.publish_artifact_to_registry] +source = "finecode_extension_api.actions.publish_artifact_to_registry.PublishArtifactToRegistryAction" +handlers = [ + { name = 'publish_artifact_to_registry_py', source = 
'fine_python_package_info.PublishArtifactToRegistryPyHandler', env = "dev_no_runtime", dependencies = [ + "fine_python_package_info~=0.2.0a1", + ] }, +] + +[tool.finecode.action.is_artifact_published_to_registry] +source = "finecode_extension_api.actions.is_artifact_published_to_registry.IsArtifactPublishedToRegistryAction" +handlers = [ + { name = 'is_artifact_published_to_registry_py', source = 'fine_python_package_info.IsArtifactPublishedToRegistryPyHandler', env = "dev_no_runtime", dependencies = [ + "fine_python_package_info~=0.2.0a1", + ] }, +] + +[tool.finecode.action.build_artifact] +source = "finecode_extension_api.actions.build_artifact_action.BuildArtifactAction" +handlers = [ + { name = 'build_artifact_py', source = 'fine_python_package_info.BuildArtifactPyHandler', env = "dev_no_runtime", dependencies = [ + "fine_python_package_info~=0.2.0a1", + ] }, +] + +[tool.finecode.action.verify_artifact_published_to_registry] +source = "finecode_extension_api.actions.verify_artifact_published_to_registry.VerifyArtifactPublishedToRegistryAction" +# TODO: install, try to import etc +handlers = [] + +[tool.finecode.action.publish_and_verify_artifact] +source = "finecode_dev_extensions.publish_and_verify_artifact_action.PublishAndVerifyArtifactAction" +handlers = [ + { name = 'init_repository_provider', source = 'finecode_dev_extensions.PublishAndVerifyArtifactInitRepositoryProviderHandler', env = "dev_no_runtime", dependencies = [ + "finecode_dev_extensions~=0.2.0a0", + ] }, + { name = 'publish_and_verify_artifact', source = 'finecode_dev_extensions.PublishAndVerifyArtifactHandler', env = "dev_no_runtime", dependencies = [ + "finecode_dev_extensions~=0.2.0a0", + ] }, +] diff --git a/finecode_dev_extensions/README.md b/finecode_dev_extensions/README.md new file mode 100644 index 00000000..e69de29b diff --git a/finecode_dev_extensions/pyproject.toml b/finecode_dev_extensions/pyproject.toml new file mode 100644 index 00000000..5e7120aa --- /dev/null +++ 
b/finecode_dev_extensions/pyproject.toml @@ -0,0 +1,26 @@ +[project] +name = "finecode_dev_extensions" +version = "0.2.0a0" +description = "FineCode development extensions" +authors = [{ name = "Vladyslav Hnatiuk", email = "aders1234@gmail.com" }] +readme = "README.md" +requires-python = ">=3.11, <= 3.14" +dependencies = ["finecode_extension_api~=0.4.0a0"] + +[dependency-groups] +dev_workspace = ["finecode~=0.4.0a0", "finecode_dev_common_preset~=0.3.0a0"] + +[tool.finecode.env.dev_workspace.dependencies] +finecode_dev_common_preset = { path = "../finecode_dev_common_preset", editable = true } +finecode = { path = "../", editable = true } +finecode_extension_runner = { path = "../finecode_extension_runner", editable = true } +finecode_extension_api = { path = "../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "../finecode_jsonrpc", editable = true } +finecode_dev_extensions = { path = "../finecode_dev_extensions", editable = true } +fine_python_recommended = { path = "../presets/fine_python_recommended", editable = true } +fine_python_lint = { path = "../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../presets/fine_python_format", editable = true } +finecode_builtin_handlers = { path = "../finecode_builtin_handlers", editable = true } + +[tool.finecode] +presets = [{ source = "finecode_dev_common_preset" }] diff --git a/finecode_dev_extensions/setup.py b/finecode_dev_extensions/setup.py new file mode 100644 index 00000000..b69be92f --- /dev/null +++ b/finecode_dev_extensions/setup.py @@ -0,0 +1,67 @@ +import atexit +import shutil +import sys +import tempfile + +from setuptools import setup +from setuptools.command.build import build +from setuptools.command.build_ext import build_ext +from setuptools.command.build_py import build_py +from setuptools.command.egg_info import egg_info + +# Create a single temp directory for all build operations +_TEMP_BUILD_DIR = None + + +def get_temp_build_dir(pkg_name): + 
global _TEMP_BUILD_DIR + if _TEMP_BUILD_DIR is None: + _TEMP_BUILD_DIR = tempfile.mkdtemp(prefix=f"{pkg_name}_build_") + atexit.register(lambda: shutil.rmtree(_TEMP_BUILD_DIR, ignore_errors=True)) + return _TEMP_BUILD_DIR + + +class TempDirBuildMixin: + def initialize_options(self): + super().initialize_options() + temp_dir = get_temp_build_dir(self.distribution.get_name()) + self.build_base = temp_dir + + +class TempDirEggInfoMixin: + def initialize_options(self): + super().initialize_options() + temp_dir = get_temp_build_dir(self.distribution.get_name()) + self.egg_base = temp_dir + + +class CustomBuild(TempDirBuildMixin, build): + pass + + +class CustomBuildPy(TempDirBuildMixin, build_py): + pass + + +class CustomBuildExt(TempDirBuildMixin, build_ext): + pass + + +class CustomEggInfo(TempDirEggInfoMixin, egg_info): + def initialize_options(self): + # Don't use temp dir for editable installs + if "--editable" in sys.argv or "-e" in sys.argv: + egg_info.initialize_options(self) + else: + super().initialize_options() + + +setup( + name="finecode_dev_extensions", + cmdclass={ + "build": CustomBuild, + "build_py": CustomBuildPy, + "build_ext": CustomBuildExt, + "egg_info": CustomEggInfo, + }, +) diff --git a/finecode_dev_extensions/src/finecode_dev_extensions/__init__.py b/finecode_dev_extensions/src/finecode_dev_extensions/__init__.py new file mode 100644 index 00000000..df379a68 --- /dev/null +++ b/finecode_dev_extensions/src/finecode_dev_extensions/__init__.py @@ -0,0 +1,15 @@ +from .publish_and_verify_artifact_handler import ( + PublishAndVerifyArtifactHandler, + PublishAndVerifyArtifactHandlerConfig, +) +from .publish_and_verify_artifact_init_repository_provider_handler import ( + PublishAndVerifyArtifactInitRepositoryProviderHandler, + PublishAndVerifyArtifactInitRepositoryProviderHandlerConfig, +) + +__all__ = [ + "PublishAndVerifyArtifactHandler", + "PublishAndVerifyArtifactHandlerConfig", + "PublishAndVerifyArtifactInitRepositoryProviderHandler", + 
"PublishAndVerifyArtifactInitRepositoryProviderHandlerConfig", +] diff --git a/finecode_dev_extensions/src/finecode_dev_extensions/publish_and_verify_artifact_action.py b/finecode_dev_extensions/src/finecode_dev_extensions/publish_and_verify_artifact_action.py new file mode 100644 index 00000000..0a129325 --- /dev/null +++ b/finecode_dev_extensions/src/finecode_dev_extensions/publish_and_verify_artifact_action.py @@ -0,0 +1,66 @@ +import dataclasses +import pathlib + +from finecode_extension_api import code_action, textstyler + + +@dataclasses.dataclass +class PublishAndVerifyArtifactRunPayload(code_action.RunActionPayload): + src_artifact_def_path: pathlib.Path + dist_artifact_paths: list[pathlib.Path] + force: bool = False + + +class PublishAndVerifyArtifactRunContext( + code_action.RunActionContext[PublishAndVerifyArtifactRunPayload] +): + pass + + +@dataclasses.dataclass +class PublishAndVerifyArtifactRunResult(code_action.RunActionResult): + version: str + published_registries: list[str] + verification_errors: dict[str, list[str]] # registry_name -> errors + + def update(self, other: code_action.RunActionResult) -> None: + if not isinstance(other, PublishAndVerifyArtifactRunResult): + return + self.version = other.version + self.published_registries = other.published_registries + self.verification_errors = other.verification_errors + + def to_text(self) -> str | textstyler.StyledText: + lines = [] + if self.published_registries: + lines.append(f"Published version {self.version} to: {', '.join(self.published_registries)}") + else: + lines.append(f"Version {self.version} was already published") + + if self.verification_errors: + lines.append("Verification errors:") + for registry, errors in self.verification_errors.items(): + for error in errors: + lines.append(f" - {registry}: {error}") + else: + lines.append("Verification successful for all registries") + + return "\n".join(lines) + + @property + def return_code(self) -> code_action.RunReturnCode: + if 
self.verification_errors: + return code_action.RunReturnCode.ERROR + return code_action.RunReturnCode.SUCCESS + + +class PublishAndVerifyArtifactAction( + code_action.Action[ + PublishAndVerifyArtifactRunPayload, + PublishAndVerifyArtifactRunContext, + PublishAndVerifyArtifactRunResult, + ] +): + PAYLOAD_TYPE = PublishAndVerifyArtifactRunPayload + RUN_CONTEXT_TYPE = PublishAndVerifyArtifactRunContext + RESULT_TYPE = PublishAndVerifyArtifactRunResult diff --git a/finecode_dev_extensions/src/finecode_dev_extensions/publish_and_verify_artifact_handler.py b/finecode_dev_extensions/src/finecode_dev_extensions/publish_and_verify_artifact_handler.py new file mode 100644 index 00000000..98a8b90c --- /dev/null +++ b/finecode_dev_extensions/src/finecode_dev_extensions/publish_and_verify_artifact_handler.py @@ -0,0 +1,103 @@ +import dataclasses +import pathlib + +from finecode_extension_api import code_action +from finecode_extension_api.actions import ( + get_dist_artifact_version, + publish_artifact, + verify_artifact_published_to_registry, +) +from finecode_extension_api.interfaces import iactionrunner, iprojectinfoprovider + +from .publish_and_verify_artifact_action import ( + PublishAndVerifyArtifactAction, + PublishAndVerifyArtifactRunContext, + PublishAndVerifyArtifactRunPayload, + PublishAndVerifyArtifactRunResult, +) + + +@dataclasses.dataclass +class PublishAndVerifyArtifactHandlerConfig(code_action.ActionHandlerConfig): ... 
+ + +class PublishAndVerifyArtifactHandler( + code_action.ActionHandler[ + PublishAndVerifyArtifactAction, + PublishAndVerifyArtifactHandlerConfig, + ] +): + action_runner: iactionrunner.IActionRunner + project_info_provider: iprojectinfoprovider.IProjectInfoProvider + + def __init__( + self, + action_runner: iactionrunner.IActionRunner, + project_info_provider: iprojectinfoprovider.IProjectInfoProvider, + ) -> None: + self.action_runner = action_runner + self.project_info_provider = project_info_provider + + async def run( + self, + payload: PublishAndVerifyArtifactRunPayload, + run_context: PublishAndVerifyArtifactRunContext, + ) -> PublishAndVerifyArtifactRunResult: + run_meta = run_context.meta + + src_artifact_def_path: pathlib.Path = payload.src_artifact_def_path + dist_artifact_paths: list[pathlib.Path] = payload.dist_artifact_paths + + # Publish the artifact + publish_action = self.action_runner.get_action_by_name( + "publish_artifact", publish_artifact.PublishArtifactAction + ) + publish_payload = publish_artifact.PublishArtifactRunPayload( + src_artifact_def_path=src_artifact_def_path, + dist_artifact_paths=dist_artifact_paths, + force=payload.force, + ) + publish_result = await self.action_runner.run_action( + action=publish_action, payload=publish_payload, meta=run_meta + ) + published_registries = publish_result.published_registries + + # TODO: impl verify of each dist file. 
NOTE; they can have different versions + # Get version from the dist artifact + get_version_action = self.action_runner.get_action_by_name( + "get_dist_artifact_version", + get_dist_artifact_version.GetDistArtifactVersionAction, + ) + get_version_payload = get_dist_artifact_version.GetDistArtifactVersionRunPayload( + dist_artifact_path=dist_artifact_paths[0] + ) + get_version_result = await self.action_runner.run_action( + action=get_version_action, payload=get_version_payload, meta=run_meta + ) + version = get_version_result.version + + + # Verify each published registry + verification_errors: dict[str, list[str]] = {} + verify_action = self.action_runner.get_action_by_name( + "verify_artifact_published_to_registry", + verify_artifact_published_to_registry.VerifyArtifactPublishedToRegistryAction, + ) + + for registry_name in published_registries: + verify_payload = verify_artifact_published_to_registry.VerifyArtifactPublishedToRegistryRunPayload( + dist_artifact_paths=dist_artifact_paths, + registry_name=registry_name, + version=version, + ) + verify_result = await self.action_runner.run_action( + action=verify_action, payload=verify_payload, meta=run_meta + ) + if verify_result.errors: + verification_errors[registry_name] = verify_result.errors + + return PublishAndVerifyArtifactRunResult( + version=version, + published_registries=published_registries, + verification_errors=verification_errors, + ) diff --git a/finecode_dev_extensions/src/finecode_dev_extensions/publish_and_verify_artifact_init_repository_provider_handler.py b/finecode_dev_extensions/src/finecode_dev_extensions/publish_and_verify_artifact_init_repository_provider_handler.py new file mode 100644 index 00000000..e196a348 --- /dev/null +++ b/finecode_dev_extensions/src/finecode_dev_extensions/publish_and_verify_artifact_init_repository_provider_handler.py @@ -0,0 +1,66 @@ +import dataclasses + +from finecode_extension_api import code_action +from finecode_extension_api.actions import 
init_repository_provider +from finecode_extension_api.interfaces import iactionrunner +from finecode_extension_api.interfaces.irepositorycredentialsprovider import ( + Repository, + RepositoryCredentials, +) + +from .publish_and_verify_artifact_action import ( + PublishAndVerifyArtifactAction, + PublishAndVerifyArtifactRunContext, + PublishAndVerifyArtifactRunPayload, + PublishAndVerifyArtifactRunResult, +) + + +@dataclasses.dataclass +class PublishAndVerifyArtifactInitRepositoryProviderHandlerConfig( + code_action.ActionHandlerConfig, +): + repositories: list[Repository] = dataclasses.field(default_factory=list) + credentials_by_repository: dict[str, RepositoryCredentials] = dataclasses.field( + default_factory=dict + ) + + +class PublishAndVerifyArtifactInitRepositoryProviderHandler( + code_action.ActionHandler[ + PublishAndVerifyArtifactAction, + PublishAndVerifyArtifactInitRepositoryProviderHandlerConfig, + ] +): + def __init__( + self, + config: PublishAndVerifyArtifactInitRepositoryProviderHandlerConfig, + action_runner: iactionrunner.IActionRunner, + ) -> None: + self.config = config + self.action_runner = action_runner + + async def run( + self, + payload: PublishAndVerifyArtifactRunPayload, + run_context: PublishAndVerifyArtifactRunContext, + ) -> PublishAndVerifyArtifactRunResult: + run_meta = run_context.meta + + init_action = self.action_runner.get_action_by_name( + "init_repository_provider", + init_repository_provider.InitRepositoryProviderAction, + ) + init_payload = init_repository_provider.InitRepositoryProviderRunPayload( + repositories=self.config.repositories, + credentials_by_repository=self.config.credentials_by_repository, + ) + await self.action_runner.run_action( + action=init_action, payload=init_payload, meta=run_meta + ) + + return PublishAndVerifyArtifactRunResult( + version="", + published_registries=[], + verification_errors={}, + ) diff --git a/finecode_extension_api/pyproject.toml b/finecode_extension_api/pyproject.toml index 
f3fd8e14..5a1525b0 100644 --- a/finecode_extension_api/pyproject.toml +++ b/finecode_extension_api/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "finecode_extension_api" -version = "0.3.0" +version = "0.4.0a1" description = "" authors = [{ name = "Vladyslav Hnatiuk", email = "aders1234@gmail.com" }] readme = "README.md" @@ -8,7 +8,7 @@ requires-python = ">=3.11, <= 3.14" dependencies = ["typing-extensions (>=4.12.2,<5.0.0)"] [dependency-groups] -dev_workspace = ["finecode==0.3.*", "finecode_dev_common_preset==0.2.*"] +dev_workspace = ["finecode~=0.3.0a0", "finecode_dev_common_preset~=0.3.0a0"] [tool.finecode] presets = [{ source = "finecode_dev_common_preset" }] @@ -18,3 +18,8 @@ finecode_dev_common_preset = { path = "../finecode_dev_common_preset", editable finecode = { path = "../", editable = true } finecode_extension_api = { path = "./", editable = true } finecode_extension_runner = { path = "../finecode_extension_runner", editable = true } +finecode_jsonrpc = { path = "../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../finecode_builtin_handlers", editable = true } +fine_python_recommended = { path = "../presets/fine_python_recommended", editable = true } +fine_python_lint = { path = "../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../presets/fine_python_format", editable = true } diff --git a/finecode_extension_api/src/finecode_extension_api/actions/build_artifact_action.py b/finecode_extension_api/src/finecode_extension_api/actions/build_artifact_action.py new file mode 100644 index 00000000..9d269cb9 --- /dev/null +++ b/finecode_extension_api/src/finecode_extension_api/actions/build_artifact_action.py @@ -0,0 +1,50 @@ +import dataclasses +import pathlib + +from finecode_extension_api import code_action, textstyler + + +@dataclasses.dataclass +class BuildArtifactRunPayload(code_action.RunActionPayload): + # if not provided, current artifact will be built + src_artifact_def_path: pathlib.Path | 
None = None + + +class BuildArtifactRunContext( + code_action.RunActionContext[BuildArtifactRunPayload] +): ... + + +@dataclasses.dataclass +class BuildArtifactRunResult(code_action.RunActionResult): + src_artifact_def_path: pathlib.Path + build_output_paths: list[pathlib.Path] + + def update(self, other: code_action.RunActionResult) -> None: + if not isinstance(other, BuildArtifactRunResult): + return + + if self.src_artifact_def_path != other.src_artifact_def_path: + raise code_action.ActionFailedException(f"BuildArtifactRunResult can be updated only with result of the same src artifact: {self.src_artifact_def_path} != {other.src_artifact_def_path}") + + self.build_output_paths = other.build_output_paths + + def to_text(self) -> str | textstyler.StyledText: + paths_str = "\n ".join(str(p) for p in self.build_output_paths) + return f"Built artifact at:\n {paths_str}" + + @property + def return_code(self) -> code_action.RunReturnCode: + return code_action.RunReturnCode.SUCCESS + + +class BuildArtifactAction( + code_action.Action[ + BuildArtifactRunPayload, + BuildArtifactRunContext, + BuildArtifactRunResult, + ] +): + PAYLOAD_TYPE = BuildArtifactRunPayload + RUN_CONTEXT_TYPE = BuildArtifactRunContext + RESULT_TYPE = BuildArtifactRunResult diff --git a/finecode_extension_api/src/finecode_extension_api/actions/clean_finecode_logs.py b/finecode_extension_api/src/finecode_extension_api/actions/clean_finecode_logs.py new file mode 100644 index 00000000..297390f2 --- /dev/null +++ b/finecode_extension_api/src/finecode_extension_api/actions/clean_finecode_logs.py @@ -0,0 +1,44 @@ +import dataclasses + +from finecode_extension_api import code_action, textstyler + + +@dataclasses.dataclass +class CleanFinecodeLogsRunPayload(code_action.RunActionPayload): ... + + +class CleanFinecodeLogsRunContext( + code_action.RunActionContext[CleanFinecodeLogsRunPayload] +): ... 
+ + +@dataclasses.dataclass +class CleanFinecodeLogsRunResult(code_action.RunActionResult): + errors: list[str] + + def update(self, other: code_action.RunActionResult) -> None: + if not isinstance(other, CleanFinecodeLogsRunResult): + return + self.errors += other.errors + + def to_text(self) -> str | textstyler.StyledText: + return "\n".join(self.errors) + + @property + def return_code(self) -> code_action.RunReturnCode: + if len(self.errors) == 0: + return code_action.RunReturnCode.SUCCESS + else: + return code_action.RunReturnCode.ERROR + + +class CleanFinecodeLogsAction( + code_action.Action[ + CleanFinecodeLogsRunPayload, + CleanFinecodeLogsRunContext, + CleanFinecodeLogsRunResult, + ] +): + PAYLOAD_TYPE = CleanFinecodeLogsRunPayload + RUN_CONTEXT_TYPE = CleanFinecodeLogsRunContext + RESULT_TYPE = CleanFinecodeLogsRunResult diff --git a/finecode_extension_api/src/finecode_extension_api/actions/dump_config.py b/finecode_extension_api/src/finecode_extension_api/actions/dump_config.py index 4cfa4285..519eb6f3 100644 --- a/finecode_extension_api/src/finecode_extension_api/actions/dump_config.py +++ b/finecode_extension_api/src/finecode_extension_api/actions/dump_config.py @@ -22,17 +22,25 @@ class DumpConfigRunPayload(code_action.RunActionPayload): target_file_path: pathlib.Path -class DumpConfigRunContext(code_action.RunActionContext): +class DumpConfigRunContext(code_action.RunActionContext[DumpConfigRunPayload]): def __init__( self, run_id: int, + initial_payload: DumpConfigRunPayload, + meta: code_action.RunActionMeta, + info_provider: code_action.RunContextInfoProvider, ) -> None: - super().__init__(run_id=run_id) + super().__init__( + run_id=run_id, + initial_payload=initial_payload, + meta=meta, + info_provider=info_provider, + ) self.raw_config_dump: dict[str, typing.Any] = {} - async def init(self, initial_payload: DumpConfigRunPayload) -> None: - self.raw_config_dump = initial_payload.project_raw_config + async def init(self) -> None: + 
self.raw_config_dump = self.initial_payload.project_raw_config @dataclasses.dataclass @@ -51,7 +59,9 @@ def to_text(self) -> str | textstyler.StyledText: return formatted_dump_str -class DumpConfigAction(code_action.Action): +class DumpConfigAction( + code_action.Action[DumpConfigRunPayload, DumpConfigRunContext, DumpConfigRunResult] +): PAYLOAD_TYPE = DumpConfigRunPayload RUN_CONTEXT_TYPE = DumpConfigRunContext RESULT_TYPE = DumpConfigRunResult diff --git a/finecode_extension_api/src/finecode_extension_api/actions/format.py b/finecode_extension_api/src/finecode_extension_api/actions/format.py index fa251f04..40a9bd3a 100644 --- a/finecode_extension_api/src/finecode_extension_api/actions/format.py +++ b/finecode_extension_api/src/finecode_extension_api/actions/format.py @@ -1,127 +1,34 @@ import dataclasses -import sys +import enum from pathlib import Path -from typing import NamedTuple -from finecode_extension_api.interfaces import ifilemanager +from finecode_extension_api import code_action +from finecode_extension_api.actions import format_files as format_files_action -if sys.version_info >= (3, 12): - from typing import override -else: - from typing_extensions import override -from finecode_extension_api import code_action, textstyler +class FormatTarget(enum.StrEnum): + PROJECT = "project" + FILES = "files" @dataclasses.dataclass class FormatRunPayload(code_action.RunActionPayload): - file_paths: list[Path] - save: bool + save: bool = True + target: FormatTarget = FormatTarget.PROJECT + # optional, expected only with `target == FormatTarget.FILES` + file_paths: list[Path] = dataclasses.field(default_factory=list) -class FileInfo(NamedTuple): - file_content: str - file_version: str - - -class FormatRunContext(code_action.RunActionContext): - def __init__( - self, - run_id: int, - file_manager: ifilemanager.IFileManager, - ) -> None: - super().__init__(run_id=run_id) - self.file_manager = file_manager - - self.file_info_by_path: dict[Path, FileInfo] = {} - - 
async def init(self, initial_payload: FormatRunPayload) -> None: - for file_path in initial_payload.file_paths: - file_content = await self.file_manager.get_content(file_path) - file_version = await self.file_manager.get_file_version(file_path) - self.file_info_by_path[file_path] = FileInfo( - file_content=file_content, file_version=file_version - ) +class FormatRunContext(code_action.RunActionContext[FormatRunPayload]): ... @dataclasses.dataclass -class FormatRunFileResult: - changed: bool - # changed code or empty string if code was not changed - code: str - - -@dataclasses.dataclass -class FormatRunResult(code_action.RunActionResult): - result_by_file_path: dict[Path, FormatRunFileResult] - - @override - def update(self, other: code_action.RunActionResult) -> None: - if not isinstance(other, FormatRunResult): - return - - for file_path, other_result in other.result_by_file_path.items(): - if other_result.changed is True: - self.result_by_file_path[file_path] = other_result - - def to_text(self) -> str | textstyler.StyledText: - text: textstyler.StyledText = textstyler.StyledText() - unchanged_counter: int = 0 - - for file_path, file_result in self.result_by_file_path.items(): - if file_result.changed: - text.append("reformatted ") - text.append_styled(file_path, bold=True) - text.append("\n") - else: - unchanged_counter += 1 - text.append_styled( - f"{unchanged_counter} files", foreground=textstyler.Color.BLUE - ) - text.append(" unchanged.") +class FormatRunResult(format_files_action.FormatFilesRunResult): ... - return text - -class FormatAction(code_action.Action): +class FormatAction( + code_action.Action[FormatRunPayload, FormatRunContext, FormatRunResult] +): PAYLOAD_TYPE = FormatRunPayload RUN_CONTEXT_TYPE = FormatRunContext RESULT_TYPE = FormatRunResult - - -@dataclasses.dataclass -class SaveFormatHandlerConfig(code_action.ActionHandlerConfig): ... 
- - -class SaveFormatHandler( - code_action.ActionHandler[FormatAction, SaveFormatHandlerConfig] -): - def __init__( - self, - file_manager: ifilemanager.IFileManager, - ) -> None: - self.file_manager = file_manager - - async def run( - self, payload: FormatRunPayload, run_context: FormatRunContext - ) -> FormatRunResult: - file_paths = payload.file_paths - save = payload.save - - if save is True: - for file_path in file_paths: - file_content = run_context.file_info_by_path[file_path].file_content - await self.file_manager.save_file( - file_path=file_path, file_content=file_content - ) - - result = FormatRunResult( - result_by_file_path={ - file_path: FormatRunFileResult( - changed=False, - code=run_context.file_info_by_path[file_path].file_content, - ) - for file_path in file_paths - } - ) - return result diff --git a/finecode_extension_api/src/finecode_extension_api/actions/format_files.py b/finecode_extension_api/src/finecode_extension_api/actions/format_files.py new file mode 100644 index 00000000..ea6dfb27 --- /dev/null +++ b/finecode_extension_api/src/finecode_extension_api/actions/format_files.py @@ -0,0 +1,112 @@ +import dataclasses +import sys +from pathlib import Path +from typing import NamedTuple + +from finecode_extension_api.interfaces import ifileeditor + +if sys.version_info >= (3, 12): + from typing import override +else: + from typing_extensions import override + +from finecode_extension_api import code_action, textstyler + + +@dataclasses.dataclass +class FormatFilesRunPayload(code_action.RunActionPayload): + file_paths: list[Path] + save: bool + + +class FileInfo(NamedTuple): + file_content: str + file_version: str + + +FILE_OPERATION_AUTHOR = ifileeditor.FileOperationAuthor(id="FormatFilesAction") + + +class FormatFilesRunContext(code_action.RunActionContext[FormatFilesRunPayload]): + def __init__( + self, + run_id: int, + initial_payload: FormatFilesRunPayload, + meta: code_action.RunActionMeta, + file_editor: ifileeditor.IFileEditor, + 
info_provider: code_action.RunContextInfoProvider, + ) -> None: + super().__init__( + run_id=run_id, + initial_payload=initial_payload, + meta=meta, + info_provider=info_provider, + ) + self.file_editor = file_editor + + self.file_info_by_path: dict[Path, FileInfo] = {} + self.file_editor_session: ifileeditor.IFileEditorSession + + @override + async def init(self) -> None: + self.file_editor_session = await self.exit_stack.enter_async_context( + self.file_editor.session(FILE_OPERATION_AUTHOR) + ) + for file_path in self.initial_payload.file_paths: + file_info = await self.exit_stack.enter_async_context( + self.file_editor_session.read_file(file_path, block=True) + ) + file_content = file_info.content + file_version = file_info.version + self.file_info_by_path[file_path] = FileInfo( + file_content=file_content, file_version=file_version + ) + + +@dataclasses.dataclass +class FormatRunFileResult: + changed: bool + # changed code or empty string if code was not changed + code: str + + +@dataclasses.dataclass +class FormatFilesRunResult(code_action.RunActionResult): + result_by_file_path: dict[Path, FormatRunFileResult] + + @override + def update(self, other: code_action.RunActionResult) -> None: + if not isinstance(other, FormatFilesRunResult): + return + + for file_path, other_result in other.result_by_file_path.items(): + if other_result.changed is True: + self.result_by_file_path[file_path] = other_result + + def to_text(self) -> str | textstyler.StyledText: + text: textstyler.StyledText = textstyler.StyledText() + unchanged_counter: int = 0 + + for file_path, file_result in self.result_by_file_path.items(): + if file_result.changed: + text.append("reformatted ") + text.append_styled(file_path.as_posix(), bold=True) + text.append("\n") + else: + unchanged_counter += 1 + text.append_styled( + f"{unchanged_counter} files", foreground=textstyler.Color.BLUE + ) + text.append(" unchanged.") + + return text + + +class FormatFilesAction( + code_action.Action[ + 
FormatFilesRunPayload, FormatFilesRunContext, FormatFilesRunResult + ] +): + PAYLOAD_TYPE = FormatFilesRunPayload + RUN_CONTEXT_TYPE = FormatFilesRunContext + RESULT_TYPE = FormatFilesRunResult diff --git a/finecode_extension_api/src/finecode_extension_api/actions/get_dist_artifact_version.py b/finecode_extension_api/src/finecode_extension_api/actions/get_dist_artifact_version.py new file mode 100644 index 00000000..c08d321f --- /dev/null +++ b/finecode_extension_api/src/finecode_extension_api/actions/get_dist_artifact_version.py @@ -0,0 +1,44 @@ +import dataclasses +import pathlib + +from finecode_extension_api import code_action, textstyler + + +@dataclasses.dataclass +class GetDistArtifactVersionRunPayload(code_action.RunActionPayload): + dist_artifact_path: pathlib.Path + + +class GetDistArtifactVersionRunContext( + code_action.RunActionContext[GetDistArtifactVersionRunPayload] +): ... + + +@dataclasses.dataclass +class GetDistArtifactVersionRunResult(code_action.RunActionResult): + version: str + + def update(self, other: code_action.RunActionResult) -> None: + if not isinstance(other, GetDistArtifactVersionRunResult): + return + + self.version = other.version + + def to_text(self) -> str | textstyler.StyledText: + return self.version + + @property + def return_code(self) -> code_action.RunReturnCode: + return code_action.RunReturnCode.SUCCESS + + +class GetDistArtifactVersionAction( + code_action.Action[ + GetDistArtifactVersionRunPayload, + GetDistArtifactVersionRunContext, + GetDistArtifactVersionRunResult, + ] +): + PAYLOAD_TYPE = GetDistArtifactVersionRunPayload + RUN_CONTEXT_TYPE = GetDistArtifactVersionRunContext + RESULT_TYPE = GetDistArtifactVersionRunResult diff --git a/finecode_extension_api/src/finecode_extension_api/actions/get_src_artifact_registries.py b/finecode_extension_api/src/finecode_extension_api/actions/get_src_artifact_registries.py new file mode 100644 index 00000000..059e6514 --- /dev/null +++ 
b/finecode_extension_api/src/finecode_extension_api/actions/get_src_artifact_registries.py @@ -0,0 +1,56 @@ +import dataclasses +import pathlib + +from finecode_extension_api import code_action, textstyler + + +@dataclasses.dataclass +class Registry: + url: str + name: str + + +@dataclasses.dataclass +class GetSrcArtifactRegistriesRunPayload(code_action.RunActionPayload): + src_artifact_def_path: pathlib.Path + + +class GetSrcArtifactRegistriesRunContext( + code_action.RunActionContext[GetSrcArtifactRegistriesRunPayload] +): ... + + +@dataclasses.dataclass +class GetSrcArtifactRegistriesRunResult(code_action.RunActionResult): + registries: list[Registry] + + def update(self, other: code_action.RunActionResult) -> None: + if not isinstance(other, GetSrcArtifactRegistriesRunResult): + return + + self.registries = other.registries + + def to_text(self) -> str | textstyler.StyledText: + if not self.registries: + return "No registries configured" + + lines: list[str] = [] + for registry in self.registries: + lines.append(f"{registry.name}: {registry.url}") + return "\n".join(lines) + + @property + def return_code(self) -> code_action.RunReturnCode: + return code_action.RunReturnCode.SUCCESS + + +class GetSrcArtifactRegistriesAction( + code_action.Action[ + GetSrcArtifactRegistriesRunPayload, + GetSrcArtifactRegistriesRunContext, + GetSrcArtifactRegistriesRunResult, + ] +): + PAYLOAD_TYPE = GetSrcArtifactRegistriesRunPayload + RUN_CONTEXT_TYPE = GetSrcArtifactRegistriesRunContext + RESULT_TYPE = GetSrcArtifactRegistriesRunResult diff --git a/finecode_extension_api/src/finecode_extension_api/actions/get_src_artifact_version.py b/finecode_extension_api/src/finecode_extension_api/actions/get_src_artifact_version.py new file mode 100644 index 00000000..302dc31f --- /dev/null +++ b/finecode_extension_api/src/finecode_extension_api/actions/get_src_artifact_version.py @@ -0,0 +1,44 @@ +import dataclasses +import pathlib + +from finecode_extension_api import code_action, 
textstyler + + +@dataclasses.dataclass +class GetSrcArtifactVersionRunPayload(code_action.RunActionPayload): + src_artifact_def_path: pathlib.Path + + +class GetSrcArtifactVersionRunContext( + code_action.RunActionContext[GetSrcArtifactVersionRunPayload] +): ... + + +@dataclasses.dataclass +class GetSrcArtifactVersionRunResult(code_action.RunActionResult): + version: str + + def update(self, other: code_action.RunActionResult) -> None: + if not isinstance(other, GetSrcArtifactVersionRunResult): + return + + self.version = other.version + + def to_text(self) -> str | textstyler.StyledText: + return self.version + + @property + def return_code(self) -> code_action.RunReturnCode: + return code_action.RunReturnCode.SUCCESS + + +class GetSrcArtifactVersionAction( + code_action.Action[ + GetSrcArtifactVersionRunPayload, + GetSrcArtifactVersionRunContext, + GetSrcArtifactVersionRunResult, + ] +): + PAYLOAD_TYPE = GetSrcArtifactVersionRunPayload + RUN_CONTEXT_TYPE = GetSrcArtifactVersionRunContext + RESULT_TYPE = GetSrcArtifactVersionRunResult diff --git a/finecode_extension_api/src/finecode_extension_api/actions/list_project_files_by_lang.py b/finecode_extension_api/src/finecode_extension_api/actions/group_src_artifact_files_by_lang.py similarity index 50% rename from finecode_extension_api/src/finecode_extension_api/actions/list_project_files_by_lang.py rename to finecode_extension_api/src/finecode_extension_api/actions/group_src_artifact_files_by_lang.py index f7f7a808..218c5d56 100644 --- a/finecode_extension_api/src/finecode_extension_api/actions/list_project_files_by_lang.py +++ b/finecode_extension_api/src/finecode_extension_api/actions/group_src_artifact_files_by_lang.py @@ -11,24 +11,31 @@ @dataclasses.dataclass -class ListProjectFilesByLangRunPayload(code_action.RunActionPayload): ... 
+class GroupSrcArtifactFilesByLangRunPayload(code_action.RunActionPayload): + file_paths: list[pathlib.Path] + langs: list[str] | None = None -class ListProjectFilesByLangRunContext(code_action.RunActionContext): +class GroupSrcArtifactFilesByLangRunContext( + code_action.RunActionContext[GroupSrcArtifactFilesByLangRunPayload] +): def __init__( self, run_id: int, + initial_payload: GroupSrcArtifactFilesByLangRunPayload, + meta: code_action.RunActionMeta, + info_provider: code_action.RunContextInfoProvider ) -> None: - super().__init__(run_id=run_id) + super().__init__(run_id=run_id, initial_payload=initial_payload, meta=meta, info_provider=info_provider) @dataclasses.dataclass -class ListProjectFilesByLangRunResult(code_action.RunActionResult): +class GroupSrcArtifactFilesByLangRunResult(code_action.RunActionResult): files_by_lang: dict[str, list[pathlib.Path]] @override def update(self, other: code_action.RunActionResult) -> None: - if not isinstance(other, ListProjectFilesByLangRunResult): + if not isinstance(other, GroupSrcArtifactFilesByLangRunResult): return for lang, files in other.files_by_lang.items(): @@ -46,7 +53,13 @@ def to_text(self) -> str | textstyler.StyledText: return formatted_result -class ListProjectFilesByLangAction(code_action.Action): - PAYLOAD_TYPE = ListProjectFilesByLangRunPayload - RUN_CONTEXT_TYPE = ListProjectFilesByLangRunContext - RESULT_TYPE = ListProjectFilesByLangRunResult +class GroupSrcArtifactFilesByLangAction( + code_action.Action[ + GroupSrcArtifactFilesByLangRunPayload, + GroupSrcArtifactFilesByLangRunContext, + GroupSrcArtifactFilesByLangRunResult, + ] +): + PAYLOAD_TYPE = GroupSrcArtifactFilesByLangRunPayload + RUN_CONTEXT_TYPE = GroupSrcArtifactFilesByLangRunContext + RESULT_TYPE = GroupSrcArtifactFilesByLangRunResult diff --git a/finecode_extension_api/src/finecode_extension_api/actions/init_repository_provider.py b/finecode_extension_api/src/finecode_extension_api/actions/init_repository_provider.py new file mode 100644 
index 00000000..e65216bf --- /dev/null +++ b/finecode_extension_api/src/finecode_extension_api/actions/init_repository_provider.py @@ -0,0 +1,49 @@ +import dataclasses + +from finecode_extension_api import code_action, textstyler +from finecode_extension_api.interfaces.irepositorycredentialsprovider import ( + Repository, + RepositoryCredentials, +) + + +@dataclasses.dataclass +class InitRepositoryProviderRunPayload(code_action.RunActionPayload): + repositories: list[Repository] + credentials_by_repository: dict[str, RepositoryCredentials] + + +class InitRepositoryProviderRunContext( + code_action.RunActionContext[InitRepositoryProviderRunPayload] +): ... + + +@dataclasses.dataclass +class InitRepositoryProviderRunResult(code_action.RunActionResult): + initialized_repositories: list[str] + + def update(self, other: code_action.RunActionResult) -> None: + if not isinstance(other, InitRepositoryProviderRunResult): + return + self.initialized_repositories = other.initialized_repositories + + def to_text(self) -> str | textstyler.StyledText: + if self.initialized_repositories: + return f"Initialized repositories: {', '.join(self.initialized_repositories)}" + return "No repositories initialized" + + @property + def return_code(self) -> code_action.RunReturnCode: + return code_action.RunReturnCode.SUCCESS + + +class InitRepositoryProviderAction( + code_action.Action[ + InitRepositoryProviderRunPayload, + InitRepositoryProviderRunContext, + InitRepositoryProviderRunResult, + ] +): + PAYLOAD_TYPE = InitRepositoryProviderRunPayload + RUN_CONTEXT_TYPE = InitRepositoryProviderRunContext + RESULT_TYPE = InitRepositoryProviderRunResult diff --git a/finecode_extension_api/src/finecode_extension_api/actions/install_deps_in_env.py b/finecode_extension_api/src/finecode_extension_api/actions/install_deps_in_env.py index e8de2f89..02627d13 100644 --- a/finecode_extension_api/src/finecode_extension_api/actions/install_deps_in_env.py +++ 
b/finecode_extension_api/src/finecode_extension_api/actions/install_deps_in_env.py @@ -25,12 +25,22 @@ class InstallDepsInEnvRunPayload(code_action.RunActionPayload): dependencies: list[Dependency] -class InstallDepsInEnvRunContext(code_action.RunActionContext): +class InstallDepsInEnvRunContext( + code_action.RunActionContext[InstallDepsInEnvRunPayload] +): def __init__( self, run_id: int, + initial_payload: InstallDepsInEnvRunPayload, + meta: code_action.RunActionMeta, + info_provider: code_action.RunContextInfoProvider, ) -> None: - super().__init__(run_id=run_id) + super().__init__( + run_id=run_id, + initial_payload=initial_payload, + meta=meta, + info_provider=info_provider, + ) @dataclasses.dataclass diff --git a/finecode_extension_api/src/finecode_extension_api/actions/is_artifact_published_to_registry.py b/finecode_extension_api/src/finecode_extension_api/actions/is_artifact_published_to_registry.py new file mode 100644 index 00000000..f07faa17 --- /dev/null +++ b/finecode_extension_api/src/finecode_extension_api/actions/is_artifact_published_to_registry.py @@ -0,0 +1,58 @@ +import dataclasses +import pathlib + +from finecode_extension_api import code_action, textstyler + + +@dataclasses.dataclass +class IsArtifactPublishedToRegistryRunPayload(code_action.RunActionPayload): + src_artifact_def_path: pathlib.Path + dist_artifact_paths: list[pathlib.Path] + version: str + registry_name: str + + +class IsArtifactPublishedToRegistryRunContext( + code_action.RunActionContext[IsArtifactPublishedToRegistryRunPayload] +): ... 
+ + +@dataclasses.dataclass +class IsArtifactPublishedToRegistryRunResult(code_action.RunActionResult): + is_published_by_dist_path: dict[pathlib.Path, bool] + + def update(self, other: code_action.RunActionResult) -> None: + if not isinstance(other, IsArtifactPublishedToRegistryRunResult): + return + + self.is_published_by_dist_path = other.is_published_by_dist_path + + def to_text(self) -> str | textstyler.StyledText: + published = [ + p for p, is_pub in self.is_published_by_dist_path.items() if is_pub + ] + not_published = [ + p for p, is_pub in self.is_published_by_dist_path.items() if not is_pub + ] + parts: list[str] = [] + if published: + parts.append(f"published: {', '.join(str(p) for p in published)}") + if not_published: + parts.append(f"not published: {', '.join(str(p) for p in not_published)}") + return "; ".join(parts) if parts else "no artifacts" + + @property + def return_code(self) -> code_action.RunReturnCode: + return code_action.RunReturnCode.SUCCESS + + +class IsArtifactPublishedToRegistryAction( + code_action.Action[ + IsArtifactPublishedToRegistryRunPayload, + IsArtifactPublishedToRegistryRunContext, + IsArtifactPublishedToRegistryRunResult, + ] +): + PAYLOAD_TYPE = IsArtifactPublishedToRegistryRunPayload + RUN_CONTEXT_TYPE = IsArtifactPublishedToRegistryRunContext + RESULT_TYPE = IsArtifactPublishedToRegistryRunResult diff --git a/finecode_extension_api/src/finecode_extension_api/actions/lint.py b/finecode_extension_api/src/finecode_extension_api/actions/lint.py index 39f55108..9b52a993 100644 --- a/finecode_extension_api/src/finecode_extension_api/actions/lint.py +++ b/finecode_extension_api/src/finecode_extension_api/actions/lint.py @@ -1,113 +1,37 @@ -import collections.abc import dataclasses import enum from pathlib import Path -from finecode_extension_api import code_action, textstyler +from finecode_extension_api import code_action +from finecode_extension_api.actions import lint_files -@dataclasses.dataclass -class Position: - line: 
int - character: int - - -@dataclasses.dataclass -class Range: - start: Position - end: Position - - -class LintMessageSeverity(enum.IntEnum): - # use IntEnum to get json serialization out of the box - ERROR = 1 - WARNING = 2 - INFO = 3 - HINT = 4 - - -@dataclasses.dataclass -class LintMessage: - range: Range - message: str - code: str | None = None - code_description: str | None = None - source: str | None = None - severity: LintMessageSeverity | None = None - - -@dataclasses.dataclass -class LintRunPayload(code_action.RunActionPayload, collections.abc.AsyncIterable): - file_paths: list[Path] - - def __aiter__(self) -> collections.abc.AsyncIterator[Path]: - return LintRunPayloadIterator(self) +class LintTarget(enum.StrEnum): + PROJECT = "project" + FILES = "files" @dataclasses.dataclass -class LintRunPayloadIterator(collections.abc.AsyncIterator): - def __init__(self, lint_run_payload: LintRunPayload): - self.lint_run_payload = lint_run_payload - self.current_file_path_index = 0 - - def __aiter__(self): - return self - - async def __anext__(self) -> Path: - if len(self.lint_run_payload.file_paths) <= self.current_file_path_index: - raise StopAsyncIteration() - self.current_file_path_index += 1 - return self.lint_run_payload.file_paths[self.current_file_path_index - 1] +class LintRunPayload(code_action.RunActionPayload): + target: LintTarget = LintTarget.PROJECT + # optional, expected only with `target == LintTarget.FILES` + file_paths: list[Path] = dataclasses.field(default_factory=list) @dataclasses.dataclass -class LintRunResult(code_action.RunActionResult): - # messages is a dict to support messages for multiple files because it could be the - # case that linter checks given file and its dependencies. 
- # - # dict key should be Path, but pygls fails to handle slashes in dict keys, use - # strings with posix representation of path instead until the problem is properly - # solved - messages: dict[str, list[LintMessage]] +class LintRunResult(lint_files.LintFilesRunResult): ... - def update(self, other: code_action.RunActionResult) -> None: - if not isinstance(other, LintRunResult): - return - for file_path_str, new_messages in other.messages.items(): - if file_path_str not in self.messages: - self.messages[file_path_str] = [] - self.messages[file_path_str].extend(new_messages) +class LintRunContext( + code_action.RunActionWithPartialResultsContext[LintRunPayload] +): ... - def to_text(self) -> str | textstyler.StyledText: - text: textstyler.StyledText = textstyler.StyledText() - for file_path_str, file_messages in self.messages.items(): - if len(file_messages) > 0: - for message in file_messages: - # TODO: relative file path? - source_str = "" - if message.source is not None: - source_str = f" ({message.source})" - text.append_styled(file_path_str, bold=True) - text.append(f":{message.range.start.line}") - text.append(f":{message.range.start.character}: ") - text.append_styled(message.code, foreground=textstyler.Color.RED) - text.append(f" {message.message}{source_str}\n") - else: - text.append_styled(file_path_str, bold=True) - text.append(": OK\n") - return text - - @property - def return_code(self) -> code_action.RunReturnCode: - for lint_messages in self.messages.values(): - if len(lint_messages) > 0: - return code_action.RunReturnCode.ERROR - return code_action.RunReturnCode.SUCCESS - - -class LintAction(code_action.Action): +class LintAction(code_action.Action[LintRunPayload, LintRunContext, LintRunResult]): PAYLOAD_TYPE = LintRunPayload - RUN_CONTEXT_TYPE = code_action.RunActionWithPartialResultsContext + RUN_CONTEXT_TYPE = LintRunContext RESULT_TYPE = LintRunResult + + +# reexport +LintMessage = lint_files.LintMessage diff --git 
a/finecode_extension_api/src/finecode_extension_api/actions/lint_files.py b/finecode_extension_api/src/finecode_extension_api/actions/lint_files.py new file mode 100644 index 00000000..a357e16f --- /dev/null +++ b/finecode_extension_api/src/finecode_extension_api/actions/lint_files.py @@ -0,0 +1,129 @@ +import collections.abc +import dataclasses +import enum +from pathlib import Path + +from finecode_extension_api import code_action, textstyler + + +@dataclasses.dataclass +class Position: + line: int + character: int + + +@dataclasses.dataclass +class Range: + start: Position + end: Position + + +class LintMessageSeverity(enum.IntEnum): + # use IntEnum to get json serialization out of the box + ERROR = 1 + WARNING = 2 + INFO = 3 + HINT = 4 + + +@dataclasses.dataclass +class LintMessage: + range: Range + message: str + code: str | None = None + code_description: str | None = None + source: str | None = None + severity: LintMessageSeverity | None = None + + +@dataclasses.dataclass +class LintFilesRunPayload( + code_action.RunActionPayload, collections.abc.AsyncIterable[Path] +): + file_paths: list[Path] + + def __aiter__(self) -> collections.abc.AsyncIterator[Path]: + return LintFilesRunPayloadIterator(self) + + +@dataclasses.dataclass +class LintFilesRunPayloadIterator(collections.abc.AsyncIterator[Path]): + def __init__(self, lint_files_run_payload: LintFilesRunPayload): + self.lint_files_run_payload = lint_files_run_payload + self.current_file_path_index = 0 + + def __aiter__(self): + return self + + async def __anext__(self) -> Path: + if len(self.lint_files_run_payload.file_paths) <= self.current_file_path_index: + raise StopAsyncIteration() + self.current_file_path_index += 1 + return self.lint_files_run_payload.file_paths[self.current_file_path_index - 1] + + +@dataclasses.dataclass +class LintFilesRunResult(code_action.RunActionResult): + # messages is a dict to support messages for multiple files because it could be the + # case that linter checks given file 
and its dependencies. + # + # dict key should be Path, but pygls fails to handle slashes in dict keys, use + # strings with posix representation of path instead until the problem is properly + # solved + messages: dict[str, list[LintMessage]] + + def update(self, other: code_action.RunActionResult) -> None: + if not isinstance(other, LintFilesRunResult): + return + + for file_path_str, new_messages in other.messages.items(): + if file_path_str not in self.messages: + self.messages[file_path_str] = [] + self.messages[file_path_str].extend(new_messages) + + def to_text(self) -> str | textstyler.StyledText: + text: textstyler.StyledText = textstyler.StyledText() + for file_path_str, file_messages in self.messages.items(): + if len(file_messages) > 0: + for message in file_messages: + # TODO: relative file path? + source_str = "" + if message.source is not None: + source_str = f" ({message.source})" + text.append_styled(file_path_str, bold=True) + text.append(f":{message.range.start.line}") + text.append(f":{message.range.start.character}: ") + if message.code is not None: + text.append_styled( + message.code, foreground=textstyler.Color.RED + ) + text.append(f" {message.message}{source_str}\n") + else: + text.append_styled(file_path_str, bold=True) + text.append(": OK\n") + + return text + + @property + def return_code(self) -> code_action.RunReturnCode: + for lint_messages in self.messages.values(): + if len(lint_messages) > 0: + return code_action.RunReturnCode.ERROR + return code_action.RunReturnCode.SUCCESS + + +class LintFilesRunContext( + code_action.RunActionWithPartialResultsContext[LintFilesRunPayload] +): ... 
+ + +class LintFilesAction( + code_action.Action[ + LintFilesRunPayload, + LintFilesRunContext, + LintFilesRunResult, + ] +): + PAYLOAD_TYPE = LintFilesRunPayload + RUN_CONTEXT_TYPE = LintFilesRunContext + RESULT_TYPE = LintFilesRunResult diff --git a/finecode_extension_api/src/finecode_extension_api/actions/list_src_artifact_files_by_lang.py b/finecode_extension_api/src/finecode_extension_api/actions/list_src_artifact_files_by_lang.py new file mode 100644 index 00000000..445b56eb --- /dev/null +++ b/finecode_extension_api/src/finecode_extension_api/actions/list_src_artifact_files_by_lang.py @@ -0,0 +1,69 @@ +import dataclasses +import pathlib +import sys + +if sys.version_info >= (3, 12): + from typing import override +else: + from typing_extensions import override + +from finecode_extension_api import code_action, textstyler + + +@dataclasses.dataclass +class ListSrcArtifactFilesByLangRunPayload(code_action.RunActionPayload): + langs: list[str] | None = None + + +class ListSrcArtifactFilesByLangRunContext( + code_action.RunActionContext[ListSrcArtifactFilesByLangRunPayload] +): + def __init__( + self, + run_id: int, + initial_payload: ListSrcArtifactFilesByLangRunPayload, + meta: code_action.RunActionMeta, + info_provider: code_action.RunContextInfoProvider, + ) -> None: + super().__init__( + run_id=run_id, + initial_payload=initial_payload, + meta=meta, + info_provider=info_provider, + ) + + +@dataclasses.dataclass +class ListSrcArtifactFilesByLangRunResult(code_action.RunActionResult): + files_by_lang: dict[str, list[pathlib.Path]] + + @override + def update(self, other: code_action.RunActionResult) -> None: + if not isinstance(other, ListSrcArtifactFilesByLangRunResult): + return + + for lang, files in other.files_by_lang.items(): + if lang not in self.files_by_lang: + self.files_by_lang[lang] = files + else: + self.files_by_lang[lang] += files + + def to_text(self) -> str | textstyler.StyledText: + formatted_result = textstyler.StyledText() + for language, 
files in self.files_by_lang.items(): + formatted_result.append_styled(text=language + "\n", bold=True) + for file_path in files: + formatted_result.append(file_path.as_posix() + "\n") + return formatted_result + + +class ListSrcArtifactFilesByLangAction( + code_action.Action[ + ListSrcArtifactFilesByLangRunPayload, + ListSrcArtifactFilesByLangRunContext, + ListSrcArtifactFilesByLangRunResult, + ] +): + PAYLOAD_TYPE = ListSrcArtifactFilesByLangRunPayload + RUN_CONTEXT_TYPE = ListSrcArtifactFilesByLangRunContext + RESULT_TYPE = ListSrcArtifactFilesByLangRunResult diff --git a/finecode_extension_api/src/finecode_extension_api/actions/prepare_envs.py b/finecode_extension_api/src/finecode_extension_api/actions/prepare_envs.py index 51ed0011..6c2452e0 100644 --- a/finecode_extension_api/src/finecode_extension_api/actions/prepare_envs.py +++ b/finecode_extension_api/src/finecode_extension_api/actions/prepare_envs.py @@ -28,12 +28,20 @@ class PrepareEnvsRunPayload(code_action.RunActionPayload): recreate: bool = False -class PrepareEnvsRunContext(code_action.RunActionContext): +class PrepareEnvsRunContext(code_action.RunActionContext[PrepareEnvsRunPayload]): def __init__( self, run_id: int, + initial_payload: PrepareEnvsRunPayload, + meta: code_action.RunActionMeta, + info_provider: code_action.RunContextInfoProvider, ) -> None: - super().__init__(run_id=run_id) + super().__init__( + run_id=run_id, + initial_payload=initial_payload, + meta=meta, + info_provider=info_provider, + ) # project def pathes are stored also in context, because prepare envs can run # tools like pip which expected 'normalized' project definition(=without @@ -50,8 +58,8 @@ def __init__( pathlib.Path, dict[str, typing.Any] ] = {} - async def init(self, initial_payload: PrepareEnvsRunPayload) -> None: - for env_info in initial_payload.envs: + async def init(self) -> None: + for env_info in self.initial_payload.envs: self.project_def_path_by_venv_dir_path[env_info.venv_dir_path] = ( 
env_info.project_def_path ) @@ -79,7 +87,11 @@ def return_code(self) -> code_action.RunReturnCode: return code_action.RunReturnCode.ERROR -class PrepareEnvsAction(code_action.Action): +class PrepareEnvsAction( + code_action.Action[ + PrepareEnvsRunPayload, PrepareEnvsRunContext, PrepareEnvsRunResult + ] +): PAYLOAD_TYPE = PrepareEnvsRunPayload RUN_CONTEXT_TYPE = PrepareEnvsRunContext RESULT_TYPE = PrepareEnvsRunResult diff --git a/finecode_extension_api/src/finecode_extension_api/actions/prepare_runners.py b/finecode_extension_api/src/finecode_extension_api/actions/prepare_runners.py index a7c03299..6b575ada 100644 --- a/finecode_extension_api/src/finecode_extension_api/actions/prepare_runners.py +++ b/finecode_extension_api/src/finecode_extension_api/actions/prepare_runners.py @@ -28,12 +28,20 @@ class PrepareRunnersRunPayload(code_action.RunActionPayload): recreate: bool = False -class PrepareRunnersRunContext(code_action.RunActionContext): +class PrepareRunnersRunContext(code_action.RunActionContext[PrepareRunnersRunPayload]): def __init__( self, run_id: int, + initial_payload: PrepareRunnersRunPayload, + meta: code_action.RunActionMeta, + info_provider: code_action.RunContextInfoProvider, ) -> None: - super().__init__(run_id=run_id) + super().__init__( + run_id=run_id, + initial_payload=initial_payload, + meta=meta, + info_provider=info_provider, + ) # project def pathes are stored also in context, because prepare envs can run # tools like pip which expected 'normalized' project definition(=without @@ -50,8 +58,8 @@ def __init__( pathlib.Path, dict[str, typing.Any] ] = {} - async def init(self, initial_payload: PrepareRunnersRunPayload) -> None: - for env_info in initial_payload.envs: + async def init(self) -> None: + for env_info in self.initial_payload.envs: self.project_def_path_by_venv_dir_path[env_info.venv_dir_path] = ( env_info.project_def_path ) @@ -79,7 +87,11 @@ def return_code(self) -> code_action.RunReturnCode: return code_action.RunReturnCode.ERROR 
-class PrepareRunnersAction(code_action.Action): +class PrepareRunnersAction( + code_action.Action[ + PrepareRunnersRunPayload, PrepareRunnersRunContext, PrepareRunnersRunResult + ] +): PAYLOAD_TYPE = PrepareRunnersRunPayload RUN_CONTEXT_TYPE = PrepareRunnersRunContext RESULT_TYPE = PrepareRunnersRunResult diff --git a/finecode_extension_api/src/finecode_extension_api/actions/publish_artifact.py b/finecode_extension_api/src/finecode_extension_api/actions/publish_artifact.py new file mode 100644 index 00000000..8aa9deee --- /dev/null +++ b/finecode_extension_api/src/finecode_extension_api/actions/publish_artifact.py @@ -0,0 +1,52 @@ +import dataclasses +import pathlib + +from finecode_extension_api import code_action, textstyler + + +@dataclasses.dataclass +class PublishArtifactRunPayload(code_action.RunActionPayload): + src_artifact_def_path: pathlib.Path + dist_artifact_paths: list[pathlib.Path] + force: bool = False + + +class PublishArtifactRunContext( + code_action.RunActionContext[PublishArtifactRunPayload] +): ... 
@dataclasses.dataclass
class PublishArtifactRunResult(code_action.RunActionResult):
    """Result of a publish run: which version went to which registries."""

    # version that was published (or found to be already published)
    version: str
    # registries the artifact was newly published to during this run
    published_registries: list[str]

    def update(self, other: code_action.RunActionResult) -> None:
        """Merge ``other`` into this result.

        NOTE(review): last-writer-wins — ``other`` fully replaces both fields
        instead of accumulating ``published_registries``; confirm this is the
        intended behavior when several handlers contribute results.
        """
        if not isinstance(other, PublishArtifactRunResult):
            return

        self.version = other.version
        self.published_registries = other.published_registries

    def to_text(self) -> str | textstyler.StyledText:
        """Human-readable one-line summary of the publish run."""
        if len(self.published_registries) > 0:
            registries_str = ", ".join(self.published_registries)
            return f"Published version {self.version} to: {registries_str}"
        else:
            return f"Version {self.version} is already published"

    @property
    def return_code(self) -> code_action.RunReturnCode:
        # "already published" also counts as success
        return code_action.RunReturnCode.SUCCESS


class PublishArtifactAction(
    code_action.Action[
        PublishArtifactRunPayload,
        PublishArtifactRunContext,
        PublishArtifactRunResult,
    ]
):
    PAYLOAD_TYPE = PublishArtifactRunPayload
    RUN_CONTEXT_TYPE = PublishArtifactRunContext
    RESULT_TYPE = PublishArtifactRunResult


# --- new file: finecode_extension_api/src/finecode_extension_api/actions/publish_artifact_to_registry.py ---
import dataclasses
import pathlib

from finecode_extension_api import code_action, textstyler


@dataclasses.dataclass
class PublishArtifactToRegistryRunPayload(code_action.RunActionPayload):
    """Input for publishing dist artifacts to a single named registry."""

    src_artifact_def_path: pathlib.Path
    dist_artifact_paths: list[pathlib.Path]
    # registry to publish to — presumably resolved against the configured
    # repository provider (see the CI workflow env config); confirm
    registry_name: str
    force: bool = False


class PublishArtifactToRegistryRunContext(
    code_action.RunActionContext[PublishArtifactToRegistryRunPayload]
): ...


@dataclasses.dataclass
class PublishArtifactToRegistryRunResult(code_action.RunActionResult):
    # no result fields yet
    ...

    # no-op merge: there is no state to combine
    def update(self, other: code_action.RunActionResult) -> None: ...

    def to_text(self) -> str | textstyler.StyledText:
        return "Published"

    @property
    def return_code(self) -> code_action.RunReturnCode:
        return code_action.RunReturnCode.SUCCESS


class PublishArtifactToRegistryAction(
    code_action.Action[
        PublishArtifactToRegistryRunPayload,
        PublishArtifactToRegistryRunContext,
        PublishArtifactToRegistryRunResult,
    ]
):
    PAYLOAD_TYPE = PublishArtifactToRegistryRunPayload
    RUN_CONTEXT_TYPE = PublishArtifactToRegistryRunContext
    RESULT_TYPE = PublishArtifactToRegistryRunResult


# --- new file: finecode_extension_api/src/finecode_extension_api/actions/verify_artifact_published_to_registry.py ---
import dataclasses
import pathlib

from finecode_extension_api import code_action, textstyler


@dataclasses.dataclass
class VerifyArtifactPublishedToRegistryRunPayload(code_action.RunActionPayload):
    """Input for verifying that a published version is visible in a registry."""

    dist_artifact_paths: list[pathlib.Path]
    registry_name: str
    version: str


class VerifyArtifactPublishedToRegistryRunContext(
    code_action.RunActionContext[VerifyArtifactPublishedToRegistryRunPayload]
): ...
+ + +@dataclasses.dataclass +class VerifyArtifactPublishedToRegistryRunResult(code_action.RunActionResult): + errors: list[str] + + def update(self, other: code_action.RunActionResult) -> None: + if not isinstance(other, VerifyArtifactPublishedToRegistryRunResult): + return + self.errors.extend(other.errors) + + def to_text(self) -> str | textstyler.StyledText: + if self.errors: + return f"Verification failed with {len(self.errors)} error(s):\n" + "\n".join( + f" - {e}" for e in self.errors + ) + return "Verification successful" + + @property + def return_code(self) -> code_action.RunReturnCode: + if self.errors: + return code_action.RunReturnCode.ERROR + return code_action.RunReturnCode.SUCCESS + + +class VerifyArtifactPublishedToRegistryAction( + code_action.Action[ + VerifyArtifactPublishedToRegistryRunPayload, + VerifyArtifactPublishedToRegistryRunContext, + VerifyArtifactPublishedToRegistryRunResult, + ] +): + PAYLOAD_TYPE = VerifyArtifactPublishedToRegistryRunPayload + RUN_CONTEXT_TYPE = VerifyArtifactPublishedToRegistryRunContext + RESULT_TYPE = VerifyArtifactPublishedToRegistryRunResult diff --git a/finecode_extension_api/src/finecode_extension_api/code_action.py b/finecode_extension_api/src/finecode_extension_api/code_action.py index cde49bf4..cc2b1789 100644 --- a/finecode_extension_api/src/finecode_extension_api/code_action.py +++ b/finecode_extension_api/src/finecode_extension_api/code_action.py @@ -1,7 +1,7 @@ from __future__ import annotations -import asyncio import collections.abc +import contextlib import dataclasses import enum import typing @@ -18,6 +18,26 @@ class ActionHandlerConfig: ... class RunActionPayload: ... 
class RunActionTrigger(enum.StrEnum):
    """Who initiated the action run."""

    USER = "user"
    SYSTEM = "system"
    UNKNOWN = "unknown"


class DevEnv(enum.StrEnum):
    """Environment the action runs in."""

    IDE = "ide"
    CLI = "cli"
    AI = "ai"
    PRECOMMIT = "precommit"
    CI_CD = "cicd"


@dataclasses.dataclass
class RunActionMeta:
    """Metadata about a single action run, exposed to contexts via ``meta``."""

    trigger: RunActionTrigger
    dev_env: DevEnv


# (diff context: pre-existing definition, unchanged by this patch)
class RunReturnCode(enum.IntEnum):
    SUCCESS = 0
    ERROR = 1


# (diff context: tail of RunActionResult — default success return code)
#     def return_code(self) -> RunReturnCode:
#         return RunReturnCode.SUCCESS


# NOTE(review): both TypeVars are declared covariant, yet RunPayloadType is
# used in parameter/attribute position below (RunActionContext.__init__) — a
# strict type checker will reject that; confirm intended variance.
RunPayloadType = TypeVar("RunPayloadType", bound=RunActionPayload, covariant=True)
RunResultType = TypeVar("RunResultType", bound=RunActionResult, covariant=True)


class RunContextInfoProvider:
    """
    Owned by the action runner, passed to RunActionContext.
    """

    def __init__(self, is_concurrent_execution: bool) -> None:
        # accumulated result of completed handlers; None until the first finishes
        self._current_result: RunActionResult | None = None
        self.is_concurrent_execution: bool = is_concurrent_execution

    @property
    def current_result(self) -> RunActionResult | None:
        """
        Access accumulated result from previously completed handlers.
        Only available in sequential execution mode.

        NOTE: it's highly discouraged to change the object, use it as readonly object.
        """
        return self._current_result

    def update(self, result: RunActionResult) -> None:
        """Called by action runner after each handler completes."""
        if self._current_result is None:
            self._current_result = result
        else:
            self._current_result.update(result)


class RunActionContext(typing.Generic[RunPayloadType]):
    # data object to save data between action steps(only during one run, after run data
    # is removed). Keep it simple, without business logic, just data storage, but you
    # still may initialize values in constructor using dependency injection if needed
    # to avoid handling in action cases when run context is not initialized and is
    # initialized already.
    def __init__(
        self,
        run_id: int,
        initial_payload: RunPayloadType,
        meta: RunActionMeta,
        info_provider: RunContextInfoProvider,
    ) -> None:
        self.run_id = run_id
        self.initial_payload = initial_payload
        self.meta = meta
        # resources registered here are released in __aexit__
        self.exit_stack = contextlib.AsyncExitStack()
        self._info_provider = info_provider

    @property
    def current_result(self) -> RunActionResult | None:
        """
        Access accumulated result from previously completed handlers.
        Only available in sequential execution mode.

        NOTE: it's highly discouraged to change the object, use it as readonly object.
        """
        if self._info_provider.is_concurrent_execution:
            raise RuntimeError(
                "Cannot access current_result during concurrent handler execution. "
                "Results from other handlers are not reliably available in concurrent mode."
            )
        return self._info_provider.current_result

    # subclasses override to populate context state from initial_payload
    async def init(self) -> None: ...

    async def __aenter__(self):
        # enter the exit stack before init() so that resources acquired during
        # init can be registered on it and released even if a later step fails
        await self.exit_stack.__aenter__()

        await self.init()
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        return await self.exit_stack.__aexit__(exc_type, exc_val, exc_tb)


RunContextType = TypeVar(
    "RunContextType", bound=RunActionContext[RunActionPayload], covariant=True
)


class RunActionWithPartialResultsContext(RunActionContext[RunPayloadType]):
    """Run context variant that can schedule partial results during a run."""

    def __init__(
        self,
        run_id: int,
        initial_payload: RunPayloadType,
        meta: RunActionMeta,
        info_provider: RunContextInfoProvider,
    ) -> None:
        super().__init__(
            run_id=run_id,
            initial_payload=initial_payload,
            meta=meta,
            info_provider=info_provider,
        )
        self.partial_result_scheduler = partialresultscheduler.PartialResultScheduler()


# (diff context: pre-existing `class ActionConfig:` — body outside this chunk)


class Action(Generic[RunPayloadType, RunContextType, RunResultType]):
    # defaults are the base classes; concrete actions override all four
    PAYLOAD_TYPE: type[RunActionPayload] = RunActionPayload
    RUN_CONTEXT_TYPE: type[RunActionContext[RunPayloadType]] = RunActionContext
    RESULT_TYPE: type[RunActionResult] = RunActionResult
    CONFIG_TYPE: type[ActionConfig] = ActionConfig


# (diff context: `class StopActionRunWithResult(Exception)` and `on_exit(...)`
# lie between the hunks, unchanged and outside this chunk; the old
# `IterableType` TypeVar is removed by this patch)

ActionType = TypeVar(
    "ActionType",
    bound=Action[RunActionPayload, RunActionContext[RunActionPayload], RunActionResult],
    covariant=True,
)
+PayloadTypeVar = TypeVar("PayloadTypeVar", bound=RunActionPayload) class ActionHandler(Protocol[ActionType, ActionHandlerConfigType]): @@ -145,12 +226,6 @@ class ActionHandler(Protocol[ActionType, ActionHandlerConfigType]): """ async def run( - self, payload: RunPayloadType, run_context: RunContextType - ) -> ( - RunResultType - | collections.abc.Mapping[IterableType, asyncio.Task[RunResultType]] - ): - raise NotImplementedError() - - async def stop(self): + self, payload: PayloadTypeVar, run_context: RunActionContext[PayloadTypeVar] + ) -> RunActionResult: raise NotImplementedError() diff --git a/finecode_extension_api/src/finecode_extension_api/contrib/__init__.py b/finecode_extension_api/src/finecode_extension_api/contrib/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/finecode_extension_api/src/finecode_extension_api/contrib/lsp_service.py b/finecode_extension_api/src/finecode_extension_api/contrib/lsp_service.py new file mode 100644 index 00000000..8fda6d40 --- /dev/null +++ b/finecode_extension_api/src/finecode_extension_api/contrib/lsp_service.py @@ -0,0 +1,409 @@ +from __future__ import annotations + +import asyncio +import threading +from pathlib import Path +from typing import Any, override + +from finecode_extension_api import service +from finecode_extension_api.actions import lint_files as lint_files_action +from finecode_extension_api.interfaces import ifileeditor, ilspclient, ilogger + + +class LspService(service.DisposableService): + """Generic long-running LSP service with document synchronization. + + Document synchronization is optimized by IFileEditor events: open, change and close + events are forwarded to the LSP server as textDocument/did* notifications. + + For files not opened by any session, check_file reads via file editor, + compares the content version against what was last sent to LSP, and sends + didOpen/didChange directly only when the content has changed. 
+ + Settings management: + Settings are managed via ``update_settings(settings)`` (sync) which merges + into the internal ``_settings`` dict. Handlers call ``update_settings`` in + their ``__init__`` to apply config-driven settings. Since handler + instantiation happens during eager initialization (before the LSP server + is started), settings accumulate. When ``ensure_started`` triggers + ``start``, settings are delivered to the LSP server in three ways: + + 1. ``initializationOptions`` in the ``initialize`` request (as + ``{"settings": ...}``). + 2. ``workspace/didChangeConfiguration`` notification after ``initialized``. + 3. ``workspace/configuration`` pull requests from the server are answered + with the current settings. + + To push settings to an already running server, call ``send_settings``. + """ + + def __init__( + self, + lsp_client: ilspclient.ILspClient, + file_editor: ifileeditor.IFileEditor, + logger: ilogger.ILogger, + *, + cmd: str, + language_id: str, + readable_id: str = "", + ) -> None: + self._lsp_client = lsp_client + self._file_editor = file_editor + self._logger = logger + self._cmd = cmd + self._language_id = language_id + self._readable_id = readable_id + self._file_operation_author = ifileeditor.FileOperationAuthor( + id=readable_id or "LspService" + ) + self._session: ilspclient.ILspSession | None = None + self._event_task: asyncio.Task[None] | None = None + self._start_lock: asyncio.Lock = asyncio.Lock() + # pending diagnostics waiters: uri -> Event (threading for cross-thread safety) + self._diagnostics: dict[str, threading.Event] = {} + # last received diagnostics per uri (persistent cache) + self._diagnostics_data: dict[str, list[dict[str, Any]]] = {} + # uri -> content version last sent to LSP (for change detection) + self._file_versions: dict[str, str] = {} + # uris currently open in the LSP server + self._open_documents: set[str] = set() + # LSP protocol version counter per uri + self._document_version: dict[str, int] = {} + # 
current settings, accumulated via update_settings and sent on start + self._settings: dict[str, Any] = {} + + @override + async def init(self) -> None: + pass + + @override + def dispose(self) -> None: + asyncio.create_task(self._async_dispose()) + + async def _async_dispose(self) -> None: + if self._event_task is not None: + self._event_task.cancel() + try: + await self._event_task + except (asyncio.CancelledError, Exception): + pass + self._event_task = None + + if self._session is not None: + await self._session.__aexit__(None, None, None) + self._session = None + + self._diagnostics.clear() + self._diagnostics_data.clear() + self._file_versions.clear() + self._open_documents.clear() + self._document_version.clear() + + async def ensure_started( + self, + root_uri: str, + ) -> None: + async with self._start_lock: + if self._session is not None: + return + await self.start(root_uri) + + async def start( + self, + root_uri: str, + ) -> None: + session = self._lsp_client.session( + cmd=self._cmd, + root_uri=root_uri, + workspace_folders=[{"uri": root_uri, "name": root_uri}], + initialization_options={"settings": self._settings} if self._settings else None, + readable_id=self._readable_id, + ) + await session.__aenter__() + self._session = session + self._session.on_notification( + "textDocument/publishDiagnostics", + self._handle_diagnostics, + ) + # Handle pull-based configuration (e.g. pyrefly sends workspace/configuration + # requests after initialized and after each didChangeConfiguration). + self._session.on_request( + "workspace/configuration", + self._handle_configuration_request, + ) + + # some LSP servers read settings from didChangeConfiguration (e.g. 
pyrefly) + if self._settings: + await self._session.send_notification( + "workspace/didChangeConfiguration", + {"settings": self._settings}, + ) + + ready = asyncio.Event() + self._event_task = asyncio.create_task(self._run_event_loop(ready)) + await ready.wait() + + def update_settings(self, settings: dict[str, Any]) -> None: + """Update LSP server settings. + + Merges ``settings`` into the internal settings dict. If the server is not + yet started, settings accumulate and are sent on ``start``. Handlers call + this from ``__init__`` to apply config-driven settings. + + If the server is already running, call ``send_settings`` to push the + updated settings. + """ + self._settings.update(settings) + + async def send_settings(self) -> None: + """Send current settings to the running LSP server.""" + assert self._session is not None, "LspService not started" + await self._session.send_notification( + "workspace/didChangeConfiguration", + {"settings": self._settings}, + ) + + async def check_file( + self, + file_path: Path, + timeout: float = 30.0, + ) -> list[dict[str, Any]]: + """Check a file and return raw LSP diagnostics.""" + assert self._session is not None, "LspService not started" + + uri = file_path.as_uri() + + async with self._file_editor.session( + author=self._file_operation_author + ) as fe_session: + async with fe_session.read_file(file_path) as file_info: + content = file_info.content + version = file_info.version + + if self._file_versions.get(uri) == version: + # LSP already has the current content; return cached diagnostics + return self._diagnostics_data.get(uri, []) + + event = threading.Event() + self._diagnostics[uri] = event + + lsp_version = self._next_version(uri) + if uri not in self._open_documents: + await self._session.send_notification( + "textDocument/didOpen", + { + "textDocument": { + "uri": uri, + "languageId": self._language_id, + "version": lsp_version, + "text": content, + }, + }, + ) + self._open_documents.add(uri) + else: + await 
self._session.send_notification( + "textDocument/didChange", + { + "textDocument": {"uri": uri, "version": lsp_version}, + "contentChanges": [{"text": content}], + }, + ) + + self._file_versions[uri] = version + + was_set = await asyncio.to_thread(event.wait, timeout) + if not was_set: + self._logger.warning( + f"Timeout waiting for LSP diagnostics for {file_path}" + ) + elif not self._diagnostics_data.get(uri): + # Got empty initial diagnostics; some servers (e.g. pyrefly) send + # an empty ack first, then the real diagnostics after analysis. + # Wait a short settle time for follow-up notifications. + event.clear() + await asyncio.to_thread(event.wait, 1.0) + + self._diagnostics.pop(uri, None) + + if file_path not in self._file_editor.get_opened_files(): + await self._session.send_notification( + "textDocument/didClose", + {"textDocument": {"uri": uri}}, + ) + self._open_documents.discard(uri) + + return self._diagnostics_data.get(uri, []) + + async def _run_event_loop(self, ready: asyncio.Event) -> None: + async with self._file_editor.session( + author=self._file_operation_author + ) as fe_session: + ready.set() + async with fe_session.subscribe_to_all_events() as event_iter: + async for event in event_iter: + try: + await self._handle_file_event(event) + except Exception as exc: + self._logger.warning( + f"Error forwarding file event to LSP: {exc}" + ) + + async def _handle_file_event(self, event: ifileeditor.FileEvent) -> None: + if self._session is None: + return + + if isinstance(event, ifileeditor.FileOpenEvent): + uri = event.file_path.as_uri() + if uri not in self._open_documents: + try: + content = event.file_path.read_text() + except OSError: + return + lsp_version = self._next_version(uri) + await self._session.send_notification( + "textDocument/didOpen", + { + "textDocument": { + "uri": uri, + "languageId": self._language_id, + "version": lsp_version, + "text": content, + }, + }, + ) + self._open_documents.add(uri) + self._file_versions[uri] = 
str(hash(content)) + + elif isinstance(event, ifileeditor.FileChangeEvent): + uri = event.file_path.as_uri() + lsp_version = self._next_version(uri) + change = event.change + + if uri not in self._open_documents: + if isinstance(change, ifileeditor.FileChangeFull): + content = change.text + else: + try: + content = event.file_path.read_text() + except OSError: + return + await self._session.send_notification( + "textDocument/didOpen", + { + "textDocument": { + "uri": uri, + "languageId": self._language_id, + "version": lsp_version, + "text": content, + }, + }, + ) + self._open_documents.add(uri) + self._file_versions[uri] = str(hash(content)) + else: + if isinstance(change, ifileeditor.FileChangeFull): + content_changes = [{"text": change.text}] + self._file_versions[uri] = str(hash(change.text)) + else: + content_changes = [ + { + "range": { + "start": { + "line": change.range.start.line, + "character": change.range.start.character, + }, + "end": { + "line": change.range.end.line, + "character": change.range.end.character, + }, + }, + "text": change.text, + } + ] + # Partial change: invalidate cached version so check_file + # will re-read and send the full updated content next time. 
+ self._file_versions.pop(uri, None) + await self._session.send_notification( + "textDocument/didChange", + { + "textDocument": {"uri": uri, "version": lsp_version}, + "contentChanges": content_changes, + }, + ) + + elif isinstance(event, ifileeditor.FileCloseEvent): + uri = event.file_path.as_uri() + if uri in self._open_documents: + await self._session.send_notification( + "textDocument/didClose", + {"textDocument": {"uri": uri}}, + ) + self._open_documents.discard(uri) + + def _next_version(self, uri: str) -> int: + version = self._document_version.get(uri, 0) + 1 + self._document_version[uri] = version + return version + + async def _handle_configuration_request( + self, params: dict[str, Any] | None + ) -> list[dict[str, Any]]: + """Handle workspace/configuration pull request from the LSP server. + + Returns one copy of the current settings for each requested item. + """ + items = (params or {}).get("items", []) + return [self._settings for _ in items] if items else [self._settings] + + async def _handle_diagnostics(self, params: dict[str, Any] | None) -> None: + if params is None: + return + uri = params.get("uri", "") + diagnostics = params.get("diagnostics", []) + self._diagnostics_data[uri] = diagnostics + + event = self._diagnostics.get(uri) + if event is not None: + event.set() + + +def map_diagnostics_to_lint_messages( + raw_diagnostics: list[dict[str, Any]], + default_source: str = "lsp", +) -> list[lint_files_action.LintMessage]: + """Convert raw LSP diagnostics to LintMessage objects.""" + severity_map = { + 1: lint_files_action.LintMessageSeverity.ERROR, + 2: lint_files_action.LintMessageSeverity.WARNING, + 3: lint_files_action.LintMessageSeverity.INFO, + 4: lint_files_action.LintMessageSeverity.HINT, + } + + messages: list[lint_files_action.LintMessage] = [] + for diag in raw_diagnostics: + rng = diag.get("range", {}) + start = rng.get("start", {}) + end = rng.get("end", {}) + + messages.append( + lint_files_action.LintMessage( + 
range=lint_files_action.Range( + start=lint_files_action.Position( + line=start.get("line", 0), + character=start.get("character", 0), + ), + end=lint_files_action.Position( + line=end.get("line", 0), + character=end.get("character", 0), + ), + ), + message=diag.get("message", ""), + code=str(diag.get("code", "")) + if diag.get("code") is not None + else None, + source=diag.get("source", default_source), + severity=severity_map.get(diag.get("severity")), + ) + ) + return messages diff --git a/finecode_extension_api/src/finecode_extension_api/extension.py b/finecode_extension_api/src/finecode_extension_api/extension.py new file mode 100644 index 00000000..2440ffa2 --- /dev/null +++ b/finecode_extension_api/src/finecode_extension_api/extension.py @@ -0,0 +1,9 @@ +import typing + + +class ExtensionActivator(typing.Protocol): + """Protocol for extension activation.""" + + def activate(self) -> None: + """Called when extension is loaded.""" + ... diff --git a/finecode_extension_api/src/finecode_extension_api/interfaces/iactionrunner.py b/finecode_extension_api/src/finecode_extension_api/interfaces/iactionrunner.py index 8ae5e101..e5eb7977 100644 --- a/finecode_extension_api/src/finecode_extension_api/interfaces/iactionrunner.py +++ b/finecode_extension_api/src/finecode_extension_api/interfaces/iactionrunner.py @@ -1,10 +1,43 @@ -from typing import Any, Protocol +import typing +from finecode_extension_api import code_action, service + +PayloadT = typing.TypeVar("PayloadT", bound=code_action.RunActionPayload) +ResultT = typing.TypeVar("ResultT", bound=code_action.RunActionResult) +ActionT = typing.TypeVar( + "ActionT", + bound=code_action.Action[typing.Any, typing.Any, typing.Any], + covariant=True, +) + + +class ActionDeclaration(typing.Generic[ActionT]): ... + + +class IActionRunner(service.Service, typing.Protocol): + def get_actions_by_source( + self, source: str, expected_type: type[ActionT] + ) -> list[ActionDeclaration[ActionT]]: ... 
+ + def get_actions_for_language( + self, source: str, language: str, expected_type: type[ActionT] + ) -> list[ActionDeclaration[ActionT]]: ... + + def get_action_by_name( + self, name: str, expected_type: type[ActionT] + ) -> ActionDeclaration[ActionT]: + # use it only if you are sure you need it. In most cases get_actions_by_source + # should be preferred + ... -class IActionRunner(Protocol): async def run_action( - self, name: str, payload: dict[str, Any] - ) -> dict[str, Any]: ... + self, + action: ActionDeclaration[code_action.Action[PayloadT, typing.Any, ResultT]], + payload: PayloadT, + meta: code_action.RunActionMeta, + ) -> ResultT: ... + + def get_actions_names(self) -> list[str]: ... class BaseRunActionException(Exception): diff --git a/finecode_extension_api/src/finecode_extension_api/interfaces/icommandrunner.py b/finecode_extension_api/src/finecode_extension_api/interfaces/icommandrunner.py index e7629922..25c11f0e 100644 --- a/finecode_extension_api/src/finecode_extension_api/interfaces/icommandrunner.py +++ b/finecode_extension_api/src/finecode_extension_api/interfaces/icommandrunner.py @@ -14,11 +14,11 @@ def write_to_stdin(self, value: str) -> None: ... def close_stdin(self) -> None: ... -class ISyncProcess(IProcess): +class ISyncProcess(IProcess, Protocol): def wait_for_end(self, timeout: float | None = None) -> None: ... -class IAsyncProcess(IProcess): +class IAsyncProcess(IProcess, Protocol): async def wait_for_end(self, timeout: float | None = None) -> None: ... 
diff --git a/finecode_extension_api/src/finecode_extension_api/interfaces/iextensionrunnerinfoprovider.py b/finecode_extension_api/src/finecode_extension_api/interfaces/iextensionrunnerinfoprovider.py index 7bed5e38..953a314f 100644 --- a/finecode_extension_api/src/finecode_extension_api/interfaces/iextensionrunnerinfoprovider.py +++ b/finecode_extension_api/src/finecode_extension_api/interfaces/iextensionrunnerinfoprovider.py @@ -3,10 +3,14 @@ class IExtensionRunnerInfoProvider(Protocol): + def get_current_env_name(self) -> str: ... + def get_cache_dir_path(self) -> pathlib.Path: ... def get_venv_dir_path_of_env(self, env_name: str) -> pathlib.Path: ... + def get_current_venv_dir_path(self) -> pathlib.Path: ... + def get_venv_site_packages( self, venv_dir_path: pathlib.Path ) -> list[pathlib.Path]: ... diff --git a/finecode_extension_api/src/finecode_extension_api/interfaces/ifileeditor.py b/finecode_extension_api/src/finecode_extension_api/interfaces/ifileeditor.py new file mode 100644 index 00000000..22af7cc8 --- /dev/null +++ b/finecode_extension_api/src/finecode_extension_api/interfaces/ifileeditor.py @@ -0,0 +1,144 @@ +import contextlib +import dataclasses +import pathlib +import typing +from typing import Protocol + +from finecode_extension_api import common_types + +# reexport +Position = common_types.Position +Range = common_types.Range + + +@dataclasses.dataclass +class FileInfo: + content: str + version: str + + +@dataclasses.dataclass +class FileChangePartial: + """The range of the document that changed.""" + + range: Range + """The new text for the provided range.""" + text: str + + +@dataclasses.dataclass +class FileChangeFull: + # new file content + text: str + + +FileChange = FileChangePartial | FileChangeFull + + +@dataclasses.dataclass +class FileOperationAuthor: + id: str + + +@dataclasses.dataclass +class FileChangeEvent: + file_path: pathlib.Path + author: FileOperationAuthor + change: FileChange + + +@dataclasses.dataclass +class 
FileOpenEvent: + file_path: pathlib.Path + + +@dataclasses.dataclass +class FileCloseEvent: + file_path: pathlib.Path + author: FileOperationAuthor + + +FileEvent = FileOpenEvent | FileCloseEvent | FileChangeEvent + + +class FileAlreadyOpenError(Exception): + """Raised when trying to open a file that's already open in the session.""" + + def __init__(self, message: str) -> None: + self.message = message + + +class IFileEditorSession(Protocol): + # Reasons for using sessions: + # - all operations should be authored to provide traceability + # - some operations are author-specific, e.g. subscribe to changes of all opened by + # author files + async def change_file( + self, file_path: pathlib.Path, change: FileChange + ) -> None: ... + + async def subscribe_to_changes_of_opened_files( + self, + ) -> contextlib.AbstractAsyncContextManager[FileChangeEvent]: + # TODO: bunch of change events at once? + ... + + async def open_file(self, file_path: pathlib.Path) -> None: ... + + async def save_opened_file(self, file_path: pathlib.Path) -> None: ... + + async def close_file(self, file_path: pathlib.Path) -> None: ... + + def subscribe_to_all_events( + self, + ) -> contextlib.AbstractAsyncContextManager[FileEvent]: + # TODO: bunch of change events at once? + ... + + def read_file( + self, file_path: pathlib.Path, block: bool = False + ) -> contextlib.AbstractAsyncContextManager[FileInfo]: ... + + async def read_file_version(self, file_path: pathlib.Path) -> str: + # in case only file version is needed without content + ... + + async def save_file(self, file_path: pathlib.Path, file_content: str) -> None: ... + + # TODO + # async def reread_file() + + +class IFileEditor(Protocol): + """Service for managing read/write access to the files, e.g: + - read only for reading (other can read as well) (e.g. linter) + - read for modifying and block until modification is done (e.g. code formatter) + - read for modifying without blocking (e.g. 
by IDE) + + IDE needs the possibility to subscribe to changes to stay in sync. + IDE: + - user opens a file in IDE -> IDE sends 'open_file' and subscribes to changes made by others + - user edits the file in IDE -> IDE sends 'file_changed' with changes to FineCode. All subscribers get the changes + -> file change should have an author + - user saves the file in IDE -> IDE sends 'file_modified_on_disk' || TODO: distinguish saved file and not saved? or just keep opened? + - user closes the file in IDE -> IDE sends 'close_file' and unsubscribes from changes + + External tools like language servers need the possibility to subscribe not only to changes but also to open and close events. + + All tools access files via `ifileeditor.IFileEditor`, which stores the current (also unsaved) content of the file. + + Reading/writing files: always use `ifileeditor.IFileEditor` to read and write files. It will check whether the file is opened + and the opened content should be modified, or the file is not opened and it can be modified directly on disk. + + 'opened files' ... files the user sees and works with, not files which tools read. + """ + + def session( + self, author: FileOperationAuthor + ) -> typing.AsyncContextManager[IFileEditorSession]: + """Create a session for a specific author.""" + ... + + def get_opened_files(self) -> list[pathlib.Path]: + # opened files from all sessions + ... diff --git a/finecode_extension_api/src/finecode_extension_api/interfaces/ifilemanager.py b/finecode_extension_api/src/finecode_extension_api/interfaces/ifilemanager.py index 9b21f6a2..570377eb 100644 --- a/finecode_extension_api/src/finecode_extension_api/interfaces/ifilemanager.py +++ b/finecode_extension_api/src/finecode_extension_api/interfaces/ifilemanager.py @@ -3,13 +3,19 @@ class IFileManager(Protocol): + """Service for file system access: list files, create/read/write/delete files and + directories. + + Its main purpose is to abstract file storage (local, remote, file system, etc.). 
+ Additional functionalities such as management of opened files etc are not part of + this service. + """ + async def get_content(self, file_path: Path) -> str: ... - async def get_file_version(self, file_path: Path) -> str: - # TODO: move file versioning to cache - ... + async def get_file_version(self, file_path: Path) -> str: ... - async def save_file(self, file_path: Path, file_content) -> None: ... + async def save_file(self, file_path: Path, file_content: str) -> None: ... async def create_dir( self, dir_path: Path, create_parents: bool = True, exist_ok: bool = True diff --git a/finecode_extension_api/src/finecode_extension_api/interfaces/ihttpclient.py b/finecode_extension_api/src/finecode_extension_api/interfaces/ihttpclient.py new file mode 100644 index 00000000..d1c822a1 --- /dev/null +++ b/finecode_extension_api/src/finecode_extension_api/interfaces/ihttpclient.py @@ -0,0 +1,196 @@ +from types import TracebackType +from typing import Any, Protocol, Self + + +class IHttpResponse(Protocol): + """Protocol for HTTP response objects.""" + + @property + def status_code(self) -> int: + """HTTP status code (e.g., 200, 404).""" + ... + + @property + def headers(self) -> dict[str, str]: + """Response headers.""" + ... + + @property + def content(self) -> bytes: + """Raw response content as bytes.""" + ... + + @property + def text(self) -> str: + """Response content as text.""" + ... + + def json(self) -> Any: + """Parse response content as JSON.""" + ... + + def raise_for_status(self) -> None: + """Raise an exception if the response status indicates an error.""" + ... + + +class IHttpSession(Protocol): + """Protocol for HTTP session that manages a connection and can be used as a context manager.""" + + async def __aenter__(self) -> Self: + """Async context manager entry.""" + ... + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + """Async context manager exit. 
Automatically closes the session.""" + ... + + async def get( + self, + url: str, + headers: dict[str, str] | None = None, + params: dict[str, str] | None = None, + timeout: float | None = None, + ) -> IHttpResponse: + """ + Send an HTTP GET request. + + Args: + url: The URL to request + headers: Optional request headers + params: Optional query parameters + timeout: Optional timeout in seconds + + Returns: + HTTP response object + """ + ... + + async def post( + self, + url: str, + data: bytes | str | dict[str, Any] | None = None, + json: dict[str, Any] | None = None, + headers: dict[str, str] | None = None, + timeout: float | None = None, + ) -> IHttpResponse: + """ + Send an HTTP POST request. + + Args: + url: The URL to request + data: Request body data + json: JSON data to send (automatically sets Content-Type) + headers: Optional request headers + timeout: Optional timeout in seconds + + Returns: + HTTP response object + """ + ... + + async def put( + self, + url: str, + data: bytes | str | dict[str, Any] | None = None, + json: dict[str, Any] | None = None, + headers: dict[str, str] | None = None, + timeout: float | None = None, + ) -> IHttpResponse: + """ + Send an HTTP PUT request. + + Args: + url: The URL to request + data: Request body data + json: JSON data to send (automatically sets Content-Type) + headers: Optional request headers + timeout: Optional timeout in seconds + + Returns: + HTTP response object + """ + ... + + async def delete( + self, + url: str, + headers: dict[str, str] | None = None, + timeout: float | None = None, + ) -> IHttpResponse: + """ + Send an HTTP DELETE request. + + Args: + url: The URL to request + headers: Optional request headers + timeout: Optional timeout in seconds + + Returns: + HTTP response object + """ + ... + + async def head( + self, + url: str, + headers: dict[str, str] | None = None, + timeout: float | None = None, + ) -> IHttpResponse: + """ + Send an HTTP HEAD request. 
+ + Args: + url: The URL to request + headers: Optional request headers + timeout: Optional timeout in seconds + + Returns: + HTTP response object + """ + ... + + async def request( + self, + method: str, + url: str, + data: bytes | str | dict[str, Any] | None = None, + json: dict[str, Any] | None = None, + headers: dict[str, str] | None = None, + params: dict[str, str] | None = None, + timeout: float | None = None, + ) -> IHttpResponse: + """ + Send an HTTP request with any method. + + Args: + method: HTTP method (GET, POST, PUT, DELETE, etc.) + url: The URL to request + data: Request body data + json: JSON data to send (automatically sets Content-Type) + headers: Optional request headers + params: Optional query parameters + timeout: Optional timeout in seconds + + Returns: + HTTP response object + """ + ... + + +class IHttpClient(Protocol): + """Protocol for HTTP client factory that creates sessions.""" + + def session(self) -> IHttpSession: + """ + Create a new HTTP session. + + Returns: + A new HTTP session that should be used as a context manager + """ + ... diff --git a/finecode_extension_api/src/finecode_extension_api/interfaces/ijsonrpcclient.py b/finecode_extension_api/src/finecode_extension_api/interfaces/ijsonrpcclient.py new file mode 100644 index 00000000..b48c9432 --- /dev/null +++ b/finecode_extension_api/src/finecode_extension_api/interfaces/ijsonrpcclient.py @@ -0,0 +1,132 @@ +import collections.abc +from pathlib import Path +from types import TracebackType +from typing import Any, Protocol, Self + + +class IJsonRpcSession(Protocol): + """An active JSON-RPC connection session. + + Use as an async context manager: ``__aenter__`` starts the subprocess, + ``__aexit__`` stops it. + """ + + async def __aenter__(self) -> Self: ... + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: ... 
+ + # -- Async API (for use from async extension handlers) ----------------- + + async def send_request( + self, + method: str, + params: dict[str, Any] | None = None, + timeout: float | None = None, + ) -> Any: + """Send a JSON-RPC request and wait for the response. + + Args: + method: The JSON-RPC method name. + params: Optional parameters for the request. + timeout: Optional timeout in seconds. + + Returns: + The ``result`` field from the JSON-RPC response. + """ + ... + + async def send_notification( + self, + method: str, + params: dict[str, Any] | None = None, + ) -> None: + """Send a JSON-RPC notification (no response expected).""" + ... + + # -- Sync API (blocks caller thread, IO thread resolves) --------------- + + def send_request_sync( + self, + method: str, + params: dict[str, Any] | None = None, + timeout: float | None = None, + ) -> Any: + """Send a JSON-RPC request synchronously. + + Blocks the calling thread until the IO thread receives the response. + """ + ... + + def send_notification_sync( + self, + method: str, + params: dict[str, Any] | None = None, + ) -> None: + """Send a JSON-RPC notification synchronously.""" + ... + + # -- Server-initiated messages ----------------------------------------- + + def on_notification( + self, + method: str, + handler: collections.abc.Callable[ + [dict[str, Any] | None], collections.abc.Awaitable[None] + ], + ) -> None: + """Register a handler for incoming notifications from the server. + + Args: + method: The notification method name to handle. + handler: Async callable that receives the notification params. + """ + ... + + def on_request( + self, + method: str, + handler: collections.abc.Callable[ + [dict[str, Any] | None], collections.abc.Awaitable[Any] + ], + ) -> None: + """Register a handler for incoming requests from the server. + + Args: + method: The request method name. + handler: Async callable that receives params and returns the result. + """ + ... 
+ + +class IJsonRpcClient(Protocol): + """Factory for creating JSON-RPC sessions.""" + + def session( + self, + cmd: str, + cwd: Path | None = None, + env: dict[str, str] | None = None, + readable_id: str = "", + ) -> IJsonRpcSession: + """Create a new JSON-RPC session that launches a subprocess. + + Usage:: + + async with json_rpc_client.session("some-server --stdio") as session: + result = await session.send_request("method", {"key": "value"}) + + Args: + cmd: Shell command to start the JSON-RPC server process. + cwd: Working directory for the subprocess. + env: Environment variables for the subprocess. + readable_id: Human-readable identifier for logging. + + Returns: + An async context manager yielding IJsonRpcSession. + """ + ... diff --git a/finecode_extension_api/src/finecode_extension_api/interfaces/ilogger.py b/finecode_extension_api/src/finecode_extension_api/interfaces/ilogger.py index b36c7d4d..00af43ff 100644 --- a/finecode_extension_api/src/finecode_extension_api/interfaces/ilogger.py +++ b/finecode_extension_api/src/finecode_extension_api/interfaces/ilogger.py @@ -2,10 +2,16 @@ class ILogger(Protocol): + def exception(self, exception: Exception) -> None: ... + + def trace(self, message: str) -> None: ... + def info(self, message: str) -> None: ... def debug(self, message: str) -> None: ... + def warning(self, message: str) -> None: ... + def error(self, message: str) -> None: ... def disable(self, package: str) -> None: ... diff --git a/finecode_extension_api/src/finecode_extension_api/interfaces/ilspclient.py b/finecode_extension_api/src/finecode_extension_api/interfaces/ilspclient.py new file mode 100644 index 00000000..108c7c8f --- /dev/null +++ b/finecode_extension_api/src/finecode_extension_api/interfaces/ilspclient.py @@ -0,0 +1,142 @@ +import collections.abc +from pathlib import Path +from types import TracebackType +from typing import Any, Protocol, Self + + +class ILspSession(Protocol): + """An active LSP session with a language server. 
+ + Use as an async context manager: + + - ``__aenter__`` starts the process, sends ``initialize`` request, + sends ``initialized`` notification. + - ``__aexit__`` sends ``shutdown`` request, sends ``exit`` notification, + stops the process. + """ + + async def __aenter__(self) -> Self: ... + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: ... + + # -- Async API --------------------------------------------------------- + + async def send_request( + self, + method: str, + params: dict[str, Any] | None = None, + timeout: float | None = None, + ) -> Any: + """Send an LSP request and return the result.""" + ... + + async def send_notification( + self, + method: str, + params: dict[str, Any] | None = None, + ) -> None: + """Send an LSP notification.""" + ... + + # -- Sync API ---------------------------------------------------------- + + def send_request_sync( + self, + method: str, + params: dict[str, Any] | None = None, + timeout: float | None = None, + ) -> Any: + """Send an LSP request synchronously (blocks caller thread).""" + ... + + def send_notification_sync( + self, + method: str, + params: dict[str, Any] | None = None, + ) -> None: + """Send an LSP notification synchronously.""" + ... + + # -- Server-initiated messages ----------------------------------------- + + def on_notification( + self, + method: str, + handler: collections.abc.Callable[ + [dict[str, Any] | None], collections.abc.Awaitable[None] + ], + ) -> None: + """Register handler for server notifications.""" + ... + + def on_request( + self, + method: str, + handler: collections.abc.Callable[ + [dict[str, Any] | None], collections.abc.Awaitable[Any] + ], + ) -> None: + """Register handler for server-to-client requests.""" + ... 
+ + # -- Server info ------------------------------------------------------- + + @property + def server_capabilities(self) -> dict[str, Any]: + """Capabilities returned by the server in the initialize response.""" + ... + + @property + def server_info(self) -> dict[str, Any] | None: + """Server info returned in the initialize response, if any.""" + ... + + +class ILspClient(Protocol): + """Factory for creating LSP sessions with language servers.""" + + def session( + self, + cmd: str, + root_uri: str, + workspace_folders: list[dict[str, str]] | None = None, + initialization_options: dict[str, Any] | None = None, + client_capabilities: dict[str, Any] | None = None, + cwd: Path | None = None, + env: dict[str, str] | None = None, + readable_id: str = "", + ) -> ILspSession: + """Create a new LSP session that launches a language server. + + The session automatically performs the LSP initialization handshake. + + Usage:: + + async with lsp_client.session( + cmd="pyright-langserver --stdio", + root_uri="file:///path/to/project", + ) as session: + result = await session.send_request( + "textDocument/completion", + {"textDocument": {"uri": "file:///file.py"}, "position": {"line": 0, "character": 0}}, + ) + + Args: + cmd: Shell command to start the language server. + root_uri: The root URI of the workspace. + workspace_folders: Optional workspace folders (each with 'uri' and 'name' keys). + initialization_options: Optional server-specific initialization options. + client_capabilities: Optional client capabilities override. + cwd: Working directory for the subprocess. + env: Environment variables for the subprocess. + readable_id: Human-readable identifier for logging. + + Returns: + An async context manager yielding ILspSession. + """ + ... 
diff --git a/finecode_extension_api/src/finecode_extension_api/interfaces/iprojectfileclassifier.py b/finecode_extension_api/src/finecode_extension_api/interfaces/iprojectfileclassifier.py deleted file mode 100644 index 09d8bf40..00000000 --- a/finecode_extension_api/src/finecode_extension_api/interfaces/iprojectfileclassifier.py +++ /dev/null @@ -1,15 +0,0 @@ -import enum -import typing -import pathlib - - -class ProjectFileType(enum.Enum): - SOURCE = enum.auto() - TEST = enum.auto() - UNKNOWN = enum.auto() - - -class IProjectFileClassifier(typing.Protocol): - def get_project_file_type(self, file_path: pathlib.Path) -> ProjectFileType: ... - - def get_env_for_file_type(self, file_type: ProjectFileType) -> str: ... diff --git a/finecode_extension_api/src/finecode_extension_api/interfaces/iprojectinfoprovider.py b/finecode_extension_api/src/finecode_extension_api/interfaces/iprojectinfoprovider.py index f978b3f4..acc87207 100644 --- a/finecode_extension_api/src/finecode_extension_api/interfaces/iprojectinfoprovider.py +++ b/finecode_extension_api/src/finecode_extension_api/interfaces/iprojectinfoprovider.py @@ -15,6 +15,8 @@ async def get_project_raw_config( async def get_current_project_raw_config(self) -> dict[str, Any]: ... + def get_current_project_raw_config_version(self) -> int: ... 
+ class InvalidProjectConfig(Exception): def __init__(self, message: str) -> None: diff --git a/finecode_extension_api/src/finecode_extension_api/interfaces/irepositorycredentialsprovider.py b/finecode_extension_api/src/finecode_extension_api/interfaces/irepositorycredentialsprovider.py new file mode 100644 index 00000000..99805dd4 --- /dev/null +++ b/finecode_extension_api/src/finecode_extension_api/interfaces/irepositorycredentialsprovider.py @@ -0,0 +1,72 @@ +import dataclasses +from typing import Protocol + + +@dataclasses.dataclass +class RepositoryCredentials: + username: str + password: str + + +@dataclasses.dataclass +class Repository: + name: str + url: str + + +class IRepositoryCredentialsProvider(Protocol): + def get_credentials(self, repository_name: str) -> RepositoryCredentials | None: + """ + Get credentials for a repository by name. + + Args: + repository_name: The name of the repository (e.g., "testpypi", "pypi") + + Returns: + RepositoryCredentials if found, None otherwise + """ + ... + + def set_credentials( + self, repository_name: str, username: str, password: str + ) -> None: + """ + Store credentials for a repository. + + Args: + repository_name: The name of the repository + username: The username for authentication + password: The password or token for authentication + """ + ... + + def add_repository(self, name: str, url: str) -> None: + """ + Add a repository. + + Args: + name: The name of the repository (e.g., "testpypi", "pypi") + url: The URL of the repository + """ + ... + + def get_repository(self, name: str) -> Repository | None: + """ + Get a repository by name. + + Args: + name: The name of the repository + + Returns: + Repository if found, None otherwise + """ + ... + + def get_all_repositories(self) -> list[Repository]: + """ + Get all registered repositories. + + Returns: + List of all repositories + """ + ... 
diff --git a/finecode_extension_api/src/finecode_extension_api/interfaces/iserviceregistry.py b/finecode_extension_api/src/finecode_extension_api/interfaces/iserviceregistry.py new file mode 100644 index 00000000..303a766d --- /dev/null +++ b/finecode_extension_api/src/finecode_extension_api/interfaces/iserviceregistry.py @@ -0,0 +1,9 @@ +import typing + +T = typing.TypeVar("T") + + +class IServiceRegistry(typing.Protocol): + def register_impl( + self, interface: type[T], impl: type[T], singleton: bool = False + ) -> None: ... diff --git a/finecode_extension_api/src/finecode_extension_api/interfaces/isrcartifactfileclassifier.py b/finecode_extension_api/src/finecode_extension_api/interfaces/isrcartifactfileclassifier.py new file mode 100644 index 00000000..6ac2ac29 --- /dev/null +++ b/finecode_extension_api/src/finecode_extension_api/interfaces/isrcartifactfileclassifier.py @@ -0,0 +1,17 @@ +import enum +import pathlib +import typing + + +class SrcArtifactFileType(enum.Enum): + SOURCE = enum.auto() + TEST = enum.auto() + UNKNOWN = enum.auto() + + +class ISrcArtifactFileClassifier(typing.Protocol): + def get_src_artifact_file_type( + self, file_path: pathlib.Path + ) -> SrcArtifactFileType: ... + + def get_env_for_file_type(self, file_type: SrcArtifactFileType) -> str: ... diff --git a/finecode_extension_api/src/finecode_extension_api/service.py b/finecode_extension_api/src/finecode_extension_api/service.py index 1a66440c..a8afaf18 100644 --- a/finecode_extension_api/src/finecode_extension_api/service.py +++ b/finecode_extension_api/src/finecode_extension_api/service.py @@ -1,2 +1,20 @@ -class Service: +import sys +import typing + +if sys.version_info >= (3, 12): + from typing import override +else: + from typing_extensions import override + + +@typing.runtime_checkable +class Service(typing.Protocol): async def init(self) -> None: ... + + +@typing.runtime_checkable +class DisposableService(Service, typing.Protocol): + @override + async def init(self) -> None: ... 
+ + def dispose(self) -> None: ... diff --git a/finecode_extension_api/src/finecode_extension_api/textstyler.py b/finecode_extension_api/src/finecode_extension_api/textstyler.py index 65b423a9..da84e7bb 100644 --- a/finecode_extension_api/src/finecode_extension_api/textstyler.py +++ b/finecode_extension_api/src/finecode_extension_api/textstyler.py @@ -59,7 +59,7 @@ def append_styled( changed_params: dict[str, str | bool] = {} for param_name, param_default, param_value in params: - if param_value != param_default: + if param_value != param_default and param_value is not None: changed_params[param_name] = ( param_value if not isinstance(param_value, enum.Enum) diff --git a/finecode_extension_runner/pyproject.toml b/finecode_extension_runner/pyproject.toml index 114e5e1a..571ccd18 100644 --- a/finecode_extension_runner/pyproject.toml +++ b/finecode_extension_runner/pyproject.toml @@ -8,10 +8,12 @@ requires-python = ">=3.11, <= 3.14" dependencies = [ "loguru==0.7.*", "click==8.1.*", - "pydantic==2.11.*", - "pygls==2.0.0-a6", - "finecode_extension_api==0.3.*", + "pydantic==2.12.*", + "pygls==2.0.0", + "finecode_extension_api~=0.4.0a0", "deepmerge==2.0.*", + "debugpy==1.8.*", + "ordered-set==4.1.*", ] [dependency-groups] @@ -19,8 +21,8 @@ dependencies = [ # "fine_python_import_linter @ git+https://github.com/finecode-dev/finecode.git#subdirectory=extensions/fine_python_import_linter" dev_workspace = [ "build==1.2.2.post1", - "finecode==0.3.*", - "finecode_dev_common_preset==0.2.*", + "finecode~=0.4.0a0", + "finecode_dev_common_preset~=0.3.0a0", ] dev = [{ include-group = "runtime" }, "pytest==7.4.*", "debugpy==1.8.*"] @@ -29,6 +31,11 @@ finecode_dev_common_preset = { path = "../finecode_dev_common_preset", editable finecode = { path = "../", editable = true } finecode_extension_runner = { path = "../finecode_extension_runner", editable = true } finecode_extension_api = { path = "../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = 
"../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../finecode_builtin_handlers", editable = true } +fine_python_recommended = { path = "../presets/fine_python_recommended", editable = true } +fine_python_lint = { path = "../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../presets/fine_python_format", editable = true } [build-system] requires = ["setuptools>=64", "setuptools-scm>=8"] @@ -54,3 +61,12 @@ layers = [ [tool.setuptools_scm] version_file = "src/finecode_extension_runner/_version.py" root = ".." + +[tool.finecode.action.get_src_artifact_version] +source = "finecode_extension_api.actions.get_src_artifact_version.GetSrcArtifactVersionAction" +handlers_mode = "replace" +handlers = [ + { name = 'get_src_artifact_version_setuptools_scm', source = 'fine_python_setuptools_scm.GetSrcArtifactVersionSetuptoolsScmHandler', env = "dev_no_runtime", dependencies = [ + "fine_python_setuptools_scm~=0.1.0a1", + ] }, +] diff --git a/finecode_extension_runner/src/finecode_extension_runner/__main__.py b/finecode_extension_runner/src/finecode_extension_runner/__main__.py index 2149a653..23a88a5b 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/__main__.py +++ b/finecode_extension_runner/src/finecode_extension_runner/__main__.py @@ -1,4 +1,5 @@ from finecode_extension_runner import cli + if __name__ == "__main__": cli.main() diff --git a/finecode_extension_runner/src/finecode_extension_runner/_services/run_action.py b/finecode_extension_runner/src/finecode_extension_runner/_services/run_action.py index 34f285e1..90b114bf 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/_services/run_action.py +++ b/finecode_extension_runner/src/finecode_extension_runner/_services/run_action.py @@ -7,9 +7,10 @@ import deepmerge from loguru import logger +import pydantic from pydantic.dataclasses import dataclass as pydantic_dataclass -from finecode_extension_api import code_action, textstyler 
+from finecode_extension_api import code_action, textstyler, service from finecode_extension_api.interfaces import iactionrunner from finecode_extension_runner import context, domain, global_state from finecode_extension_runner import ( @@ -46,33 +47,20 @@ def set_partial_result_sender(send_func: typing.Callable) -> None: ) -async def run_action( - request: schemas.RunActionRequest, options: schemas.RunActionOptions -) -> schemas.RunActionResponse: - global last_run_id - run_id = last_run_id - last_run_id += 1 - logger.trace( - f"Run action '{request.action_name}', run id: {run_id}, partial result token: {options.partial_result_token}" - ) - # TODO: check whether config is set: this will be solved by passing initial - # configuration as payload of initialize - if global_state.runner_context is None: - raise ActionFailedException( - "Run of action failed because extension runner is not initialized yet" - ) +class AsyncPlaceholderContext: + async def __aenter__(self): + return self - start_time = time.time_ns() - project_def = global_state.runner_context.project + async def __aexit__(self, exc_type, exc_val, exc_tb): ... - try: - action = project_def.actions[request.action_name] - except KeyError: - logger.error(f"R{run_id} | Action {request.action_name} not found") - raise ActionFailedException( - f"R{run_id} | Action {request.action_name} not found" - ) +async def run_action( + action_def: domain.ActionDeclaration, + payload: code_action.RunActionPayload | None, + meta: code_action.RunActionMeta, + partial_result_token: int | str | None = None, + run_id: int | None = None, +) -> code_action.RunActionResult | None: # design decisions: # - keep payload unchanged between all subaction runs. # For intermediate data use run_context @@ -81,190 +69,237 @@ async def run_action( # returned. (experimental) # - execution of handlers can be concurrent or sequential. But executions of handler # on iterable payloads(single parts) are always concurrent. 
- action_name = request.action_name + + if run_id is None: + global last_run_id + run_id = last_run_id + last_run_id += 1 + + logger.trace( + f"Run action '{action_def.name}', run id: {run_id}, partial result token: {partial_result_token}" + ) + + # TODO: check whether config is set: this will be solved by passing initial + # configuration as payload of initialize + if global_state.runner_context is None: + raise ActionFailedException( + "Run of action failed because extension runner is not initialized yet" + ) + + start_time = time.time_ns() try: - action_cache = global_state.runner_context.action_cache_by_name[action_name] + action_cache = global_state.runner_context.action_cache_by_name[action_def.name] except KeyError: action_cache = domain.ActionCache() - global_state.runner_context.action_cache_by_name[action_name] = action_cache + global_state.runner_context.action_cache_by_name[action_def.name] = action_cache if action_cache.exec_info is not None: action_exec_info = action_cache.exec_info else: - action_exec_info = create_action_exec_info(action) + action_exec_info = create_action_exec_info(action_def) action_cache.exec_info = action_exec_info - # TODO: catch validation errors - payload: code_action.RunActionPayload | None = None - if action_exec_info.payload_type is not None: - payload_type_with_validation = pydantic_dataclass(action_exec_info.payload_type) - payload = payload_type_with_validation(**request.params) + # TODO: take value from action config + execute_handlers_concurrently = action_def.name.startswith("lint_files_") - run_context: code_action.RunActionContext | None = None + run_context: code_action.RunActionContext | AsyncPlaceholderContext + run_context_info = code_action.RunContextInfoProvider(is_concurrent_execution=execute_handlers_concurrently) if action_exec_info.run_context_type is not None: constructor_args = await resolve_func_args_with_di( action_exec_info.run_context_type.__init__, - known_args={"run_id": lambda _: run_id}, + 
known_args={ + "run_id": lambda _: run_id, + "initial_payload": lambda _: payload, + "meta": lambda _: meta, + "info_provider": lambda _: run_context_info + }, params_to_ignore=["self"], ) - run_context = action_exec_info.run_context_type(**constructor_args) - # TODO: handler errors - await run_context.init(initial_payload=payload) + # developers can change run context constructor, handle all exceptions + try: + run_context = action_exec_info.run_context_type(**constructor_args) + except Exception as exception: + raise ActionFailedException( + f"Failed to instantiate run context of action {action_def.name}(Run {run_id}): {str(exception)}." + + " See ER logs for more details" + ) from exception + else: + # TODO: check run_context below, whether AsyncPlaceholder can really be used + run_context = AsyncPlaceholderContext() action_result: code_action.RunActionResult | None = None runner_context = global_state.runner_context - # TODO: take value from action config - execute_handlers_concurrently = action.name == "lint" - partial_result_token = options.partial_result_token - send_partial_results = partial_result_token is not None - with action_exec_info.process_executor.activate(): - # action payload can be iterable or not - if isinstance(payload, collections.abc.AsyncIterable): - # iterable: `run` method should not calculate results itself, but call - # `partial_result_scheduler.schedule`. Then we execute provided - # coroutines either concurrently or sequentially. 
- logger.trace( - f"R{run_id} | Iterable payload, execute all handlers to schedule coros" - ) - for handler in action.handlers: - await execute_action_handler( - handler=handler, - payload=payload, - run_context=run_context, - run_id=run_id, - action_cache=action_cache, - action_exec_info=action_exec_info, - runner_context=runner_context, - ) + # to be able to catch source of exceptions in user-accessible code more precisely, + # manually enter and exit run context + try: + run_context_instance = await run_context.__aenter__() + except Exception as exception: + raise ActionFailedException( + f"Failed to enter run context of action {action_def.name}(Run {run_id}): {str(exception)}." + + " See ER logs for more details" + ) from exception - parts = [part async for part in payload] - subresults_tasks: list[asyncio.Task] = [] - logger.trace( - "R{run_id} | Run subresult coros {exec_type} {partials} partial results".format( - run_id=run_id, - exec_type=( - "concurrently" - if execute_handlers_concurrently - else "sequentially" - ), - partials="with" if send_partial_results else "without", - ) - ) - try: - async with asyncio.TaskGroup() as tg: - for part in parts: - part_coros = ( - run_context.partial_result_scheduler.coroutines_by_key[part] - ) - del run_context.partial_result_scheduler.coroutines_by_key[part] - if execute_handlers_concurrently: - coro = run_subresult_coros_concurrently( - part_coros, - send_partial_results, - partial_result_token, - partial_result_sender, - action.name, - run_id, - ) - else: - coro = run_subresult_coros_sequentially( - part_coros, - send_partial_results, - partial_result_token, - partial_result_sender, - action.name, - run_id, - ) - subresult_task = tg.create_task(coro) - subresults_tasks.append(subresult_task) - except ExceptionGroup as eg: - errors: list[str] = [] - for exc in eg.exceptions: - if not isinstance(exc, ActionFailedException): - logger.error("Unexpected exception:") - logger.exception(exc) - else: - 
errors.append(exc.message) - raise ActionFailedException( - f"Running action handlers of '{action.name}' failed(Run {run_id}): {errors}." - " See ER logs for more details" + try: + send_partial_results = partial_result_token is not None + with action_exec_info.process_executor.activate(): + # action payload can be iterable or not + if isinstance(payload, collections.abc.AsyncIterable): + # iterable: `run` method should not calculate results itself, but call + # `partial_result_scheduler.schedule`. Then we execute provided + # coroutines either concurrently or sequentially. + logger.trace( + f"R{run_id} | Iterable payload, execute all handlers to schedule coros" ) + for handler in action_def.handlers: + await execute_action_handler( + handler=handler, + payload=payload, + run_context=run_context_instance, + run_id=run_id, + action_cache=action_cache, + action_exec_info=action_exec_info, + runner_context=runner_context, + ) - if send_partial_results: - # all subresults are ready - logger.trace(f"R{run_id} | all subresults are ready, send them") - await partial_result_sender.send_all_immediately() - else: - for subresult_task in subresults_tasks: - result = subresult_task.result() - if result is not None: - if action_result is None: - action_result = result - else: - action_result.update(result) - else: - # action payload not iterable, just execute handlers on the whole payload - if execute_handlers_concurrently: - handlers_tasks: list[asyncio.Task] = [] + parts = [part async for part in payload] + subresults_tasks: list[asyncio.Task] = [] + logger.trace( + "R{run_id} | Run subresult coros {exec_type} {partials} partial results".format( + run_id=run_id, + exec_type=( + "concurrently" + if execute_handlers_concurrently + else "sequentially" + ), + partials="with" if send_partial_results else "without", + ) + ) try: async with asyncio.TaskGroup() as tg: - for handler in action.handlers: - handler_task = tg.create_task( - execute_action_handler( - handler=handler, - 
payload=payload, - run_context=run_context, - run_id=run_id, - action_cache=action_cache, - action_exec_info=action_exec_info, - runner_context=runner_context, - ) + for part in parts: + part_coros = ( + run_context.partial_result_scheduler.coroutines_by_key[part] ) - handlers_tasks.append(handler_task) + del run_context.partial_result_scheduler.coroutines_by_key[part] + if execute_handlers_concurrently: + coro = run_subresult_coros_concurrently( + part_coros, + send_partial_results, + partial_result_token, + partial_result_sender, + action_def.name, + run_id, + ) + else: + coro = run_subresult_coros_sequentially( + part_coros, + send_partial_results, + partial_result_token, + partial_result_sender, + action_def.name, + run_id, + ) + subresult_task = tg.create_task(coro) + subresults_tasks.append(subresult_task) except ExceptionGroup as eg: + errors: list[str] = [] for exc in eg.exceptions: - # TODO: expected / unexpected? - logger.exception(exc) - raise ActionFailedException( - f"Running action handlers of '{action.name}' failed" - f"(Run {run_id}). See ER logs for more details" - ) - - for handler_task in handlers_tasks: - coro_result = handler_task.result() - if coro_result is not None: - if action_result is None: - action_result = coro_result + if not isinstance(exc, ActionFailedException): + logger.error("Unexpected exception:") + logger.exception(exc) else: - action_result.update(coro_result) + errors.append(exc.message) + raise ActionFailedException( + f"Running action handlers of '{action_def.name}' failed(Run {run_id}): {errors}." 
+ " See ER logs for more details" + ) from eg + + if send_partial_results: + # all subresults are ready + logger.trace(f"R{run_id} | all subresults are ready, send them") + await partial_result_sender.send_all_immediately() + else: + for subresult_task in subresults_tasks: + result = subresult_task.result() + if result is not None: + if action_result is None: + action_result = result + else: + action_result.update(result) else: - for handler in action.handlers: + # action payload not iterable, just execute handlers on the whole payload + if execute_handlers_concurrently: + handlers_tasks: list[asyncio.Task] = [] try: - handler_result = await execute_action_handler( - handler=handler, - payload=payload, - run_context=run_context, - run_id=run_id, - action_cache=action_cache, - action_exec_info=action_exec_info, - runner_context=runner_context, - ) - except ActionFailedException as exception: - raise exception - - if handler_result is not None: - if action_result is None: - action_result = handler_result - else: - action_result.update(handler_result) + async with asyncio.TaskGroup() as tg: + for handler in action_def.handlers: + handler_task = tg.create_task( + execute_action_handler( + handler=handler, + payload=payload, + run_context=run_context_instance, + run_id=run_id, + action_cache=action_cache, + action_exec_info=action_exec_info, + runner_context=runner_context, + ) + ) + handlers_tasks.append(handler_task) + except ExceptionGroup as eg: + for exc in eg.exceptions: + # TODO: expected / unexpected? + logger.exception(exc) + raise ActionFailedException( + f"Running action handlers of '{action_def.name}' failed" + f"(Run {run_id}). 
See ER logs for more details" + ) from eg + + for handler_task in handlers_tasks: + coro_result = handler_task.result() + if coro_result is not None: + if action_result is None: + action_result = coro_result + else: + action_result.update(coro_result) + else: + for handler in action_def.handlers: + try: + handler_result = await execute_action_handler( + handler=handler, + payload=payload, + run_context=run_context_instance, + run_id=run_id, + action_cache=action_cache, + action_exec_info=action_exec_info, + runner_context=runner_context, + ) + except ActionFailedException as exception: + raise exception + + if handler_result is not None: + if action_result is None: + action_result = handler_result + else: + action_result.update(handler_result) + + run_context_info.update(action_result) + finally: + # exit run context + try: + await run_context_instance.__aexit__(None, None, None) + except Exception as exception: + raise ActionFailedException( + f"Failed to exit run context of action {action_def.name}(Run {run_id}): {str(exception)}." 
+ + " See ER logs for more details" + ) from exception end_time = time.time_ns() duration = (end_time - start_time) / 1_000_000 logger.trace( - f"R{run_id} | Run action end '{request.action_name}', duration: {duration}ms" + f"R{run_id} | Run action end '{action_def.name}', duration: {duration}ms" ) # if partial results were sent, `action_result` may be None @@ -278,44 +313,97 @@ async def run_action( f"Unexpected result type: {type(action_result).__name__}" ) + return action_result + + +async def run_action_raw( + request: schemas.RunActionRequest, options: schemas.RunActionOptions +) -> schemas.RunActionResponse: + global last_run_id + run_id = last_run_id + last_run_id += 1 + logger.trace( + f"Run action '{request.action_name}', run id: {run_id}, partial result token: {options.partial_result_token}" + ) + # # TODO: check whether config is set: this will be solved by passing initial + # # configuration as payload of initialize + if global_state.runner_context is None: + raise ActionFailedException( + "Run of action failed because extension runner is not initialized yet" + ) + + project_def = global_state.runner_context.project + + try: + action = project_def.actions[request.action_name] + except KeyError: + logger.error(f"R{run_id} | Action {request.action_name} not found") + raise ActionFailedException( + f"R{run_id} | Action {request.action_name} not found" + ) + + action_name = request.action_name + + try: + action_cache = global_state.runner_context.action_cache_by_name[action_name] + except KeyError: + action_cache = domain.ActionCache() + global_state.runner_context.action_cache_by_name[action_name] = action_cache + + if action_cache.exec_info is not None: + action_exec_info = action_cache.exec_info + else: + action_exec_info = create_action_exec_info(action) + action_cache.exec_info = action_exec_info + + # TODO: catch validation errors + payload: code_action.RunActionPayload | None = None + if action_exec_info.payload_type is not None: + 
payload_type_with_validation = pydantic_dataclass(action_exec_info.payload_type) + payload = payload_type_with_validation(**request.params) + + action_result = await run_action( + action_def=action, + payload=payload, + meta=options.meta, + partial_result_token=options.partial_result_token, + run_id=run_id, + ) + response = action_result_to_run_action_response( - action_result, options.result_format + action_result, options.result_formats ) return response def action_result_to_run_action_response( action_result: code_action.RunActionResult | None, - asked_result_format: typing.Literal["json"] | typing.Literal["string"], + asked_result_formats: list[typing.Literal["json"] | typing.Literal["string"]], ) -> schemas.RunActionResponse: - serialized_result: dict[str, typing.Any] | str | None = None - result_format = "string" + result_by_format: dict[str, dict[str, typing.Any] | str | None] = {} run_return_code = code_action.RunReturnCode.SUCCESS if isinstance(action_result, code_action.RunActionResult): run_return_code = action_result.return_code - if asked_result_format == "json": - serialized_result = dataclasses.asdict(action_result) - result_format = "json" - elif asked_result_format == "string": - result_text = action_result.to_text() - if isinstance(result_text, textstyler.StyledText): - serialized_result = result_text.to_json() - result_format = "styled_text_json" + for asked_result_format in asked_result_formats: + if asked_result_format == "json": + result_by_format["json"] = dataclasses.asdict(action_result) + elif asked_result_format == "string": + result_text = action_result.to_text() + if isinstance(result_text, textstyler.StyledText): + result_by_format["styled_text_json"] = result_text.to_json() + else: + result_by_format["string"] = result_text else: - serialized_result = result_text - result_format = "string" - else: - raise ActionFailedException( - f"Unsupported result format: {asked_result_format}" - ) + raise ActionFailedException( + f"Unsupported 
result format: {asked_result_format}" + ) return schemas.RunActionResponse( - result=serialized_result, - format=result_format, + result_by_format=result_by_format, return_code=run_return_code.value, ) -def create_action_exec_info(action: domain.Action) -> domain.ActionExecInfo: +def create_action_exec_info(action: domain.ActionDeclaration) -> domain.ActionExecInfo: try: action_type_def = run_utils.import_module_member_by_source_str(action.source) except Exception as e: @@ -342,7 +430,7 @@ async def resolve_func_args_with_di( func: typing.Callable, known_args: dict[str, typing.Callable[[typing.Any], typing.Any]] | None = None, params_to_ignore: list[str] | None = None, -): +) -> dict[str, typing.Any]: func_parameters = inspect.signature(func).parameters func_annotations = inspect.get_annotations(func, eval_str=True) args: dict[str, typing.Any] = {} @@ -368,25 +456,10 @@ async def resolve_func_args_with_di( return args -async def execute_action_handler( - handler: domain.ActionHandler, - payload: code_action.RunActionPayload | None, - run_context: code_action.RunActionContext | None, - run_id: int, - action_exec_info: domain.ActionExecInfo, - action_cache: domain.ActionCache, +def _get_handler_raw_config( + handler: domain.ActionHandlerDeclaration, runner_context: context.RunnerContext, -) -> code_action.RunActionResult: - logger.trace(f"R{run_id} | Run {handler.name} on {str(payload)[:100]}...") - if handler.name in action_cache.handler_cache_by_name: - handler_cache = action_cache.handler_cache_by_name[handler.name] - else: - handler_cache = domain.ActionHandlerCache() - action_cache.handler_cache_by_name[handler.name] = handler_cache - - start_time = time.time_ns() - execution_result: code_action.RunActionResult | None = None - +) -> dict[str, typing.Any]: handler_global_config = runner_context.project.action_handler_configs.get( handler.source, None ) @@ -399,80 +472,159 @@ async def execute_action_handler( else: # not empty anymore, deep merge 
handler_config_merger.merge(handler_raw_config, handler.config) + return handler_raw_config + + +async def ensure_handler_instantiated( + handler: domain.ActionHandlerDeclaration, + handler_cache: domain.ActionHandlerCache, + action_exec_info: domain.ActionExecInfo, + runner_context: context.RunnerContext, +) -> None: + """Ensure handler is instantiated and initialized, populating handler_cache. + If handler is already instantiated (handler_cache.instance is not None), this is + a no-op. Otherwise, imports the handler class, resolves DI, instantiates it, + calls on_initialize lifecycle hook if present, and caches the result. + """ if handler_cache.instance is not None: - handler_instance = handler_cache.instance - handler_run_func = handler_instance.run - exec_info = handler_cache.exec_info - logger.trace( - f"R{run_id} | Instance of action handler {handler.name} found in cache" + return + + handler_raw_config = _get_handler_raw_config(handler, runner_context) + + logger.trace(f"Load action handler {handler.name}") + try: + action_handler = run_utils.import_module_member_by_source_str( + handler.source + ) + except ModuleNotFoundError as error: + logger.error( + f"Source of action handler {handler.name} '{handler.source}'" + " could not be imported" + ) + logger.error(error) + raise ActionFailedException( + f"Import of action handler '{handler.name}' failed: {handler.source}" + ) from error + + def get_handler_config(param_type): + # validate config using pydantic + try: + config_type = pydantic_dataclass(param_type) + except pydantic.ValidationError as exception: + raise ActionFailedException(exception.errors()) from exception + return config_type(**handler_raw_config) + + def get_process_executor(param_type): + return action_exec_info.process_executor + + exec_info = domain.ActionHandlerExecInfo() + # save immediately in context to be able to shutdown it if the first execution + # is interrupted by stopping ER + handler_cache.exec_info = exec_info + if 
inspect.isclass(action_handler): + args = await resolve_func_args_with_di( + func=action_handler.__init__, + known_args={ + "config": get_handler_config, + "process_executor": get_process_executor, + }, + params_to_ignore=["self"], ) + + if "lifecycle" in args: + exec_info.lifecycle = args["lifecycle"] + + handler_instance = action_handler(**args) + handler_cache.instance = handler_instance + + service_instances = [ + instance + for instance in args.values() + if isinstance(instance, service.Service) + ] + handler_cache.used_services = service_instances + for service_instance in service_instances: + if service_instance not in runner_context.running_services: + runner_context.running_services[service_instance] = ( + domain.RunningServiceInfo(used_by=[]) + ) + + runner_context.running_services[service_instance].used_by.append( + handler_instance + ) + else: - logger.trace(f"R{run_id} | Load action handler {handler.name}") + # handler is a plain function, not a class — nothing to instantiate + handler_cache.exec_info = exec_info + exec_info.status = domain.ActionHandlerExecInfoStatus.INITIALIZED + return + + if ( + exec_info.lifecycle is not None + and exec_info.lifecycle.on_initialize_callable is not None + ): + logger.trace(f"Initialize {handler.name} action handler") try: - action_handler = run_utils.import_module_member_by_source_str( - handler.source + initialize_callable_result = ( + exec_info.lifecycle.on_initialize_callable() ) - except ModuleNotFoundError as error: + if inspect.isawaitable(initialize_callable_result): + await initialize_callable_result + except Exception as e: logger.error( - f"R{run_id} | Source of action handler {handler.name} '{handler.source}'" - " could not be imported" + f"Failed to initialize action handler {handler.name}: {e}" ) - logger.error(error) raise ActionFailedException( - f"Import of action handler '{handler.name}' failed(Run {run_id}): {handler.source}" - ) + f"Initialisation of action handler '{handler.name}' failed: {e}" + 
) from e - def get_handler_config(param_type): - # TODO: validation errors - return param_type(**handler_raw_config) + exec_info.status = domain.ActionHandlerExecInfoStatus.INITIALIZED - def get_process_executor(param_type): - return action_exec_info.process_executor - exec_info = domain.ActionHandlerExecInfo() - # save immediately in context to be able to shutdown it if the first execution - # is interrupted by stopping ER - handler_cache.exec_info = exec_info - if inspect.isclass(action_handler): - args = await resolve_func_args_with_di( - func=action_handler.__init__, - known_args={ - "config": get_handler_config, - "process_executor": get_process_executor, - }, - params_to_ignore=["self"], - ) +async def execute_action_handler( + handler: domain.ActionHandlerDeclaration, + payload: code_action.RunActionPayload | None, + run_context: code_action.RunActionContext | AsyncPlaceholderContext, + run_id: int, + action_exec_info: domain.ActionExecInfo, + action_cache: domain.ActionCache, + runner_context: context.RunnerContext, +) -> code_action.RunActionResult: + logger.trace(f"R{run_id} | Run {handler.name} on {str(payload)[:100]}...") + if handler.name in action_cache.handler_cache_by_name: + handler_cache = action_cache.handler_cache_by_name[handler.name] + else: + handler_cache = domain.ActionHandlerCache() + action_cache.handler_cache_by_name[handler.name] = handler_cache - if "lifecycle" in args: - exec_info.lifecycle = args["lifecycle"] + start_time = time.time_ns() + execution_result: code_action.RunActionResult | None = None - handler_instance = action_handler(**args) - handler_cache.instance = handler_instance - handler_run_func = handler_instance.run + if handler_cache.instance is not None: + handler_instance = handler_cache.instance + handler_run_func = handler_instance.run + exec_info = handler_cache.exec_info + # TODO: check status of exec_info? 
+ logger.trace( + f"R{run_id} | Instance of action handler {handler.name} found in cache" + ) + else: + await ensure_handler_instantiated( + handler=handler, + handler_cache=handler_cache, + action_exec_info=action_exec_info, + runner_context=runner_context, + ) + if handler_cache.instance is not None: + handler_run_func = handler_cache.instance.run else: + # handler is a plain function + action_handler = run_utils.import_module_member_by_source_str( + handler.source + ) handler_run_func = action_handler - - if ( - exec_info.lifecycle is not None - and exec_info.lifecycle.on_initialize_callable is not None - ): - logger.trace(f"R{run_id} | Initialize {handler.name} action handler") - try: - initialize_callable_result = ( - exec_info.lifecycle.on_initialize_callable() - ) - if inspect.isawaitable(initialize_callable_result): - await initialize_callable_result - except Exception as e: - logger.error( - f"R{run_id} | Failed to initialize action handler {handler.name}: {e}" - ) - raise ActionFailedException( - f"Initialisation of action handler '{handler.name}' failed(Run {run_id}): {e}" - ) - - exec_info.status = domain.ActionHandlerExecInfoStatus.INITIALIZED + exec_info = handler_cache.exec_info def get_run_payload(param_type): return payload @@ -489,6 +641,7 @@ def get_run_context(param_type): ) # TODO: cache parameters try: + logger.trace(f"Call handler {handler.name}(run {run_id})") # there is also `inspect.iscoroutinefunction` but it cannot recognize coroutine # functions which are class methods. Use `isawaitable` on result instead. 
call_result = handler_run_func(**args) @@ -500,18 +653,18 @@ def get_run_context(param_type): if isinstance(exception, code_action.StopActionRunWithResult): action_result = exception.result response = action_result_to_run_action_response(action_result, "string") - raise StopWithResponse(response=response) - elif isinstance(exception, iactionrunner.BaseRunActionException) or isinstance( - exception, code_action.ActionFailedException - ): + raise StopWithResponse(response=response) from exception + elif isinstance( + exception, iactionrunner.BaseRunActionException + ) or isinstance(exception, code_action.ActionFailedException): error_str = exception.message else: logger.error("Unhandled exception in action handler:") - logger.exception(exception) error_str = str(exception) + logger.exception(exception) raise ActionFailedException( f"Running action handler '{handler.name}' failed(Run {run_id}): {error_str}" - ) + ) from exception end_time = time.time_ns() duration = (end_time - start_time) / 1_000_000 @@ -547,7 +700,7 @@ async def run_subresult_coros_concurrently( errors_str += str(exc) + "." 
raise ActionFailedException( f"Concurrent running action handlers of '{action_name}' failed(Run {run_id}): {errors_str}" - ) + ) from eg action_subresult: code_action.RunActionResult | None = None for coro_task in coros_tasks: diff --git a/finecode_extension_runner/src/finecode_extension_runner/api.proto b/finecode_extension_runner/src/finecode_extension_runner/api.proto deleted file mode 100644 index 2a13af65..00000000 --- a/finecode_extension_runner/src/finecode_extension_runner/api.proto +++ /dev/null @@ -1,25 +0,0 @@ -syntax = "proto3"; - -package finecode_extension_runner; - -message UpdateConfigRequest { - string working_dir = 1; - map config = 2; -} - -message UpdateConfigResponse { -} - -message RunActionRequest { - string action_name = 1; - string apply_on = 2; -} - -message RunActionResponse { - string result_text = 1; -} - -service ExtensionRunnerService { - rpc UpdateConfig(UpdateConfigRequest) returns (UpdateConfigResponse); - rpc RunAction(RunActionRequest) returns (RunActionResponse); -} diff --git a/finecode_extension_runner/src/finecode_extension_runner/cli.py b/finecode_extension_runner/src/finecode_extension_runner/cli.py index 73aea9f6..0cbedc58 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/cli.py +++ b/finecode_extension_runner/src/finecode_extension_runner/cli.py @@ -7,7 +7,7 @@ from loguru import logger import finecode_extension_runner.start as runner_start -from finecode_extension_runner import global_state +from finecode_extension_runner import global_state, logs @click.group() @@ -19,7 +19,6 @@ def main(): @main.command() @click.option("--trace", "trace", is_flag=True, default=False) @click.option("--debug", "debug", is_flag=True, default=False) -@click.option("--debug-port", "debug_port", type=int, default=5680) @click.option( "--project-path", "project_path", @@ -28,16 +27,24 @@ def main(): ) @click.option("--env-name", "env_name", type=str) def start( - trace: bool, debug: bool, debug_port: int, project_path: 
Path, env_name: str | None + trace: bool, + debug: bool, + project_path: Path, + env_name: str | None, ): + debug_port: int = 0 if debug is True: import debugpy # avoid debugger warnings printed to stdout, they affect I/O communication os.environ["PYDEVD_DISABLE_FILE_VALIDATION"] = "1" + + debug_port = runner_start._find_free_port() try: debugpy.listen(debug_port) + click.echo(f"Debug session: 127.0.0.1:{debug_port}") debugpy.wait_for_client() + debugpy.breakpoint() except Exception as e: logger.info(e) @@ -49,7 +56,18 @@ def start( global_state.project_dir_path = project_path global_state.env_name = env_name - runner_start.start_runner_sync(env_name) + log_file_path = (project_path + / ".venvs" + / env_name + / "logs" + / "runner.log") + + logs.setup_logging(log_level="INFO" if trace is False else "TRACE", log_file_path=log_file_path) + + if debug is True: + logger.info(f"Started debugger on 127.0.0.1:{debug_port}") + + runner_start.start_runner_sync() @main.command() diff --git a/finecode_extension_runner/src/finecode_extension_runner/context.py b/finecode_extension_runner/src/finecode_extension_runner/context.py index 88838a83..8e237f60 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/context.py +++ b/finecode_extension_runner/src/finecode_extension_runner/context.py @@ -2,11 +2,14 @@ from dataclasses import dataclass, field from finecode_extension_runner import domain +from finecode_extension_api import service @dataclass class RunnerContext: project: domain.Project action_cache_by_name: dict[str, domain.ActionCache] = field(default_factory=dict) - # don't overwrite, only append and remove - docs_owned_by_client: list[str] = field(default_factory=list) + project_config_version: int = 0 + running_services: dict[service.Service, domain.RunningServiceInfo] = field( + default_factory=dict + ) diff --git a/finecode_extension_runner/src/finecode_extension_runner/di/bootstrap.py 
b/finecode_extension_runner/src/finecode_extension_runner/di/bootstrap.py index fd011db5..af41af9f 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/di/bootstrap.py +++ b/finecode_extension_runner/src/finecode_extension_runner/di/bootstrap.py @@ -1,176 +1,200 @@ +import collections.abc import functools +import importlib.metadata import pathlib -from typing import Any, Awaitable, Callable +import re +from typing import Any, Callable -try: - import fine_python_ast -except ImportError: - fine_python_ast = None +import ordered_set -try: - import fine_python_mypy -except ImportError: - fine_python_mypy = None +from loguru import logger -try: - import fine_python_package_info -except ImportError: - fine_python_package_info = None - -from finecode_extension_api.interfaces import ( +from finecode_extension_api.interfaces import ( # idevenvinfoprovider, iactionrunner, icache, icommandrunner, + iextensionrunnerinfoprovider, + ifileeditor, ifilemanager, ilogger, iprojectinfoprovider, - iextensionrunnerinfoprovider, - iprojectfileclassifier, - ipypackagelayoutinfoprovider, + irepositorycredentialsprovider, ) -from finecode_extension_runner import global_state, schemas + +from finecode_extension_runner import domain from finecode_extension_runner._services import run_action from finecode_extension_runner.di import _state, resolver -from finecode_extension_runner.impls import ( +from finecode_extension_runner.run_utils import import_module_member_by_source_str +from finecode_extension_runner.impls import ( # dev_env_info_provider, action_runner, command_runner, + extension_runner_info_provider, + file_editor, file_manager, inmemory_cache, loguru_logger, project_info_provider, - extension_runner_info_provider, - project_file_classifier, + repository_credentials_provider, + service_registry, ) - def bootstrap( - get_document_func: Callable, - save_document_func: Callable, project_def_path_getter: Callable[[], pathlib.Path], - project_raw_config_getter: 
Callable[[str], Awaitable[dict[str, Any]]], + project_raw_config_getter: Callable[ + [str], collections.abc.Awaitable[dict[str, Any]] + ], + current_project_raw_config_version_getter: Callable[[], int], cache_dir_path_getter: Callable[[], pathlib.Path], + actions_names_getter: Callable[[], list[str]], + action_by_name_getter: Callable[[str], domain.ActionDeclaration], + current_env_name_getter: Callable[[], str], + handler_packages: set[str], + service_declarations: list, ): # logger_instance = loguru_logger.LoguruLogger() logger_instance = loguru_logger.get_logger() + command_runner_instance = command_runner.CommandRunner(logger=logger_instance) + # dev_env_info_provider_instance = dev_env_info_provider.DevEnvInfoProvider(logger=logger_instance) file_manager_instance = file_manager.FileManager( - docs_owned_by_client=global_state.runner_context.docs_owned_by_client, - get_document_func=get_document_func, - save_document_func=save_document_func, logger=logger_instance, ) + file_editor_instance = file_editor.FileEditor( + logger=logger_instance, file_manager=file_manager_instance + ) cache_instance = inmemory_cache.InMemoryCache( - file_manager=file_manager_instance, logger=logger_instance + file_editor=file_editor_instance, logger=logger_instance ) action_runner_instance = action_runner.ActionRunner( - internal_service_func=run_action_wrapper + run_action_func=run_action.run_action, + actions_names_getter=actions_names_getter, + action_by_name_getter=action_by_name_getter, ) _state.container[ilogger.ILogger] = logger_instance _state.container[icommandrunner.ICommandRunner] = command_runner_instance _state.container[ifilemanager.IFileManager] = file_manager_instance + _state.container[ifileeditor.IFileEditor] = file_editor_instance _state.container[icache.ICache] = cache_instance _state.container[iactionrunner.IActionRunner] = action_runner_instance - if fine_python_ast is not None: - _state.factories[fine_python_ast.IPythonSingleAstProvider] = ( - 
python_single_ast_provider_factory - ) - if fine_python_mypy is not None: - _state.factories[fine_python_mypy.IMypySingleAstProvider] = ( - mypy_single_ast_provider_factory - ) + _state.container[irepositorycredentialsprovider.IRepositoryCredentialsProvider] = ( + repository_credentials_provider.ConfigRepositoryCredentialsProvider() + ) + + # _state.container[idevenvinfoprovider.IDevEnvInfoProvider] = dev_env_info_provider_instance + _state.factories[iprojectinfoprovider.IProjectInfoProvider] = functools.partial( project_info_provider_factory, project_def_path_getter=project_def_path_getter, project_raw_config_getter=project_raw_config_getter, + current_project_raw_config_version_getter=current_project_raw_config_version_getter, ) _state.factories[iextensionrunnerinfoprovider.IExtensionRunnerInfoProvider] = ( functools.partial( extension_runner_info_provider_factory, cache_dir_path_getter=cache_dir_path_getter, + current_env_name_getter=current_env_name_getter, ) ) - _state.factories[iprojectfileclassifier.IProjectFileClassifier] = ( - project_file_classifier_factory - ) - if fine_python_package_info is not None: - _state.factories[ipypackagelayoutinfoprovider.IPyPackageLayoutInfoProvider] = ( - py_package_layout_info_provider_factory - ) + _activate_extensions(handler_packages) + _apply_user_service_config(service_declarations) - # TODO: parameters from config +def _activate_extensions(handler_packages: set[str]) -> None: + registry = service_registry.ServiceRegistry() + all_eps = { + ep.name: ep + for ep in importlib.metadata.entry_points(group="finecode.activator") + } + packages_to_activate = _collect_activatable_packages(handler_packages, all_eps) -async def run_action_wrapper( - action_name: str, payload: dict[str, Any] -) -> dict[str, Any]: - request = schemas.RunActionRequest(action_name=action_name, params=payload) - options = schemas.RunActionOptions(result_format="json") + for pkg_name in packages_to_activate: + try: + activator_cls = 
all_eps[pkg_name].load() + activator_cls(registry=registry).activate() + logger.trace(f"Activated extension '{pkg_name}'") + except Exception as e: + logger.error(f"Failed to activate extension '{pkg_name}': {e}") - try: - response = await run_action.run_action(request=request, options=options) - except run_action.ActionFailedException as exception: - raise iactionrunner.ActionRunFailed(exception.message) - return response.result +def _apply_user_service_config(service_declarations: list[object]) -> None: + registry = service_registry.ServiceRegistry() + for svc in service_declarations: + try: + interface = import_module_member_by_source_str(svc.interface) + impl_cls = import_module_member_by_source_str(svc.source) + registry.register_impl(interface, impl_cls) + logger.trace(f"Configured service '{svc.source}' for '{svc.interface}'") + except Exception as e: + logger.error(f"Failed to configure service '{svc.source}': {e}") -def python_single_ast_provider_factory(container): - return fine_python_ast.PythonSingleAstProvider( - file_manager=container[ifilemanager.IFileManager], - cache=container[icache.ICache], - logger=container[ilogger.ILogger], - ) +def _collect_activatable_packages( + seed_packages: set[str], + all_eps: dict[str, importlib.metadata.EntryPoint], +) -> ordered_set.OrderedSet[str]: + """Expand seed_packages to include transitive deps that have activators.""" + result: ordered_set.OrderedSet[str] = ordered_set.OrderedSet([]) + visited: set[str] = set() + queue = list(seed_packages) + while queue: + pkg = queue.pop() + normalized = _normalize_pkg_name(pkg) + if normalized in visited: + continue + visited.add(normalized) -def mypy_single_ast_provider_factory(container): - return fine_python_mypy.MypySingleAstProvider( - file_manager=container[ifilemanager.IFileManager], - cache=container[icache.ICache], - logger=container[ilogger.ILogger], - ) + if normalized in all_eps: + result.add(normalized) + + try: + requires = importlib.metadata.requires(pkg) or 
[] + except importlib.metadata.PackageNotFoundError: + continue + + for req_str in requires: + dep_name = _parse_dep_name(req_str) + dep_normalized = _normalize_pkg_name(dep_name) + if dep_normalized not in visited and dep_normalized in all_eps: + queue.append(dep_name) + + return result + + +def _normalize_pkg_name(name: str) -> str: + return re.sub(r"[-_.]+", "_", name).lower() + + +def _parse_dep_name(req_str: str) -> str: + # PEP 508: package name precedes any version specifier, extra marker, or whitespace + return re.split(r"[\s>= None: self.name: str = name self.config: dict[str, typing.Any] = config - self.handlers: list[ActionHandler] = handlers + self.handlers: list[ActionHandlerDeclaration] = handlers self.source: str = source -class ActionHandler: +class ActionHandlerDeclaration: def __init__(self, name: str, source: str, config: dict[str, typing.Any]) -> None: self.name = name self.source = source @@ -36,7 +36,7 @@ def __init__( name: str, dir_path: Path, def_path: Path, - actions: dict[str, Action], + actions: dict[str, ActionDeclaration], action_handler_configs: dict[str, dict[str, typing.Any]], ) -> None: self.name = name @@ -52,13 +52,11 @@ def __str__(self) -> str: class ActionExecInfo: def __init__( self, - payload_type: typing.Type[code_action.RunActionPayload] | None, - run_context_type: typing.Type[code_action.RunActionContext] | None, + payload_type: type[code_action.RunActionPayload] | None, + run_context_type: type[code_action.RunActionContext] | None, ) -> None: - self.payload_type: typing.Type[code_action.RunActionPayload] | None = ( - payload_type - ) - self.run_context_type: typing.Type[code_action.RunActionContext] | None = ( + self.payload_type: type[code_action.RunActionPayload] | None = payload_type + self.run_context_type: type[code_action.RunActionContext] | None = ( run_context_type ) # instantiation of process executor impl is cheap. 
To avoid analyzing all @@ -89,8 +87,11 @@ class ActionCache: @dataclasses.dataclass class ActionHandlerCache: + # set all values by default to None and cache will be filled step-by-step if step + # was successful instance: code_action.ActionHandler | None = None exec_info: ActionHandlerExecInfo | None = None + used_services: list[service.Service] | None = None class TextDocumentInfo: @@ -112,3 +113,8 @@ class TextDocumentNotOpened(Exception): ... class PartialResult(typing.NamedTuple): token: int | str value: typing.Any + + +@dataclasses.dataclass +class RunningServiceInfo: + used_by: list[code_action.ActionHandler] diff --git a/finecode_extension_runner/src/finecode_extension_runner/impls/action_runner.py b/finecode_extension_runner/src/finecode_extension_runner/impls/action_runner.py index 98bf5d84..4e439618 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/impls/action_runner.py +++ b/finecode_extension_runner/src/finecode_extension_runner/impls/action_runner.py @@ -1,11 +1,49 @@ -from typing import Any - +import collections.abc +import typing +from finecode_extension_api import code_action from finecode_extension_api.interfaces import iactionrunner +from finecode_extension_runner import domain + class ActionRunner(iactionrunner.IActionRunner): - def __init__(self, internal_service_func): - self._internal_service_func = internal_service_func + def __init__(self, run_action_func: typing.Callable[[domain.ActionDeclaration, code_action.RunActionPayload, code_action.RunActionMeta], collections.abc.Coroutine[None, None, code_action.RunActionResult]], + actions_names_getter: typing.Callable[[], list[str]], + action_by_name_getter: typing.Callable[[str], domain.ActionDeclaration]): + self._run_action_func = run_action_func + self._actions_names_getter = actions_names_getter + self._action_by_name_getter = action_by_name_getter + + @typing.override + async def run_action( + self, action: iactionrunner.ActionDeclaration[iactionrunner.ActionT], payload: 
code_action.RunActionPayload, meta: code_action.RunActionMeta + ) -> code_action.RunActionResult: + try: + return await self._run_action_func(action, payload, meta) + except Exception as exception: + raise iactionrunner.ActionRunFailed(str(exception)) from exception + + @typing.override + def get_actions_names(self) -> list[str]: + return self._actions_names_getter() + + @typing.override + def get_actions_by_source(self, source: str, expected_type: type[iactionrunner.ActionT]) -> list[iactionrunner.ActionDeclaration[iactionrunner.ActionT]]: + return [ + action + for name in self._actions_names_getter() + if (action := self._action_by_name_getter(name)).source == source + ] + + @typing.override + def get_action_by_name(self, name: str, expected_type: type[iactionrunner.ActionT]) -> iactionrunner.ActionDeclaration[iactionrunner.ActionT]: + try: + return self._action_by_name_getter(name) + except KeyError as exception: + raise iactionrunner.ActionNotFound(f"Action '{name}' not found") from exception - async def run_action(self, name: str, payload: dict[str, Any]) -> dict[str, Any]: - return await self._internal_service_func(action_name=name, payload=payload) + @typing.override + def get_actions_for_language(self, source: str, language: str, expected_type: type[iactionrunner.ActionT]) -> list[iactionrunner.ActionDeclaration[iactionrunner.ActionT]]: + return [ + action for action in self.get_actions_by_source(source=source, expected_type=expected_type) if action.name.endswith('_' + language) + ] diff --git a/finecode_extension_runner/src/finecode_extension_runner/impls/dev_env_info_provider.py b/finecode_extension_runner/src/finecode_extension_runner/impls/dev_env_info_provider.py new file mode 100644 index 00000000..754e4bae --- /dev/null +++ b/finecode_extension_runner/src/finecode_extension_runner/impls/dev_env_info_provider.py @@ -0,0 +1,39 @@ +# import pathlib +# import typing + +# from finecode_extension_api import common_types +from 
finecode_extension_api.interfaces import idevenvinfoprovider, ilogger + + +class DevEnvInfoProvider( + idevenvinfoprovider.IDevEnvInfoProvider +): + def __init__( + self, + logger: ilogger.ILogger, + # docs_owned_by_ide: list[str], + # get_document_func: typing.Callable, + # save_document_func: typing.Callable, + ) -> None: + self.logger = logger + # self.docs_owned_by_ide = docs_owned_by_ide + # self.get_document_func = get_document_func + # self.save_document_func = save_document_func + + # async def owned_files(self, dev_env: common_types.DevEnv) -> list[pathlib.Path]: + # ... + + # async def is_owner_of(self, dev_env: common_types.DevEnv, file_path: pathlib.Path) -> bool: + # ... + + # async def file_is_owned_by(self, file_path: pathlib.Path) -> list[common_types.DevEnv]: + # ... + + # async def files_owned_by_dev_envs(self) -> list[pathlib.Path]: + # ... + + # async def get_file_content(self, file_path: pathlib.Path) -> bytes: + # ... + + # async def save_file_content(self, file_path: pathlib.Path, file_content: bytes) -> None: + # ... 
diff --git a/finecode_extension_runner/src/finecode_extension_runner/impls/extension_runner_info_provider.py b/finecode_extension_runner/src/finecode_extension_runner/impls/extension_runner_info_provider.py index 236a5d6c..508f152f 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/impls/extension_runner_info_provider.py +++ b/finecode_extension_runner/src/finecode_extension_runner/impls/extension_runner_info_provider.py @@ -8,13 +8,17 @@ class ExtensionRunnerInfoProvider( iextensionrunnerinfoprovider.IExtensionRunnerInfoProvider ): def __init__( - self, cache_dir_path_getter: Callable[[], pathlib.Path], logger: ilogger.ILogger + self, cache_dir_path_getter: Callable[[], pathlib.Path], logger: ilogger.ILogger, current_env_name_getter: Callable[[], str] ) -> None: self.cache_dir_path_getter = cache_dir_path_getter self.logger = logger + self.current_env_name_getter = current_env_name_getter self._site_packages_cache: dict[pathlib.Path, list[pathlib.Path]] = {} + def get_current_env_name(self) -> str: + return self.current_env_name_getter() + def get_cache_dir_path(self) -> pathlib.Path: return self.cache_dir_path_getter() @@ -25,6 +29,10 @@ def get_venv_dir_path_of_env(self, env_name: str) -> pathlib.Path: venvs_dir_path = current_venv_dir_path.parent return venvs_dir_path / env_name + def get_current_venv_dir_path(self) -> pathlib.Path: + current_env_name = self.get_current_env_name() + return self.get_venv_dir_path_of_env(env_name=current_env_name) + def get_venv_site_packages(self, venv_dir_path: pathlib.Path) -> list[pathlib.Path]: # venv site packages can be cached because they don't change and if user runs # prepare-envs or updates environment in any other way, current ER should be diff --git a/finecode_extension_runner/src/finecode_extension_runner/impls/file_editor.py b/finecode_extension_runner/src/finecode_extension_runner/impls/file_editor.py new file mode 100644 index 00000000..b160695d --- /dev/null +++ 
b/finecode_extension_runner/src/finecode_extension_runner/impls/file_editor.py @@ -0,0 +1,590 @@ +import asyncio +import contextlib +import collections.abc +import dataclasses +import pathlib +from typing import TypeVar + +from finecode_extension_api.interfaces import ifileeditor, ifilemanager, ilogger + + +T = TypeVar("T") + +class QueueIterator: + def __init__(self, queue: asyncio.Queue[T]): + self._queue = queue + + def __aiter__(self): + return self + + async def __anext__(self): + item = await self._queue.get() + if item is None: # Sentinel + raise StopAsyncIteration + return item + + +class MultiQueueIterator(collections.abc.AsyncIterator[T]): + """Merges multiple asyncio queues into a single async iterator. + + Supports dynamic addition and removal of queues during iteration. + """ + + def __init__(self, queues: list[asyncio.Queue[T]]) -> None: + self._queues: list[asyncio.Queue[T]] = queues + self._queues_changed_event: asyncio.Event = asyncio.Event() + self._shutdown_event: asyncio.Event = asyncio.Event() + + def shutdown(self) -> None: + """Shutdown the iterator, causing it to raise StopAsyncIteration.""" + self._shutdown_event.set() + + def add_queue(self, queue: asyncio.Queue[T]) -> None: + """Add a queue to be merged.""" + self._queues.append(queue) + self._queues_changed_event.set() + + def remove_queue(self, queue: asyncio.Queue[T]) -> None: + """Remove a queue from being merged.""" + if queue in self._queues: + self._queues.remove(queue) + self._queues_changed_event.set() + + def __aiter__(self) -> "MultiQueueIterator[T]": + return self + + async def __anext__(self) -> T: + while True: + if not self._queues: + raise StopAsyncIteration + + # Clear the event before starting wait + self._queues_changed_event.clear() + + # Create get tasks for all queues + tasks = {asyncio.create_task(queue.get()): queue for queue in self._queues} + + # Also wait for the queues changed event and shutdown event + queues_changed_task = 
asyncio.create_task(self._queues_changed_event.wait()) + shutdown_task = asyncio.create_task(self._shutdown_event.wait()) + # Wait for either a queue to have an item, queues to change, or shutdown + all_tasks = set(tasks.keys()) | {queues_changed_task, shutdown_task} + + try: + done, pending = await asyncio.wait( + all_tasks, return_when=asyncio.FIRST_COMPLETED + ) + + # Cancel all pending tasks + for task in pending: + task.cancel() + + # If shutdown, stop iteration + if shutdown_task in done: + raise StopAsyncIteration + + # If queues changed, restart the loop + if queues_changed_task in done: + continue + + # Get the result from the completed task + completed_task = done.pop() + result = await completed_task + + return result + except asyncio.CancelledError: + # Cancel all tasks on cancellation + for task in all_tasks: + if not task.done(): + task.cancel() + raise + finally: + # Make sure control tasks are cancelled if they're still pending + if not queues_changed_task.done(): + queues_changed_task.cancel() + if not shutdown_task.done(): + shutdown_task.cancel() + + async def aclose(self) -> None: + """Close the iterator and cleanup resources.""" + self.shutdown() + + +@dataclasses.dataclass +class OpenedFileInfo: + content: str + version: str + opened_by: list[ifileeditor.IFileEditorSession] + + +@dataclasses.dataclass +class BlockedFileInfo: + blocked_by: "FileEditorSession" + unblock_event: asyncio.Event + + +class BaseSubscription: ... 
+ + +class SubscriptionToFileChanges(BaseSubscription): + def __init__(self) -> None: + self.event_queue: asyncio.Queue[ifileeditor.FileChangeEvent] = asyncio.Queue() + + +class SubscriptionToAllEvents(BaseSubscription): + def __init__(self) -> None: + self.event_queue: asyncio.Queue[ifileeditor.FileEvent] = asyncio.Queue() + + +class FileEditorSession(ifileeditor.IFileEditorSession): + def __init__( + self, + logger: ilogger.ILogger, + author: ifileeditor.FileOperationAuthor, + file_manager: ifilemanager.IFileManager, + opened_files: dict[pathlib.Path, OpenedFileInfo], + blocked_files: dict[pathlib.Path, BlockedFileInfo], + file_change_subscriptions: dict[ + pathlib.Path, + dict[ + ifileeditor.IFileEditorSession, + SubscriptionToFileChanges, + ], + ], + all_events_subscriptions: dict[ + ifileeditor.IFileEditorSession, + SubscriptionToAllEvents, + ], + ) -> None: + self.logger = logger + self.author = author + self._file_manager = file_manager + self._opened_files = opened_files + self._blocked_files = blocked_files + self._file_change_subscriptions = file_change_subscriptions + self._all_events_subscriptions = all_events_subscriptions + + self._opened_file_subscription: ( + MultiQueueIterator[ifileeditor.FileChangeEvent] | None + ) = None + + @property + def _subscribed_to_opened_files(self) -> bool: + return self._opened_file_subscription is not None + + def close(self) -> None: + """Close the session and cleanup all resources.""" + # Shutdown active subscription first + if self._opened_file_subscription is not None: + self._opened_file_subscription.shutdown() + + # Clean up subscriptions + files_to_unsubscribe: list[pathlib.Path] = [] + for file_path, sessions_dict in self._file_change_subscriptions.items(): + if self in sessions_dict: + files_to_unsubscribe.append(file_path) + + for file_path in files_to_unsubscribe: + self._unsubscribe_from_file_changes(file_path=file_path) + + self._opened_file_subscription = None + + # Close all files opened by this session 
+ files_to_close: list[pathlib.Path] = [] + for file_path, opened_file_info in self._opened_files.items(): + if self in opened_file_info.opened_by: + files_to_close.append(file_path) + + for file_path in files_to_close: + try: + opened_file_info = self._opened_files[file_path] + opened_file_info.opened_by.remove(self) + + # Remove file from opened_files if no sessions have it open + if len(opened_file_info.opened_by) == 0: + del self._opened_files[file_path] + except (KeyError, ValueError): + # File was already removed or session not in list + pass + + # Unblock files blocked by this session + files_to_unblock: list[pathlib.Path] = [] + for file_path, blocked_file_info in self._blocked_files.items(): + if blocked_file_info.blocked_by == self: + files_to_unblock.append(file_path) + + for file_path in files_to_unblock: + try: + blocked_file_info = self._blocked_files.pop(file_path) + blocked_file_info.unblock_event.set() + except KeyError: + # File was already unblocked + pass + + async def change_file( + self, file_path: pathlib.Path, change: ifileeditor.FileChange + ) -> None: + self.logger.trace(f"Change file {file_path}") + if file_path in self._opened_files: + opened_file_info = self._opened_files[file_path] + file_content = opened_file_info.content + new_file_content = FileEditorSession.apply_change_to_file_content( + change=change, file_content=file_content + ) + self.logger.info(str(change)) + self.logger.info(f"||{file_content}||{new_file_content}||") + self._update_opened_file_info( + file_path=file_path, new_file_content=new_file_content + ) + self.logger.trace(f"File {file_path} is opened, updated its content") + else: + file_content = await self._file_manager.get_content(file_path=file_path) + new_file_content = FileEditorSession.apply_change_to_file_content( + change=change, file_content=file_content + ) + await self._file_manager.save_file( + file_path=file_path, file_content=new_file_content + ) + self.logger.trace( + f"File {file_path} is not opened, 
saved it in file system" + ) + + # notify subscribers + if file_path in self._file_change_subscriptions or len(self._all_events_subscriptions) > 0: + self._notify_subscribers_about_file_change( + file_path=file_path, change=change + ) + + @staticmethod + def apply_change_to_file_content( + change: ifileeditor.FileChange, file_content: str + ) -> str: + if isinstance(change, ifileeditor.FileChangeFull): + return change.text + else: + # Split file content into lines + lines = file_content.splitlines(keepends=True) + + # Get start and end positions + start_line = change.range.start.line + start_char = change.range.start.character + end_line = change.range.end.line + end_char = change.range.end.character + + # Validate range + if start_line < 0 or end_line < 0: + raise ValueError("Invalid range: negative line numbers not allowed") + + if end_line < start_line or ( + end_line == start_line and end_char < start_char + ): + raise ValueError("Invalid range: end position is before start position") + + # For bounds checking: line indices beyond file length should be treated as + # appending to the end. LSP spec allows this for insertions at end of file, + # make it also here the same. + # However, if both start and end are beyond bounds, it's likely an error. 
+ if start_line > len(lines): + raise ValueError( + f"Invalid range: start line {start_line} is beyond file length {len(lines)}" + ) + + # Build the new content + # Part before the change + before_parts: list[str] = [] + for i in range(start_line): + before_parts.append(lines[i]) + if start_line < len(lines): + before_parts.append(lines[start_line][:start_char]) + before = "".join(before_parts) + + # Part after the change + after_parts: list[str] = [] + if end_line < len(lines): + after_parts.append(lines[end_line][end_char:]) + for i in range(end_line + 1, len(lines)): + after_parts.append(lines[i]) + after = "".join(after_parts) + + new_file_content = before + change.text + after + return new_file_content + + @contextlib.asynccontextmanager + async def subscribe_to_changes_of_opened_files( + self, + ) -> collections.abc.AsyncIterator[ifileeditor.FileChangeEvent]: + if self._subscribed_to_opened_files is True: + raise ValueError("This session is already subscribed to opened files") + + change_queues: list[asyncio.Queue[ifileeditor.FileChangeEvent]] = [] + for file_path, opened_file_info in self._opened_files.items(): + if self in opened_file_info.opened_by: + change_queue = self._subscribe_to_file_changes(file_path=file_path) + change_queues.append(change_queue) + + self._opened_file_subscription = MultiQueueIterator(queues=change_queues) + + try: + yield self._opened_file_subscription + finally: + # Unsubscribe from all files + files_to_unsubscribe: list[pathlib.Path] = [] + for file_path, sessions_dict in self._file_change_subscriptions.items(): + if self in sessions_dict: + files_to_unsubscribe.append(file_path) + + for file_path in files_to_unsubscribe: + self._unsubscribe_from_file_changes(file_path=file_path) + + self._opened_file_subscription.shutdown() + self._opened_file_subscription = None + + def _subscribe_to_file_changes( + self, file_path: pathlib.Path + ) -> asyncio.Queue[ifileeditor.FileChangeEvent]: + if file_path not in 
self._file_change_subscriptions: + self._file_change_subscriptions[file_path] = {} + + new_subscription = SubscriptionToFileChanges() + self._file_change_subscriptions[file_path][self] = new_subscription + + return new_subscription.event_queue + + def _unsubscribe_from_file_changes( + self, file_path: pathlib.Path + ) -> asyncio.Queue[ifileeditor.FileChangeEvent]: + subscription = self._file_change_subscriptions[file_path][self] + + del self._file_change_subscriptions[file_path][self] + + if len(self._file_change_subscriptions[file_path]) == 0: + del self._file_change_subscriptions[file_path] + + return subscription.event_queue + + def _notify_subscribers_about_file_change( + self, file_path: pathlib.Path, change: ifileeditor.FileChange + ) -> None: + file_change_event = ifileeditor.FileChangeEvent( + file_path=file_path, author=self.author, change=change + ) + for subscription in self._file_change_subscriptions[file_path].values(): + subscription.event_queue.put_nowait(file_change_event) + + for subscription in self._all_events_subscriptions.values(): + subscription.event_queue.put_nowait(file_change_event) + + async def open_file(self, file_path: pathlib.Path) -> None: + if file_path in self._opened_files: + # file is already opened by one of the sessions, just add current session to + # the `opened_by` list + opened_file_info = self._opened_files[file_path] + if self in opened_file_info.opened_by: + raise ifileeditor.FileAlreadyOpenError( + f"{file_path} is already opened in this session" + ) + + opened_file_info.opened_by.append(self) + else: + initial_file_content = await self._file_manager.get_content( + file_path=file_path + ) + initial_file_version = await self._file_manager.get_file_version( + file_path=file_path + ) + new_opened_file_info = OpenedFileInfo( + content=initial_file_content, + version=initial_file_version, + opened_by=[self], + ) + self._opened_files[file_path] = new_opened_file_info + + if self._subscribed_to_opened_files: + change_queue = 
self._subscribe_to_file_changes(file_path=file_path) + assert self._opened_file_subscription is not None + self._opened_file_subscription.add_queue(change_queue) + + if len(self._all_events_subscriptions) > 0: + file_open_event = ifileeditor.FileOpenEvent(file_path=file_path) + for subscription in self._all_events_subscriptions.values(): + subscription.event_queue.put_nowait(file_open_event) + + async def save_opened_file(self, file_path: pathlib.Path) -> None: + if file_path not in self._opened_files: + raise ValueError(f"{file_path} is not opened") + opened_file_info = self._opened_files[file_path] + + if self not in opened_file_info.opened_by: + raise ValueError(f"{file_path} is not opened in this session") + + file_content = opened_file_info.content + await self._file_manager.save_file( + file_path=file_path, file_content=file_content + ) + + async def close_file(self, file_path: pathlib.Path) -> None: + if self._subscribed_to_opened_files: + change_queue = self._unsubscribe_from_file_changes(file_path=file_path) + assert self._opened_file_subscription is not None + self._opened_file_subscription.remove_queue(change_queue) + + try: + opened_file_info = self._opened_files[file_path] + try: + opened_file_info.opened_by.remove(self) + except ValueError as exception: + raise ValueError( + f"{file_path} is not opened in this session" + ) from exception + + if len(opened_file_info.opened_by) == 0: + del self._opened_files[file_path] + except KeyError as exception: + raise ValueError(f"{file_path} is not opened") from exception + + if len(self._all_events_subscriptions) > 0: + file_close_event = ifileeditor.FileOpenEvent(file_path=file_path) + for subscription in self._all_events_subscriptions.values(): + subscription.event_queue.put_nowait(file_close_event) + + def _update_opened_file_info( + self, file_path: pathlib.Path, new_file_content: str + ) -> None: + # this method expects `file_path` is opened + opened_file_info = self._opened_files[file_path] + 
opened_file_info.content = new_file_content + new_version = hash(new_file_content) # or just increase? + opened_file_info.version = str(new_version) + + @contextlib.asynccontextmanager + async def subscribe_to_all_events( + self, + ) -> collections.abc.AsyncIterator[ifileeditor.FileEvent]: + new_subscription = SubscriptionToAllEvents() + self._all_events_subscriptions[self] = new_subscription + iterator = QueueIterator(queue=new_subscription.event_queue) + + try: + yield iterator + finally: + del self._all_events_subscriptions[self] + await iterator._queue.put(None) + + @contextlib.asynccontextmanager + async def read_file( + self, file_path: pathlib.Path, block: bool = False + ) -> collections.abc.AsyncIterator[ifileeditor.FileInfo]: + if file_path in self._blocked_files: + blocked_file_info = self._blocked_files[file_path] + if blocked_file_info.blocked_by == self: + raise ValueError( + f"{file_path} is blocked by this session, cannot read it" + ) + + unblock_event = blocked_file_info.unblock_event + await unblock_event.wait() + + if block: + blocked_file_info = BlockedFileInfo( + blocked_by=self, unblock_event=asyncio.Event() + ) + self._blocked_files[file_path] = blocked_file_info + try: + if file_path in self._opened_files: + opened_file_info = self._opened_files[file_path] + file_content = opened_file_info.content + file_version = opened_file_info.version + else: + file_content = await self._file_manager.get_content(file_path=file_path) + file_version = await self._file_manager.get_file_version( + file_path=file_path + ) + file_info = ifileeditor.FileInfo(content=file_content, version=file_version) + yield file_info + finally: + if block: + blocked_file_info = self._blocked_files.pop(file_path) + blocked_file_info.unblock_event.set() + + async def read_file_version(self, file_path: pathlib.Path) -> str: + if file_path in self._blocked_files: + blocked_file_info = self._blocked_files[file_path] + unblock_event = blocked_file_info.unblock_event + await 
unblock_event.wait() + + if file_path in self._opened_files: + opened_file_info = self._opened_files[file_path] + file_version = opened_file_info.version + else: + file_version = await self._file_manager.get_file_version( + file_path=file_path + ) + return file_version + + async def save_file(self, file_path: pathlib.Path, file_content: str) -> None: + self.logger.debug(f"Save file {file_path}") + await self._file_manager.save_file( + file_path=file_path, file_content=file_content + ) + + if file_path in self._opened_files: + self._update_opened_file_info( + file_path=file_path, new_file_content=file_content + ) + + if file_path in self._file_change_subscriptions or len(self._all_events_subscriptions) > 0: + file_change = ifileeditor.FileChangeFull(text=file_content) + self._notify_subscribers_about_file_change( + file_path=file_path, change=file_change + ) + + +class FileEditor(ifileeditor.IFileEditor): + def __init__( + self, logger: ilogger.ILogger, file_manager: ifilemanager.IFileManager + ) -> None: + self.logger = logger + self.file_manager = file_manager + + self._opened_files: dict[pathlib.Path, OpenedFileInfo] = {} + self._blocked_files: dict[pathlib.Path, BlockedFileInfo] = {} + self._sessions: list[FileEditorSession] = [] + self._author_by_session: dict[ + ifileeditor.IFileEditorSession, ifileeditor.FileOperationAuthor + ] = {} + self._file_change_subscriptions: dict[ + pathlib.Path, + dict[ + ifileeditor.IFileEditorSession, + SubscriptionToFileChanges, + ], + ] = {} + self._all_events_subscriptions: dict[ + ifileeditor.IFileEditorSession, + SubscriptionToAllEvents, + ] = {} + + @contextlib.asynccontextmanager + async def session( + self, author: ifileeditor.FileOperationAuthor + ) -> collections.abc.AsyncIterator[ifileeditor.IFileEditorSession]: + new_session = FileEditorSession( + logger=self.logger, + author=author, + file_manager=self.file_manager, + opened_files=self._opened_files, + blocked_files=self._blocked_files, + 
file_change_subscriptions=self._file_change_subscriptions, + all_events_subscriptions=self._all_events_subscriptions, + ) + self._sessions.append(new_session) + self._author_by_session[new_session] = author + try: + yield new_session + finally: + new_session.close() + self._sessions.remove(new_session) + del self._author_by_session[new_session] + + def get_opened_files(self) -> list[pathlib.Path]: + return list(self._opened_files.keys()) diff --git a/finecode_extension_runner/src/finecode_extension_runner/impls/file_manager.py b/finecode_extension_runner/src/finecode_extension_runner/impls/file_manager.py index 70ee436e..28e6b83a 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/impls/file_manager.py +++ b/finecode_extension_runner/src/finecode_extension_runner/impls/file_manager.py @@ -1,65 +1,25 @@ import hashlib import shutil from pathlib import Path -from typing import Callable from finecode_extension_api.interfaces import ifilemanager, ilogger -from finecode_extension_runner import domain class FileManager(ifilemanager.IFileManager): def __init__( self, - docs_owned_by_client: list[str], - get_document_func: Callable, - save_document_func: Callable, logger: ilogger.ILogger, ) -> None: - self.docs_owned_by_client = docs_owned_by_client - self.get_document_func = get_document_func - self.save_document_func = save_document_func self.logger = logger async def get_content(self, file_path: Path) -> str: - file_uri = f"file://{file_path.as_posix()}" - file_content: str = "" - - if file_uri in self.docs_owned_by_client: - # docs owned by client cannot be cached, always read from client - try: - document_info = await self.get_document_func(file_uri) - file_content = document_info.text - except domain.TextDocumentNotOpened: - file_content = self.read_content_file_from_fs(file_path=file_path) - else: - file_content = self.read_content_file_from_fs(file_path=file_path) + file_content = self.read_content_file_from_fs(file_path=file_path) return 
file_content async def get_file_version(self, file_path: Path) -> str: - file_uri = path_to_uri_str(file_path) file_version: str = "" - - if file_uri in self.docs_owned_by_client: - # read file from client - try: - document_info = await self.get_document_func(file_uri) - file_version = str(document_info.version) - except domain.TextDocumentNotOpened: - file_version = self.get_hash_of_file_from_fs(file_path=file_path) - else: - # TODO - # st = file_path.stat() - # file_version = f'{st.st_size},{st.st_mtime}' - # if st.st_size != old.st_size: - # return True - # if st.st_mtime != old.st_mtime: - # new_hash = Cache.hash_digest(res_src) - # if new_hash != old.hash: - # return True - # return False - - file_version = self.get_hash_of_file_from_fs(file_path=file_path) + file_version = self.get_hash_of_file_from_fs(file_path=file_path) # 12 chars is enough to distinguish. The whole value is 64 chars length and # is not really needed in logs @@ -68,12 +28,9 @@ async def get_file_version(self, file_path: Path) -> str: return file_version async def save_file(self, file_path: Path, file_content: str) -> None: - file_uri = path_to_uri_str(file_path) - if file_uri in self.docs_owned_by_client: - await self.save_document_func(file_uri, file_content) - else: - with open(file_path, "w") as f: - f.write(file_content) + self.logger.debug(f"Save file {file_path}") + with open(file_path, "w") as f: + f.write(file_content) async def create_dir( self, dir_path: Path, create_parents: bool = True, exist_ok: bool = True @@ -101,7 +58,3 @@ def get_hash_of_file_from_fs(self, file_path: Path) -> str: file_version = hashlib.file_digest(f, "sha256").hexdigest() return file_version - - -def path_to_uri_str(path: Path) -> str: - return f"file://{path.as_posix()}" diff --git a/finecode_extension_runner/src/finecode_extension_runner/impls/inmemory_cache.py b/finecode_extension_runner/src/finecode_extension_runner/impls/inmemory_cache.py index 0e8edc72..ea050d45 100644 --- 
a/finecode_extension_runner/src/finecode_extension_runner/impls/inmemory_cache.py +++ b/finecode_extension_runner/src/finecode_extension_runner/impls/inmemory_cache.py @@ -1,16 +1,20 @@ from pathlib import Path from typing import Any, TypeAlias -from finecode_extension_api.interfaces import icache, ifilemanager, ilogger +from finecode_extension_api.interfaces import icache, ifileeditor, ilogger CacheKeyType: TypeAlias = str class InMemoryCache(icache.ICache): + FILE_OPERATION_AUTHOR = ifileeditor.FileOperationAuthor( + id="InMemoryCache" + ) + def __init__( - self, file_manager: ifilemanager.IFileManager, logger: ilogger.ILogger + self, file_editor: ifileeditor.IFileEditor, logger: ilogger.ILogger ): - self.file_manager = file_manager + self.file_editor = file_editor self.logger = logger self.cache_by_file: dict[Path, dict[CacheKeyType, tuple[str, Any]]] = {} @@ -20,7 +24,10 @@ def __init__( async def save_file_cache( self, file_path: Path, file_version: str, key: CacheKeyType, value: Any ) -> None: - current_file_version = await self.file_manager.get_file_version(file_path) + async with self.file_editor.session( + author=self.FILE_OPERATION_AUTHOR + ) as session: + current_file_version = await session.read_file_version(file_path) if file_version != current_file_version: # `value` was created for older version of file, don't save it @@ -35,9 +42,9 @@ async def save_file_cache( async def get_file_cache(self, file_path: Path, key: CacheKeyType) -> Any: try: file_cache = self.cache_by_file[file_path] - except KeyError: + except KeyError as exception: self.logger.debug(f"No cache for file {file_path}, cache miss") - raise icache.CacheMissException() + raise icache.CacheMissException() from exception if key not in file_cache: self.logger.debug( @@ -45,7 +52,11 @@ async def get_file_cache(self, file_path: Path, key: CacheKeyType) -> Any: ) raise icache.CacheMissException() - current_file_version = await self.file_manager.get_file_version(file_path) + async with 
self.file_editor.session( + author=self.FILE_OPERATION_AUTHOR + ) as session: + current_file_version = await session.read_file_version(file_path) + cached_file_version = file_cache[key][0] if cached_file_version != current_file_version: self.logger.debug( diff --git a/finecode_extension_runner/src/finecode_extension_runner/impls/lsp_client.py b/finecode_extension_runner/src/finecode_extension_runner/impls/lsp_client.py new file mode 100644 index 00000000..e9a61bac --- /dev/null +++ b/finecode_extension_runner/src/finecode_extension_runner/impls/lsp_client.py @@ -0,0 +1,200 @@ +from __future__ import annotations + +import collections.abc +import os +from pathlib import Path +from types import TracebackType +from typing import Any, Self + +from finecode_extension_api.interfaces import ijsonrpcclient, ilspclient + + +class LspSessionImpl(ilspclient.ILspSession): + """ILspSession implementation. Wraps an IJsonRpcSession with LSP lifecycle.""" + + def __init__( + self, + json_rpc_session: ijsonrpcclient.IJsonRpcSession, + root_uri: str, + workspace_folders: list[dict[str, str]] | None, + initialization_options: dict[str, Any] | None, + client_capabilities: dict[str, Any] | None, + ) -> None: + self._session = json_rpc_session + self._root_uri = root_uri + self._workspace_folders = workspace_folders + self._initialization_options = initialization_options + self._client_capabilities = ( + client_capabilities + if client_capabilities is not None + else _default_client_capabilities() + ) + self._server_capabilities: dict[str, Any] = {} + self._server_info: dict[str, Any] | None = None + + # ------------------------------------------------------------------ + # Context manager + # ------------------------------------------------------------------ + + async def __aenter__(self) -> Self: + await self._session.__aenter__() + + # LSP initialize handshake + init_params: dict[str, Any] = { + "processId": os.getpid(), + "rootUri": self._root_uri, + "capabilities": 
self._client_capabilities, + } + if self._workspace_folders is not None: + init_params["workspaceFolders"] = self._workspace_folders + if self._initialization_options is not None: + init_params["initializationOptions"] = self._initialization_options + + init_result = await self._session.send_request( + "initialize", init_params, timeout=30.0 + ) + + if isinstance(init_result, dict): + self._server_capabilities = init_result.get("capabilities", {}) + self._server_info = init_result.get("serverInfo") + + await self._session.send_notification("initialized", {}) + + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + try: + await self._session.send_request("shutdown", timeout=10.0) + except Exception: + pass # Best effort + + try: + await self._session.send_notification("exit") + except Exception: + pass # Best effort + + await self._session.__aexit__(exc_type, exc_val, exc_tb) + + # ------------------------------------------------------------------ + # Async API (delegated) + # ------------------------------------------------------------------ + + async def send_request( + self, + method: str, + params: dict[str, Any] | None = None, + timeout: float | None = None, + ) -> Any: + return await self._session.send_request(method, params, timeout) + + async def send_notification( + self, + method: str, + params: dict[str, Any] | None = None, + ) -> None: + await self._session.send_notification(method, params) + + # ------------------------------------------------------------------ + # Sync API (delegated) + # ------------------------------------------------------------------ + + def send_request_sync( + self, + method: str, + params: dict[str, Any] | None = None, + timeout: float | None = None, + ) -> Any: + return self._session.send_request_sync(method, params, timeout) + + def send_notification_sync( + self, + method: str, + params: dict[str, Any] | None = 
None, + ) -> None: + self._session.send_notification_sync(method, params) + + # ------------------------------------------------------------------ + # Server-initiated messages (delegated) + # ------------------------------------------------------------------ + + def on_notification( + self, + method: str, + handler: collections.abc.Callable[ + [dict[str, Any] | None], collections.abc.Awaitable[None] + ], + ) -> None: + self._session.on_notification(method, handler) + + def on_request( + self, + method: str, + handler: collections.abc.Callable[ + [dict[str, Any] | None], collections.abc.Awaitable[Any] + ], + ) -> None: + self._session.on_request(method, handler) + + # ------------------------------------------------------------------ + # Properties + # ------------------------------------------------------------------ + + @property + def server_capabilities(self) -> dict[str, Any]: + return self._server_capabilities + + @property + def server_info(self) -> dict[str, Any] | None: + return self._server_info + + +class LspClientImpl(ilspclient.ILspClient): + """ILspClient implementation. 
Factory for LspSessionImpl.""" + + def __init__(self, json_rpc_client: ijsonrpcclient.IJsonRpcClient) -> None: + self._json_rpc_client = json_rpc_client + + def session( + self, + cmd: str, + root_uri: str, + workspace_folders: list[dict[str, str]] | None = None, + initialization_options: dict[str, Any] | None = None, + client_capabilities: dict[str, Any] | None = None, + cwd: Path | None = None, + env: dict[str, str] | None = None, + readable_id: str = "", + ) -> LspSessionImpl: + json_rpc_session = self._json_rpc_client.session( + cmd=cmd, cwd=cwd, env=env, readable_id=readable_id + ) + return LspSessionImpl( + json_rpc_session=json_rpc_session, + root_uri=root_uri, + workspace_folders=workspace_folders, + initialization_options=initialization_options, + client_capabilities=client_capabilities, + ) + + +def _default_client_capabilities() -> dict[str, Any]: + return { + "textDocument": { + "synchronization": { + "dynamicRegistration": False, + "didSave": True, + }, + "completion": {"dynamicRegistration": False}, + "hover": {"dynamicRegistration": False}, + "publishDiagnostics": {"relatedInformation": True}, + }, + "workspace": { + "workspaceFolders": True, + "configuration": True, + }, + } diff --git a/finecode_extension_runner/src/finecode_extension_runner/impls/project_info_provider.py b/finecode_extension_runner/src/finecode_extension_runner/impls/project_info_provider.py index 5f2d06dd..9d5630d0 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/impls/project_info_provider.py +++ b/finecode_extension_runner/src/finecode_extension_runner/impls/project_info_provider.py @@ -1,5 +1,6 @@ +import collections.abc import pathlib -from typing import Any, Awaitable, Callable +from typing import Any, Callable from finecode_extension_api.interfaces import iprojectinfoprovider @@ -8,10 +9,12 @@ class ProjectInfoProvider(iprojectinfoprovider.IProjectInfoProvider): def __init__( self, project_def_path_getter: Callable[[], pathlib.Path], - 
project_raw_config_getter: Callable[[str], Awaitable[dict[str, Any]]], + project_raw_config_getter: Callable[[str], collections.abc.Awaitable[dict[str, Any]]], + current_project_raw_config_version_getter: Callable[[], int] ) -> None: self.project_def_path_getter = project_def_path_getter self.project_raw_config_getter = project_raw_config_getter + self.current_project_raw_config_version_getter = current_project_raw_config_version_getter def get_current_project_dir_path(self) -> pathlib.Path: project_def_path = self.project_def_path_getter() @@ -38,3 +41,6 @@ async def get_project_raw_config( async def get_current_project_raw_config(self) -> dict[str, Any]: current_project_path = self.get_current_project_def_path() return await self.get_project_raw_config(project_def_path=current_project_path) + + def get_current_project_raw_config_version(self) -> int: + return self.current_project_raw_config_version_getter() diff --git a/finecode_extension_runner/src/finecode_extension_runner/impls/repository_credentials_provider.py b/finecode_extension_runner/src/finecode_extension_runner/impls/repository_credentials_provider.py new file mode 100644 index 00000000..67c9b8e6 --- /dev/null +++ b/finecode_extension_runner/src/finecode_extension_runner/impls/repository_credentials_provider.py @@ -0,0 +1,47 @@ +import sys + +if sys.version_info < (3, 12): + from typing_extensions import override +else: + from typing import override + + +from finecode_extension_api.interfaces.irepositorycredentialsprovider import ( + IRepositoryCredentialsProvider, + Repository, + RepositoryCredentials, +) + + +class ConfigRepositoryCredentialsProvider(IRepositoryCredentialsProvider): + """ + Repository credentials provider that stores credentials and repositories in memory. 
+ """ + + def __init__(self) -> None: + self._credentials_by_repository: dict[str, RepositoryCredentials] = {} + self._repositories: dict[str, Repository] = {} + + @override + def get_credentials(self, repository_name: str) -> RepositoryCredentials | None: + return self._credentials_by_repository.get(repository_name) + + @override + def set_credentials( + self, repository_name: str, username: str, password: str + ) -> None: + self._credentials_by_repository[repository_name] = RepositoryCredentials( + username=username, password=password + ) + + @override + def add_repository(self, name: str, url: str) -> None: + self._repositories[name] = Repository(name=name, url=url) + + @override + def get_repository(self, name: str) -> Repository | None: + return self._repositories.get(name) + + @override + def get_all_repositories(self) -> list[Repository]: + return list(self._repositories.values()) diff --git a/finecode_extension_runner/src/finecode_extension_runner/impls/service_registry.py b/finecode_extension_runner/src/finecode_extension_runner/impls/service_registry.py new file mode 100644 index 00000000..395ba1db --- /dev/null +++ b/finecode_extension_runner/src/finecode_extension_runner/impls/service_registry.py @@ -0,0 +1,23 @@ +import typing + +from finecode_extension_api.interfaces import iserviceregistry +from finecode_extension_runner.di import _state + +T = typing.TypeVar("T") + + +class ServiceRegistry(iserviceregistry.IServiceRegistry): + def register_impl( + self, interface: type[T], impl: type[T], singleton: bool = False + ) -> None: + async def factory(container: dict) -> T: + from finecode_extension_runner._services.run_action import ( + resolve_func_args_with_di, + ) + + args = await resolve_func_args_with_di( + impl.__init__, params_to_ignore=["self"] + ) + return impl(**args) + + _state.factories[interface] = factory diff --git a/finecode_extension_runner/src/finecode_extension_runner/logs.py 
b/finecode_extension_runner/src/finecode_extension_runner/logs.py index 942bcdb8..c9d4d301 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/logs.py +++ b/finecode_extension_runner/src/finecode_extension_runner/logs.py @@ -1,6 +1,8 @@ import enum import io import sys +import inspect +import logging from pathlib import Path from loguru import logger @@ -49,8 +51,46 @@ def save_logs_to_file( logger.add(sys.stdout, level=log_level) + # Find the file with the largest ID in the log directory + log_dir_path = file_path.parent + max_id = 0 + + log_files_with_ids: list[tuple[int, Path]] = [] + if log_dir_path.exists(): + for log_file in log_dir_path.iterdir(): + if log_file.is_file() and log_file.suffix == '.log': + # Extract numeric ID from the end of the filename (before extension) + # first split by dot because loguru adds datetime after dot: + # ..log , we need stem without datetime + stem = log_file.stem.split('.')[0] + parts = stem.split('_') + last_part = parts[-1] + if last_part.isdigit(): + file_id = int(last_part) + max_id = max(max_id, file_id) + log_files_with_ids.append((file_id, log_file)) + + # Remove the oldest files if there are more than 10 + if len(log_files_with_ids) >= 10: + # Sort by ID (oldest first) + log_files_with_ids.sort(key=lambda x: x[0]) + # Keep only the 9 most recent, so after adding the new one we'll have 10 + files_to_remove = log_files_with_ids[:-9] + for _, log_file in files_to_remove: + try: + log_file.unlink() + logger.trace(f"Removed old log file: {log_file}") + except Exception as e: + logger.warning(f"Failed to remove old log file {log_file}: {e}") + + # Get next ID for new log file + next_id = max_id + 1 + + # Update file_path with the new ID + file_path_with_id = file_path.with_stem(file_path.stem + '_' + str(next_id)) + logger.add( - str(file_path), + str(file_path_with_id), rotation=rotation, retention=retention, level=log_level, @@ -70,4 +110,53 @@ def reset_log_level_for_group(group: str): del 
log_level_by_group[group] -__all__ = ["save_logs_to_file", "set_log_level_for_group", "reset_log_level_for_group"] +def setup_logging(log_level: str, log_file_path: Path) -> None: + logger.remove() + + # disable logging raw messages + # TODO: make configurable + # disable logging all raw sent messages + logger.configure(activation=[("pygls.protocol.json_rpc", False)]) + + # ~~extension runner communicates with workspace manager with tcp, we can print logs + # to stdout as well~~. See README.md + save_logs_to_file( + file_path=log_file_path, + log_level=log_level, + stdout=True, + ) + + # pygls uses standard python logger, intercept it and pass logs to loguru + class InterceptHandler(logging.Handler): + def emit(self, record: logging.LogRecord) -> None: + # Get corresponding Loguru level if it exists. + level: str | int + try: + level = logger.level(record.levelname).name + except ValueError: + level = record.levelno + + # Find caller from where originated the logged message. + frame, depth = inspect.currentframe(), 0 + while frame and ( + depth == 0 or frame.f_code.co_filename == logging.__file__ + ): + frame = frame.f_back + depth += 1 + + logger.opt(depth=depth, exception=record.exc_info).log( + level, record.getMessage() + ) + + logging.basicConfig(handlers=[InterceptHandler()], level=0, force=True) + + # TODO: make configurable + set_log_level_for_group( + "finecode_extension_runner.impls.file_manager", LogLevel.WARNING + ) + set_log_level_for_group( + "finecode_extension_runner.impls.inmemory_cache", LogLevel.WARNING + ) + + +__all__ = ["save_logs_to_file", "set_log_level_for_group", "reset_log_level_for_group", "setup_logging"] diff --git a/finecode_extension_runner/src/finecode_extension_runner/lsp_server.py b/finecode_extension_runner/src/finecode_extension_runner/lsp_server.py index 96be19ca..e0c8c429 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/lsp_server.py +++ 
b/finecode_extension_runner/src/finecode_extension_runner/lsp_server.py @@ -6,6 +6,7 @@ from __future__ import annotations import atexit +import collections.abc import dataclasses import functools import json @@ -13,18 +14,23 @@ import typing import pygls.exceptions as pygls_exceptions +from pygls.workspace import position_codec from loguru import logger from lsprotocol import types from pygls.lsp import server as lsp_server from pygls.io_ import StdoutWriter, run_async - from finecode_extension_api import code_action -from finecode_extension_runner import domain, schemas, services +from finecode_extension_api.interfaces import ifileeditor +from pydantic.dataclasses import dataclass as pydantic_dataclass + +from finecode_extension_runner import schemas, services from finecode_extension_runner._services import run_action as run_action_service +from finecode_extension_runner.di import resolver import sys import io import threading +import contextlib import asyncio @@ -59,6 +65,8 @@ async def readline(self) -> bytes: return line except TimeoutError: ... 
+ except ValueError as exception: + logger.warning(str(exception)) return bytes() async def readexactly(self, n: int) -> bytes: @@ -88,11 +96,26 @@ def stop(self) -> None: class CustomLanguageServer(lsp_server.LanguageServer): + def __init__(self, *args, **kwargs) -> None: + super().__init__(*args, **kwargs) + + self._finecode_async_tasks: list[asyncio.Task] = [] + self._finecode_exit_stack = contextlib.AsyncExitStack() + self._finecode_file_editor_session: ifileeditor.IFileEditorSession + self._finecode_file_operation_author = ifileeditor.FileOperationAuthor(id=self.name) + def report_server_error(self, error: Exception, source: lsp_server.ServerErrors): + logger.info(f'->1 {self._stop_event.is_set()}') # return logging of error (`lsp_server.LanguageServer` overwrites it) super(lsp_server.LanguageServer, self).report_server_error(error, source) + logger.info(f'->2 {self._stop_event.is_set()}') + # log traceback of exception for easier analysis + logger.exception(error) # send to client - super().report_server_error(error, source) + if not isinstance(error, ValueError): + # TODO: check message 'write to closed file' + super().report_server_error(error, source) + logger.info(f'->3 {self._stop_event.is_set()}') async def start_io_async( self, stdin: io.BinaryIO | None = None, stdout: io.BinaryIO | None = None @@ -120,11 +143,89 @@ async def start_io_async( logger.info("exception handler in json rpc server") pass finally: + logger.info(f'->5 {self._stop_event.is_set()}') reader.stop() self.shutdown() + # shutdown is synchronous, so close exit stack here + await self._finecode_exit_stack.aclose() + logger.debug("Finecode async exit stack closed") + + def start_tcp(self, host: str, port: int) -> None: + """Starts TCP server.""" + logger.info("Starting TCP server on %s:%s", host, port) + + self._stop_event = stop_event = threading.Event() + + async def lsp_connection( + reader: asyncio.StreamReader, writer: asyncio.StreamWriter + ): + logger.debug("Connected to client") 
+ self.protocol.set_writer(writer) # type: ignore + await run_async( + stop_event=stop_event, + reader=reader, + protocol=self.protocol, + logger=logger, + error_handler=self.report_server_error, + ) + logger.debug("Main loop finished") + self.shutdown() + + async def tcp_server(h: str, p: int): + self._server = await asyncio.start_server(lsp_connection, h, p) + + addrs = ", ".join(str(sock.getsockname()) for sock in self._server.sockets) + logger.info(f"Serving on {addrs}") + + try: + async with self._server: + await self._server.serve_forever() + finally: + # shutdown is synchronous, so close exit stack here + # TODO: test + await self._finecode_exit_stack.aclose() + + try: + asyncio.run(tcp_server(host, port)) + except asyncio.CancelledError: + logger.debug("Server was cancelled") + + + +def file_editor_file_change_to_lsp_text_edit(file_change: ifileeditor.FileChange) -> types.TextEdit: + if isinstance(file_change, ifileeditor.FileChangeFull): + # temporary workaround until we extend "applyWorkspaceEdit" from LSP with + # proper full document replacement without knowing original range + range_start_line = 0 + range_start_char = 0 + range_end_line = 999999 + range_end_char = 999999 + else: + range_start_line = file_change.range.start.line + range_start_char = file_change.range.start.character + range_end_line = file_change.range.end.line + range_end_char = file_change.range.end.character + + return types.TextEdit( + range=types.Range( + start=types.Position(line=range_start_line, character=range_start_char), + end=types.Position(line=range_end_line, character=range_end_char) + ), + new_text=file_change.text + ) + + +def position_from_client_units( + self, lines: collections.abc.Sequence[str], position: types.Position +) -> types.Position: + return position + def create_lsp_server() -> lsp_server.LanguageServer: + # avoid recalculating of positions by pygls + position_codec.PositionCodec.position_from_client_units = position_from_client_units + server = 
CustomLanguageServer("FineCode_Extension_Runner_Server", "v1") register_initialized_feature = server.feature(types.INITIALIZED) @@ -141,6 +242,9 @@ def create_lsp_server() -> lsp_server.LanguageServer: register_document_did_close_feature = server.feature(types.TEXT_DOCUMENT_DID_CLOSE) register_document_did_close_feature(_document_did_close) + + register_document_did_change_feature = server.feature(types.TEXT_DOCUMENT_DID_CHANGE) + register_document_did_change_feature(_document_did_change) register_update_config_feature = server.command("finecodeRunner/updateConfig") register_update_config_feature(update_config) @@ -172,17 +276,24 @@ def send_partial_result( server.progress(types.ProgressParams(token=token, value=partial_result_json)) run_action_service.set_partial_result_sender(send_partial_result) - + return server -def _on_initialized(ls: lsp_server.LanguageServer, params: types.InitializedParams): +def _on_initialized(ls: CustomLanguageServer, params: types.InitializedParams): logger.info(f"initialized {params}") -def _on_shutdown(ls: lsp_server.LanguageServer, params): +def _on_shutdown(ls: CustomLanguageServer, params): logger.info("Shutdown extension runner") services.shutdown_all_action_handlers() + + logger.debug("Stop Finecode async tasks") + for task in ls._finecode_async_tasks: + if not task.done(): + task.cancel() + ls._finecode_async_tasks = [] + logger.info("Shutdown end") return None @@ -191,73 +302,48 @@ def _on_exit(ls: lsp_server.LanguageServer, params): logger.info("Exit extension runner") -def _document_did_open( - ls: lsp_server.LanguageServer, params: types.DidOpenTextDocumentParams +def uri_to_path(uri: str) -> pathlib.Path: + return pathlib.Path(uri.removeprefix('file://')) + + +async def _document_did_open( + ls: CustomLanguageServer, params: types.DidOpenTextDocumentParams ): logger.info(f"document did open: {params.text_document.uri}") - services.document_did_open(params.text_document.uri) + # 
services.document_did_open(params.text_document.uri) + file_path = uri_to_path(uri=params.text_document.uri) + + await ls._finecode_file_editor_session.open_file(file_path=file_path) -def _document_did_close( - ls: lsp_server.LanguageServer, params: types.DidCloseTextDocumentParams +async def _document_did_close( + ls: CustomLanguageServer, params: types.DidCloseTextDocumentParams ): logger.info(f"document did close: {params.text_document.uri}") - services.document_did_close(params.text_document.uri) + file_path = uri_to_path(uri=params.text_document.uri) + + await ls._finecode_file_editor_session.close_file(file_path=file_path) -async def document_requester(server: lsp_server.LanguageServer, uri: str): - try: - document = await asyncio.wait_for( - server.protocol.send_request_async("documents/get", params={"uri": uri}), 10 - ) - except TimeoutError as error: - raise error - except pygls_exceptions.JsonRpcInternalError as error: - if error.message == "Exception: Document is not opened": - raise domain.TextDocumentNotOpened() - else: - raise error - - return domain.TextDocumentInfo( - uri=document.uri, version=document.version, text=document.text - ) +def lsp_document_change_to_file_editor_change(lsp_change: types.TextDocumentContentChangeEvent) -> ifileeditor.FileChange: + if isinstance(lsp_change, types.TextDocumentContentChangePartial): + return ifileeditor.FileChangePartial(range=ifileeditor.Range(start=ifileeditor.Position(line=lsp_change.range.start.line, character=lsp_change.range.start.character), end=ifileeditor.Position(line=lsp_change.range.end.line, character=lsp_change.range.end.character)), text=lsp_change.text) + elif isinstance(lsp_change, types.TextDocumentContentChangeWholeDocument): + return ifileeditor.FileChangeFull(text=lsp_change.text) + else: + logger.error(f"Unexpected type of document change from LSP client: {type(lsp_change)}") -async def document_saver(server: lsp_server.LanguageServer, uri: str, content: str): - try: - document = await 
asyncio.wait_for( - server.protocol.send_request_async("documents/get", params={"uri": uri}), 10 - ) - except TimeoutError as error: - raise error +async def _document_did_change( + ls: CustomLanguageServer, params: types.DidChangeTextDocumentParams +): + logger.info(f"document did change: {params.text_document.uri} {params.text_document.version}") + file_path = uri_to_path(uri=params.text_document.uri) - document_lines = document.text.split("\n") - params = types.ApplyWorkspaceEditParams( - edit=types.WorkspaceEdit( - # dict seems to be incorrectly unstructured on client(pygls issue?) - # use document_changes instead of changes - document_changes=[ - types.TextDocumentEdit( - text_document=types.OptionalVersionedTextDocumentIdentifier( - uri=uri - ), - edits=[ - types.TextEdit( - range=types.Range( - start=types.Position(line=0, character=0), - end=types.Position( - line=len(document_lines), - character=len(document_lines[-1]), - ), - ), - new_text=content, - ) - ], - ) - ] - ) - ) - await server.workspace_apply_edit_async(params) + for change in params.content_changes: + logger.trace(str(change)) + file_editor_change = lsp_document_change_to_file_editor_change(lsp_change=change) + await ls._finecode_file_editor_session.change_file(file_path=file_path, change=file_editor_change) async def get_project_raw_config( @@ -279,7 +365,7 @@ async def get_project_raw_config( async def update_config( - ls: lsp_server.LanguageServer, + ls: CustomLanguageServer, working_dir: pathlib.Path, project_name: str, project_def_path: pathlib.Path, @@ -311,13 +397,48 @@ async def update_config( for action in actions }, action_handler_configs=action_handler_configs, + services=[ + schemas.ServiceDeclaration( + interface=svc["interface"], + source=svc["source"], + ) + for svc in config.get("services", []) + ], + handlers_to_initialize=config.get("handlers_to_initialize"), ) response = await services.update_config( request=request, - document_requester=functools.partial(document_requester, 
ls), - document_saver=functools.partial(document_saver, ls), project_raw_config_getter=functools.partial(get_project_raw_config, ls), ) + # update_config calls DI bootstrap, we can instantiate file_editor_session first + # here + file_editor = await resolver.get_service_instance(ifileeditor.IFileEditor) + ls._finecode_file_editor_session = await ls._finecode_exit_stack.enter_async_context(file_editor.session(author=ls._finecode_file_operation_author)) + + # asyncio event loop is currently available only in handlers, not in server factory, + # so start task here + async def send_changed_files_to_lsp_client() -> None: + async with ls._finecode_file_editor_session.subscribe_to_changes_of_opened_files() as file_change_events: + async for file_change_event in file_change_events: + if file_change_event.author != ls._finecode_file_operation_author: + # someone else changed the file, send these changes to LSP client + params = types.ApplyWorkspaceEditParams( + edit=types.WorkspaceEdit( + document_changes=[ + types.TextDocumentEdit( + text_document=types.OptionalVersionedTextDocumentIdentifier(uri=f'file://{file_change_event.file_path.as_posix()}'), + edits=[ + file_editor_file_change_to_lsp_text_edit(file_change=file_change_event.change) + ] + ), + ] + ) + ) + await ls.workspace_apply_edit_async(params) + + send_changed_files_task = asyncio.create_task(send_changed_files_to_lsp_client()) + ls._finecode_async_tasks.append(send_changed_files_task) + return response.to_dict() except Exception as e: logger.exception(f"Update config error: {e}") @@ -350,11 +471,16 @@ async def run_action( ): logger.trace(f"Run action: {action_name}") request = schemas.RunActionRequest(action_name=action_name, params=params) - options_schema = schemas.RunActionOptions(**options if options is not None else {}) + + # use pydantic dataclass to convert dict to dataclass instance recursively + # (default dataclass constructor doesn't handle nested items, it stores them just + # as dict) + 
options_type = pydantic_dataclass(schemas.RunActionOptions) + options_schema = options_type(**options if options is not None else {}) status: str = "success" try: - response = await services.run_action(request=request, options=options_schema) + response = await services.run_action_raw(request=request, options=options_schema) except Exception as exception: if isinstance(exception, services.StopWithResponse): status = "stopped" @@ -375,12 +501,15 @@ async def run_action( # # custom json encoder converts dict values and `convert_path_keys` is used to # convert dict keys - result_dict = convert_path_keys(response.to_dict()["result"]) - result_str = json.dumps(result_dict, cls=CustomJSONEncoder) + result_by_format = response.to_dict()["result_by_format"] + converted_result_by_format = { + fmt: convert_path_keys(result) if isinstance(result, dict) else result + for fmt, result in result_by_format.items() + } + result_str = json.dumps(converted_result_by_format, cls=CustomJSONEncoder) return { "status": status, - "result": result_str, - "format": response.format, + "result_by_format": result_str, "return_code": response.return_code, } diff --git a/finecode_extension_runner/src/finecode_extension_runner/schemas.py b/finecode_extension_runner/src/finecode_extension_runner/schemas.py index 66878975..6af5ee51 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/schemas.py +++ b/finecode_extension_runner/src/finecode_extension_runner/schemas.py @@ -1,7 +1,9 @@ -from dataclasses import asdict, dataclass +from dataclasses import asdict, dataclass, field from pathlib import Path from typing import Any, Literal +from finecode_extension_api import code_action + @dataclass class BaseSchema: @@ -24,6 +26,12 @@ class Action(BaseSchema): config: dict[str, Any] | None = None +@dataclass +class ServiceDeclaration(BaseSchema): + interface: str + source: str + + @dataclass class UpdateConfigRequest(BaseSchema): working_dir: Path @@ -31,6 +39,11 @@ class 
UpdateConfigRequest(BaseSchema): project_def_path: Path actions: dict[str, Action] action_handler_configs: dict[str, dict[str, Any]] + services: list[ServiceDeclaration] = field(default_factory=list) + # If provided, eagerly instantiate these handlers after config update. + # Keys are action names, values are lists of handler names within that action. + # None means no eager initialization (lazy, on first use). + handlers_to_initialize: dict[str, list[str]] | None = None @dataclass @@ -45,13 +58,13 @@ class RunActionRequest(BaseSchema): @dataclass class RunActionOptions(BaseSchema): + meta: code_action.RunActionMeta partial_result_token: int | str | None = None - result_format: Literal["json"] | Literal["string"] = "json" + result_formats: list[Literal["json"] | Literal["string"]] = field(default_factory=lambda: ["json"]) @dataclass class RunActionResponse(BaseSchema): return_code: int # result can be empty(=None) e.g. if it was sent as a list of partial results - result: dict[str, Any] | str | None - format: Literal["json"] | Literal["string"] | Literal["styled_text_json"] = "json" + result_by_format: dict[str, dict[str, Any] | str] | None diff --git a/finecode_extension_runner/src/finecode_extension_runner/services.py b/finecode_extension_runner/src/finecode_extension_runner/services.py index 46d34b54..aa75fa8f 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/services.py +++ b/finecode_extension_runner/src/finecode_extension_runner/services.py @@ -1,3 +1,6 @@ +import json +import collections.abc +import hashlib import importlib import sys import types @@ -5,38 +8,53 @@ from pathlib import Path from loguru import logger +from finecode_extension_api import service from finecode_extension_runner import context, domain, global_state, schemas from finecode_extension_runner._services.run_action import ( ActionFailedException, StopWithResponse, - run_action, + run_action_raw, + create_action_exec_info, + ensure_handler_instantiated, ) from 
finecode_extension_runner.di import bootstrap as di_bootstrap +def _compute_request_hash(request: schemas.UpdateConfigRequest) -> int: + """Compute a hash of the request object for version tracking.""" + request_dict = request.to_dict() + # Convert Path objects to strings for JSON serialization + request_dict["working_dir"] = str(request_dict["working_dir"]) + request_dict["project_def_path"] = str(request_dict["project_def_path"]) + + # Sort keys for consistent hashing + request_json = json.dumps(request_dict, sort_keys=True) + hash_bytes = hashlib.sha256(request_json.encode()).digest() + # Convert first 8 bytes to integer for version number + return int.from_bytes(hash_bytes[:8], byteorder="big") + + async def update_config( request: schemas.UpdateConfigRequest, - document_requester: typing.Callable, - document_saver: typing.Callable, project_raw_config_getter: typing.Callable[ - [str], typing.Awaitable[dict[str, typing.Any]] + [str], collections.abc.Awaitable[dict[str, typing.Any]] ], ) -> schemas.UpdateConfigResponse: project_dir_path = Path(request.working_dir) - actions: dict[str, domain.Action] = {} + actions: dict[str, domain.ActionDeclaration] = {} for action_name, action_schema_obj in request.actions.items(): - handlers: list[domain.ActionHandler] = [] + handlers: list[domain.ActionHandlerDeclaration] = [] for handler_obj in action_schema_obj.handlers: handlers.append( - domain.ActionHandler( + domain.ActionHandlerDeclaration( name=handler_obj.name, source=handler_obj.source, config=handler_obj.config, ) ) - action = domain.Action( + action = domain.ActionDeclaration( name=action_name, config=action_schema_obj.config, handlers=handlers, @@ -53,6 +71,7 @@ async def update_config( action_handler_configs=request.action_handler_configs, ), ) + global_state.runner_context.project_config_version = _compute_request_hash(request) # currently update_config is called only once directly after runner start. So we can # bootstrap here. 
Should be changed after adding updating configuration on the fly. @@ -71,17 +90,112 @@ def cache_dir_path_getter() -> Path: return project_cache_dir + def current_project_raw_config_version_getter() -> int: + return global_state.runner_context.project_config_version + + def actions_names_getter() -> list[str]: + assert global_state.runner_context is not None + return list(global_state.runner_context.project.actions.keys()) + + def action_by_name_getter(action_name: str) -> domain.ActionDeclaration: + assert global_state.runner_context is not None + return global_state.runner_context.project.actions[action_name] + + def current_env_name_getter() -> str: + return global_state.env_name + + handler_packages = { + handler.source.split(".")[0] + for action in actions.values() + for handler in action.handlers + } | { + svc.source.split(".")[0] for svc in request.services + } + di_bootstrap.bootstrap( - get_document_func=document_requester, - save_document_func=document_saver, project_def_path_getter=project_def_path_getter, project_raw_config_getter=project_raw_config_getter, cache_dir_path_getter=cache_dir_path_getter, + current_project_raw_config_version_getter=current_project_raw_config_version_getter, + actions_names_getter=actions_names_getter, + action_by_name_getter=action_by_name_getter, + current_env_name_getter=current_env_name_getter, + handler_packages=handler_packages, + service_declarations=request.services, ) + if request.handlers_to_initialize is not None: + await initialize_handlers(request.handlers_to_initialize) + return schemas.UpdateConfigResponse() +async def initialize_handlers( + handlers_by_action: dict[str, list[str]], +) -> None: + """Eagerly instantiate and initialize handlers. + + This is called after update_config to pre-instantiate handlers so that + services (like LSP services) are started early rather than on first use. + + Args: + handlers_by_action: mapping of action name → list of handler names + to eagerly initialize. 
+ """ + if global_state.runner_context is None: + logger.warning("Cannot initialize handlers: runner context is not set") + return + + runner_context = global_state.runner_context + project = runner_context.project + + for action_name, handler_names in handlers_by_action.items(): + action_def = project.actions.get(action_name) + if action_def is None: + logger.warning( + f"Action '{action_name}' not found, skipping handler initialization" + ) + continue + + if action_name in runner_context.action_cache_by_name: + action_cache = runner_context.action_cache_by_name[action_name] + else: + action_cache = domain.ActionCache() + runner_context.action_cache_by_name[action_name] = action_cache + + if action_cache.exec_info is None: + action_cache.exec_info = create_action_exec_info(action_def) + + handlers_to_init = [ + h for h in action_def.handlers if h.name in handler_names + ] + for handler in handlers_to_init: + if handler.name in action_cache.handler_cache_by_name: + handler_cache = action_cache.handler_cache_by_name[handler.name] + if handler_cache.instance is not None: + continue + else: + handler_cache = domain.ActionHandlerCache() + action_cache.handler_cache_by_name[handler.name] = handler_cache + + try: + await ensure_handler_instantiated( + handler=handler, + handler_cache=handler_cache, + action_exec_info=action_cache.exec_info, + runner_context=runner_context, + ) + logger.trace( + f"Eagerly initialized handler '{handler.name}' " + f"for action '{action_name}'" + ) + except Exception as e: + logger.error( + f"Failed to eagerly initialize handler '{handler.name}' " + f"for action '{action_name}': {e}" + ) + + def reload_action(action_name: str) -> None: if global_state.runner_context is None: # TODO: raise error @@ -109,7 +223,10 @@ def reload_action(action_name: str) -> None: if handler_cache.exec_info is not None: shutdown_action_handler( action_handler_name=handler_name, + handler_instance=handler_cache.instance, exec_info=handler_cache.exec_info, + 
used_services=handler_cache.used_services, + runner_context=global_state.runner_context, ) del global_state.runner_context.action_cache_by_name[action_name] @@ -149,24 +266,18 @@ def resolve_package_path(package_name: str) -> str: package_path = importlib.util.find_spec( package_name ).submodule_search_locations[0] - except Exception: - raise ValueError(f"Cannot find package {package_name}") + except Exception as exception: + raise ValueError(f"Cannot find package {package_name}") from exception return package_path -def document_did_open(document_uri: str) -> None: - if global_state.runner_context is not None: - global_state.runner_context.docs_owned_by_client.append(document_uri) - - -def document_did_close(document_uri: str) -> None: - if global_state.runner_context is not None: - global_state.runner_context.docs_owned_by_client.remove(document_uri) - - def shutdown_action_handler( - action_handler_name: str, exec_info: domain.ActionHandlerExecInfo + action_handler_name: str, + handler_instance: domain.ActionHandlerDeclaration | None, + exec_info: domain.ActionHandlerExecInfo, + used_services: list[service.Service], + runner_context: context.RunnerContext, ) -> None: # action handler exec info expected to exist in runner_context if exec_info.status == domain.ActionHandlerExecInfoStatus.SHUTDOWN: @@ -183,6 +294,19 @@ def shutdown_action_handler( logger.error(f"Failed to shutdown action {action_handler_name}: {e}") exec_info.status = domain.ActionHandlerExecInfoStatus.SHUTDOWN + if handler_instance is not None: + for used_service in used_services: + running_service_info = runner_context.running_services[used_service] + running_service_info.used_by.remove(handler_instance) + if len(running_service_info.used_by) == 0: + if isinstance(used_service, service.DisposableService): + try: + used_service.dispose() + logger.trace(f"Disposed service: {used_service}") + except Exception as exception: + logger.error(f"Failed to dispose service: {used_service}") + 
logger.exception(exception) + def shutdown_all_action_handlers() -> None: if global_state.runner_context is not None: @@ -195,7 +319,10 @@ def shutdown_all_action_handlers() -> None: if handler_cache.exec_info is not None: shutdown_action_handler( action_handler_name=handler_name, + handler_instance=handler_cache.instance, exec_info=handler_cache.exec_info, + used_services=handler_cache.used_services, + runner_context=global_state.runner_context, ) diff --git a/finecode_extension_runner/src/finecode_extension_runner/start.py b/finecode_extension_runner/src/finecode_extension_runner/start.py index e8b0f518..2e5e23fa 100644 --- a/finecode_extension_runner/src/finecode_extension_runner/start.py +++ b/finecode_extension_runner/src/finecode_extension_runner/start.py @@ -1,5 +1,4 @@ -import inspect -import logging +import os import socket import sys @@ -7,61 +6,14 @@ import finecode_extension_runner.global_state as global_state import finecode_extension_runner.lsp_server as extension_runner_lsp -from finecode_extension_runner import logs -def start_runner_sync(env_name: str) -> None: - logger.remove() - # disable logging raw messages - # TODO: make configurable - # logger.configure(activation=[("pygls.protocol.json_rpc", False)]) - # ~~extension runner communicates with workspace manager with tcp, we can print logs - # to stdout as well~~. See README.md +def start_runner_sync() -> None: assert global_state.project_dir_path is not None - logs.save_logs_to_file( - file_path=global_state.project_dir_path - / ".venvs" - / env_name - / "logs" - / "runner.log", - log_level=global_state.log_level, - stdout=True, - ) - - # pygls uses standard python logger, intercept it and pass logs to loguru - class InterceptHandler(logging.Handler): - def emit(self, record: logging.LogRecord) -> None: - # Get corresponding Loguru level if it exists. 
- level: str | int - try: - level = logger.level(record.levelname).name - except ValueError: - level = record.levelno - - # Find caller from where originated the logged message. - frame, depth = inspect.currentframe(), 0 - while frame and ( - depth == 0 or frame.f_code.co_filename == logging.__file__ - ): - frame = frame.f_back - depth += 1 - - logger.opt(depth=depth, exception=record.exc_info).log( - level, record.getMessage() - ) - - logging.basicConfig(handlers=[InterceptHandler()], level=0, force=True) - - # TODO: make configurable - logs.set_log_level_for_group( - "finecode_extension_runner.impls.file_manager", logs.LogLevel.WARNING - ) - logs.set_log_level_for_group( - "finecode_extension_runner.impls.inmemory_cache", logs.LogLevel.WARNING - ) logger.info(f"Python executable: {sys.executable}") logger.info(f"Project path: {global_state.project_dir_path}") + logger.info(f"Process id: {os.getpid()}") server = extension_runner_lsp.create_lsp_server() # asyncio.run(server.start_io_async()) diff --git a/finecode_httpclient/.gitignore b/finecode_httpclient/.gitignore new file mode 100644 index 00000000..2e6d867e --- /dev/null +++ b/finecode_httpclient/.gitignore @@ -0,0 +1,7 @@ +*.egg-info +__pycache__ +*.pyc +.pytest_cache +dist +build +.venvs diff --git a/finecode_httpclient/README.md b/finecode_httpclient/README.md new file mode 100644 index 00000000..35b069e9 --- /dev/null +++ b/finecode_httpclient/README.md @@ -0,0 +1,3 @@ +# FineCode HTTP Client + +HTTP client implementation for FineCode using httpx. 
diff --git a/finecode_httpclient/pyproject.toml b/finecode_httpclient/pyproject.toml new file mode 100644 index 00000000..38776e4b --- /dev/null +++ b/finecode_httpclient/pyproject.toml @@ -0,0 +1,26 @@ +[project] +name = "finecode_httpclient" +version = "0.1.0a1" +description = "HTTP client implementation for FineCode" +authors = [{ name = "Vladyslav Hnatiuk", email = "aders1234@gmail.com" }] +readme = "README.md" +requires-python = ">=3.11, <= 3.14" +dependencies = ["httpx==0.28.*", "finecode_extension_api~=0.4.0a0"] + +[dependency-groups] +dev_workspace = ["finecode~=0.4.0a0", "finecode_dev_common_preset~=0.3.0a0"] + +[tool.finecode] +presets = [{ source = "finecode_dev_common_preset" }] + +[tool.finecode.env.dev_workspace.dependencies] +finecode_dev_common_preset = { path = "../finecode_dev_common_preset", editable = true } +finecode = { path = "../", editable = true } +finecode_extension_runner = { path = "../finecode_extension_runner", editable = true } +finecode_extension_api = { path = "../finecode_extension_api", editable = true } +finecode_httpclient = { path = "./", editable = true } +finecode_jsonrpc = { path = "../finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "../finecode_builtin_handlers", editable = true } +fine_python_recommended = { path = "../presets/fine_python_recommended", editable = true } +fine_python_lint = { path = "../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../presets/fine_python_format", editable = true } diff --git a/finecode_httpclient/src/finecode_httpclient/__init__.py b/finecode_httpclient/src/finecode_httpclient/__init__.py new file mode 100644 index 00000000..93dce775 --- /dev/null +++ b/finecode_httpclient/src/finecode_httpclient/__init__.py @@ -0,0 +1,3 @@ +from .client import HttpClient, HttpResponse, HttpSession + +__all__ = ["HttpClient", "HttpResponse", "HttpSession"] diff --git a/finecode_httpclient/src/finecode_httpclient/client.py 
b/finecode_httpclient/src/finecode_httpclient/client.py new file mode 100644 index 00000000..ac5eaf24 --- /dev/null +++ b/finecode_httpclient/src/finecode_httpclient/client.py @@ -0,0 +1,169 @@ +from types import TracebackType +from typing import Any, Self + +import httpx + +from finecode_extension_api.interfaces import ihttpclient, ilogger + + +class HttpResponse(ihttpclient.IHttpResponse): + """Wrapper for httpx.Response that implements IHttpResponse protocol.""" + + def __init__(self, response: httpx.Response): + self._response = response + + @property + def status_code(self) -> int: + return self._response.status_code + + @property + def headers(self) -> dict[str, str]: + return dict(self._response.headers) + + @property + def content(self) -> bytes: + return self._response.content + + @property + def text(self) -> str: + return self._response.text + + def json(self) -> Any: + return self._response.json() + + def raise_for_status(self) -> None: + self._response.raise_for_status() + + +class HttpSession(ihttpclient.IHttpSession): + """HTTP session implementation using httpx.AsyncClient.""" + + def __init__(self, logger: ilogger.ILogger): + self.logger = logger + self._client: httpx.AsyncClient | None = None + + async def __aenter__(self) -> Self: + """Async context manager entry. Creates and initializes the httpx client.""" + self.logger.debug("HTTP session opened") + self._client = httpx.AsyncClient() + return self + + async def __aexit__( + self, + _exc_type: type[BaseException] | None, + _exc_val: BaseException | None, + _exc_tb: TracebackType | None, + ) -> None: + """Async context manager exit. Automatically closes the client.""" + if self._client is not None: + await self._client.aclose() + self._client = None + self.logger.debug("HTTP session closed") + + def _ensure_client(self) -> httpx.AsyncClient: + """Ensure the client is initialized.""" + if self._client is None: + raise RuntimeError( + "HTTP session not initialized. 
Use 'async with session:' context manager." + ) + return self._client + + async def get( + self, + url: str, + headers: dict[str, str] | None = None, + params: dict[str, str] | None = None, + timeout: float | None = None, + ) -> ihttpclient.IHttpResponse: + self.logger.debug(f"HTTP GET: {url}") + client = self._ensure_client() + response = await client.get( + url, headers=headers, params=params, timeout=timeout + ) + return HttpResponse(response) + + async def post( + self, + url: str, + data: bytes | str | dict[str, Any] | None = None, + json: dict[str, Any] | None = None, + headers: dict[str, str] | None = None, + timeout: float | None = None, + ) -> ihttpclient.IHttpResponse: + self.logger.debug(f"HTTP POST: {url}") + client = self._ensure_client() + response = await client.post( + url, data=data, json=json, headers=headers, timeout=timeout + ) + return HttpResponse(response) + + async def put( + self, + url: str, + data: bytes | str | dict[str, Any] | None = None, + json: dict[str, Any] | None = None, + headers: dict[str, str] | None = None, + timeout: float | None = None, + ) -> ihttpclient.IHttpResponse: + self.logger.debug(f"HTTP PUT: {url}") + client = self._ensure_client() + response = await client.put( + url, data=data, json=json, headers=headers, timeout=timeout + ) + return HttpResponse(response) + + async def delete( + self, + url: str, + headers: dict[str, str] | None = None, + timeout: float | None = None, + ) -> ihttpclient.IHttpResponse: + self.logger.debug(f"HTTP DELETE: {url}") + client = self._ensure_client() + response = await client.delete(url, headers=headers, timeout=timeout) + return HttpResponse(response) + + async def head( + self, + url: str, + headers: dict[str, str] | None = None, + timeout: float | None = None, + ) -> ihttpclient.IHttpResponse: + self.logger.debug(f"HTTP HEAD: {url}") + client = self._ensure_client() + response = await client.head(url, headers=headers, timeout=timeout) + return HttpResponse(response) + + async def 
request( + self, + method: str, + url: str, + data: bytes | str | dict[str, Any] | None = None, + json: dict[str, Any] | None = None, + headers: dict[str, str] | None = None, + params: dict[str, str] | None = None, + timeout: float | None = None, + ) -> ihttpclient.IHttpResponse: + self.logger.debug(f"HTTP {method.upper()}: {url}") + client = self._ensure_client() + response = await client.request( + method, + url, + data=data, + json=json, + headers=headers, + params=params, + timeout=timeout, + ) + return HttpResponse(response) + + +class HttpClient(ihttpclient.IHttpClient): + """HTTP client factory that creates sessions.""" + + def __init__(self, logger: ilogger.ILogger): + self.logger = logger + + def session(self) -> ihttpclient.IHttpSession: + """Create a new HTTP session that should be used as a context manager.""" + return HttpSession(self.logger) diff --git a/finecode_httpclient/src/finecode_httpclient/py.typed b/finecode_httpclient/src/finecode_httpclient/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/finecode_jsonrpc/.gitignore b/finecode_jsonrpc/.gitignore new file mode 100644 index 00000000..4279fe82 --- /dev/null +++ b/finecode_jsonrpc/.gitignore @@ -0,0 +1,20 @@ +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg diff --git a/finecode_jsonrpc/README.md b/finecode_jsonrpc/README.md new file mode 100644 index 00000000..93bb4843 --- /dev/null +++ b/finecode_jsonrpc/README.md @@ -0,0 +1,3 @@ +# finecode_jsonrpc + +JSON-RPC client implementation for FineCode. 
diff --git a/finecode_jsonrpc/pyproject.toml b/finecode_jsonrpc/pyproject.toml new file mode 100644 index 00000000..c0230dff --- /dev/null +++ b/finecode_jsonrpc/pyproject.toml @@ -0,0 +1,25 @@ +[project] +name = "finecode_jsonrpc" +version = "0.1.0a1" +description = "JSON-RPC client implementation for FineCode" +authors = [{ name = "Vladyslav Hnatiuk", email = "aders1234@gmail.com" }] +readme = "README.md" +requires-python = ">=3.11, <= 3.14" +dependencies = ["loguru==0.7.*", "culsans==0.11.*", "apischema==0.19.*", "finecode_extension_api~=0.4.0a0"] + +[dependency-groups] +dev_workspace = ["finecode~=0.4.0a0", "finecode_dev_common_preset~=0.3.0a0"] + +[tool.finecode] +presets = [{ source = "finecode_dev_common_preset" }] + +[tool.finecode.env.dev_workspace.dependencies] +finecode_dev_common_preset = { path = "../finecode_dev_common_preset", editable = true } +finecode = { path = "../", editable = true } +finecode_extension_runner = { path = "../finecode_extension_runner", editable = true } +finecode_extension_api = { path = "../finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "./", editable = true } +finecode_builtin_handlers = { path = "../finecode_builtin_handlers", editable = true } +fine_python_recommended = { path = "../presets/fine_python_recommended", editable = true } +fine_python_lint = { path = "../presets/fine_python_lint", editable = true } +fine_python_format = { path = "../presets/fine_python_format", editable = true } diff --git a/finecode_jsonrpc/setup.py b/finecode_jsonrpc/setup.py new file mode 100644 index 00000000..841816ab --- /dev/null +++ b/finecode_jsonrpc/setup.py @@ -0,0 +1,8 @@ +from setuptools import setup, find_packages + +setup( + name="finecode_jsonrpc", + package_dir={"": "src"}, + packages=find_packages(where="src"), + python_requires=">=3.11", +) diff --git a/src/finecode/runner/jsonrpc_client/__init__.py b/finecode_jsonrpc/src/finecode_jsonrpc/__init__.py similarity index 84% rename from 
src/finecode/runner/jsonrpc_client/__init__.py rename to finecode_jsonrpc/src/finecode_jsonrpc/__init__.py index 39a7caaf..dbd4099a 100644 --- a/src/finecode/runner/jsonrpc_client/__init__.py +++ b/finecode_jsonrpc/src/finecode_jsonrpc/__init__.py @@ -1,5 +1,4 @@ from .client import ( - create_lsp_client_io, JsonRpcClient, BaseRunnerRequestException, NoResponse, @@ -7,14 +6,15 @@ RunnerFailedToStart, RequestCancelledError, ) +from .transports import StdioTransport __all__ = [ - "create_lsp_client_io", "JsonRpcClient", "BaseRunnerRequestException", "NoResponse", "ResponseTimeout", "RunnerFailedToStart", "RequestCancelledError", + "StdioTransport", ] diff --git a/src/finecode/runner/jsonrpc_client/_io_thread.py b/finecode_jsonrpc/src/finecode_jsonrpc/_io_thread.py similarity index 100% rename from src/finecode/runner/jsonrpc_client/_io_thread.py rename to finecode_jsonrpc/src/finecode_jsonrpc/_io_thread.py diff --git a/src/finecode/runner/jsonrpc_client/client.py b/finecode_jsonrpc/src/finecode_jsonrpc/client.py similarity index 77% rename from src/finecode/runner/jsonrpc_client/client.py rename to finecode_jsonrpc/src/finecode_jsonrpc/client.py index ea53ceb6..0aef7c86 100644 --- a/src/finecode/runner/jsonrpc_client/client.py +++ b/finecode_jsonrpc/src/finecode_jsonrpc/client.py @@ -1,26 +1,25 @@ from __future__ import annotations -import traceback - +import asyncio +import collections.abc +import concurrent.futures import dataclasses +import enum import functools +import json import os -import shlex +import re import subprocess import sys -from pathlib import Path -import asyncio -import json -import re import threading +import traceback import typing import uuid -import concurrent.futures -import collections.abc +from pathlib import Path -import culsans import apischema -from finecode.runner.jsonrpc_client import _io_thread +import culsans +from finecode_jsonrpc import _io_thread from loguru import logger @@ -108,14 +107,23 @@ def __init__(self, error: 
ResponseError) -> None: self.error = error +class CommunicationType(enum.Enum): + TCP = enum.auto() + STDIO = enum.auto() + + def task_done_log_callback(future: asyncio.Future[typing.Any], task_id: str = ""): if future.cancelled(): logger.debug(f"task cancelled: {task_id}") else: exc = future.exception() if exc is not None: - logger.error(f"exception in task: {task_id}") + tb = "".join(traceback.format_exception(type(exc), exc, exc.__traceback__)) + logger.error( + f"exception in task: {task_id} {type(exc)} {exc.message if hasattr(exc, 'message') else ''}" + ) logger.exception(exc) + logger.error(tb) else: logger.trace(f"{task_id} done") @@ -131,7 +139,12 @@ class JsonRpcClient: CONTENT_TYPE: typing.Final[str] = "application/vscode-jsonrpc" VERSION: typing.Final[str] = "2.0" - def __init__(self, message_types: dict[str, typing.Any], readable_id: str) -> None: + def __init__( + self, + message_types: dict[str, typing.Any], + readable_id: str, + communication_type: CommunicationType = CommunicationType.TCP, + ) -> None: self.server_process_stopped: typing.Final = threading.Event() self.server_exit_callback: ( collections.abc.Callable[[], collections.abc.Coroutine] | None @@ -141,6 +154,7 @@ def __init__(self, message_types: dict[str, typing.Any], readable_id: str) -> No self.writer = WriterFromQueue(out_queue=self.out_message_queue.sync_q) self.message_types = message_types self.readable_id: str = readable_id + self.communication_type = communication_type self._async_tasks: list[asyncio.Task[typing.Any]] = [] self._stop_event: typing.Final = threading.Event() @@ -150,26 +164,59 @@ def __init__(self, message_types: dict[str, typing.Any], readable_id: str) -> No self.feature_impls: dict[str, collections.abc.Callable] = {} + # NOTE: reader and writer can be accessed only in IO thread + self._reader: asyncio.StreamReader | None = None + self._writer: asyncio.StreamWriter | None = None + self._async_tasks_in_io_thread: list[asyncio.Task[typing.Any]] = [] + 
self._tcp_port_future: asyncio.Future[int] | None = None + def feature(self, name: str, impl: collections.abc.Callable) -> None: self.feature_impls[name] = impl - async def start_io( - self, cmd: str, io_thread: _io_thread.AsyncIOThread, *args, **kwargs - ): - """Start the given server and communicate with it over stdio.""" - full_cmd = shlex.join([cmd, *args]) + async def start( + self, + server_cmd: str, + working_dir_path: Path, + io_thread: _io_thread.AsyncIOThread, + debug_port_future: concurrent.futures.Future[int] | None, + connect: bool = True, + ) -> None: + old_working_dir = os.getcwd() + os.chdir(working_dir_path) + + # temporary remove VIRTUAL_ENV env variable to avoid starting in wrong venv + old_virtual_env_var = os.environ.pop("VIRTUAL_ENV", None) + try: + await self._start_server( + full_cmd=server_cmd, + io_thread=io_thread, + debug_port_future=debug_port_future, + ) + if connect: + await self.connect_to_server(io_thread=io_thread) + finally: + if old_virtual_env_var is not None: + os.environ["VIRTUAL_ENV"] = old_virtual_env_var + + os.chdir(old_working_dir) # restore original working directory + + async def _start_server( + self, + full_cmd: str, + io_thread: _io_thread.AsyncIOThread, + debug_port_future: concurrent.futures.Future[int] | None, + ) -> None: server_future = io_thread.run_coroutine( start_server( - full_cmd, - kwargs, - self.in_message_queue, - self.out_message_queue, - request_futures=self._sync_request_futures, - result_types=self._expected_result_type_by_msg_id, + cmd=full_cmd, + communication_type=self.communication_type, + out_message_queue=self.out_message_queue, stop_event=self._stop_event, server_stopped_event=self.server_process_stopped, server_id=self.readable_id, + async_tasks=self._async_tasks_in_io_thread, + debug_port_future=debug_port_future, ) ) @@ -187,25 +234,49 @@ async def start_io( # the server raise server_start_exception - message_processor_task = asyncio.create_task(self.process_incoming_messages()) - 
message_processor_task.add_done_callback( + self._reader, self._writer, self._tcp_port_future = server_future.result() + + notify_exit = asyncio.create_task(self._server_process_stop_handler()) + notify_exit.add_done_callback( functools.partial( - task_done_log_callback, - task_id=f"process_incoming_messages|{self.readable_id}", + task_done_log_callback, task_id=f"notify_exit|{self.readable_id}" ) ) - notify_exit = asyncio.create_task(self.server_process_stop_handler()) - notify_exit.add_done_callback( + self._async_tasks.extend([notify_exit]) + logger.debug(f"End of start for {full_cmd}") + + async def connect_to_server( + self, io_thread: _io_thread.AsyncIOThread, timeout: float | None = 30 + ): + connect_to_server_future = io_thread.run_coroutine( + self._connect_to_server_io(timeout=timeout) + ) + + # add done callback to catch exceptions if coroutine fails + connect_to_server_future.add_done_callback( functools.partial( - task_done_log_callback, task_id=f"notify_exit|{self.readable_id}" + task_done_log_callback, + task_id=f"connect_to_server_future|{self.readable_id}", ) ) - self._async_tasks.extend([message_processor_task, notify_exit]) - logger.debug(f"End of start io for {cmd}") + await asyncio.wrap_future(connect_to_server_future) + connect_to_server_future_exception = connect_to_server_future.exception() + if connect_to_server_future_exception is not None: + raise connect_to_server_future_exception - async def server_process_stop_handler(self): + # message processor task ends automatically after getting QUEUE_END message, + # no need to save it in `_async_tasks` for explicit stop. 
+ message_processor_task = asyncio.create_task(self.process_incoming_messages()) + message_processor_task.add_done_callback( + functools.partial( + task_done_log_callback, + task_id=f"process_incoming_messages|{self.readable_id}", + ) + ) + + async def _server_process_stop_handler(self): """Cleanup handler that runs when the server process managed by the client exits""" # await asyncio.to_thread(self.server_process_stopped.wait) @@ -307,7 +378,7 @@ def notify(self, method: str, params: typing.Any | None = None) -> None: raise InvalidResponse( f"Failed to serialize notification: {error}" ) from error - + logger.trace(notification_str) self._send_data(notification_str) def send_request_sync( @@ -435,11 +506,13 @@ async def send_request( async def process_incoming_messages(self) -> None: logger.debug(f"Start processing messages from server {self.readable_id}") try: - while not self._stop_event.is_set(): + while True: raw_message = await self.in_message_queue.async_q.get() if raw_message == QUEUE_END: - logger.debug("Queue with messages from server was closed") + # TODO: this message doesn't come, task is always cancelled + logger.info("Queue with messages from server was closed") self.in_message_queue.async_q.task_done() + self.in_message_queue.async_q.shutdown() break try: @@ -449,9 +522,10 @@ async def process_incoming_messages(self) -> None: finally: self.in_message_queue.async_q.task_done() except asyncio.CancelledError: + # logger.warning("process_incoming_messages was cancelled") ... - self.in_message_queue.async_q.shutdown() + # self.in_message_queue.async_q.shutdown() logger.debug(f"End processing messages from server {self.readable_id}") async def handle_message(self, message: dict[str, typing.Any]) -> None: @@ -698,94 +772,166 @@ async def run_notification_impl(self, impl_coro) -> None: except ValueError: ... 
+ async def _connect_to_server_io(self, timeout: float | None) -> None: + if self.communication_type == CommunicationType.TCP: + assert self._tcp_port_future is not None + + try: + await asyncio.wait_for(self._tcp_port_future, timeout) + except TimeoutError as exception: + for task in self._async_tasks_in_io_thread: + task.cancel() + + raise RunnerFailedToStart( + "Didn't get port in 30 seconds" + ) from exception + + port = self._tcp_port_future.result() + logger.debug(f"Got port {port} | {self.readable_id}") + + try: + self._reader, self._writer = await asyncio.open_connection( + "127.0.0.1", port + ) + except Exception as exception: + logger.exception(exception) + + for task in self._async_tasks_in_io_thread: + task.cancel() + + raise RunnerFailedToStart( + f"Failed to open connection: {exception}" + ) from exception + + assert self._reader is not None and self._writer is not None + + task = asyncio.create_task( + read_messages_from_reader( + self._reader, + self.in_message_queue.sync_q, + self._sync_request_futures, + self._expected_result_type_by_msg_id, + self._stop_event, + server_id=self.readable_id, + ) + ) + task.add_done_callback( + functools.partial( + task_done_log_callback, + task_id=f"read_messages_from_reader|{self.readable_id}", + ) + ) + self._async_tasks_in_io_thread.append(task) + + task = asyncio.create_task( + send_messages_from_queue( + queue=self.out_message_queue.async_q, writer=self._writer + ) + ) + task.add_done_callback( + functools.partial( + task_done_log_callback, + task_id=f"send_messages_from_queue|{self.readable_id}", + ) + ) + self._async_tasks_in_io_thread.append(task) + async def start_server( cmd: str, - subprocess_kwargs: dict[str, str], - in_message_queue: culsans.Queue[bytes], + communication_type: CommunicationType, out_message_queue: culsans.Queue[bytes], - request_futures: dict[str, concurrent.futures.Future[typing.Any]], - result_types: dict[str, typing.Any], stop_event: threading.Event, server_stopped_event: 
threading.Event, server_id: str, -): - logger.debug(f"Starting server process: {' '.join([cmd, str(subprocess_kwargs)])}") - - server = await asyncio.create_subprocess_shell( - cmd, - stdout=asyncio.subprocess.PIPE, - stderr=asyncio.subprocess.PIPE, - **subprocess_kwargs, - ) + async_tasks: list[asyncio.Task[typing.Any]], + debug_port_future: concurrent.futures.Future[int] | None, +) -> tuple[ + asyncio.StreamReader | None, asyncio.StreamWriter | None, asyncio.Future[int] | None +]: + logger.debug(f"Starting server process: {cmd}") + + creationflags = 0 + # start_new_session = True .. process has parent id of real parent, but is not + # ended if parent was ended + start_new_session = True + if sys.platform == "win32": + # use creationflags because `start_new_session` doesn't work on Windows + # subprocess.CREATE_NO_WINDOW .. no console window on Windows. TODO: test + creationflags = subprocess.DETACHED_PROCESS | subprocess.CREATE_NO_WINDOW + start_new_session = False + + subprocess_kwargs = { + "creationflags": creationflags, + "start_new_session": start_new_session, + } + + # Start subprocess with appropriate stdio configuration + if communication_type == CommunicationType.STDIO: + server = await asyncio.create_subprocess_shell( + cmd, + stdin=asyncio.subprocess.PIPE, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + # max length of line: in STDIO mode, the whole file can be sent as a single + # line, increase default limit 64 KBit to 10 MiB + limit=1024 * 1024 * 10, # 10 MiB, + **subprocess_kwargs, + ) + elif communication_type == CommunicationType.TCP: + server = await asyncio.create_subprocess_shell( + cmd, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + **subprocess_kwargs, + ) + else: + raise ValueError(f"Unsupported communication type: {communication_type}") + logger.debug(f"{server_id} - process id: {server.pid}") - tasks: list[asyncio.Task[typing.Any]] = [] task = asyncio.create_task(log_stderr(server.stderr, 
stop_event)) task.add_done_callback( functools.partial(task_done_log_callback, task_id=f"log_stderr|{server_id}") ) - tasks.append(task) - - port_future: asyncio.Future[int] = asyncio.Future() - task = asyncio.create_task( - read_stdout(server.stdout, stop_event, port_future, server.pid) - ) - task.add_done_callback( - functools.partial(task_done_log_callback, task_id=f"read_stdout|{server_id}") - ) - tasks.append(task) - - logger.debug(f"Wait for port of {server.pid} | {server_id}") - - try: - await asyncio.wait_for(port_future, 15) - except TimeoutError: - raise RunnerFailedToStart("Didn't get port in 15 seconds") - - port = port_future.result() - logger.debug(f"Got port {port} of {server.pid} | {server_id}") - - try: - reader, writer = await asyncio.open_connection("127.0.0.1", port) - except Exception as exc: - logger.exception(exc) - - for task in tasks: - task.cancel() - - raise exc - - task = asyncio.create_task( - read_messages_from_reader( - reader, - in_message_queue.sync_q, - request_futures, - result_types, - stop_event, - server.pid, + async_tasks.append(task) + + # Get reader and writer based on communication type + if communication_type == CommunicationType.STDIO: + reader = server.stdout + writer = server.stdin + tcp_port_future = None + else: # CommunicationType.TCP + reader = None + writer = None + + # TODO: read debug port also in stdio + tcp_port_future = asyncio.Future[int]() + task = asyncio.create_task( + read_stdout( + server.stdout, + stop_event, + tcp_port_future, + server.pid, + debug_port_future, + ) ) - ) - task.add_done_callback( - functools.partial( - task_done_log_callback, task_id=f"read_messages_from_reader|{server_id}" + task.add_done_callback( + functools.partial( + task_done_log_callback, task_id=f"read_stdout|{server_id}" + ) ) - ) - tasks.append(task) + async_tasks.append(task) - task = asyncio.create_task( - send_messages_from_queue(queue=out_message_queue.async_q, writer=writer) - ) - task.add_done_callback( - 
functools.partial( - task_done_log_callback, task_id=f"send_messages_from_queue|{server_id}" - ) - ) - tasks.append(task) + logger.debug(f"Wait for port of {server.pid} | {server_id}") task = asyncio.create_task( wait_for_stop_event_and_clean( - stop_event, server, tasks, server_stopped_event, out_message_queue.async_q + stop_event, + server, + async_tasks, + server_stopped_event, + out_message_queue.async_q, ) ) task.add_done_callback( @@ -794,7 +940,11 @@ async def start_server( ) ) - logger.debug(f"Server {server.pid} started | {server_id}") + logger.debug( + f"Server {server.pid} started with {communication_type.name} | {server_id}" + ) + + return (reader, writer, tcp_port_future) async def wait_for_stop_event_and_clean( @@ -855,6 +1005,7 @@ async def read_stdout( stop_event: threading.Event, port_future: asyncio.Future[int], server_pid: int, + debug_port_future: concurrent.futures.Future[int] | None, ) -> None: logger.debug(f"Start reading logs from stdout | {server_pid}") try: @@ -871,12 +1022,23 @@ async def read_stdout( match = re.search(rb"Serving on \('[\d.]+', (\d+)\)", line) if match: port = int(match.group(1)) - port_future.set_result(port) + if not port_future.done(): + port_future.set_result(port) + elif b"Debug session:" in line: + match = re.search(rb"Debug session: [\d.]+:(\d+)", line) + if match: + port = int(match.group(1)) + if debug_port_future is not None and not debug_port_future.done(): + debug_port_future.set_result(port) # logger.debug( # f"Server {server_pid} stdout: {line.decode('utf-8', errors='replace').rstrip()}" # ) except asyncio.CancelledError: pass + # except Exception as exception: + # # catch all unexpected exception to log them properly and to get explicit log + # # about end of reading + # logger.exception(exception) logger.debug(f"End reading logs from stdout | {server_pid}") @@ -911,7 +1073,7 @@ async def read_messages_from_reader( request_futures: dict[str, concurrent.futures.Future[typing.Any]], result_types: dict[str, 
typing.Any], stop_event: threading.Event, - server_pid: int, + server_id: str, ) -> None: content_length = 0 @@ -921,18 +1083,18 @@ async def read_messages_from_reader( try: header = await reader.readline() except ValueError: - logger.error(f"Value error in readline of {server_pid}") + logger.error(f"Value error in readline of {server_id}") continue except ConnectionResetError: logger.warning( - f"Server {server_pid} closed the connection(ConnectionResetError), stop the client" + f"Server {server_id} closed the connection(ConnectionResetError), stop the client" ) stop_event.set() break if not header: if reader.at_eof(): - logger.debug(f"Reader reached EOF | {server_pid}") + logger.debug(f"Reader reached EOF | {server_id}") break continue @@ -941,10 +1103,10 @@ async def read_messages_from_reader( match = CONTENT_LENGTH_PATTERN.fullmatch(header) if match: content_length = int(match.group(1)) - logger.debug(f"Content length | {server_pid}: {content_length}") + logger.debug(f"Content length | {server_id}: {content_length}") else: logger.debug( - f"Not matched content length: {header} | {server_pid}" + f"Not matched content length: {header} | {server_id}" ) # Check if all headers have been read (as indicated by an empty line \r\n) @@ -955,13 +1117,13 @@ async def read_messages_from_reader( body = await reader.readexactly(content_length) except asyncio.IncompleteReadError as error: logger.debug( - f"Incomplete read error: {error} | {server_pid} : {error.partial}" + f"Incomplete read error: {error} | {server_id} : {error.partial}" ) content_length = 0 continue except ConnectionResetError: logger.warning( - f"Server {server_pid} closed the connection(ConnectionResetError), stop the client" + f"Server {server_id} closed the connection(ConnectionResetError), stop the client" ) stop_event.set() break @@ -970,12 +1132,12 @@ async def read_messages_from_reader( content_length = 0 continue - logger.debug(f"Got content {server_pid}: {body}") + logger.debug(f"Got content 
{server_id}: {body}") try: message = json.loads(body) except json.JSONDecodeError as exc: logger.error( - f"Failed to parse JSON message: {exc} | {server_pid}" + f"Failed to parse JSON message: {exc} | {server_id}" ) continue finally: @@ -990,7 +1152,7 @@ async def read_messages_from_reader( continue if message["jsonrpc"] != JsonRpcClient.VERSION: - logger.warning(f'Unknown message "{message}" | {server_pid}') + logger.warning(f'Unknown message "{message}" | {server_id}') continue # error should be also handled here @@ -1001,7 +1163,7 @@ async def read_messages_from_reader( ) if is_response: - logger.debug(f"Response message received. | {server_pid}") + logger.debug(f"Response message received. | {server_id}") msg_id = message["id"] raw_result = message.get("result", None) future = request_futures.pop(msg_id, None) @@ -1028,7 +1190,7 @@ async def read_messages_from_reader( continue logger.debug( - f'Received result for message "{msg_id}" | {server_pid}' + f'Received result for message "{msg_id}" | {server_id}' ) if not future.cancelled(): future.set_result(result) @@ -1042,59 +1204,19 @@ async def read_messages_from_reader( b"Content-Length:" ) and not header.startswith(b"Content-Type:"): logger.debug( - f'Something is wrong: {content_length} "{header}" {not header.strip()} | {server_pid}' + f'Something is wrong: {content_length} "{header}" {not header.strip()} | {server_id}' ) except Exception as exc: logger.exception( - f"Exception in message reader loop | {server_pid}: {exc}" + f"Exception in message reader loop | {server_id}: {exc}" ) # Reset state to avoid infinite loop on persistent errors content_length = 0 except asyncio.CancelledError: ... 
- logger.debug(f"End reading messages from reader | {server_pid}") - - -async def create_lsp_client_io( - server_cmd: str, - working_dir_path: Path, - message_types: dict[str, typing.Any], - io_thread: _io_thread.AsyncIOThread, - readable_id: str, -) -> JsonRpcClient: - ls = JsonRpcClient(message_types=message_types, readable_id=readable_id) - splitted_cmd = shlex.split(server_cmd) - executable, *args = splitted_cmd - - old_working_dir = os.getcwd() - os.chdir(working_dir_path) - - # temporary remove VIRTUAL_ENV env variable to avoid starting in wrong venv - old_virtual_env_var = os.environ.pop("VIRTUAL_ENV", None) - - creationflags = 0 - # start_new_session = True .. process has parent id of real parent, but is not - # ended if parent was ended - start_new_session = True - if sys.platform == "win32": - # use creationflags because `start_new_session` doesn't work on Windows - # subprocess.CREATE_NO_WINDOW .. no console window on Windows. TODO: test - creationflags = subprocess.DETACHED_PROCESS | subprocess.CREATE_NO_WINDOW - start_new_session = False - - await ls.start_io( - executable, - io_thread, - *args, - start_new_session=start_new_session, - creationflags=creationflags, - ) - if old_virtual_env_var is not None: - os.environ["VIRTUAL_ENV"] = old_virtual_env_var - - os.chdir(old_working_dir) # restore original working directory - return ls + message_queue.put_nowait(QUEUE_END) + logger.debug(f"End reading messages from reader | {server_id}") -__all__ = ["create_lsp_client_io", "JsonRpcClient"] +__all__ = ["JsonRpcClient"] diff --git a/finecode_jsonrpc/src/finecode_jsonrpc/jsonrpc_client.py b/finecode_jsonrpc/src/finecode_jsonrpc/jsonrpc_client.py new file mode 100644 index 00000000..34f17cef --- /dev/null +++ b/finecode_jsonrpc/src/finecode_jsonrpc/jsonrpc_client.py @@ -0,0 +1,353 @@ +from __future__ import annotations + +import asyncio +import collections.abc +import concurrent.futures +from pathlib import Path +from types import TracebackType +from typing 
import Any, Self + +from finecode_jsonrpc import _io_thread +from finecode_jsonrpc.transports import StdioTransport +from loguru import logger + + +class JsonRpcSessionImpl: + """IJsonRpcSession implementation using StdioTransport + AsyncIOThread.""" + + def __init__( + self, + cmd: str, + cwd: Path | None, + env: dict[str, str] | None, + readable_id: str, + ) -> None: + self._cmd = cmd + self._cwd = cwd + self._env = env + self._readable_id = readable_id + + self._transport: StdioTransport | None = None + self._io_thread: _io_thread.AsyncIOThread | None = None + + self._next_id: int = 0 + self._async_request_futures: dict[int, asyncio.Future[Any]] = {} + self._sync_request_futures: dict[int, concurrent.futures.Future[Any]] = {} + self._notification_handlers: dict[ + str, + collections.abc.Callable[ + [dict[str, Any] | None], collections.abc.Awaitable[None] + ], + ] = {} + self._request_handlers: dict[ + str, + collections.abc.Callable[ + [dict[str, Any] | None], collections.abc.Awaitable[Any] + ], + ] = {} + + # ------------------------------------------------------------------ + # Context manager + # ------------------------------------------------------------------ + + async def __aenter__(self) -> Self: + self._io_thread = _io_thread.AsyncIOThread() + self._io_thread.start() + + self._transport = StdioTransport(readable_id=self._readable_id) + self._transport.on_message(self._handle_message) + self._transport.on_exit(self._handle_exit) + + # Start transport on the IO thread + start_future = self._io_thread.run_coroutine( + self._transport.start(cmd=self._cmd, cwd=self._cwd, env=self._env) + ) + await asyncio.wrap_future(start_future) + + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + if self._transport is not None and self._io_thread is not None: + try: + stop_future = self._io_thread.run_coroutine(self._transport.stop()) + await 
asyncio.wrap_future(stop_future) + except RuntimeError: + # IO thread may already be stopped + pass + + # Cancel pending futures + for fut in self._async_request_futures.values(): + if not fut.done(): + fut.cancel() + for fut in self._sync_request_futures.values(): + if not fut.done(): + fut.cancel() + self._async_request_futures.clear() + self._sync_request_futures.clear() + + if self._io_thread is not None: + self._io_thread.stop(timeout=5.0) + self._io_thread = None + + # ------------------------------------------------------------------ + # Async API + # ------------------------------------------------------------------ + + async def send_request( + self, + method: str, + params: dict[str, Any] | None = None, + timeout: float | None = None, + ) -> Any: + msg_id = self._next_id + self._next_id += 1 + + message: dict[str, Any] = { + "jsonrpc": "2.0", + "id": msg_id, + "method": method, + } + if params is not None: + message["params"] = params + + future: asyncio.Future[Any] = asyncio.Future() + self._async_request_futures[msg_id] = future + + assert self._transport is not None + self._transport.send(message) + + try: + return await asyncio.wait_for(future, timeout) + except TimeoutError: + self._async_request_futures.pop(msg_id, None) + raise + + async def send_notification( + self, + method: str, + params: dict[str, Any] | None = None, + ) -> None: + message: dict[str, Any] = {"jsonrpc": "2.0", "method": method} + if params is not None: + message["params"] = params + assert self._transport is not None + self._transport.send(message) + + # ------------------------------------------------------------------ + # Sync API + # ------------------------------------------------------------------ + + def send_request_sync( + self, + method: str, + params: dict[str, Any] | None = None, + timeout: float | None = None, + ) -> Any: + msg_id = self._next_id + self._next_id += 1 + + message: dict[str, Any] = { + "jsonrpc": "2.0", + "id": msg_id, + "method": method, + } + if 
params is not None: + message["params"] = params + + future: concurrent.futures.Future[Any] = concurrent.futures.Future() + self._sync_request_futures[msg_id] = future + + assert self._transport is not None + self._transport.send(message) + + return future.result(timeout=timeout) + + def send_notification_sync( + self, + method: str, + params: dict[str, Any] | None = None, + ) -> None: + message: dict[str, Any] = {"jsonrpc": "2.0", "method": method} + if params is not None: + message["params"] = params + assert self._transport is not None + self._transport.send(message) + + # ------------------------------------------------------------------ + # Server-initiated messages + # ------------------------------------------------------------------ + + def on_notification( + self, + method: str, + handler: collections.abc.Callable[ + [dict[str, Any] | None], collections.abc.Awaitable[None] + ], + ) -> None: + self._notification_handlers[method] = handler + + def on_request( + self, + method: str, + handler: collections.abc.Callable[ + [dict[str, Any] | None], collections.abc.Awaitable[Any] + ], + ) -> None: + self._request_handlers[method] = handler + + # ------------------------------------------------------------------ + # Internal message dispatch (runs on IO thread) + # ------------------------------------------------------------------ + + async def _handle_message(self, message: dict[str, Any]) -> None: + has_id = "id" in message + has_method = "method" in message + has_result = "result" in message + has_error = "error" in message + + if has_id and (has_result or has_error) and not has_method: + # Response to one of our requests + msg_id = message["id"] + await self._resolve_response(msg_id, message) + elif has_id and has_method: + # Incoming request from server + await self._handle_incoming_request(message) + elif has_method and not has_id: + # Incoming notification from server + await self._handle_incoming_notification(message) + else: + logger.warning( + f"Unknown 
message structure | {self._readable_id}: {message}" + ) + + async def _resolve_response(self, msg_id: int, message: dict[str, Any]) -> None: + # Try async futures first + async_future = self._async_request_futures.pop(msg_id, None) + if async_future is not None: + # The future lives on the caller's event loop, but this callback + # runs on the IO thread. asyncio.Future is not thread-safe, so + # we must schedule the resolution on the future's own loop. + loop = async_future.get_loop() + if "error" in message: + loop.call_soon_threadsafe( + async_future.set_exception, JsonRpcError(message["error"]) + ) + else: + loop.call_soon_threadsafe( + async_future.set_result, message.get("result") + ) + return + + # Try sync futures + sync_future = self._sync_request_futures.pop(msg_id, None) + if sync_future is not None: + if "error" in message: + sync_future.set_exception(JsonRpcError(message["error"])) + else: + sync_future.set_result(message.get("result")) + return + + logger.warning( + f"No pending future for response id={msg_id} | {self._readable_id}" + ) + + async def _handle_incoming_request(self, message: dict[str, Any]) -> None: + method = message["method"] + handler = self._request_handlers.get(method) + if handler is None: + logger.warning( + f"No handler for server request '{method}' | {self._readable_id}" + ) + # Send error response + error_response: dict[str, Any] = { + "jsonrpc": "2.0", + "id": message["id"], + "error": { + "code": -32601, + "message": f"Method not found: {method}", + }, + } + assert self._transport is not None + self._transport.send(error_response) + return + + try: + result = await handler(message.get("params")) + except Exception as exc: + logger.exception( + f"Error handling server request '{method}' | {self._readable_id}: {exc}" + ) + error_response = { + "jsonrpc": "2.0", + "id": message["id"], + "error": { + "code": -32603, + "message": str(exc), + }, + } + assert self._transport is not None + self._transport.send(error_response) + 
return + + response: dict[str, Any] = { + "jsonrpc": "2.0", + "id": message["id"], + "result": result, + } + assert self._transport is not None + self._transport.send(response) + + async def _handle_incoming_notification(self, message: dict[str, Any]) -> None: + method = message["method"] + handler = self._notification_handlers.get(method) + if handler is not None: + try: + await handler(message.get("params")) + except Exception as exc: + logger.exception( + f"Error handling notification '{method}' | {self._readable_id}: {exc}" + ) + + async def _handle_exit(self) -> None: + # Cancel pending futures when server process exits. + # Async futures live on the caller's loop — resolve them thread-safely. + err = RuntimeError("Server process exited before response") + for fut in list(self._async_request_futures.values()): + if not fut.done(): + fut.get_loop().call_soon_threadsafe(fut.set_exception, err) + for fut in list(self._sync_request_futures.values()): + if not fut.done(): + fut.set_exception(err) + + +class JsonRpcError(Exception): + """Error received in a JSON-RPC response.""" + + def __init__(self, error: dict[str, Any]) -> None: + self.code: int = error.get("code", -1) + self.rpc_message: str = error.get("message", "Unknown error") + self.data: Any = error.get("data") + super().__init__(f"JSON-RPC error {self.code}: {self.rpc_message}") + + +class JsonRpcClientImpl: + """IJsonRpcClient implementation. 
Factory for JsonRpcSessionImpl.""" + + def session( + self, + cmd: str, + cwd: Path | None = None, + env: dict[str, str] | None = None, + readable_id: str = "", + ) -> JsonRpcSessionImpl: + return JsonRpcSessionImpl( + cmd=cmd, + cwd=cwd, + env=env, + readable_id=readable_id, + ) diff --git a/finecode_jsonrpc/src/finecode_jsonrpc/lsp/__init__.py b/finecode_jsonrpc/src/finecode_jsonrpc/lsp/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/finecode_jsonrpc/src/finecode_jsonrpc/py.typed b/finecode_jsonrpc/src/finecode_jsonrpc/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/finecode_jsonrpc/src/finecode_jsonrpc/transports.py b/finecode_jsonrpc/src/finecode_jsonrpc/transports.py new file mode 100644 index 00000000..e38d2f3d --- /dev/null +++ b/finecode_jsonrpc/src/finecode_jsonrpc/transports.py @@ -0,0 +1,298 @@ +from __future__ import annotations + +import asyncio +import collections.abc +import json +import re +import sys +import subprocess # needed for windows +import typing +from pathlib import Path + +from loguru import logger + +CONTENT_LENGTH_PATTERN = re.compile(rb"^Content-Length: (\d+)\r\n$") +CHARSET = "utf-8" +CONTENT_TYPE = "application/vscode-jsonrpc" + + +class StdioTransport: + """Raw JSON-RPC transport over subprocess STDIO with Content-Length framing. + + All I/O runs on the event loop where ``start()`` is called. + ``send()`` is thread-safe (writes go through an ``asyncio.Queue``). 
+ """ + + def __init__(self, readable_id: str = "") -> None: + self._readable_id = readable_id + self._process: asyncio.subprocess.Process | None = None + self._stop_event = asyncio.Event() + self._out_queue: asyncio.Queue[bytes | None] = asyncio.Queue() + self._on_message: ( + collections.abc.Callable[ + [dict[str, typing.Any]], collections.abc.Awaitable[None] + ] + | None + ) = None + self._on_exit: ( + collections.abc.Callable[[], collections.abc.Awaitable[None]] | None + ) = None + self._tasks: list[asyncio.Task[typing.Any]] = [] + self._loop: asyncio.AbstractEventLoop | None = None + + # ------------------------------------------------------------------ + # Configuration (call before start) + # ------------------------------------------------------------------ + + def on_message( + self, + handler: collections.abc.Callable[ + [dict[str, typing.Any]], collections.abc.Awaitable[None] + ], + ) -> None: + self._on_message = handler + + def on_exit( + self, + handler: collections.abc.Callable[[], collections.abc.Awaitable[None]], + ) -> None: + self._on_exit = handler + + # ------------------------------------------------------------------ + # Lifecycle + # ------------------------------------------------------------------ + + async def start( + self, + cmd: str, + cwd: Path | None = None, + env: dict[str, str] | None = None, + ) -> None: + self._loop = asyncio.get_running_loop() + + creationflags = 0 + start_new_session = True + if sys.platform == "win32": + creationflags = subprocess.DETACHED_PROCESS | subprocess.CREATE_NO_WINDOW + start_new_session = False + + self._process = await asyncio.create_subprocess_shell( + cmd, + stdin=asyncio.subprocess.PIPE, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.PIPE, + cwd=cwd, + env=env, + limit=1024 * 1024 * 10, # 10 MiB + creationflags=creationflags, + start_new_session=start_new_session, + ) + + logger.debug( + f"StdioTransport started process pid={self._process.pid} | {self._readable_id}" + ) + + assert 
self._process.stdout is not None + assert self._process.stdin is not None + assert self._process.stderr is not None + + self._tasks.append( + asyncio.create_task( + self._read_messages(self._process.stdout), + name=f"read_messages|{self._readable_id}", + ) + ) + self._tasks.append( + asyncio.create_task( + self._write_messages(self._process.stdin), + name=f"write_messages|{self._readable_id}", + ) + ) + self._tasks.append( + asyncio.create_task( + self._log_stderr(self._process.stderr), + name=f"log_stderr|{self._readable_id}", + ) + ) + self._tasks.append( + asyncio.create_task( + self._wait_for_exit(), + name=f"wait_for_exit|{self._readable_id}", + ) + ) + + async def stop(self) -> None: + if self._stop_event.is_set(): + return + + self._stop_event.set() + + # Signal writer to stop + await self._out_queue.put(None) + + for task in self._tasks: + if not task.done(): + task.cancel() + + if self._tasks: + await asyncio.gather(*self._tasks, return_exceptions=True) + self._tasks.clear() + + if self._process is not None and self._process.returncode is None: + try: + self._process.terminate() + await asyncio.wait_for(self._process.wait(), timeout=5.0) + except TimeoutError: + self._process.kill() + await self._process.wait() + + logger.debug(f"StdioTransport stopped | {self._readable_id}") + + @property + def is_running(self) -> bool: + return ( + self._process is not None + and self._process.returncode is None + and not self._stop_event.is_set() + ) + + # ------------------------------------------------------------------ + # Send (thread-safe) + # ------------------------------------------------------------------ + + def send(self, message: dict[str, typing.Any]) -> None: + """Serialize *message* to JSON with Content-Length header and enqueue. + + Safe to call from any thread. 
+ """ + body = json.dumps(message) + header = ( + f"Content-Length: {len(body)}\r\n" + f"Content-Type: {CONTENT_TYPE}; charset={CHARSET}\r\n\r\n" + ) + data = (header + body).encode(CHARSET) + + if self._loop is not None and self._loop.is_running(): + self._loop.call_soon_threadsafe(self._out_queue.put_nowait, data) + else: + self._out_queue.put_nowait(data) + + # ------------------------------------------------------------------ + # Internal tasks + # ------------------------------------------------------------------ + + async def _write_messages(self, stdin: asyncio.StreamWriter) -> None: + logger.debug(f"Start writing messages | {self._readable_id}") + try: + while True: + data = await self._out_queue.get() + if data is None: + break + stdin.write(data) + await stdin.drain() + except asyncio.CancelledError: + pass + except Exception as exc: + logger.error(f"Error writing message | {self._readable_id}: {exc}") + finally: + try: + stdin.close() + except Exception: + pass + logger.debug(f"End writing messages | {self._readable_id}") + + async def _read_messages(self, stdout: asyncio.StreamReader) -> None: + logger.debug(f"Start reading messages | {self._readable_id}") + content_length = 0 + + try: + while not self._stop_event.is_set(): + try: + header = await stdout.readline() + except (ValueError, ConnectionResetError) as exc: + logger.warning(f"Read error | {self._readable_id}: {exc}") + break + + if not header: + if stdout.at_eof(): + logger.debug(f"Reader reached EOF | {self._readable_id}") + break + continue + + if not content_length: + match = CONTENT_LENGTH_PATTERN.fullmatch(header) + if match: + content_length = int(match.group(1)) + continue + + # Empty line after headers → read body + if content_length and not header.strip(): + try: + body = await stdout.readexactly(content_length) + except asyncio.IncompleteReadError as exc: + logger.debug(f"Incomplete read | {self._readable_id}: {exc}") + content_length = 0 + continue + except ConnectionResetError: + 
logger.warning(f"Connection reset | {self._readable_id}") + break + + content_length = 0 + + if not body: + continue + + try: + message = json.loads(body) + except json.JSONDecodeError as exc: + logger.error(f"JSON parse error | {self._readable_id}: {exc}") + continue + + if not isinstance(message, dict): + logger.error(f"Expected dict message | {self._readable_id}") + continue + + if self._on_message is not None: + try: + await self._on_message(message) + except Exception as exc: + logger.exception( + f"Error in message handler | {self._readable_id}: {exc}" + ) + except asyncio.CancelledError: + pass + + logger.debug(f"End reading messages | {self._readable_id}") + + async def _log_stderr(self, stderr: asyncio.StreamReader) -> None: + logger.debug(f"Start reading stderr | {self._readable_id}") + try: + while not self._stop_event.is_set(): + line = await stderr.readline() + if not line: + break + logger.debug( + f"Server stderr | {self._readable_id}: " + f"{line.decode('utf-8', errors='replace').rstrip()}" + ) + except asyncio.CancelledError: + pass + logger.debug(f"End reading stderr | {self._readable_id}") + + async def _wait_for_exit(self) -> None: + if self._process is None: + return + try: + await self._process.wait() + except asyncio.CancelledError: + return + + logger.debug( + f"Process exited with code {self._process.returncode} | {self._readable_id}" + ) + + if self._on_exit is not None: + try: + await self._on_exit() + except Exception as exc: + logger.exception(f"Error in exit handler | {self._readable_id}: {exc}") diff --git a/mkdocs.yml b/mkdocs.yml new file mode 100644 index 00000000..6addf0a3 --- /dev/null +++ b/mkdocs.yml @@ -0,0 +1,78 @@ +site_name: FineCode +site_description: Tool runner and extension framework for software developers +site_url: !ENV [MKDOCS_SITE_URL, 'http://localhost:8000'] +repo_url: https://github.com/finecode-dev/finecode +repo_name: finecode-dev/finecode + +theme: + name: material + palette: + - scheme: default + toggle: + 
icon: material/brightness-7 + name: Switch to dark mode + - scheme: slate + toggle: + icon: material/brightness-4 + name: Switch to light mode + features: + - navigation.tabs + - navigation.sections + - navigation.expand + - navigation.top + - search.suggest + - search.highlight + - content.code.copy + - content.code.annotate + +plugins: + - search + - mkdocstrings: + handlers: + python: + paths: + - src + - finecode_extension_api/src + - finecode_extension_runner/src + - finecode_jsonrpc/src + - finecode_httpclient/src + - finecode_builtin_handlers/src + options: + show_source: true + show_root_heading: true + show_category_heading: true + docstring_style: google + +markdown_extensions: + - admonition + - pymdownx.details + - pymdownx.superfences: + custom_fences: + - name: mermaid + class: mermaid + format: !!python/name:pymdownx.superfences.fence_code_format + - pymdownx.highlight: + anchor_linenums: true + - pymdownx.inlinehilite + - pymdownx.tabbed: + alternate_style: true + - pymdownx.tasklist: + custom_checkbox: true + - tables + - toc: + permalink: true + +nav: + - Home: index.md + - Getting Started: getting-started.md + - Concepts: concepts.md + - Configuration: configuration.md + - CLI Reference: cli.md + - IDE Integration: ide-integration.md + - Guides: + - Creating an Extension: guides/creating-extension.md + - Creating a Preset: guides/creating-preset.md + - Multi-Project Workspace: guides/workspace.md + - Reference: + - Built-in Actions: reference/actions.md + - Extensions: reference/extensions.md diff --git a/presets/fine_python_format/fine_python_format/preset.toml b/presets/fine_python_format/fine_python_format/preset.toml index 685024c2..4a560286 100644 --- a/presets/fine_python_format/fine_python_format/preset.toml +++ b/presets/fine_python_format/fine_python_format/preset.toml @@ -1,10 +1,28 @@ -[tool.finecode.action.format] -source = "finecode_extension_api.actions.format.FormatAction" +[tool.finecode.action.format_files_python] +source = 
"finecode_extension_api.actions.format_files.FormatFilesAction" +languages = ["python"] handlers = [ - { name = "ruff", source = "fine_python_ruff.RuffFormatHandler", env = "dev_no_runtime", dependencies = [ - "fine_python_ruff==0.1.*", + { name = "ruff", source = "fine_python_ruff.RuffFormatFilesHandler", env = "dev_no_runtime", dependencies = [ + "fine_python_ruff~=0.2.0a0", ] }, - { name = "save", source = "finecode_extension_api.actions.format.SaveFormatHandler", env = "dev_no_runtime", dependencies = [ - "finecode_extension_api==0.3.*", + # ruff formatter doesn't handle imports, run isort to sort them + { name = "isort", source = "fine_python_isort.IsortFormatFilesHandler", env = "dev_no_runtime", dependencies = [ + "fine_python_isort~=0.3.0a0", + ] }, + { name = "save", source = "finecode_builtin_handlers.SaveFormatFilesHandler", env = "dev_no_runtime", dependencies = [ + "finecode_builtin_handlers~=0.2.0a1", ] }, ] + +[[tool.finecode.action_handler]] +source = "fine_python_isort.IsortFormatFilesHandler" +# make isort config compatible with fine_python_lint +# this config is based on 'black' profile of isort, black is not used in lint preset, +# but rules configured for ruff formatter are compatible with black formatting +config.multi_line_output = 3 +config.include_trailing_comma = true +config.force_grid_wrap = 0 +config.use_parentheses = true +config.ensure_newline_before_comments = true +config.line_length = 88 +config.split_on_trailing_comma = true diff --git a/presets/fine_python_format/pyproject.toml b/presets/fine_python_format/pyproject.toml index 31e64188..03c439ed 100644 --- a/presets/fine_python_format/pyproject.toml +++ b/presets/fine_python_format/pyproject.toml @@ -1,11 +1,11 @@ [project] name = "fine_python_format" -version = "0.3.0" +version = "0.4.0a1" description = "" authors = [{ name = "Vladyslav Hnatiuk", email = "aders1234@gmail.com" }] readme = "README.md" requires-python = ">=3.11, <= 3.14" -dependencies = 
["finecode_extension_api==0.3.*"] +dependencies = ["finecode_extension_api~=0.4.0a0"] [build-system] requires = ["setuptools>=64"] diff --git a/presets/fine_python_lint/fine_python_lint/preset.toml b/presets/fine_python_lint/fine_python_lint/preset.toml index 2bf41e36..01fd2d24 100644 --- a/presets/fine_python_lint/fine_python_lint/preset.toml +++ b/presets/fine_python_lint/fine_python_lint/preset.toml @@ -1,26 +1,27 @@ -[tool.finecode.action.lint] -source = "finecode_extension_api.actions.lint.LintAction" +[tool.finecode.action.lint_files_python] +source = "finecode_extension_api.actions.lint_files.LintFilesAction" +languages = ["python"] handlers = [ - { name = "ruff", source = "fine_python_ruff.RuffLintHandler", env = "dev_no_runtime", dependencies = [ - "fine_python_ruff==0.1.*", + { name = "ruff", source = "fine_python_ruff.RuffLintFilesHandler", env = "dev_no_runtime", dependencies = [ + "fine_python_ruff[jsonrpc]~=0.2.0a0", ] }, - { name = "flake8", source = "fine_python_flake8.Flake8LintHandler", env = "dev_no_runtime", dependencies = [ - "fine_python_flake8==0.2.*", + { name = "flake8", source = "fine_python_flake8.Flake8LintFilesHandler", env = "dev_no_runtime", dependencies = [ + "fine_python_flake8~=0.3.0a0", ] }, - { name = "pyrefly", source = "fine_python_pyrefly.PyreflyLintHandler", env = "dev_no_runtime", dependencies = [ - "fine_python_pyrefly==0.1.*", + { name = "pyrefly", source = "fine_python_pyrefly.PyreflyLintFilesHandler", env = "dev_no_runtime", dependencies = [ + "fine_python_pyrefly[jsonrpc]~=0.2.0a0", ] }, ] [[tool.finecode.action_handler]] -source = "fine_python_ruff.RuffLintHandler" +source = "fine_python_ruff.RuffLintFilesHandler" config.extend_select = ["B", "I"] # flake8 is used only for custom rules, all standard rules are checked by ruff, but # keep flake8 configuration if someone activates some rules or uses flake8 config # parameters in their own rules [[tool.finecode.action_handler]] -source = 
"fine_python_flake8.Flake8LintHandler" +source = "fine_python_flake8.Flake8LintFilesHandler" config.max_line_length = 80 # W391 is not compatible with black(and ruff formatter, which is compatible with black), # because black adds an empty line to the end of the file @@ -39,3 +40,15 @@ source = "fine_python_pip.PipInstallDepsInEnvHandler" # as editable in multiple packages, there are concurrency problems and it would require # sequential installation of dependencies in all envs. config.editable_mode = 'compat' + +[[tool.finecode.service]] +interface = "finecode_extension_api.interfaces.ijsonrpcclient.IJsonRpcClient" +source = "finecode_jsonrpc.jsonrpc_client.JsonRpcClientImpl" +env = "dev_no_runtime" +dependencies = ["finecode_jsonrpc~=0.1.0a1"] + +[[tool.finecode.service]] +interface = "finecode_extension_api.interfaces.ilspclient.ILspClient" +source = "finecode_extension_runner.impls.lsp_client.LspClientImpl" +env = "dev_no_runtime" +dependencies = [] diff --git a/presets/fine_python_lint/pyproject.toml b/presets/fine_python_lint/pyproject.toml index d92b4716..7e9dbbd6 100644 --- a/presets/fine_python_lint/pyproject.toml +++ b/presets/fine_python_lint/pyproject.toml @@ -1,11 +1,11 @@ [project] name = "fine_python_lint" -version = "0.4.0" +version = "0.5.0a1" description = "" authors = [{ name = "Vladyslav Hnatiuk", email = "aders1234@gmail.com" }] readme = "README.md" requires-python = ">=3.11, <= 3.14" -dependencies = ["finecode_extension_api==0.3.*"] +dependencies = ["finecode_extension_api~=0.4.0a0"] [build-system] requires = ["setuptools>=64"] diff --git a/presets/fine_python_recommended/pyproject.toml b/presets/fine_python_recommended/pyproject.toml index 3d3c8465..1dd40a3b 100644 --- a/presets/fine_python_recommended/pyproject.toml +++ b/presets/fine_python_recommended/pyproject.toml @@ -1,11 +1,11 @@ [project] name = "fine_python_recommended" -version = "0.4.0" +version = "0.5.0a1" description = "" authors = [{ name = "Vladyslav Hnatiuk", email = 
"aders1234@gmail.com" }] readme = "README.md" requires-python = ">=3.11, <= 3.14" -dependencies = ["fine_python_format==0.3.*", "fine_python_lint==0.4.*"] +dependencies = ["fine_python_format~=0.4.0a0", "fine_python_lint~=0.5.0a0"] [build-system] requires = ["setuptools>=64"] diff --git a/pyproject.toml b/pyproject.toml index c2155f47..075ffac2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,29 +10,33 @@ dependencies = [ "tomlkit==0.11.*", "watchdog==4.0.*", "click==8.1.*", - "pydantic==2.11.*", - "pygls==2.0.0-a6", - "finecode_extension_api==0.3.*", - "finecode_extension_runner==0.3.*", - "finecode_builtin_handlers==0.1.*", + "pydantic==2.12.*", + "pygls==2.0.0", + "finecode_extension_api~=0.4.0a0", + "finecode_extension_runner~=0.4.0a0", + "finecode_builtin_handlers~=0.2.0a0", + "finecode_jsonrpc~=0.1.0a0", "ordered-set==4.1.*", "mcp==1.13.*", - "fine_python_virtualenv==0.1.*", - "fine_python_pip==0.1.*", - "culsans==0.9.*", + "fine_python_virtualenv~=0.2.0a0", + "fine_python_pip~=0.2.0a0", + "culsans==0.11.*", "apischema==0.19.*", ] [dependency-groups] -# file_python_import_linter is temporary disabled, because it isn't ported to the new finecode_extension_api yet -# "fine_python_import_linter @ git+https://github.com/finecode-dev/finecode.git#subdirectory=extensions/fine_python_import_linter", dev_workspace = [ "build==1.2.2.post1", - "finecode==0.3.*", - "finecode_dev_common_preset==0.2.*", + "finecode~=0.3.0a0", + "finecode_dev_common_preset~=0.3.0a0", "debugpy==1.8.*", ] dev = [{ include-group = "runtime" }, "pytest==7.4.*", "debugpy==1.8.*"] +docs = [ + "mkdocs==1.6.*", + "mkdocs-material==9.7.*", + "mkdocstrings[python]==1.0.*", +] [build-system] requires = ["setuptools>=64", "setuptools-scm>=8"] @@ -47,6 +51,15 @@ presets = [{ source = "finecode_dev_common_preset" }] finecode_dev_common_preset = { path = "./finecode_dev_common_preset", editable = true } finecode_extension_runner = { path = "./finecode_extension_runner", editable = true } 
finecode_extension_api = { path = "./finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "./finecode_jsonrpc", editable = true } +fine_python_lint = { path = "./presets/fine_python_lint", editable = true } +fine_python_format = { path = "./presets/fine_python_format", editable = true } +finecode_builtin_handlers = { path = "../finecode_builtin_handlers", editable = true } + +[tool.finecode.env.runtime.dependencies] +finecode_extension_api = { path = "./finecode_extension_api", editable = true } +finecode_jsonrpc = { path = "./finecode_jsonrpc", editable = true } +finecode_builtin_handlers = { path = "./finecode_builtin_handlers", editable = true } [tool.importlinter] root_package = "finecode" @@ -85,3 +98,12 @@ finecode = ["base_config.toml"] [tool.pyright] reportUnusedCallResult = false + +[tool.finecode.action.get_src_artifact_version] +source = "finecode_extension_api.actions.get_src_artifact_version.GetSrcArtifactVersionAction" +handlers_mode = "replace" +handlers = [ + { name = 'get_src_artifact_version_setuptools_scm', source = 'fine_python_setuptools_scm.GetSrcArtifactVersionSetuptoolsScmHandler', env = "dev_no_runtime", dependencies = [ + "fine_python_setuptools_scm~=0.1.0a1", + ] }, +] diff --git a/src/finecode/base_config.toml b/src/finecode/base_config.toml index 0c59fd45..46b151bf 100644 --- a/src/finecode/base_config.toml +++ b/src/finecode/base_config.toml @@ -5,14 +5,14 @@ source = "finecode_extension_api.actions.prepare_envs.PrepareEnvsAction" name = "prepare_envs_dump_configs" source = "finecode_builtin_handlers.PrepareEnvsReadConfigsHandler" env = "dev_workspace" -dependencies = ["finecode_builtin_handlers==0.1.*"] +dependencies = ["finecode_builtin_handlers~=0.2.0a0"] [[tool.finecode.action.prepare_envs.handlers]] name = "prepare_envs_install_deps" source = "finecode_builtin_handlers.PrepareEnvsInstallDepsHandler" env = "dev_workspace" -dependencies = ["finecode_builtin_handlers==0.1.*"] +dependencies = 
["finecode_builtin_handlers~=0.2.0a0"] # preparing dev workspaces doesn't need dumping config for two reasons: @@ -28,20 +28,20 @@ source = "finecode_extension_api.actions.prepare_envs.PrepareEnvsAction" name = "prepare_venvs" source = "fine_python_virtualenv.VirtualenvPrepareEnvHandler" env = "dev_workspace" -dependencies = ["fine_python_virtualenv==0.1.*"] +dependencies = ["fine_python_virtualenv~=0.2.0a0"] [[tool.finecode.action.prepare_dev_workspaces_envs.handlers]] name = "prepare_envs_read_configs" source = "finecode_builtin_handlers.PrepareEnvsReadConfigsHandler" env = "dev_workspace" -dependencies = ["finecode_builtin_handlers==0.1.*"] +dependencies = ["finecode_builtin_handlers~=0.2.0a0"] [[tool.finecode.action.prepare_dev_workspaces_envs.handlers]] name = "prepare_envs_install_deps" source = "finecode_builtin_handlers.PrepareEnvsInstallDepsHandler" env = "dev_workspace" -dependencies = ["finecode_builtin_handlers==0.1.*"] +dependencies = ["finecode_builtin_handlers~=0.2.0a0"] [tool.finecode.action.prepare_runners] source = "finecode_extension_api.actions.prepare_runners.PrepareRunnersAction" @@ -50,20 +50,20 @@ source = "finecode_extension_api.actions.prepare_runners.PrepareRunnersAction" name = "prepare_runners_venvs" source = "fine_python_virtualenv.VirtualenvPrepareRunnersHandler" env = "dev_workspace" -dependencies = ["fine_python_virtualenv==0.1.*"] +dependencies = ["fine_python_virtualenv~=0.2.0a0"] [[tool.finecode.action.prepare_runners.handlers]] name = "prepare_runners_read_configs" source = "finecode_builtin_handlers.PrepareRunnersReadConfigsHandler" env = "dev_workspace" -dependencies = ["finecode_builtin_handlers==0.1.*"] +dependencies = ["finecode_builtin_handlers~=0.2.0a0"] [[tool.finecode.action.prepare_runners.handlers]] name = "prepare_runners_install_runner_and_presets" source = "finecode_builtin_handlers.PrepareRunnersInstallRunnerAndPresetsHandler" env = "dev_workspace" -dependencies = ["finecode_builtin_handlers==0.1.*"] +dependencies 
= ["finecode_builtin_handlers~=0.2.0a0"] [tool.finecode.action.dump_config] @@ -73,14 +73,14 @@ source = "finecode_extension_api.actions.dump_config.DumpConfigAction" name = "dump_config" source = "finecode_builtin_handlers.DumpConfigHandler" env = "dev_workspace" -dependencies = ["finecode_builtin_handlers==0.1.*"] +dependencies = ["finecode_builtin_handlers~=0.2.0a0"] [[tool.finecode.action.dump_config.handlers]] name = "dump_config_save" source = "finecode_builtin_handlers.DumpConfigSaveHandler" env = "dev_workspace" -dependencies = ["finecode_builtin_handlers==0.1.*"] +dependencies = ["finecode_builtin_handlers~=0.2.0a0"] [tool.finecode.action.install_deps_in_env] @@ -90,14 +90,70 @@ source = "finecode_extension_api.actions.install_deps_in_env.InstallDepsInEnvAct name = "install_deps_with_pip" source = "fine_python_pip.PipInstallDepsInEnvHandler" env = "dev_workspace" -dependencies = ["fine_python_pip==0.1.*"] +dependencies = ["fine_python_pip~=0.2.0a0"] -[tool.finecode.action.list_project_files_by_lang] -source = "finecode_extension_api.actions.list_project_files_by_lang.ListProjectFilesByLangAction" +[tool.finecode.action.list_src_artifact_files_by_lang] +source = "finecode_extension_api.actions.list_src_artifact_files_by_lang.ListSrcArtifactFilesByLangAction" -[[tool.finecode.action.list_project_files_by_lang.handlers]] -name = "list_project_files_by_lang_python" -source = "fine_python_package_info.ListProjectFilesByLangPythonHandler" +[[tool.finecode.action.list_src_artifact_files_by_lang.handlers]] +name = "list_src_artifact_files_by_lang_python" +source = "fine_python_package_info.ListSrcArtifactFilesByLangPythonHandler" env = "dev_no_runtime" -dependencies = ["fine_python_package_info==0.1.*"] +dependencies = ["fine_python_package_info~=0.2.0a0"] + +[tool.finecode.action.group_src_artifact_files_by_lang] +source = "finecode_extension_api.actions.group_src_artifact_files_by_lang.GroupSrcArtifactFilesByLangAction" + 
+[[tool.finecode.action.group_src_artifact_files_by_lang.handlers]] +name = "group_src_artifact_files_by_lang_python" +source = "fine_python_package_info.GroupSrcArtifactFilesByLangPythonHandler" +env = "dev_no_runtime" +dependencies = ["fine_python_package_info~=0.2.0a0"] + + +[tool.finecode.action.lint] +source = "finecode_extension_api.actions.lint.LintAction" + +[[tool.finecode.action.lint.handlers]] +name = "lint" +source = "finecode_builtin_handlers.LintHandler" +env = "dev_no_runtime" +dependencies = ["finecode_builtin_handlers~=0.2.0a0"] + +[tool.finecode.action.format] +source = "finecode_extension_api.actions.format.FormatAction" + +[[tool.finecode.action.format.handlers]] +name = "format" +source = "finecode_builtin_handlers.FormatHandler" +env = "dev_no_runtime" +dependencies = ["finecode_builtin_handlers~=0.2.0a0"] + +[tool.finecode.action.clean_finecode_logs] +source = "finecode_extension_api.actions.clean_finecode_logs.CleanFinecodeLogsAction" + +[[tool.finecode.action.clean_finecode_logs.handlers]] +name = "clean_finecode_logs_dev_workspace" +source = "finecode_builtin_handlers.CleanFinecodeLogsHandler" +env = "dev_workspace" +dependencies = ["finecode_builtin_handlers~=0.2.0a0"] + +[[tool.finecode.action.clean_finecode_logs.handlers]] +name = "clean_finecode_logs_dev_no_runtime" +source = "finecode_builtin_handlers.CleanFinecodeLogsHandler" +env = "dev_no_runtime" +dependencies = ["finecode_builtin_handlers~=0.2.0a0"] + +# TODO: finish +# [[tool.finecode.action.clean_finecode_logs.handlers]] +# name = "clean_finecode_logs_dev" +# source = "finecode_builtin_handlers.CleanFinecodeLogsHandler" +# env = "dev" +# dependencies = ["finecode_builtin_handlers~=0.1.0a0"] + +# [[tool.finecode.action.clean_finecode_logs.handlers]] +# name = "clean_finecode_logs_runtime" +# source = "finecode_builtin_handlers.CleanFinecodeLogsHandler" +# env = "runtime" +# dependencies = ["finecode_builtin_handlers~=0.1.0a0"] diff --git a/src/finecode/cli.py 
b/src/finecode/cli.py index 08f081ba..5178072c 100644
--- a/src/finecode/cli.py
+++ b/src/finecode/cli.py
@@ -11,6 +11,159 @@ import finecode.lsp_server.main as wm_lsp_server
 from finecode import communication_utils, logger_utils, user_messages
 from finecode.cli_app.commands import dump_config_cmd, prepare_envs_cmd, run_cmd
+from finecode.config.config_models import ConfigurationError
+
+
+FINECODE_CONFIG_ENV_PREFIX = "FINECODE_CONFIG_"
+
+# TODO: unify possibilities of CLI options and env vars
+def parse_handler_config_from_env() -> dict[str, dict[str, dict[str, str]]]:
+    """
+    Parse handler config overrides from environment variables.
+
+    Format:
+    - FINECODE_CONFIG_<ACTION>__<PARAM>=value
+      -> action-level config for all handlers of action
+    - FINECODE_CONFIG_<ACTION>__<HANDLER>__<PARAM>=value
+      -> handler-specific config
+
+    Returns nested dict: {action_name: {handler_name_or_empty: {param: value}}}
+    Empty string key "" means action-level (applies to all handlers).
+    """
+    config_overrides: dict[str, dict[str, dict[str, str]]] = {}
+
+    for env_name, env_value in os.environ.items():
+        if not env_name.startswith(FINECODE_CONFIG_ENV_PREFIX):
+            continue
+
+        # Remove prefix and split by double underscore
+        config_key = env_name[len(FINECODE_CONFIG_ENV_PREFIX) :]
+        parts = config_key.split("__")
+
+        if len(parts) < 2:
+            logger.warning(
+                f"Invalid config env var format: {env_name}. "
+                f"Expected FINECODE_CONFIG_<ACTION>__<PARAM> or "
+                f"FINECODE_CONFIG_<ACTION>__<HANDLER>__<PARAM>"
+            )
+            continue
+
+        # Convert to lowercase for matching
+        action_name = parts[0].lower()
+
+        if len(parts) == 2:
+            # Action-level config: FINECODE_CONFIG_ACTION__PARAM
+            handler_name = ""  # empty means all handlers
+            param_name = parts[1].lower()
+        else:
+            # Handler-specific config: FINECODE_CONFIG_ACTION__HANDLER__PARAM
+            handler_name = parts[1].lower()
+            param_name = "__".join(parts[2:]).lower()
+
+        if action_name not in config_overrides:
+            config_overrides[action_name] = {}
+        if handler_name not in config_overrides[action_name]:
+            config_overrides[action_name][handler_name] = {}
+
+        try:
+            parsed_value = json.loads(env_value)
+        except json.JSONDecodeError as e:
+            raise ConfigurationError(
+                f"Failed to parse JSON value for env var '{env_name}': {env_value!r}"
+            ) from e
+
+        config_overrides[action_name][handler_name][param_name] = parsed_value
+
+    return config_overrides
+
+
+def parse_handler_config_from_cli(
+    config_args: list[str], actions: list[str]
+) -> dict[str, dict[str, dict[str, str]]]:
+    """
+    Parse handler config overrides from CLI arguments.
+
+    Format:
+    - --config.<param>=value
+      -> action-level config for all handlers of all specified actions
+    - --config.<handler>.<param>=value
+      -> handler-specific config for all specified actions
+
+    Returns nested dict: {action_name: {handler_name_or_empty: {param: value}}}
+    Empty string key "" means action-level (applies to all handlers).
+    """
+    config_overrides: dict[str, dict[str, dict[str, str]]] = {}
+
+    for arg in config_args:
+        if not arg.startswith("--config."):
+            continue
+
+        if "=" not in arg:
+            logger.warning(
+                f"Invalid config CLI arg format: {arg}. "
+                f"Expected --config.<param>=value or --config.<handler>.<param>=value"
+            )
+            continue
+
+        # Remove --config.
prefix and split by = + config_part = arg[len("--config.") :] + key_part, raw_value = config_part.split("=", 1) + try: + value = json.loads(raw_value) + except json.JSONDecodeError: + # fallback for literal string, all other types can be parsed by json.loads + value = raw_value + + # Split by . to determine if it's action-level or handler-specific + parts = key_part.split(".") + + if len(parts) == 1: + # Action-level config: --config.=value + handler_name = "" # empty means all handlers + param_name = parts[0].lower().replace("-", "_") + else: + # Handler-specific config: --config..=value + handler_name = parts[0].lower().replace("-", "_") + param_name = ".".join(parts[1:]).lower().replace("-", "_") + + # Apply to all specified actions + for action_name in actions: + action_name_lower = action_name.lower() + if action_name_lower not in config_overrides: + config_overrides[action_name_lower] = {} + if handler_name not in config_overrides[action_name_lower]: + config_overrides[action_name_lower][handler_name] = {} + + config_overrides[action_name_lower][handler_name][param_name] = value + + return config_overrides + + +def merge_config_overrides( + env_overrides: dict[str, dict[str, dict[str, str]]], + cli_overrides: dict[str, dict[str, dict[str, str]]], +) -> dict[str, dict[str, dict[str, str]]]: + """ + Merge env var and CLI config overrides. CLI takes precedence. 
+ """ + merged = {} + + # Copy env overrides + for action, handlers in env_overrides.items(): + merged[action] = {} + for handler, params in handlers.items(): + merged[action][handler] = dict(params) + + # Merge CLI overrides (takes precedence) + for action, handlers in cli_overrides.items(): + if action not in merged: + merged[action] = {} + for handler, params in handlers.items(): + if handler not in merged[action]: + merged[action][handler] = {} + merged[action][handler].update(params) + + return merged @click.group() @@ -79,7 +232,7 @@ async def show_user_message(message: str, message_type: str) -> None: def deserialize_action_payload(raw_payload: dict[str, str]) -> dict[str, typing.Any]: deserialized_payload = {} for key, value in raw_payload.items(): - if value.startswith("{") and value.endswith("}"): + if (value.startswith("{") and value.endswith("}")) or (value.startswith('[') and value.endswith(']')): try: deserialized_value = json.loads(value) except json.JSONDecodeError: @@ -100,6 +253,9 @@ def run(ctx) -> None: processed_args_count: int = 0 concurrently: bool = False trace: bool = False + no_env_config: bool = False + save_results: bool = True + map_payload_fields: set[str] = set() # finecode run parameters for arg in args: @@ -122,12 +278,28 @@ def run(ctx) -> None: concurrently = True elif arg == "--trace": trace = True + elif arg == "--no-env-config": + no_env_config = True + elif arg == "--no-save-results": + save_results = False + elif arg.startswith("--map-payload-fields"): + fields = arg.removeprefix("--map-payload-fields=") + map_payload_fields = {f.replace("-", "_") for f in fields.split(",")} elif not arg.startswith("--"): break processed_args_count += 1 logger_utils.init_logger(trace=trace, stdout=True) + # Parse handler config from env vars + handler_config_overrides: dict[str, dict[str, dict[str, str]]] = {} + if not no_env_config: + try: + handler_config_overrides = parse_handler_config_from_env() + except ConfigurationError as exception: 
+ click.echo(exception.message, err=True) + sys.exit(1) + # actions for arg in args[processed_args_count:]: if not arg.startswith("--"): @@ -140,8 +312,9 @@ def run(ctx) -> None: click.echo("No actions to run", err=True) sys.exit(1) - # action payload + # action payload and config overrides action_payload: dict[str, typing.Any] = {} + config_args: list[str] = [] for arg in args[processed_args_count:]: if not arg.startswith("--"): click.echo( @@ -156,27 +329,52 @@ def run(ctx) -> None: err=True, ) sys.exit(1) + elif arg.startswith("--config."): + config_args.append(arg) else: - arg_name, arg_value = arg[2:].split("=") + arg_name, arg_value = arg[2:].split("=", 1) arg_name = arg_name.replace("-", "_") action_payload[arg_name] = arg_value.strip('"').strip("'") processed_args_count += 1 + # Parse CLI config overrides and merge with env overrides + if config_args: + cli_config_overrides = parse_handler_config_from_cli(config_args, actions_to_run) + if cli_config_overrides: + logger.trace(f"Handler config overrides from CLI: {cli_config_overrides}") + handler_config_overrides = merge_config_overrides( + handler_config_overrides, cli_config_overrides + ) + user_messages._notification_sender = show_user_message deserialized_payload = deserialize_action_payload(action_payload) try: - output, return_code = asyncio.run( + result = asyncio.run( run_cmd.run_actions( workdir_path, projects, actions_to_run, deserialized_payload, concurrently, + handler_config_overrides, + save_results, + map_payload_fields, ) ) - click.echo(output) - sys.exit(return_code) + click.echo(result.output) + if save_results: + results_dir = pathlib.Path(sys.executable).parent.parent / "cache" / "finecode" / "results" + results_dir.mkdir(parents=True, exist_ok=True) + for project_path, result_by_action in result.result_by_project.items(): + for action_name, action_result in result_by_action.items(): + output_file = results_dir / f"{action_name}.json" + json_result: dict[str, typing.Any] = {} + if 
output_file.exists(): + json_result = json.loads(output_file.read_text()) + json_result[str(project_path)] = action_result.json() + output_file.write_text(json.dumps(json_result, indent=2)) + sys.exit(result.return_code) except run_cmd.RunFailed as exception: click.echo(exception.args[0], err=True) sys.exit(1) diff --git a/src/finecode/cli_app/commands/dump_config_cmd.py b/src/finecode/cli_app/commands/dump_config_cmd.py index 93fb5931..c14d2182 100644 --- a/src/finecode/cli_app/commands/dump_config_cmd.py +++ b/src/finecode/cli_app/commands/dump_config_cmd.py @@ -38,7 +38,7 @@ async def dump_config(workdir_path: pathlib.Path, project_name: str): except config_models.ConfigurationError as exception: raise DumpFailed( f"Reading project configs(without presets) in {project.dir_path} failed: {exception.message}" - ) + ) from exception # Some tools like IDE extensions for syntax highlighting rely on # file name. Keep file name of config the same and save in subdirectory @@ -59,7 +59,7 @@ async def dump_config(workdir_path: pathlib.Path, project_name: str): except runner_manager.RunnerFailedToStart as exception: raise DumpFailed( f"Starting runners with presets failed: {exception.message}" - ) + ) from exception try: await run_service.start_required_environments( @@ -68,7 +68,7 @@ async def dump_config(workdir_path: pathlib.Path, project_name: str): except run_service.StartingEnvironmentsFailed as exception: raise DumpFailed( f"Failed to start environments for running 'dump_config': {exception.message}" - ) + ) from exception project_raw_config = ws_context.ws_projects_raw_configs[project_dir_path] @@ -81,8 +81,10 @@ async def dump_config(workdir_path: pathlib.Path, project_name: str): }, project_def=project_def, ws_context=ws_context, - result_format=run_service.RunResultFormat.STRING, + result_formats=[run_service.RunResultFormat.STRING], preprocess_payload=False, + run_trigger=run_service.RunActionTrigger.USER, + dev_env=run_service.DevEnv.CLI, ) logger.info(f"Dumped 
config into {dump_file_path}") finally: diff --git a/src/finecode/cli_app/commands/prepare_envs_cmd.py b/src/finecode/cli_app/commands/prepare_envs_cmd.py index bf749c43..e8d5fbbe 100644 --- a/src/finecode/cli_app/commands/prepare_envs_cmd.py +++ b/src/finecode/cli_app/commands/prepare_envs_cmd.py @@ -59,7 +59,7 @@ async def prepare_envs(workdir_path: pathlib.Path, recreate: bool) -> None: except config_models.ConfigurationError as exception: raise PrepareEnvsFailed( f"Reading project config and collecting actions in {project.dir_path} failed: {exception.message}" - ) + ) from exception try: # try to start runner in 'dev_workspace' env of each project. If venv doesn't @@ -83,7 +83,7 @@ async def prepare_envs(workdir_path: pathlib.Path, recreate: bool) -> None: except runner_manager.RunnerFailedToStart as exception: raise PrepareEnvsFailed( f"Starting runners with presets failed: {exception.message}" - ) + ) from exception # now all 'dev_workspace' envs are valid, run 'prepare_runners' in them to create # venvs and install runners and presets in them @@ -106,8 +106,14 @@ async def prepare_envs(workdir_path: pathlib.Path, recreate: bool) -> None: ( result_output, result_return_code, + _ ) = await utils.run_actions_in_projects_and_concat_results( - actions_by_projects, action_payload, ws_context, concurrently=True + actions_by_projects, + action_payload, + ws_context, + concurrently=True, + run_trigger=run_service.RunActionTrigger.USER, + dev_env=run_service.DevEnv.CLI, ) except run_service.ActionRunFailed as error: logger.error(error.message) @@ -127,8 +133,14 @@ async def prepare_envs(workdir_path: pathlib.Path, recreate: bool) -> None: ( result_output, result_return_code, + _ ) = await utils.run_actions_in_projects_and_concat_results( - actions_by_projects, action_payload, ws_context, concurrently=True + actions_by_projects, + action_payload, + ws_context, + concurrently=True, + run_trigger=run_service.RunActionTrigger.USER, + dev_env=run_service.DevEnv.CLI, ) 
except run_service.ActionRunFailed as error: logger.error(error.message) @@ -192,13 +204,11 @@ async def check_or_recreate_all_dev_workspace_envs( current_project_dir_path = ws_context.ws_dirs_paths[0] current_project = ws_context.ws_projects[current_project_dir_path] try: - await runner_manager.start_runner( - project_def=current_project, env_name="dev_workspace", ws_context=ws_context - ) + await runner_manager._start_dev_workspace_runner(project_def=current_project, ws_context=ws_context) except runner_manager.RunnerFailedToStart as exception: raise PrepareEnvsFailed( f"Failed to start `dev_workspace` runner in {current_project.name}: {exception.message}" - ) + ) from exception envs = [] @@ -251,15 +261,17 @@ async def check_or_recreate_all_dev_workspace_envs( }, project_def=current_project, ws_context=ws_context, - result_format=run_service.RunResultFormat.STRING, + result_formats=[run_service.RunResultFormat.STRING], preprocess_payload=False, + run_trigger=run_service.RunActionTrigger.USER, + dev_env=run_service.DevEnv.CLI, ) except run_service.ActionRunFailed as exception: raise PrepareEnvsFailed( f"'prepare_dev_workspaces_env' failed in {current_project.name}: {exception.message}" - ) + ) from exception if action_result.return_code != 0: raise PrepareEnvsFailed( - f"'prepare_dev_workspaces_env' ended in {current_project.name} with return code {action_result.return_code}: {action_result.result}" + f"'prepare_dev_workspaces_env' ended in {current_project.name} with return code {action_result.return_code}: {action_result.result_by_format['string']}" ) diff --git a/src/finecode/cli_app/commands/run_cmd.py b/src/finecode/cli_app/commands/run_cmd.py index de205fdf..88ea1acd 100644 --- a/src/finecode/cli_app/commands/run_cmd.py +++ b/src/finecode/cli_app/commands/run_cmd.py @@ -1,4 +1,7 @@ +import json import pathlib +import sys +import typing import ordered_set from loguru import logger @@ -22,8 +25,13 @@ async def run_actions( actions: list[str], action_payload: 
dict[str, str], concurrently: bool, -) -> tuple[str, int]: + handler_config_overrides: dict[str, dict[str, dict[str, str]]] | None = None, + save_results: bool = True, + map_payload_fields: set[str] | None = None, +) -> utils.RunActionsResult: ws_context = context.WorkspaceContext([workdir_path]) + if handler_config_overrides: + ws_context.handler_config_overrides = handler_config_overrides await read_configs.read_projects_in_dir( dir_path=workdir_path, ws_context=ws_context ) @@ -108,7 +116,7 @@ async def run_actions( except config_models.ConfigurationError as exception: raise RunFailed( f"Reading project config and collecting actions in {project.dir_path} failed: {exception.message}" - ) + ) from exception try: # 1. Start runners with presets to be able to resolve presets. Presets are @@ -117,9 +125,9 @@ async def run_actions( await runner_manager.start_runners_with_presets(projects, ws_context) except runner_manager.RunnerFailedToStart as exception: raise RunFailed( - f"One or more projects are misconfigured, runners for them didn't" - f" start: {exception.message}. Check logs for details." - ) + "One or more projects are misconfigured, runners for them didn't" + + f" start: {exception.message}. Check logs for details." 
+ ) from exception except Exception as exception: logger.error("Unexpected exception:") logger.exception(exception) @@ -148,19 +156,37 @@ async def run_actions( try: await run_service.start_required_environments( - actions_by_projects, ws_context, update_config_in_running_runners=True + actions_by_projects, + ws_context, + update_config_in_running_runners=True, ) except run_service.StartingEnvironmentsFailed as exception: raise RunFailed( f"Failed to start environments for running actions: {exception.message}" + ) from exception + + payload_overrides_by_project: dict[str, dict[str, typing.Any]] = {} + if map_payload_fields: + payload_overrides_by_project = resolve_mapped_payload_fields( + map_payload_fields=map_payload_fields, + action_payload=action_payload, ) try: return await utils.run_actions_in_projects_and_concat_results( - actions_by_projects, action_payload, ws_context, concurrently + actions_by_projects, + action_payload, + ws_context, + concurrently, + run_trigger=run_service.RunActionTrigger.USER, + dev_env=run_service.DevEnv.CLI, + output_json=save_results, + payload_overrides_by_project=payload_overrides_by_project, ) except run_service.ActionRunFailed as exception: - raise RunFailed(f"Failed to run actions: {exception.message}") + raise RunFailed( + f"Failed to run actions: {exception.message}" + ) from exception finally: shutdown_service.on_shutdown(ws_context) @@ -178,13 +204,56 @@ def get_projects_by_names( for project in ws_context.ws_projects.values() if project.name == project_name ) - except StopIteration: + except StopIteration as exception: raise RunFailed( f"Project '{projects_names}' not found in working directory '{workdir_path}'" - ) + ) from exception projects.append(project) return projects +def resolve_mapped_payload_fields( + map_payload_fields: set[str], + action_payload: dict[str, typing.Any], +) -> dict[str, dict[str, typing.Any]]: + """Resolve mapped payload fields from saved results. 
+ + Returns a dict keyed by project path string, where each value is a dict + of field overrides for that project. + """ + results_dir = pathlib.Path(sys.executable).parent.parent / "cache" / "finecode" / "results" + payload_overrides_by_project: dict[str, dict[str, typing.Any]] = {} + + for field_name in map_payload_fields: + raw_value = action_payload.get(field_name) + if raw_value is None: + raise RunFailed( + f"Mapped payload field '{field_name}' not found in action payload" + ) + + action_name, field_path = str(raw_value).split(".", 1) + result_file = results_dir / f"{action_name}.json" + if not result_file.exists(): + raise RunFailed( + f"Results file '{result_file}' not found for mapped field '{field_name}'" + ) + + results_by_project: dict[str, typing.Any] = json.loads(result_file.read_text()) + for project_path, project_result in results_by_project.items(): + resolved_value = project_result + for key in field_path.split("."): + if not isinstance(resolved_value, dict): + raise RunFailed( + f"Cannot resolve '{field_path}' in results of '{action_name}' for project '{project_path}'" + ) + resolved_value = resolved_value.get(key) + + if project_path not in payload_overrides_by_project: + payload_overrides_by_project[project_path] = {} + payload_overrides_by_project[project_path][field_name] = resolved_value + + return payload_overrides_by_project + + __all__ = ["run_actions"] diff --git a/src/finecode/cli_app/utils.py b/src/finecode/cli_app/utils.py index bd3dcaa2..ae61e08d 100644 --- a/src/finecode/cli_app/utils.py +++ b/src/finecode/cli_app/utils.py @@ -1,11 +1,19 @@ import pathlib +import typing import click from finecode import context +from finecode.runner import runner_client from finecode.services import run_service +class RunActionsResult(typing.NamedTuple): + output: str + return_code: int + result_by_project: dict[pathlib.Path, dict[str, runner_client.RunActionResponse]] + + def run_result_to_str( run_result: str | dict[str, list[str | dict[str, str | 
bool]]], action_name: str ) -> str: @@ -81,13 +89,24 @@ async def run_actions_in_projects_and_concat_results( action_payload: dict[str, str], ws_context: context.WorkspaceContext, concurrently: bool, -) -> tuple[str, int]: + run_trigger: run_service.RunActionTrigger, + dev_env: run_service.DevEnv, + output_json: bool = False, + payload_overrides_by_project: dict[str, dict[str, typing.Any]] | None = None, +) -> RunActionsResult: + result_formats = [run_service.RunResultFormat.STRING] + if output_json: + result_formats.append(run_service.RunResultFormat.JSON) + result_by_project = await run_service.run_actions_in_projects( actions_by_project=actions_by_project, action_payload=action_payload, ws_context=ws_context, concurrently=concurrently, - result_format=run_service.RunResultFormat.STRING, + result_formats=result_formats, + run_trigger=run_trigger, + dev_env=dev_env, + payload_overrides_by_project=payload_overrides_by_project or {}, ) result_output: str = "" @@ -109,11 +128,16 @@ async def run_actions_in_projects_and_concat_results( for action_name, action_result in result_by_action.items(): if run_many_actions: result_output += f"{click.style(action_name, bold=True)}:" - action_result_str = run_result_to_str(action_result.result, action_name) + + action_result_str = run_result_to_str(action_result.text(), action_name) result_output += action_result_str result_return_code |= action_result.return_code if is_first_project: is_first_project = False - return (result_output, result_return_code) + return RunActionsResult( + output=result_output, + return_code=result_return_code, + result_by_project=result_by_project, + ) diff --git a/src/finecode/config/collect_actions.py b/src/finecode/config/collect_actions.py index 669c689f..52972995 100644 --- a/src/finecode/config/collect_actions.py +++ b/src/finecode/config/collect_actions.py @@ -15,26 +15,96 @@ def collect_actions( # - project expected to include finecode try: project = ws_context.ws_projects[project_path] - 
except KeyError: + except KeyError as exception: raise ValueError( f"Project {project_path} doesn't exist." - f" Existing projects: {ws_context.ws_projects}" - ) + + f" Existing projects: {ws_context.ws_projects}" + ) from exception try: config = ws_context.ws_projects_raw_configs[project_path] - except KeyError: - raise Exception("First you need to parse config of project") + except KeyError as exception: + raise Exception("First you need to parse config of project") from exception actions = _collect_actions_in_config(config) project.actions = actions action_handler_configs = _collect_action_handler_configs_in_config(config) + + # Apply overrides + # + # Merge handler config overrides from ws_context if available + if ws_context.handler_config_overrides: + for action in project.actions: + action_overrides = ws_context.handler_config_overrides.get(action.name, {}) + if not action_overrides: + continue + + for handler in action.handlers: + # Check for action-level overrides (empty string key) + action_level_overrides = action_overrides.get("", {}) + # Check for handler-specific overrides + handler_overrides = action_overrides.get(handler.name, {}) + + # Merge overrides if any exist + if action_level_overrides or handler_overrides: + if handler.source not in action_handler_configs: + action_handler_configs[handler.source] = {} + # Action-level first, then handler-specific (handler takes precedence) + action_handler_configs[handler.source] = { + **action_handler_configs[handler.source], + **action_level_overrides, + **handler_overrides, + } + project.action_handler_configs = action_handler_configs return actions +def collect_services( + project_path: Path, + ws_context: context.WorkspaceContext, +) -> list[domain.ServiceDeclaration]: + try: + project = ws_context.ws_projects[project_path] + except KeyError as exception: + raise ValueError( + f"Project {project_path} doesn't exist." 
+ + f" Existing projects: {ws_context.ws_projects}" + ) from exception + + try: + config = ws_context.ws_projects_raw_configs[project_path] + except KeyError as exception: + raise Exception("First you need to parse config of project") from exception + + services = _collect_services_in_config(config) + project.services = services + return services + + +def _collect_services_in_config( + config: dict[str, Any], +) -> list[domain.ServiceDeclaration]: + services: list[domain.ServiceDeclaration] = [] + for service_def_raw in config["tool"]["finecode"].get("service", []): + try: + service_def = config_models.ServiceDefinition(**service_def_raw) + except config_models.ValidationError as exception: + raise config_models.ConfigurationError(str(exception)) from exception + + services.append( + domain.ServiceDeclaration( + interface=service_def.interface, + source=service_def.source, + env=service_def.env, + dependencies=service_def.dependencies, + ) + ) + return services + + def _collect_action_handler_configs_in_config( config: dict[str, Any], ) -> dict[str, dict[str, Any]]: @@ -63,7 +133,7 @@ def _collect_actions_in_config( try: action_def = config_models.ActionDefinition(**action_def_raw) except config_models.ValidationError as exception: - raise config_models.ConfigurationError(str(exception)) + raise config_models.ConfigurationError(str(exception)) from exception new_action = domain.Action( name=action_name, @@ -76,6 +146,7 @@ def _collect_actions_in_config( dependencies=handler.dependencies, ) for handler in action_def.handlers + if handler.enabled ], source=action_def.source, config=action_def.config or {}, diff --git a/src/finecode/config/config_models.py b/src/finecode/config/config_models.py index 5c9b1729..bdaaa5db 100644 --- a/src/finecode/config/config_models.py +++ b/src/finecode/config/config_models.py @@ -23,15 +23,24 @@ class PresetDefinition(BaseModel): class ActionHandlerDefinition(BaseModel): name: str + source: str = "" + env: str = "" + dependencies: 
list[str] = [] + config: dict[str, Any] | None = None + enabled: bool = True + + +class ServiceDefinition(BaseModel): + interface: str source: str env: str dependencies: list[str] = [] - config: dict[str, Any] | None = None class ActionDefinition(BaseModel): source: str handlers: list[ActionHandlerDefinition] = [] + handlers_mode: str = "merge" # "merge" or "replace" config: dict[str, Any] | None = None diff --git a/src/finecode/config/read_configs.py b/src/finecode/config/read_configs.py index 5ba16de5..9b3ebb04 100644 --- a/src/finecode/config/read_configs.py +++ b/src/finecode/config/read_configs.py @@ -171,6 +171,7 @@ async def read_project_config( add_extension_runner_to_dependencies(project_config) merge_handlers_dependencies_into_groups(project_config) + merge_services_dependencies_into_groups(project_config) ws_context.ws_projects_raw_configs[project.dir_path] = project_config @@ -201,8 +202,8 @@ async def get_preset_project_path( return None try: preset_project_path = Path(resolve_path_result["packagePath"]) - except KeyError: - raise ValueError(f"Preset source cannot be resolved: {preset.source}") + except KeyError as exception: + raise ValueError(f"Preset source cannot be resolved: {preset.source}") from exception logger.trace(f"Got: {preset.source} -> {preset_project_path}") return preset_project_path @@ -384,18 +385,30 @@ def _merge_projects_configs( # Handle handlers array merge by name if "handlers" in action_info: - if "handlers" not in tool_finecode_config1[key][action_name]: - tool_finecode_config1[key][action_name]["handlers"] = [] + handlers_mode = action_info.get("handlers_mode", "merge") + if handlers_mode == "replace": + tool_finecode_config1[key][action_name]["handlers"] = action_info["handlers"] + else: + if "handlers" not in tool_finecode_config1[key][action_name]: + tool_finecode_config1[key][action_name]["handlers"] = [] - existing_handlers = tool_finecode_config1[key][action_name][ - "handlers" - ] - new_handlers = 
action_info["handlers"] + existing_handlers = tool_finecode_config1[key][action_name][ + "handlers" + ] + new_handlers = action_info["handlers"] - # Merge handlers by name - _merge_object_array_by_key( - existing_handlers, new_handlers, "name" - ) + # Merge handlers by name + _merge_object_array_by_key( + existing_handlers, new_handlers, "name" + ) + elif key == "service": + if key not in tool_finecode_config1: + tool_finecode_config1[key] = [] + existing = tool_finecode_config1[key] + if isinstance(value, list): + _merge_object_array_by_key(existing, value, "interface") + else: + tool_finecode_config1[key] = value elif key == "action_handler": # Handle action_handler array merge by source if key not in tool_finecode_config1: @@ -611,6 +624,35 @@ def merge_handlers_dependencies_into_groups(project_config: dict[str, Any]) -> N deps_groups[group_name] = unique_deps +def merge_services_dependencies_into_groups(project_config: dict[str, Any]) -> None: + # tool.finecode.service[x].dependencies + services_list = project_config.get("tool", {}).get("finecode", {}).get("service", []) + if "dependency-groups" not in project_config: + project_config["dependency-groups"] = {} + deps_groups = project_config["dependency-groups"] + + for service in services_list: + service_env = service.get("env", None) + if service_env is None: + logger.warning(f"Service {service} has no env, skip it") + continue + deps = service.get("dependencies", []) + + if service_env not in deps_groups: + deps_groups[service_env] = [] + + env_deps = deps_groups[service_env] + env_deps += deps + + for group_name in deps_groups.keys(): + deps_list = deps_groups[group_name] + unique_deps = [] + for dep in deps_list: + if dep not in unique_deps: + unique_deps.append(dep) + deps_groups[group_name] = unique_deps + + def add_extension_runner_to_dependencies(project_config: dict[str, Any]) -> None: try: deps_groups = project_config["dependency-groups"] diff --git a/src/finecode/context.py b/src/finecode/context.py 
index 7bd09149..d28d7070 100644 --- a/src/finecode/context.py +++ b/src/finecode/context.py @@ -8,7 +8,7 @@ if TYPE_CHECKING: from finecode.runner.runner_client import ExtensionRunnerInfo - from finecode.runner._io_thread import AsyncIOThread + from finecode_jsonrpc._io_thread import AsyncIOThread @dataclass @@ -25,12 +25,21 @@ class WorkspaceContext: default_factory=dict ) runner_io_thread: AsyncIOThread | None = None - ignore_watch_paths: set[Path] = field(default_factory=set) - # we save list of meta and pygls manages content of documents automatically. - # They can be accessed using `ls.workspace.get_text_document()` function + # LSP doesn't provide endpoint to get opened files on client. The server should + # listen to didOpen and didClose events and manage state by itself. In this + # dictionary meta info of opened document is stored to be able to provide opened files + # to ERs in case of their restart. + # TODO: move in LSP server opened_documents: dict[str, domain.TextDocumentInfo] = field(default_factory=dict) + # Handler config overrides from CLI env vars or options. + # Format: {action_name: {handler_name_or_empty: {param: value}}} + # Empty string key "" means action-level (applies to all handlers). 
+ handler_config_overrides: dict[str, dict[str, dict[str, str]]] = field( + default_factory=dict + ) + # cache # > project_path_by_dir_and_action: dict[str, dict[str, Path]] = field( diff --git a/src/finecode/domain.py b/src/finecode/domain.py index 436774a6..dc0c1985 100644 --- a/src/finecode/domain.py +++ b/src/finecode/domain.py @@ -11,6 +11,12 @@ class Preset: def __init__(self, source: str) -> None: self.source = source + def __str__(self) -> str: + return f'Preset(source="{self.source}")' + + def __repr__(self) -> str: + return str(self) + class ActionHandler: def __init__( @@ -27,6 +33,12 @@ def __init__( self.env: str = env self.dependencies: list[str] = dependencies + def __str__(self) -> str: + return f'ActionHandler(name="{self.name}", source="{self.source}", env="{self.env}")' + + def __repr__(self) -> str: + return str(self) + def to_dict(self) -> dict[str, typing.Any]: return { "name": self.name, @@ -37,6 +49,34 @@ def to_dict(self) -> dict[str, typing.Any]: } +class ServiceDeclaration: + def __init__( + self, + interface: str, + source: str, + env: str, + dependencies: list[str], + ): + self.interface = interface + self.source = source + self.env = env + self.dependencies = dependencies + + def __str__(self) -> str: + return f'ServiceDeclaration(interface="{self.interface}", source="{self.source}", env="{self.env}")' + + def __repr__(self) -> str: + return str(self) + + def to_dict(self) -> dict[str, typing.Any]: + return { + "interface": self.interface, + "source": self.source, + "env": self.env, + "dependencies": self.dependencies, + } + + class Action: def __init__( self, @@ -50,6 +90,13 @@ def __init__( self.handlers: list[ActionHandler] = handlers self.config = config + def __str__(self) -> str: + handler_names = [h.name for h in self.handlers] + return f'Action(name="{self.name}", handlers={handler_names})' + + def __repr__(self) -> str: + return str(self) + def to_dict(self) -> dict[str, typing.Any]: return { "name": self.name, @@ -76,6 +123,7 
@@ def __init__( # None means actions were not collected yet # if project.status is RUNNING, then actions are not None self.actions = actions + self.services: list[ServiceDeclaration] = [] # config by handler source self.action_handler_configs: dict[str, dict[str, typing.Any]] = {} # config by env name @@ -100,6 +148,7 @@ def envs(self) -> list[str]: for action in self.actions: action_envs = [handler.env for handler in action.handlers] all_envs_set |= ordered_set.OrderedSet(action_envs) + all_envs_set |= ordered_set.OrderedSet([svc.env for svc in self.services]) return list(all_envs_set) @@ -116,11 +165,23 @@ class RunnerConfig: def __init__(self, debug: bool) -> None: self.debug = debug + def __str__(self) -> str: + return f"RunnerConfig(debug={self.debug})" + + def __repr__(self) -> str: + return str(self) + class EnvConfig: def __init__(self, runner_config: RunnerConfig) -> None: self.runner_config = runner_config + def __str__(self) -> str: + return f"EnvConfig(runner_config={self.runner_config})" + + def __repr__(self) -> str: + return str(self) + RootActions = list[str] ActionsDict = dict[str, Action] @@ -134,7 +195,7 @@ def __init__(self, runner_config: RunnerConfig) -> None: class TextDocumentInfo: - def __init__(self, uri: str, version: str) -> None: + def __init__(self, uri: str, version: str | int) -> None: self.uri = uri self.version = version @@ -156,6 +217,7 @@ class PartialResult(typing.NamedTuple): "ActionsDict", "AllActions", "Action", + "ServiceDeclaration", "Project", "TextDocumentInfo", "RunnerConfig", diff --git a/src/finecode/domain_helpers.py b/src/finecode/domain_helpers.py new file mode 100644 index 00000000..38807a6d --- /dev/null +++ b/src/finecode/domain_helpers.py @@ -0,0 +1,38 @@ +""" +Domain helper functions that operate on domain models but don't belong +directly in the domain module. 
+""" + +from finecode import domain + + +def collect_all_handlers_to_initialize( + project: domain.Project, + env_name: str, +) -> dict[str, list[str]]: + """Collect all handler names per action for the given env.""" + assert project.actions is not None + result: dict[str, list[str]] = {} + for action in project.actions: + handler_names = [h.name for h in action.handlers if h.env == env_name] + if handler_names: + result[action.name] = handler_names + return result + + +def collect_handlers_to_initialize_for_actions( + project: domain.Project, + env_name: str, + action_names: list[str], +) -> dict[str, list[str]]: + """Collect handler names per action for the given env, filtered by action names.""" + assert project.actions is not None + result: dict[str, list[str]] = {} + action_names_set = set(action_names) + for action in project.actions: + if action.name not in action_names_set: + continue + handler_names = [h.name for h in action.handlers if h.env == env_name] + if handler_names: + result[action.name] = handler_names + return result diff --git a/src/finecode/finecode_cmd.py b/src/finecode/finecode_cmd.py index 89394379..435de239 100644 --- a/src/finecode/finecode_cmd.py +++ b/src/finecode/finecode_cmd.py @@ -11,6 +11,10 @@ def get_python_cmd(project_path: Path, env_name: str) -> str: venv_python_path = venv_dir_path / "bin" / "python" if not venv_python_path.exists(): + # `Path.exists` returns False for invalid symlinks + if venv_python_path.is_symlink(): + raise ValueError(f"{env_name} venv is broken in project {project_path} (symlink cannot be resolved)") + raise ValueError(f"{env_name} venv not found in project {project_path}") return venv_python_path.as_posix() diff --git a/src/finecode/logger_utils.py b/src/finecode/logger_utils.py index 4aedf7af..66fa883c 100644 --- a/src/finecode/logger_utils.py +++ b/src/finecode/logger_utils.py @@ -19,9 +19,10 @@ def init_logger(trace: bool, stdout: bool = False): activation=[ ("pygls.protocol.json_rpc", False), 
("pygls.feature_manager", False), - ("pygls.io_", False), + # ("pygls.io_", False), ] ) + logs.set_log_level_for_group(group="finecode_jsonrpc.client", level=logs.LogLevel.INFO) logs.save_logs_to_file( file_path=logs_dir_path / "workspace_manager.log", log_level="TRACE" if trace else "INFO", diff --git a/src/finecode/lsp_server/api.proto b/src/finecode/lsp_server/api.proto deleted file mode 100644 index 0f594e98..00000000 --- a/src/finecode/lsp_server/api.proto +++ /dev/null @@ -1,64 +0,0 @@ -syntax = "proto3"; - -package finecode; - -message AddWorkspaceDirRequest { - string dir_path = 1; -} - -message AddWorkspaceDirResponse { -} - -message DeleteWorkspaceDirRequest { - string dir_path = 1; -} - -message DeleteWorkspaceDirResponse { - -} - -message ListActionsRequest { - string parent_node_id = 1; -} - -message ActionTreeNode { - string node_id = 1; - string name = 2; - NodeType node_type = 3; - repeated ActionTreeNode subnodes = 4; - - enum NodeType { - DIRECTORY = 0; - PACKAGE = 1; - ACTION = 2; - PRESET = 3; - } -} - -message ListActionsResponse { - repeated ActionTreeNode nodes = 1; -} - -message RunActionRequest { - string action_node_id = 1; - // following cases are possible: - // 1. Running action on existing file: - // 1.1 Unsaved: both apply_on and apply_on_text are passed - // 1.2 Saved: either apply_on or both are passed - // 2. Running action on a new unsaved yet file: only apply_on_text is passed. 
- string apply_on = 2; - string apply_on_text = 3; -} - -message RunActionResponse { - // if target was passed as text and was changed, result is returned in this parameter - string result_text = 1; -} - -service WorkspaceManagerService { - rpc AddWorkspaceDir(AddWorkspaceDirRequest) returns (AddWorkspaceDirResponse); - rpc DeleteWorkspaceDir(DeleteWorkspaceDirRequest) returns (DeleteWorkspaceDirResponse); - - rpc ListActions(ListActionsRequest) returns (ListActionsResponse); - rpc RunAction(RunActionRequest) returns (RunActionResponse); -} diff --git a/src/finecode/lsp_server/endpoints/action_tree.py b/src/finecode/lsp_server/endpoints/action_tree.py index 7d77a54d..0cbde886 100644 --- a/src/finecode/lsp_server/endpoints/action_tree.py +++ b/src/finecode/lsp_server/endpoints/action_tree.py @@ -17,7 +17,9 @@ async def list_actions(ls: LanguageServer, params): logger.info(f"list_actions {params}") await global_state.server_initialized.wait() - parent_node_id = params[0] + # params is expected to be a list, but pygls seems to pass the first element of list + # if the list contains only one element. 
Test after migration from pygls + parent_node_id = params # params[0] request = schemas.ListActionsRequest(parent_node_id=parent_node_id) result = await _list_actions(request=request) return result.model_dump(by_alias=True) @@ -40,6 +42,7 @@ def get_project_action_tree( actions_nodes: list[schemas.ActionTreeNode] = [] if project.status == domain.ProjectStatus.CONFIG_VALID: assert project.actions is not None + action_nodes: list[schemas.ActionTreeNode] = [] for action in project.actions: node_id = f"{project.dir_path.as_posix()}::{action.name}" handlers_nodes = [ @@ -52,7 +55,7 @@ def get_project_action_tree( ) for handler in action.handlers ] - actions_nodes.append( + action_nodes.append( schemas.ActionTreeNode( node_id=node_id, name=action.name, @@ -66,6 +69,41 @@ def get_project_action_tree( project_path=project.dir_path, action_name=action.name, ) + + node_id = f"{project.dir_path.as_posix()}::actions" + actions_nodes.append( + schemas.ActionTreeNode( + node_id=node_id, + name="Actions", + node_type=schemas.ActionTreeNode.NodeType.ACTION_GROUP, + subnodes=action_nodes, + status="", + ) + ) + + envs_nodes: list[schemas.ActionTreeNode] = [] + for env in project.envs: + node_id = f"{project.dir_path.as_posix()}::envs::{env}" + envs_nodes.append( + schemas.ActionTreeNode( + node_id=node_id, + name=env, + node_type=schemas.ActionTreeNode.NodeType.ENV, + subnodes=[], + status="", + ) + ) + + node_id = f"{project.dir_path.as_posix()}::envs" + actions_nodes.append( + schemas.ActionTreeNode( + node_id=node_id, + name="Environments", + node_type=schemas.ActionTreeNode.NodeType.ENV_GROUP, + subnodes=envs_nodes, + status="", + ) + ) else: logger.info( f"Project has no valid config and finecode: {project.dir_path}, no actions will be shown" @@ -85,19 +123,16 @@ def create_node_list_for_ws( all_projects_paths = list(ws_context.ws_projects.keys()) all_projects_paths.sort() + # use sets to assign each project path to a single workspace directory + all_projects_paths_set = 
ordered_set.OrderedSet(all_projects_paths) - while len(all_ws_dirs) > 0: - ws_dir = all_ws_dirs.pop() - projects_by_ws_dir[ws_dir] = [] - - while True: - project_path = all_projects_paths[0] - if project_path.is_relative_to(ws_dir): - projects_by_ws_dir[ws_dir].append(project_path) - all_projects_paths.pop(0) + for ws_dir in all_ws_dirs: + ws_dir_project_paths = [project_path for project_path in all_projects_paths_set if project_path.is_relative_to(ws_dir)] + projects_by_ws_dir[ws_dir] = ws_dir_project_paths + all_projects_paths_set -= ordered_set.OrderedSet(ws_dir_project_paths) - if len(all_projects_paths) == 0: - break + if len(all_projects_paths_set) > 0: + logger.warning(f"Unexpected setup: these projects {all_projects_paths_set} don't belong to any of workspace dirs: {all_ws_dirs}") # build node tree so that: # - all ws dirs are in tree either as project or directory diff --git a/src/finecode/lsp_server/endpoints/diagnostics.py b/src/finecode/lsp_server/endpoints/diagnostics.py index b1e9f2d0..2bd0e9f5 100644 --- a/src/finecode/lsp_server/endpoints/diagnostics.py +++ b/src/finecode/lsp_server/endpoints/diagnostics.py @@ -12,8 +12,6 @@ from finecode import ( context, - domain, - project_analyzer, pygls_types_utils, ) from finecode.services import run_service @@ -63,11 +61,14 @@ async def document_diagnostic_with_full_result( response = await run_service.find_action_project_and_run( file_path=file_path, action_name="lint", - # TODO: use payload class params={ + "target": "files", "file_paths": [file_path], }, + run_trigger=run_service.RunActionTrigger.SYSTEM, + dev_env=run_service.DevEnv.IDE, ws_context=global_state.ws_context, + initialize_all_handlers=True, ) except run_service.ActionRunFailed as error: # don't throw error because vscode after a few sequential errors will stop @@ -82,7 +83,7 @@ async def document_diagnostic_with_full_result( # (default dataclass constructor doesn't handle nested items, it stores them just # as dict) result_type = 
pydantic_dataclass(lint_action.LintRunResult) - lint_result: lint_action.LintRunResult = result_type(**response.result) + lint_result: lint_action.LintRunResult = result_type(**response.json()) try: requested_file_messages = lint_result.messages.pop(str(file_path)) @@ -131,7 +132,10 @@ async def document_diagnostic_with_partial_results( "file_paths": [file_path], }, partial_result_token=partial_result_token, + run_trigger=run_service.RunActionTrigger.SYSTEM, + dev_env=run_service.DevEnv.IDE, ws_context=global_state.ws_context, + initialize_all_handlers=True, ) as response: # LSP defines that the first response should be `DocumentDiagnosticReport` # with diagnostics information for requested file and then n responses @@ -249,7 +253,10 @@ async def run_workspace_diagnostic_with_partial_results( params=exec_info.request_data, partial_result_token=partial_result_token, project_dir_path=exec_info.project_dir_path, + run_trigger=run_service.RunActionTrigger.SYSTEM, + dev_env=run_service.DevEnv.IDE, ws_context=global_state.ws_context, + initialize_all_handlers=True, ) as response: # use pydantic dataclass to convert dict to dataclass instance recursively # (default dataclass constructor doesn't handle nested items, it stores them just @@ -314,7 +321,10 @@ async def workspace_diagnostic_with_full_result( params=exec_info.request_data, project_def=project, ws_context=ws_context, + run_trigger=run_service.RunActionTrigger.SYSTEM, + dev_env=run_service.DevEnv.IDE, preprocess_payload=False, + initialize_all_handlers=True, ) ) send_tasks.append(task) @@ -352,48 +362,38 @@ async def _workspace_diagnostic( params: types.WorkspaceDiagnosticParams, ) -> types.WorkspaceDiagnosticReport | None: relevant_projects_paths: list[Path] = run_service.find_all_projects_with_action( - action_name="lint", ws_context=global_state.ws_context + # check lint_files, because 'lint' is builtin and exists in all projects by default + action_name="lint_files_python", + 
ws_context=global_state.ws_context, # TODO: correct check of name ) exec_info_by_project_dir_path: dict[Path, LintActionExecInfo] = {} + actions_by_projects: dict[Path, list[str]] = {} for project_dir_path in relevant_projects_paths: - project = global_state.ws_context.ws_projects[project_dir_path] exec_info_by_project_dir_path[project_dir_path] = LintActionExecInfo( - project_dir_path=project_dir_path, action_name="lint" + project_dir_path=project_dir_path, + action_name="lint", + request_data={"target": "project", "trigger": "system", "dev_env": "ide"}, ) - - # find which runner is responsible for which files - # currently FineCode supports only raw python files, find them in each ws project - # exclude projects without finecode - # if both parent and child projects have lint action, exclude files of chid from - # parent - # check which runners are active and run in them - # - # assign files to projects - files_by_projects: dict[ - Path, list[Path] - ] = await project_analyzer.get_files_by_projects( - projects_dirs_paths=relevant_projects_paths, ws_context=global_state.ws_context - ) - - for project_dir_path, files_for_runner in files_by_projects.items(): - project = global_state.ws_context.ws_projects[project_dir_path] - if project.status != domain.ProjectStatus.CONFIG_VALID: - logger.warning( - f"Project {project_dir_path} has not valid configuration and finecode," - " lint in it will not be executed" - ) - continue - - exec_info = exec_info_by_project_dir_path[project_dir_path] - if exec_info.action_name == "lint": - exec_info.request_data = { - "file_paths": [file_path.as_posix() for file_path in files_for_runner], - } + actions_by_projects[project_dir_path] = ["lint"] exec_infos = list(exec_info_by_project_dir_path.values()) run_with_partial_results: bool = params.partial_result_token is not None + # linting is resource-intensive task. 
First start all runners and only then begin + # linting to avoid the case, when some of runners start first, take all available + # resources and other stay blocked. Starting of environment has timeout and the + # letter fail with timeout error. + try: + await run_service.start_required_environments( + actions_by_projects, global_state.ws_context, + initialize_all_handlers=True + ) + except run_service.StartingEnvironmentsFailed as exception: + logger.error( + f"Failed to start required environments for running workspace diagnostic: {exception.message}" + ) + if run_with_partial_results: return await workspace_diagnostic_with_partial_results( exec_infos=exec_infos, partial_result_token=params.partial_result_token diff --git a/src/finecode/lsp_server/endpoints/document_sync.py b/src/finecode/lsp_server/endpoints/document_sync.py index d1b5081c..cec258e2 100644 --- a/src/finecode/lsp_server/endpoints/document_sync.py +++ b/src/finecode/lsp_server/endpoints/document_sync.py @@ -106,3 +106,57 @@ async def document_did_change( global_state.ws_context.opened_documents[ params.text_document.uri ].version = params.text_document.version + + logger.trace(f"Document did change: {params.text_document.uri}") + file_path = Path(params.text_document.uri.replace("file://", "")) + projects_paths = [ + project_path + for project_path, project in global_state.ws_context.ws_projects.items() + if project.status == domain.ProjectStatus.CONFIG_VALID + and file_path.is_relative_to(project_path) + ] + + content_changes = [] + for change in params.content_changes: + if isinstance(change, types.TextDocumentContentChangePartial): + mapped_change = runner_client.TextDocumentContentChangePartial( + range=runner_client.Range( + start=runner_client.Position(line=change.range.start.line, character=change.range.start.character), + end=runner_client.Position(line=change.range.end.line, character=change.range.end.character) + ), + text=change.text, + range_length=change.range_length + ) + 
content_changes.append(mapped_change) + elif isinstance(change, types.TextDocumentContentChangeWholeDocument): + mapped_change = runner_client.TextDocumentContentChangeWholeDocument(text=change.text) + content_changes.append(mapped_change) + else: + logger.error(f"Got unsupported content change from LSP client: {type(change)}, skip it") + continue + + change_params = runner_client.DidChangeTextDocumentParams( + text_document=runner_client.VersionedTextDocumentIdentifier(version=params.text_document.version, uri=params.text_document.uri), + content_changes=content_changes + ) + + try: + async with asyncio.TaskGroup() as tg: + for project_path in projects_paths: + runners_by_env = global_state.ws_context.ws_projects_extension_runners[ + project_path + ] + for runner in runners_by_env.values(): + if runner.status != runner_client.RunnerStatus.RUNNING: + logger.trace( + f"Runner {runner.readable_id} is not running, skip it" + ) + continue + + tg.create_task( + runner_client.notify_document_did_change( + runner=runner, change_params=change_params + ) + ) + except ExceptionGroup as e: + logger.error(f"Error while sending changed document: {e}") diff --git a/src/finecode/lsp_server/endpoints/formatting.py b/src/finecode/lsp_server/endpoints/formatting.py index e393e54b..05bae535 100644 --- a/src/finecode/lsp_server/endpoints/formatting.py +++ b/src/finecode/lsp_server/endpoints/formatting.py @@ -24,7 +24,10 @@ async def format_document(ls: LanguageServer, params: types.DocumentFormattingPa file_path=file_path, action_name="format", params={"file_paths": [file_path], "save": False}, + run_trigger=run_service.RunActionTrigger.USER, + dev_env=run_service.DevEnv.IDE, ws_context=global_state.ws_context, + initialize_all_handlers=True, ) except Exception as error: # TODO logger.error(f"Error document formatting {file_path}: {error}") @@ -33,7 +36,7 @@ async def format_document(ls: LanguageServer, params: types.DocumentFormattingPa if response is None: return [] - 
response_for_file = response.result.get("result_by_file_path", {}).get( + response_for_file = response.json().get("result_by_file_path", {}).get( str(file_path), None ) if response_for_file is None: diff --git a/src/finecode/lsp_server/endpoints/inlay_hints.py b/src/finecode/lsp_server/endpoints/inlay_hints.py index 0d20b664..74ca3d15 100644 --- a/src/finecode/lsp_server/endpoints/inlay_hints.py +++ b/src/finecode/lsp_server/endpoints/inlay_hints.py @@ -53,7 +53,10 @@ async def document_inlay_hint( file_path=file_path, action_name="text_document_inlay_hint", params=inlay_hint_params_to_dict(params), + run_trigger=run_service.RunActionTrigger.SYSTEM, + dev_env=run_service.DevEnv.IDE, ws_context=global_state.ws_context, + initialize_all_handlers=True, ) except find_project.FileHasNotActionException: # ignore this exception because client requests inlay hints for all workspace @@ -67,7 +70,7 @@ async def document_inlay_hint( if response is None: return [] - hints = response.result.get("hints", None) + hints = response.json().get("hints", None) return [dict_to_inlay_hint(hint) for hint in hints] if hints is not None else None diff --git a/src/finecode/lsp_server/lsp_server.py b/src/finecode/lsp_server/lsp_server.py index 454e73f9..92f3235c 100644 --- a/src/finecode/lsp_server/lsp_server.py +++ b/src/finecode/lsp_server/lsp_server.py @@ -1,15 +1,17 @@ import asyncio +import collections.abc from functools import partial from pathlib import Path from typing import Any from loguru import logger from lsprotocol import types +from pygls.workspace import position_codec from pygls.lsp.server import LanguageServer from finecode_extension_runner.lsp_server import CustomLanguageServer from finecode.services import shutdown_service -from finecode.runner import runner_manager, runner_client +from finecode.runner import runner_manager from finecode.lsp_server import global_state, schemas, services from finecode.lsp_server.endpoints import action_tree as action_tree_endpoints from 
finecode.lsp_server.endpoints import code_actions as code_actions_endpoints @@ -20,7 +22,17 @@ from finecode.lsp_server.endpoints import inlay_hints as inlay_hints_endpoints +def position_from_client_units( + self, lines: collections.abc.Sequence[str], position: types.Position +) -> types.Position: + return position + + def create_lsp_server() -> CustomLanguageServer: + # avoid recalculating of positions by pygls + position_codec.PositionCodec.position_from_client_units = position_from_client_units + + # handle all requests explicitly because there are different types of requests: # project-specific, workspace-wide. Some Workspace-wide support partial responses, # some not. @@ -127,6 +139,11 @@ def create_lsp_server() -> CustomLanguageServer: "finecode.restartExtensionRunner" ) register_restart_extension_runner_cmd(restart_extension_runner) + + register_restart_and_debug_extension_runner_cmd = server.command( + "finecode.restartAndDebugExtensionRunner" + ) + register_restart_and_debug_extension_runner_cmd(restart_and_debug_extension_runner) register_shutdown_feature = server.feature(types.SHUTDOWN) register_shutdown_feature(_on_shutdown) @@ -134,35 +151,35 @@ def create_lsp_server() -> CustomLanguageServer: return server -LOG_LEVEL_MAP = { - "DEBUG": types.MessageType.Debug, - "INFO": types.MessageType.Info, - "SUCCESS": types.MessageType.Info, - "WARNING": types.MessageType.Warning, - "ERROR": types.MessageType.Error, - "CRITICAL": types.MessageType.Error, -} +# LOG_LEVEL_MAP = { +# "DEBUG": types.MessageType.Debug, +# "INFO": types.MessageType.Info, +# "SUCCESS": types.MessageType.Info, +# "WARNING": types.MessageType.Warning, +# "ERROR": types.MessageType.Error, +# "CRITICAL": types.MessageType.Error, +# } async def _on_initialized(ls: LanguageServer, params: types.InitializedParams): - def pass_log_to_ls_client(log) -> None: - # disabling and enabling logging of pygls package is required to avoid logging - # loop, because there are logs inside of log_trace and 
window_log_message - # functions - logger.disable("pygls") - if log.record["level"].no < 10: - # trace - ls.log_trace(types.LogTraceParams(message=log.record["message"])) - else: - level = LOG_LEVEL_MAP.get(log.record["level"].name, types.MessageType.Info) - ls.window_log_message( - types.LogMessageParams(type=level, message=log.record["message"]) - ) - logger.enable("pygls") - # module-specific config should be reapplied after disabling and enabling logger - # for the whole package - # TODO: unify with main - logger.configure(activation=[("pygls.protocol.json_rpc", False)]) + # def pass_log_to_ls_client(log) -> None: + # # disabling and enabling logging of pygls package is required to avoid logging + # # loop, because there are logs inside of log_trace and window_log_message + # # functions + # logger.disable("pygls") + # if log.record["level"].no < 10: + # # trace + # ls.log_trace(types.LogTraceParams(message=log.record["message"])) + # else: + # level = LOG_LEVEL_MAP.get(log.record["level"].name, types.MessageType.Info) + # ls.window_log_message( + # types.LogMessageParams(type=level, message=log.record["message"]) + # ) + # logger.enable("pygls") + # # module-specific config should be reapplied after disabling and enabling logger + # # for the whole package + # # TODO: unify with main + # logger.configure(activation=[("pygls.protocol.json_rpc", False)]) # loguru doesn't support passing partial with ls parameter, use nested function # instead @@ -170,33 +187,8 @@ def pass_log_to_ls_client(log) -> None: # Disabled, because it is not thread-safe and it means not compatible with IO thread # logger.add(sink=pass_log_to_ls_client) - async def get_document( - params: runner_client.GetDocumentParams, - ) -> runner_client.GetDocumentResult: - try: - doc_info = global_state.ws_context.opened_documents[params.uri] - except KeyError: - # this error can happen even if ER processes documents correctly: document - # is opened, action execution starts, user closes the 
document, ER is busy - # at this moment, action execution comes to reading the file before new sync - # of opened documents -> error occurs. ER is expected to be always never - # blocked, but still avoid possible error. - # - # pygls makes all exceptions on server side JsonRpcInternalError and they - # should be matched by text. - # Example: https://github.com/openlawlibrary/pygls/blob/main/tests/ - # lsp/test_errors.py#L108C24-L108C44 - raise Exception("Document is not opened") - - text = ls.workspace.get_text_document(params.uri).source - return runner_client.GetDocumentResult( - uri=params.uri, version=doc_info.version, text=text - ) - logger.info("initialized, adding workspace directories") - services.register_document_getter(get_document) - async def apply_workspace_edit(params): return await ls.workspace_apply_edit_async(params) @@ -216,6 +208,7 @@ def report_progress(token: str | int, value: Any): ls.progress(types.ProgressParams(token, value)) services.register_progress_reporter(report_progress) + services.register_debug_session_starter(partial(start_debug_session, ls)) try: async with asyncio.TaskGroup() as tg: @@ -226,7 +219,7 @@ def report_progress(token: str | int, value: Any): tg.create_task(services.add_workspace_dir(request=request)) except ExceptionGroup as error: logger.exception(error) - raise error + raise error from eg global_state.server_initialized.set() logger.trace("Workspace directories added, end of initialized handler") @@ -256,20 +249,33 @@ def _on_shutdown(ls: LanguageServer, params): async def reset(ls: LanguageServer, params): logger.info("Reset WM") await global_state.server_initialized.wait() - ... 
-async def restart_extension_runner(ls: LanguageServer, params): - logger.info(f"restart extension runners {params}") +async def restart_extension_runner(ls: LanguageServer, tree_node, param2): + logger.info(f"restart extension runner {tree_node}") await global_state.server_initialized.wait() - params_dict = params[0] - runner_working_dir_str = params_dict["projectPath"] + runner_id = tree_node['projectPath'] + splitted_runner_id = runner_id.split('::') + runner_working_dir_str = splitted_runner_id[0] runner_working_dir_path = Path(runner_working_dir_str) + env_name = splitted_runner_id[-1] + + await runner_manager.restart_extension_runner(runner_working_dir_path=runner_working_dir_path, env_name=env_name, ws_context=global_state.ws_context) - await runner_manager.restart_extension_runners( - runner_working_dir_path, global_state.ws_context - ) + +async def restart_and_debug_extension_runner(ls: LanguageServer, tree_node, params2): + logger.info(f"restart and debug extension runner {tree_node} {params2}") + await global_state.server_initialized.wait() + + runner_id = tree_node['projectPath'] + splitted_runner_id = runner_id.split('::') + runner_working_dir_str = splitted_runner_id[0] + runner_working_dir_path = Path(runner_working_dir_str) + env_name = splitted_runner_id[-1] + + logger.info(f'start debugging {runner_working_dir_path} {runner_id} {env_name}') + await runner_manager.restart_extension_runner(runner_working_dir_path=runner_working_dir_path, env_name=env_name, ws_context=global_state.ws_context, debug=True) async def send_user_message_notification( @@ -294,4 +300,11 @@ async def send_user_message_request( ) +async def start_debug_session( + ls: LanguageServer, params +) -> None: + res = await ls.protocol.send_request_async('ide/startDebugging', params) + logger.info(f"started debugging: {res}") + + __all__ = ["create_lsp_server"] diff --git a/src/finecode/lsp_server/schemas.py b/src/finecode/lsp_server/schemas.py index 38b58608..f9d023e8 100644 --- 
a/src/finecode/lsp_server/schemas.py +++ b/src/finecode/lsp_server/schemas.py @@ -44,7 +44,10 @@ class NodeType(IntEnum): DIRECTORY = 0 PROJECT = 1 ACTION = 2 - PRESET = 3 + ACTION_GROUP = 3 + PRESET = 4 + ENV_GROUP = 5 + ENV = 6 class ListActionsResponse(BaseModel): diff --git a/src/finecode/lsp_server/services.py b/src/finecode/lsp_server/services.py index 1af70db7..976b1522 100644 --- a/src/finecode/lsp_server/services.py +++ b/src/finecode/lsp_server/services.py @@ -38,14 +38,14 @@ def register_send_user_message_request_callback(send_user_message_request_callba user_messages._lsp_message_send = send_user_message_request_callback -def register_document_getter(get_document_func): - runner_manager.get_document = get_document_func - - def register_workspace_edit_applier(apply_workspace_edit_func): runner_manager.apply_workspace_edit = apply_workspace_edit_func +def register_debug_session_starter(start_debug_session_func): + runner_manager.start_debug_session = start_debug_session_func + + def register_progress_reporter(report_progress_func): global_state.progress_reporter = report_progress_func @@ -57,7 +57,7 @@ async def add_workspace_dir( dir_path = Path(request.dir_path) if dir_path in global_state.ws_context.ws_dirs_paths: - raise ValueError("Directory is already added") + await user_messages.error(f"Directory {dir_path} is already added") global_state.ws_context.ws_dirs_paths.append(dir_path) new_projects = await read_configs.read_projects_in_dir( @@ -73,10 +73,12 @@ async def add_workspace_dir( try: await runner_manager.start_runners_with_presets( - projects=new_projects, ws_context=global_state.ws_context + projects=new_projects, + ws_context=global_state.ws_context, + initialize_all_handlers=True, ) except runner_manager.RunnerFailedToStart as exception: - raise ValueError(f"Starting runners with presets failed: {exception.message}") + await user_messages.error(f"Starting runners with presets failed: {exception.message}. 
Did you run `finecode prepare-envs` ?") return schemas.AddWorkspaceDirResponse() diff --git a/src/finecode/project_analyzer.py b/src/finecode/project_analyzer.py deleted file mode 100644 index 45782706..00000000 --- a/src/finecode/project_analyzer.py +++ /dev/null @@ -1,55 +0,0 @@ -from pathlib import Path - -from finecode import context -from finecode.services import run_service - - -class FailedToGetProjectFiles(Exception): - def __init__(self, message: str) -> None: - self.message = message - - -async def get_files_by_projects( - projects_dirs_paths: list[Path], ws_context: context.WorkspaceContext -) -> dict[Path, list[Path]]: - files_by_project_dir: dict[Path, list[Path]] = {} - actions_by_project = { - project_dir_path: ["list_project_files_by_lang"] - for project_dir_path in projects_dirs_paths - } - action_payload = {} - - try: - results_by_project = await run_service.run_actions_in_projects( - actions_by_project=actions_by_project, - action_payload=action_payload, - ws_context=ws_context, - concurrently=False, - result_format=run_service.RunResultFormat.JSON, - ) - except run_service.ActionRunFailed as exception: - # TODO: handle it overall - raise FailedToGetProjectFiles(exception.message) - - for project_dir_path, action_results in results_by_project.items(): - list_project_files_action_result = action_results["list_project_files_by_lang"] - if list_project_files_action_result.return_code != 0: - raise FailedToGetProjectFiles( - f"'list_project_files_by_lang' action ended in {project_dir_path} with return code {list_project_files_action_result.return_code}: {list_project_files_action_result.result}" - ) - project_files_by_lang = list_project_files_action_result.result - files_by_project_dir[project_dir_path] = [ - Path(file_path) - for file_path in project_files_by_lang["files_by_lang"].get("python", []) - ] - - return files_by_project_dir - - -async def get_project_files( - project_dir_path: Path, ws_context: context.WorkspaceContext -) -> list[Path]: - 
files_by_projects = await get_files_by_projects( - [project_dir_path], ws_context=ws_context - ) - return files_by_projects[project_dir_path] diff --git a/src/finecode/runner/_internal_client_api.py b/src/finecode/runner/_internal_client_api.py index 05ca77c4..bb8abe12 100644 --- a/src/finecode/runner/_internal_client_api.py +++ b/src/finecode/runner/_internal_client_api.py @@ -2,11 +2,12 @@ Client API used only internally in runner manager or other modules of this package. They are not intended to be used in higher layers. """ +import pathlib from loguru import logger from finecode.runner import _internal_client_types -from finecode.runner.jsonrpc_client import client as jsonrpc_client +from finecode_jsonrpc import client as jsonrpc_client async def initialize( @@ -14,6 +15,7 @@ async def initialize( client_process_id: int, client_name: str, client_version: str, + client_workspace_dir: pathlib.Path ) -> None: logger.debug(f"Send initialize to server {client.readable_id}") await client.send_request( @@ -25,6 +27,7 @@ async def initialize( name=client_name, version=client_version ), trace=_internal_client_types.TraceValue.Verbose, + workspace_folders=[_internal_client_types.WorkspaceFolder(uri=f'file://{client_workspace_dir.as_posix()}', name=client_workspace_dir.name)] ), timeout=20, ) diff --git a/src/finecode/runner/_internal_client_types.py b/src/finecode/runner/_internal_client_types.py index 8013f554..b59843cf 100644 --- a/src/finecode/runner/_internal_client_types.py +++ b/src/finecode/runner/_internal_client_types.py @@ -19,11 +19,11 @@ CANCEL_REQUEST = "$/cancelRequest" PROGRESS = "$/progress" TEXT_DOCUMENT_DID_CLOSE = "textDocument/didClose" +TEXT_DOCUMENT_DID_CHANGE = "textDocument/didChange" TEXT_DOCUMENT_DID_OPEN = "textDocument/didOpen" WORKSPACE_EXECUTE_COMMAND = "workspace/executeCommand" WORKSPACE_APPLY_EDIT = "workspace/applyEdit" -DOCUMENT_GET = "documents/get" PROJECT_RAW_CONFIG_GET = "projects/getRawConfig" @@ -159,6 +159,36 @@ class 
InitializedParams: pass +@dataclasses.dataclass +class GeneralClientCapabilities: + """General client capabilities. + + @since 3.16.0""" + + # Since: 3.16.0 + + position_encodings: collections.abc.Sequence[PositionEncodingKind | str] | None = None + """The position encodings supported by the client. Client and server + have to agree on the same position encoding to ensure that offsets + (e.g. character position in a line) are interpreted the same on both + sides. + + To keep the protocol backwards compatible the following applies: if + the value 'utf-16' is missing from the array of position encodings + servers can assume that the client supports UTF-16. UTF-16 is + therefore a mandatory encoding. + + If omitted it defaults to ['utf-16']. + + Implementation considerations: since the conversion from one encoding + into another requires the content of the file / line the conversion + is best done where the file is read which is usually on the server + side. + + @since 3.17.0""" + # Since: 3.17.0 + + @dataclasses.dataclass class ClientCapabilities: """Defines the capabilities provided by the client.""" @@ -178,7 +208,7 @@ class ClientCapabilities: # window: WindowClientCapabilities | None = None """Window specific client capabilities.""" - # general: GeneralClientCapabilities | None = None + general: GeneralClientCapabilities | None = None """General client capabilities. 
@since 3.16.0""" @@ -1339,48 +1369,93 @@ class StringValue: @dataclasses.dataclass -class GetDocumentParams: - uri: str +class GetProjectRawConfigParams: + project_def_path: str @dataclasses.dataclass -class GetDocumentRequest(BaseRequest): - params: GetDocumentParams - method = "documents/get" +class GetProjectRawConfigRequest(BaseRequest): + params: GetProjectRawConfigParams + method = "projects/getRawConfig" @dataclasses.dataclass -class GetDocumentResult(BaseResult): - uri: str - version: str - text: str +class GetProjectRawConfigResult(BaseResult): + # stringified json + config: str @dataclasses.dataclass -class GetDocumentResponse(BaseResponse): - result: GetDocumentResult +class GetProjectRawConfigResponse(BaseResponse): + result: GetProjectRawConfigResult @dataclasses.dataclass -class GetProjectRawConfigParams: - project_def_path: str +class VersionedTextDocumentIdentifier: + """A text document identifier to denote a specific version of a text document.""" + + version: int + """The version number of this document.""" + + uri: str + """The text document's uri.""" @dataclasses.dataclass -class GetProjectRawConfigRequest(BaseRequest): - params: GetProjectRawConfigParams - method = "projects/getRawConfig" +class TextDocumentContentChangePartial: + """@since 3.18.0""" + + # Since: 3.18.0 + + range: Range + """The range of the document that changed.""" + + text: str + """The new text for the provided range.""" + + range_length: int | None + """The optional length of the range that got replaced. + + @deprecated use range instead.""" @dataclasses.dataclass -class GetProjectRawConfigResult(BaseResult): - # stringified json - config: str +class TextDocumentContentChangeWholeDocument: + """@since 3.18.0""" + + # Since: 3.18.0 + + text: str + """The new text of the whole document.""" + + +TextDocumentContentChangeEvent = TextDocumentContentChangePartial | TextDocumentContentChangeWholeDocument +"""An event describing a change to a text document. 
If only a text is provided +it is considered to be the full content of the document.""" @dataclasses.dataclass -class GetProjectRawConfigResponse(BaseResponse): - result: GetProjectRawConfigResult +class DidChangeTextDocumentParams: + """The change text document notification's parameters.""" + + text_document: VersionedTextDocumentIdentifier + """The document that did change. The version number points + to the version after all provided content changes have + been applied.""" + + content_changes: collections.abc.Sequence[TextDocumentContentChangeEvent] + """The actual content changes. The content changes describe single state changes + to the document. So if there are two content changes c1 (at array index 0) and + c2 (at array index 1) for a document in state S then c1 moves the document from + S to S' and c2 from S' to S''. So c1 is computed on the state S and c2 is computed + on the state S'. + + To mirror the content of a document using change events use the following approach: + - start with the same initial content + - apply the 'textDocument/didChange' notifications in the order you receive them. 
+ - apply the `TextDocumentContentChangeEvent`s in a single notification in the order + you receive them.""" + @dataclasses.dataclass @@ -1424,6 +1499,15 @@ class DidCloseTextDocumentNotification(BaseNotification): method = "textDocument/didClose" +@dataclasses.dataclass +class DidChangeTextDocumentNotification(BaseNotification): + """The document change notification is sent from the client to the server to signal + changes to a text document.""" + + params: DidChangeTextDocumentParams + method = "textDocument/didChange" + + @dataclasses.dataclass class CancelParams: id: int | str @@ -1481,13 +1565,12 @@ class ExitNotification(BaseNotification): ApplyWorkspaceEditResponse, ApplyWorkspaceEditResult, ), - DOCUMENT_GET: ( - GetDocumentRequest, - GetDocumentParams, - GetDocumentResponse, - GetDocumentResult, + PROJECT_RAW_CONFIG_GET: ( + GetProjectRawConfigRequest, + GetProjectRawConfigParams, + GetProjectRawConfigResponse, + GetProjectRawConfigResult, ), - PROJECT_RAW_CONFIG_GET: (GetProjectRawConfigRequest, GetProjectRawConfigParams, GetProjectRawConfigResponse, GetProjectRawConfigResult), TEXT_DOCUMENT_DID_OPEN: ( DidOpenTextDocumentNotification, DidOpenTextDocumentParams, @@ -1500,4 +1583,10 @@ class ExitNotification(BaseNotification): None, None, ), + TEXT_DOCUMENT_DID_CHANGE: ( + DidChangeTextDocumentNotification, + DidChangeTextDocumentParams, + None, + None, + ), } diff --git a/src/finecode/runner/runner_client.py b/src/finecode/runner/runner_client.py index 11f88c05..bbf605d4 100644 --- a/src/finecode/runner/runner_client.py +++ b/src/finecode/runner/runner_client.py @@ -15,13 +15,18 @@ from loguru import logger import finecode.domain as domain -from finecode.runner import jsonrpc_client, _internal_client_types, _internal_client_api +from finecode.runner import _internal_client_types, _internal_client_api +import finecode_jsonrpc as jsonrpc_client # reexport BaseRunnerRequestException = jsonrpc_client.BaseRunnerRequestException -GetDocumentParams = 
_internal_client_types.GetDocumentParams -GetDocumentResult = _internal_client_types.GetDocumentResult +DidChangeTextDocumentParams = _internal_client_types.DidChangeTextDocumentParams +VersionedTextDocumentIdentifier = _internal_client_types.VersionedTextDocumentIdentifier +TextDocumentContentChangeWholeDocument = _internal_client_types.TextDocumentContentChangeWholeDocument +TextDocumentContentChangePartial = _internal_client_types.TextDocumentContentChangePartial +Range = _internal_client_types.Range +Position = _internal_client_types.Position class ActionRunFailed(jsonrpc_client.BaseRunnerRequestException): ... @@ -61,16 +66,43 @@ class RunnerStatus(enum.Enum): type RunActionRawResult = dict[str, Any] | str -class RunActionResponse(typing.NamedTuple): - result: RunActionRawResult +@dataclasses.dataclass +class RunActionResponse: + result_by_format: dict[str, RunActionRawResult] return_code: int + def json(self) -> dict[str, Any]: + result = self.result_by_format.get("json") + if result is None: + raise ActionRunFailed("Expected json result format but it was not returned") + return result + + def text(self) -> str: + result = self.result_by_format.get("styled_text_json") or self.result_by_format.get("string") + if result is None: + raise ActionRunFailed("Expected text result format but it was not returned") + return result + class RunResultFormat(enum.Enum): JSON = "json" STRING = "string" +class RunActionTrigger(enum.StrEnum): + USER = 'user' + SYSTEM = 'system' + UNKNOWN = 'unknown' + + +class DevEnv(enum.StrEnum): + IDE = 'ide' + CLI = 'cli' + AI = 'ai' + PRECOMMIT = 'precommit' + CI_CD = 'cicd' + + async def run_action( runner: ExtensionRunnerInfo, action_name: str, @@ -109,26 +141,18 @@ async def run_action( raise ActionRunFailed(command_result["error"]) return_code = command_result["return_code"] - raw_result = "" - stringified_result = command_result["result"] + stringified_result = command_result["result_by_format"] # currently result is always dumped to 
json even if response format is expected to # be a string. See docs of ER lsp server for more details. - raw_result = json.loads(stringified_result) - if command_result["format"] == "string": - result = raw_result - elif ( - command_result["format"] == "json" - or command_result["format"] == "styled_text_json" - ): - # string was already converted to dict above - result = raw_result - else: - raise Exception(f"Not support result format: {command_result['format']}") + try: + result_by_format = json.loads(stringified_result) + except json.JSONDecodeError as exception: + raise ActionRunFailed(f"Failed to decode result json: {exception}") from exception if command_result["status"] == "stopped": - raise ActionRunStopped(message=result) + raise ActionRunStopped(message=result_by_format) - return RunActionResponse(result=result, return_code=return_code) + return RunActionResponse(result_by_format=result_by_format, return_code=return_code) async def reload_action(runner: ExtensionRunnerInfo, action_name: str) -> None: @@ -170,12 +194,20 @@ class RunnerConfig: actions: list[domain.Action] # config by handler source action_handler_configs: dict[str, dict[str, Any]] + services: list[domain.ServiceDeclaration] = dataclasses.field(default_factory=list) + # If provided, eagerly instantiate these handlers after config update. + # Keys are action names, values are lists of handler names within that action. 
+ handlers_to_initialize: dict[str, list[str]] | None = None def to_dict(self) -> dict[str, typing.Any]: - return { + result: dict[str, typing.Any] = { "actions": [action.to_dict() for action in self.actions], "action_handler_configs": self.action_handler_configs, + "services": [svc.to_dict() for svc in self.services], } + if self.handlers_to_initialize is not None: + result["handlers_to_initialize"] = self.handlers_to_initialize + return result async def update_config( @@ -221,6 +253,12 @@ async def notify_document_did_close( ), ) +async def notify_document_did_change(runner: ExtensionRunnerInfo, change_params: _internal_client_types.DidChangeTextDocumentParams) -> None: + runner.client.notify( + method=_internal_client_types.TEXT_DOCUMENT_DID_CHANGE, + params=change_params, + ) + __all__ = [ "ActionRunFailed", diff --git a/src/finecode/runner/runner_manager.py b/src/finecode/runner/runner_manager.py index 2701b647..48cdbadf 100644 --- a/src/finecode/runner/runner_manager.py +++ b/src/finecode/runner/runner_manager.py @@ -4,6 +4,7 @@ import asyncio import collections.abc +import concurrent.futures import json import os import shutil @@ -12,23 +13,24 @@ from loguru import logger -from finecode import context, domain, finecode_cmd +from finecode import context, domain, domain_helpers, finecode_cmd from finecode.config import collect_actions, config_models, read_configs from finecode.runner import ( - jsonrpc_client, runner_client, _internal_client_api, _internal_client_types, ) -from finecode.runner.jsonrpc_client import _io_thread +import finecode_jsonrpc as jsonrpc_client +from finecode_jsonrpc import _io_thread from finecode.utils import iterable_subscribe project_changed_callback: ( typing.Callable[[domain.Project], collections.abc.Coroutine[None, None, None]] | None ) = None -get_document: typing.Callable[[], collections.abc.Coroutine] | None = None +# get_document: typing.Callable[[], collections.abc.Coroutine] | None = None apply_workspace_edit: 
typing.Callable[[], collections.abc.Coroutine] | None = None +start_debug_session: typing.Callable[[int], collections.abc.Coroutine] | None = None partial_results: iterable_subscribe.IterableSubscribe = ( iterable_subscribe.IterableSubscribe() ) @@ -76,13 +78,13 @@ def map_change_object(change): async def _start_extension_runner_process( - runner: runner_client.ExtensionRunnerInfo, ws_context: context.WorkspaceContext + runner: runner_client.ExtensionRunnerInfo, ws_context: context.WorkspaceContext, debug: bool = False ) -> None: try: python_cmd = finecode_cmd.get_python_cmd( runner.working_dir_path, runner.env_name ) - except ValueError: + except ValueError as exception: try: runner.status = runner_client.RunnerStatus.NO_VENV await notify_project_changed( @@ -96,7 +98,7 @@ async def _start_extension_runner_process( raise jsonrpc_client.RunnerFailedToStart( f"Runner '{runner.readable_id}' failed to start" - ) + ) from exception if ws_context.runner_io_thread is None: logger.trace("Starting IO Thread") @@ -112,23 +114,64 @@ async def _start_extension_runner_process( runner.env_name ] runner_config = env_config.runner_config - # TODO: also check whether lsp server is available, without it doesn't make sense - # to start with debugger - if runner_config.debug: + + start_with_debug = debug or runner_config.debug + if start_with_debug: process_args.append("--debug") - # TODO: find free port and pass it - process_args.append("--debug-port=5681") + debug_port_future = concurrent.futures.Future() + else: + debug_port_future = None process_args_str: str = " ".join(process_args) - client = await jsonrpc_client.create_lsp_client_io( - f"{python_cmd} -m finecode_extension_runner.cli start {process_args_str}", - runner.working_dir_path, - message_types=_internal_client_types.METHOD_TO_TYPES, - io_thread=ws_context.runner_io_thread, - readable_id=runner.readable_id, - ) + client = jsonrpc_client.JsonRpcClient(message_types=_internal_client_types.METHOD_TO_TYPES, 
readable_id=runner.readable_id) + + try: + await client.start(server_cmd=f"{python_cmd} -m finecode_extension_runner.cli start {process_args_str}", working_dir_path=runner.working_dir_path, io_thread=ws_context.runner_io_thread, debug_port_future=debug_port_future, connect=not start_with_debug) + except jsonrpc_client.RunnerFailedToStart as exception: + logger.error(f"Runner {runner.readable_id} failed to start: {exception.message}") + runner.status = runner_client.RunnerStatus.FAILED + runner.initialized_event.set() + raise exception + runner.client = client - # TODO: recognize started debugger and send command to lsp server + + if start_with_debug: + assert debug_port_future is not None + + # avoid blocking main thread? + debug_async_future = asyncio.wrap_future(future=debug_port_future) + try: + await asyncio.wait_for(debug_async_future, timeout=30) + except TimeoutError as exception: + runner.status = runner_client.RunnerStatus.FAILED + runner.initialized_event.set() + raise RunnerFailedToStart(f"Failed to get debugger port in 30 seconds: {runner.readable_id}") from exception + + debug_port = debug_async_future.result() + logger.info(f"debug port: {debug_port}") + + if start_debug_session is not None: + debug_params = { + "name": "Python: WM", + "type": "debugpy", + "request": "attach", + "connect": { + "host": "localhost", + "port": debug_port + }, + "justMyCode": False, + # "logToFile": True, + } + await start_debug_session(debug_params) + + try: + await client.connect_to_server(io_thread=ws_context.runner_io_thread, timeout=None) + except Exception as exception: # TODO: analyze which can occur + # TODO: analyze whether server process will always stop if connection + logger.error(f"Runner {runner.readable_id} failed to connect to server: {exception}") + runner.status = runner_client.RunnerStatus.FAILED + runner.initialized_event.set() + raise RunnerFailedToStart(str(exception)) from exception async def on_exit(): logger.debug(f"Extension Runner 
{runner.readable_id} exited") @@ -140,20 +183,20 @@ async def on_exit(): runner.client.server_exit_callback = on_exit - if get_document is not None: - runner.client.feature( - _internal_client_types.DOCUMENT_GET, - get_document, - ) - runner.client.feature( _internal_client_types.WORKSPACE_APPLY_EDIT, _apply_workspace_edit ) - async def on_progress(params: _internal_client_types.ProgressParams): + async def on_progress(params: _internal_client_types.ProgressParams) -> None: logger.debug(f"Got progress from runner for token: {params.token}") + try: + result_value = json.loads(params.value) + except json.JSONDecodeError as exception: + logger.error(f"Failed to decode partial result value json: {exception}") + return + partial_result = domain.PartialResult( - token=params.token, value=json.loads(params.value) + token=params.token, value=result_value ) partial_results.publish(partial_result) @@ -169,8 +212,8 @@ async def get_project_raw_config( project_raw_config = ws_context.ws_projects_raw_configs[ project_def_path.parent ] - except KeyError: - raise ValueError(f"Config of project '{project_def_path_str}' not found") + except KeyError as exception: + raise ValueError(f"Config of project '{project_def_path_str}' not found") from exception return _internal_client_types.GetProjectRawConfigResult( config=json.dumps(project_raw_config) ) @@ -187,7 +230,7 @@ async def stop_extension_runner(runner: runner_client.ExtensionRunnerInfo) -> No try: await _internal_client_api.shutdown(client=runner.client) except Exception as e: - logger.error(f"Failed to shutdown:") + logger.error(f"Failed to shutdown {runner.readable_id}:") logger.exception(e) await _internal_client_api.exit(client=runner.client) @@ -211,8 +254,11 @@ def stop_extension_runner_sync(runner: runner_client.ExtensionRunnerInfo) -> Non logger.trace("Extension runner was not running") + async def start_runners_with_presets( - projects: list[domain.Project], ws_context: context.WorkspaceContext + projects: 
list[domain.Project], + ws_context: context.WorkspaceContext, + initialize_all_handlers: bool = False, ) -> None: # start runners with presets in projects, resolve presets and read project actions new_runners_tasks: list[asyncio.Task] = [] @@ -224,16 +270,27 @@ async def start_runners_with_presets( project_status = project.status if project_status == domain.ProjectStatus.CONFIG_VALID: # first check whether runner doesn't exist yet to avoid duplicates - project_runners = ws_context.ws_projects_extension_runners.get(project.dir_path, {}) - project_dev_workspace_runner = project_runners.get('dev_workspace', None) + project_runners = ws_context.ws_projects_extension_runners.get( + project.dir_path, {} + ) + project_dev_workspace_runner = project_runners.get( + "dev_workspace", None + ) start_new_runner = True - if project_dev_workspace_runner is not None and project_dev_workspace_runner.status in [runner_client.RunnerStatus.INITIALIZING, runner_client.RunnerStatus.RUNNING]: + if ( + project_dev_workspace_runner is not None + and project_dev_workspace_runner.status + in [ + runner_client.RunnerStatus.INITIALIZING, + runner_client.RunnerStatus.RUNNING, + ] + ): # start a new one only if: # - either there is no runner yet # or venv exist(=exclude `runner_client.RunnerStatus.NO_VENV`) # and runner is not initializing or running already start_new_runner = False - + if start_new_runner: task = tg.create_task( _start_dev_workspace_runner( @@ -260,7 +317,7 @@ async def start_runners_with_presets( logger.exception(exception) raise jsonrpc_client.RunnerFailedToStart( "Failed to initialize runner(s). 
See previous logs for more details" - ) + ) from eg for project in projects: if project.status != domain.ProjectStatus.CONFIG_VALID: @@ -273,16 +330,28 @@ async def start_runners_with_presets( collect_actions.collect_actions( project_path=project.dir_path, ws_context=ws_context ) + collect_actions.collect_services( + project_path=project.dir_path, ws_context=ws_context + ) except config_models.ConfigurationError as exception: raise jsonrpc_client.RunnerFailedToStart( f"Reading project config with presets and collecting actions in {project.dir_path} failed: {exception.message}" - ) + ) from exception # update config of dev_workspace runner, the new config contains resolved presets dev_workspace_runner = ws_context.ws_projects_extension_runners[ project.dir_path ]["dev_workspace"] - await update_runner_config(runner=dev_workspace_runner, project=project) + handlers_to_init = ( + domain_helpers.collect_all_handlers_to_initialize(project, "dev_workspace") + if initialize_all_handlers + else None + ) + await update_runner_config( + runner=dev_workspace_runner, + project=project, + handlers_to_initialize=handlers_to_init, + ) async def get_or_start_runners_with_presets( @@ -310,7 +379,7 @@ async def get_or_start_runners_with_presets( async def start_runner( - project_def: domain.Project, env_name: str, ws_context: context.WorkspaceContext + project_def: domain.Project, env_name: str, handlers_to_initialize: dict[str, list[str]] | None, ws_context: context.WorkspaceContext, debug: bool = False ) -> runner_client.ExtensionRunnerInfo: # this function manages status of the runner and initialized event runner = runner_client.ExtensionRunnerInfo( @@ -321,7 +390,7 @@ async def start_runner( client=None, ) save_runner_in_context(runner=runner, ws_context=ws_context) - await _start_extension_runner_process(runner=runner, ws_context=ws_context) + await _start_extension_runner_process(runner=runner, ws_context=ws_context, debug=debug) try: await _init_lsp_client(runner=runner, 
project=project_def) @@ -342,15 +411,18 @@ async def start_runner( collect_actions.collect_actions( project_path=project_def.dir_path, ws_context=ws_context ) + collect_actions.collect_services( + project_path=project_def.dir_path, ws_context=ws_context + ) except config_models.ConfigurationError as exception: runner.status = runner_client.RunnerStatus.FAILED runner.initialized_event.set() await notify_project_changed(project_def) raise jsonrpc_client.RunnerFailedToStart( f"Found problem in configuration of {project_def.dir_path}: {exception.message}" - ) + ) from exception - await update_runner_config(runner=runner, project=project_def) + await update_runner_config(runner=runner, project=project_def, handlers_to_initialize=handlers_to_initialize) await _finish_runner_init(runner=runner, project=project_def, ws_context=ws_context) runner.status = runner_client.RunnerStatus.RUNNING @@ -361,19 +433,30 @@ async def start_runner( async def get_or_start_runner( - project_def: domain.Project, env_name: str, ws_context: context.WorkspaceContext + project_def: domain.Project, + env_name: str, + ws_context: context.WorkspaceContext, + initialize_all_handlers: bool = False, + action_names_to_initialize: list[str] | None = None, ) -> runner_client.ExtensionRunnerInfo: - runners_by_env = ws_context.ws_projects_extension_runners[project_def.dir_path] - try: + runners_by_env = ws_context.ws_projects_extension_runners[project_def.dir_path] runner = runners_by_env[env_name] logger.trace(f"Runner {runner.readable_id} found") except KeyError: logger.trace( f"Runner for env {env_name} in {project_def.dir_path} not found, start one" ) + if initialize_all_handlers: + handlers_to_initialize = domain_helpers.collect_all_handlers_to_initialize(project_def, env_name) + elif action_names_to_initialize is not None: + handlers_to_initialize = domain_helpers.collect_handlers_to_initialize_for_actions( + project_def, env_name, action_names_to_initialize + ) + else: + handlers_to_initialize = 
None runner = await start_runner( - project_def=project_def, env_name=env_name, ws_context=ws_context + project_def=project_def, env_name=env_name, handlers_to_initialize=handlers_to_initialize, ws_context=ws_context ) if runner.status != runner_client.RunnerStatus.RUNNING: @@ -397,7 +480,7 @@ async def get_or_start_runner( async def _start_dev_workspace_runner( project_def: domain.Project, ws_context: context.WorkspaceContext ) -> runner_client.ExtensionRunnerInfo: - return await start_runner( + return await get_or_start_runner( project_def=project_def, env_name="dev_workspace", ws_context=ws_context ) @@ -411,30 +494,36 @@ async def _init_lsp_client( client_process_id=os.getpid(), client_name="FineCode_WorkspaceManager", client_version="0.1.0", + client_workspace_dir=runner.working_dir_path ) - except jsonrpc_client.BaseRunnerRequestException as error: + except jsonrpc_client.BaseRunnerRequestException as exception: raise jsonrpc_client.RunnerFailedToStart( - f"Runner failed to initialize: {error.message}" - ) + f"Runner failed to initialize: {exception.message}" + ) from exception try: await _internal_client_api.notify_initialized(runner.client) - except Exception as error: - logger.error(f"Failed to notify runner about initialization: {error}") - logger.exception(error) + except Exception as exception: + logger.error(f"Failed to notify runner about initialization: {exception}") + logger.exception(exception) raise jsonrpc_client.RunnerFailedToStart( - f"Runner failed to notify about initialization: {error}" - ) + f"Runner failed to notify about initialization: {exception}" + ) from exception logger.debug(f"LSP Client for initialized: {runner.readable_id}") async def update_runner_config( - runner: runner_client.ExtensionRunnerInfo, project: domain.Project + runner: runner_client.ExtensionRunnerInfo, + project: domain.Project, + handlers_to_initialize: dict[str, list[str]] | None, ) -> None: assert project.actions is not None config = runner_client.RunnerConfig( 
- actions=project.actions, action_handler_configs=project.action_handler_configs + actions=project.actions, + action_handler_configs=project.action_handler_configs, + services=project.services, + handlers_to_initialize=handlers_to_initialize, ) try: await runner_client.update_config(runner, project.def_path, config) @@ -444,7 +533,7 @@ async def update_runner_config( runner.initialized_event.set() raise jsonrpc_client.RunnerFailedToStart( f"Runner failed to update config: {exception.message}" - ) + ) from exception logger.debug(f"Updated config of runner {runner.readable_id}") @@ -454,13 +543,15 @@ async def _finish_runner_init( project: domain.Project, ws_context: context.WorkspaceContext, ) -> None: + # TODO: save per runner only during initialization. But where to get data from + # in case of runner restart? await send_opened_files( runner=runner, opened_files=list(ws_context.opened_documents.values()) ) def save_runners_from_tasks_in_context( - tasks: list[asyncio.Task], ws_context: context.WorkspaceContext + tasks: list[asyncio.Task[runner_client.ExtensionRunnerInfo]], ws_context: context.WorkspaceContext ) -> None: extension_runners: list[runner_client.ExtensionRunnerInfo] = [ runner.result() for runner in tasks if runner is not None @@ -555,7 +646,6 @@ def remove_runner_venv(runner_dir: Path, env_name: str) -> None: async def restart_extension_runners( runner_working_dir_path: Path, ws_context: context.WorkspaceContext ) -> None: - # TODO: reload config? try: runners_by_env = ws_context.ws_projects_extension_runners[ runner_working_dir_path @@ -564,12 +654,36 @@ async def restart_extension_runners( logger.error(f"Cannot find runner for {runner_working_dir_path}") return + # TODO: parallel? 
for runner in runners_by_env.values(): - await stop_extension_runner(runner) + await restart_extension_runner(runner_working_dir_path=runner.working_dir_path, env_name=runner.env_name, ws_context=ws_context) - project_def = ws_context.ws_projects[runner.working_dir_path] - await start_runner( - project_def=project_def, - env_name=runner.env_name, - ws_context=ws_context, - ) + +async def restart_extension_runner( + runner_working_dir_path: Path, env_name: str, ws_context: context.WorkspaceContext, debug: bool = False +) -> None: + # TODO: reload config? + try: + runners_by_env = ws_context.ws_projects_extension_runners[ + runner_working_dir_path + ] + except KeyError: + logger.error(f"Cannot find runner for {runner_working_dir_path}") + return + + try: + runner = runners_by_env[env_name] + except KeyError: + logger.error(f"Cannot find runner for env {env_name} in {runner_working_dir_path}") + return + + await stop_extension_runner(runner) + + project_def = ws_context.ws_projects[runner.working_dir_path] + await start_runner( + project_def=project_def, + env_name=runner.env_name, + handlers_to_initialize=None, + ws_context=ws_context, + debug=debug + ) \ No newline at end of file diff --git a/src/finecode/services/run_service/__init__.py b/src/finecode/services/run_service/__init__.py index f6e94f01..8bcb6119 100644 --- a/src/finecode/services/run_service/__init__.py +++ b/src/finecode/services/run_service/__init__.py @@ -9,4 +9,23 @@ start_required_environments, run_actions_in_projects, RunResultFormat, + RunActionTrigger, + DevEnv ) + + +__all__ = [ + "ActionRunFailed", + "StartingEnvironmentsFailed", + "run_action", + "find_action_project_and_run", + "find_action_project_and_run_with_partial_results", + "find_projects_with_actions", + "find_all_projects_with_action", + "run_with_partial_results", + "start_required_environments", + "run_actions_in_projects", + "RunResultFormat", + "RunActionTrigger", + "DevEnv", +] \ No newline at end of file diff --git 
a/src/finecode/services/run_service/payload_preprocessor.py b/src/finecode/services/run_service/payload_preprocessor.py index 9eeffefa..b95e57e1 100644 --- a/src/finecode/services/run_service/payload_preprocessor.py +++ b/src/finecode/services/run_service/payload_preprocessor.py @@ -1,7 +1,7 @@ import pathlib import typing -from finecode import context, project_analyzer +from finecode import context async def preprocess_for_project( @@ -12,15 +12,7 @@ async def preprocess_for_project( ) -> dict[str, typing.Any]: processed_payload = payload.copy() - # temporary hardcore logic until we get the proper payload structure and defaults - # from extension runner - if action_name == "lint" or action_name == "format": - if "file_paths" not in processed_payload: - processed_payload["file_paths"] = None - - if action_name == "format" and "save" not in processed_payload: - processed_payload["save"] = True - elif action_name == "prepare_envs" or action_name == "prepare_runners": + if action_name == "prepare_envs" or action_name == "prepare_runners": runtime_venv_path = project_dir_path / ".venvs" / "runtime" project_def_path = project_dir_path / "pyproject.toml" envs = [ @@ -51,10 +43,4 @@ async def preprocess_for_project( ) processed_payload["envs"] = envs - for param, value in processed_payload.items(): - if param == "file_paths" and value is None: - processed_payload["file_paths"] = await project_analyzer.get_project_files( - project_dir_path, ws_context=ws_context - ) - return processed_payload diff --git a/src/finecode/services/run_service/proxy_utils.py b/src/finecode/services/run_service/proxy_utils.py index 27786bdc..5dfcc785 100644 --- a/src/finecode/services/run_service/proxy_utils.py +++ b/src/finecode/services/run_service/proxy_utils.py @@ -9,7 +9,7 @@ import ordered_set from loguru import logger -from finecode import context, domain, find_project, user_messages +from finecode import context, domain, domain_helpers, find_project, user_messages from finecode.runner 
import runner_manager from finecode.runner import runner_client from finecode.runner.runner_manager import RunnerFailedToStart @@ -40,11 +40,11 @@ async def find_action_project( if project_status != domain.ProjectStatus.CONFIG_VALID: logger.info( f"Extension runner {project_path} has no valid config with finecode, " - f"status: {project_status.name}" + + f"status: {project_status.name}" ) raise ActionRunFailed( f"Project {project_path} has no valid config with finecode," - f"status: {project_status.name}" + + f"status: {project_status.name}" ) return project_path @@ -54,7 +54,10 @@ async def find_action_project_and_run( file_path: pathlib.Path, action_name: str, params: dict[str, typing.Any], + run_trigger: runner_client.RunActionTrigger, + dev_env: runner_client.DevEnv, ws_context: context.WorkspaceContext, + initialize_all_handlers: bool = False, ) -> runner_client.RunActionResponse: project_path = await find_action_project( file_path=file_path, action_name=action_name, ws_context=ws_context @@ -68,6 +71,9 @@ async def find_action_project_and_run( project_def=project, ws_context=ws_context, preprocess_payload=False, + run_trigger=run_trigger, + dev_env=dev_env, + initialize_all_handlers=initialize_all_handlers, ) except ActionRunFailed as exception: raise exception @@ -85,9 +91,9 @@ async def run_action_in_runner( response = await runner_client.run_action( runner=runner, action_name=action_name, params=params, options=options ) - except runner_client.BaseRunnerRequestException as error: - logger.error(f"Error on running action {action_name}: {error.message}") - raise ActionRunFailed(error.message) + except runner_client.BaseRunnerRequestException as exception: + logger.error(f"Error on running action {action_name}: {exception.message}") + raise ActionRunFailed(exception.message) from exception return response @@ -142,13 +148,18 @@ async def run_action_and_notify( runner: runner_client.ExtensionRunnerInfo, result_list: AsyncList, partial_results_task: 
asyncio.Task, + run_trigger: runner_client.RunActionTrigger, + dev_env: runner_client.DevEnv, ) -> runner_client.RunActionResponse: try: return await run_action_in_runner( action_name=action_name, params=params, runner=runner, - options={"partial_result_token": partial_result_token}, + options={ + "partial_result_token": partial_result_token, + "meta": {"trigger": run_trigger.value, "dev_env": dev_env.value}, + }, ) finally: result_list.end() @@ -173,7 +184,10 @@ async def run_with_partial_results( params: dict[str, typing.Any], partial_result_token: int | str, project_dir_path: pathlib.Path, + run_trigger: runner_client.RunActionTrigger, + dev_env: runner_client.DevEnv, ws_context: context.WorkspaceContext, + initialize_all_handlers: bool = False, ) -> collections.abc.AsyncIterator[ collections.abc.AsyncIterable[domain.PartialResultRawValue] ]: @@ -197,7 +211,11 @@ async def run_with_partial_results( for env_name in action_envs: try: runner = await runner_manager.get_or_start_runner( - project_def=project, env_name=env_name, ws_context=ws_context + project_def=project, + env_name=env_name, + ws_context=ws_context, + initialize_all_handlers=initialize_all_handlers, + action_names_to_initialize=[action_name], ) except runner_manager.RunnerFailedToStart as exception: raise ActionRunFailed( @@ -212,6 +230,8 @@ async def run_with_partial_results( runner=runner, result_list=result, partial_results_task=partial_results_task, + run_trigger=run_trigger, + dev_env=dev_env, ) ) @@ -228,7 +248,7 @@ async def run_with_partial_results( errors_str = ", ".join(errors) raise ActionRunFailed( f"Run of {action_name} in {project.dir_path} failed: {errors_str}. 
See logs for more details" - ) + ) from eg @contextlib.asynccontextmanager @@ -237,7 +257,10 @@ async def find_action_project_and_run_with_partial_results( action_name: str, params: dict[str, typing.Any], partial_result_token: int | str, + run_trigger: runner_client.RunActionTrigger, + dev_env: runner_client.DevEnv, ws_context: context.WorkspaceContext, + initialize_all_handlers: bool = False, ) -> collections.abc.AsyncIterator[runner_client.RunActionRawResult]: logger.trace(f"Run {action_name} on {file_path}") project_path = await find_action_project( @@ -248,7 +271,10 @@ async def find_action_project_and_run_with_partial_results( params=params, partial_result_token=partial_result_token, project_dir_path=project_path, + run_trigger=run_trigger, + dev_env=dev_env, ws_context=ws_context, + initialize_all_handlers=initialize_all_handlers, ) @@ -287,8 +313,17 @@ async def start_required_environments( actions_by_projects: dict[pathlib.Path, list[str]], ws_context: context.WorkspaceContext, update_config_in_running_runners: bool = False, + initialize_handlers: bool = True, + initialize_all_handlers: bool = False, ) -> None: - """Collect all required envs from actions that will be run and start them.""" + """Collect all required envs from actions that will be run and start them. + + Args: + initialize_handlers: Initialize handlers for the specified actions. + initialize_all_handlers: Initialize all handlers in the environment, + not just those for the specified actions. Takes precedence over + initialize_handlers. 
+ """ required_envs_by_project: dict[pathlib.Path, set[str]] = {} for project_dir_path, action_names in actions_by_projects.items(): project = ws_context.ws_projects[project_dir_path] @@ -312,8 +347,23 @@ async def start_required_environments( existing_runners = ws_context.ws_projects_extension_runners.get( project_dir_path, {} ) + action_names = actions_by_projects[project_dir_path] for env_name in required_envs: + if initialize_all_handlers: + handlers_to_init = ( + domain_helpers.collect_all_handlers_to_initialize( + project, env_name + ) + ) + elif initialize_handlers: + handlers_to_init = ( + domain_helpers.collect_handlers_to_initialize_for_actions( + project, env_name, action_names + ) + ) + else: + handlers_to_init = None tg.create_task( _start_runner_or_update_config( env_name=env_name, @@ -321,6 +371,7 @@ async def start_required_environments( project=project, update_config_in_running_runners=update_config_in_running_runners, ws_context=ws_context, + handlers_to_initialize=handlers_to_init, ) ) except ExceptionGroup as eg: @@ -330,7 +381,7 @@ async def start_required_environments( errors.append(exception.message) else: errors.append(str(exception)) - raise StartingEnvironmentsFailed(".".join(errors)) + raise StartingEnvironmentsFailed(".".join(errors)) from eg async def _start_runner_or_update_config( @@ -339,19 +390,24 @@ async def _start_runner_or_update_config( project: domain.Project, update_config_in_running_runners: bool, ws_context: context.WorkspaceContext, + handlers_to_initialize: dict[str, list[str]] | None, ): runner_exist = env_name in existing_runners start_runner = True if runner_exist: + runner = existing_runners[env_name] + if runner.status == runner_client.RunnerStatus.INITIALIZING: + await runner.initialized_event.wait() + runner_is_running = ( - existing_runners[env_name].status == runner_client.RunnerStatus.RUNNING + runner.status == runner_client.RunnerStatus.RUNNING ) start_runner = not runner_is_running if start_runner: try: await 
runner_manager.start_runner( - project_def=project, env_name=env_name, ws_context=ws_context + project_def=project, env_name=env_name, handlers_to_initialize=handlers_to_initialize, ws_context=ws_context ) except runner_manager.RunnerFailedToStart as exception: raise StartingEnvironmentsFailed( @@ -366,7 +422,9 @@ async def _start_runner_or_update_config( try: await runner_manager.update_runner_config( - runner=runner, project=project + runner=runner, + project=project, + handlers_to_initialize=handlers_to_initialize, ) except RunnerFailedToStart as exception: raise StartingEnvironmentsFailed( @@ -380,7 +438,9 @@ async def run_actions_in_running_project( project: domain.Project, ws_context: context.WorkspaceContext, concurrently: bool, - result_format: RunResultFormat, + result_formats: list[RunResultFormat], + run_trigger: runner_client.RunActionTrigger, + dev_env: runner_client.DevEnv, ) -> dict[str, RunActionResponse]: result_by_action: dict[str, RunActionResponse] = {} @@ -395,7 +455,9 @@ async def run_actions_in_running_project( params=action_payload, project_def=project, ws_context=ws_context, - result_format=result_format, + run_trigger=run_trigger, + dev_env=dev_env, + result_formats=result_formats, ) ) run_tasks.append(run_task) @@ -406,7 +468,7 @@ async def run_actions_in_running_project( else: logger.error("Unexpected exception:") logger.exception(exception) - raise ActionRunFailed(f"Running of actions {actions} failed") + raise ActionRunFailed(f"Running of actions {actions} failed") from eg for idx, run_task in enumerate(run_tasks): run_result = run_task.result() @@ -420,18 +482,20 @@ async def run_actions_in_running_project( params=action_payload, project_def=project, ws_context=ws_context, - result_format=result_format, + run_trigger=run_trigger, + dev_env=dev_env, + result_formats=result_formats, ) except ActionRunFailed as exception: raise ActionRunFailed( f"Running of action {action_name} failed: {exception.message}" - ) + ) from exception except 
Exception as error: logger.error("Unexpected exception") logger.exception(error) raise ActionRunFailed( f"Running of action {action_name} failed with unexpected exception" - ) + ) from error result_by_action[action_name] = run_result @@ -443,28 +507,38 @@ async def run_actions_in_projects( action_payload: dict[str, str], ws_context: context.WorkspaceContext, concurrently: bool, - result_format: RunResultFormat, + result_formats: list[RunResultFormat], + run_trigger: runner_client.RunActionTrigger, + dev_env: runner_client.DevEnv, + payload_overrides_by_project: dict[str, dict[str, typing.Any]] | None = None, ) -> dict[pathlib.Path, dict[str, RunActionResponse]]: + _payload_overrides_by_project = payload_overrides_by_project or {} project_handler_tasks: list[asyncio.Task] = [] try: async with asyncio.TaskGroup() as tg: for project_dir_path, actions_to_run in actions_by_project.items(): project = ws_context.ws_projects[project_dir_path] + project_payload = { + **action_payload, + **_payload_overrides_by_project.get(str(project_dir_path), {}), + } project_task = tg.create_task( run_actions_in_running_project( actions=actions_to_run, - action_payload=action_payload, + action_payload=project_payload, project=project, ws_context=ws_context, concurrently=concurrently, - result_format=result_format, + result_formats=result_formats, + run_trigger=run_trigger, + dev_env=dev_env, ) ) project_handler_tasks.append(project_task) except ExceptionGroup as eg: for exception in eg.exceptions: # TODO: merge all in one? 
- raise exception + raise exception from eg results = {} projects_paths = list(actions_by_project.keys()) @@ -494,8 +568,10 @@ def find_projects_with_actions( return actions_by_project -RunResultFormat = runner_client.RunResultFormat -RunActionResponse = runner_client.RunActionResponse +RunResultFormat: typing.TypeAlias = runner_client.RunResultFormat +RunActionResponse: typing.TypeAlias = runner_client.RunActionResponse +RunActionTrigger: typing.TypeAlias = runner_client.RunActionTrigger +DevEnv: typing.TypeAlias = runner_client.DevEnv async def run_action( @@ -503,18 +579,26 @@ async def run_action( params: dict[str, typing.Any], project_def: domain.Project, ws_context: context.WorkspaceContext, - result_format: RunResultFormat = RunResultFormat.JSON, + run_trigger: runner_client.RunActionTrigger, + dev_env: runner_client.DevEnv, + result_formats: list[runner_client.RunResultFormat] | None = None, preprocess_payload: bool = True, + initialize_all_handlers: bool = False, ) -> RunActionResponse: formatted_params = str(params) if len(formatted_params) > 100: formatted_params = f"{formatted_params[:100]}..." logger.trace(f"Execute action {action_name} with {formatted_params}") + + if result_formats is None: + _result_formats = [RunResultFormat.JSON] + else: + _result_formats = result_formats if project_def.status != domain.ProjectStatus.CONFIG_VALID: raise ActionRunFailed( f"Project {project_def.dir_path} has no valid configuration and finecode." - " Please check logs." + + " Please check logs." 
) if preprocess_payload: @@ -551,7 +635,10 @@ async def run_action( env_name=env_name, project_def=project_def, ws_context=ws_context, - result_format=result_format, + run_trigger=run_trigger, + dev_env=dev_env, + result_formats=_result_formats, + initialize_all_handlers=initialize_all_handlers, ) else: # TODO: concurrent vs sequential, this value should be taken from action config @@ -568,7 +655,10 @@ async def run_action( env_name=handler.env, project_def=project_def, ws_context=ws_context, - result_format=result_format, + run_trigger=run_trigger, + dev_env=dev_env, + result_formats=_result_formats, + initialize_all_handlers=initialize_all_handlers, ) return response @@ -580,23 +670,33 @@ async def _run_action_in_env_runner( env_name: str, project_def: domain.Project, ws_context: context.WorkspaceContext, - result_format: RunResultFormat = RunResultFormat.JSON, + run_trigger: runner_client.RunActionTrigger, + dev_env: runner_client.DevEnv, + result_formats: list[runner_client.RunResultFormat], + initialize_all_handlers: bool = False, ): try: runner = await runner_manager.get_or_start_runner( - project_def=project_def, env_name=env_name, ws_context=ws_context + project_def=project_def, + env_name=env_name, + ws_context=ws_context, + initialize_all_handlers=initialize_all_handlers, + action_names_to_initialize=[action_name], ) except runner_manager.RunnerFailedToStart as exception: raise ActionRunFailed( f"Runner {env_name} in project {project_def.dir_path} failed: {exception.message}" - ) + ) from exception try: response = await runner_client.run_action( runner=runner, action_name=action_name, params=payload, - options={"result_format": result_format}, + options={ + "result_formats": result_formats, + "meta": {"trigger": run_trigger.value, "dev_env": dev_env.value}, + }, ) except runner_client.BaseRunnerRequestException as error: await user_messages.error( diff --git a/tests/__testdata__/list_ws/backend/finecode.sh b/tests/__testdata__/list_ws/backend/finecode.sh 
deleted file mode 100644 index 176acc23..00000000 --- a/tests/__testdata__/list_ws/backend/finecode.sh +++ /dev/null @@ -1 +0,0 @@ -poetry run python \ No newline at end of file diff --git a/tests/__testdata__/list_ws/backend/poetry.lock b/tests/__testdata__/list_ws/backend/poetry.lock deleted file mode 100644 index 60ea7ae4..00000000 --- a/tests/__testdata__/list_ws/backend/poetry.lock +++ /dev/null @@ -1,445 +0,0 @@ -# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. - -[[package]] -name = "annotated-types" -version = "0.7.0" -description = "Reusable constraint types to use with typing.Annotated" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, - {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, -] - -[[package]] -name = "attrs" -version = "25.3.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, - {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, -] - -[package.extras] -benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", 
"pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] -tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] - -[[package]] -name = "cattrs" -version = "24.1.3" -description = "Composable complex class support for attrs and dataclasses." 
-optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "cattrs-24.1.3-py3-none-any.whl", hash = "sha256:adf957dddd26840f27ffbd060a6c4dd3b2192c5b7c2c0525ef1bd8131d8a83f5"}, - {file = "cattrs-24.1.3.tar.gz", hash = "sha256:981a6ef05875b5bb0c7fb68885546186d306f10f0f6718fe9b96c226e68821ff"}, -] - -[package.dependencies] -attrs = ">=23.1.0" - -[package.extras] -bson = ["pymongo (>=4.4.0)"] -cbor2 = ["cbor2 (>=5.4.6)"] -msgpack = ["msgpack (>=1.0.5)"] -msgspec = ["msgspec (>=0.18.5) ; implementation_name == \"cpython\""] -orjson = ["orjson (>=3.9.2) ; implementation_name == \"cpython\""] -pyyaml = ["pyyaml (>=6.0)"] -tomlkit = ["tomlkit (>=0.11.8)"] -ujson = ["ujson (>=5.7.0)"] - -[[package]] -name = "click" -version = "8.1.8" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, - {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["dev"] -markers = "sys_platform == \"win32\" or platform_system == \"Windows\"" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "finecode" -version = "0.2.0" -description = "" -optional = false -python-versions = ">=3.11, < 3.14" -groups = ["dev"] -files = [] -develop = true - -[package.dependencies] -click = "==8.1.*" -finecode_extension_api = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "finecode_extension_api"} -loguru = "==0.7.*" -ordered-set = "==4.1.*" -platformdirs = "==4.3.*" -pydantic = "==2.10.*" -pygls = "2.0.0-a2" -tomlkit = "==0.11.*" -watchdog = "==4.0.*" - -[package.source] -type = "directory" -url = "../../.." 
- -[[package]] -name = "finecode-extension-api" -version = "0.1.0" -description = "" -optional = false -python-versions = ">=3.11, < 3.14" -groups = ["dev"] -files = [] -develop = false - -[package.dependencies] -pydantic = ">=2.10.6,<3.0.0" -typing-extensions = ">=4.12.2,<5.0.0" - -[package.source] -type = "git" -url = "https://github.com/finecode-dev/finecode.git" -reference = "HEAD" -resolved_reference = "edb9e5f21b755a9df0501513e6001a9cda4c462b" -subdirectory = "finecode_extension_api" - -[[package]] -name = "loguru" -version = "0.7.3" -description = "Python logging made (stupidly) simple" -optional = false -python-versions = "<4.0,>=3.5" -groups = ["dev"] -files = [ - {file = "loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c"}, - {file = "loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6"}, -] - -[package.dependencies] -colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} -win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} - -[package.extras] -dev = ["Sphinx (==8.1.3) ; python_version >= \"3.11\"", "build (==1.2.2) ; python_version >= \"3.11\"", "colorama (==0.4.5) ; python_version < \"3.8\"", "colorama (==0.4.6) ; python_version >= \"3.8\"", "exceptiongroup (==1.1.3) ; python_version >= \"3.7\" and python_version < \"3.11\"", "freezegun (==1.1.0) ; python_version < \"3.8\"", "freezegun (==1.5.0) ; python_version >= \"3.8\"", "mypy (==v0.910) ; python_version < \"3.6\"", "mypy (==v0.971) ; python_version == \"3.6\"", "mypy (==v1.13.0) ; python_version >= \"3.8\"", "mypy (==v1.4.1) ; python_version == \"3.7\"", "myst-parser (==4.0.0) ; python_version >= \"3.11\"", "pre-commit (==4.0.1) ; python_version >= \"3.9\"", "pytest (==6.1.2) ; python_version < \"3.8\"", "pytest (==8.3.2) ; python_version >= \"3.8\"", "pytest-cov (==2.12.1) ; python_version < \"3.8\"", "pytest-cov (==5.0.0) ; python_version == 
\"3.8\"", "pytest-cov (==6.0.0) ; python_version >= \"3.9\"", "pytest-mypy-plugins (==1.9.3) ; python_version >= \"3.6\" and python_version < \"3.8\"", "pytest-mypy-plugins (==3.1.0) ; python_version >= \"3.8\"", "sphinx-rtd-theme (==3.0.2) ; python_version >= \"3.11\"", "tox (==3.27.1) ; python_version < \"3.8\"", "tox (==4.23.2) ; python_version >= \"3.8\"", "twine (==6.0.1) ; python_version >= \"3.11\""] - -[[package]] -name = "lsprotocol" -version = "2024.0.0b1" -description = "Python types for Language Server Protocol." -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "lsprotocol-2024.0.0b1-py3-none-any.whl", hash = "sha256:93785050ac155ae2be16b1ebfbd74c214feb3d3ef77b10399ce941e5ccef6ebd"}, - {file = "lsprotocol-2024.0.0b1.tar.gz", hash = "sha256:d3667fb70894d361aa6c495c5c8a1b2e6a44be65ff84c21a9cbb67ebfb4830fd"}, -] - -[package.dependencies] -attrs = ">=21.3.0" -cattrs = "!=23.2.1" - -[[package]] -name = "ordered-set" -version = "4.1.0" -description = "An OrderedSet is a custom MutableSet that remembers its order, so that every" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8"}, - {file = "ordered_set-4.1.0-py3-none-any.whl", hash = "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562"}, -] - -[package.extras] -dev = ["black", "mypy", "pytest"] - -[[package]] -name = "platformdirs" -version = "4.3.7" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
-optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94"}, - {file = "platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351"}, -] - -[package.extras] -docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] -type = ["mypy (>=1.14.1)"] - -[[package]] -name = "pydantic" -version = "2.10.6" -description = "Data validation using Python type hints" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, - {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, -] - -[package.dependencies] -annotated-types = ">=0.6.0" -pydantic-core = "2.27.2" -typing-extensions = ">=4.12.2" - -[package.extras] -email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] - -[[package]] -name = "pydantic-core" -version = "2.27.2" -description = "Core functionality for Pydantic validation and serialization" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, - {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, - {file = 
"pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, - {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, - {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, - {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, - {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, - {file = 
"pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", 
hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, - {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, - {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, - {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, - {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, - {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, - {file 
= "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, - {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, - {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, - {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, - {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, - {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, - {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, - {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, -] - -[package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" - -[[package]] -name = "pygls" -version = "2.0.0a2" -description = "A pythonic generic language 
server (pronounced like 'pie glass')" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "pygls-2.0.0a2-py3-none-any.whl", hash = "sha256:b202369321409343aa6440d73111d9fa0c22e580466ff1c7696b8358bb91f243"}, - {file = "pygls-2.0.0a2.tar.gz", hash = "sha256:03e00634ed8d989918268aaa4b4a0c3ab857ea2d4ee94514a52efa5ddd6d5d9f"}, -] - -[package.dependencies] -cattrs = ">=23.1.2" -lsprotocol = "2024.0.0b1" - -[package.extras] -ws = ["websockets (>=13.0)"] - -[[package]] -name = "tomlkit" -version = "0.11.8" -description = "Style preserving TOML library" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "tomlkit-0.11.8-py3-none-any.whl", hash = "sha256:8c726c4c202bdb148667835f68d68780b9a003a9ec34167b6c673b38eff2a171"}, - {file = "tomlkit-0.11.8.tar.gz", hash = "sha256:9330fc7faa1db67b541b28e62018c17d20be733177d290a13b24c62d1614e0c3"}, -] - -[[package]] -name = "typing-extensions" -version = "4.13.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, - {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, -] - -[[package]] -name = "watchdog" -version = "4.0.2" -description = "Filesystem events monitoring" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ede7f010f2239b97cc79e6cb3c249e72962404ae3865860855d5cbe708b0fd22"}, - {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a2cffa171445b0efa0726c561eca9a27d00a1f2b83846dbd5a4f639c4f8ca8e1"}, - {file = "watchdog-4.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c50f148b31b03fbadd6d0b5980e38b558046b127dc483e5e4505fcef250f9503"}, - 
{file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7c7d4bf585ad501c5f6c980e7be9c4f15604c7cc150e942d82083b31a7548930"}, - {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:914285126ad0b6eb2258bbbcb7b288d9dfd655ae88fa28945be05a7b475a800b"}, - {file = "watchdog-4.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:984306dc4720da5498b16fc037b36ac443816125a3705dfde4fd90652d8028ef"}, - {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1cdcfd8142f604630deef34722d695fb455d04ab7cfe9963055df1fc69e6727a"}, - {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7ab624ff2f663f98cd03c8b7eedc09375a911794dfea6bf2a359fcc266bff29"}, - {file = "watchdog-4.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:132937547a716027bd5714383dfc40dc66c26769f1ce8a72a859d6a48f371f3a"}, - {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:cd67c7df93eb58f360c43802acc945fa8da70c675b6fa37a241e17ca698ca49b"}, - {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bcfd02377be80ef3b6bc4ce481ef3959640458d6feaae0bd43dd90a43da90a7d"}, - {file = "watchdog-4.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:980b71510f59c884d684b3663d46e7a14b457c9611c481e5cef08f4dd022eed7"}, - {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:aa160781cafff2719b663c8a506156e9289d111d80f3387cf3af49cedee1f040"}, - {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f6ee8dedd255087bc7fe82adf046f0b75479b989185fb0bdf9a98b612170eac7"}, - {file = "watchdog-4.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0b4359067d30d5b864e09c8597b112fe0a0a59321a0f331498b013fb097406b4"}, - {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:770eef5372f146997638d737c9a3c597a3b41037cfbc5c41538fc27c09c3a3f9"}, - {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:eeea812f38536a0aa859972d50c76e37f4456474b02bd93674d1947cf1e39578"}, - {file = "watchdog-4.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b2c45f6e1e57ebb4687690c05bc3a2c1fb6ab260550c4290b8abb1335e0fd08b"}, - {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:10b6683df70d340ac3279eff0b2766813f00f35a1d37515d2c99959ada8f05fa"}, - {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7c739888c20f99824f7aa9d31ac8a97353e22d0c0e54703a547a218f6637eb3"}, - {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c100d09ac72a8a08ddbf0629ddfa0b8ee41740f9051429baa8e31bb903ad7508"}, - {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f5315a8c8dd6dd9425b974515081fc0aadca1d1d61e078d2246509fd756141ee"}, - {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:2d468028a77b42cc685ed694a7a550a8d1771bb05193ba7b24006b8241a571a1"}, - {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f15edcae3830ff20e55d1f4e743e92970c847bcddc8b7509bcd172aa04de506e"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:936acba76d636f70db8f3c66e76aa6cb5136a936fc2a5088b9ce1c7a3508fc83"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_armv7l.whl", hash = "sha256:e252f8ca942a870f38cf785aef420285431311652d871409a64e2a0a52a2174c"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_i686.whl", hash = "sha256:0e83619a2d5d436a7e58a1aea957a3c1ccbf9782c43c0b4fed80580e5e4acd1a"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64.whl", hash = "sha256:88456d65f207b39f1981bf772e473799fcdc10801062c36fd5ad9f9d1d463a73"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:32be97f3b75693a93c683787a87a0dc8db98bb84701539954eef991fb35f5fbc"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_s390x.whl", hash = "sha256:c82253cfc9be68e3e49282831afad2c1f6593af80c0daf1287f6a92657986757"}, - 
{file = "watchdog-4.0.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:c0b14488bd336c5b1845cee83d3e631a1f8b4e9c5091ec539406e4a324f882d8"}, - {file = "watchdog-4.0.2-py3-none-win32.whl", hash = "sha256:0d8a7e523ef03757a5aa29f591437d64d0d894635f8a50f370fe37f913ce4e19"}, - {file = "watchdog-4.0.2-py3-none-win_amd64.whl", hash = "sha256:c344453ef3bf875a535b0488e3ad28e341adbd5a9ffb0f7d62cefacc8824ef2b"}, - {file = "watchdog-4.0.2-py3-none-win_ia64.whl", hash = "sha256:baececaa8edff42cd16558a639a9b0ddf425f93d892e8392a56bf904f5eff22c"}, - {file = "watchdog-4.0.2.tar.gz", hash = "sha256:b4dfbb6c49221be4535623ea4474a4d6ee0a9cef4a80b20c28db4d858b64e270"}, -] - -[package.extras] -watchmedo = ["PyYAML (>=3.10)"] - -[[package]] -name = "win32-setctime" -version = "1.2.0" -description = "A small Python utility to set file creation time on Windows" -optional = false -python-versions = ">=3.5" -groups = ["dev"] -markers = "sys_platform == \"win32\"" -files = [ - {file = "win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390"}, - {file = "win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0"}, -] - -[package.extras] -dev = ["black (>=19.3b0) ; python_version >= \"3.6\"", "pytest (>=4.6.2)"] - -[metadata] -lock-version = "2.1" -python-versions = ">=3.11, < 3.14" -content-hash = "a35ee132a1427a364664722abe3af554d632162d15cf1e37c05f9015d59e5e0f" diff --git a/tests/__testdata__/list_ws/cli_tool/poetry.lock b/tests/__testdata__/list_ws/cli_tool/poetry.lock deleted file mode 100644 index af8230f4..00000000 --- a/tests/__testdata__/list_ws/cli_tool/poetry.lock +++ /dev/null @@ -1,577 +0,0 @@ -# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. 
- -[[package]] -name = "annotated-types" -version = "0.7.0" -description = "Reusable constraint types to use with typing.Annotated" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, - {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, -] - -[[package]] -name = "attrs" -version = "25.3.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, - {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, -] - -[package.extras] -benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -docs = 
["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] -tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] - -[[package]] -name = "black" -version = "25.1.0" -description = "The uncompromising code formatter." -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"}, - {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"}, - {file = "black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7"}, - {file = "black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9"}, - {file = "black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0"}, - {file = "black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299"}, - {file = "black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096"}, - {file = "black-25.1.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2"}, - {file = "black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b"}, - {file = "black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc"}, - {file = "black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f"}, - {file = "black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba"}, - {file = "black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f"}, - {file = "black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3"}, - {file = "black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171"}, - {file = "black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18"}, - {file = "black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0"}, - {file = "black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"}, - {file = "black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e"}, - {file = "black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355"}, - {file = "black-25.1.0-py3-none-any.whl", hash = 
"sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717"}, - {file = "black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.10)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "cattrs" -version = "24.1.3" -description = "Composable complex class support for attrs and dataclasses." -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "cattrs-24.1.3-py3-none-any.whl", hash = "sha256:adf957dddd26840f27ffbd060a6c4dd3b2192c5b7c2c0525ef1bd8131d8a83f5"}, - {file = "cattrs-24.1.3.tar.gz", hash = "sha256:981a6ef05875b5bb0c7fb68885546186d306f10f0f6718fe9b96c226e68821ff"}, -] - -[package.dependencies] -attrs = ">=23.1.0" - -[package.extras] -bson = ["pymongo (>=4.4.0)"] -cbor2 = ["cbor2 (>=5.4.6)"] -msgpack = ["msgpack (>=1.0.5)"] -msgspec = ["msgspec (>=0.18.5) ; implementation_name == \"cpython\""] -orjson = ["orjson (>=3.9.2) ; implementation_name == \"cpython\""] -pyyaml = ["pyyaml (>=6.0)"] -tomlkit = ["tomlkit (>=0.11.8)"] -ujson = ["ujson (>=5.7.0)"] - -[[package]] -name = "click" -version = "8.1.8" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, - {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["dev"] -markers = "sys_platform == \"win32\" or platform_system == \"Windows\"" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "fine-python-black" -version = "0.1.0" -description = "" -optional = false -python-versions = ">=3.11, < 3.14" -groups = ["dev"] -files = [] -develop = true - -[package.dependencies] -black = ">=25.1.0,<26.0.0" -finecode_extension_api = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "finecode_extension_api"} - -[package.source] -type = "directory" -url = "../../../extensions/fine_python_black" - -[[package]] -name = "fine-python-isort" -version = "0.1.0" -description = "" -optional = false -python-versions = ">= 3.11, < 3.14" -groups = ["dev"] -files = [] -develop = true - -[package.dependencies] -finecode_extension_api = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "finecode_extension_api"} -isort = ">=5.13,<6" - -[package.source] -type = "directory" -url = "../../../extensions/fine_python_isort" - -[[package]] -name = "finecode" -version = "0.2.0" -description = "" -optional = false -python-versions = ">=3.11, < 3.14" -groups = ["dev"] -files = [] -develop = true - -[package.dependencies] -click = "==8.1.*" -finecode_extension_api = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "finecode_extension_api"} -loguru = "==0.7.*" -ordered-set = "==4.1.*" -platformdirs = "==4.3.*" -pydantic = "==2.10.*" -pygls = "2.0.0-a2" -tomlkit = "==0.11.*" -watchdog = "==4.0.*" - -[package.source] -type = "directory" -url = "../../.." 
- -[[package]] -name = "finecode-extension-api" -version = "0.1.0" -description = "" -optional = false -python-versions = ">=3.11, < 3.14" -groups = ["dev"] -files = [] -develop = false - -[package.dependencies] -pydantic = ">=2.10.6,<3.0.0" -typing-extensions = ">=4.12.2,<5.0.0" - -[package.source] -type = "git" -url = "https://github.com/finecode-dev/finecode.git" -reference = "HEAD" -resolved_reference = "edb9e5f21b755a9df0501513e6001a9cda4c462b" -subdirectory = "finecode_extension_api" - -[[package]] -name = "isort" -version = "5.13.2" -description = "A Python utility / library to sort Python imports." -optional = false -python-versions = ">=3.8.0" -groups = ["dev"] -files = [ - {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, - {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, -] - -[package.extras] -colors = ["colorama (>=0.4.6)"] - -[[package]] -name = "loguru" -version = "0.7.3" -description = "Python logging made (stupidly) simple" -optional = false -python-versions = "<4.0,>=3.5" -groups = ["dev"] -files = [ - {file = "loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c"}, - {file = "loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6"}, -] - -[package.dependencies] -colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} -win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} - -[package.extras] -dev = ["Sphinx (==8.1.3) ; python_version >= \"3.11\"", "build (==1.2.2) ; python_version >= \"3.11\"", "colorama (==0.4.5) ; python_version < \"3.8\"", "colorama (==0.4.6) ; python_version >= \"3.8\"", "exceptiongroup (==1.1.3) ; python_version >= \"3.7\" and python_version < \"3.11\"", "freezegun (==1.1.0) ; python_version < \"3.8\"", "freezegun (==1.5.0) ; python_version >= 
\"3.8\"", "mypy (==v0.910) ; python_version < \"3.6\"", "mypy (==v0.971) ; python_version == \"3.6\"", "mypy (==v1.13.0) ; python_version >= \"3.8\"", "mypy (==v1.4.1) ; python_version == \"3.7\"", "myst-parser (==4.0.0) ; python_version >= \"3.11\"", "pre-commit (==4.0.1) ; python_version >= \"3.9\"", "pytest (==6.1.2) ; python_version < \"3.8\"", "pytest (==8.3.2) ; python_version >= \"3.8\"", "pytest-cov (==2.12.1) ; python_version < \"3.8\"", "pytest-cov (==5.0.0) ; python_version == \"3.8\"", "pytest-cov (==6.0.0) ; python_version >= \"3.9\"", "pytest-mypy-plugins (==1.9.3) ; python_version >= \"3.6\" and python_version < \"3.8\"", "pytest-mypy-plugins (==3.1.0) ; python_version >= \"3.8\"", "sphinx-rtd-theme (==3.0.2) ; python_version >= \"3.11\"", "tox (==3.27.1) ; python_version < \"3.8\"", "tox (==4.23.2) ; python_version >= \"3.8\"", "twine (==6.0.1) ; python_version >= \"3.11\""] - -[[package]] -name = "lsprotocol" -version = "2024.0.0b1" -description = "Python types for Language Server Protocol." -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "lsprotocol-2024.0.0b1-py3-none-any.whl", hash = "sha256:93785050ac155ae2be16b1ebfbd74c214feb3d3ef77b10399ce941e5ccef6ebd"}, - {file = "lsprotocol-2024.0.0b1.tar.gz", hash = "sha256:d3667fb70894d361aa6c495c5c8a1b2e6a44be65ff84c21a9cbb67ebfb4830fd"}, -] - -[package.dependencies] -attrs = ">=21.3.0" -cattrs = "!=23.2.1" - -[[package]] -name = "mypy-extensions" -version = "1.1.0" -description = "Type system extensions for programs checked with the mypy type checker." 
-optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, - {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, -] - -[[package]] -name = "ordered-set" -version = "4.1.0" -description = "An OrderedSet is a custom MutableSet that remembers its order, so that every" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8"}, - {file = "ordered_set-4.1.0-py3-none-any.whl", hash = "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562"}, -] - -[package.extras] -dev = ["black", "mypy", "pytest"] - -[[package]] -name = "packaging" -version = "25.0" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, - {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, -] - -[[package]] -name = "pathspec" -version = "0.12.1" -description = "Utility library for gitignore style pattern matching of file paths." -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, -] - -[[package]] -name = "platformdirs" -version = "4.3.7" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
-optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94"}, - {file = "platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351"}, -] - -[package.extras] -docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] -type = ["mypy (>=1.14.1)"] - -[[package]] -name = "pydantic" -version = "2.10.6" -description = "Data validation using Python type hints" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, - {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, -] - -[package.dependencies] -annotated-types = ">=0.6.0" -pydantic-core = "2.27.2" -typing-extensions = ">=4.12.2" - -[package.extras] -email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] - -[[package]] -name = "pydantic-core" -version = "2.27.2" -description = "Core functionality for Pydantic validation and serialization" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, - {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, - {file = 
"pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, - {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, - {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, - {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, - {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, - {file = 
"pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", 
hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, - {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = 
"sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, - {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, - {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = 
"sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, - {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, - {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, - {file 
= "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, - {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, - {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, - {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, - {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, - {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, - {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, - {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, -] - -[package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" - -[[package]] -name = "pygls" -version = "2.0.0a2" -description = "A pythonic generic language 
server (pronounced like 'pie glass')" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "pygls-2.0.0a2-py3-none-any.whl", hash = "sha256:b202369321409343aa6440d73111d9fa0c22e580466ff1c7696b8358bb91f243"}, - {file = "pygls-2.0.0a2.tar.gz", hash = "sha256:03e00634ed8d989918268aaa4b4a0c3ab857ea2d4ee94514a52efa5ddd6d5d9f"}, -] - -[package.dependencies] -cattrs = ">=23.1.2" -lsprotocol = "2024.0.0b1" - -[package.extras] -ws = ["websockets (>=13.0)"] - -[[package]] -name = "tomlkit" -version = "0.11.8" -description = "Style preserving TOML library" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "tomlkit-0.11.8-py3-none-any.whl", hash = "sha256:8c726c4c202bdb148667835f68d68780b9a003a9ec34167b6c673b38eff2a171"}, - {file = "tomlkit-0.11.8.tar.gz", hash = "sha256:9330fc7faa1db67b541b28e62018c17d20be733177d290a13b24c62d1614e0c3"}, -] - -[[package]] -name = "typing-extensions" -version = "4.13.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, - {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, -] - -[[package]] -name = "watchdog" -version = "4.0.2" -description = "Filesystem events monitoring" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ede7f010f2239b97cc79e6cb3c249e72962404ae3865860855d5cbe708b0fd22"}, - {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a2cffa171445b0efa0726c561eca9a27d00a1f2b83846dbd5a4f639c4f8ca8e1"}, - {file = "watchdog-4.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c50f148b31b03fbadd6d0b5980e38b558046b127dc483e5e4505fcef250f9503"}, - 
{file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7c7d4bf585ad501c5f6c980e7be9c4f15604c7cc150e942d82083b31a7548930"}, - {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:914285126ad0b6eb2258bbbcb7b288d9dfd655ae88fa28945be05a7b475a800b"}, - {file = "watchdog-4.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:984306dc4720da5498b16fc037b36ac443816125a3705dfde4fd90652d8028ef"}, - {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1cdcfd8142f604630deef34722d695fb455d04ab7cfe9963055df1fc69e6727a"}, - {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7ab624ff2f663f98cd03c8b7eedc09375a911794dfea6bf2a359fcc266bff29"}, - {file = "watchdog-4.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:132937547a716027bd5714383dfc40dc66c26769f1ce8a72a859d6a48f371f3a"}, - {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:cd67c7df93eb58f360c43802acc945fa8da70c675b6fa37a241e17ca698ca49b"}, - {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bcfd02377be80ef3b6bc4ce481ef3959640458d6feaae0bd43dd90a43da90a7d"}, - {file = "watchdog-4.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:980b71510f59c884d684b3663d46e7a14b457c9611c481e5cef08f4dd022eed7"}, - {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:aa160781cafff2719b663c8a506156e9289d111d80f3387cf3af49cedee1f040"}, - {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f6ee8dedd255087bc7fe82adf046f0b75479b989185fb0bdf9a98b612170eac7"}, - {file = "watchdog-4.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0b4359067d30d5b864e09c8597b112fe0a0a59321a0f331498b013fb097406b4"}, - {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:770eef5372f146997638d737c9a3c597a3b41037cfbc5c41538fc27c09c3a3f9"}, - {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:eeea812f38536a0aa859972d50c76e37f4456474b02bd93674d1947cf1e39578"}, - {file = "watchdog-4.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b2c45f6e1e57ebb4687690c05bc3a2c1fb6ab260550c4290b8abb1335e0fd08b"}, - {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:10b6683df70d340ac3279eff0b2766813f00f35a1d37515d2c99959ada8f05fa"}, - {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7c739888c20f99824f7aa9d31ac8a97353e22d0c0e54703a547a218f6637eb3"}, - {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c100d09ac72a8a08ddbf0629ddfa0b8ee41740f9051429baa8e31bb903ad7508"}, - {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f5315a8c8dd6dd9425b974515081fc0aadca1d1d61e078d2246509fd756141ee"}, - {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:2d468028a77b42cc685ed694a7a550a8d1771bb05193ba7b24006b8241a571a1"}, - {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f15edcae3830ff20e55d1f4e743e92970c847bcddc8b7509bcd172aa04de506e"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:936acba76d636f70db8f3c66e76aa6cb5136a936fc2a5088b9ce1c7a3508fc83"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_armv7l.whl", hash = "sha256:e252f8ca942a870f38cf785aef420285431311652d871409a64e2a0a52a2174c"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_i686.whl", hash = "sha256:0e83619a2d5d436a7e58a1aea957a3c1ccbf9782c43c0b4fed80580e5e4acd1a"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64.whl", hash = "sha256:88456d65f207b39f1981bf772e473799fcdc10801062c36fd5ad9f9d1d463a73"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:32be97f3b75693a93c683787a87a0dc8db98bb84701539954eef991fb35f5fbc"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_s390x.whl", hash = "sha256:c82253cfc9be68e3e49282831afad2c1f6593af80c0daf1287f6a92657986757"}, - 
{file = "watchdog-4.0.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:c0b14488bd336c5b1845cee83d3e631a1f8b4e9c5091ec539406e4a324f882d8"}, - {file = "watchdog-4.0.2-py3-none-win32.whl", hash = "sha256:0d8a7e523ef03757a5aa29f591437d64d0d894635f8a50f370fe37f913ce4e19"}, - {file = "watchdog-4.0.2-py3-none-win_amd64.whl", hash = "sha256:c344453ef3bf875a535b0488e3ad28e341adbd5a9ffb0f7d62cefacc8824ef2b"}, - {file = "watchdog-4.0.2-py3-none-win_ia64.whl", hash = "sha256:baececaa8edff42cd16558a639a9b0ddf425f93d892e8392a56bf904f5eff22c"}, - {file = "watchdog-4.0.2.tar.gz", hash = "sha256:b4dfbb6c49221be4535623ea4474a4d6ee0a9cef4a80b20c28db4d858b64e270"}, -] - -[package.extras] -watchmedo = ["PyYAML (>=3.10)"] - -[[package]] -name = "win32-setctime" -version = "1.2.0" -description = "A small Python utility to set file creation time on Windows" -optional = false -python-versions = ">=3.5" -groups = ["dev"] -markers = "sys_platform == \"win32\"" -files = [ - {file = "win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390"}, - {file = "win32_setctime-1.2.0.tar.gz", hash = "sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0"}, -] - -[package.extras] -dev = ["black (>=19.3b0) ; python_version >= \"3.6\"", "pytest (>=4.6.2)"] - -[metadata] -lock-version = "2.1" -python-versions = ">=3.11, < 3.14" -content-hash = "089bf66866fef8312a122c6a93913fee2a513dc16ac1c3ef0b055566979fd685" diff --git a/tests/__testdata__/list_ws/ui_app/poetry.lock b/tests/__testdata__/list_ws/ui_app/poetry.lock deleted file mode 100644 index d33f9f4a..00000000 --- a/tests/__testdata__/list_ws/ui_app/poetry.lock +++ /dev/null @@ -1,860 +0,0 @@ -# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. 
- -[[package]] -name = "annotated-types" -version = "0.7.0" -description = "Reusable constraint types to use with typing.Annotated" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, - {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, -] - -[[package]] -name = "attrs" -version = "25.3.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, - {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, -] - -[package.extras] -benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -docs = 
["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] -tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] - -[[package]] -name = "black" -version = "25.1.0" -description = "The uncompromising code formatter." -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"}, - {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"}, - {file = "black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7"}, - {file = "black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9"}, - {file = "black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0"}, - {file = "black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299"}, - {file = "black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096"}, - {file = "black-25.1.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2"}, - {file = "black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b"}, - {file = "black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc"}, - {file = "black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f"}, - {file = "black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba"}, - {file = "black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f"}, - {file = "black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3"}, - {file = "black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171"}, - {file = "black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18"}, - {file = "black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0"}, - {file = "black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"}, - {file = "black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e"}, - {file = "black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355"}, - {file = "black-25.1.0-py3-none-any.whl", hash = 
"sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717"}, - {file = "black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.10)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "cattrs" -version = "24.1.3" -description = "Composable complex class support for attrs and dataclasses." -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "cattrs-24.1.3-py3-none-any.whl", hash = "sha256:adf957dddd26840f27ffbd060a6c4dd3b2192c5b7c2c0525ef1bd8131d8a83f5"}, - {file = "cattrs-24.1.3.tar.gz", hash = "sha256:981a6ef05875b5bb0c7fb68885546186d306f10f0f6718fe9b96c226e68821ff"}, -] - -[package.dependencies] -attrs = ">=23.1.0" - -[package.extras] -bson = ["pymongo (>=4.4.0)"] -cbor2 = ["cbor2 (>=5.4.6)"] -msgpack = ["msgpack (>=1.0.5)"] -msgspec = ["msgspec (>=0.18.5) ; implementation_name == \"cpython\""] -orjson = ["orjson (>=3.9.2) ; implementation_name == \"cpython\""] -pyyaml = ["pyyaml (>=6.0)"] -tomlkit = ["tomlkit (>=0.11.8)"] -ujson = ["ujson (>=5.7.0)"] - -[[package]] -name = "click" -version = "8.1.8" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, - {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["dev"] -markers = "sys_platform == \"win32\" or platform_system == \"Windows\"" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "fine-python-ast" -version = "0.1.0" -description = "" -optional = false -python-versions = ">=3.11, < 3.14" -groups = ["dev"] -files = [] -develop = false - -[package.dependencies] -finecode_extension_api = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "finecode_extension_api"} - -[package.source] -type = "git" -url = "https://github.com/finecode-dev/finecode.git" -reference = "HEAD" -resolved_reference = "edb9e5f21b755a9df0501513e6001a9cda4c462b" -subdirectory = "extensions/fine_python_ast" - -[[package]] -name = "fine-python-black" -version = "0.1.0" -description = "" -optional = false -python-versions = ">=3.11, < 3.14" -groups = ["dev"] -files = [] -develop = false - -[package.dependencies] -black = ">=25.1.0,<26.0.0" -finecode_extension_api = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "finecode_extension_api"} - -[package.source] -type = "git" -url = "https://github.com/finecode-dev/finecode.git" -reference = "HEAD" -resolved_reference = "edb9e5f21b755a9df0501513e6001a9cda4c462b" -subdirectory = "extensions/fine_python_black" - -[[package]] -name = "fine-python-flake8" -version = "0.1.0" -description = "" -optional = false -python-versions = ">=3.11, < 3.14" -groups = ["dev"] -files = [] -develop = false - -[package.dependencies] -fine_python_ast = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "extensions/fine_python_ast"} -finecode_extension_api = {git = "https://github.com/finecode-dev/finecode.git", subdirectory 
= "finecode_extension_api"} -flake8 = ">=7.1.2,<8.0.0" -types-flake8 = ">=7.1.0.20241020,<8.0.0.0" - -[package.source] -type = "git" -url = "https://github.com/finecode-dev/finecode.git" -reference = "HEAD" -resolved_reference = "edb9e5f21b755a9df0501513e6001a9cda4c462b" -subdirectory = "extensions/fine_python_flake8" - -[[package]] -name = "fine-python-format" -version = "0.1.0" -description = "" -optional = false -python-versions = ">=3.11, < 3.14" -groups = ["dev"] -files = [] -develop = false - -[package.dependencies] -fine_python_black = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "extensions/fine_python_black"} -fine_python_isort = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "extensions/fine_python_isort"} - -[package.source] -type = "git" -url = "https://github.com/finecode-dev/finecode.git" -reference = "HEAD" -resolved_reference = "edb9e5f21b755a9df0501513e6001a9cda4c462b" -subdirectory = "presets/fine_python_format" - -[[package]] -name = "fine-python-isort" -version = "0.1.0" -description = "" -optional = false -python-versions = ">= 3.11, < 3.14" -groups = ["dev"] -files = [] -develop = false - -[package.dependencies] -finecode_extension_api = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "finecode_extension_api"} -isort = ">=5.13,<6" - -[package.source] -type = "git" -url = "https://github.com/finecode-dev/finecode.git" -reference = "HEAD" -resolved_reference = "edb9e5f21b755a9df0501513e6001a9cda4c462b" -subdirectory = "extensions/fine_python_isort" - -[[package]] -name = "fine-python-lint" -version = "0.1.0" -description = "" -optional = false -python-versions = ">=3.11, < 3.14" -groups = ["dev"] -files = [] -develop = false - -[package.dependencies] -fine_python_flake8 = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "extensions/fine_python_flake8"} -fine_python_mypy = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = 
"extensions/fine_python_mypy"} -flake8-bugbear = ">=24.12.12,<25.0.0" - -[package.source] -type = "git" -url = "https://github.com/finecode-dev/finecode.git" -reference = "HEAD" -resolved_reference = "edb9e5f21b755a9df0501513e6001a9cda4c462b" -subdirectory = "presets/fine_python_lint" - -[[package]] -name = "fine-python-mypy" -version = "0.1.0" -description = "" -optional = false -python-versions = ">=3.11, < 3.14" -groups = ["dev"] -files = [] -develop = false - -[package.dependencies] -finecode_extension_api = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "finecode_extension_api"} -mypy = ">=1.15,<2.0" - -[package.source] -type = "git" -url = "https://github.com/finecode-dev/finecode.git" -reference = "HEAD" -resolved_reference = "edb9e5f21b755a9df0501513e6001a9cda4c462b" -subdirectory = "extensions/fine_python_mypy" - -[[package]] -name = "fine-python-recommended" -version = "0.1.0" -description = "" -optional = false -python-versions = ">=3.11, < 3.14" -groups = ["dev"] -files = [] -develop = true - -[package.dependencies] -fine_python_format = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "presets/fine_python_format"} -fine_python_lint = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "presets/fine_python_lint"} - -[package.source] -type = "directory" -url = "../../../presets/fine_python_recommended" - -[[package]] -name = "finecode" -version = "0.2.0" -description = "" -optional = false -python-versions = ">=3.11, < 3.14" -groups = ["dev"] -files = [] -develop = true - -[package.dependencies] -click = "==8.1.*" -finecode_extension_api = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "finecode_extension_api"} -loguru = "==0.7.*" -ordered-set = "==4.1.*" -platformdirs = "==4.3.*" -pydantic = "==2.10.*" -pygls = "2.0.0-a2" -tomlkit = "==0.11.*" -watchdog = "==4.0.*" - -[package.source] -type = "directory" -url = "../../.." 
- -[[package]] -name = "finecode-extension-api" -version = "0.1.0" -description = "" -optional = false -python-versions = ">=3.11, < 3.14" -groups = ["dev"] -files = [] -develop = false - -[package.dependencies] -pydantic = ">=2.10.6,<3.0.0" -typing-extensions = ">=4.12.2,<5.0.0" - -[package.source] -type = "git" -url = "https://github.com/finecode-dev/finecode.git" -reference = "HEAD" -resolved_reference = "edb9e5f21b755a9df0501513e6001a9cda4c462b" -subdirectory = "finecode_extension_api" - -[[package]] -name = "flake8" -version = "7.2.0" -description = "the modular source code checker: pep8 pyflakes and co" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "flake8-7.2.0-py2.py3-none-any.whl", hash = "sha256:93b92ba5bdb60754a6da14fa3b93a9361fd00a59632ada61fd7b130436c40343"}, - {file = "flake8-7.2.0.tar.gz", hash = "sha256:fa558ae3f6f7dbf2b4f22663e5343b6b6023620461f8d4ff2019ef4b5ee70426"}, -] - -[package.dependencies] -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.13.0,<2.14.0" -pyflakes = ">=3.3.0,<3.4.0" - -[[package]] -name = "flake8-bugbear" -version = "24.12.12" -description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." -optional = false -python-versions = ">=3.8.1" -groups = ["dev"] -files = [ - {file = "flake8_bugbear-24.12.12-py3-none-any.whl", hash = "sha256:1b6967436f65ca22a42e5373aaa6f2d87966ade9aa38d4baf2a1be550767545e"}, - {file = "flake8_bugbear-24.12.12.tar.gz", hash = "sha256:46273cef0a6b6ff48ca2d69e472f41420a42a46e24b2a8972e4f0d6733d12a64"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -flake8 = ">=6.0.0" - -[package.extras] -dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", "tox"] - -[[package]] -name = "isort" -version = "5.13.2" -description = "A Python utility / library to sort Python imports." 
-optional = false -python-versions = ">=3.8.0" -groups = ["dev"] -files = [ - {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, - {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, -] - -[package.extras] -colors = ["colorama (>=0.4.6)"] - -[[package]] -name = "loguru" -version = "0.7.3" -description = "Python logging made (stupidly) simple" -optional = false -python-versions = "<4.0,>=3.5" -groups = ["dev"] -files = [ - {file = "loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c"}, - {file = "loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6"}, -] - -[package.dependencies] -colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} -win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} - -[package.extras] -dev = ["Sphinx (==8.1.3) ; python_version >= \"3.11\"", "build (==1.2.2) ; python_version >= \"3.11\"", "colorama (==0.4.5) ; python_version < \"3.8\"", "colorama (==0.4.6) ; python_version >= \"3.8\"", "exceptiongroup (==1.1.3) ; python_version >= \"3.7\" and python_version < \"3.11\"", "freezegun (==1.1.0) ; python_version < \"3.8\"", "freezegun (==1.5.0) ; python_version >= \"3.8\"", "mypy (==v0.910) ; python_version < \"3.6\"", "mypy (==v0.971) ; python_version == \"3.6\"", "mypy (==v1.13.0) ; python_version >= \"3.8\"", "mypy (==v1.4.1) ; python_version == \"3.7\"", "myst-parser (==4.0.0) ; python_version >= \"3.11\"", "pre-commit (==4.0.1) ; python_version >= \"3.9\"", "pytest (==6.1.2) ; python_version < \"3.8\"", "pytest (==8.3.2) ; python_version >= \"3.8\"", "pytest-cov (==2.12.1) ; python_version < \"3.8\"", "pytest-cov (==5.0.0) ; python_version == \"3.8\"", "pytest-cov (==6.0.0) ; python_version >= \"3.9\"", "pytest-mypy-plugins (==1.9.3) ; python_version >= \"3.6\" 
and python_version < \"3.8\"", "pytest-mypy-plugins (==3.1.0) ; python_version >= \"3.8\"", "sphinx-rtd-theme (==3.0.2) ; python_version >= \"3.11\"", "tox (==3.27.1) ; python_version < \"3.8\"", "tox (==4.23.2) ; python_version >= \"3.8\"", "twine (==6.0.1) ; python_version >= \"3.11\""] - -[[package]] -name = "lsprotocol" -version = "2024.0.0b1" -description = "Python types for Language Server Protocol." -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "lsprotocol-2024.0.0b1-py3-none-any.whl", hash = "sha256:93785050ac155ae2be16b1ebfbd74c214feb3d3ef77b10399ce941e5ccef6ebd"}, - {file = "lsprotocol-2024.0.0b1.tar.gz", hash = "sha256:d3667fb70894d361aa6c495c5c8a1b2e6a44be65ff84c21a9cbb67ebfb4830fd"}, -] - -[package.dependencies] -attrs = ">=21.3.0" -cattrs = "!=23.2.1" - -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -optional = false -python-versions = ">=3.6" -groups = ["dev"] -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - -[[package]] -name = "mypy" -version = "1.15.0" -description = "Optional static typing for Python" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, - {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, - {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}, - {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", 
hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}, - {file = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}, - {file = "mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}, - {file = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}, - {file = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}, - {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}, - {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}, - {file = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}, - {file = "mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}, - {file = "mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd"}, - {file = "mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f"}, - {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464"}, - {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"}, - {file = 
"mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}, - {file = "mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}, - {file = "mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445"}, - {file = "mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d"}, - {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5"}, - {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036"}, - {file = "mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357"}, - {file = "mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf"}, - {file = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"}, - {file = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"}, - {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"}, - {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"}, - {file = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"}, - {file = "mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"}, - {file = "mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}, - {file = "mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43"}, -] - -[package.dependencies] -mypy_extensions = ">=1.0.0" -typing_extensions = ">=4.6.0" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -faster-cache = ["orjson"] -install-types = ["pip"] -mypyc = ["setuptools (>=50)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "1.1.0" -description = "Type system extensions for programs checked with the mypy type checker." -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, - {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, -] - -[[package]] -name = "ordered-set" -version = "4.1.0" -description = "An OrderedSet is a custom MutableSet that remembers its order, so that every" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8"}, - {file = "ordered_set-4.1.0-py3-none-any.whl", hash = "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562"}, -] - -[package.extras] -dev = ["black", "mypy", "pytest"] - -[[package]] -name = "packaging" -version = "25.0" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "packaging-25.0-py3-none-any.whl", hash = 
"sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, - {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, -] - -[[package]] -name = "pathspec" -version = "0.12.1" -description = "Utility library for gitignore style pattern matching of file paths." -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, -] - -[[package]] -name = "platformdirs" -version = "4.3.7" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94"}, - {file = "platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351"}, -] - -[package.extras] -docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] -type = ["mypy (>=1.14.1)"] - -[[package]] -name = "pycodestyle" -version = "2.13.0" -description = "Python style guide checker" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "pycodestyle-2.13.0-py2.py3-none-any.whl", hash = "sha256:35863c5974a271c7a726ed228a14a4f6daf49df369d8c50cd9a6f58a5e143ba9"}, - {file = "pycodestyle-2.13.0.tar.gz", hash = "sha256:c8415bf09abe81d9c7f872502a6eee881fbe85d8763dd5b9924bb0a01d67efae"}, -] - -[[package]] -name = "pydantic" -version = "2.10.6" -description = "Data validation using Python type hints" -optional = 
false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, - {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, -] - -[package.dependencies] -annotated-types = ">=0.6.0" -pydantic-core = "2.27.2" -typing-extensions = ">=4.12.2" - -[package.extras] -email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] - -[[package]] -name = "pydantic-core" -version = "2.27.2" -description = "Core functionality for Pydantic validation and serialization" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, - {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, - {file = 
"pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, - {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, - {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, - {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, - {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, - {file = 
"pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, - {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, - {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, - {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, - {file = 
"pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, - {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, - {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, - {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, - {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, - {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, 
- {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, - {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, - {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", 
hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, - {file = 
"pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, - {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, -] - -[package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" - -[[package]] -name = "pyflakes" -version = "3.3.2" -description = "passive checker of Python programs" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "pyflakes-3.3.2-py2.py3-none-any.whl", hash = "sha256:5039c8339cbb1944045f4ee5466908906180f13cc99cc9949348d10f82a5c32a"}, - {file = "pyflakes-3.3.2.tar.gz", hash = "sha256:6dfd61d87b97fba5dcfaaf781171ac16be16453be6d816147989e7f6e6a9576b"}, -] - -[[package]] -name = "pygls" -version = "2.0.0a2" -description = "A pythonic generic language server (pronounced like 'pie glass')" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "pygls-2.0.0a2-py3-none-any.whl", hash = "sha256:b202369321409343aa6440d73111d9fa0c22e580466ff1c7696b8358bb91f243"}, - {file = "pygls-2.0.0a2.tar.gz", hash = 
"sha256:03e00634ed8d989918268aaa4b4a0c3ab857ea2d4ee94514a52efa5ddd6d5d9f"}, -] - -[package.dependencies] -cattrs = ">=23.1.2" -lsprotocol = "2024.0.0b1" - -[package.extras] -ws = ["websockets (>=13.0)"] - -[[package]] -name = "tomlkit" -version = "0.11.8" -description = "Style preserving TOML library" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "tomlkit-0.11.8-py3-none-any.whl", hash = "sha256:8c726c4c202bdb148667835f68d68780b9a003a9ec34167b6c673b38eff2a171"}, - {file = "tomlkit-0.11.8.tar.gz", hash = "sha256:9330fc7faa1db67b541b28e62018c17d20be733177d290a13b24c62d1614e0c3"}, -] - -[[package]] -name = "types-flake8" -version = "7.2.0.20250330" -description = "Typing stubs for flake8" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "types_flake8-7.2.0.20250330-py3-none-any.whl", hash = "sha256:af31590a269586309b80a439c94e59359e139e17475a8fc8d3c426ab01a40547"}, - {file = "types_flake8-7.2.0.20250330.tar.gz", hash = "sha256:481e5c914a26fabd23e85704ad055f2716f9238740bfe6fe77259c2533cb970c"}, -] - -[package.dependencies] -types-pyflakes = "*" - -[[package]] -name = "types-pyflakes" -version = "3.3.2.20250429" -description = "Typing stubs for pyflakes" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "types_pyflakes-3.3.2.20250429-py3-none-any.whl", hash = "sha256:f9ccc1968ddd1a18232c1e66cfcce8a9e8f4b2b85fbbf682bf87148a2b2d58a0"}, - {file = "types_pyflakes-3.3.2.20250429.tar.gz", hash = "sha256:a81b0ee91e34d143f655d366bd4002730f0e342a5aa338779d2f995515ce1c5c"}, -] - -[[package]] -name = "typing-extensions" -version = "4.13.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, - {file = "typing_extensions-4.13.2.tar.gz", hash = 
"sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, -] - -[[package]] -name = "watchdog" -version = "4.0.2" -description = "Filesystem events monitoring" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ede7f010f2239b97cc79e6cb3c249e72962404ae3865860855d5cbe708b0fd22"}, - {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a2cffa171445b0efa0726c561eca9a27d00a1f2b83846dbd5a4f639c4f8ca8e1"}, - {file = "watchdog-4.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c50f148b31b03fbadd6d0b5980e38b558046b127dc483e5e4505fcef250f9503"}, - {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7c7d4bf585ad501c5f6c980e7be9c4f15604c7cc150e942d82083b31a7548930"}, - {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:914285126ad0b6eb2258bbbcb7b288d9dfd655ae88fa28945be05a7b475a800b"}, - {file = "watchdog-4.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:984306dc4720da5498b16fc037b36ac443816125a3705dfde4fd90652d8028ef"}, - {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1cdcfd8142f604630deef34722d695fb455d04ab7cfe9963055df1fc69e6727a"}, - {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7ab624ff2f663f98cd03c8b7eedc09375a911794dfea6bf2a359fcc266bff29"}, - {file = "watchdog-4.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:132937547a716027bd5714383dfc40dc66c26769f1ce8a72a859d6a48f371f3a"}, - {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:cd67c7df93eb58f360c43802acc945fa8da70c675b6fa37a241e17ca698ca49b"}, - {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bcfd02377be80ef3b6bc4ce481ef3959640458d6feaae0bd43dd90a43da90a7d"}, - {file = "watchdog-4.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:980b71510f59c884d684b3663d46e7a14b457c9611c481e5cef08f4dd022eed7"}, - {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:aa160781cafff2719b663c8a506156e9289d111d80f3387cf3af49cedee1f040"}, - {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f6ee8dedd255087bc7fe82adf046f0b75479b989185fb0bdf9a98b612170eac7"}, - {file = "watchdog-4.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0b4359067d30d5b864e09c8597b112fe0a0a59321a0f331498b013fb097406b4"}, - {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:770eef5372f146997638d737c9a3c597a3b41037cfbc5c41538fc27c09c3a3f9"}, - {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eeea812f38536a0aa859972d50c76e37f4456474b02bd93674d1947cf1e39578"}, - {file = "watchdog-4.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b2c45f6e1e57ebb4687690c05bc3a2c1fb6ab260550c4290b8abb1335e0fd08b"}, - {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:10b6683df70d340ac3279eff0b2766813f00f35a1d37515d2c99959ada8f05fa"}, - {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7c739888c20f99824f7aa9d31ac8a97353e22d0c0e54703a547a218f6637eb3"}, - {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c100d09ac72a8a08ddbf0629ddfa0b8ee41740f9051429baa8e31bb903ad7508"}, - {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f5315a8c8dd6dd9425b974515081fc0aadca1d1d61e078d2246509fd756141ee"}, - {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:2d468028a77b42cc685ed694a7a550a8d1771bb05193ba7b24006b8241a571a1"}, - {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f15edcae3830ff20e55d1f4e743e92970c847bcddc8b7509bcd172aa04de506e"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_aarch64.whl", hash = 
"sha256:936acba76d636f70db8f3c66e76aa6cb5136a936fc2a5088b9ce1c7a3508fc83"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_armv7l.whl", hash = "sha256:e252f8ca942a870f38cf785aef420285431311652d871409a64e2a0a52a2174c"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_i686.whl", hash = "sha256:0e83619a2d5d436a7e58a1aea957a3c1ccbf9782c43c0b4fed80580e5e4acd1a"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64.whl", hash = "sha256:88456d65f207b39f1981bf772e473799fcdc10801062c36fd5ad9f9d1d463a73"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:32be97f3b75693a93c683787a87a0dc8db98bb84701539954eef991fb35f5fbc"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_s390x.whl", hash = "sha256:c82253cfc9be68e3e49282831afad2c1f6593af80c0daf1287f6a92657986757"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:c0b14488bd336c5b1845cee83d3e631a1f8b4e9c5091ec539406e4a324f882d8"}, - {file = "watchdog-4.0.2-py3-none-win32.whl", hash = "sha256:0d8a7e523ef03757a5aa29f591437d64d0d894635f8a50f370fe37f913ce4e19"}, - {file = "watchdog-4.0.2-py3-none-win_amd64.whl", hash = "sha256:c344453ef3bf875a535b0488e3ad28e341adbd5a9ffb0f7d62cefacc8824ef2b"}, - {file = "watchdog-4.0.2-py3-none-win_ia64.whl", hash = "sha256:baececaa8edff42cd16558a639a9b0ddf425f93d892e8392a56bf904f5eff22c"}, - {file = "watchdog-4.0.2.tar.gz", hash = "sha256:b4dfbb6c49221be4535623ea4474a4d6ee0a9cef4a80b20c28db4d858b64e270"}, -] - -[package.extras] -watchmedo = ["PyYAML (>=3.10)"] - -[[package]] -name = "win32-setctime" -version = "1.2.0" -description = "A small Python utility to set file creation time on Windows" -optional = false -python-versions = ">=3.5" -groups = ["dev"] -markers = "sys_platform == \"win32\"" -files = [ - {file = "win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390"}, - {file = "win32_setctime-1.2.0.tar.gz", hash = 
"sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0"}, -] - -[package.extras] -dev = ["black (>=19.3b0) ; python_version >= \"3.6\"", "pytest (>=4.6.2)"] - -[metadata] -lock-version = "2.1" -python-versions = ">=3.11, < 3.14" -content-hash = "ed1dc1133af581f1975b16cf755a302ac01abefd7bc66afe50ed9064c1425f29" diff --git a/tests/__testdata__/nested_package/pyback/poetry.lock b/tests/__testdata__/nested_package/pyback/poetry.lock deleted file mode 100644 index d33f9f4a..00000000 --- a/tests/__testdata__/nested_package/pyback/poetry.lock +++ /dev/null @@ -1,860 +0,0 @@ -# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. - -[[package]] -name = "annotated-types" -version = "0.7.0" -description = "Reusable constraint types to use with typing.Annotated" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, - {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, -] - -[[package]] -name = "attrs" -version = "25.3.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, - {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, -] - -[package.extras] -benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -cov = ["cloudpickle ; 
platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] -tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] - -[[package]] -name = "black" -version = "25.1.0" -description = "The uncompromising code formatter." 
-optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "black-25.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:759e7ec1e050a15f89b770cefbf91ebee8917aac5c20483bc2d80a6c3a04df32"}, - {file = "black-25.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e519ecf93120f34243e6b0054db49c00a35f84f195d5bce7e9f5cfc578fc2da"}, - {file = "black-25.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:055e59b198df7ac0b7efca5ad7ff2516bca343276c466be72eb04a3bcc1f82d7"}, - {file = "black-25.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:db8ea9917d6f8fc62abd90d944920d95e73c83a5ee3383493e35d271aca872e9"}, - {file = "black-25.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a39337598244de4bae26475f77dda852ea00a93bd4c728e09eacd827ec929df0"}, - {file = "black-25.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96c1c7cd856bba8e20094e36e0f948718dc688dba4a9d78c3adde52b9e6c2299"}, - {file = "black-25.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bce2e264d59c91e52d8000d507eb20a9aca4a778731a08cfff7e5ac4a4bb7096"}, - {file = "black-25.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:172b1dbff09f86ce6f4eb8edf9dede08b1fce58ba194c87d7a4f1a5aa2f5b3c2"}, - {file = "black-25.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4b60580e829091e6f9238c848ea6750efed72140b91b048770b64e74fe04908b"}, - {file = "black-25.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e2978f6df243b155ef5fa7e558a43037c3079093ed5d10fd84c43900f2d8ecc"}, - {file = "black-25.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b48735872ec535027d979e8dcb20bf4f70b5ac75a8ea99f127c106a7d7aba9f"}, - {file = "black-25.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:ea0213189960bda9cf99be5b8c8ce66bb054af5e9e861249cd23471bd7b0b3ba"}, - {file = "black-25.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:8f0b18a02996a836cc9c9c78e5babec10930862827b1b724ddfe98ccf2f2fe4f"}, - {file = "black-25.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:afebb7098bfbc70037a053b91ae8437c3857482d3a690fefc03e9ff7aa9a5fd3"}, - {file = "black-25.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:030b9759066a4ee5e5aca28c3c77f9c64789cdd4de8ac1df642c40b708be6171"}, - {file = "black-25.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:a22f402b410566e2d1c950708c77ebf5ebd5d0d88a6a2e87c86d9fb48afa0d18"}, - {file = "black-25.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1ee0a0c330f7b5130ce0caed9936a904793576ef4d2b98c40835d6a65afa6a0"}, - {file = "black-25.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3df5f1bf91d36002b0a75389ca8663510cf0531cca8aa5c1ef695b46d98655f"}, - {file = "black-25.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d9e6827d563a2c820772b32ce8a42828dc6790f095f441beef18f96aa6f8294e"}, - {file = "black-25.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:bacabb307dca5ebaf9c118d2d2f6903da0d62c9faa82bd21a33eecc319559355"}, - {file = "black-25.1.0-py3-none-any.whl", hash = "sha256:95e8176dae143ba9097f351d174fdaf0ccd29efb414b362ae3fd72bf0f710717"}, - {file = "black-25.1.0.tar.gz", hash = "sha256:33496d5cd1222ad73391352b4ae8da15253c5de89b93a80b3e2c8d9a19ec2666"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.10)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "cattrs" -version = "24.1.3" -description = "Composable complex class support for attrs and dataclasses." 
-optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "cattrs-24.1.3-py3-none-any.whl", hash = "sha256:adf957dddd26840f27ffbd060a6c4dd3b2192c5b7c2c0525ef1bd8131d8a83f5"}, - {file = "cattrs-24.1.3.tar.gz", hash = "sha256:981a6ef05875b5bb0c7fb68885546186d306f10f0f6718fe9b96c226e68821ff"}, -] - -[package.dependencies] -attrs = ">=23.1.0" - -[package.extras] -bson = ["pymongo (>=4.4.0)"] -cbor2 = ["cbor2 (>=5.4.6)"] -msgpack = ["msgpack (>=1.0.5)"] -msgspec = ["msgspec (>=0.18.5) ; implementation_name == \"cpython\""] -orjson = ["orjson (>=3.9.2) ; implementation_name == \"cpython\""] -pyyaml = ["pyyaml (>=6.0)"] -tomlkit = ["tomlkit (>=0.11.8)"] -ujson = ["ujson (>=5.7.0)"] - -[[package]] -name = "click" -version = "8.1.8" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, - {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["dev"] -markers = "sys_platform == \"win32\" or platform_system == \"Windows\"" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "fine-python-ast" -version = "0.1.0" -description = "" -optional = false -python-versions = ">=3.11, < 3.14" -groups = ["dev"] -files = [] -develop = false - -[package.dependencies] -finecode_extension_api = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "finecode_extension_api"} - -[package.source] -type = "git" -url = "https://github.com/finecode-dev/finecode.git" -reference = "HEAD" -resolved_reference = "edb9e5f21b755a9df0501513e6001a9cda4c462b" -subdirectory = "extensions/fine_python_ast" - -[[package]] -name = "fine-python-black" -version = "0.1.0" -description = "" -optional = false -python-versions = ">=3.11, < 3.14" -groups = ["dev"] -files = [] -develop = false - -[package.dependencies] -black = ">=25.1.0,<26.0.0" -finecode_extension_api = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "finecode_extension_api"} - -[package.source] -type = "git" -url = "https://github.com/finecode-dev/finecode.git" -reference = "HEAD" -resolved_reference = "edb9e5f21b755a9df0501513e6001a9cda4c462b" -subdirectory = "extensions/fine_python_black" - -[[package]] -name = "fine-python-flake8" -version = "0.1.0" -description = "" -optional = false -python-versions = ">=3.11, < 3.14" -groups = ["dev"] -files = [] -develop = false - -[package.dependencies] -fine_python_ast = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "extensions/fine_python_ast"} -finecode_extension_api = {git = "https://github.com/finecode-dev/finecode.git", subdirectory 
= "finecode_extension_api"} -flake8 = ">=7.1.2,<8.0.0" -types-flake8 = ">=7.1.0.20241020,<8.0.0.0" - -[package.source] -type = "git" -url = "https://github.com/finecode-dev/finecode.git" -reference = "HEAD" -resolved_reference = "edb9e5f21b755a9df0501513e6001a9cda4c462b" -subdirectory = "extensions/fine_python_flake8" - -[[package]] -name = "fine-python-format" -version = "0.1.0" -description = "" -optional = false -python-versions = ">=3.11, < 3.14" -groups = ["dev"] -files = [] -develop = false - -[package.dependencies] -fine_python_black = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "extensions/fine_python_black"} -fine_python_isort = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "extensions/fine_python_isort"} - -[package.source] -type = "git" -url = "https://github.com/finecode-dev/finecode.git" -reference = "HEAD" -resolved_reference = "edb9e5f21b755a9df0501513e6001a9cda4c462b" -subdirectory = "presets/fine_python_format" - -[[package]] -name = "fine-python-isort" -version = "0.1.0" -description = "" -optional = false -python-versions = ">= 3.11, < 3.14" -groups = ["dev"] -files = [] -develop = false - -[package.dependencies] -finecode_extension_api = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "finecode_extension_api"} -isort = ">=5.13,<6" - -[package.source] -type = "git" -url = "https://github.com/finecode-dev/finecode.git" -reference = "HEAD" -resolved_reference = "edb9e5f21b755a9df0501513e6001a9cda4c462b" -subdirectory = "extensions/fine_python_isort" - -[[package]] -name = "fine-python-lint" -version = "0.1.0" -description = "" -optional = false -python-versions = ">=3.11, < 3.14" -groups = ["dev"] -files = [] -develop = false - -[package.dependencies] -fine_python_flake8 = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "extensions/fine_python_flake8"} -fine_python_mypy = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = 
"extensions/fine_python_mypy"} -flake8-bugbear = ">=24.12.12,<25.0.0" - -[package.source] -type = "git" -url = "https://github.com/finecode-dev/finecode.git" -reference = "HEAD" -resolved_reference = "edb9e5f21b755a9df0501513e6001a9cda4c462b" -subdirectory = "presets/fine_python_lint" - -[[package]] -name = "fine-python-mypy" -version = "0.1.0" -description = "" -optional = false -python-versions = ">=3.11, < 3.14" -groups = ["dev"] -files = [] -develop = false - -[package.dependencies] -finecode_extension_api = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "finecode_extension_api"} -mypy = ">=1.15,<2.0" - -[package.source] -type = "git" -url = "https://github.com/finecode-dev/finecode.git" -reference = "HEAD" -resolved_reference = "edb9e5f21b755a9df0501513e6001a9cda4c462b" -subdirectory = "extensions/fine_python_mypy" - -[[package]] -name = "fine-python-recommended" -version = "0.1.0" -description = "" -optional = false -python-versions = ">=3.11, < 3.14" -groups = ["dev"] -files = [] -develop = true - -[package.dependencies] -fine_python_format = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "presets/fine_python_format"} -fine_python_lint = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "presets/fine_python_lint"} - -[package.source] -type = "directory" -url = "../../../presets/fine_python_recommended" - -[[package]] -name = "finecode" -version = "0.2.0" -description = "" -optional = false -python-versions = ">=3.11, < 3.14" -groups = ["dev"] -files = [] -develop = true - -[package.dependencies] -click = "==8.1.*" -finecode_extension_api = {git = "https://github.com/finecode-dev/finecode.git", subdirectory = "finecode_extension_api"} -loguru = "==0.7.*" -ordered-set = "==4.1.*" -platformdirs = "==4.3.*" -pydantic = "==2.10.*" -pygls = "2.0.0-a2" -tomlkit = "==0.11.*" -watchdog = "==4.0.*" - -[package.source] -type = "directory" -url = "../../.." 
- -[[package]] -name = "finecode-extension-api" -version = "0.1.0" -description = "" -optional = false -python-versions = ">=3.11, < 3.14" -groups = ["dev"] -files = [] -develop = false - -[package.dependencies] -pydantic = ">=2.10.6,<3.0.0" -typing-extensions = ">=4.12.2,<5.0.0" - -[package.source] -type = "git" -url = "https://github.com/finecode-dev/finecode.git" -reference = "HEAD" -resolved_reference = "edb9e5f21b755a9df0501513e6001a9cda4c462b" -subdirectory = "finecode_extension_api" - -[[package]] -name = "flake8" -version = "7.2.0" -description = "the modular source code checker: pep8 pyflakes and co" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "flake8-7.2.0-py2.py3-none-any.whl", hash = "sha256:93b92ba5bdb60754a6da14fa3b93a9361fd00a59632ada61fd7b130436c40343"}, - {file = "flake8-7.2.0.tar.gz", hash = "sha256:fa558ae3f6f7dbf2b4f22663e5343b6b6023620461f8d4ff2019ef4b5ee70426"}, -] - -[package.dependencies] -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.13.0,<2.14.0" -pyflakes = ">=3.3.0,<3.4.0" - -[[package]] -name = "flake8-bugbear" -version = "24.12.12" -description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." -optional = false -python-versions = ">=3.8.1" -groups = ["dev"] -files = [ - {file = "flake8_bugbear-24.12.12-py3-none-any.whl", hash = "sha256:1b6967436f65ca22a42e5373aaa6f2d87966ade9aa38d4baf2a1be550767545e"}, - {file = "flake8_bugbear-24.12.12.tar.gz", hash = "sha256:46273cef0a6b6ff48ca2d69e472f41420a42a46e24b2a8972e4f0d6733d12a64"}, -] - -[package.dependencies] -attrs = ">=22.2.0" -flake8 = ">=6.0.0" - -[package.extras] -dev = ["coverage", "hypothesis", "hypothesmith (>=0.2)", "pre-commit", "pytest", "tox"] - -[[package]] -name = "isort" -version = "5.13.2" -description = "A Python utility / library to sort Python imports." 
-optional = false -python-versions = ">=3.8.0" -groups = ["dev"] -files = [ - {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, - {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, -] - -[package.extras] -colors = ["colorama (>=0.4.6)"] - -[[package]] -name = "loguru" -version = "0.7.3" -description = "Python logging made (stupidly) simple" -optional = false -python-versions = "<4.0,>=3.5" -groups = ["dev"] -files = [ - {file = "loguru-0.7.3-py3-none-any.whl", hash = "sha256:31a33c10c8e1e10422bfd431aeb5d351c7cf7fa671e3c4df004162264b28220c"}, - {file = "loguru-0.7.3.tar.gz", hash = "sha256:19480589e77d47b8d85b2c827ad95d49bf31b0dcde16593892eb51dd18706eb6"}, -] - -[package.dependencies] -colorama = {version = ">=0.3.4", markers = "sys_platform == \"win32\""} -win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} - -[package.extras] -dev = ["Sphinx (==8.1.3) ; python_version >= \"3.11\"", "build (==1.2.2) ; python_version >= \"3.11\"", "colorama (==0.4.5) ; python_version < \"3.8\"", "colorama (==0.4.6) ; python_version >= \"3.8\"", "exceptiongroup (==1.1.3) ; python_version >= \"3.7\" and python_version < \"3.11\"", "freezegun (==1.1.0) ; python_version < \"3.8\"", "freezegun (==1.5.0) ; python_version >= \"3.8\"", "mypy (==v0.910) ; python_version < \"3.6\"", "mypy (==v0.971) ; python_version == \"3.6\"", "mypy (==v1.13.0) ; python_version >= \"3.8\"", "mypy (==v1.4.1) ; python_version == \"3.7\"", "myst-parser (==4.0.0) ; python_version >= \"3.11\"", "pre-commit (==4.0.1) ; python_version >= \"3.9\"", "pytest (==6.1.2) ; python_version < \"3.8\"", "pytest (==8.3.2) ; python_version >= \"3.8\"", "pytest-cov (==2.12.1) ; python_version < \"3.8\"", "pytest-cov (==5.0.0) ; python_version == \"3.8\"", "pytest-cov (==6.0.0) ; python_version >= \"3.9\"", "pytest-mypy-plugins (==1.9.3) ; python_version >= \"3.6\" 
and python_version < \"3.8\"", "pytest-mypy-plugins (==3.1.0) ; python_version >= \"3.8\"", "sphinx-rtd-theme (==3.0.2) ; python_version >= \"3.11\"", "tox (==3.27.1) ; python_version < \"3.8\"", "tox (==4.23.2) ; python_version >= \"3.8\"", "twine (==6.0.1) ; python_version >= \"3.11\""] - -[[package]] -name = "lsprotocol" -version = "2024.0.0b1" -description = "Python types for Language Server Protocol." -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "lsprotocol-2024.0.0b1-py3-none-any.whl", hash = "sha256:93785050ac155ae2be16b1ebfbd74c214feb3d3ef77b10399ce941e5ccef6ebd"}, - {file = "lsprotocol-2024.0.0b1.tar.gz", hash = "sha256:d3667fb70894d361aa6c495c5c8a1b2e6a44be65ff84c21a9cbb67ebfb4830fd"}, -] - -[package.dependencies] -attrs = ">=21.3.0" -cattrs = "!=23.2.1" - -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -optional = false -python-versions = ">=3.6" -groups = ["dev"] -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - -[[package]] -name = "mypy" -version = "1.15.0" -description = "Optional static typing for Python" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, - {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, - {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}, - {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", 
hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}, - {file = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}, - {file = "mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}, - {file = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}, - {file = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}, - {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}, - {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}, - {file = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}, - {file = "mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}, - {file = "mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd"}, - {file = "mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f"}, - {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464"}, - {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"}, - {file = 
"mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}, - {file = "mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}, - {file = "mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445"}, - {file = "mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d"}, - {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5"}, - {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036"}, - {file = "mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357"}, - {file = "mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf"}, - {file = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"}, - {file = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"}, - {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"}, - {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"}, - {file = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"}, - {file = "mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"}, - {file = "mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}, - {file = "mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43"}, -] - -[package.dependencies] -mypy_extensions = ">=1.0.0" -typing_extensions = ">=4.6.0" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -faster-cache = ["orjson"] -install-types = ["pip"] -mypyc = ["setuptools (>=50)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "1.1.0" -description = "Type system extensions for programs checked with the mypy type checker." -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, - {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, -] - -[[package]] -name = "ordered-set" -version = "4.1.0" -description = "An OrderedSet is a custom MutableSet that remembers its order, so that every" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "ordered-set-4.1.0.tar.gz", hash = "sha256:694a8e44c87657c59292ede72891eb91d34131f6531463aab3009191c77364a8"}, - {file = "ordered_set-4.1.0-py3-none-any.whl", hash = "sha256:046e1132c71fcf3330438a539928932caf51ddbc582496833e23de611de14562"}, -] - -[package.extras] -dev = ["black", "mypy", "pytest"] - -[[package]] -name = "packaging" -version = "25.0" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "packaging-25.0-py3-none-any.whl", hash = 
"sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, - {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, -] - -[[package]] -name = "pathspec" -version = "0.12.1" -description = "Utility library for gitignore style pattern matching of file paths." -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, -] - -[[package]] -name = "platformdirs" -version = "4.3.7" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94"}, - {file = "platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351"}, -] - -[package.extras] -docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] -type = ["mypy (>=1.14.1)"] - -[[package]] -name = "pycodestyle" -version = "2.13.0" -description = "Python style guide checker" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "pycodestyle-2.13.0-py2.py3-none-any.whl", hash = "sha256:35863c5974a271c7a726ed228a14a4f6daf49df369d8c50cd9a6f58a5e143ba9"}, - {file = "pycodestyle-2.13.0.tar.gz", hash = "sha256:c8415bf09abe81d9c7f872502a6eee881fbe85d8763dd5b9924bb0a01d67efae"}, -] - -[[package]] -name = "pydantic" -version = "2.10.6" -description = "Data validation using Python type hints" -optional = 
false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, - {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, -] - -[package.dependencies] -annotated-types = ">=0.6.0" -pydantic-core = "2.27.2" -typing-extensions = ">=4.12.2" - -[package.extras] -email = ["email-validator (>=2.0.0)"] -timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] - -[[package]] -name = "pydantic-core" -version = "2.27.2" -description = "Core functionality for Pydantic validation and serialization" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, - {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, - {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, - {file = 
"pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, - {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, - {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, - {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, - {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, - {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, - {file = 
"pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, - {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, - {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, - {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, - {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, - {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, - {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, - {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, - {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, - {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, - {file = 
"pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, - {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, - {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, - {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, - {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, - {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, - {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, - {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, - {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, - {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, - {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, 
- {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, - {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, - {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, - {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, - {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", 
hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, - {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, - {file = 
"pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, - {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, - {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, -] - -[package.dependencies] -typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" - -[[package]] -name = "pyflakes" -version = "3.3.2" -description = "passive checker of Python programs" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "pyflakes-3.3.2-py2.py3-none-any.whl", hash = "sha256:5039c8339cbb1944045f4ee5466908906180f13cc99cc9949348d10f82a5c32a"}, - {file = "pyflakes-3.3.2.tar.gz", hash = "sha256:6dfd61d87b97fba5dcfaaf781171ac16be16453be6d816147989e7f6e6a9576b"}, -] - -[[package]] -name = "pygls" -version = "2.0.0a2" -description = "A pythonic generic language server (pronounced like 'pie glass')" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "pygls-2.0.0a2-py3-none-any.whl", hash = "sha256:b202369321409343aa6440d73111d9fa0c22e580466ff1c7696b8358bb91f243"}, - {file = "pygls-2.0.0a2.tar.gz", hash = 
"sha256:03e00634ed8d989918268aaa4b4a0c3ab857ea2d4ee94514a52efa5ddd6d5d9f"}, -] - -[package.dependencies] -cattrs = ">=23.1.2" -lsprotocol = "2024.0.0b1" - -[package.extras] -ws = ["websockets (>=13.0)"] - -[[package]] -name = "tomlkit" -version = "0.11.8" -description = "Style preserving TOML library" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "tomlkit-0.11.8-py3-none-any.whl", hash = "sha256:8c726c4c202bdb148667835f68d68780b9a003a9ec34167b6c673b38eff2a171"}, - {file = "tomlkit-0.11.8.tar.gz", hash = "sha256:9330fc7faa1db67b541b28e62018c17d20be733177d290a13b24c62d1614e0c3"}, -] - -[[package]] -name = "types-flake8" -version = "7.2.0.20250330" -description = "Typing stubs for flake8" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "types_flake8-7.2.0.20250330-py3-none-any.whl", hash = "sha256:af31590a269586309b80a439c94e59359e139e17475a8fc8d3c426ab01a40547"}, - {file = "types_flake8-7.2.0.20250330.tar.gz", hash = "sha256:481e5c914a26fabd23e85704ad055f2716f9238740bfe6fe77259c2533cb970c"}, -] - -[package.dependencies] -types-pyflakes = "*" - -[[package]] -name = "types-pyflakes" -version = "3.3.2.20250429" -description = "Typing stubs for pyflakes" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "types_pyflakes-3.3.2.20250429-py3-none-any.whl", hash = "sha256:f9ccc1968ddd1a18232c1e66cfcce8a9e8f4b2b85fbbf682bf87148a2b2d58a0"}, - {file = "types_pyflakes-3.3.2.20250429.tar.gz", hash = "sha256:a81b0ee91e34d143f655d366bd4002730f0e342a5aa338779d2f995515ce1c5c"}, -] - -[[package]] -name = "typing-extensions" -version = "4.13.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, - {file = "typing_extensions-4.13.2.tar.gz", hash = 
"sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, -] - -[[package]] -name = "watchdog" -version = "4.0.2" -description = "Filesystem events monitoring" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ede7f010f2239b97cc79e6cb3c249e72962404ae3865860855d5cbe708b0fd22"}, - {file = "watchdog-4.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a2cffa171445b0efa0726c561eca9a27d00a1f2b83846dbd5a4f639c4f8ca8e1"}, - {file = "watchdog-4.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c50f148b31b03fbadd6d0b5980e38b558046b127dc483e5e4505fcef250f9503"}, - {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7c7d4bf585ad501c5f6c980e7be9c4f15604c7cc150e942d82083b31a7548930"}, - {file = "watchdog-4.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:914285126ad0b6eb2258bbbcb7b288d9dfd655ae88fa28945be05a7b475a800b"}, - {file = "watchdog-4.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:984306dc4720da5498b16fc037b36ac443816125a3705dfde4fd90652d8028ef"}, - {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:1cdcfd8142f604630deef34722d695fb455d04ab7cfe9963055df1fc69e6727a"}, - {file = "watchdog-4.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d7ab624ff2f663f98cd03c8b7eedc09375a911794dfea6bf2a359fcc266bff29"}, - {file = "watchdog-4.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:132937547a716027bd5714383dfc40dc66c26769f1ce8a72a859d6a48f371f3a"}, - {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:cd67c7df93eb58f360c43802acc945fa8da70c675b6fa37a241e17ca698ca49b"}, - {file = "watchdog-4.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:bcfd02377be80ef3b6bc4ce481ef3959640458d6feaae0bd43dd90a43da90a7d"}, - {file = "watchdog-4.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:980b71510f59c884d684b3663d46e7a14b457c9611c481e5cef08f4dd022eed7"}, - {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:aa160781cafff2719b663c8a506156e9289d111d80f3387cf3af49cedee1f040"}, - {file = "watchdog-4.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f6ee8dedd255087bc7fe82adf046f0b75479b989185fb0bdf9a98b612170eac7"}, - {file = "watchdog-4.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0b4359067d30d5b864e09c8597b112fe0a0a59321a0f331498b013fb097406b4"}, - {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:770eef5372f146997638d737c9a3c597a3b41037cfbc5c41538fc27c09c3a3f9"}, - {file = "watchdog-4.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eeea812f38536a0aa859972d50c76e37f4456474b02bd93674d1947cf1e39578"}, - {file = "watchdog-4.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b2c45f6e1e57ebb4687690c05bc3a2c1fb6ab260550c4290b8abb1335e0fd08b"}, - {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:10b6683df70d340ac3279eff0b2766813f00f35a1d37515d2c99959ada8f05fa"}, - {file = "watchdog-4.0.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:f7c739888c20f99824f7aa9d31ac8a97353e22d0c0e54703a547a218f6637eb3"}, - {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c100d09ac72a8a08ddbf0629ddfa0b8ee41740f9051429baa8e31bb903ad7508"}, - {file = "watchdog-4.0.2-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f5315a8c8dd6dd9425b974515081fc0aadca1d1d61e078d2246509fd756141ee"}, - {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:2d468028a77b42cc685ed694a7a550a8d1771bb05193ba7b24006b8241a571a1"}, - {file = "watchdog-4.0.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:f15edcae3830ff20e55d1f4e743e92970c847bcddc8b7509bcd172aa04de506e"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_aarch64.whl", hash = 
"sha256:936acba76d636f70db8f3c66e76aa6cb5136a936fc2a5088b9ce1c7a3508fc83"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_armv7l.whl", hash = "sha256:e252f8ca942a870f38cf785aef420285431311652d871409a64e2a0a52a2174c"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_i686.whl", hash = "sha256:0e83619a2d5d436a7e58a1aea957a3c1ccbf9782c43c0b4fed80580e5e4acd1a"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64.whl", hash = "sha256:88456d65f207b39f1981bf772e473799fcdc10801062c36fd5ad9f9d1d463a73"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:32be97f3b75693a93c683787a87a0dc8db98bb84701539954eef991fb35f5fbc"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_s390x.whl", hash = "sha256:c82253cfc9be68e3e49282831afad2c1f6593af80c0daf1287f6a92657986757"}, - {file = "watchdog-4.0.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:c0b14488bd336c5b1845cee83d3e631a1f8b4e9c5091ec539406e4a324f882d8"}, - {file = "watchdog-4.0.2-py3-none-win32.whl", hash = "sha256:0d8a7e523ef03757a5aa29f591437d64d0d894635f8a50f370fe37f913ce4e19"}, - {file = "watchdog-4.0.2-py3-none-win_amd64.whl", hash = "sha256:c344453ef3bf875a535b0488e3ad28e341adbd5a9ffb0f7d62cefacc8824ef2b"}, - {file = "watchdog-4.0.2-py3-none-win_ia64.whl", hash = "sha256:baececaa8edff42cd16558a639a9b0ddf425f93d892e8392a56bf904f5eff22c"}, - {file = "watchdog-4.0.2.tar.gz", hash = "sha256:b4dfbb6c49221be4535623ea4474a4d6ee0a9cef4a80b20c28db4d858b64e270"}, -] - -[package.extras] -watchmedo = ["PyYAML (>=3.10)"] - -[[package]] -name = "win32-setctime" -version = "1.2.0" -description = "A small Python utility to set file creation time on Windows" -optional = false -python-versions = ">=3.5" -groups = ["dev"] -markers = "sys_platform == \"win32\"" -files = [ - {file = "win32_setctime-1.2.0-py3-none-any.whl", hash = "sha256:95d644c4e708aba81dc3704a116d8cbc974d70b3bdb8be1d150e36be6e9d1390"}, - {file = "win32_setctime-1.2.0.tar.gz", hash = 
"sha256:ae1fdf948f5640aae05c511ade119313fb6a30d7eabe25fef9764dca5873c4c0"}, -] - -[package.extras] -dev = ["black (>=19.3b0) ; python_version >= \"3.6\"", "pytest (>=4.6.2)"] - -[metadata] -lock-version = "2.1" -python-versions = ">=3.11, < 3.14" -content-hash = "ed1dc1133af581f1975b16cf755a302ac01abefd7bc66afe50ed9064c1425f29"