diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 526a2b0..eb0728a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -55,13 +55,14 @@ jobs: uses: reviewdog/action-actionlint@83e4ed25b168066ad8f62f5afbb29ebd8641d982 # v1.6.8 test: - name: "Test (Python ${{ matrix.python-version }})" + name: "Test (${{ matrix.os }}, Python ${{ matrix.python-version }})" needs: lint - runs-on: ubuntu-latest + runs-on: ${{ matrix.os }} timeout-minutes: 15 strategy: fail-fast: false matrix: + os: [ubuntu-latest, windows-latest, macos-latest] python-version: ["3.10", "3.11", "3.12", "3.13", "3.14", "3.14t"] steps: @@ -95,7 +96,7 @@ jobs: run: just test-coverage - name: Upload coverage to Codecov - if: matrix.python-version == '3.10' + if: matrix.os == 'ubuntu-latest' && matrix.python-version == '3.10' uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1 env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} diff --git a/.vale.ini b/.vale.ini index 747d1c0..6c45e51 100644 --- a/.vale.ini +++ b/.vale.ini @@ -14,5 +14,5 @@ Google.Will = NO proselint.Annotations = NO # Internal/temporary files - skip linting -[{.claude/**/*.md,.oaps/**/**.md,tmp/**/*.md}] +[{.claude/**/*.md,.oaps/**/**.md,tmp/**/*.md,PLAN.md}] BasedOnStyles = diff --git a/README.md b/README.md index 9f66df1..a695873 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# JSON Lines Table (JSONLT) Python package +# JSONLT Python package [![CI](https://github.com/jsonlt/jsonlt-python/actions/workflows/ci.yml/badge.svg?branch=main)](https://github.com/jsonlt/jsonlt-python/actions/workflows/ci.yml) @@ -10,8 +10,191 @@ **jsonlt** is the Python reference implementation of the [JSON Lines Table (JSONLT) specification][jsonlt]. +JSONLT is a data format for storing keyed records in append-only files using [JSON Lines](https://jsonlines.org/). The format optimizes for version control diffs and human readability. + > [!NOTE] -> This package is in development and not yet ready for use. +> This package is in development and not yet ready for production use. 
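+
+Records are stored one per line as JSON objects, with an optional first
+line carrying table metadata. A minimal file might look like this
+(illustrative contents; fields are serialized deterministically with
+sorted keys, and deletions are recorded as `$deleted` tombstones):
+
+```jsonl
+{"$jsonlt":{"key":"id","version":1}}
+{"id":"alice","role":"admin"}
+{"$deleted":true,"id":"bob"}
+```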
+ +## Installation + +```bash +pip install jsonlt + +# Or + +uv add jsonlt +``` + +## Quick start + +### Basic operations + +```python +from jsonlt import Table + +# Open or create a table with a simple key +table = Table("users.jsonlt", key="id") + +# Insert or update records +table.put({"id": "alice", "role": "admin", "email": "alice@example.com"}) +table.put({"id": "bob", "role": "user", "email": "bob@example.com"}) + +# Read a record by key +user = table.get("alice") +print(user) # {"id": "alice", "role": "admin", "email": "alice@example.com"} + +# Check if a key exists +if table.has("bob"): + print("Bob exists") + +# Delete a record +table.delete("bob") + +# Get all records +for record in table.all(): + print(record) +``` + +### Compound keys + +JSONLT supports multi-field compound keys: + +```python +# Using a tuple of field names for compound keys +orders = Table("orders.jsonlt", key=("customer_id", "order_id")) + +orders.put({"customer_id": "alice", "order_id": 1, "total": 99.99}) +orders.put({"customer_id": "alice", "order_id": 2, "total": 149.99}) + +# Access with compound key +order = orders.get(("alice", 1)) +``` + +### Transactions + +Use transactions for atomic updates with conflict detection: + +```python +from jsonlt import Table, ConflictError + +table = Table("counters.jsonlt", key="name") + +# Context manager commits on success, aborts on exception +with table.transaction() as tx: + counter = tx.get("visits") + if counter: + tx.put({"name": "visits", "count": counter["count"] + 1}) + else: + tx.put({"name": "visits", "count": 1}) + +# Handle conflicts from concurrent modifications +try: + with table.transaction() as tx: + tx.put({"name": "counter", "value": 42}) +except ConflictError as e: + print(f"Conflict on key: {e.key}") +``` + +### Finding records + +```python +from jsonlt import Table + +table = Table("products.jsonlt", key="sku") + +# Find all records matching a predicate +expensive = table.find(lambda r: r.get("price", 0) > 100) + +# Find with limit +top_3 = table.find(lambda r: r.get("in_stock", False), limit=3) + +# Find the first matching record +first_match = table.find_one(lambda r: r.get("category") == "electronics") +``` + +### Table maintenance + +```python +from jsonlt import Table + +table = Table("data.jsonlt", key="id") + +# Compact the table (removes tombstones and superseded records) +table.compact() + +# Clear all records (keeps header if present) +table.clear() +``` + +## API overview + +### Table class + +The `Table` class is the primary interface for working with JSONLT files. 
+ +| Method | Description | +| ------ | ----------- | +| `Table(path, key)` | Open or create a table at the given path | +| `get(key)` | Get a record by key, returns `None` if not found | +| `has(key)` | Check if a key exists | +| `put(record)` | Insert or update a record | +| `delete(key)` | Delete a record, returns whether it existed | +| `all()` | Get all records in key order | +| `keys()` | Get all keys in key order | +| `items()` | Get all (key, record) pairs in key order | +| `count()` | Get the number of records | +| `find(predicate, limit=None)` | Find records matching a predicate | +| `find_one(predicate)` | Find the first matching record | +| `transaction()` | Start a new transaction | +| `compact()` | Compact the table file | +| `clear()` | Remove all records | +| `reload()` | Force reload from disk | + +The `Table` class also supports idiomatic Python operations: + +- `len(table)` - number of records +- `key in table` - check if key exists +- `for record in table` - iterate over records + +### Transaction class + +The `Transaction` class provides snapshot isolation and buffered writes. + +| Method | Description | +| ------ | ----------- | +| `get(key)` | Get a record from the transaction snapshot | +| `has(key)` | Check if a key exists in the snapshot | +| `put(record)` | Buffer a record for commit | +| `delete(key)` | Buffer a deletion for commit | +| `commit()` | Write buffered changes to disk | +| `abort()` | Discard buffered changes | + +### Exception hierarchy + +All exceptions inherit from `JSONLTError`: + +| Exception | Description | +| --------- | ----------- | +| `ParseError` | Invalid file format or content | +| `InvalidKeyError` | Invalid or missing key | +| `FileError` | File I/O error | +| `LockError` | Cannot obtain file lock | +| `LimitError` | Size limit exceeded | +| `TransactionError` | Transaction state error | +| `ConflictError` | Write-write conflict detected | + +### Type exports + +For type annotations, the package exports: + +- `Key` - A key value (`str`, `int`, or tuple of these) +- `KeySpecifier` - Key field names (single or multiple) +- `JSONObject` - A JSON object (`dict[str, Any]`) +- `Header` - Table header data class + +## Documentation + +For detailed documentation, tutorials, and the full specification, visit [jsonlt.org/docs](https://jsonlt.org/docs). 
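+
+The Quick start shows the context-manager form of transactions; the
+explicit `commit()`/`abort()` calls look like this (a sketch, assuming
+`transaction()` returns the `Transaction` object directly):
+
+```python
+from jsonlt import Table
+
+table = Table("users.jsonlt", key="id")
+
+tx = table.transaction()
+try:
+    tx.put({"id": "carol", "role": "viewer"})
+    tx.commit()  # write buffered changes to disk
+except Exception:
+    tx.abort()  # discard buffered changes
+    raise
+```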
## License diff --git a/pyproject.toml b/pyproject.toml index 0b9ec8b..60fda55 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,7 +14,7 @@ dependencies = ["typing-extensions>=4.15.0"] dev = [ "basedpyright>=1.36.1", "codespell>=2.4.1", - "cosmic-ray>=8.4.3", + "cosmic-ray>=8.4.3; sys_platform != 'win32'", "dirty-equals>=0.11", "faker>=39.0.0", "hypothesis>=6.148.7", @@ -24,7 +24,7 @@ dev = [ "pytest-codspeed>=4.2.0", "pytest-cov>=7.0.0", "pytest-examples>=0.0.18", - "pytest-memray>=1.8.0", + "pytest-memray>=1.8.0; sys_platform != 'win32'", "pytest-mock>=3.15.1", "pytest-test-groups>=1.2.1", "rich>=14.2.0", @@ -88,6 +88,8 @@ exclude_lines = [ "if TYPE_CHECKING:", "@abstractmethod", "@abc.abstractmethod", + # Platform-specific exclusions + "class _WindowsLock:", ] precision = 2 show_missing = true diff --git a/src/jsonlt/__init__.py b/src/jsonlt/__init__.py index dbeef1a..95fdf4b 100644 --- a/src/jsonlt/__init__.py +++ b/src/jsonlt/__init__.py @@ -2,8 +2,41 @@ from importlib.metadata import version +from ._exceptions import ( + ConflictError, + FileError, + InvalidKeyError, + JSONLTError, + LimitError, + LockError, + ParseError, + TransactionError, +) +from ._header import Header +from ._json import JSONArray, JSONObject, JSONPrimitive, JSONValue +from ._keys import Key, KeySpecifier from ._table import Table +from ._transaction import Transaction __version__ = version("jsonlt") -__all__ = ["Table", "__version__"] +__all__ = [ + "ConflictError", + "FileError", + "Header", + "InvalidKeyError", + "JSONArray", + "JSONLTError", + "JSONObject", + "JSONPrimitive", + "JSONValue", + "Key", + "KeySpecifier", + "LimitError", + "LockError", + "ParseError", + "Table", + "Transaction", + "TransactionError", + "__version__", +] diff --git a/src/jsonlt/_constants.py b/src/jsonlt/_constants.py new file mode 100644 index 0000000..7cec128 --- /dev/null +++ b/src/jsonlt/_constants.py @@ -0,0 +1,55 @@ +"""Constants defining implementation limits and protocol version. + +These constants define the minimum required limits per the JSONLT specification. +Implementations may support larger values. +""" + +from typing import Final + +# JSONLT specification version +JSONLT_VERSION: Final[int] = 1 +"""The JSONLT specification version implemented.""" + +# Minimum required limits per specification +MAX_KEY_LENGTH: Final[int] = 1024 +"""Maximum supported key length in bytes. + +The key length is the number of bytes in the key's JSON representation +when encoded as UTF-8. For example, "alice" is 7 bytes (including quotes). +""" + +MAX_RECORD_SIZE: Final[int] = 1_048_576 +"""Maximum supported record size in bytes (1 MiB). + +The record size is the number of bytes in the record's JSON serialization +using deterministic serialization, encoded as UTF-8. +""" + +MIN_NESTING_DEPTH: Final[int] = 64 +"""Minimum supported JSON nesting depth. + +Nesting depth is the maximum number of nested JSON objects and arrays +at any point within a value, where the outermost value is at depth 1. +""" + +MAX_TUPLE_ELEMENTS: Final[int] = 16 +"""Maximum number of elements in a tuple key. + +Tuple keys may contain at most 16 elements. Key specifiers with more +than 16 field names are invalid. +""" + +# Valid integer key range (IEEE 754 double-precision safe integers) +MAX_INTEGER_KEY: Final[int] = 2**53 - 1 +"""Maximum valid integer key value (9007199254740991). + +This is the maximum integer that IEEE 754 double-precision floating-point +can represent exactly, ensuring interoperability across languages. 
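+
+For example, float(2**53) == float(2**53 + 1) evaluates to True, so
+integer keys beyond this bound would collide in implementations that
+parse JSON numbers as IEEE 754 doubles.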
+""" + +MIN_INTEGER_KEY: Final[int] = -(2**53) + 1 +"""Minimum valid integer key value (-9007199254740991). + +This is the minimum integer that IEEE 754 double-precision floating-point +can represent exactly, ensuring interoperability across languages. +""" diff --git a/src/jsonlt/_encoding.py b/src/jsonlt/_encoding.py new file mode 100644 index 0000000..980f93f --- /dev/null +++ b/src/jsonlt/_encoding.py @@ -0,0 +1,172 @@ +"""UTF-8 encoding validation for JSONLT. + +This module provides functions to validate and prepare UTF-8 encoded bytes +for JSONLT processing, per specification section "Encoding". +""" + +from typing import TYPE_CHECKING, Final + +from ._exceptions import ParseError + +if TYPE_CHECKING: + from ._json import JSONValue + +# Unicode surrogate code point ranges +_HIGH_SURROGATE_START: Final[int] = 0xD800 +_HIGH_SURROGATE_END: Final[int] = 0xDBFF +_LOW_SURROGATE_START: Final[int] = 0xDC00 +_LOW_SURROGATE_END: Final[int] = 0xDFFF + + +def strip_bom(data: bytes) -> bytes: + """Strip UTF-8 BOM from the start of byte data if present. + + Per specification: "A conforming parser SHOULD strip any BOM encountered + at the start of the file." + + Args: + data: Raw bytes that may start with a BOM. + + Returns: + The bytes with BOM stripped if present, otherwise unchanged. + """ + bom = b"\xef\xbb\xbf" + if data.startswith(bom): + return data[len(bom) :] + return data + + +def strip_cr_before_lf(data: bytes) -> bytes: + r"""Strip CR characters that precede LF characters. + + Per specification: "A conforming parser SHOULD strip CR characters + preceding LF." + + This ensures consistent LF-only line endings regardless of whether + input was created on Windows (CRLF) or Unix (LF) systems. + + Args: + data: Raw bytes that may contain CRLF sequences. + + Returns: + The bytes with all CR-LF sequences replaced with just LF. + """ + return data.replace(b"\r\n", b"\n") + + +def validate_utf8(data: bytes) -> str: + """Validate and decode UTF-8 bytes with strict security requirements. + + Per specification: + - "A conforming parser SHALL reject byte sequences that are overlong + encodings" (RFC 3629, Unicode 16.0 Section 3.9) + - Surrogate code points (U+D800-U+DFFF) are not valid in UTF-8 + + Python's built-in UTF-8 codec with 'strict' error handling correctly + rejects both overlong encodings and surrogate code points, as these + are invalid UTF-8 per RFC 3629. + + Args: + data: Raw bytes to validate and decode. + + Returns: + The decoded string. + + Raises: + UnicodeDecodeError: If the bytes contain invalid UTF-8, including + overlong encodings or surrogate code points. + """ + return data.decode("utf-8", errors="strict") + + +def prepare_input(data: bytes) -> str: + """Prepare raw byte input for JSONLT parsing. + + This function applies all input preprocessing required by the specification: + 1. Strip BOM if present + 2. Strip CR before LF (normalize line endings) + 3. Validate UTF-8 encoding (reject overlong and surrogate encodings) + + Args: + data: Raw bytes from a JSONLT file. + + Returns: + The prepared string ready for line-by-line parsing. + + Raises: + UnicodeDecodeError: If the bytes contain invalid UTF-8. + """ + data = strip_bom(data) + data = strip_cr_before_lf(data) + return validate_utf8(data) + + +def has_unpaired_surrogates(text: str) -> bool: + """Check if string contains unpaired Unicode surrogates. + + Surrogates are in the range U+D800-U+DFFF. High surrogates (U+D800-U+DBFF) + must be followed by low surrogates (U+DC00-U+DFFF) to form valid pairs. 
+ + Note: In Python 3, strings normally cannot contain unpaired surrogates + because the string type requires valid Unicode. However, the surrogatepass + error handler or certain APIs may produce strings with lone surrogates. + + Args: + text: String to check. + + Returns: + True if unpaired surrogates found, False otherwise. + """ + i = 0 + length = len(text) + + while i < length: + code_point = ord(text[i]) + + # Check if this is a high surrogate (U+D800-U+DBFF) + if _HIGH_SURROGATE_START <= code_point <= _HIGH_SURROGATE_END: + # High surrogate must be followed by low surrogate + if i + 1 < length: + next_code_point = ord(text[i + 1]) + if _LOW_SURROGATE_START <= next_code_point <= _LOW_SURROGATE_END: + # Valid surrogate pair, skip both + i += 2 + continue + # Unpaired high surrogate + return True + + # Check if this is a lone low surrogate (U+DC00-U+DFFF) + if _LOW_SURROGATE_START <= code_point <= _LOW_SURROGATE_END: + # Low surrogate without preceding high surrogate + return True + + i += 1 + + return False + + +def validate_no_surrogates(value: "JSONValue") -> None: + """Recursively validate no unpaired surrogates in any string. + + Checks strings for unpaired Unicode surrogates and recursively descends + into dicts and lists to validate all nested string values. + + Args: + value: A JSON value to check. + + Raises: + ParseError: If any string contains unpaired surrogates. + """ + if isinstance(value, str): + if has_unpaired_surrogates(value): + msg = "record contains unpaired Unicode surrogates" + raise ParseError(msg) + elif isinstance(value, dict): + for k, v in value.items(): + if has_unpaired_surrogates(k): + msg = "record contains unpaired Unicode surrogates in field name" + raise ParseError(msg) + validate_no_surrogates(v) + elif isinstance(value, list): + for item in value: + validate_no_surrogates(item) diff --git a/src/jsonlt/_exceptions.py b/src/jsonlt/_exceptions.py new file mode 100644 index 0000000..ecf310e --- /dev/null +++ b/src/jsonlt/_exceptions.py @@ -0,0 +1,155 @@ +"""Exception hierarchy for JSONLT operations. + +This module defines the exception types used throughout the JSONLT library, +following the specification's error categories. +""" + +# pyright: reportImportCycles=false + +from typing import TYPE_CHECKING +from typing_extensions import override + +if TYPE_CHECKING: + from ._json import JSONObject + from ._keys import Key + + +class JSONLTError(Exception): + """Base exception for all JSONLT operations. + + All exceptions raised by JSONLT inherit from this class, allowing + callers to catch all JSONLT-related errors with a single handler. + """ + + +class ParseError(JSONLTError): + """Error during parsing of JSONLT content. + + Raised for: + - Invalid UTF-8 encoding + - Invalid JSON syntax + - Non-object JSON values where objects are expected + - Duplicate keys in JSON objects + - Invalid $deleted values (not boolean true) + - Header appearing on non-first line + - Invalid header structure + - Missing or invalid version field + + See specification section "Parse errors (ParseError)". + """ + + +class InvalidKeyError(JSONLTError): + """Error related to keys or key specifiers. 
+ + Raised for: + - Missing required key fields in records + - Invalid key field values (null, boolean, object, array) + - Numbers outside the valid integer key range + - Fractional numbers in key fields + - Records containing $-prefixed fields + - Key specifier mismatch between header and caller + - Duplicate field names in key specifier tuples + - Empty key specifier or key tuples + + See specification section "Key errors (KeyError)". + """ + + +class FileError(JSONLTError): + """Error during file system operations. + + Raised for: + - File read failures (permissions, I/O errors) + - File write failures (permissions, I/O errors) + - Atomic file replacement failures + + See specification section "File errors (IOError)". + """ + + +class LockError(JSONLTError): + """Error during file locking. + + Raised when a file lock cannot be acquired within the configured timeout. + + See specification section "Lock errors (LockError)". + """ + + +class LimitError(JSONLTError): + """Error when content exceeds implementation limits. + + Raised for: + - Key length exceeding maximum + - Record size exceeding maximum + - JSON nesting depth exceeding maximum + - Tuple key exceeding maximum element count + + See specification section "Limit errors (LimitError)". + """ + + +class TransactionError(JSONLTError): + """Error related to transaction operations. + + Raised for: + - Attempting to start a nested transaction + + See specification section "Transaction errors (TransactionError)". + """ + + +class ConflictError(TransactionError): + """Error when a transaction commit detects a write-write conflict. + + Raised when another process has modified a key that the transaction + also modified since the transaction started. + + See specification section "Transaction errors (TransactionError)". + + Attributes: + key: The conflicting key. + expected: The value that was expected (from transaction start snapshot). + actual: The actual current value (after reload). + """ + + def __init__( + self, + message: str, + key: "Key", + expected: "JSONObject | None", + actual: "JSONObject | None", + ) -> None: + """Initialize a ConflictError. + + Args: + message: The error message. + key: The conflicting key. + expected: The value that was expected (from transaction start). + actual: The actual current value (after reload). + """ + super().__init__(message) + self._key: Key = key + self._expected: JSONObject | None = expected + self._actual: JSONObject | None = actual + + @property + def key(self) -> "Key": + """The conflicting key.""" + return self._key + + @property + def expected(self) -> "JSONObject | None": + """The value that was expected (from transaction start snapshot).""" + return self._expected + + @property + def actual(self) -> "JSONObject | None": + """The actual current value (after reload).""" + return self._actual + + @override + def __repr__(self) -> str: + """Return a string representation of the conflict error.""" + return f"ConflictError({self.args[0]!r}, key={self._key!r})" diff --git a/src/jsonlt/_filesystem.py b/src/jsonlt/_filesystem.py new file mode 100644 index 0000000..33761ac --- /dev/null +++ b/src/jsonlt/_filesystem.py @@ -0,0 +1,233 @@ +"""Filesystem abstraction for JSONLT Table operations. + +This module provides a filesystem protocol and implementation used by the Table +class for file operations, enabling testability through dependency injection. 
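+
+A test double can implement the FileSystem protocol entirely in memory
+(for example, serving canned FileStats and buffered bytes), so Table
+logic can be exercised without touching the real filesystem.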
+""" + +import os +from contextlib import contextmanager +from dataclasses import dataclass +from typing import TYPE_CHECKING, ClassVar, Protocol, cast, runtime_checkable + +from ._exceptions import FileError +from ._lock import exclusive_lock +from ._writer import atomic_replace as _atomic_replace + +if TYPE_CHECKING: + from collections.abc import Iterator, Sequence + from contextlib import AbstractContextManager + from pathlib import Path + from typing import BinaryIO + + +@dataclass(frozen=True, slots=True) +class FileStats: + """Immutable container for file stat results.""" + + mtime: float + size: int + exists: bool + + +@runtime_checkable +class LockedFile(Protocol): + """Protocol for a file handle with exclusive lock held.""" + + def read(self) -> bytes: # pragma: no cover + """Read all remaining bytes from the file.""" + ... + + def write(self, data: bytes) -> int: # pragma: no cover + """Write bytes to the file.""" + ... + + def seek(self, offset: int, whence: int = 0) -> int: # pragma: no cover + """Seek to a position in the file.""" + ... + + def sync(self) -> None: # pragma: no cover + """Flush and fsync the file.""" + ... + + +@runtime_checkable +class FileSystem(Protocol): + """Protocol for filesystem operations needed by Table.""" + + def stat(self, path: "Path") -> FileStats: # pragma: no cover + """Get file stats. Returns FileStats with exists=False if not found.""" + ... + + def read_bytes( + self, path: "Path", *, max_size: int | None = None + ) -> bytes: # pragma: no cover + """Read entire file contents. Raises FileError if not readable.""" + ... + + def ensure_parent_dir(self, path: "Path") -> None: # pragma: no cover + """Create parent directories if needed.""" + ... + + def open_locked( # pragma: no cover + self, + path: "Path", + mode: str, + timeout: float | None, + ) -> "AbstractContextManager[LockedFile]": + """Open file with exclusive lock.""" + ... + + def atomic_replace( + self, path: "Path", lines: "Sequence[str]" + ) -> None: # pragma: no cover + """Atomically replace file contents with lines.""" + ... + + +class _LockedFileHandle: + """Wrapper around file handle satisfying LockedFile protocol.""" + + __slots__: ClassVar[tuple[str, ...]] = ("_file",) + + _file: "BinaryIO" + + def __init__(self, file: "BinaryIO") -> None: + self._file = file + + def read(self) -> bytes: + """Read all remaining bytes from the file.""" + return self._file.read() + + def write(self, data: bytes) -> int: + """Write bytes to the file.""" + return self._file.write(data) + + def seek(self, offset: int, whence: int = 0) -> int: + """Seek to a position in the file.""" + return self._file.seek(offset, whence) + + def sync(self) -> None: + """Flush and fsync the file.""" + self._file.flush() + os.fsync(self._file.fileno()) + + +class RealFileSystem: + """Real filesystem implementation using standard library.""" + + __slots__: ClassVar[tuple[str, ...]] = () + + def stat(self, path: "Path") -> FileStats: + """Get file stats. Returns FileStats with exists=False if not found. + + Args: + path: Path to the file. + + Returns: + FileStats with file metadata, or exists=False if not found. + + Raises: + FileError: If stat fails for reasons other than file not found. 
+ """ + try: + st = path.stat() + return FileStats(mtime=st.st_mtime, size=st.st_size, exists=True) + except FileNotFoundError: + return FileStats(mtime=0.0, size=0, exists=False) + except OSError as e: + msg = f"cannot stat file: {e}" + raise FileError(msg) from e + + def read_bytes(self, path: "Path", *, max_size: int | None = None) -> bytes: + """Read entire file contents. + + Args: + path: Path to the file. + max_size: Optional maximum file size to allow. If the file exceeds + this size, FileError is raised. + + Returns: + The file contents as bytes. + + Raises: + FileError: If the file cannot be read or exceeds max_size. + """ + if max_size is not None: + try: + st = path.stat() + except OSError as e: + msg = f"cannot read file: {e}" + raise FileError(msg) from e + if st.st_size > max_size: + msg = f"file size {st.st_size} exceeds maximum {max_size}" + raise FileError(msg) + try: + return path.read_bytes() + except OSError as e: + msg = f"cannot read file: {e}" + raise FileError(msg) from e + + def ensure_parent_dir(self, path: "Path") -> None: + """Create parent directories if needed. + + Args: + path: Path whose parent directory should exist. + + Raises: + FileError: If directory creation fails. + """ + try: + path.parent.mkdir(parents=True, exist_ok=True) + except OSError as e: + msg = f"cannot create directory: {e}" + raise FileError(msg) from e + + @contextmanager + def open_locked( + self, + path: "Path", + mode: str, + timeout: float | None, + ) -> "Iterator[LockedFile]": + """Open file with exclusive lock. + + Args: + path: Path to the file. + mode: File mode ("r+b" or "xb"). + timeout: Lock acquisition timeout in seconds, or None for no timeout. + + Yields: + A LockedFile handle for reading/writing. + + Raises: + FileNotFoundError: If mode is "r+b" and file doesn't exist. + FileExistsError: If mode is "xb" and file already exists. + LockError: If lock cannot be acquired within timeout. + FileError: For other OS-level errors. + """ + try: + file = path.open(mode) + except (FileNotFoundError, FileExistsError): + # Let these propagate for control flow in Table + raise + except OSError as e: + msg = f"cannot open file: {e}" + raise FileError(msg) from e + + try: + with exclusive_lock(cast("BinaryIO", file), timeout=timeout): + yield _LockedFileHandle(cast("BinaryIO", file)) + finally: + file.close() + + def atomic_replace(self, path: "Path", lines: "Sequence[str]") -> None: + """Atomically replace file contents with lines. + + Args: + path: Target file path. + lines: Lines to write (newlines added automatically). + + Raises: + FileError: If write, sync, or rename fails. + """ + _atomic_replace(path, lines) diff --git a/src/jsonlt/_header.py b/src/jsonlt/_header.py new file mode 100644 index 0000000..32ed3c0 --- /dev/null +++ b/src/jsonlt/_header.py @@ -0,0 +1,223 @@ +"""Header parsing and representation for JSONLT files. + +This module defines the Header dataclass and functions for parsing and +validating JSONLT file headers per the specification section "Header". +""" + +from dataclasses import dataclass +from typing import TYPE_CHECKING, cast + +from ._constants import JSONLT_VERSION, MAX_TUPLE_ELEMENTS +from ._exceptions import ParseError +from ._json import serialize_json +from ._keys import KeySpecifier, is_valid_key_specifier + +if TYPE_CHECKING: + from ._json import JSONObject, JSONValue + + +@dataclass(frozen=True, slots=True) +class Header: + """Represents a JSONLT file header. 
+ + A header is an optional first line in a JSONLT file that provides + metadata about the file. It contains a `$jsonlt` field with metadata. + + Attributes: + version: The JSONLT specification version (must be 1). + key: The key specifier for the table, if present. + schema_url: A URL reference to a JSON Schema that validates records. + schema: An inline JSON Schema object that validates records. + meta: User-defined metadata. + """ + + version: int + key: "KeySpecifier | None" = None + schema_url: "str | None" = None + schema: "JSONObject | None" = None + meta: "JSONObject | None" = None + + +def is_header_line(obj: "JSONObject") -> bool: + """Check if a parsed JSON object is a header line. + + A header line is a JSON object containing a single field `$jsonlt`. + + Args: + obj: A parsed JSON object. + + Returns: + True if the object contains the `$jsonlt` field, False otherwise. + """ + return "$jsonlt" in obj + + +def _parse_key_specifier(value: object) -> KeySpecifier: + """Parse a key specifier from a JSON value. + + Args: + value: The value of the `key` field from the header. + + Returns: + A valid key specifier (string or tuple of strings). + + Raises: + ParseError: If the value is not a valid key specifier. + """ + if isinstance(value, str): + return value + if isinstance(value, list): + if not value: + msg = "key specifier cannot be an empty array" + raise ParseError(msg) + # Check all items are strings and build typed list + # Cast to list[object] for type-safe iteration + items = cast("list[object]", value) + str_list: list[str] = [] + for item in items: + if not isinstance(item, str): + msg = "key specifier array must contain only strings" + raise ParseError(msg) + str_list.append(item) + result: tuple[str, ...] = tuple(str_list) + if len(result) > MAX_TUPLE_ELEMENTS: + msg = f"key specifier exceeds maximum of {MAX_TUPLE_ELEMENTS} elements" + raise ParseError(msg) + if not is_valid_key_specifier(result): + msg = "key specifier contains duplicate field names" + raise ParseError(msg) + return result + msg = ( + f"key specifier must be a string or array of strings, " + f"got {type(value).__name__}" + ) + raise ParseError(msg) + + +def parse_header(obj: "JSONObject") -> Header: + """Parse a header from a JSON object. + + The object must contain a `$jsonlt` field whose value is an object + with the required `version` field and optional `key`, `$schema`, + `schema`, and `meta` fields. + + Args: + obj: A parsed JSON object containing the `$jsonlt` field. + + Returns: + A Header instance with the parsed metadata. 
+ + Raises: + ParseError: If the header structure is invalid: + - `$jsonlt` value is not an object + - `version` field is missing or not an integer + - `version` is not 1 + - Both `$schema` and `schema` are present + - `key` is not a valid key specifier + - `$schema` is not a string + - `schema` is not an object + - `meta` is not an object + """ + jsonlt_value = obj.get("$jsonlt") + + if not isinstance(jsonlt_value, dict): + msg = "$jsonlt value must be an object" + raise ParseError(msg) + + # Validate version (required) + if "version" not in jsonlt_value: + msg = "header missing required 'version' field" + raise ParseError(msg) + + version = jsonlt_value["version"] + if not isinstance(version, int) or isinstance(version, bool): + msg = f"version must be an integer, got {type(version).__name__}" + raise ParseError(msg) + + if version != JSONLT_VERSION: + msg = f"unsupported version {version}, expected {JSONLT_VERSION}" + raise ParseError(msg) + + # Check for mutually exclusive schema fields + has_schema_url = "$schema" in jsonlt_value + has_inline_schema = "schema" in jsonlt_value + + if has_schema_url and has_inline_schema: + msg = "$schema and schema are mutually exclusive" + raise ParseError(msg) + + # Parse optional key specifier + key: KeySpecifier | None = None + if "key" in jsonlt_value: + key = _parse_key_specifier(jsonlt_value["key"]) + + # Parse optional schema URL + schema_url: str | None = None + if has_schema_url: + schema_url_value = jsonlt_value["$schema"] + if not isinstance(schema_url_value, str): + msg = f"$schema must be a string, got {type(schema_url_value).__name__}" + raise ParseError(msg) + schema_url = schema_url_value + + # Parse optional inline schema + schema: JSONObject | None = None + if has_inline_schema: + schema_value = jsonlt_value["schema"] + if not isinstance(schema_value, dict): + msg = f"schema must be an object, got {type(schema_value).__name__}" + raise ParseError(msg) + schema = schema_value + + # Parse optional meta + meta: JSONObject | None = None + if "meta" in jsonlt_value: + meta_value = jsonlt_value["meta"] + if not isinstance(meta_value, dict): + msg = f"meta must be an object, got {type(meta_value).__name__}" + raise ParseError(msg) + meta = meta_value + + return Header( + version=version, + key=key, + schema_url=schema_url, + schema=schema, + meta=meta, + ) + + +def serialize_header(header: Header) -> str: + """Serialize a Header to a JSON line. + + Produces deterministic JSON output with sorted keys. + + Args: + header: The header to serialize. + + Returns: + The JSON line string (without trailing newline). + """ + # Build the $jsonlt metadata object + jsonlt_obj: dict[str, JSONValue] = {"version": header.version} + + if header.key is not None: + # Convert tuple key specifier to list for JSON + if isinstance(header.key, tuple): + jsonlt_obj["key"] = list(header.key) + else: + jsonlt_obj["key"] = header.key + + if header.schema_url is not None: + jsonlt_obj["$schema"] = header.schema_url + + if header.schema is not None: + jsonlt_obj["schema"] = header.schema + + if header.meta is not None: + jsonlt_obj["meta"] = header.meta + + # Build the full header object + header_obj: dict[str, JSONValue] = {"$jsonlt": jsonlt_obj} + + return serialize_json(header_obj) diff --git a/src/jsonlt/_json.py b/src/jsonlt/_json.py new file mode 100644 index 0000000..5e54a65 --- /dev/null +++ b/src/jsonlt/_json.py @@ -0,0 +1,198 @@ +"""JSON parsing and serialization for JSONLT. 
+ +This module provides functions for parsing and serializing JSON with +JSONLT-specific requirements: +- Duplicate key detection (ParseError) +- Nesting depth enforcement (LimitError at depth > 64) +- Deterministic serialization (sorted keys, no whitespace, ensure_ascii=False) +""" + +import json +from json import JSONDecodeError +from typing import TYPE_CHECKING, TypeAlias, cast + +from ._constants import MIN_NESTING_DEPTH +from ._exceptions import LimitError, ParseError + +if TYPE_CHECKING: + from collections.abc import Mapping, Sequence + +# JSON type definitions per RFC 8259 +# Using string annotations for forward references to avoid runtime | issues +JSONPrimitive: TypeAlias = "str | int | float | bool | None" +JSONArray: TypeAlias = "list[JSONValue]" +JSONObject: TypeAlias = "dict[str, JSONValue]" +JSONValue: TypeAlias = "JSONPrimitive | JSONArray | JSONObject" + + +def json_nesting_depth(value: object) -> int: + """Compute the nesting depth of a JSON value. + + Per specification: "The nesting depth of a JSON value is the maximum + number of nested JSON objects and arrays at any point within that value, + where the outermost value is at depth 1." + + - A primitive value (null, boolean, number, or string) has nesting depth 1. + - An empty object or array has nesting depth 1. + - An object or array containing only primitive values has nesting depth 2. + + Args: + value: A JSON-compatible value (dict, list, str, int, float, bool, None). + + Returns: + The nesting depth of the value. + """ + if isinstance(value, dict): + if not value: + return 1 + # Cast to JSONObject after isinstance check + obj = cast("JSONObject", value) + return 1 + max(json_nesting_depth(v) for v in obj.values()) + if isinstance(value, list): + if not value: + return 1 + # Cast to JSONArray after isinstance check + arr = cast("JSONArray", value) + return 1 + max(json_nesting_depth(item) for item in arr) + # Primitives: str, int, float, bool, None + return 1 + + +class _DuplicateKeyDetector(dict[str, JSONValue]): + """A dict subclass that detects duplicate keys during JSON parsing. + + Used as object_pairs_hook in json.loads to detect duplicate keys, + which are prohibited by the JSONLT specification. + """ + + def __init__(self, pairs: "Sequence[tuple[str, JSONValue]]") -> None: + """Initialize from key-value pairs, checking for duplicates. + + Args: + pairs: List of (key, value) pairs from JSON parsing. + + Raises: + ParseError: If duplicate keys are detected. + """ + super().__init__() + for key, value in pairs: + if key in self: + msg = f"duplicate key: {key!r}" + raise ParseError(msg) + self[key] = value + + +def parse_json_line( + line: str, + *, + max_depth: int = MIN_NESTING_DEPTH, +) -> JSONObject: + """Parse a single JSON line with JSONLT-specific validation. + + This function parses JSON with additional checks required by JSONLT: + - Duplicate key detection (raises ParseError) + - Nesting depth enforcement (raises LimitError if depth > max_depth) + + Args: + line: A single line of JSON text to parse. + max_depth: Maximum allowed nesting depth (default: 64 per spec). + + Returns: + The parsed JSON object as a dict. + + Raises: + ParseError: If the line contains invalid JSON, is not a JSON object, + or contains duplicate keys. + LimitError: If the JSON nesting depth exceeds max_depth. 
+ """ + try: + result: JSONValue = cast( + "JSONValue", json.loads(line, object_pairs_hook=_DuplicateKeyDetector) + ) + except JSONDecodeError as e: + msg = f"invalid JSON: {e.msg}" + raise ParseError(msg) from e + except RecursionError: + # Deeply nested JSON exhausted the Python call stack during parsing. + # Convert to LimitError since this represents excessive nesting depth. + msg = f"nesting depth exceeds maximum {max_depth}" + raise LimitError(msg) from None + except ParseError: + # Re-raise ParseError from duplicate key detection + raise + + if not isinstance(result, dict): + msg = f"expected JSON object, got {type(result).__name__}" + raise ParseError(msg) + + # Check nesting depth + try: + depth = json_nesting_depth(result) + except RecursionError: + # Deeply nested JSON exhausted the Python call stack during depth check. + msg = f"nesting depth exceeds maximum {max_depth}" + raise LimitError(msg) from None + if depth > max_depth: + msg = f"nesting depth {depth} exceeds maximum {max_depth}" + raise LimitError(msg) + + return result + + +def _sort_keys_recursive(value: JSONValue) -> JSONValue: + """Recursively sort dictionary keys for deterministic serialization. + + Args: + value: A JSON-compatible value. + + Returns: + The value with all dictionary keys sorted. + """ + if isinstance(value, dict): + return {k: _sort_keys_recursive(v) for k, v in sorted(value.items())} + if isinstance(value, list): + return [_sort_keys_recursive(item) for item in value] + return value + + +def serialize_json(value: "Mapping[str, object]") -> str: + """Serialize a JSON object using deterministic serialization. + + Per specification: "Deterministic serialization is a JSON serialization + that produces consistent output for identical logical data." + + - Keys are sorted lexicographically by Unicode code point, recursively + - No whitespace except within string values + - ensure_ascii=False (SHOULD NOT escape characters that don't require it) + + Args: + value: A JSON object (Mapping) to serialize. + + Returns: + The JSON string with sorted keys and no extraneous whitespace. + """ + # Convert Mapping to JSONObject and sort recursively + dict_value: JSONObject = cast("JSONObject", dict(value)) + sorted_value = _sort_keys_recursive(dict_value) + return json.dumps( + sorted_value, + separators=(",", ":"), + ensure_ascii=False, + ) + + +def utf8_byte_length(s: str) -> int: + """Compute the UTF-8 byte length of a string. + + Uses a fast path for ASCII-only strings where len(s) == UTF-8 byte length. + Falls back to encoding for strings containing non-ASCII characters. + + Args: + s: The string to measure. + + Returns: + The number of bytes when encoded as UTF-8. + """ + if s.isascii(): + return len(s) + return len(s.encode("utf-8")) diff --git a/src/jsonlt/_keys.py b/src/jsonlt/_keys.py new file mode 100644 index 0000000..b9c53ca --- /dev/null +++ b/src/jsonlt/_keys.py @@ -0,0 +1,345 @@ +"""Key types and operations for JSONLT. + +This module defines the key types (Key, KeyElement, KeySpecifier) and +operations for working with keys per the JSONLT specification. 
+""" + +import json +from collections.abc import Sequence +from typing import TYPE_CHECKING, TypeAlias + +from ._constants import MAX_INTEGER_KEY, MAX_TUPLE_ELEMENTS, MIN_INTEGER_KEY +from ._exceptions import InvalidKeyError +from ._json import utf8_byte_length + +if TYPE_CHECKING: + from typing import TypeGuard + from typing_extensions import TypeIs + +KeyElement: TypeAlias = "str | int" +"""A key element is a string or integer that may appear in a tuple key.""" + +Key: TypeAlias = "str | int | tuple[str | int, ...]" +"""A key identifies a record within a table. + +A key is one of: +- A string +- An integer in the range [-(2^53)+1, (2^53)-1] +- A tuple of key elements (non-empty, max 16 elements) +""" + +KeySpecifier: TypeAlias = "str | tuple[str, ...]" +"""A key specifier defines how to extract a key from a record. + +A key specifier is one of: +- A string naming a single field +- A tuple of strings naming multiple fields (for compound keys) +""" + + +def is_valid_key_element(value: object) -> "TypeIs[str | int]": + """Check if a value is a valid key element. + + A valid key element is a string or an integer within the range + [-(2^53)+1, (2^53)-1]. + + Args: + value: The value to check. + + Returns: + True if the value is a valid key element, False otherwise. + """ + if isinstance(value, str): + return True + if isinstance(value, int) and not isinstance(value, bool): + return MIN_INTEGER_KEY <= value <= MAX_INTEGER_KEY + return False + + +def is_valid_key(value: object) -> "TypeIs[str | int | tuple[str | int, ...]]": + """Check if a value is a valid key. + + A valid key is: + - A string + - An integer within the range [-(2^53)+1, (2^53)-1] + - A non-empty tuple of valid key elements (max 16 elements) + + Args: + value: The value to check. + + Returns: + True if the value is a valid key, False otherwise. + """ + if isinstance(value, str): + return True + if isinstance(value, int) and not isinstance(value, bool): + return MIN_INTEGER_KEY <= value <= MAX_INTEGER_KEY + if not isinstance(value, tuple): + return False + # Type narrowing from isinstance(value, tuple) gives tuple[Unknown, ...] + # which is unavoidable when validating arbitrary objects + tuple_value: tuple[object, ...] = value # pyright: ignore[reportUnknownVariableType] + if len(tuple_value) == 0: + return False + if len(tuple_value) > MAX_TUPLE_ELEMENTS: + return False + return all(is_valid_key_element(elem) for elem in tuple_value) + + +def is_valid_key_specifier(specifier: object) -> "TypeGuard[str | tuple[str, ...]]": + """Check if a value is a valid key specifier. + + A valid key specifier is: + - A string (naming a single field) + - A non-empty tuple of strings with no duplicates + + Args: + specifier: The value to check. + + Returns: + True if the value is a valid key specifier, False otherwise. + """ + if isinstance(specifier, str): + return True + if not isinstance(specifier, tuple): + return False + # Type narrowing from isinstance(specifier, tuple) gives tuple[Unknown, ...] + tuple_spec: tuple[object, ...] = specifier # pyright: ignore[reportUnknownVariableType] + if len(tuple_spec) == 0: + return False + if not all(isinstance(field, str) for field in tuple_spec): + return False + # After the isinstance check above, all elements are strings + str_tuple: tuple[str, ...] = tuple_spec # pyright: ignore[reportAssignmentType] + return len(str_tuple) == len(set(str_tuple)) + + +def normalize_key_specifier(specifier: KeySpecifier) -> KeySpecifier: + """Normalize a key specifier. 
+ + Single-element tuples are normalized to strings. Other specifiers + are returned unchanged. + + Args: + specifier: A valid key specifier. + + Returns: + The normalized key specifier. + """ + if isinstance(specifier, tuple) and len(specifier) == 1: + return specifier[0] + return specifier + + +def key_specifiers_match(a: KeySpecifier, b: KeySpecifier) -> bool: + """Check if two key specifiers match. + + Key specifiers match if, after normalizing single-element tuples to + strings, they are structurally identical and each field name consists + of the same sequence of Unicode code points. + + Args: + a: First key specifier. + b: Second key specifier. + + Returns: + True if the key specifiers match, False otherwise. + """ + return normalize_key_specifier(a) == normalize_key_specifier(b) + + +def _compare_elements(a: str | int, b: str | int) -> int: + """Compare two key elements. + + Args: + a: First key element. + b: Second key element. + + Returns: + -1 if a < b, 0 if a == b, 1 if a > b. + """ + # Integers are ordered before strings + a_is_int = isinstance(a, int) + b_is_int = isinstance(b, int) + + if a_is_int and not b_is_int: + return -1 + if not a_is_int and b_is_int: + return 1 + + # Same type: compare directly + if a < b: # pyright: ignore[reportOperatorIssue] + return -1 + if a > b: # pyright: ignore[reportOperatorIssue] + return 1 + return 0 + + +def _cmp(a: int, b: int) -> int: + """Return -1, 0, or 1 based on comparison.""" + return -1 if a < b else (1 if a > b else 0) + + +def _compare_ints(a: int, b: int) -> int: + """Compare two integers.""" + return _cmp(a, b) + + +def _compare_strs(a: str, b: str) -> int: + """Compare two strings lexicographically.""" + return -1 if a < b else (1 if a > b else 0) + + +def _compare_tuples(a: tuple[str | int, ...], b: tuple[str | int, ...]) -> int: + """Compare two tuples lexicographically by element.""" + for elem_a, elem_b in zip(a, b, strict=False): + cmp = _compare_elements(elem_a, elem_b) + if cmp != 0: + return cmp + return _cmp(len(a), len(b)) + + +def _type_rank(k: Key) -> int: + """Return type rank for ordering: int=0, str=1, tuple=2.""" + if isinstance(k, int): + return 0 + if isinstance(k, str): + return 1 + return 2 + + +def compare_keys(a: Key, b: Key) -> int: + """Compare two keys according to JSONLT ordering. + + Key ordering: + - Integers are ordered numerically + - Strings are ordered lexicographically by Unicode code point + - Tuples are ordered lexicographically by element + - Across types: integers < strings < tuples + + Args: + a: First key. + b: Second key. + + Returns: + -1 if a < b, 0 if a == b, 1 if a > b. + """ + rank_a = _type_rank(a) + rank_b = _type_rank(b) + + if rank_a != rank_b: + return _cmp(rank_a, rank_b) + + # Same type - dispatch to type-specific comparison + if isinstance(a, int) and isinstance(b, int): + return _compare_ints(a, b) + if isinstance(a, str) and isinstance(b, str): + return _compare_strs(a, b) + # Both are tuples (type narrowed after int/str checks) + return _compare_tuples(a, b) # pyright: ignore[reportArgumentType] + + +def serialize_key(key: Key) -> str: + """Serialize a key to its JSON representation. + + Uses ensure_ascii=False per the specification requirement that generators + SHOULD NOT escape characters that do not require escaping. + + Args: + key: A valid key. + + Returns: + The JSON string representation of the key. 
+ """ + if isinstance(key, tuple): + return json.dumps(list(key), separators=(",", ":"), ensure_ascii=False) + return json.dumps(key, separators=(",", ":"), ensure_ascii=False) + + +def key_length(key: Key) -> int: + """Compute the byte length of a key per the specification. + + The key length is the number of bytes in its JSON representation + when encoded as UTF-8: + - For a string key: byte length including quotes and escapes + - For an integer key: byte length of decimal representation + - For a tuple key: byte length of the complete JSON array + + Args: + key: A valid key. + + Returns: + The key length in bytes. + """ + return utf8_byte_length(serialize_key(key)) + + +def validate_key_arity(key: Key, key_specifier: KeySpecifier) -> None: + """Validate that key arity matches specifier arity. + + A scalar key specifier (string) requires a scalar key (string or int). + A tuple key specifier requires a tuple key of the same length. + + Args: + key: The key to validate. + key_specifier: The key specifier to match against. + + Raises: + InvalidKeyError: If the key arity doesn't match the specifier. + """ + if isinstance(key_specifier, str): + # Scalar specifier - key must be scalar + if isinstance(key, tuple): + msg = ( + f"key arity mismatch: expected scalar key, " + f"got tuple of {len(key)} elements" + ) + raise InvalidKeyError(msg) + else: + # Tuple specifier - key must be tuple of same length + if not isinstance(key, tuple): + msg = ( + f"key arity mismatch: expected tuple of {len(key_specifier)} " + f"elements, got scalar" + ) + raise InvalidKeyError(msg) + if len(key) != len(key_specifier): + msg = ( + f"key arity mismatch: expected tuple of {len(key_specifier)} " + f"elements, got {len(key)}" + ) + raise InvalidKeyError(msg) + + +def key_from_json(value: object) -> Key: + """Convert a JSON-parsed value to a Key. + + This is used when extracting keys from parsed JSON objects. + Lists are converted to tuples. + + Args: + value: A value from JSON parsing. + + Returns: + The value as a Key type. + + Raises: + TypeError: If the value cannot be converted to a valid key. + """ + if isinstance(value, str): + return value + if isinstance(value, int) and not isinstance(value, bool): + return value + if isinstance(value, Sequence) and not isinstance(value, str): + elements: list[str | int] = [] + for item in value: + if isinstance(item, str) or ( + isinstance(item, int) and not isinstance(item, bool) + ): + elements.append(item) + else: + msg = f"Cannot convert {type(item).__name__} to key element" + raise TypeError(msg) + return tuple(elements) + msg = f"Cannot convert {type(value).__name__} to key" + raise TypeError(msg) diff --git a/src/jsonlt/_lock.py b/src/jsonlt/_lock.py new file mode 100644 index 0000000..e7ad9bb --- /dev/null +++ b/src/jsonlt/_lock.py @@ -0,0 +1,191 @@ +"""File locking for JSONLT write operations. + +This module provides cross-platform exclusive file locking with timeout support. +Uses fcntl on Unix systems and LockFileEx via ctypes on Windows. 
+""" + +import contextlib +import ctypes +import importlib +import sys +import time +from contextlib import contextmanager +from typing import TYPE_CHECKING, Final, Protocol + +from ._exceptions import LockError + +if TYPE_CHECKING: + from collections.abc import Iterator + from types import ModuleType + from typing import BinaryIO + +# Lock polling constants +_LOCK_POLL_INTERVAL: Final[float] = 0.01 # 10ms initial +_LOCK_MAX_POLL_INTERVAL: Final[float] = 0.1 # 100ms max with backoff + + +class _LockModule(Protocol): + """Protocol for platform-specific lock module.""" + + def acquire(self, fd: int) -> bool: # pragma: no cover + """Try to acquire exclusive lock.""" + ... + + def release(self, fd: int) -> None: # pragma: no cover + """Release exclusive lock.""" + ... + + +class _UnixLock: + """Unix file locking using fcntl.""" + + _fcntl: "ModuleType" + _lock_ex_nb: int + _lock_un: int + + def __init__(self) -> None: + self._fcntl = importlib.import_module("fcntl") + self._lock_ex_nb = self._fcntl.LOCK_EX | self._fcntl.LOCK_NB # pyright: ignore[reportAny] + self._lock_un = self._fcntl.LOCK_UN # pyright: ignore[reportAny] + + def acquire(self, fd: int) -> bool: + """Try to acquire exclusive lock on Unix.""" + try: + self._fcntl.flock(fd, self._lock_ex_nb) # pyright: ignore[reportAny] + except OSError: + return False + else: + return True + + def release(self, fd: int) -> None: + """Release exclusive lock on Unix.""" + self._fcntl.flock(fd, self._lock_un) # pyright: ignore[reportAny] + + +class _WindowsLock: + """Windows file locking using LockFileEx. + + Uses the Windows LockFileEx API via ctypes for proper file locking. + This provides semantics similar to Unix flock: + - Locks the entire file (not just a byte range) + - Works correctly on empty files + - Interoperates with other Windows applications using LockFileEx + """ + + # Windows API constants + _LOCKFILE_EXCLUSIVE_LOCK: Final[int] = 0x0002 + _LOCKFILE_FAIL_IMMEDIATELY: Final[int] = 0x0001 + + _msvcrt: "ModuleType" + _kernel32: object # WinDLL from ctypes + _overlapped_class: type[ctypes.Structure] + + def __init__(self) -> None: + # Get handle to kernel32.dll + self._kernel32 = ctypes.windll.kernel32 + + # Define OVERLAPPED structure for LockFileEx + # (ctypes _fields_ must be plain list, not ClassVar - standard ctypes pattern) + class _Overlapped(ctypes.Structure): + _fields_ = [ # noqa: RUF012 # pyright: ignore[reportUnannotatedClassAttribute] + ("Internal", ctypes.c_void_p), + ("InternalHigh", ctypes.c_void_p), + ("Offset", ctypes.c_ulong), + ("OffsetHigh", ctypes.c_ulong), + ("hEvent", ctypes.c_void_p), + ] + + self._overlapped_class = _Overlapped + + # Get msvcrt for get_osfhandle + self._msvcrt = importlib.import_module("msvcrt") + + def _get_handle(self, fd: int) -> int: + """Convert file descriptor to Windows HANDLE.""" + handle: int = self._msvcrt.get_osfhandle(fd) # pyright: ignore[reportAny] + return handle + + def acquire(self, fd: int) -> bool: + """Try to acquire exclusive lock on Windows using LockFileEx.""" + try: + handle = self._get_handle(fd) + overlapped = self._overlapped_class() + + # Lock entire file using maximum range (0xFFFFFFFF for both low/high) + # LOCKFILE_EXCLUSIVE_LOCK | LOCKFILE_FAIL_IMMEDIATELY + flags = self._LOCKFILE_EXCLUSIVE_LOCK | self._LOCKFILE_FAIL_IMMEDIATELY + # ctypes Windows API call - suppress type warnings for dynamic ctypes access + result = self._kernel32.LockFileEx( # pyright: ignore[reportUnknownVariableType, reportUnknownMemberType, reportAttributeAccessIssue] + handle, + flags, + 0, 
# dwReserved
+                0xFFFFFFFF,  # nNumberOfBytesToLockLow
+                0xFFFFFFFF,  # nNumberOfBytesToLockHigh
+                ctypes.byref(overlapped),
+            )
+            return bool(result)  # pyright: ignore[reportUnknownArgumentType]
+        except OSError:
+            return False
+
+    def release(self, fd: int) -> None:
+        """Release exclusive lock on Windows using UnlockFileEx."""
+        with contextlib.suppress(OSError):
+            handle = self._get_handle(fd)
+            overlapped = self._overlapped_class()
+
+            # Unlock the same range we locked
+            self._kernel32.UnlockFileEx(  # pyright: ignore[reportUnknownMemberType, reportAttributeAccessIssue]
+                handle,
+                0,  # dwReserved
+                0xFFFFFFFF,  # nNumberOfBytesToUnlockLow
+                0xFFFFFFFF,  # nNumberOfBytesToUnlockHigh
+                ctypes.byref(overlapped),
+            )
+
+
+# Initialize platform-specific lock implementation
+_lock_impl: _LockModule = _WindowsLock() if sys.platform == "win32" else _UnixLock()
+
+
+@contextmanager
+def exclusive_lock(
+    file: "BinaryIO",
+    timeout: float | None = None,
+) -> "Iterator[None]":
+    """Acquire exclusive lock on file, yield, then release.
+
+    Uses platform-specific locking (fcntl on Unix, LockFileEx via ctypes
+    on Windows). Implements polling with exponential backoff up to 100ms.
+
+    Args:
+        file: Open file object to lock.
+        timeout: Maximum seconds to wait. None means wait indefinitely.
+
+    Yields:
+        None when lock is acquired.
+
+    Raises:
+        LockError: If lock cannot be acquired within timeout.
+    """
+    fd = file.fileno()
+    start_time = time.monotonic()
+    poll_interval = _LOCK_POLL_INTERVAL
+
+    while True:
+        if _lock_impl.acquire(fd):
+            try:
+                yield
+            finally:
+                _lock_impl.release(fd)
+            return
+
+        # Check timeout
+        if timeout is not None:
+            elapsed = time.monotonic() - start_time
+            if elapsed >= timeout:
+                msg = f"could not acquire file lock within {timeout}s"
+                raise LockError(msg)
+
+        # Sleep with exponential backoff
+        time.sleep(poll_interval)
+        poll_interval = min(poll_interval * 2, _LOCK_MAX_POLL_INTERVAL)
diff --git a/src/jsonlt/_readable.py b/src/jsonlt/_readable.py
new file mode 100644
index 0000000..4de93dd
--- /dev/null
+++ b/src/jsonlt/_readable.py
@@ -0,0 +1,241 @@
+"""Mixin class for readable table-like objects.
+
+This module provides ReadableMixin, an abstract base class that implements
+read operations for both Table and Transaction classes.
+"""
+
+from abc import ABC, abstractmethod
+from functools import cmp_to_key
+from typing import TYPE_CHECKING, ClassVar, TypeGuard, cast, overload
+
+from ._keys import Key, compare_keys
+
+if TYPE_CHECKING:
+    from collections.abc import Callable, Iterator
+
+    from ._json import JSONObject
+
+
+class ReadableMixin(ABC):
+    """Abstract mixin providing read operations for table-like objects.
+
+    Subclasses must implement:
+    - _get_state(): Returns the dict[Key, JSONObject] to read from
+    - _prepare_read(): Called before each public read operation
+
+    Subclasses must also have a `_cached_sorted_keys: list[Key] | None` slot.
+    """
+
+    __slots__: ClassVar[tuple[str, ...]] = ()
+
+    # --- Abstract methods for subclasses ---
+
+    @abstractmethod
+    def _get_state(self) -> "dict[Key, JSONObject]":
+        """Return the state dictionary to read from."""
+        ...
+
+    @abstractmethod
+    def _prepare_read(self) -> None:
+        """Perform any required setup before a read operation."""
+        ...
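+
+    # Illustrative (hypothetical) subclass wiring:
+    #
+    #     class _Snapshot(ReadableMixin):
+    #         __slots__ = ("_records", "_cached_sorted_keys")
+    #         def _get_state(self): return self._records
+    #         def _prepare_read(self): pass  # e.g. reload if stale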
+ + # Subclasses must have this as a slot attribute + _cached_sorted_keys: list[Key] | None + + # --- Private helpers --- + + def _sorted_keys(self) -> list[Key]: + """Return keys sorted by JSONLT key ordering.""" + if self._cached_sorted_keys is None: + self._cached_sorted_keys = sorted( + self._get_state().keys(), key=cmp_to_key(compare_keys) + ) + return self._cached_sorted_keys + + def _sorted_records(self) -> "list[JSONObject]": + """Return records sorted by key order.""" + state = self._get_state() + return [state[k] for k in self._sorted_keys()] + + @staticmethod + def _is_valid_tuple_key( + key: tuple[object, ...], + ) -> "TypeGuard[tuple[str | int, ...]]": + """Check if a tuple is a valid Key tuple (all elements are str or int).""" + return all(isinstance(k, (str, int)) for k in key) + + @staticmethod + def _validate_key(key: Key) -> None: + """Validate that a key is not an empty tuple. + + Args: + key: The key to validate. + + Raises: + InvalidKeyError: If the key is an empty tuple. + """ + if isinstance(key, tuple) and len(key) == 0: + from ._exceptions import InvalidKeyError # noqa: PLC0415 + + msg = "empty tuple is not a valid key" + raise InvalidKeyError(msg) + + # --- Read methods --- + + def get(self, key: Key) -> "JSONObject | None": + """Get a record by key. + + Args: + key: The key to look up. + + Returns: + The record if found, None otherwise. + + Raises: + InvalidKeyError: If the key is an empty tuple. + """ + self._validate_key(key) + self._prepare_read() + return self._get_state().get(key) + + def has(self, key: Key) -> bool: + """Check if a key exists. + + Args: + key: The key to check. + + Returns: + True if the key exists, False otherwise. + + Raises: + InvalidKeyError: If the key is an empty tuple. + """ + self._validate_key(key) + self._prepare_read() + return key in self._get_state() + + def all(self) -> "list[JSONObject]": + """Get all records in key order. + + Returns: + A list of all records, sorted by key. + """ + self._prepare_read() + return self._sorted_records() + + def keys(self) -> list[Key]: + """Get all keys in key order. + + Returns: + A list of all keys, sorted. + """ + self._prepare_read() + return self._sorted_keys() + + def items(self) -> "list[tuple[Key, JSONObject]]": + """Get all key-value pairs in key order. + + Returns: + A list of (key, record) tuples, sorted by key. + """ + self._prepare_read() + state = self._get_state() + return [(k, state[k]) for k in self._sorted_keys()] + + def count(self) -> int: + """Get the number of records. + + Returns: + The number of records. + """ + self._prepare_read() + return len(self._get_state()) + + def __len__(self) -> int: + """Return the number of records.""" + return self.count() + + def __contains__(self, key: object) -> bool: + """Check if a key exists. + + Args: + key: The key to check. Must be a valid Key type. + + Returns: + True if the key exists, False otherwise. + """ + if isinstance(key, str): + return self.has(key) + if isinstance(key, int): + return self.has(key) + if isinstance(key, tuple): + tuple_key = cast("tuple[object, ...]", key) + if self._is_valid_tuple_key(tuple_key): + return self.has(tuple_key) + return False + + def __iter__(self) -> "Iterator[JSONObject]": + """Iterate over all records in key order.""" + yield from self.all() + + @overload + def find( + self, + predicate: "Callable[[JSONObject], bool]", + ) -> "list[JSONObject]": ... 
# pragma: no cover + + @overload + def find( + self, + predicate: "Callable[[JSONObject], bool]", + *, + limit: int, + ) -> "list[JSONObject]": ... # pragma: no cover + + def find( + self, + predicate: "Callable[[JSONObject], bool]", + *, + limit: "int | None" = None, + ) -> "list[JSONObject]": + """Find records matching a predicate. + + Records are returned in key order. + + Args: + predicate: A function that takes a record and returns True if + it should be included. + limit: Maximum number of records to return. + + Returns: + A list of matching records, in key order. + """ + self._prepare_read() + results: list[JSONObject] = [] + for record in self._sorted_records(): + if predicate(record): + results.append(record) + if limit is not None and len(results) >= limit: + break + return results + + def find_one( + self, + predicate: "Callable[[JSONObject], bool]", + ) -> "JSONObject | None": + """Find the first record matching a predicate. + + Records are checked in key order. + + Args: + predicate: A function that takes a record and returns True. + + Returns: + The first matching record, or None if no match. + """ + self._prepare_read() + for record in self._sorted_records(): + if predicate(record): + return record + return None diff --git a/src/jsonlt/_reader.py b/src/jsonlt/_reader.py new file mode 100644 index 0000000..5104890 --- /dev/null +++ b/src/jsonlt/_reader.py @@ -0,0 +1,179 @@ +"""File reading operations for JSONLT. + +This module provides functions for reading and parsing JSONLT files, +handling encoding normalization, line splitting, and header detection. +""" + +from pathlib import Path +from typing import TYPE_CHECKING + +from ._encoding import prepare_input +from ._exceptions import FileError, LimitError, ParseError +from ._header import Header, is_header_line, parse_header +from ._json import parse_json_line + +if TYPE_CHECKING: + from ._json import JSONObject + + +def read_table_file( + path: Path | str, + *, + max_file_size: int | None = None, +) -> tuple[Header | None, list["JSONObject"]]: + """Read and parse a JSONLT file. + + This function reads a file from disk and parses it according to the + JSONLT specification: + + 1. Read file contents as bytes + 2. Apply encoding normalization (BOM stripping, CRLF→LF, UTF-8 validation) + 3. Split by newlines into lines + 4. Parse each non-empty line as JSON + 5. Detect and parse header if present on first line + 6. Return header (if present) and list of operation objects + + Per specification: + - Empty files are valid (return None, []) + - Missing trailing newline is accepted + - Empty lines are skipped (though not expected in valid files) + - Header must be on the first line if present + + Args: + path: Path to the JSONLT file to read. + max_file_size: Maximum allowed file size in bytes. If the file exceeds + this limit, LimitError is raised. If None (default), no limit is + enforced. + + Returns: + A tuple of (header, operations) where: + - header is the parsed Header if the first line was a header, else None + - operations is a list of parsed JSON objects (records and tombstones) + + Raises: + FileError: If the file cannot be read (permissions, I/O errors). + LimitError: If the file size exceeds max_file_size. + ParseError: If the file contains invalid UTF-8, invalid JSON, + non-object JSON values, duplicate keys, or invalid header structure. 
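+
+    Example (an illustrative sketch; assumes ``users.jsonlt`` exists on disk):
+        >>> header, operations = read_table_file(
+        ...     "users.jsonlt", max_file_size=1_000_000
+        ... )
+        >>> isinstance(operations, list)
+        True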
+ """ + file_path = Path(path) if isinstance(path, str) else path + + # Check file size before reading if limit is specified + if max_file_size is not None: + try: + file_size = file_path.stat().st_size + except OSError as e: + msg = f"cannot read file: {e}" + raise FileError(msg) from e + if file_size > max_file_size: + msg = f"file size {file_size} bytes exceeds maximum {max_file_size}" + raise LimitError(msg) + + try: + raw_bytes = file_path.read_bytes() + except OSError as e: + msg = f"cannot read file: {e}" + raise FileError(msg) from e + + return parse_table_content(raw_bytes) + + +def parse_table_content( + data: bytes, +) -> tuple[Header | None, list["JSONObject"]]: + """Parse JSONLT content from bytes. + + This is the core parsing function that processes raw bytes according + to the JSONLT specification. It handles: + - BOM stripping + - CRLF→LF normalization + - UTF-8 validation + - Line splitting + - JSON parsing with duplicate key detection + - Header detection and parsing + + This function is useful for parsing content that doesn't come from + a file (e.g., from network, in-memory buffers). + + Args: + data: Raw bytes containing JSONLT content. + + Returns: + A tuple of (header, operations) where: + - header is the parsed Header if the first line was a header, else None + - operations is a list of parsed JSON objects (records and tombstones) + + Raises: + ParseError: If the content contains invalid UTF-8, invalid JSON, + non-object JSON values, duplicate keys, or invalid header structure. + """ + # Handle empty content + if not data: + return (None, []) + + # Apply encoding normalization + try: + text = prepare_input(data) + except UnicodeDecodeError as e: + msg = f"invalid UTF-8: {e}" + raise ParseError(msg) from e + + # Handle empty string (e.g., file was just BOM) + if not text: + return (None, []) + + return parse_table_text(text) + + +def parse_table_text( + text: str, +) -> tuple[Header | None, list["JSONObject"]]: + """Parse JSONLT content from a decoded string. + + This function handles the line-by-line parsing of JSONLT content + after encoding normalization has been applied. + + Args: + text: Decoded and normalized text content. + + Returns: + A tuple of (header, operations) where: + - header is the parsed Header if the first line was a header, else None + - operations is a list of parsed JSON objects (records and tombstones) + + Raises: + ParseError: If the content contains invalid JSON, non-object JSON values, + duplicate keys, or invalid header structure. + """ + # Handle empty text + if not text: + return (None, []) + + # Split into lines + lines = text.split("\n") + + header: Header | None = None + operations: list[JSONObject] = [] + + for i, line in enumerate(lines): + # Skip empty lines (handles missing trailing newline and empty lines) + if not line: + continue + + # Parse the JSON object + obj = parse_json_line(line) + + # Check if first non-empty line is a header + if i == 0 and is_header_line(obj): + header = parse_header(obj) + continue + + # If header appears after first line, reject it + if is_header_line(obj): + msg = "header must be on first line" + raise ParseError(msg) + + # This is a record or tombstone + operations.append(obj) + + return (header, operations) diff --git a/src/jsonlt/_records.py b/src/jsonlt/_records.py new file mode 100644 index 0000000..739cc1e --- /dev/null +++ b/src/jsonlt/_records.py @@ -0,0 +1,258 @@ +"""Record operations for JSONLT. 
+ +This module provides functions for validating records, extracting keys, +and handling tombstones per the JSONLT specification. +""" + +import math +from typing import TYPE_CHECKING + +from ._constants import MAX_INTEGER_KEY, MIN_INTEGER_KEY +from ._exceptions import InvalidKeyError, ParseError +from ._json import serialize_json, utf8_byte_length + +if TYPE_CHECKING: + from ._json import JSONObject + +# Key and KeySpecifier are TypeAlias definitions needed at runtime for type hints +from ._keys import Key, KeySpecifier, validate_key_arity + + +def _validate_key_field_value(value: object, field: str) -> str | int: + """Validate that a value is valid for a key field. + + Args: + value: The value from the key field. + field: The field name (for error messages). + + Returns: + The validated value as a string or int. + + Raises: + InvalidKeyError: If the value is not a valid key element. + """ + if value is None: + msg = f"key field '{field}' value is null" + raise InvalidKeyError(msg) + + if isinstance(value, bool): + msg = f"key field '{field}' value is boolean" + raise InvalidKeyError(msg) + + if isinstance(value, dict): + msg = f"key field '{field}' value is an object" + raise InvalidKeyError(msg) + + if isinstance(value, list): + msg = f"key field '{field}' value is an array" + raise InvalidKeyError(msg) + + if isinstance(value, float): + if math.isinf(value) or math.isnan(value): + msg = f"key field '{field}' value is Infinity or NaN" + raise InvalidKeyError(msg) + if not value.is_integer(): + msg = f"key field '{field}' value is not an integer" + raise InvalidKeyError(msg) + + if isinstance(value, (int, float)): + int_value = int(value) + if int_value < MIN_INTEGER_KEY or int_value > MAX_INTEGER_KEY: + msg = ( + f"key field '{field}' value {int_value} is outside valid integer range" + ) + raise InvalidKeyError(msg) + return int_value + + if isinstance(value, str): + return value + + # Defensive fallback - unreachable with valid JSON input + type_name = type(value).__name__ # pragma: no cover + msg = f"key field '{field}' has invalid type {type_name}" # pragma: no cover + raise InvalidKeyError(msg) # pragma: no cover + + +def validate_record(record: "JSONObject", key_specifier: KeySpecifier) -> None: + """Validate that a record contains required key fields and no $-prefixed fields. + + Args: + record: The record to validate. + key_specifier: The key specifier defining required fields. + + Raises: + InvalidKeyError: If the record is missing required key fields, + has invalid key field values, or contains $-prefixed field names. + """ + # Check for $-prefixed fields (reserved for protocol use) + for field_name in record: + if field_name.startswith("$"): + msg = f"record contains reserved field name '{field_name}'" + raise InvalidKeyError(msg) + + # Get the list of required key fields + if isinstance(key_specifier, str): + key_fields = [key_specifier] + else: + key_fields = list(key_specifier) + + # Validate each key field exists and has a valid value + for field in key_fields: + if field not in record: + msg = f"record missing required key field '{field}'" + raise InvalidKeyError(msg) + _ = _validate_key_field_value(record[field], field) + + +def is_tombstone(obj: "JSONObject") -> bool: + """Check if a JSON object is a tombstone (delete marker). + + A tombstone contains `$deleted` with value `true`. + + Args: + obj: A parsed JSON object. + + Returns: + True if the object contains `$deleted: true`, False otherwise. 
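+
+    Example (detection is strict: ``$deleted`` must be exactly ``true``):
+        >>> is_tombstone({"$deleted": True, "id": "alice"})
+        True
+        >>> is_tombstone({"$deleted": False, "id": "alice"})
+        False
+        >>> is_tombstone({"id": "alice"})
+        False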
+ """ + return obj.get("$deleted") is True + + +def validate_tombstone(obj: "JSONObject", key_specifier: KeySpecifier) -> None: + """Validate that a tombstone has the correct structure. + + A valid tombstone contains: + - The field `$deleted` with value `true` + - The required key fields per the key specifier + + Args: + obj: A parsed JSON object that is expected to be a tombstone. + key_specifier: The key specifier defining required key fields. + + Raises: + ParseError: If `$deleted` has an invalid value. + InvalidKeyError: If required key fields are missing or invalid. + """ + # Validate $deleted field + if "$deleted" not in obj: + msg = "tombstone missing $deleted field" + raise ParseError(msg) + + deleted_value = obj["$deleted"] + if deleted_value is not True: + if deleted_value is False: + msg = "$deleted must be true, got false" + elif deleted_value is None: + msg = "$deleted must be true, got null" + elif isinstance(deleted_value, str): + msg = "$deleted must be true, got string" + elif isinstance(deleted_value, (int, float)): + msg = "$deleted must be true, got number" + else: + msg = f"$deleted must be true, got {type(deleted_value).__name__}" + raise ParseError(msg) + + # Get the list of required key fields + if isinstance(key_specifier, str): + key_fields = [key_specifier] + else: + key_fields = list(key_specifier) + + # Validate each key field exists and has a valid value + for field in key_fields: + if field not in obj: + msg = f"tombstone missing required key field '{field}'" + raise InvalidKeyError(msg) + _ = _validate_key_field_value(obj[field], field) + + +def extract_key(record: "JSONObject", key_specifier: KeySpecifier) -> Key: + """Extract a key from a record using the given key specifier. + + Per the specification's "extract a key" algorithm: + - For a string key specifier, extract the single field value + - For a tuple key specifier, extract each field and return as a tuple + - Single-element tuple key specifiers return a scalar key (not a tuple) + + Args: + record: The record to extract the key from. + key_specifier: The key specifier defining which fields form the key. + + Returns: + The extracted key (string, int, or tuple). + + Raises: + InvalidKeyError: If a required key field is missing or has an invalid value. + """ + if isinstance(key_specifier, str): + # Single field key specifier + if key_specifier not in record: + msg = f"record missing required key field '{key_specifier}'" + raise InvalidKeyError(msg) + return _validate_key_field_value(record[key_specifier], key_specifier) + + # Tuple key specifier + if len(key_specifier) == 0: + msg = "key specifier cannot be empty" + raise InvalidKeyError(msg) + + elements: list[str | int] = [] + for field in key_specifier: + if field not in record: + msg = f"record missing required key field '{field}'" + raise InvalidKeyError(msg) + value = _validate_key_field_value(record[field], field) + elements.append(value) + + # Single-element tuple key specifiers return a scalar key + if len(elements) == 1: + return elements[0] + + return tuple(elements) + + +def build_tombstone(key: Key, key_specifier: KeySpecifier) -> "JSONObject": + """Build a tombstone object for the given key. + + A tombstone is a delete marker containing `$deleted: true` and the + key field(s) to identify the record being deleted. + + Args: + key: The key identifying the record to delete. + key_specifier: The key specifier defining key field names. + + Returns: + A tombstone JSONObject with $deleted: true and key fields. 
+ + Raises: + InvalidKeyError: If key arity doesn't match specifier. + """ + validate_key_arity(key, key_specifier) + + tombstone: dict[str, str | int | bool] = {"$deleted": True} + + if isinstance(key_specifier, str): + # Scalar key specifier - key is scalar + tombstone[key_specifier] = key # pyright: ignore[reportArgumentType] + else: + # Tuple key specifier - key is tuple of same length + key_tuple: tuple[str | int, ...] = key # pyright: ignore[reportAssignmentType] + tombstone.update(dict(zip(key_specifier, key_tuple, strict=True))) + + return tombstone # pyright: ignore[reportReturnType] + + +def record_size(record: "JSONObject") -> int: + """Compute the serialized byte size of a record. + + The record size is the number of bytes in the record's JSON serialization + using deterministic serialization, encoded as UTF-8. + + Args: + record: The record to measure. + + Returns: + The size in bytes of the deterministically serialized record. + """ + serialized = serialize_json(record) + return utf8_byte_length(serialized) diff --git a/src/jsonlt/_state.py b/src/jsonlt/_state.py new file mode 100644 index 0000000..8b7a141 --- /dev/null +++ b/src/jsonlt/_state.py @@ -0,0 +1,62 @@ +"""Logical state computation for JSONLT. + +This module provides functions for computing the logical state of a table +by replaying a sequence of operations (upserts and deletes). +""" + +from typing import TYPE_CHECKING + +from ._records import extract_key, is_tombstone + +if TYPE_CHECKING: + from collections.abc import Sequence + + from ._json import JSONObject + from ._keys import Key, KeySpecifier + + +def compute_logical_state( + operations: "Sequence[JSONObject]", + key_specifier: "KeySpecifier", +) -> dict["Key", "JSONObject"]: + """Compute the logical state by replaying operations. + + This function processes a sequence of operations in order and + computes the resulting logical state of the table: + + - Upserts (non-tombstone records) add or update entries + - Deletes (tombstones) remove entries + + The key for each operation is extracted using the given key specifier. + + Per specification, the logical state is the result of applying all + operations in sequence, where later operations overwrite earlier ones + for the same key. + + Args: + operations: List of parsed JSON objects (records and tombstones). + key_specifier: The key specifier defining which fields form the key. + + Returns: + A dictionary mapping keys to their final record values. + Deleted records are not present in the result. + + Raises: + InvalidKeyError: If an operation is missing required key fields + or has invalid key field values. + """ + state: dict[Key, JSONObject] = {} + + for obj in operations: + # Extract the key from the operation + key = extract_key(obj, key_specifier) + + # Determine operation type and apply + if is_tombstone(obj): + # Delete: remove from state if present + _ = state.pop(key, None) + else: + # Upsert: add or update + state[key] = obj + + return state diff --git a/src/jsonlt/_table.py b/src/jsonlt/_table.py index 3344ea4..e8a952d 100644 --- a/src/jsonlt/_table.py +++ b/src/jsonlt/_table.py @@ -1,2 +1,810 @@ -class Table: - pass +"""Table class for JSONLT file operations. + +This module provides the Table class, the primary interface for working +with JSONLT files. It handles file loading, auto-reload, and read/write operations. 
+""" + +# pyright: reportImportCycles=false + +from pathlib import Path +from typing import TYPE_CHECKING, ClassVar +from typing_extensions import override + +from ._constants import MAX_KEY_LENGTH, MAX_RECORD_SIZE +from ._encoding import validate_no_surrogates +from ._exceptions import ( + ConflictError, + FileError, + InvalidKeyError, + LimitError, + TransactionError, +) +from ._filesystem import FileSystem, RealFileSystem +from ._header import serialize_header +from ._json import serialize_json, utf8_byte_length +from ._keys import ( + Key, + KeySpecifier, + key_length, + key_specifiers_match, + normalize_key_specifier, + validate_key_arity, +) +from ._readable import ReadableMixin +from ._reader import parse_table_content, read_table_file +from ._records import build_tombstone, extract_key, validate_record +from ._state import compute_logical_state + +if TYPE_CHECKING: + from ._header import Header + from ._json import JSONObject + from ._transaction import Transaction + +__all__ = ["Table"] + + +class Table(ReadableMixin): + """A JSONLT table backed by a file. + + The Table class provides the primary interface for working with JSONLT + files. It loads the file, computes the logical state, and provides + methods for reading records. + + The table supports auto-reload: before each read operation, it checks + if the underlying file has changed (by mtime and size) and reloads + the state if necessary. This can be disabled via `auto_reload=False`. + + Example: + >>> table = Table("users.jsonlt", key="id") + >>> table.get("alice") + {'id': 'alice', 'role': 'admin'} + >>> table.has("bob") + False + >>> table.count() + 1 + """ + + __slots__: ClassVar[tuple[str, ...]] = ( + "_active_transaction", + "_auto_reload", + "_cached_sorted_keys", + "_file_mtime", + "_file_size", + "_fs", + "_header", + "_key_specifier", + "_lock_timeout", + "_max_file_size", + "_path", + "_state", + ) + + _path: Path + _key_specifier: "KeySpecifier | None" + _auto_reload: bool + _lock_timeout: float | None + _max_file_size: int | None + _fs: FileSystem + _header: "Header | None" + _state: "dict[Key, JSONObject]" + _file_mtime: float + _file_size: int + _active_transaction: "Transaction | None" + _cached_sorted_keys: list[Key] | None + + def __init__( + self, + path: "Path | str", + key: "KeySpecifier | None" = None, + *, + auto_reload: bool = True, + lock_timeout: float | None = None, + max_file_size: int | None = None, + _fs: "FileSystem | None" = None, + ) -> None: + """Open or create a table at the given path. + + Args: + path: Path to the JSONLT file. + key: Key specifier for the table. If the file has a header with + a key specifier, this must match or be omitted. If the file + has operations but no header, this is required. + auto_reload: If True (default), check for file changes before + each read operation and reload if necessary. + lock_timeout: Maximum seconds to wait for file lock on write + operations. None means wait indefinitely. + max_file_size: Maximum allowed file size in bytes when loading + the file. If the file exceeds this limit, LimitError is raised. + If None (default), no limit is enforced. + _fs: Internal filesystem abstraction for testing. Do not use. + + Raises: + FileError: If the file cannot be read. + LimitError: If the file size exceeds max_file_size. + ParseError: If the file contains invalid content. + InvalidKeyError: If the key specifier is invalid or mismatches + the header, or if the file has operations but no key specifier + can be determined. 
+ """ + self._path = Path(path) if isinstance(path, str) else path + self._auto_reload = auto_reload + self._lock_timeout = lock_timeout + self._max_file_size = max_file_size + self._fs = RealFileSystem() if _fs is None else _fs + + # These will be set by _load() + self._header = None + self._state = {} + self._file_mtime = 0.0 + self._file_size = 0 + self._key_specifier = None + self._active_transaction = None + self._cached_sorted_keys = None + + # Initial load + self._load(key) + + def _load(self, caller_key: "KeySpecifier | None" = None) -> None: + """Load or reload the table from disk. + + This method reads the file, parses it, validates the key specifier, + and computes the logical state. + + Args: + caller_key: Key specifier provided by caller (only used on + initial load, not on auto-reload). + + Raises: + FileError: If the file cannot be read. + ParseError: If the file contains invalid content. + InvalidKeyError: If the key specifier is invalid or mismatches + the header, or if the file has operations but no key specifier. + """ + # Check if file exists - if not, treat as empty table + if not self._path.exists(): + self._load_empty_table(caller_key) + return + + # Read and parse the file + header, operations = read_table_file( + self._path, max_file_size=self._max_file_size + ) + self._header = header + + # Track file stats for auto-reload + self._update_file_stats() + + # Resolve which key specifier to use + resolved_key = self._resolve_key_specifier(caller_key, header, operations) + if resolved_key is None: + # Empty file with no key specifier - OK for now + self._key_specifier = None + self._state = {} + self._cached_sorted_keys = None + return + + self._key_specifier = resolved_key + + # Compute logical state if we have operations + if operations: + self._state = compute_logical_state(operations, self._key_specifier) + else: + self._state = {} + self._cached_sorted_keys = None + + def _load_from_content(self, content: bytes) -> None: + """Load table state from bytes content. + + This is used when we already have the file content in memory + (e.g., read via a locked file handle on Windows). + + Args: + content: Raw bytes of the file content. + + Raises: + ParseError: If the content contains invalid data. 
+ """ + if not content: + # Empty content - nothing to load + self._state = {} + self._cached_sorted_keys = None + return + + # Parse the content + header, operations = parse_table_content(content) + self._header = header + + # Resolve key specifier (use existing since this is a reload) + resolved_key = self._resolve_key_specifier(None, header, operations) + if resolved_key is None: + self._state = {} + self._cached_sorted_keys = None + return + + self._key_specifier = resolved_key + + # Compute logical state if we have operations + if operations: + self._state = compute_logical_state(operations, self._key_specifier) + else: + self._state = {} + self._cached_sorted_keys = None + + def _load_empty_table(self, caller_key: "KeySpecifier | None") -> None: + """Initialize state for a non-existent file.""" + self._header = None + self._state = {} + self._cached_sorted_keys = None + self._file_mtime = 0.0 + self._file_size = 0 + + # For new files, use caller's key specifier if provided + if caller_key is not None: + self._key_specifier = normalize_key_specifier(caller_key) + # Otherwise keep existing key specifier (may be None) + + def _update_file_stats(self) -> None: + """Update cached file mtime and size for auto-reload detection.""" + stats = self._fs.stat(self._path) + if not stats.exists: + msg = "cannot stat file: file does not exist" + raise FileError(msg) + self._file_mtime = stats.mtime + self._file_size = stats.size + + def _try_update_stats(self) -> None: + """Update cached file stats, ignoring errors.""" + try: + stats = self._fs.stat(self._path) + if stats.exists: + self._file_mtime = stats.mtime + self._file_size = stats.size + except FileError: + # Ignore stat failures - stats will refresh on next read + pass + + def _reload_if_changed(self, cached_mtime: float, cached_size: int) -> None: + """Reload file if stats differ from cached values. + + Called inside lock during transaction commit. If the file's mtime + and size match the cached values, the file is unchanged and we + skip the expensive reload. + + Args: + cached_mtime: File mtime when transaction started. + cached_size: File size when transaction started. + """ + stats = self._fs.stat(self._path) + if not stats.exists: + self._load() + return + + if stats.mtime != cached_mtime or stats.size != cached_size: + # File changed - full reload required + self._load() + # else: file unchanged, state is already current + + def _resolve_key_specifier( + self, + caller_key: "KeySpecifier | None", + header: "Header | None", + operations: "list[JSONObject]", + ) -> "KeySpecifier | None": + """Resolve which key specifier to use. + + Returns the resolved key specifier, or None if the file is empty + and no key specifier is available. + + Raises: + InvalidKeyError: If caller and header key specifiers conflict, + or if operations exist but no key specifier is available. 
+ """ + header_key = header.key if header is not None else None + + if caller_key is not None and header_key is not None: + # Both provided - must match + caller_normalized = normalize_key_specifier(caller_key) + header_normalized = normalize_key_specifier(header_key) + if not key_specifiers_match(caller_normalized, header_normalized): + msg = ( + f"key specifier mismatch: caller provided {caller_key!r} " + f"but header specifies {header_key!r}" + ) + raise InvalidKeyError(msg) + return header_normalized + + if header_key is not None: + return normalize_key_specifier(header_key) + + if caller_key is not None: + return normalize_key_specifier(caller_key) + + if self._key_specifier is not None: + # Keep existing key specifier (from initial load) + return self._key_specifier + + if operations: + # File has operations but no key specifier - error + msg = ( + "file has operations but no key specifier: " + "provide key parameter or add header with key field" + ) + raise InvalidKeyError(msg) + + # Empty file with no key specifier + return None + + def _maybe_reload(self) -> None: + """Check if file changed and reload if necessary. + + This is called before read operations when auto_reload is enabled. + """ + if not self._auto_reload: + return + + stats = self._fs.stat(self._path) + if not stats.exists: + # File was deleted - clear state + if self._file_size != 0 or self._file_mtime != 0.0: + self._header = None + self._state = {} + self._cached_sorted_keys = None + self._file_mtime = 0.0 + self._file_size = 0 + return + + if stats.mtime != self._file_mtime or stats.size != self._file_size: + self._load() + + @override + def _get_state(self) -> "dict[Key, JSONObject]": + """Return the table state dictionary.""" + return self._state + + @override + def _prepare_read(self) -> None: + """Check for file changes and reload if necessary.""" + self._maybe_reload() + + @property + def path(self) -> Path: + """The path to the table file.""" + return self._path + + @property + def key_specifier(self) -> "KeySpecifier | None": + """The key specifier for this table.""" + return self._key_specifier + + @property + def header(self) -> "Header | None": + """The header of the table file, if present.""" + self._maybe_reload() + return self._header + + def reload(self) -> None: + """Force a reload of the table from disk. + + This method is useful when `auto_reload=False` and you want to + manually refresh the table state after external changes. + + Raises: + FileError: If the file cannot be read. + ParseError: If the file contains invalid content. + """ + self._load() + self._cached_sorted_keys = None + + # --- Write Operations --- + + def _require_key_specifier(self) -> KeySpecifier: + """Return key specifier or raise InvalidKeyError if not set.""" + if self._key_specifier is None: + msg = "key specifier is required for write operations" + raise InvalidKeyError(msg) + return self._key_specifier + + def put(self, record: "JSONObject") -> None: + """Insert or update a record. + + Validates the record, serializes it deterministically, and appends + to the file under exclusive lock. + + Args: + record: The record to insert/update. Must contain key fields. + + Raises: + InvalidKeyError: If key specifier not set, record missing key fields, + has invalid key values, contains $-prefixed fields, or contains + unpaired surrogates. + LimitError: If key length > 1024 bytes or record size > 1 MiB. + LockError: If file lock cannot be acquired within timeout. + FileError: If file write fails. 
+ """ + key_specifier = self._require_key_specifier() + + # Check for unpaired surrogates in all strings + validate_no_surrogates(record) + + # Validate record structure (missing fields, invalid key types, $ fields) + validate_record(record, key_specifier) + + # Extract and validate key + key = extract_key(record, key_specifier) + key_len = key_length(key) + if key_len > MAX_KEY_LENGTH: + msg = f"key length {key_len} bytes exceeds maximum {MAX_KEY_LENGTH}" + raise LimitError(msg) + + # Serialize record + serialized = serialize_json(record) + record_bytes = utf8_byte_length(serialized) + if record_bytes > MAX_RECORD_SIZE: + msg = f"record size {record_bytes} bytes exceeds maximum {MAX_RECORD_SIZE}" + raise LimitError(msg) + + # Write under lock (put doesn't return whether key existed) + _ = self._write_with_lock(serialized, key, record) + + def _finalize_write(self, key: Key, record: "JSONObject | None") -> bool: + """Update state after successful write. Returns whether key existed.""" + existed = key in self._state + if record is not None: + self._state[key] = record + else: + _ = self._state.pop(key, None) + self._cached_sorted_keys = None + self._update_file_stats() + return existed + + _MAX_WRITE_RETRIES: ClassVar[int] = 3 + + def _write_with_lock( + self, + line: str, + key: Key, + record: "JSONObject | None", + *, + _retries: int = 0, + ) -> bool: + """Write a line to the file under exclusive lock. + + On Windows, LockFileEx prevents other handles (even from the same process) + from accessing the locked file. So we must read and write using the same + file handle that holds the lock. + + Args: + line: The JSON line to write. + key: The key for state update. + record: The record to add to state, or None for delete. + _retries: Internal retry counter (do not pass externally). + + Returns: + True if the key existed in the table (after reload), False otherwise. + """ + self._fs.ensure_parent_dir(self._path) + + try: + with self._fs.open_locked(self._path, "r+b", self._lock_timeout) as f: + content = f.read() + self._load_from_content(content) + _ = f.seek(0, 2) + encoded = (line + "\n").encode("utf-8") + _ = f.write(encoded) + f.sync() + return self._finalize_write(key, record) + except FileNotFoundError: + try: + with self._fs.open_locked(self._path, "xb", self._lock_timeout) as f: + encoded = (line + "\n").encode("utf-8") + _ = f.write(encoded) + f.sync() + return self._finalize_write(key, record) + except FileExistsError: + if _retries >= self._MAX_WRITE_RETRIES: + msg = "cannot acquire stable file handle after multiple retries" + raise FileError(msg) from None + return self._write_with_lock(line, key, record, _retries=_retries + 1) + + def delete(self, key: Key) -> bool: + """Delete a record by key. + + Writes a tombstone to the file. Returns whether the record existed. + + Args: + key: The key to delete. Must match key specifier arity. + + Returns: + True if record existed, False otherwise. + + Raises: + InvalidKeyError: If key specifier not set, key is invalid, + or key arity doesn't match specifier. + LockError: If file lock cannot be acquired within timeout. + FileError: If file write fails. 
+ """ + key_specifier = self._require_key_specifier() + + # Validate key arity matches specifier + validate_key_arity(key, key_specifier) + + # Build tombstone + tombstone = build_tombstone(key, key_specifier) + serialized = serialize_json(tombstone) + + # Write under lock - returns whether key existed (checked after reload) + return self._write_with_lock(serialized, key, None) + + def clear(self) -> None: + """Remove all records from the table. + + Atomically replaces file with header only (if present). + + Raises: + LockError: If file lock cannot be acquired within timeout. + FileError: If file operations fail. + """ + # Build lines: header only if present + lines: list[str] = [] + if self._header is not None: + lines.append(serialize_header(self._header)) + + self._fs.ensure_parent_dir(self._path) + + # Atomically replace file + # On Windows, we must release the lock before atomic_replace because + # you can't rename to a locked file. We accept the small race window. + stats = self._fs.stat(self._path) + if stats.exists: + # Read current content under lock + with self._fs.open_locked(self._path, "r+b", self._lock_timeout) as f: + content = f.read() + self._load_from_content(content) + lines = [] + if self._header is not None: + lines.append(serialize_header(self._header)) + # Lock released, now do atomic replace + self._fs.atomic_replace(self._path, lines) + self._state = {} + self._cached_sorted_keys = None + self._try_update_stats() + elif lines: + # File doesn't exist - create with header + # No lock needed since atomic_replace handles races via temp file + self._fs.atomic_replace(self._path, lines) + self._state = {} + self._cached_sorted_keys = None + self._try_update_stats() + else: + # No header, nothing to write for empty table + self._state = {} + self._cached_sorted_keys = None + + def compact(self) -> None: + """Compact the table to its minimal representation. + + Rewrites the file atomically with only the header (if present) and + current records in key order. Removes all tombstones and superseded + record versions. + + Raises: + LockError: If file lock cannot be acquired within timeout. + FileError: If file operations fail. + """ + self._fs.ensure_parent_dir(self._path) + + # Atomically replace file with compacted content + stats = self._fs.stat(self._path) + if stats.exists: + with self._fs.open_locked(self._path, "r+b", self._lock_timeout) as f: + # Reload state using locked handle (Windows-compatible) + content = f.read() + self._load_from_content(content) + # Build lines from fresh state + lines: list[str] = [] + if self._header is not None: + lines.append(serialize_header(self._header)) + lines.extend(serialize_json(r) for r in self._sorted_records()) + # Lock released, now do atomic replace (Windows can't rename locked) + self._fs.atomic_replace(self._path, lines) + self._try_update_stats() + elif self._header is not None or self._state: + # File doesn't exist but we have in-memory content - create it + # No lock needed since atomic_replace handles races via temp file + lines = [] + if self._header is not None: + lines.append(serialize_header(self._header)) + lines.extend(serialize_json(r) for r in self._sorted_records()) + self._fs.atomic_replace(self._path, lines) + self._try_update_stats() + else: + # No header, no records - nothing to write + self._state = {} + + # --- Transaction Operations --- + + def transaction(self) -> "Transaction": + """Start a new transaction. 
+ + Returns a Transaction object that provides snapshot isolation for reads + and buffered writes. Use as a context manager for automatic commit/abort. + + Example: + >>> with table.transaction() as tx: + ... tx.put({"id": "alice", "v": 1}) + ... tx.delete("bob") + ... # Commits on successful exit + + Returns: + A new Transaction. + + Raises: + InvalidKeyError: If no key specifier is set. + TransactionError: If a transaction is already active. + """ + # Import here to avoid circular import at runtime + from ._transaction import Transaction as TransactionImpl # noqa: PLC0415 + + key_specifier = self._require_key_specifier() + + if self._active_transaction is not None: + msg = "a transaction is already active on this table" + raise TransactionError(msg) + + # Reload to get fresh state + self._maybe_reload() + + # Create transaction with current state + tx: Transaction = TransactionImpl(self, key_specifier, self._state) + self._active_transaction = tx + return tx + + def _end_transaction(self) -> None: + """Clear the active transaction. + + Called by Transaction.commit() and Transaction.abort(). + """ + self._active_transaction = None + + def _commit_transaction_buffer( # noqa: PLR0913 + self, + lines: list[str], + start_state: "dict[Key, JSONObject]", + written_keys: set[Key], + buffer_updates: "dict[Key, JSONObject | None]", + start_mtime: float, + start_size: int, + *, + _retries: int = 0, + ) -> None: + """Commit a transaction's buffered writes. + + Called by Transaction.commit() to write buffered changes to the file. + Performs conflict detection and writes all lines under exclusive lock. + + Args: + lines: Serialized JSON lines to append. + start_state: Snapshot of table state when transaction started. + written_keys: Keys that were modified in the transaction. + buffer_updates: Map of key -> record (or None for delete). + start_mtime: File mtime when transaction started. + start_size: File size when transaction started. + _retries: Internal retry counter (do not pass externally). + + Raises: + ConflictError: If a write-write conflict is detected. + LockError: If file lock cannot be acquired within timeout. + FileError: If file write fails. 
+ """ + self._fs.ensure_parent_dir(self._path) + + stats = self._fs.stat(self._path) + if stats.exists: + with self._fs.open_locked(self._path, "r+b", self._lock_timeout) as f: + # Read and reload if file changed (Windows-compatible) + content = f.read() + current_size = len(content) + if current_size != start_size: + # File changed - reload from content + self._load_from_content(content) + # Check for conflicts + self._detect_conflicts(start_state, written_keys) + # Write all buffered lines using same handle + _ = f.seek(0, 2) # Seek to end + for line in lines: + encoded = (line + "\n").encode("utf-8") + _ = f.write(encoded) + f.sync() + # Update state from buffer + self._apply_buffer_updates(buffer_updates) + self._try_update_stats() + else: + # File doesn't exist - create it + try: + with self._fs.open_locked(self._path, "xb", self._lock_timeout) as f: + # Check for conflicts (should be none since start_state + # was empty) + self._detect_conflicts(start_state, written_keys) + # Write all buffered lines + for line in lines: + encoded = (line + "\n").encode("utf-8") + _ = f.write(encoded) + f.sync() + # Update state from buffer + self._apply_buffer_updates(buffer_updates) + self._try_update_stats() + except FileExistsError: + # File was created between our check and open - retry + if _retries >= self._MAX_WRITE_RETRIES: + msg = "cannot acquire stable file handle after multiple retries" + raise FileError(msg) from None + self._commit_transaction_buffer( + lines, + start_state, + written_keys, + buffer_updates, + start_mtime, + start_size, + _retries=_retries + 1, + ) + + def _detect_conflicts( + self, + start_state: "dict[Key, JSONObject]", + written_keys: set[Key], + ) -> None: + """Detect write-write conflicts. + + For each key in written_keys, compare the current state (after reload) + with the start_state. If they differ, another process modified the + key since the transaction started. + + Args: + start_state: Snapshot of table state when transaction started. + written_keys: Keys that were modified in the transaction. + + Raises: + ConflictError: If a write-write conflict is detected. + """ + for key in written_keys: + key_in_current = key in self._state + key_in_start = key in start_state + + # Check if key presence differs + if key_in_current != key_in_start: + msg = f"conflict detected: key {key!r} was modified externally" + expected = start_state.get(key) + actual = self._state.get(key) + raise ConflictError(msg, key, expected, actual) + + # If neither has the key, no conflict + if not key_in_current: + continue + + # Both have the key - compare records + if self._state[key] != start_state[key]: + msg = f"conflict detected: key {key!r} was modified externally" + expected = start_state.get(key) + actual = self._state.get(key) + raise ConflictError(msg, key, expected, actual) + + def _apply_buffer_updates( + self, + buffer_updates: "dict[Key, JSONObject | None]", + ) -> None: + """Apply buffered updates to the table state. + + Args: + buffer_updates: Map of key -> record (or None for delete). 
+ """ + for key, record in buffer_updates.items(): + if record is not None: + self._state[key] = record + else: + _ = self._state.pop(key, None) + self._cached_sorted_keys = None + + @override + def __repr__(self) -> str: + """Return a string representation of the table.""" + return f"Table({self._path!r}, key={self._key_specifier!r})" diff --git a/src/jsonlt/_transaction.py b/src/jsonlt/_transaction.py new file mode 100644 index 0000000..100e289 --- /dev/null +++ b/src/jsonlt/_transaction.py @@ -0,0 +1,314 @@ +"""Transaction class for JSONLT atomic operations. + +This module provides the Transaction class, which enables snapshot isolation +for reads and buffered writes until commit. Conflicts are detected at commit +time using optimistic concurrency control. +""" + +import copy +from typing import TYPE_CHECKING, ClassVar +from typing_extensions import override + +from ._constants import MAX_KEY_LENGTH, MAX_RECORD_SIZE +from ._encoding import validate_no_surrogates +from ._exceptions import LimitError, TransactionError +from ._json import serialize_json, utf8_byte_length +from ._keys import Key, KeySpecifier, key_length, validate_key_arity +from ._readable import ReadableMixin +from ._records import build_tombstone, extract_key, validate_record + +if TYPE_CHECKING: + from ._json import JSONObject + from ._table import Table + + +class Transaction(ReadableMixin): + """A transaction for atomic operations on a JSONLT table. + + Transactions provide snapshot isolation: reads see a consistent snapshot + taken when the transaction started, plus any writes made within the + transaction. Writes are buffered until commit. + + At commit time, the transaction detects conflicts by checking whether + any key it modified has been changed externally since the transaction + started. If a conflict is detected, the transaction raises ConflictError + and the table retains the externally modified state. + + Example: + >>> table = Table("users.jsonlt", key="id") + >>> with table.transaction() as tx: + ... user = tx.get("alice") + ... if user: + ... tx.put({"id": "alice", "visits": user["visits"] + 1}) + + Transactions can also be managed manually: + >>> tx = table.transaction() + >>> try: + ... tx.put({"id": "bob", "role": "admin"}) + ... tx.commit() + ... except: + ... tx.abort() + ... raise + """ + + __slots__: ClassVar[tuple[str, ...]] = ( + "_buffer_updates", + "_cached_sorted_keys", + "_file_mtime", + "_file_size", + "_finalized", + "_key_specifier", + "_snapshot", + "_start_state", + "_table", + "_written_keys", + ) + + _table: "Table" + _key_specifier: KeySpecifier + _snapshot: "dict[Key, JSONObject]" + _start_state: "dict[Key, JSONObject]" + _buffer_updates: "dict[Key, JSONObject | None]" + _written_keys: set[Key] + _finalized: bool + _file_mtime: float + _file_size: int + _cached_sorted_keys: list[Key] | None + + def __init__( + self, + table: "Table", + key_specifier: KeySpecifier, + state: "dict[Key, JSONObject]", + ) -> None: + """Initialize a transaction. + + This is an internal constructor. Use Table.transaction() to create + transactions. + + Args: + table: The parent table. + key_specifier: The key specifier for this table. + state: The current table state (will be deep copied). + """ + self._table = table + self._key_specifier = key_specifier + # Deep copy state for snapshot isolation + self._snapshot = copy.deepcopy(state) + # Shallow copy for conflict detection - values compared with == against + # reloaded state. Safe because _start_state values are never modified. 
+ self._start_state = state.copy() + self._buffer_updates = {} + self._written_keys = set() + self._finalized = False + # Cache file stats for skip-reload optimization at commit time + # Access to table's private attributes is intentional (friend class pattern) + self._file_mtime = table._file_mtime # pyright: ignore[reportPrivateUsage] # noqa: SLF001 + self._file_size = table._file_size # pyright: ignore[reportPrivateUsage] # noqa: SLF001 + self._cached_sorted_keys = None + + def _require_active(self) -> None: + """Ensure the transaction is still active. + + Raises: + TransactionError: If the transaction has already been committed + or aborted. + """ + if self._finalized: + msg = "transaction has already been committed or aborted" + raise TransactionError(msg) + + @override + def _get_state(self) -> "dict[Key, JSONObject]": + """Return the transaction snapshot dictionary.""" + return self._snapshot + + @override + def _prepare_read(self) -> None: + """Ensure the transaction is still active.""" + self._require_active() + + def put(self, record: "JSONObject") -> None: + """Insert or update a record in the transaction. + + The record is validated and serialized, then buffered for commit. + The transaction snapshot is updated immediately. + + Args: + record: The record to insert/update. Must contain key fields. + + Raises: + TransactionError: If the transaction is no longer active. + InvalidKeyError: If record missing key fields, has invalid key + values, contains $-prefixed fields, or contains unpaired + surrogates. + LimitError: If key length > 1024 bytes or record size > 1 MiB. + """ + self._require_active() + + # Check for unpaired surrogates in all strings + validate_no_surrogates(record) + + # Validate record structure (missing fields, invalid key types, $ fields) + validate_record(record, self._key_specifier) + + # Extract and validate key + key = extract_key(record, self._key_specifier) + key_len = key_length(key) + if key_len > MAX_KEY_LENGTH: + msg = f"key length {key_len} bytes exceeds maximum {MAX_KEY_LENGTH}" + raise LimitError(msg) + + # Serialize record to check size limit (we don't store the serialized form) + serialized = serialize_json(record) + record_bytes = utf8_byte_length(serialized) + if record_bytes > MAX_RECORD_SIZE: + msg = f"record size {record_bytes} bytes exceeds maximum {MAX_RECORD_SIZE}" + raise LimitError(msg) + + # Buffer the update (only keep latest value per key) + record_copy = copy.deepcopy(record) + self._buffer_updates[key] = record_copy + self._written_keys.add(key) + + # Update snapshot + self._snapshot[key] = record_copy + self._cached_sorted_keys = None + + def delete(self, key: Key) -> bool: + """Delete a record by key in the transaction. + + Buffers a tombstone for commit. The transaction snapshot is updated + immediately. + + Args: + key: The key to delete. Must match key specifier arity. + + Returns: + True if record existed in snapshot, False otherwise. + + Raises: + TransactionError: If the transaction is no longer active. + InvalidKeyError: If key arity doesn't match specifier. 
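+
+        Example (a sketch; the delete is buffered and written at commit):
+            >>> with table.transaction() as tx:
+            ...     _ = tx.delete("bob")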
+ """ + self._require_active() + + # Validate key arity matches specifier + validate_key_arity(key, self._key_specifier) + + # Check if key exists in snapshot + existed = key in self._snapshot + + # Buffer the delete (only keep latest state per key) + self._buffer_updates[key] = None + self._written_keys.add(key) + + # Update snapshot + if existed: + del self._snapshot[key] + self._cached_sorted_keys = None + + return existed + + def commit(self) -> None: + """Commit the transaction. + + Writes all buffered changes to the file under exclusive lock. + Detects conflicts by checking if any key modified by this transaction + was also modified externally since the transaction started. + + Raises: + TransactionError: If the transaction is no longer active. + ConflictError: If a write-write conflict is detected. + LockError: If file lock cannot be acquired within timeout. + FileError: If file write fails. + """ + self._require_active() + + try: + # If no updates, just mark as committed + if not self._buffer_updates: + return + + # Build deduplicated buffer from _buffer_updates at commit time + # Dict preserves insertion order in Python 3.7+, so each key appears once + lines: list[str] = [] + for key, value in self._buffer_updates.items(): + if value is None: + # Tombstone (delete) + tombstone = build_tombstone(key, self._key_specifier) + lines.append(serialize_json(tombstone)) + else: + # Record (put) + lines.append(serialize_json(value)) + + # Commit via table (handles locking and conflict detection) + # Transaction is a friend class of Table - protected access is intentional + self._table._commit_transaction_buffer( # pyright: ignore[reportPrivateUsage] # noqa: SLF001 + lines, + self._start_state, + self._written_keys, + self._buffer_updates, + self._file_mtime, + self._file_size, + ) + finally: + self._finalized = True + # Transaction is a friend class of Table - protected access is intentional + self._table._end_transaction() # pyright: ignore[reportPrivateUsage] # noqa: SLF001 + + def abort(self) -> None: + """Abort the transaction. + + Discards all buffered changes. The table state is unchanged. + + Raises: + TransactionError: If the transaction is no longer active. + """ + self._require_active() + self._finalized = True + # Transaction is a friend class of Table - protected access is intentional + self._table._end_transaction() # pyright: ignore[reportPrivateUsage] # noqa: SLF001 + + def __enter__(self) -> "Transaction": + """Enter the transaction context. + + Returns: + This transaction. + """ + return self + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: object, + ) -> bool: + """Exit the transaction context. + + If no exception occurred, commits the transaction. Otherwise, + aborts the transaction. Exceptions are not suppressed. + + Args: + exc_type: Exception type if an exception occurred. + exc_val: Exception value if an exception occurred. + exc_tb: Exception traceback if an exception occurred. + + Returns: + False (exceptions are not suppressed). 
+ """ + if self._finalized: + return False + if exc_type is None: + self.commit() + else: + self.abort() + return False + + @override + def __repr__(self) -> str: + """Return a string representation of the transaction.""" + status = "finalized" if self._finalized else "active" + return ( + f"Transaction({self._table._path!r}, key={self._key_specifier!r}, {status})" # pyright: ignore[reportPrivateUsage] # noqa: SLF001 + ) diff --git a/src/jsonlt/_writer.py b/src/jsonlt/_writer.py new file mode 100644 index 0000000..adf2900 --- /dev/null +++ b/src/jsonlt/_writer.py @@ -0,0 +1,130 @@ +"""File writing operations for JSONLT. + +This module provides low-level file writing with durability guarantees. +""" + +import contextlib +import os +import sys +import tempfile +from pathlib import Path +from typing import TYPE_CHECKING + +from ._exceptions import FileError + +if TYPE_CHECKING: + from collections.abc import Sequence + + +def append_line(path: Path, line: str) -> None: + """Append a single line to file with fsync. + + Opens file in append mode, writes line + newline, fsyncs. + Caller must hold exclusive lock. + + Args: + path: Path to the file. + line: JSON line to append (without trailing newline). + + Raises: + FileError: If append or sync fails. + """ + try: + with path.open("a", encoding="utf-8") as f: + _ = f.write(line) + _ = f.write("\n") + f.flush() + os.fsync(f.fileno()) + except OSError as e: + msg = f"cannot append to file: {e}" + raise FileError(msg) from e + + +def append_lines(path: Path, lines: "Sequence[str]") -> None: + """Append multiple lines to file with single fsync. + + Opens file in append mode, writes all lines, single flush + fsync. + Caller must hold exclusive lock. + + Args: + path: Path to the file. + lines: JSON lines to append (without trailing newlines). + + Raises: + FileError: If append or sync fails. + """ + if not lines: + return + try: + with path.open("a", encoding="utf-8") as f: + for line in lines: + _ = f.write(line) + _ = f.write("\n") + f.flush() + os.fsync(f.fileno()) + except OSError as e: + msg = f"cannot append to file: {e}" + raise FileError(msg) from e + + +def atomic_replace(path: Path, lines: "Sequence[str]") -> None: + """Atomically replace file contents with lines. + + Writes to temp file in same directory, fsyncs, renames. + Used by clear() and future compact(). + + Args: + path: Target file path. + lines: Lines to write (newlines added automatically). + + Raises: + FileError: If write, sync, or rename fails. 
+ """ + # Create temp file in same directory to ensure atomic rename works + parent_dir = path.parent + temp_fd = -1 + temp_path: Path | None = None + + try: + # Create temp file + temp_fd, temp_path_str = tempfile.mkstemp( + suffix=".tmp", + prefix=".jsonlt_", + dir=parent_dir, + ) + temp_path = Path(temp_path_str) + + # Write content + with os.fdopen(temp_fd, "w", encoding="utf-8") as f: + temp_fd = -1 # Ownership transferred to fdopen + for line in lines: + _ = f.write(line) + _ = f.write("\n") + f.flush() + os.fsync(f.fileno()) + + # Atomic rename + _ = temp_path.replace(path) + temp_path = None # Successfully moved, don't delete + + # fsync the directory to ensure the rename is durable + # This is a POSIX-specific operation - Windows doesn't support + # opening directories and NTFS handles atomic renames differently + if sys.platform != "win32": + dir_fd = os.open(str(parent_dir), os.O_RDONLY) + try: + os.fsync(dir_fd) + finally: + os.close(dir_fd) + + except OSError as e: + msg = f"cannot write file atomically: {e}" + raise FileError(msg) from e + finally: + # Clean up temp file if it still exists (defensive cleanup) + if temp_fd != -1: # pragma: no cover + with contextlib.suppress(OSError): + os.close(temp_fd) + if temp_path is not None: # pragma: no cover + with contextlib.suppress(OSError): + temp_path.unlink() diff --git a/tests/conformance/conftest.py b/tests/conformance/conftest.py deleted file mode 100644 index b655700..0000000 --- a/tests/conformance/conftest.py +++ /dev/null @@ -1,10 +0,0 @@ -from pathlib import Path - -import pytest - - -def pytest_collection_modifyitems(items: list[pytest.Item]) -> None: - test_dir = Path(__file__).parent - for item in items: - if Path(item.fspath).is_relative_to(test_dir): - item.add_marker(pytest.mark.conformance) diff --git a/tests/conformance/test_format_conformance.py b/tests/conformance/test_format_conformance.py deleted file mode 100644 index e69de29..0000000 diff --git a/tests/conformance/test_generator_conformance.py b/tests/conformance/test_generator_conformance.py deleted file mode 100644 index e69de29..0000000 diff --git a/tests/conformance/test_header_conformance.py b/tests/conformance/test_header_conformance.py deleted file mode 100644 index e69de29..0000000 diff --git a/tests/conformance/test_keys_conformance.py b/tests/conformance/test_keys_conformance.py deleted file mode 100644 index e69de29..0000000 diff --git a/tests/conformance/test_ops_conformance.py b/tests/conformance/test_ops_conformance.py deleted file mode 100644 index e69de29..0000000 diff --git a/tests/conformance/test_recovery_conformance.py b/tests/conformance/test_recovery_conformance.py deleted file mode 100644 index e69de29..0000000 diff --git a/tests/conformance/test_state_conformance.py b/tests/conformance/test_state_conformance.py deleted file mode 100644 index e69de29..0000000 diff --git a/tests/conformance/test_transactions_conformance.py b/tests/conformance/test_transactions_conformance.py deleted file mode 100644 index e69de29..0000000 diff --git a/tests/conformance/test_compaction_conformance.py b/tests/fakes/__init__.py similarity index 100% rename from tests/conformance/test_compaction_conformance.py rename to tests/fakes/__init__.py diff --git a/tests/fakes/fake_filesystem.py b/tests/fakes/fake_filesystem.py new file mode 100644 index 0000000..b00f8d5 --- /dev/null +++ b/tests/fakes/fake_filesystem.py @@ -0,0 +1,138 @@ +"""Fake filesystem for testing.""" + +import time +from contextlib import contextmanager +from dataclasses import dataclass, 
field +from typing import TYPE_CHECKING, ClassVar + +from jsonlt._exceptions import FileError +from jsonlt._filesystem import FileStats, LockedFile + +if TYPE_CHECKING: + from collections.abc import Iterator, Sequence + from pathlib import Path + + +@dataclass +class FakeFile: + """In-memory file.""" + + content: bytes = b"" + mtime: float = 0.0 + + +class FakeLockedFile: + """Fake locked file handle.""" + + __slots__: ClassVar[tuple[str, ...]] = ("_file", "_position") + + _file: FakeFile + _position: int + + def __init__(self, fake_file: FakeFile) -> None: + self._file = fake_file + self._position = 0 + + def read(self) -> bytes: + data = self._file.content[self._position :] + self._position = len(self._file.content) + return data + + def write(self, data: bytes) -> int: + content = self._file.content + self._file.content = content[: self._position] + data + self._position += len(data) + return len(data) + + def seek(self, offset: int, whence: int = 0) -> int: + if whence == 0: # SEEK_SET + self._position = offset + elif whence == 1: # SEEK_CUR + self._position += offset + elif whence == 2: # SEEK_END + self._position = len(self._file.content) + offset + return self._position + + def sync(self) -> None: + # Update mtime on sync + self._file.mtime = time.time() + + +# Verify FakeLockedFile satisfies LockedFile protocol at module load time +_: type[LockedFile] = FakeLockedFile # type: ignore[type-abstract] + + +@dataclass +class FakeFileSystem: + """In-memory filesystem for testing.""" + + files: dict["Path", FakeFile] = field(default_factory=dict) + fail_stat: set["Path"] = field(default_factory=set) + fail_open: set["Path"] = field(default_factory=set) + fail_ensure_parent: set["Path"] = field(default_factory=set) + + def stat(self, path: "Path") -> FileStats: + if path in self.fail_stat: + msg = "simulated stat error" + raise FileError(msg) + if path not in self.files: + return FileStats(mtime=0.0, size=0, exists=False) + f = self.files[path] + return FileStats(mtime=f.mtime, size=len(f.content), exists=True) + + def read_bytes(self, path: "Path", *, max_size: int | None = None) -> bytes: + if path not in self.files: + msg = "file not found" + raise FileError(msg) + content = self.files[path].content + if max_size is not None and len(content) > max_size: + msg = f"file size {len(content)} exceeds maximum {max_size}" + raise FileError(msg) + return content + + def ensure_parent_dir(self, path: "Path") -> None: + if path in self.fail_ensure_parent: + msg = "cannot create directory" + raise FileError(msg) + # No-op in fake filesystem + + @contextmanager + def open_locked( + self, + path: "Path", + mode: str, + timeout: float | None, + ) -> "Iterator[LockedFile]": + # timeout is intentionally unused in this fake implementation + del timeout + if path in self.fail_open: + msg = "simulated open error" + raise FileError(msg) + + if mode == "r+b": + if path not in self.files: + raise FileNotFoundError + fake_file = self.files[path] + elif mode == "xb": + if path in self.files: + raise FileExistsError + fake_file = FakeFile() + self.files[path] = fake_file + else: + msg = f"unsupported mode: {mode}" + raise ValueError(msg) + + yield FakeLockedFile(fake_file) + + def atomic_replace(self, path: "Path", lines: "Sequence[str]") -> None: + content = "".join(line + "\n" for line in lines).encode("utf-8") + self.files[path] = FakeFile(content=content, mtime=time.time()) + + # Test helpers + def set_content(self, path: "Path", content: bytes) -> None: + self.files[path] = FakeFile(content=content, 
mtime=time.time()) + + def get_content(self, path: "Path") -> bytes: + if path not in self.files: + raise KeyError(path) + return self.files[path].content diff --git a/tests/properties/test_key_comparison.py b/tests/properties/test_key_comparison.py new file mode 100644 index 0000000..e008ea0 --- /dev/null +++ b/tests/properties/test_key_comparison.py @@ -0,0 +1,102 @@ +from hypothesis import given, strategies as st + +from jsonlt._constants import MAX_INTEGER_KEY, MAX_TUPLE_ELEMENTS, MIN_INTEGER_KEY +from jsonlt._keys import compare_keys + +key_element_strategy = st.one_of( + st.text(), + st.integers(min_value=MIN_INTEGER_KEY, max_value=MAX_INTEGER_KEY), +) + +key_strategy = st.one_of( + st.text(), + st.integers(min_value=MIN_INTEGER_KEY, max_value=MAX_INTEGER_KEY), + st.tuples(*[key_element_strategy] * 1), + st.tuples(*[key_element_strategy] * 2), + st.lists(key_element_strategy, min_size=1, max_size=MAX_TUPLE_ELEMENTS).map(tuple), +) + + +class TestTotalOrderProperties: + @given(key_strategy) + def test_reflexivity(self, a: str | int | tuple[str | int, ...]) -> None: + assert compare_keys(a, a) == 0 + + @given(key_strategy, key_strategy) + def test_antisymmetry( + self, a: str | int | tuple[str | int, ...], b: str | int | tuple[str | int, ...] + ) -> None: + cmp_ab = compare_keys(a, b) + cmp_ba = compare_keys(b, a) + + if cmp_ab <= 0 and cmp_ba <= 0: + assert cmp_ab == 0 + assert cmp_ba == 0 + + @given(key_strategy, key_strategy, key_strategy) + def test_transitivity( + self, + a: str | int | tuple[str | int, ...], + b: str | int | tuple[str | int, ...], + c: str | int | tuple[str | int, ...], + ) -> None: + cmp_ab = compare_keys(a, b) + cmp_bc = compare_keys(b, c) + cmp_ac = compare_keys(a, c) + + if cmp_ab < 0 and cmp_bc < 0: + assert cmp_ac < 0 + + @given(key_strategy, key_strategy) + def test_totality( + self, a: str | int | tuple[str | int, ...], b: str | int | tuple[str | int, ...] + ) -> None: + cmp = compare_keys(a, b) + assert cmp in {-1, 0, 1} + + +class TestComparisonConsistency: + @given(key_strategy, key_strategy) + def test_reverse_comparison( + self, a: str | int | tuple[str | int, ...], b: str | int | tuple[str | int, ...] + ) -> None: + cmp_ab = compare_keys(a, b) + cmp_ba = compare_keys(b, a) + assert cmp_ab == -cmp_ba + + @given(key_strategy, key_strategy) + def test_equality_symmetry( + self, a: str | int | tuple[str | int, ...], b: str | int | tuple[str | int, ...] 
+ ) -> None: + if compare_keys(a, b) == 0: + assert compare_keys(b, a) == 0 + + +class TestTypeOrdering: + @given( + st.integers(min_value=MIN_INTEGER_KEY, max_value=MAX_INTEGER_KEY), + st.text(), + ) + def test_integer_before_string(self, i: int, s: str) -> None: + assert compare_keys(i, s) == -1 + assert compare_keys(s, i) == 1 + + @given( + st.text(), + st.lists(key_element_strategy, min_size=1, max_size=MAX_TUPLE_ELEMENTS).map( + tuple + ), + ) + def test_string_before_tuple(self, s: str, t: tuple[str | int, ...]) -> None: + assert compare_keys(s, t) == -1 + assert compare_keys(t, s) == 1 + + @given( + st.integers(min_value=MIN_INTEGER_KEY, max_value=MAX_INTEGER_KEY), + st.lists(key_element_strategy, min_size=1, max_size=MAX_TUPLE_ELEMENTS).map( + tuple + ), + ) + def test_integer_before_tuple(self, i: int, t: tuple[str | int, ...]) -> None: + assert compare_keys(i, t) == -1 + assert compare_keys(t, i) == 1 diff --git a/tests/unit/test_encoding.py b/tests/unit/test_encoding.py new file mode 100644 index 0000000..abcfc34 --- /dev/null +++ b/tests/unit/test_encoding.py @@ -0,0 +1,270 @@ +from typing import TYPE_CHECKING, cast + +import pytest + +from jsonlt._encoding import ( + has_unpaired_surrogates, + prepare_input, + strip_bom, + strip_cr_before_lf, + validate_no_surrogates, + validate_utf8, +) +from jsonlt._exceptions import ParseError + +if TYPE_CHECKING: + from jsonlt._json import JSONValue + + +class TestStripBom: + def test_strips_utf8_bom(self) -> None: + data = b'\xef\xbb\xbf{"id": 1}' + result = strip_bom(data) + assert result == b'{"id": 1}' + + def test_preserves_data_without_bom(self) -> None: + data = b'{"id": 1}' + result = strip_bom(data) + assert result == data + + def test_empty_input(self) -> None: + assert strip_bom(b"") == b"" + + def test_only_bom(self) -> None: + assert strip_bom(b"\xef\xbb\xbf") == b"" + + def test_bom_in_middle_not_stripped(self) -> None: + data = b'{"id": "\xef\xbb\xbf"}' + result = strip_bom(data) + assert result == data + + +class TestStripCrBeforeLf: + def test_strips_crlf_to_lf(self) -> None: + data = b'{"id": 1}\r\n{"id": 2}\r\n' + result = strip_cr_before_lf(data) + assert result == b'{"id": 1}\n{"id": 2}\n' + + def test_preserves_lf_only(self) -> None: + data = b'{"id": 1}\n{"id": 2}\n' + result = strip_cr_before_lf(data) + assert result == data + + def test_preserves_standalone_cr(self) -> None: + # CR not followed by LF should be preserved + data = b'{"id": 1}\r{"id": 2}' + result = strip_cr_before_lf(data) + assert result == data + + def test_mixed_line_endings(self) -> None: + data = b'{"id": 1}\r\n{"id": 2}\n{"id": 3}\r\n' + result = strip_cr_before_lf(data) + assert result == b'{"id": 1}\n{"id": 2}\n{"id": 3}\n' + + def test_empty_input(self) -> None: + assert strip_cr_before_lf(b"") == b"" + + +class TestValidateUtf8: + def test_valid_ascii(self) -> None: + result = validate_utf8(b'{"id": 1}') + assert result == '{"id": 1}' + + def test_valid_multibyte_utf8(self) -> None: + # "café" with é as 2-byte UTF-8 (0xC3 0xA9) + result = validate_utf8("café".encode()) + assert result == "café" + + def test_valid_emoji(self) -> None: + # Emoji 😀 (U+1F600) as 4-byte UTF-8 + result = validate_utf8("😀".encode()) + assert result == "😀" + + def test_valid_chinese(self) -> None: + # Chinese characters as 3-byte UTF-8 + result = validate_utf8("中文".encode()) + assert result == "中文" + + @pytest.mark.parametrize( + "data", + [ + # 2-byte overlong NUL (0xC0 0x80) + b"\xc0\x80", + # 2-byte overlong DEL (0xC1 0xBF) + b"\xc1\xbf", + # 3-byte overlong NUL 
(0xE0 0x80 0x80) + b"\xe0\x80\x80", + # 3-byte overlong slash (0xE0 0x80 0xAF) + b"\xe0\x80\xaf", + # 4-byte overlong NUL (0xF0 0x80 0x80 0x80) + b"\xf0\x80\x80\x80", + ], + ids=[ + "2-byte-overlong-nul", + "2-byte-overlong-del", + "3-byte-overlong-nul", + "3-byte-overlong-slash", + "4-byte-overlong-nul", + ], + ) + def test_rejects_overlong_encodings(self, data: bytes) -> None: + with pytest.raises(UnicodeDecodeError): + _ = validate_utf8(data) + + @pytest.mark.parametrize( + "data", + [ + # High surrogate U+D800 (0xED 0xA0 0x80) + b"\xed\xa0\x80", + # High surrogate mid-range U+DB00 (0xED 0xAC 0x80) + b"\xed\xac\x80", + # High surrogate max U+DBFF (0xED 0xAF 0xBF) + b"\xed\xaf\xbf", + # Low surrogate U+DC00 (0xED 0xB0 0x80) + b"\xed\xb0\x80", + # Low surrogate mid-range U+DE00 (0xED 0xB8 0x80) + b"\xed\xb8\x80", + # Low surrogate max U+DFFF (0xED 0xBF 0xBF) + b"\xed\xbf\xbf", + ], + ids=[ + "high-surrogate-min", + "high-surrogate-mid", + "high-surrogate-max", + "low-surrogate-min", + "low-surrogate-mid", + "low-surrogate-max", + ], + ) + def test_rejects_surrogate_codepoints(self, data: bytes) -> None: + with pytest.raises(UnicodeDecodeError): + _ = validate_utf8(data) + + def test_rejects_invalid_lead_byte_ff(self) -> None: + # 0xFF cannot start any valid UTF-8 sequence + with pytest.raises(UnicodeDecodeError): + _ = validate_utf8(b"\xff") + + def test_rejects_invalid_lead_byte_fe(self) -> None: + # 0xFE cannot start any valid UTF-8 sequence + with pytest.raises(UnicodeDecodeError): + _ = validate_utf8(b"\xfe") + + def test_rejects_truncated_2byte_sequence(self) -> None: + # 0xC2 expects a continuation byte + with pytest.raises(UnicodeDecodeError): + _ = validate_utf8(b"\xc2") + + def test_rejects_truncated_3byte_sequence(self) -> None: + # 0xE2 expects two continuation bytes + with pytest.raises(UnicodeDecodeError): + _ = validate_utf8(b"\xe2\x80") + + def test_rejects_truncated_4byte_sequence(self) -> None: + # 0xF0 expects three continuation bytes + with pytest.raises(UnicodeDecodeError): + _ = validate_utf8(b"\xf0\x9f\x98") + + def test_rejects_standalone_continuation_byte(self) -> None: + # 0x80-0xBF are continuation bytes, invalid as lead bytes + with pytest.raises(UnicodeDecodeError): + _ = validate_utf8(b"\x80") + + +class TestPrepareInput: + def test_combines_all_preprocessing(self) -> None: + # BOM + CRLF + valid UTF-8 + data = b'\xef\xbb\xbf{"id": 1}\r\n{"id": 2}\r\n' + result = prepare_input(data) + assert result == '{"id": 1}\n{"id": 2}\n' + + def test_handles_unicode_content(self) -> None: + data = '{"name": "café"}'.encode() + result = prepare_input(data) + assert result == '{"name": "café"}' + + def test_rejects_invalid_utf8(self) -> None: + # Contains overlong encoding (0xC0 0x80 is overlong NUL) + data = b'{"name": "bad\xc0\x80data"}' + with pytest.raises(UnicodeDecodeError): + _ = prepare_input(data) + + def test_empty_input(self) -> None: + assert prepare_input(b"") == "" + + +class TestHasUnpairedSurrogates: + @pytest.mark.parametrize( + "text", + [ + "hello world", + "", + "hello \U0001f600 world", + "\U0001f600\U0001f601\U0001f602", + ], + ids=[ + "plain-ascii", + "empty-string", + "emoji-in-middle", + "multiple-emojis", + ], + ) + def test_valid_text_returns_false(self, text: str) -> None: + assert has_unpaired_surrogates(text) is False + + @pytest.mark.parametrize( + "text", + [ + "hello " + chr(0xD800) + " world", + "hello " + chr(0xDC00) + " world", + "hello" + chr(0xD800), + chr(0xDC00) + "hello", + chr(0xD800), + chr(0xDFFF), + ], + ids=[ + 
"lone-high-surrogate-middle", + "lone-low-surrogate-middle", + "high-surrogate-at-end", + "low-surrogate-at-start", + "only-high-surrogate", + "only-low-surrogate", + ], + ) + def test_unpaired_surrogate_returns_true(self, text: str) -> None: + assert has_unpaired_surrogates(text) is True + + def test_valid_surrogate_pair_returns_false(self) -> None: + """Valid surrogate pair (high followed by low) should return False.""" + high = chr(0xD83D) # High surrogate + low = chr(0xDE00) # Low surrogate + text = high + low + assert has_unpaired_surrogates(text) is False + + def test_valid_surrogate_pair_in_middle(self) -> None: + """Valid surrogate pair embedded in text.""" + high = chr(0xD83D) + low = chr(0xDE00) + text = "hello" + high + low + "world" + assert has_unpaired_surrogates(text) is False + + +class TestValidateNoSurrogates: + def test_rejects_surrogate_in_dict_key(self) -> None: + """Unpaired surrogate in dict field name raises ParseError.""" + bad_key = "field" + chr(0xD800) # Lone high surrogate in key + value = cast("JSONValue", {bad_key: "value"}) + with pytest.raises(ParseError, match="field name"): + validate_no_surrogates(value) + + def test_accepts_valid_nested_dict(self) -> None: + """Nested dict without surrogates passes.""" + value = cast("JSONValue", {"outer": {"inner": "value"}}) + validate_no_surrogates(value) # Should not raise + + def test_rejects_surrogate_in_string_value(self) -> None: + """Unpaired surrogate in string value raises ParseError.""" + bad_value = "value" + chr(0xD800) # Lone high surrogate in value + value = cast("JSONValue", {"key": bad_value}) + with pytest.raises(ParseError, match="unpaired Unicode surrogates"): + validate_no_surrogates(value) diff --git a/tests/unit/test_header.py b/tests/unit/test_header.py new file mode 100644 index 0000000..bfbf922 --- /dev/null +++ b/tests/unit/test_header.py @@ -0,0 +1,315 @@ +from typing import TYPE_CHECKING + +import pytest + +from jsonlt._exceptions import ParseError +from jsonlt._header import Header, is_header_line, parse_header, serialize_header + +if TYPE_CHECKING: + from jsonlt._json import JSONObject + from jsonlt._keys import KeySpecifier + + +class TestIsHeaderLine: + @pytest.mark.parametrize( + ("record", "expected"), + [ + ({"$jsonlt": {"version": 1}}, True), + ({"id": "alice", "name": "Alice"}, False), + ({"jsonlt": {"version": 1}}, False), # Missing $ + ({}, False), + ], + ids=["with_jsonlt_field", "regular_record", "missing_dollar", "empty_object"], + ) + def test_is_header_line(self, record: "JSONObject", *, expected: bool) -> None: + assert is_header_line(record) is expected + + +class TestParseHeaderMinimal: + def test_version_only(self) -> None: + header = parse_header({"$jsonlt": {"version": 1}}) + + assert header.version == 1 + assert header.key is None + assert header.schema_url is None + assert header.schema is None + assert header.meta is None + + +class TestParseHeaderWithKey: + @pytest.mark.parametrize( + ("header", "expected"), + [ + ({"$jsonlt": {"version": 1, "key": "id"}}, "id"), + ({"$jsonlt": {"version": 1, "key": ""}}, ""), + ({"$jsonlt": {"version": 1, "key": ["id"]}}, ("id",)), + ({"$jsonlt": {"version": 1, "key": ["org", "id"]}}, ("org", "id")), + ( + {"$jsonlt": {"version": 1, "key": ["region", "org", "id"]}}, + ("region", "org", "id"), + ), + ], + ids=[ + "string_key", + "empty_string_key", + "single_element_array", + "compound_key", + "triple_compound_key", + ], + ) + def test_key_parsing(self, header: "JSONObject", expected: "KeySpecifier") -> None: + result = 
parse_header(header) + assert result.key == expected + + +class TestParseHeaderWithSchema: + def test_schema_url(self) -> None: + header = parse_header( + {"$jsonlt": {"version": 1, "$schema": "https://example.com/schema.json"}} + ) + assert header.schema_url == "https://example.com/schema.json" + assert header.schema is None + + def test_inline_schema(self) -> None: + header = parse_header( + { + "$jsonlt": { + "version": 1, + "schema": { + "type": "object", + "properties": {"id": {"type": "string"}}, + }, + } + } + ) + assert header.schema_url is None + assert header.schema == { + "type": "object", + "properties": {"id": {"type": "string"}}, + } + + def test_schema_url_and_inline_mutually_exclusive(self) -> None: + with pytest.raises(ParseError, match="mutually exclusive"): + _ = parse_header( + { + "$jsonlt": { + "version": 1, + "$schema": "https://example.com/schema.json", + "schema": {"type": "object"}, + } + } + ) + + +class TestParseHeaderWithMeta: + @pytest.mark.parametrize( + ("header", "expected"), + [ + ( + {"$jsonlt": {"version": 1, "meta": {"created": "2025-01-15"}}}, + {"created": "2025-01-15"}, + ), + ( + { + "$jsonlt": { + "version": 1, + "meta": { + "author": "Alice", + "tags": ["test"], + "nested": {"a": 1}, + }, + } + }, + {"author": "Alice", "tags": ["test"], "nested": {"a": 1}}, + ), + ], + ids=["simple_meta", "complex_meta"], + ) + def test_meta_parsing(self, header: "JSONObject", expected: "JSONObject") -> None: + result = parse_header(header) + assert result.meta == expected + + +class TestParseHeaderVersionErrors: + @pytest.mark.parametrize( + ("header", "match"), + [ + ({"$jsonlt": {"key": "id"}}, "missing required 'version' field"), + ({"$jsonlt": {"version": "1"}}, "version must be an integer"), + ({"$jsonlt": {"version": 1.0}}, "version must be an integer"), + ({"$jsonlt": {"version": True}}, "version must be an integer"), + ({"$jsonlt": {"version": 0}}, "unsupported version 0"), + ({"$jsonlt": {"version": 2}}, "unsupported version 2"), + ({"$jsonlt": {"version": -1}}, "unsupported version -1"), + ], + ids=[ + "missing_version", + "version_string", + "version_float", + "version_boolean", + "version_zero", + "version_two", + "version_negative", + ], + ) + def test_version_errors(self, header: "JSONObject", match: str) -> None: + with pytest.raises(ParseError, match=match): + _ = parse_header(header) + + +class TestParseHeaderStructureErrors: + @pytest.mark.parametrize( + ("header", "match"), + [ + ({"$jsonlt": "not an object"}, r"\$jsonlt value must be an object"), + ({"$jsonlt": [1, 2, 3]}, r"\$jsonlt value must be an object"), + ({"$jsonlt": None}, r"\$jsonlt value must be an object"), + ], + ids=["string", "array", "null"], + ) + def test_jsonlt_structure_errors(self, header: "JSONObject", match: str) -> None: + with pytest.raises(ParseError, match=match): + _ = parse_header(header) + + +class TestParseHeaderKeyErrors: + @pytest.mark.parametrize( + ("header", "match"), + [ + ( + {"$jsonlt": {"version": 1, "key": 42}}, + "key specifier must be a string or array", + ), + ( + {"$jsonlt": {"version": 1, "key": []}}, + "key specifier cannot be an empty array", + ), + ( + {"$jsonlt": {"version": 1, "key": ["id", 42]}}, + "must contain only strings", + ), + ( + {"$jsonlt": {"version": 1, "key": ["id", "name", "id"]}}, + "duplicate field names", + ), + ], + ids=["integer_key", "empty_array", "array_with_non_string", "duplicate_fields"], + ) + def test_key_errors(self, header: "JSONObject", match: str) -> None: + with pytest.raises(ParseError, match=match): + _ = 
parse_header(header) + + +class TestParseHeaderKeyTupleLimit: + def test_max_tuple_elements_accepted(self) -> None: + # 16 elements is the maximum allowed + fields = [f"field{i}" for i in range(16)] + header = parse_header( + {"$jsonlt": {"version": 1, "key": fields}} # pyright: ignore[reportArgumentType] + ) + assert header.key == tuple(fields) + + def test_exceeds_max_tuple_elements_rejected(self) -> None: + # 17 elements exceeds the limit + fields = [f"field{i}" for i in range(17)] + with pytest.raises(ParseError, match="exceeds maximum of 16 elements"): + _ = parse_header( + {"$jsonlt": {"version": 1, "key": fields}} # pyright: ignore[reportArgumentType] + ) + + +class TestParseHeaderSchemaErrors: + @pytest.mark.parametrize( + ("header", "match"), + [ + ({"$jsonlt": {"version": 1, "$schema": 123}}, r"\$schema must be a string"), + ( + {"$jsonlt": {"version": 1, "schema": "not an object"}}, + "schema must be an object", + ), + ( + {"$jsonlt": {"version": 1, "schema": [1, 2, 3]}}, + "schema must be an object", + ), + ], + ids=["schema_url_not_string", "inline_schema_string", "inline_schema_array"], + ) + def test_schema_errors(self, header: "JSONObject", match: str) -> None: + with pytest.raises(ParseError, match=match): + _ = parse_header(header) + + +class TestParseHeaderMetaErrors: + @pytest.mark.parametrize( + ("header", "match"), + [ + ( + {"$jsonlt": {"version": 1, "meta": "not an object"}}, + "meta must be an object", + ), + ({"$jsonlt": {"version": 1, "meta": ["a", "b"]}}, "meta must be an object"), + ({"$jsonlt": {"version": 1, "meta": 42}}, "meta must be an object"), + ], + ids=["string", "array", "number"], + ) + def test_meta_errors(self, header: "JSONObject", match: str) -> None: + with pytest.raises(ParseError, match=match): + _ = parse_header(header) + + +class TestHeaderEquality: + @pytest.mark.parametrize( + ("h1", "h2", "expected"), + [ + (Header(version=1, key="id"), Header(version=1, key="id"), True), + (Header(version=1), Header(version=1, key="id"), False), + (Header(version=1, key="id"), Header(version=1, key="name"), False), + (Header(version=1, key="id"), Header(version=1, key=("id",)), False), + ], + ids=[ + "equal_headers", + "different_key_presence", + "different_keys", + "tuple_vs_string", + ], + ) + def test_header_equality(self, h1: Header, h2: Header, *, expected: bool) -> None: + assert (h1 == h2) is expected + + +class TestSerializeHeader: + def test_minimal_header(self) -> None: + """Header with only version.""" + header = Header(version=1) + result = serialize_header(header) + assert result == '{"$jsonlt":{"version":1}}' + + def test_header_with_string_key(self) -> None: + """Header with string key specifier.""" + header = Header(version=1, key="id") + result = serialize_header(header) + assert '"key":"id"' in result + + def test_header_with_tuple_key(self) -> None: + """Header with tuple key specifier (becomes array).""" + header = Header(version=1, key=("org", "id")) + result = serialize_header(header) + assert '"key":["org","id"]' in result + + def test_header_with_schema_url(self) -> None: + """Header with $schema URL.""" + header = Header(version=1, schema_url="https://example.com/schema.json") + result = serialize_header(header) + assert '"$schema":"https://example.com/schema.json"' in result + + def test_header_with_inline_schema(self) -> None: + """Header with inline schema object.""" + header = Header(version=1, schema={"type": "object"}) + result = serialize_header(header) + assert '"schema":{"type":"object"}' in result + + def 
test_header_with_meta(self) -> None: + """Header with meta object.""" + header = Header(version=1, meta={"author": "test"}) + result = serialize_header(header) + assert '"meta":{"author":"test"}' in result diff --git a/tests/unit/test_json.py b/tests/unit/test_json.py new file mode 100644 index 0000000..b6263a6 --- /dev/null +++ b/tests/unit/test_json.py @@ -0,0 +1,291 @@ +import pytest + +from jsonlt._exceptions import LimitError, ParseError +from jsonlt._json import ( + json_nesting_depth, + parse_json_line, + serialize_json, +) + + +class TestJsonNestingDepth: + @pytest.mark.parametrize( + ("value", "expected"), + [ + # Primitives have depth 1 + (None, 1), + (True, 1), + (False, 1), + (42, 1), + (3.14, 1), + ("hello", 1), + # Empty containers have depth 1 + ({}, 1), + ([], 1), + # Containers with primitives have depth 2 + ({"a": 1}, 2), + ([1, 2, 3], 2), + # Nested containers + ({"a": {"b": 1}}, 3), + ({"a": {"b": {"c": 1}}}, 4), + ([[1]], 3), + ([[[1]]], 4), + # Mixed nesting + ({"a": [1, 2, 3]}, 3), + ([{"a": 1}], 3), + ({"a": [{"b": 1}]}, 4), + ], + ids=[ + "null", + "true", + "false", + "int", + "float", + "string", + "empty_dict", + "empty_list", + "flat_dict", + "flat_list", + "nested_dict_2", + "nested_dict_3", + "nested_list_2", + "nested_list_3", + "dict_with_list", + "list_with_dict", + "mixed_nesting", + ], + ) + def test_nesting_depth(self, value: object, expected: int) -> None: + assert json_nesting_depth(value) == expected + + def test_depth_64_exactly(self) -> None: + # Create a structure with exactly 64 levels of nesting + # Start with innermost value + value: object = 1 + for _ in range(63): # 63 arrays + 1 inner value = 64 levels + value = [value] + assert json_nesting_depth(value) == 64 + + def test_depth_65(self) -> None: + # Create a structure with 65 levels + value: object = 1 + for _ in range(64): # 64 arrays + 1 inner value = 65 levels + value = [value] + assert json_nesting_depth(value) == 65 + + +class TestParseJsonLine: + def test_parses_simple_object(self) -> None: + result = parse_json_line('{"id": 1, "name": "alice"}') + assert result == {"id": 1, "name": "alice"} + + def test_parses_empty_object(self) -> None: + result = parse_json_line("{}") + assert result == {} + + def test_parses_nested_object(self) -> None: + result = parse_json_line('{"id": 1, "data": {"value": 42}}') + assert result == {"id": 1, "data": {"value": 42}} + + def test_parses_object_with_array(self) -> None: + result = parse_json_line('{"id": 1, "items": [1, 2, 3]}') + assert result == {"id": 1, "items": [1, 2, 3]} + + def test_parses_unicode_content(self) -> None: + result = parse_json_line('{"name": "café", "emoji": "😀"}') + assert result == {"name": "café", "emoji": "😀"} + + def test_rejects_invalid_json(self) -> None: + with pytest.raises(ParseError, match="invalid JSON"): + _ = parse_json_line('{"id": 1') # missing closing brace + + def test_rejects_json_array(self) -> None: + with pytest.raises(ParseError, match="expected JSON object, got list"): + _ = parse_json_line("[1, 2, 3]") + + def test_rejects_json_string(self) -> None: + with pytest.raises(ParseError, match="expected JSON object, got str"): + _ = parse_json_line('"hello"') + + def test_rejects_json_number(self) -> None: + with pytest.raises(ParseError, match="expected JSON object, got int"): + _ = parse_json_line("42") + + def test_rejects_json_null(self) -> None: + with pytest.raises(ParseError, match="expected JSON object, got NoneType"): + _ = parse_json_line("null") + + def test_rejects_json_boolean(self) -> None: + 
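+        # Note: bool is a subclass of int in Python, so this case is
+        # distinct from the integer test above and must report "bool".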
with pytest.raises(ParseError, match="expected JSON object, got bool"): + _ = parse_json_line("true") + + +class TestDuplicateKeyDetection: + def test_rejects_duplicate_keys(self) -> None: + with pytest.raises(ParseError, match="duplicate key: 'id'"): + _ = parse_json_line('{"id": 1, "id": 2}') + + def test_rejects_duplicate_keys_different_values(self) -> None: + with pytest.raises(ParseError, match="duplicate key: 'name'"): + _ = parse_json_line('{"name": "alice", "name": "bob"}') + + def test_rejects_duplicate_keys_same_value(self) -> None: + # Even if values are the same, duplicate keys are rejected + with pytest.raises(ParseError, match="duplicate key: 'id'"): + _ = parse_json_line('{"id": 1, "id": 1}') + + def test_accepts_unique_keys(self) -> None: + result = parse_json_line('{"id": 1, "name": "alice", "value": 42}') + assert result == {"id": 1, "name": "alice", "value": 42} + + def test_rejects_duplicate_keys_in_nested_object(self) -> None: + with pytest.raises(ParseError, match="duplicate key: 'a'"): + _ = parse_json_line('{"outer": {"a": 1, "a": 2}}') + + +class TestNestingDepthEnforcement: + def test_accepts_depth_64(self) -> None: + # 64 levels: root object (1) + 62 nested arrays (2-63) + innermost value (64) + # This matches the conformance test format-nesting-depth-64-valid + json_str = '{"id": 1, "d": ' + "[" * 62 + "1" + "]" * 62 + "}" + result = parse_json_line(json_str) + assert result["id"] == 1 + + def test_rejects_depth_65(self) -> None: + # 65 levels: root object (1) + 63 nested arrays (2-64) + innermost value (65) + json_str = '{"id": 1, "d": ' + "[" * 63 + "1" + "]" * 63 + "}" + with pytest.raises(LimitError, match="nesting depth 65 exceeds maximum 64"): + _ = parse_json_line(json_str) + + def test_custom_max_depth(self) -> None: + json_str = '{"a": {"b": {"c": 1}}}' # depth 4 + # Should accept with max_depth=4 + result = parse_json_line(json_str, max_depth=4) + assert result == {"a": {"b": {"c": 1}}} + # Should reject with max_depth=3 + with pytest.raises(LimitError, match="nesting depth 4 exceeds maximum 3"): + _ = parse_json_line(json_str, max_depth=3) + + def test_extremely_deep_nesting_raises_limit_error(self) -> None: + # Create JSON that would cause RecursionError during parsing. + # Python's default recursion limit is ~1000, so 2000 nested arrays + # will trigger RecursionError in json.loads before our depth check. + # This verifies the DoS protection from M7 - we catch RecursionError + # and convert it to LimitError. 
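+        # Background, not asserted here: both the pure-Python and the
+        # C-accelerated json scanners respect the interpreter recursion
+        # limit, so the RecursionError fires with either implementation.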
+ json_str = '{"d": ' + "[" * 2000 + "1" + "]" * 2000 + "}" + with pytest.raises(LimitError, match="nesting depth exceeds maximum"): + _ = parse_json_line(json_str) + + def test_recursion_error_during_depth_check( + self, monkeypatch: pytest.MonkeyPatch + ) -> None: + """RecursionError during depth check converts to LimitError.""" + from jsonlt import _json # noqa: PLC0415 # pyright: ignore[reportPrivateUsage] + + def raise_recursion(_value: object) -> int: + msg = "simulated" + raise RecursionError(msg) + + monkeypatch.setattr(_json, "json_nesting_depth", raise_recursion) + + with pytest.raises(LimitError, match="nesting depth exceeds maximum"): + _ = parse_json_line('{"id": 1}') + + +class TestSerializeJson: + def test_sorts_keys_alphabetically(self) -> None: + value = {"zebra": 1, "apple": 2, "Banana": 3} + result = serialize_json(value) + # Unicode code point order: uppercase before lowercase + assert result == '{"Banana":3,"apple":2,"zebra":1}' + + def test_no_whitespace(self) -> None: + value = {"id": 1, "name": "alice"} + result = serialize_json(value) + assert " " not in result + assert "\n" not in result + assert "\t" not in result + + def test_unicode_preserved(self) -> None: + value = {"name": "café", "emoji": "😀"} + result = serialize_json(value) + # ensure_ascii=False means Unicode is preserved + assert "café" in result + assert "😀" in result + # Should not have escaped sequences for these characters + assert "\\u" not in result + + def test_sorts_nested_keys(self) -> None: + value = {"outer": {"z": 1, "a": 2}} + result = serialize_json(value) + assert result == '{"outer":{"a":2,"z":1}}' + + def test_sorts_keys_in_arrays_of_objects(self) -> None: + value = {"items": [{"z": 1, "a": 2}]} + result = serialize_json(value) + assert result == '{"items":[{"a":2,"z":1}]}' + + def test_empty_object(self) -> None: + result = serialize_json({}) + assert result == "{}" + + def test_nested_empty_containers(self) -> None: + value: dict[str, list[object] | dict[str, object]] = {"a": [], "b": {}} + result = serialize_json(value) + assert result == '{"a":[],"b":{}}' + + def test_complex_nested_structure(self) -> None: + value = { + "z": {"b": 2, "a": 1}, + "a": [{"y": 3, "x": 4}], + } + result = serialize_json(value) + assert result == '{"a":[{"x":4,"y":3}],"z":{"a":1,"b":2}}' + + +class TestSerializationDeterminism: + def test_consistent_output_across_calls(self) -> None: + value = {"zebra": 1, "apple": 2, "Banana": 3} + result1 = serialize_json(value) + result2 = serialize_json(value) + assert result1 == result2 + + def test_consistent_for_identical_data(self) -> None: + # Same data constructed differently should serialize identically + value1 = {"b": 2, "a": 1} + value2 = {"a": 1, "b": 2} + result1 = serialize_json(value1) + result2 = serialize_json(value2) + assert result1 == result2 + + def test_preserves_value_types(self) -> None: + value = { + "null": None, + "bool": True, + "int": 42, + "float": 3.14, + "string": "hello", + "array": [1, 2, 3], + "object": {"nested": True}, + } + result = serialize_json(value) + expected = ( + '{"array":[1,2,3],"bool":true,"float":3.14,' + '"int":42,"null":null,"object":{"nested":true},"string":"hello"}' + ) + assert result == expected + + def test_escapes_control_characters(self) -> None: + value = {"text": "hello\nworld\ttab"} + result = serialize_json(value) + assert result == '{"text":"hello\\nworld\\ttab"}' + + def test_escapes_backslash(self) -> None: + value = {"path": "c:\\users\\test"} + result = serialize_json(value) + assert result == 
'{"path":"c:\\\\users\\\\test"}' + + def test_escapes_quotes(self) -> None: + value = {"quote": 'say "hello"'} + result = serialize_json(value) + assert result == '{"quote":"say \\"hello\\""}' diff --git a/tests/unit/test_keys.py b/tests/unit/test_keys.py new file mode 100644 index 0000000..1d342be --- /dev/null +++ b/tests/unit/test_keys.py @@ -0,0 +1,420 @@ +import pytest + +from jsonlt._constants import MAX_INTEGER_KEY, MAX_TUPLE_ELEMENTS, MIN_INTEGER_KEY +from jsonlt._keys import ( + compare_keys, + is_valid_key, + is_valid_key_element, + is_valid_key_specifier, + key_from_json, + key_length, + key_specifiers_match, + normalize_key_specifier, + serialize_key, +) + + +class TestIsValidKeyElement: + @pytest.mark.parametrize( + "value", + [ + "alice", + "", + 42, + 0, + -100, + MAX_INTEGER_KEY, + MIN_INTEGER_KEY, + ], + ids=[ + "string", + "empty_string", + "positive_int", + "zero", + "negative_int", + "max_int", + "min_int", + ], + ) + def test_valid_key_elements(self, value: str | int) -> None: + assert is_valid_key_element(value) + + @pytest.mark.parametrize( + "value", + [ + MAX_INTEGER_KEY + 1, + MIN_INTEGER_KEY - 1, + 3.14, + None, + True, + False, + [1, 2, 3], + {"a": 1}, + ], + ids=[ + "above_max_int", + "below_min_int", + "float", + "none", + "true", + "false", + "list", + "dict", + ], + ) + def test_invalid_key_elements(self, value: object) -> None: + assert not is_valid_key_element(value) + + +class TestIsValidKey: + @pytest.mark.parametrize( + "value", + [ + "alice", + "", + 42, + MAX_INTEGER_KEY, + MIN_INTEGER_KEY, + ("a", 1), + ("a",), + tuple(f"field{i}" for i in range(MAX_TUPLE_ELEMENTS)), + ], + ids=[ + "string", + "empty_string", + "integer", + "max_integer", + "min_integer", + "tuple", + "single_element_tuple", + "max_element_tuple", + ], + ) + def test_valid_keys(self, value: str | int | tuple[str | int, ...]) -> None: + assert is_valid_key(value) + + @pytest.mark.parametrize( + "value", + [ + MAX_INTEGER_KEY + 1, + MIN_INTEGER_KEY - 1, + tuple(f"field{i}" for i in range(MAX_TUPLE_ELEMENTS + 1)), + (), + ("a", None), + ("a", True), + ("a", [1, 2]), + True, + False, + None, + 3.14, + [1, 2], + {"a": 1}, + ], + ids=[ + "int_above_max", + "int_below_min", + "tuple_too_many_elements", + "empty_tuple", + "tuple_with_none", + "tuple_with_bool", + "tuple_with_list", + "true", + "false", + "none", + "float", + "list", + "dict", + ], + ) + def test_invalid_keys(self, value: object) -> None: + assert not is_valid_key(value) + + +class TestIsValidKeySpecifier: + @pytest.mark.parametrize( + "specifier", + [ + "id", + "", + ("org", "id"), + ("id",), + ], + ids=[ + "string", + "empty_string", + "tuple_of_strings", + "single_element_tuple", + ], + ) + def test_valid_key_specifiers(self, specifier: str | tuple[str, ...]) -> None: + assert is_valid_key_specifier(specifier) + + @pytest.mark.parametrize( + "specifier", + [ + (), + ("id", 42), + (1, 2), + ("id", "id"), + ("a", "b", "a"), + ["id", "name"], + 42, + None, + ], + ids=[ + "empty_tuple", + "tuple_with_int", + "tuple_of_ints", + "duplicate_fields", + "duplicate_non_adjacent", + "list", + "integer", + "none", + ], + ) + def test_invalid_key_specifiers(self, specifier: object) -> None: + assert not is_valid_key_specifier(specifier) + + +class TestNormalizeKeySpecifier: + @pytest.mark.parametrize( + ("specifier", "expected"), + [ + ("id", "id"), + (("id",), "id"), + (("org", "id"), ("org", "id")), + ], + ids=["string_unchanged", "single_tuple_to_string", "multi_tuple_unchanged"], + ) + def test_normalization( + self, + specifier: 
str | tuple[str, ...], + expected: str | tuple[str, ...], + ) -> None: + assert normalize_key_specifier(specifier) == expected + + +class TestKeySpecifiersMatch: + @pytest.mark.parametrize( + ("a", "b"), + [ + ("id", "id"), + ("id", ("id",)), + (("id",), "id"), + (("org", "id"), ("org", "id")), + ], + ids=[ + "identical_strings", + "string_matches_single_tuple", + "single_tuple_matches_string", + "identical_tuples", + ], + ) + def test_matching_specifiers( + self, a: str | tuple[str, ...], b: str | tuple[str, ...] + ) -> None: + assert key_specifiers_match(a, b) + + @pytest.mark.parametrize( + ("a", "b"), + [ + ("id", "name"), + (("org", "id"), ("id", "org")), + (("id",), ("org", "id")), + ("id", ("org", "id")), + ], + ids=[ + "different_strings", + "different_order", + "different_length", + "string_vs_multi_tuple", + ], + ) + def test_non_matching_specifiers( + self, a: str | tuple[str, ...], b: str | tuple[str, ...] + ) -> None: + assert not key_specifiers_match(a, b) + + +class TestCompareKeys: + @pytest.mark.parametrize( + ("a", "b", "expected"), + [ + # Equal values + (42, 42, 0), + ("alice", "alice", 0), + (("a", 1), ("a", 1), 0), + # Integer comparisons + (1, 2, -1), + (2, 1, 1), + (-10, -5, -1), + (-5, -10, 1), + # String comparisons + ("alice", "bob", -1), + ("bob", "alice", 1), + # Unicode code point ordering: uppercase before lowercase + ("Alice", "alice", -1), + ("Zebra", "apple", -1), + # Cross-type ordering: int < str < tuple + (42, "42", -1), + ("42", 42, 1), + ("alice", ("alice",), -1), + (("alice",), "alice", 1), + (42, ("a", 1), -1), + (("a", 1), 42, 1), + # Tuple element ordering + (("a", 1), ("a", 2), -1), + (("a", 2), ("b", 1), -1), + (("a",), ("a", 1), -1), + (("a", 1), ("a",), 1), + # Tuple element type ordering: int < str + ((1, "a"), ("a", 1), -1), + ], + ids=[ + "equal_integers", + "equal_strings", + "equal_tuples", + "less_integer", + "greater_integer", + "negative_less", + "negative_greater", + "less_string", + "greater_string", + "uppercase_before_lowercase", + "code_point_ordering", + "int_before_string", + "string_after_int", + "string_before_tuple", + "tuple_after_string", + "int_before_tuple", + "tuple_after_int", + "tuple_element_ordering", + "tuple_first_element_wins", + "shorter_tuple_first", + "longer_tuple_second", + "tuple_int_before_string", + ], + ) + def test_comparison( + self, + a: str | int | tuple[str | int, ...], + b: str | int | tuple[str | int, ...], + expected: int, + ) -> None: + assert compare_keys(a, b) == expected + + +class TestSerializeKey: + @pytest.mark.parametrize( + ("key", "expected"), + [ + ("alice", '"alice"'), + ("", '""'), + ("hello\nworld", '"hello\\nworld"'), + ("tab\there", '"tab\\there"'), + (42, "42"), + (-100, "-100"), + (0, "0"), + (("a", 1), '["a",1]'), + (("alice", "bob", 123), '["alice","bob",123]'), + ], + ids=[ + "string", + "empty_string", + "newline_escape", + "tab_escape", + "positive_int", + "negative_int", + "zero", + "tuple", + "tuple_no_spaces", + ], + ) + def test_serialization( + self, key: str | int | tuple[str | int, ...], expected: str + ) -> None: + assert serialize_key(key) == expected + + +class TestKeyLength: + @pytest.mark.parametrize( + ("key", "expected"), + [ + # "alice" = 7 bytes (5 chars + 2 quotes) + ("alice", 7), + # "" = 2 bytes (just quotes) + ("", 2), + # Integer lengths + (42, 2), + (12345, 5), + (-1, 2), + (-100, 4), + # ["a",1] = 7 bytes + (("a", 1), 7), + # "café" = 4 chars, é is 2 UTF-8 bytes, + 2 quotes = 7 bytes + ("café", 7), + # "\n" in JSON is "\\n" = 4 bytes: quote, backslash, 
n, quote + ("\n", 4), + ], + ids=[ + "string", + "empty_string", + "small_int", + "larger_int", + "negative_one", + "negative_three_digit", + "tuple", + "unicode", + "escaped_char", + ], + ) + def test_key_length( + self, key: str | int | tuple[str | int, ...], expected: int + ) -> None: + assert key_length(key) == expected + + +class TestKeyFromJson: + @pytest.mark.parametrize( + ("value", "expected"), + [ + ("alice", "alice"), + (42, 42), + (["a", 1], ("a", 1)), + ([], ()), + ], + ids=["string", "integer", "list_to_tuple", "empty_list"], + ) + def test_conversion( + self, value: object, expected: str | int | tuple[str | int, ...] + ) -> None: + assert key_from_json(value) == expected + + @pytest.mark.parametrize( + ("value", "match"), + [ + (True, "bool"), + (None, "NoneType"), + ({"a": 1}, "dict"), + (3.14, "float"), + ], + ids=["bool", "none", "dict", "float"], + ) + def test_invalid_conversion_raises(self, value: object, match: str) -> None: + with pytest.raises(TypeError, match=match): + _ = key_from_json(value) + + def test_invalid_list_element_float_raises(self) -> None: + """List with float element raises TypeError.""" + with pytest.raises(TypeError, match=r"Cannot convert.*key element"): + _ = key_from_json(["valid", 3.14]) + + def test_invalid_list_with_none_element_raises(self) -> None: + """List with None element raises TypeError.""" + with pytest.raises(TypeError, match=r"Cannot convert.*key element"): + _ = key_from_json(["valid", None]) + + def test_invalid_list_with_bool_element_raises(self) -> None: + """List with bool element raises TypeError.""" + with pytest.raises(TypeError, match=r"Cannot convert.*key element"): + _ = key_from_json(["valid", True]) diff --git a/tests/unit/test_lock.py b/tests/unit/test_lock.py new file mode 100644 index 0000000..e1720e7 --- /dev/null +++ b/tests/unit/test_lock.py @@ -0,0 +1,172 @@ +import threading +import time +from typing import TYPE_CHECKING + +import pytest + +from jsonlt._exceptions import LockError +from jsonlt._lock import exclusive_lock + +if TYPE_CHECKING: + from pathlib import Path + + +class TestExclusiveLock: + def test_lock_acquires_and_releases(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + _ = path.write_text("") + + with path.open("r+b") as f, exclusive_lock(f): + # Lock is held - we got here without error + pass + # Lock released after context exit + + def test_lock_timeout_zero_succeeds_when_available(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + _ = path.write_text("") + + with path.open("r+b") as f, exclusive_lock(f, timeout=0): + pass # Should succeed when not locked + + def test_lock_allows_operations_inside_context(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + _ = path.write_text("initial") + + with path.open("r+b") as f, exclusive_lock(f): + # Can read while holding lock + content = f.read() + assert content == b"initial" + + def test_lock_released_on_exception(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + _ = path.write_text("") + + def raise_inside_lock() -> None: + msg = "test error" + with path.open("r+b") as f, exclusive_lock(f): + raise ValueError(msg) + + with pytest.raises(ValueError, match="test error"): + raise_inside_lock() + + # Should be able to acquire lock again after exception + with path.open("r+b") as f, exclusive_lock(f): + pass + + def test_lock_can_be_reacquired_after_release(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + _ = path.write_text("") + + # First acquisition + with 
path.open("r+b") as f, exclusive_lock(f): + pass + + # Second acquisition + with path.open("r+b") as f, exclusive_lock(f): + pass + + def test_lock_with_explicit_timeout(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + _ = path.write_text("") + + with path.open("r+b") as f, exclusive_lock(f, timeout=1.0): + pass + + def test_lock_timeout_with_contention(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + _ = path.write_text("") + + # Track thread execution + lock_acquired = threading.Event() + can_release = threading.Event() + timeout_occurred = threading.Event() + error_message: list[str] = [] + + def holder_thread() -> None: + with path.open("r+b") as f, exclusive_lock(f): + _ = lock_acquired.set() + _ = can_release.wait(timeout=5.0) + + def waiter_thread() -> None: + _ = lock_acquired.wait(timeout=5.0) + try: + with path.open("r+b") as f, exclusive_lock(f, timeout=0.05): + pass + except LockError as e: + _ = timeout_occurred.set() + error_message.append(str(e)) + + holder = threading.Thread(target=holder_thread) + waiter = threading.Thread(target=waiter_thread) + + holder.start() + waiter.start() + + # Wait for waiter to timeout + waiter.join(timeout=2.0) + + # Clean up holder thread + _ = can_release.set() + holder.join(timeout=2.0) + + assert timeout_occurred.is_set() + assert len(error_message) == 1 + assert "could not acquire file lock" in error_message[0] + + def test_lock_blocks_until_available(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + _ = path.write_text("") + + # Track thread execution + lock_acquired = threading.Event() + waiter_succeeded = threading.Event() + hold_duration = 0.05 # 50ms + + def holder_thread() -> None: + with path.open("r+b") as f, exclusive_lock(f): + _ = lock_acquired.set() + time.sleep(hold_duration) + + def waiter_thread() -> None: + _ = lock_acquired.wait(timeout=5.0) + with path.open("r+b") as f, exclusive_lock(f, timeout=1.0): + _ = waiter_succeeded.set() + + holder = threading.Thread(target=holder_thread) + waiter = threading.Thread(target=waiter_thread) + + holder.start() + waiter.start() + + holder.join(timeout=2.0) + waiter.join(timeout=2.0) + + assert waiter_succeeded.is_set() + + def test_lock_with_none_timeout_waits_indefinitely(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + _ = path.write_text("") + + lock_acquired = threading.Event() + waiter_succeeded = threading.Event() + + def holder_thread() -> None: + with path.open("r+b") as f, exclusive_lock(f): + _ = lock_acquired.set() + time.sleep(0.05) + + def waiter_thread() -> None: + _ = lock_acquired.wait(timeout=5.0) + with path.open("r+b") as f, exclusive_lock(f, timeout=None): + _ = waiter_succeeded.set() + + holder = threading.Thread(target=holder_thread) + waiter = threading.Thread(target=waiter_thread) + + holder.start() + waiter.start() + + holder.join(timeout=2.0) + waiter.join(timeout=2.0) + + assert waiter_succeeded.is_set() diff --git a/tests/unit/test_reader.py b/tests/unit/test_reader.py new file mode 100644 index 0000000..7996088 --- /dev/null +++ b/tests/unit/test_reader.py @@ -0,0 +1,234 @@ +from typing import TYPE_CHECKING + +import pytest + +from jsonlt import FileError, LimitError, ParseError +from jsonlt._reader import parse_table_content, parse_table_text, read_table_file + +if TYPE_CHECKING: + from pathlib import Path + + +class TestParseTableContent: + def test_empty_bytes_returns_empty(self) -> None: + header, operations = parse_table_content(b"") + assert header is None + assert 
operations == [] + + def test_single_record(self) -> None: + content = b'{"id": 1, "name": "test"}\n' + header, operations = parse_table_content(content) + assert header is None + assert len(operations) == 1 + assert operations[0] == {"id": 1, "name": "test"} + + def test_multiple_records(self) -> None: + content = b'{"id": 1}\n{"id": 2}\n{"id": 3}\n' + header, operations = parse_table_content(content) + assert header is None + assert len(operations) == 3 + assert operations[0] == {"id": 1} + assert operations[1] == {"id": 2} + assert operations[2] == {"id": 3} + + def test_missing_trailing_newline_accepted(self) -> None: + content = b'{"id": 1}' + header, operations = parse_table_content(content) + assert header is None + assert len(operations) == 1 + assert operations[0] == {"id": 1} + + def test_bom_stripped(self) -> None: + bom = b"\xef\xbb\xbf" + content = bom + b'{"id": 1}\n' + header, operations = parse_table_content(content) + assert header is None + assert len(operations) == 1 + assert operations[0] == {"id": 1} + + def test_bom_only_returns_empty(self) -> None: + """File containing only BOM returns empty result.""" + bom = b"\xef\xbb\xbf" + header, operations = parse_table_content(bom) + assert header is None + assert operations == [] + + def test_crlf_normalized(self) -> None: + content = b'{"id": 1}\r\n{"id": 2}\r\n' + header, operations = parse_table_content(content) + assert header is None + assert len(operations) == 2 + + def test_header_detected_on_first_line(self) -> None: + content = b'{"$jsonlt": {"version": 1, "key": "id"}}\n{"id": "alice"}\n' + header, operations = parse_table_content(content) + assert header is not None + assert header.version == 1 + assert header.key == "id" + assert len(operations) == 1 + assert operations[0] == {"id": "alice"} + + def test_header_only_file(self) -> None: + content = b'{"$jsonlt": {"version": 1}}\n' + header, operations = parse_table_content(content) + assert header is not None + assert header.version == 1 + assert operations == [] + + def test_header_not_on_first_line_rejected(self) -> None: + content = b'{"id": 1}\n{"$jsonlt": {"version": 1}}\n' + with pytest.raises(ParseError, match="header must be on first line"): + _ = parse_table_content(content) + + @pytest.mark.parametrize( + ("content", "match"), + [ + pytest.param( + b'{"id": 1, "name": "\xff\xfe"}\n', "invalid UTF-8", id="invalid_utf8" + ), + pytest.param(b'{"id": 1, "name": }\n', "invalid JSON", id="invalid_json"), + pytest.param(b"[1, 2, 3]\n", "expected JSON object", id="non_object"), + pytest.param(b'{"id": 1, "id": 2}\n', "duplicate key", id="duplicate_keys"), + ], + ) + def test_parse_error_rejected(self, content: bytes, match: str) -> None: + with pytest.raises(ParseError, match=match): + _ = parse_table_content(content) + + def test_tombstone_parsed(self) -> None: + content = b'{"id": 1}\n{"$deleted": true, "id": 1}\n' + header, operations = parse_table_content(content) + assert header is None + assert len(operations) == 2 + assert operations[0] == {"id": 1} + assert operations[1] == {"$deleted": True, "id": 1} + + def test_unicode_content_preserved(self) -> None: + content = '{"id": "test", "emoji": "👍"}\n'.encode() + header, operations = parse_table_content(content) + assert header is None + assert len(operations) == 1 + assert operations[0] == {"id": "test", "emoji": "👍"} + + def test_escaped_newline_in_string(self) -> None: + content = b'{"id": 1, "text": "line1\\nline2"}\n' + header, operations = parse_table_content(content) + assert header is None + assert 
len(operations) == 1 + assert operations[0] == {"id": 1, "text": "line1\nline2"} + + @pytest.mark.parametrize( + "bad_bytes", + [ + pytest.param(b"\xc0\x80", id="2byte_overlong_nul"), + pytest.param(b"\xc1\xbf", id="2byte_overlong_del"), + pytest.param(b"\xe0\x80\x80", id="3byte_overlong_nul"), + pytest.param(b"\xe0\x80\xaf", id="3byte_overlong_slash"), + ], + ) + def test_overlong_utf8_rejected(self, bad_bytes: bytes) -> None: + content = b'{"id": 1, "name": "abc' + bad_bytes + b'xyz"}\n' + with pytest.raises(ParseError, match="invalid UTF-8"): + _ = parse_table_content(content) + + +class TestParseTableText: + def test_empty_string_returns_empty(self) -> None: + header, operations = parse_table_text("") + assert header is None + assert operations == [] + + def test_single_record(self) -> None: + header, operations = parse_table_text('{"id": 1}\n') + assert header is None + assert len(operations) == 1 + + def test_skips_empty_lines(self) -> None: + header, operations = parse_table_text('{"id": 1}\n\n{"id": 2}\n') + assert header is None + assert len(operations) == 2 + + +class TestReadTableFile: + def test_reads_file(self, tmp_path: "Path") -> None: + file = tmp_path / "test.jsonlt" + _ = file.write_text('{"id": 1}\n{"id": 2}\n') + header, operations = read_table_file(file) + assert header is None + assert len(operations) == 2 + + def test_reads_file_from_string_path(self, tmp_path: "Path") -> None: + file = tmp_path / "test.jsonlt" + _ = file.write_text('{"id": 1}\n') + header, operations = read_table_file(str(file)) + assert header is None + assert len(operations) == 1 + + def test_reads_empty_file(self, tmp_path: "Path") -> None: + file = tmp_path / "empty.jsonlt" + _ = file.write_text("") + header, operations = read_table_file(file) + assert header is None + assert operations == [] + + def test_file_not_found_raises_file_error(self, tmp_path: "Path") -> None: + file = tmp_path / "nonexistent.jsonlt" + with pytest.raises(FileError, match="cannot read file"): + _ = read_table_file(file) + + def test_file_with_header(self, tmp_path: "Path") -> None: + file = tmp_path / "test.jsonlt" + content = '{"$jsonlt": {"version": 1, "key": "id"}}\n{"id": "alice"}\n' + _ = file.write_text(content) + header, operations = read_table_file(file) + assert header is not None + assert header.version == 1 + assert header.key == "id" + assert len(operations) == 1 + + +class TestMaxFileSize: + def test_file_within_limit_succeeds(self, tmp_path: "Path") -> None: + file = tmp_path / "test.jsonlt" + content = '{"id": 1}\n' + _ = file.write_text(content) + file_size = file.stat().st_size + # Set limit higher than file size + header, operations = read_table_file(file, max_file_size=file_size + 100) + assert header is None + assert len(operations) == 1 + + def test_file_at_exact_limit_succeeds(self, tmp_path: "Path") -> None: + file = tmp_path / "test.jsonlt" + content = '{"id": 1}\n' + _ = file.write_text(content) + file_size = file.stat().st_size + # Set limit exactly at file size + header, operations = read_table_file(file, max_file_size=file_size) + assert header is None + assert len(operations) == 1 + + def test_file_exceeds_limit_raises_limit_error(self, tmp_path: "Path") -> None: + file = tmp_path / "test.jsonlt" + content = '{"id": 1, "data": "some longer content here"}\n' + _ = file.write_text(content) + file_size = file.stat().st_size + # Set limit lower than file size + with pytest.raises(LimitError, match=r"file size .* exceeds maximum"): + _ = read_table_file(file, max_file_size=file_size - 1) + + def 
test_none_limit_means_no_limit(self, tmp_path: "Path") -> None: + file = tmp_path / "test.jsonlt" + # Create a reasonably sized file + content = '{"id": 1, "data": "' + "x" * 1000 + '"}\n' + _ = file.write_text(content) + # None (default) means no limit - should succeed + header, operations = read_table_file(file, max_file_size=None) + assert header is None + assert len(operations) == 1 + + def test_stat_failure_raises_file_error(self, tmp_path: "Path") -> None: + file = tmp_path / "nonexistent.jsonlt" + # File doesn't exist - stat should fail and raise FileError + with pytest.raises(FileError, match="cannot read file"): + _ = read_table_file(file, max_file_size=1000) diff --git a/tests/unit/test_records.py b/tests/unit/test_records.py new file mode 100644 index 0000000..c4af280 --- /dev/null +++ b/tests/unit/test_records.py @@ -0,0 +1,503 @@ +from typing import TYPE_CHECKING + +import pytest + +from jsonlt._constants import MAX_INTEGER_KEY, MIN_INTEGER_KEY +from jsonlt._exceptions import InvalidKeyError, ParseError +from jsonlt._records import ( + build_tombstone, + extract_key, + is_tombstone, + record_size, + validate_record, + validate_tombstone, +) + +if TYPE_CHECKING: + from jsonlt._json import JSONObject + from jsonlt._keys import Key, KeySpecifier + + +class TestValidateRecordValid: + @pytest.mark.parametrize( + ("record", "key_specifier"), + [ + ({"id": "alice", "name": "Alice"}, "id"), + ({"id": 42, "name": "Item"}, "id"), + ({"id": "", "name": "Default"}, "id"), + ({"id": MAX_INTEGER_KEY, "data": "max"}, "id"), + ({"id": MIN_INTEGER_KEY, "data": "min"}, "id"), + ({"id": 0, "data": "zero"}, "id"), + ({"id": -1, "data": "negative"}, "id"), + ], + ids=[ + "string_key", + "integer_key", + "empty_string_key", + "max_integer_key", + "min_integer_key", + "zero_key", + "negative_key", + ], + ) + def test_valid_records( + self, record: "JSONObject", key_specifier: "KeySpecifier" + ) -> None: + validate_record(record, key_specifier) # Should not raise + + +class TestValidateRecordInvalid: + @pytest.mark.parametrize( + ("record", "key_specifier", "match"), + [ + ({"name": "Alice"}, "id", "missing required key field 'id'"), + ({"id": None, "name": "Alice"}, "id", "key field 'id' value is null"), + ({"id": True, "name": "Alice"}, "id", "key field 'id' value is boolean"), + ( + {"id": {"nested": "value"}, "name": "Alice"}, + "id", + "key field 'id' value is an object", + ), + ( + {"id": [1, 2, 3], "name": "Alice"}, + "id", + "key field 'id' value is an array", + ), + ({"id": 3.14, "name": "Alice"}, "id", "value is not an integer"), + ( + {"id": MAX_INTEGER_KEY + 1, "name": "Alice"}, + "id", + "outside valid integer range", + ), + ( + {"id": MIN_INTEGER_KEY - 1, "name": "Alice"}, + "id", + "outside valid integer range", + ), + ( + {"id": "alice", "$custom": "value"}, + "id", + "record contains reserved field", + ), + ( + {"id": "alice", "$field1": 1, "$field2": 2}, + "id", + "record contains reserved", + ), + ], + ids=[ + "missing_key_field", + "null_key_value", + "boolean_key_value", + "object_key_value", + "array_key_value", + "fractional_number", + "integer_above_max", + "integer_below_min", + "dollar_prefixed_field", + "multiple_dollar_prefixed", + ], + ) + def test_invalid_records( + self, record: "JSONObject", key_specifier: "KeySpecifier", match: str + ) -> None: + with pytest.raises(InvalidKeyError, match=match): + validate_record(record, key_specifier) + + +class TestValidateRecordCompoundKey: + @pytest.mark.parametrize( + ("record", "key_specifier"), + [ + ({"org": "acme", "id": 
"alice", "name": "Alice"}, ("org", "id")), + ( + {"region": "us-east", "org": "acme", "id": 1, "name": "Alice"}, + ("region", "org", "id"), + ), + ({"org": "acme", "id": 42}, ("org", "id")), + ], + ids=["two_field_key", "three_field_key", "mixed_types"], + ) + def test_valid_compound_keys( + self, record: "JSONObject", key_specifier: "KeySpecifier" + ) -> None: + validate_record(record, key_specifier) # Should not raise + + @pytest.mark.parametrize( + ("record", "key_specifier", "match"), + [ + ( + {"id": "alice", "name": "Alice"}, + ("org", "id"), + "missing required key field 'org'", + ), + ( + {"org": "acme", "name": "Alice"}, + ("org", "id"), + "missing required key field 'id'", + ), + ( + {"org": "acme", "id": None, "name": "Alice"}, + ("org", "id"), + "key field 'id' value is null", + ), + ], + ids=["missing_first_field", "missing_second_field", "invalid_value"], + ) + def test_invalid_compound_keys( + self, record: "JSONObject", key_specifier: "KeySpecifier", match: str + ) -> None: + with pytest.raises(InvalidKeyError, match=match): + validate_record(record, key_specifier) + + +class TestIsTombstone: + @pytest.mark.parametrize( + ("record", "expected"), + [ + ({"$deleted": True, "id": "alice"}, True), + ({"$deleted": False, "id": "alice"}, False), + ({"$deleted": None, "id": "alice"}, False), + ({"$deleted": "true", "id": "alice"}, False), + ({"$deleted": 1, "id": "alice"}, False), + ({"id": "alice", "name": "Alice"}, False), + ], + ids=[ + "deleted_true", + "deleted_false", + "deleted_null", + "deleted_string", + "deleted_one", + "no_deleted_field", + ], + ) + def test_is_tombstone(self, record: "JSONObject", *, expected: bool) -> None: + assert is_tombstone(record) is expected + + +class TestValidateTombstoneValid: + @pytest.mark.parametrize( + ("record", "key_specifier"), + [ + ({"$deleted": True, "id": "alice"}, "id"), + ({"$deleted": True, "org": "acme", "id": "alice"}, ("org", "id")), + ({"$deleted": True, "id": 42}, "id"), + ], + ids=["simple_tombstone", "compound_key_tombstone", "integer_key_tombstone"], + ) + def test_valid_tombstones( + self, record: "JSONObject", key_specifier: "KeySpecifier" + ) -> None: + validate_tombstone(record, key_specifier) # Should not raise + + +class TestValidateTombstoneInvalid: + @pytest.mark.parametrize( + ("record", "key_specifier", "error_type", "match"), + [ + ({"id": "alice"}, "id", ParseError, "tombstone missing \\$deleted field"), + ( + {"$deleted": False, "id": "alice"}, + "id", + ParseError, + r"\$deleted must be true, got false", + ), + ( + {"$deleted": None, "id": "alice"}, + "id", + ParseError, + r"\$deleted must be true, got null", + ), + ( + {"$deleted": "true", "id": "alice"}, + "id", + ParseError, + r"\$deleted must be true, got string", + ), + ( + {"$deleted": 1, "id": "alice"}, + "id", + ParseError, + r"\$deleted must be true, got number", + ), + ( + {"$deleted": True}, + "id", + InvalidKeyError, + "tombstone missing required key field 'id'", + ), + ( + {"$deleted": True, "id": None}, + "id", + InvalidKeyError, + "key field 'id' value is null", + ), + ( + {"$deleted": {"nested": True}, "id": "alice"}, + "id", + ParseError, + r"\$deleted must be true, got dict", + ), + ], + ids=[ + "missing_deleted_field", + "deleted_false", + "deleted_null", + "deleted_string", + "deleted_number", + "missing_key_field", + "invalid_key_value", + "deleted_dict_type", + ], + ) + def test_invalid_tombstones( + self, + record: "JSONObject", + key_specifier: "KeySpecifier", + error_type: type[Exception], + match: str, + ) -> None: + with 
pytest.raises(error_type, match=match): + validate_tombstone(record, key_specifier) + + +class TestExtractKeyScalar: + @pytest.mark.parametrize( + ("record", "key_specifier", "expected"), + [ + ({"id": "alice", "name": "Alice"}, "id", "alice"), + ({"id": "", "name": "Default"}, "id", ""), + ({"id": 42, "data": "value"}, "id", 42), + ({"id": -100, "data": "value"}, "id", -100), + ({"id": 0, "data": "value"}, "id", 0), + ({"id": MAX_INTEGER_KEY, "data": "max"}, "id", MAX_INTEGER_KEY), + ({"id": MIN_INTEGER_KEY, "data": "min"}, "id", MIN_INTEGER_KEY), + ], + ids=[ + "string_key", + "empty_string_key", + "positive_integer_key", + "negative_integer_key", + "zero_key", + "max_integer_key", + "min_integer_key", + ], + ) + def test_extract_scalar_key( + self, record: "JSONObject", key_specifier: str, expected: "Key" + ) -> None: + assert extract_key(record, key_specifier) == expected + + +class TestExtractKeyCompound: + @pytest.mark.parametrize( + ("record", "key_specifier", "expected", "expected_type"), + [ + ( + {"org": "acme", "id": "alice", "name": "Alice"}, + ("org", "id"), + ("acme", "alice"), + tuple, + ), + ( + {"region": "us-east", "org": "acme", "id": 1, "name": "Alice"}, + ("region", "org", "id"), + ("us-east", "acme", 1), + tuple, + ), + ( + {"org": "acme", "id": 42, "name": "Widget"}, + ("org", "id"), + ("acme", 42), + tuple, + ), + # Single-element tuple key specifiers return scalar keys + ({"id": "alice", "name": "Alice"}, ("id",), "alice", str), + ({"id": 42, "name": "Widget"}, ("id",), 42, int), + ], + ids=[ + "two_element_tuple", + "three_element_tuple", + "mixed_type_tuple", + "single_element_returns_string", + "single_element_returns_int", + ], + ) + def test_extract_compound_key( + self, + record: "JSONObject", + key_specifier: tuple[str, ...], + expected: "Key", + expected_type: type, + ) -> None: + key = extract_key(record, key_specifier) + assert key == expected + assert isinstance(key, expected_type) + + +class TestExtractKeyErrors: + @pytest.mark.parametrize( + ("record", "key_specifier", "match"), + [ + ({"name": "Alice"}, "id", "missing required key field"), + ({"id": None, "name": "Alice"}, "id", "key field 'id' value is null"), + ( + {"org": "acme", "name": "Alice"}, + ("org", "id"), + "missing required key field 'id'", + ), + ( + {"org": "acme", "id": True, "name": "Alice"}, + ("org", "id"), + "key field 'id' value is boolean", + ), + ({"id": "alice"}, (), "key specifier cannot be empty"), + ], + ids=[ + "missing_key_field", + "invalid_key_value", + "compound_missing_field", + "compound_invalid_field", + "empty_key_specifier", + ], + ) + def test_extract_key_errors( + self, record: "JSONObject", key_specifier: "KeySpecifier", match: str + ) -> None: + with pytest.raises(InvalidKeyError, match=match): + _ = extract_key(record, key_specifier) + + +class TestExtractKeyFloatHandling: + @pytest.mark.parametrize( + ("record", "key_specifier", "expected"), + [ + ({"id": 1.0, "name": "Widget"}, "id", 1), + ({"id": 1e2, "name": "Widget"}, "id", 100), + ({"id": -5.0, "name": "Widget"}, "id", -5), + ], + ids=["float_whole_number", "float_with_exponent", "negative_float_whole"], + ) + def test_float_whole_numbers_accepted( + self, record: "JSONObject", key_specifier: str, expected: int + ) -> None: + key = extract_key(record, key_specifier) + assert key == expected + assert isinstance(key, int) + + def test_float_fractional_rejected(self) -> None: + record: JSONObject = {"id": 1.5, "name": "Widget"} + with pytest.raises(InvalidKeyError, match="value is not an integer"): + _ = 
extract_key(record, "id") + + +class TestExtractKeyInfinityNaN: + @pytest.mark.parametrize( + ("record", "key_specifier", "match"), + [ + ({"id": float("inf"), "name": "Widget"}, "id", "Infinity or NaN"), + ({"id": float("-inf"), "name": "Widget"}, "id", "Infinity or NaN"), + ({"id": float("nan"), "name": "Widget"}, "id", "Infinity or NaN"), + ], + ids=["positive_infinity", "negative_infinity", "nan"], + ) + def test_infinity_nan_rejected( + self, record: "JSONObject", key_specifier: str, match: str + ) -> None: + with pytest.raises(InvalidKeyError, match=match): + _ = extract_key(record, key_specifier) + + @pytest.mark.parametrize( + ("record", "key_specifier", "match"), + [ + ({"org": float("inf"), "id": "alice"}, ("org", "id"), "Infinity or NaN"), + ({"org": "acme", "id": float("nan")}, ("org", "id"), "Infinity or NaN"), + ], + ids=["infinity_in_compound_key", "nan_in_compound_key"], + ) + def test_infinity_nan_rejected_compound( + self, record: "JSONObject", key_specifier: tuple[str, ...], match: str + ) -> None: + with pytest.raises(InvalidKeyError, match=match): + _ = extract_key(record, key_specifier) + + +class TestRecordSize: + @pytest.mark.parametrize( + ("record", "expected"), + [ + ({"id": "a"}, 10), # {"id":"a"} + ({"id": "alice", "name": "Alice"}, 29), # {"id":"alice","name":"Alice"} + ({"id": 42}, 9), # {"id":42} + ({"id": "a", "data": {"x": 1}}, 25), # {"data":{"x":1},"id":"a"} + ({"id": "café"}, 14), # {"id":"café"} - é is 2 UTF-8 bytes + ({}, 2), # {} + ({"id": "a", "tags": ["x", "y"]}, 27), # {"id":"a","tags":["x","y"]} + ({"id": "a", "active": True}, 24), # {"active":true,"id":"a"} + ({"id": "a", "data": None}, 22), # {"data":null,"id":"a"} + ], + ids=[ + "simple", + "multiple_fields", + "integer_value", + "nested_object", + "unicode", + "empty", + "array", + "boolean", + "null", + ], + ) + def test_record_size(self, record: "JSONObject", expected: int) -> None: + assert record_size(record) == expected + + def test_record_size_deterministic_key_order(self) -> None: + record1: JSONObject = {"zebra": 1, "apple": 2} + record2: JSONObject = {"apple": 2, "zebra": 1} + assert record_size(record1) == record_size(record2) + + +class TestBuildTombstoneValid: + @pytest.mark.parametrize( + ("key", "key_specifier", "expected"), + [ + ("alice", "id", {"$deleted": True, "id": "alice"}), + (42, "id", {"$deleted": True, "id": 42}), + ( + ("acme", "alice"), + ("org", "id"), + {"$deleted": True, "org": "acme", "id": "alice"}, + ), + ], + ids=[ + "scalar_string_key", + "scalar_integer_key", + "tuple_key_with_tuple_specifier", + ], + ) + def test_valid_tombstones( + self, key: "Key", key_specifier: "KeySpecifier", expected: "JSONObject" + ) -> None: + result = build_tombstone(key, key_specifier) + assert result == expected + + +class TestBuildTombstoneArityMismatch: + @pytest.mark.parametrize( + ("key", "key_specifier", "match"), + [ + (("a", "b"), "id", "expected scalar key"), + ("alice", ("org", "id"), "expected tuple"), + (("a", "b", "c"), ("org", "id"), "expected tuple of 2"), + ], + ids=[ + "tuple_key_with_scalar_specifier", + "scalar_key_with_tuple_specifier", + "wrong_length_tuple", + ], + ) + def test_arity_mismatch( + self, key: "Key", key_specifier: "KeySpecifier", match: str + ) -> None: + with pytest.raises(InvalidKeyError, match=match): + _ = build_tombstone(key, key_specifier) diff --git a/tests/unit/test_state.py b/tests/unit/test_state.py new file mode 100644 index 0000000..4f6c4c9 --- /dev/null +++ b/tests/unit/test_state.py @@ -0,0 +1,150 @@ +from typing import 
TYPE_CHECKING + +import pytest + +from jsonlt import InvalidKeyError +from jsonlt._state import compute_logical_state + +if TYPE_CHECKING: + from jsonlt._json import JSONObject + + +class TestComputeLogicalState: + def test_empty_operations_returns_empty_state(self) -> None: + operations: list[JSONObject] = [] + state = compute_logical_state(operations, "id") + assert state == {} + + def test_single_record(self) -> None: + operations: list[JSONObject] = [{"id": "alice", "role": "admin"}] + state = compute_logical_state(operations, "id") + assert state == {"alice": {"id": "alice", "role": "admin"}} + + def test_multiple_records_distinct_keys(self) -> None: + operations: list[JSONObject] = [ + {"id": "alice", "role": "admin"}, + {"id": "bob", "role": "user"}, + {"id": "carol", "role": "user"}, + ] + state = compute_logical_state(operations, "id") + assert len(state) == 3 + assert state["alice"] == {"id": "alice", "role": "admin"} + assert state["bob"] == {"id": "bob", "role": "user"} + assert state["carol"] == {"id": "carol", "role": "user"} + + def test_upsert_overwrites(self) -> None: + operations: list[JSONObject] = [ + {"id": "alice", "role": "user"}, + {"id": "alice", "role": "admin"}, + ] + state = compute_logical_state(operations, "id") + assert len(state) == 1 + assert state["alice"] == {"id": "alice", "role": "admin"} + + def test_tombstone_removes(self) -> None: + operations: list[JSONObject] = [ + {"id": "alice", "role": "admin"}, + {"$deleted": True, "id": "alice"}, + ] + state = compute_logical_state(operations, "id") + assert state == {} + + def test_tombstone_nonexistent_key(self) -> None: + operations: list[JSONObject] = [ + {"id": "alice", "role": "admin"}, + {"$deleted": True, "id": "bob"}, + ] + state = compute_logical_state(operations, "id") + assert len(state) == 1 + assert state["alice"] == {"id": "alice", "role": "admin"} + + def test_reinsert_after_delete(self) -> None: + operations: list[JSONObject] = [ + {"id": "alice", "role": "admin"}, + {"$deleted": True, "id": "alice"}, + {"id": "alice", "role": "user"}, + ] + state = compute_logical_state(operations, "id") + assert len(state) == 1 + assert state["alice"] == {"id": "alice", "role": "user"} + + def test_integer_key(self) -> None: + operations: list[JSONObject] = [ + {"id": 1, "name": "first"}, + {"id": 2, "name": "second"}, + ] + state = compute_logical_state(operations, "id") + assert len(state) == 2 + assert state[1] == {"id": 1, "name": "first"} + assert state[2] == {"id": 2, "name": "second"} + + def test_integer_float_equivalent(self) -> None: + operations: list[JSONObject] = [ + {"id": 1, "v": 1}, + {"id": 1.0, "v": 2}, + ] + state = compute_logical_state(operations, "id") + assert len(state) == 1 + assert state[1] == {"id": 1.0, "v": 2} + + def test_compound_key(self) -> None: + operations: list[JSONObject] = [ + {"org": "acme", "id": 1, "name": "alice"}, + {"org": "acme", "id": 2, "name": "bob"}, + {"org": "globex", "id": 1, "name": "carol"}, + ] + state = compute_logical_state(operations, ("org", "id")) + assert len(state) == 3 + assert state[("acme", 1)] == {"org": "acme", "id": 1, "name": "alice"} + assert state[("acme", 2)] == {"org": "acme", "id": 2, "name": "bob"} + assert state[("globex", 1)] == {"org": "globex", "id": 1, "name": "carol"} + + def test_compound_key_delete(self) -> None: + operations: list[JSONObject] = [ + {"org": "acme", "id": 1, "name": "alice"}, + {"org": "acme", "id": 2, "name": "bob"}, + {"$deleted": True, "org": "acme", "id": 1}, + ] + state = compute_logical_state(operations, 
("org", "id")) + assert len(state) == 1 + assert state[("acme", 2)] == {"org": "acme", "id": 2, "name": "bob"} + + def test_extra_fields_preserved(self) -> None: + operations: list[JSONObject] = [ + { + "id": "alice", + "role": "admin", + "email": "alice@example.com", + "active": True, + } + ] + state = compute_logical_state(operations, "id") + assert state["alice"] == { + "id": "alice", + "role": "admin", + "email": "alice@example.com", + "active": True, + } + + def test_nested_values_preserved(self) -> None: + operations: list[JSONObject] = [ + { + "id": "alice", + "meta": {"tags": ["admin", "active"], "created": "2025-01-01"}, + } + ] + state = compute_logical_state(operations, "id") + assert state["alice"]["meta"] == { + "tags": ["admin", "active"], + "created": "2025-01-01", + } + + def test_missing_key_field_raises(self) -> None: + operations: list[JSONObject] = [{"name": "alice"}] + with pytest.raises(InvalidKeyError, match="missing required key field"): + _ = compute_logical_state(operations, "id") + + def test_single_element_tuple_key(self) -> None: + operations: list[JSONObject] = [{"id": "alice", "role": "admin"}] + state = compute_logical_state(operations, ("id",)) + assert state["alice"] == {"id": "alice", "role": "admin"} diff --git a/tests/unit/test_table.py b/tests/unit/test_table.py index e69de29..4b5cd51 100644 --- a/tests/unit/test_table.py +++ b/tests/unit/test_table.py @@ -0,0 +1,1272 @@ +import time +from typing import TYPE_CHECKING + +import pytest + +from jsonlt import FileError, InvalidKeyError, LimitError, Table + +from tests.fakes.fake_filesystem import FakeFileSystem + +if TYPE_CHECKING: + from pathlib import Path + + +class TestTableConstruction: + def test_new_file_with_key_specifier(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + assert table.path == table_path + assert table.key_specifier == "id" + assert table.count() == 0 + assert table.header is None + + def test_new_file_without_key_specifier(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path) + + assert table.path == table_path + assert table.key_specifier is None + assert table.count() == 0 + + def test_existing_file_with_header_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"$jsonlt": {"version": 1, "key": "id"}}\n') + + table = Table(table_path) + + assert table.key_specifier == "id" + assert table.header is not None + assert table.header.key == "id" + assert table.count() == 0 + + def test_existing_file_with_matching_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"$jsonlt": {"version": 1, "key": "id"}}\n') + + table = Table(table_path, key="id") + + assert table.key_specifier == "id" + + def test_existing_file_with_mismatched_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"$jsonlt": {"version": 1, "key": "id"}}\n') + + with pytest.raises(InvalidKeyError, match="key specifier mismatch"): + _ = Table(table_path, key="name") + + def test_existing_file_with_operations_no_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": 1, "name": "alice"}\n') + + with pytest.raises(InvalidKeyError, match="no key specifier"): + _ = Table(table_path) + + def test_existing_file_with_operations_and_caller_key( + self, tmp_path: "Path" + ) -> None: + table_path = tmp_path 
/ "test.jsonlt" + _ = table_path.write_text('{"id": 1, "name": "alice"}\n') + + table = Table(table_path, key="id") + + assert table.key_specifier == "id" + assert table.count() == 1 + + def test_compound_key_specifier(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"org": "acme", "id": 1, "name": "alice"}\n') + + table = Table(table_path, key=("org", "id")) + + assert table.key_specifier == ("org", "id") + assert table.count() == 1 + + def test_single_element_tuple_normalized(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key=("id",)) + + assert table.key_specifier == "id" + + def test_repr(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + assert "Table" in repr(table) + # Check path is present (use name to avoid Windows path sep issues) + assert table_path.name in repr(table) + assert "'id'" in repr(table) + + +class TestTableGet: + def test_get_existing_record(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "role": "admin"}\n') + + table = Table(table_path, key="id") + + assert table.get("alice") == {"id": "alice", "role": "admin"} + + def test_get_nonexistent_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "role": "admin"}\n') + + table = Table(table_path, key="id") + + assert table.get("bob") is None + + def test_get_empty_table(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + + table = Table(table_path, key="id") + + assert table.get("alice") is None + + def test_get_integer_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": 1, "name": "first"}\n') + + table = Table(table_path, key="id") + + assert table.get(1) == {"id": 1, "name": "first"} + + def test_get_compound_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"org": "acme", "id": 1, "name": "alice"}\n') + + table = Table(table_path, key=("org", "id")) + + assert table.get(("acme", 1)) == {"org": "acme", "id": 1, "name": "alice"} + + +class TestTableHas: + def test_has_existing_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "role": "admin"}\n') + + table = Table(table_path, key="id") + + assert table.has("alice") is True + + def test_has_nonexistent_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "role": "admin"}\n') + + table = Table(table_path, key="id") + + assert table.has("bob") is False + + def test_has_empty_table(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + + table = Table(table_path, key="id") + + assert table.has("alice") is False + + +class TestTableAll: + def test_all_empty_table(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + + table = Table(table_path, key="id") + + assert table.all() == [] + + def test_all_single_record(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + + table = Table(table_path, key="id") + + assert table.all() == [{"id": "alice", "v": 1}] + + def test_all_sorted_by_key_strings(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = 
table_path.write_text('{"id": "bob", "v": 2}\n{"id": "alice", "v": 1}\n') + + table = Table(table_path, key="id") + + records = table.all() + assert len(records) == 2 + assert records[0] == {"id": "alice", "v": 1} + assert records[1] == {"id": "bob", "v": 2} + + def test_all_sorted_by_key_integers(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text( + '{"id": 10, "v": 1}\n{"id": 2, "v": 2}\n{"id": 1, "v": 3}\n' + ) + + table = Table(table_path, key="id") + + records = table.all() + assert len(records) == 3 + assert records[0]["id"] == 1 + assert records[1]["id"] == 2 + assert records[2]["id"] == 10 + + def test_all_sorted_mixed_types(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + content = '{"id": "b", "v": 1}\n{"id": 2, "v": 2}\n' + content += '{"id": "a", "v": 3}\n{"id": 1, "v": 4}\n' + _ = table_path.write_text(content) + + table = Table(table_path, key="id") + + records = table.all() + keys = [r["id"] for r in records] + assert keys == [1, 2, "a", "b"] + + +class TestTableKeys: + def test_keys_empty_table(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + + table = Table(table_path, key="id") + + assert table.keys() == [] + + def test_keys_sorted(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "bob", "v": 2}\n{"id": "alice", "v": 1}\n') + + table = Table(table_path, key="id") + + assert table.keys() == ["alice", "bob"] + + +class TestTableCount: + def test_count_empty_table(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + + table = Table(table_path, key="id") + + assert table.count() == 0 + + def test_count_single_record(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + + table = Table(table_path, key="id") + + assert table.count() == 1 + + def test_count_multiple_records(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": 1}\n{"id": 2}\n{"id": 3}\n') + + table = Table(table_path, key="id") + + assert table.count() == 3 + + +class TestTableFind: + def test_find_matches(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + content = '{"id": 1, "role": "admin"}\n' + content += '{"id": 2, "role": "user"}\n' + content += '{"id": 3, "role": "admin"}\n' + _ = table_path.write_text(content) + + table = Table(table_path, key="id") + + results = table.find(lambda r: r["role"] == "admin") + assert len(results) == 2 + assert results[0]["id"] == 1 + assert results[1]["id"] == 3 + + def test_find_no_matches(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": 1, "role": "user"}\n') + + table = Table(table_path, key="id") + + results = table.find(lambda r: r["role"] == "admin") + assert results == [] + + def test_find_with_limit(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + content = '{"id": 1, "role": "admin"}\n' + content += '{"id": 2, "role": "admin"}\n' + content += '{"id": 3, "role": "admin"}\n' + _ = table_path.write_text(content) + + table = Table(table_path, key="id") + + results = table.find(lambda r: r["role"] == "admin", limit=2) + assert len(results) == 2 + + def test_find_in_key_order(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text( + '{"id": "c", "v": 1}\n{"id": "a", "v": 2}\n{"id": "b", "v": 3}\n' + ) + + 
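# The three records above are written out of key order on purpose: + # find() is expected to return matches sorted by key, not in file order.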
table = Table(table_path, key="id") + + results = table.find(lambda _: True) + keys = [r["id"] for r in results] + assert keys == ["a", "b", "c"] + + def test_find_one_matches(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + content = '{"id": 1, "role": "user"}\n' + content += '{"id": 2, "role": "admin"}\n' + content += '{"id": 3, "role": "admin"}\n' + _ = table_path.write_text(content) + + table = Table(table_path, key="id") + + result = table.find_one(lambda r: r["role"] == "admin") + assert result is not None + assert result["id"] == 2 + + def test_find_one_no_match(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": 1, "role": "user"}\n') + + table = Table(table_path, key="id") + + result = table.find_one(lambda r: r["role"] == "admin") + assert result is None + + +class TestTableLogicalState: + def test_upsert_overwrites(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text( + '{"id": "alice", "role": "user"}\n{"id": "alice", "role": "admin"}\n' + ) + + table = Table(table_path, key="id") + + assert table.get("alice") == {"id": "alice", "role": "admin"} + assert table.count() == 1 + + def test_tombstone_deletes(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text( + '{"id": "alice", "role": "admin"}\n{"id": "alice", "$deleted": true}\n' + ) + + table = Table(table_path, key="id") + + assert table.get("alice") is None + assert table.count() == 0 + + def test_upsert_after_tombstone(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + content = '{"id": "alice", "role": "admin"}\n' + content += '{"id": "alice", "$deleted": true}\n' + content += '{"id": "alice", "role": "user"}\n' + _ = table_path.write_text(content) + + table = Table(table_path, key="id") + + assert table.get("alice") == {"id": "alice", "role": "user"} + + +class TestTableAutoReload: + def test_auto_reload_detects_changes(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + + table = Table(table_path, key="id") + assert table.get("alice") == {"id": "alice", "v": 1} + + # Wait a bit to ensure mtime changes + time.sleep(0.01) + _ = table_path.write_text('{"id": "alice", "v": 2}\n') + + assert table.get("alice") == {"id": "alice", "v": 2} + + def test_auto_reload_disabled(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + + table = Table(table_path, key="id", auto_reload=False) + assert table.get("alice") == {"id": "alice", "v": 1} + + # Wait a bit to ensure mtime changes + time.sleep(0.01) + _ = table_path.write_text('{"id": "alice", "v": 2}\n') + + # Should still return old value + assert table.get("alice") == {"id": "alice", "v": 1} + + def test_auto_reload_file_deleted(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + + table = Table(table_path, key="id") + assert table.count() == 1 + + table_path.unlink() + + assert table.count() == 0 + assert table.get("alice") is None + + def test_auto_reload_new_records(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + + table = Table(table_path, key="id") + assert table.count() == 1 + + # Wait a bit to ensure mtime changes + time.sleep(0.01) + _ = 
table_path.write_text('{"id": "alice", "v": 1}\n{"id": "bob", "v": 2}\n') + + assert table.count() == 2 + assert table.has("bob") is True + + +class TestTablePut: + def test_put_creates_file(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + table.put({"id": "alice", "name": "Alice"}) + + assert table_path.exists() + assert table.count() == 1 + assert table.get("alice") == {"id": "alice", "name": "Alice"} + + def test_put_appends_to_file(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "name": "Alice"}\n') + table = Table(table_path, key="id") + + table.put({"id": "bob", "name": "Bob"}) + + assert table.count() == 2 + assert table.get("bob") == {"id": "bob", "name": "Bob"} + # Check file has both lines + content = table_path.read_text() + assert content.count("\n") == 2 + + def test_put_updates_existing_record(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + table.put({"id": "alice", "role": "user"}) + table.put({"id": "alice", "role": "admin"}) + + assert table.count() == 1 + assert table.get("alice") == {"id": "alice", "role": "admin"} + + def test_put_with_integer_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + table.put({"id": 1, "name": "First"}) + + assert table.get(1) == {"id": 1, "name": "First"} + + def test_put_with_compound_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key=("org", "id")) + + table.put({"org": "acme", "id": 1, "name": "Alice"}) + + assert table.get(("acme", 1)) == {"org": "acme", "id": 1, "name": "Alice"} + + def test_put_without_key_specifier_raises(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path) + + with pytest.raises(InvalidKeyError, match="key specifier is required"): + table.put({"id": "alice"}) + + def test_put_missing_key_field_raises(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + with pytest.raises(InvalidKeyError, match="missing required key field"): + table.put({"name": "Alice"}) + + def test_put_dollar_field_raises(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + with pytest.raises(InvalidKeyError, match="reserved field name"): + table.put({"id": "alice", "$custom": "value"}) + + def test_put_invalid_key_type_raises(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + with pytest.raises(InvalidKeyError, match="boolean"): + table.put({"id": True, "name": "Alice"}) + + def test_put_key_length_limit(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + # Key with > 1024 bytes when serialized (string with quotes) + long_key = "x" * 1030 + with pytest.raises(LimitError, match="key length"): + table.put({"id": long_key}) + + def test_put_deterministic_serialization(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + table.put({"z": 1, "id": "test", "a": 2}) + + content = table_path.read_text() + # Keys should be sorted: a, id, z + assert content.strip() == '{"a":2,"id":"test","z":1}' + + +class TestTableDelete: + def test_delete_existing_record(self, tmp_path: "Path") 
-> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "name": "Alice"}\n') + table = Table(table_path, key="id") + + result = table.delete("alice") + + assert result is True + assert table.get("alice") is None + assert table.count() == 0 + + def test_delete_nonexistent_record(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + result = table.delete("bob") + + assert result is False + + def test_delete_writes_tombstone(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "name": "Alice"}\n') + table = Table(table_path, key="id") + + _ = table.delete("alice") + + content = table_path.read_text() + assert "$deleted" in content + assert content.count("\n") == 2 + + def test_delete_with_integer_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": 1, "name": "First"}\n') + table = Table(table_path, key="id") + + result = table.delete(1) + + assert result is True + assert table.get(1) is None + + def test_delete_with_compound_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"org": "acme", "id": 1, "name": "Alice"}\n') + table = Table(table_path, key=("org", "id")) + + result = table.delete(("acme", 1)) + + assert result is True + assert table.get(("acme", 1)) is None + + def test_delete_without_key_specifier_raises(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path) + + with pytest.raises(InvalidKeyError, match="key specifier is required"): + _ = table.delete("alice") + + def test_delete_key_arity_mismatch_raises(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key=("org", "id")) + + with pytest.raises(InvalidKeyError, match="key arity mismatch"): + _ = table.delete("alice") # scalar key, tuple specifier + + def test_delete_tuple_key_arity_mismatch_raises(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key=("org", "id")) + + with pytest.raises(InvalidKeyError, match="key arity mismatch"): + _ = table.delete(("acme", 1, "extra")) # 3 elements, specifier has 2 + + +class TestTableClear: + def test_clear_removes_all_records(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "a"}\n{"id": "b"}\n{"id": "c"}\n') + table = Table(table_path, key="id") + + table.clear() + + assert table.count() == 0 + assert table.get("a") is None + + def test_clear_preserves_header(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + content = '{"$jsonlt": {"version": 1, "key": "id"}}\n' + content += '{"id": "alice"}\n' + _ = table_path.write_text(content) + table = Table(table_path) + + table.clear() + + assert table.count() == 0 + assert table.header is not None + assert table.header.key == "id" + # File should only have header + file_content = table_path.read_text() + lines = [line for line in file_content.strip().split("\n") if line] + assert len(lines) == 1 + assert "$jsonlt" in lines[0] + + def test_clear_empty_table(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + table.clear() # Should not raise + + assert table.count() == 0 + + def test_clear_creates_file_with_header(self, tmp_path: "Path") -> None: + # First create table with header, 
then delete file, then clear + table_path = tmp_path / "test.jsonlt" + content = '{"$jsonlt": {"version": 1, "key": "id"}}\n' + _ = table_path.write_text(content) + table = Table(table_path) + + table_path.unlink() + table.clear() + + # Should recreate file with header + assert table_path.exists() + + def test_clear_reloads_header_inside_lock(self, tmp_path: "Path") -> None: + """Clear reloads state inside lock to get current header.""" + table_path = tmp_path / "test.jsonlt" + content = ( + '{"$jsonlt": {"version": 1, "key": "id", "meta": {"tag": "initial"}}}\n' + ) + content += '{"id": "alice"}\n' + _ = table_path.write_text(content) + + # Open table without auto_reload + table = Table(table_path, auto_reload=False) + assert table.count() == 1 + + # External modification: change header meta tag + time.sleep(0.01) # Ensure mtime differs + new_content = ( + '{"$jsonlt": {"version": 1, "key": "id", "meta": {"tag": "updated"}}}\n' + ) + new_content += '{"id": "alice"}\n' + new_content += '{"id": "bob"}\n' + _ = table_path.write_text(new_content) + + # Call clear - it should reload inside the lock and use the new header + table.clear() + + # Verify the file has the updated header, not the initial one + file_content = table_path.read_text() + assert "updated" in file_content + assert "initial" not in file_content + + +class TestTableCompact: + def test_compact_empty_table(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + table.compact() # Should not raise + + assert table.count() == 0 + + def test_compact_removes_tombstones(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + content = '{"id": "alice", "name": "Alice"}\n' + content += '{"id": "alice", "$deleted": true}\n' + content += '{"id": "bob", "name": "Bob"}\n' + _ = table_path.write_text(content) + table = Table(table_path, key="id") + + table.compact() + + # Tombstone should be removed + file_content = table_path.read_text() + assert "$deleted" not in file_content + # Only bob remains + assert table.count() == 1 + assert table.get("bob") == {"id": "bob", "name": "Bob"} + + def test_compact_preserves_header(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + content = '{"$jsonlt": {"version": 1, "key": "id"}}\n' + content += '{"id": "alice"}\n' + _ = table_path.write_text(content) + table = Table(table_path) + + table.compact() + + assert table.header is not None + assert table.header.key == "id" + # File should have header + file_content = table_path.read_text() + assert "$jsonlt" in file_content + + def test_compact_preserves_records(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "a"}\n{"id": "b"}\n{"id": "c"}\n') + table = Table(table_path, key="id") + + table.compact() + + assert table.count() == 3 + assert table.get("a") == {"id": "a"} + assert table.get("b") == {"id": "b"} + assert table.get("c") == {"id": "c"} + + def test_compact_records_in_key_order(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + # Write records in reverse order + _ = table_path.write_text('{"id": "c"}\n{"id": "a"}\n{"id": "b"}\n') + table = Table(table_path, key="id") + + table.compact() + + # Read file and verify order + file_content = table_path.read_text() + lines = [line for line in file_content.strip().split("\n") if line] + assert len(lines) == 3 + assert '"id":"a"' in lines[0] + assert '"id":"b"' in lines[1] + assert '"id":"c"' in lines[2] + + def 
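test_compact_twice_is_stable(self, tmp_path: "Path") -> None: + # Hedged sketch, not in the original diff: after one compact the file + # should already be in canonical form (key order, no history, no + # tombstones), so a second compact is assumed to leave the bytes as-is. + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "b"}\n{"id": "a"}\n') + table = Table(table_path, key="id") + table.compact() + first = table_path.read_text() + table.compact() + assert table_path.read_text() == first + + def 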
test_compact_removes_history(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + # Multiple versions of same key + content = '{"id": "alice", "v": 1}\n' + content += '{"id": "alice", "v": 2}\n' + content += '{"id": "alice", "v": 3}\n' + _ = table_path.write_text(content) + table = Table(table_path, key="id") + + table.compact() + + # Only one record should remain + file_content = table_path.read_text() + lines = [line for line in file_content.strip().split("\n") if line] + assert len(lines) == 1 + assert '"v":3' in lines[0] + assert table.get("alice") == {"id": "alice", "v": 3} + + def test_compact_only_tombstones(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + content = '{"id": "alice", "name": "Alice"}\n' + content += '{"id": "alice", "$deleted": true}\n' + _ = table_path.write_text(content) + table = Table(table_path, key="id") + + table.compact() + + # Table should be empty + assert table.count() == 0 + # File should be empty + file_content = table_path.read_text() + assert file_content.strip() == "" + + def test_compact_returns_none(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice"}\n') + table = Table(table_path, key="id") + + result = table.compact() + + assert result is None + + def test_compact_creates_file(self, tmp_path: "Path") -> None: + # Create table with header, add records, delete file, then compact + table_path = tmp_path / "test.jsonlt" + content = '{"$jsonlt": {"version": 1, "key": "id"}}\n' + content += '{"id": "alice"}\n' + _ = table_path.write_text(content) + table = Table(table_path) + + # Delete file but keep in-memory state + table_path.unlink() + table.compact() + + # Should recreate file with header and record + assert table_path.exists() + file_content = table_path.read_text() + assert "$jsonlt" in file_content + assert '"id":"alice"' in file_content + + def test_compact_with_compound_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key=("org", "id")) + + table.put({"org": "acme", "id": 1, "name": "alice"}) + table.put({"org": "acme", "id": 2, "name": "bob"}) + table.put({"org": "globex", "id": 1, "name": "carol"}) + _ = table.delete(("acme", 2)) + table.compact() + + assert table.count() == 2 + assert table.keys() == [("acme", 1), ("globex", 1)] + + def test_compact_integer_keys_sorted(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + table.put({"id": 10, "v": 1}) + table.put({"id": 2, "v": 2}) + table.put({"id": 100, "v": 3}) + table.compact() + + assert table.keys() == [2, 10, 100] + + def test_compact_mixed_key_types_sorted(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + table.put({"id": "z", "v": 1}) + table.put({"id": 1, "v": 2}) + table.put({"id": "a", "v": 3}) + table.put({"id": 10, "v": 4}) + table.compact() + + assert table.keys() == [1, 10, "a", "z"] + + +class TestTableWriteReload: + def test_put_updates_state_immediately(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id", auto_reload=False) + + table.put({"id": "alice", "name": "Alice"}) + + # Should be immediately visible without reload + assert table.get("alice") == {"id": "alice", "name": "Alice"} + + def test_delete_updates_state_immediately(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = 
table_path.write_text('{"id": "alice"}\n') + table = Table(table_path, key="id", auto_reload=False) + + _ = table.delete("alice") + + # Should be immediately gone without reload + assert table.get("alice") is None + + def test_clear_updates_state_immediately(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice"}\n{"id": "bob"}\n') + table = Table(table_path, key="id", auto_reload=False) + + table.clear() + + # Should be immediately empty without reload + assert table.count() == 0 + + +class TestTableMaxFileSize: + def test_file_within_limit_loads_successfully(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + file_size = table_path.stat().st_size + + table = Table(table_path, key="id", max_file_size=file_size + 100) + + assert table.get("alice") == {"id": "alice", "v": 1} + + def test_file_exceeds_limit_raises_limit_error(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + file_size = table_path.stat().st_size + + with pytest.raises(LimitError, match=r"file size .* exceeds maximum"): + _ = Table(table_path, key="id", max_file_size=file_size - 1) + + def test_none_limit_means_no_limit(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + # Create a reasonably sized file + _ = table_path.write_text('{"id": "alice", "data": "' + "x" * 1000 + '"}\n') + + # None (default) means no limit - should succeed + table = Table(table_path, key="id", max_file_size=None) + + assert table.count() == 1 + + def test_new_file_with_max_file_size_succeeds(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + + # New file (doesn't exist) should not check size limit + table = Table(table_path, key="id", max_file_size=10) + + assert table.count() == 0 + + +class TestTableMagicMethods: + def test_len_returns_count(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "a"}\n{"id": "b"}\n{"id": "c"}\n') + table = Table(table_path, key="id") + + assert len(table) == 3 + + def test_contains_with_existing_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice"}\n') + table = Table(table_path, key="id") + + assert "alice" in table + + def test_contains_with_missing_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice"}\n') + table = Table(table_path, key="id") + + assert "bob" not in table + + def test_contains_with_int_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": 1}\n') + table = Table(table_path, key="id") + + assert 1 in table + assert 2 not in table + + def test_contains_with_tuple_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"a": 1, "b": "x"}\n') + table = Table(table_path, key=("a", "b")) + + assert (1, "x") in table + assert (1, "y") not in table + + def test_contains_with_invalid_type_returns_false(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice"}\n') + table = Table(table_path, key="id") + + # Non-key types should return False, not raise + assert 3.14 not in table + assert None not in table + assert ["list"] not in table + assert {"dict": "value"} not in table + + def 
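test_contains_with_bool_returns_false(self, tmp_path: "Path") -> None: + # Hedged sketch, not in the original diff: bool is a subclass of int, + # so this assumes __contains__ applies the same key validation that + # rejects boolean key values elsewhere, instead of treating True as 1. + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": 1}\n') + table = Table(table_path, key="id") + assert True not in table + + def 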
test_contains_with_invalid_tuple_returns_false(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"a": 1, "b": "x"}\n') + table = Table(table_path, key=("a", "b")) + + # Tuple with invalid element types should return False + assert (1, 3.14) not in table + assert (None, "x") not in table + + def test_iter_yields_records_in_key_order(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + # Write in reverse order + _ = table_path.write_text('{"id": "c"}\n{"id": "a"}\n{"id": "b"}\n') + table = Table(table_path, key="id") + + records = list(table) + + assert len(records) == 3 + assert records[0] == {"id": "a"} + assert records[1] == {"id": "b"} + assert records[2] == {"id": "c"} + + def test_iter_on_empty_table(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + records = list(table) + + assert records == [] + + +class TestTableEmptyTupleKeyRejection: + """Tests for empty tuple key rejection.""" + + def test_get_empty_tuple_raises(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice"}\n') + table = Table(table_path, key="id") + + with pytest.raises(InvalidKeyError, match="empty tuple"): + _ = table.get(()) + + def test_has_empty_tuple_raises(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice"}\n') + table = Table(table_path, key="id") + + with pytest.raises(InvalidKeyError, match="empty tuple"): + _ = table.has(()) + + def test_delete_empty_tuple_raises(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice"}\n') + table = Table(table_path, key="id") + + with pytest.raises(InvalidKeyError, match="arity mismatch"): + _ = table.delete(()) + + +class TestTableItems: + def test_items_returns_key_value_pairs(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n{"id": "bob", "v": 2}\n') + table = Table(table_path, key="id") + + items = table.items() + + assert len(items) == 2 + assert items[0] == ("alice", {"id": "alice", "v": 1}) + assert items[1] == ("bob", {"id": "bob", "v": 2}) + + def test_items_in_key_order(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + # Write in reverse order + _ = table_path.write_text('{"id": "c"}\n{"id": "a"}\n{"id": "b"}\n') + table = Table(table_path, key="id") + + items = table.items() + + assert [k for k, _ in items] == ["a", "b", "c"] + + def test_items_empty_table(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + items = table.items() + + assert items == [] + + +class TestTableReload: + def test_reload_updates_state(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + table = Table(table_path, key="id", auto_reload=False) + + assert table.get("alice") == {"id": "alice", "v": 1} + + # Modify file externally + _ = table_path.write_text('{"id": "alice", "v": 2}\n') + + # Before reload, still sees old value + assert table.get("alice") == {"id": "alice", "v": 1} + + # After reload, sees new value + table.reload() + assert table.get("alice") == {"id": "alice", "v": 2} + + def test_reload_adds_new_records(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": 
"alice"}\n') + table = Table(table_path, key="id", auto_reload=False) + + assert table.count() == 1 + + # Add record externally + with table_path.open("a") as f: + _ = f.write('{"id": "bob"}\n') + + table.reload() + + assert table.count() == 2 + assert "bob" in table + + def test_reload_clears_sorted_keys_cache(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "c"}\n{"id": "a"}\n') + table = Table(table_path, key="id", auto_reload=False) + + # Access keys to populate cache + keys1 = table.keys() + assert keys1 == ["a", "c"] + + # Add record externally + with table_path.open("a") as f: + _ = f.write('{"id": "b"}\n') + + table.reload() + + keys2 = table.keys() + assert keys2 == ["a", "b", "c"] + + +class TestFileSystemEdgeCases: + """Tests for edge cases using FakeFileSystem. + + Note: The FakeFileSystem is only used for stat, open_locked, atomic_replace, + and ensure_parent_dir operations. The initial _load() still reads from the + real filesystem. Tests must create real files on disk when testing loading. + + Some tests access protected members to verify internal state changes; this + is intentional for testing implementation details. + """ + + def test_load_empty_file_with_header_but_no_ops(self, tmp_path: "Path") -> None: + """File with header but no operations has empty state.""" + fake_fs = FakeFileSystem() + table_path = tmp_path / "test.jsonlt" + # Create real file on disk for _load() + _ = table_path.write_bytes(b'{"$jsonlt":{"version":1,"key":"id"}}\n') + # Also set in fake_fs for stat operations + fake_fs.set_content(table_path, b'{"$jsonlt":{"version":1,"key":"id"}}\n') + + table = Table(table_path, _fs=fake_fs) + + assert table.count() == 0 + assert table.keys() == [] + + def test_load_from_content_empty(self, tmp_path: "Path") -> None: + """_load_from_content with empty bytes clears state.""" + fake_fs = FakeFileSystem() + table_path = tmp_path / "test.jsonlt" + # Create real file on disk for _load() + _ = table_path.write_bytes(b'{"id":"alice"}\n') + # Also set in fake_fs for stat operations + fake_fs.set_content(table_path, b'{"id":"alice"}\n') + + table = Table(table_path, key="id", _fs=fake_fs) + assert table.count() == 1 + + # Simulate reload with empty content (testing internal method) + table._load_from_content(b"") # noqa: SLF001 # pyright: ignore[reportPrivateUsage] + assert table._state == {} # noqa: SLF001 # pyright: ignore[reportPrivateUsage] + + def test_resolve_key_specifier_empty_no_key(self, tmp_path: "Path") -> None: + """Empty file with no key specifier returns None.""" + fake_fs = FakeFileSystem() + table_path = tmp_path / "test.jsonlt" + # File does not exist - table should be empty + + table = Table(table_path, _fs=fake_fs) + + assert table.key_specifier is None + assert table.count() == 0 + + def test_reload_if_changed_stat_fails(self, tmp_path: "Path") -> None: + """_reload_if_changed raises when stat fails during reload.""" + fake_fs = FakeFileSystem() + table_path = tmp_path / "test.jsonlt" + # Create real file on disk for _load() + _ = table_path.write_bytes(b'{"id":"alice"}\n') + # Also set in fake_fs for stat operations + fake_fs.set_content(table_path, b'{"id":"alice"}\n') + + table = Table(table_path, key="id", _fs=fake_fs) + assert table.count() == 1 + + # Make stat fail + fake_fs.fail_stat.add(table_path) + + # _reload_if_changed should raise FileError since stat fails + # _load() uses path.exists() + read_table_file() but _update_file_stats() fails + with pytest.raises(FileError, 
match="simulated stat error"): + # Testing internal method + table._reload_if_changed(0.0, 0) # noqa: SLF001 # pyright: ignore[reportPrivateUsage] + + def test_write_file_not_found_then_exists(self, tmp_path: "Path") -> None: + """put() creates the file in the fake filesystem on the first write.""" + fake_fs = FakeFileSystem() + table_path = tmp_path / "test.jsonlt" + + table = Table(table_path, key="id", _fs=fake_fs) + + # First put should create the file in the fake filesystem + table.put({"id": "alice"}) + + assert table.get("alice") == {"id": "alice"} + + def test_try_update_stats_ignores_file_error(self, tmp_path: "Path") -> None: + """_try_update_stats silently ignores FileError and preserves stats.""" + fake_fs = FakeFileSystem() + table_path = tmp_path / "test.jsonlt" + # Create real file on disk for _load() + _ = table_path.write_bytes(b'{"id":"alice"}\n') + # Also set in fake_fs for stat operations + fake_fs.set_content(table_path, b'{"id":"alice"}\n') + + table = Table(table_path, key="id", _fs=fake_fs) + # Capture stats via internal attributes (testing implementation detail) + old_mtime = table._file_mtime # noqa: SLF001 # pyright: ignore[reportPrivateUsage] + old_size = table._file_size # noqa: SLF001 # pyright: ignore[reportPrivateUsage] + + # Make stat fail + fake_fs.fail_stat.add(table_path) + + # _try_update_stats should not raise + table._try_update_stats() # noqa: SLF001 # pyright: ignore[reportPrivateUsage] + + # Stats should remain unchanged + assert table._file_mtime == old_mtime # noqa: SLF001 # pyright: ignore[reportPrivateUsage] + assert table._file_size == old_size # noqa: SLF001 # pyright: ignore[reportPrivateUsage] + + def test_auto_reload_disabled_uses_cache(self, tmp_path: "Path") -> None: + """Table with auto_reload=False uses cached state.""" + fake_fs = FakeFileSystem() + table_path = tmp_path / "test.jsonlt" + # Create real file on disk for _load() + _ = table_path.write_bytes(b'{"id":"alice"}\n') + # Also set in fake_fs for stat operations + fake_fs.set_content(table_path, b'{"id":"alice"}\n') + + table = Table(table_path, key="id", auto_reload=False, _fs=fake_fs) + assert table.get("alice") == {"id": "alice"} + + # Make stat fail - with auto_reload=False, get() never consults stat + fake_fs.fail_stat.add(table_path) + + # Should still be able to read from cache since auto_reload is disabled + assert table.get("alice") == {"id": "alice"} + + def test_clear_on_file_with_header(self, tmp_path: "Path") -> None: + """clear() uses atomic_replace when the file has a header.""" + fake_fs = FakeFileSystem() + table_path = tmp_path / "test.jsonlt" + # Create real file on disk for _load() + _ = table_path.write_bytes( + b'{"$jsonlt":{"version":1,"key":"id"}}\n{"id":"alice"}\n' + ) + # Also set in fake_fs for stat operations + fake_fs.set_content( + table_path, b'{"$jsonlt":{"version":1,"key":"id"}}\n{"id":"alice"}\n' + ) + + table = Table(table_path, _fs=fake_fs) + assert table.count() == 1 + + # clear() uses _fs.stat to check existence, then _fs.atomic_replace + table.clear() + + # Count should be 0 after clear + assert table.count() == 0 + + # File should exist with just header via atomic_replace + assert table_path in fake_fs.files + content = fake_fs.get_content(table_path) + assert b'"$jsonlt"' in content + assert b'"alice"' not in content + + def test_compact_recreates_deleted_file(self, tmp_path: "Path") -> None: + """compact() recreates the file when there is in-memory state.""" + fake_fs = FakeFileSystem() + table_path = tmp_path / "test.jsonlt" + + # Create table and add record (uses 
fake_fs for write) + table = Table(table_path, key="id", _fs=fake_fs) + table.put({"id": "alice"}) + + # Delete the file from fake_fs + del fake_fs.files[table_path] + + # Compact should recreate the file via atomic_replace + table.compact() + + assert table_path in fake_fs.files diff --git a/tests/unit/test_transaction.py b/tests/unit/test_transaction.py new file mode 100644 index 0000000..0505947 --- /dev/null +++ b/tests/unit/test_transaction.py @@ -0,0 +1,1086 @@ +from pathlib import Path +from typing import TYPE_CHECKING + +import pytest + +from jsonlt import ( + ConflictError, + InvalidKeyError, + LimitError, + Table, + Transaction, + TransactionError, +) + +if TYPE_CHECKING: + from os import stat_result + + from jsonlt._json import JSONObject + + +class TestTransactionCreation: + def test_transaction_returns_transaction_object(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + tx = table.transaction() + + assert isinstance(tx, Transaction) + tx.abort() + + def test_transaction_requires_key_specifier(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path) + + with pytest.raises(InvalidKeyError, match="key specifier is required"): + _ = table.transaction() + + def test_nested_transaction_rejected(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + tx = table.transaction() + try: + with pytest.raises(TransactionError, match="already active"): + _ = table.transaction() + finally: + tx.abort() + + +class TestTransactionSnapshotIsolation: + def test_transaction_sees_initial_state(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + table = Table(table_path, key="id") + + with table.transaction() as tx: + assert tx.get("alice") == {"id": "alice", "v": 1} + + def test_transaction_sees_own_writes(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + with table.transaction() as tx: + tx.put({"id": "alice", "v": 1}) + assert tx.get("alice") == {"id": "alice", "v": 1} + assert tx.has("alice") is True + + def test_transaction_snapshot_is_isolated(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + table = Table(table_path, key="id") + + with table.transaction() as tx: + # External modification + _ = table_path.write_text('{"id": "alice", "v": 99}\n') + # Transaction should still see original value + assert tx.get("alice") == {"id": "alice", "v": 1} + + +class TestTransactionReadOperations: + def test_get_returns_none_for_nonexistent_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + with table.transaction() as tx: + assert tx.get("nonexistent") is None + + def test_has_returns_false_for_nonexistent_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + with table.transaction() as tx: + assert tx.has("nonexistent") is False + + def test_all_returns_records_in_key_order(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "c"}\n{"id": "a"}\n{"id": "b"}\n') + table = Table(table_path, key="id") + + with table.transaction() as tx: + records = tx.all() + assert len(records) == 3 + assert [r["id"] for r in records] == ["a", "b", 
"c"] + + def test_keys_returns_keys_in_order(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "c"}\n{"id": "a"}\n{"id": "b"}\n') + table = Table(table_path, key="id") + + with table.transaction() as tx: + assert tx.keys() == ["a", "b", "c"] + + def test_count_returns_record_count(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "a"}\n{"id": "b"}\n') + table = Table(table_path, key="id") + + with table.transaction() as tx: + assert tx.count() == 2 + + def test_find_matches_predicate(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + content = '{"id": 1, "role": "admin"}\n' + content += '{"id": 2, "role": "user"}\n' + content += '{"id": 3, "role": "admin"}\n' + _ = table_path.write_text(content) + table = Table(table_path, key="id") + + with table.transaction() as tx: + results = tx.find(lambda r: r["role"] == "admin") + assert len(results) == 2 + assert results[0]["id"] == 1 + assert results[1]["id"] == 3 + + def test_find_with_limit(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "a"}\n{"id": "b"}\n{"id": "c"}\n') + table = Table(table_path, key="id") + + with table.transaction() as tx: + results = tx.find(lambda _: True, limit=2) + assert len(results) == 2 + + def test_find_one_returns_first_match(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + content = '{"id": 1, "role": "user"}\n' + content += '{"id": 2, "role": "admin"}\n' + content += '{"id": 3, "role": "admin"}\n' + _ = table_path.write_text(content) + table = Table(table_path, key="id") + + with table.transaction() as tx: + result = tx.find_one(lambda r: r["role"] == "admin") + assert result is not None + assert result["id"] == 2 + + def test_find_one_returns_none_when_no_match(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "a", "role": "user"}\n') + table = Table(table_path, key="id") + + with table.transaction() as tx: + result = tx.find_one(lambda r: r["role"] == "admin") + assert result is None + + +class TestTransactionWriteOperations: + def test_put_updates_snapshot(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + with table.transaction() as tx: + tx.put({"id": "alice", "v": 1}) + assert tx.get("alice") == {"id": "alice", "v": 1} + assert tx.count() == 1 + + def test_put_overwrites_existing(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + table = Table(table_path, key="id") + + with table.transaction() as tx: + tx.put({"id": "alice", "v": 2}) + assert tx.get("alice") == {"id": "alice", "v": 2} + + def test_put_isolates_from_caller_mutations(self, tmp_path: "Path") -> None: + """Put creates a deep copy, so caller mutations don't affect transaction.""" + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + with table.transaction() as tx: + record: JSONObject = {"id": "alice", "items": [1, 2, 3]} + tx.put(record) + # Mutate the original record after putting + items = record["items"] + assert isinstance(items, list) + items.append(4) + record["name"] = "modified" + # Transaction should see original value (before mutation) + result = tx.get("alice") + assert result is not None + assert result == {"id": "alice", "items": [1, 2, 3]} + + def 
test_delete_updates_snapshot(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + table = Table(table_path, key="id") + + with table.transaction() as tx: + result = tx.delete("alice") + assert result is True + assert tx.has("alice") is False + assert tx.count() == 0 + + def test_delete_nonexistent_returns_false(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + with table.transaction() as tx: + result = tx.delete("nonexistent") + assert result is False + + def test_put_validates_record(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + with ( + table.transaction() as tx, + pytest.raises(InvalidKeyError, match="missing required key field"), + ): + tx.put({"name": "alice"}) + + def test_put_rejects_dollar_fields(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + with ( + table.transaction() as tx, + pytest.raises(InvalidKeyError, match="reserved field name"), + ): + tx.put({"id": "alice", "$meta": "value"}) + + def test_delete_validates_key_arity(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key=("org", "id")) + + with ( + table.transaction() as tx, + pytest.raises(InvalidKeyError, match="key arity mismatch"), + ): + _ = tx.delete("alice") + + def test_put_key_length_limit_raises(self, tmp_path: "Path") -> None: + """Put with key exceeding 1024 bytes raises LimitError.""" + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + long_key = "x" * 1030 # > 1024 bytes when serialized + + with ( + table.transaction() as tx, + pytest.raises(LimitError, match="key length"), + ): + tx.put({"id": long_key}) + + def test_put_record_size_limit_raises(self, tmp_path: "Path") -> None: + """Put with record exceeding 1 MiB raises LimitError.""" + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + large_data = "x" * (1024 * 1024 + 1000) + + with ( + table.transaction() as tx, + pytest.raises(LimitError, match="record size"), + ): + tx.put({"id": "test", "data": large_data}) + + +class TestTransactionCommit: + def test_commit_persists_writes(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + with table.transaction() as tx: + tx.put({"id": "alice", "v": 1}) + + # After commit, table should see the record + assert table.get("alice") == {"id": "alice", "v": 1} + + def test_commit_persists_delete(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + table = Table(table_path, key="id") + + with table.transaction() as tx: + _ = tx.delete("alice") + + assert table.has("alice") is False + + def test_commit_writes_to_file(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + with table.transaction() as tx: + tx.put({"id": "alice", "v": 1}) + + content = table_path.read_text() + assert '"id":"alice"' in content + assert '"v":1' in content + + def test_empty_buffer_commit_succeeds(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + table = Table(table_path, key="id") + + with table.transaction() as tx: + _ = tx.get("alice") # Read-only, no writes + + # Should not 
raise, table unchanged + assert table.get("alice") == {"id": "alice", "v": 1} + + def test_multiple_writes_committed_together(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + with table.transaction() as tx: + tx.put({"id": "alice", "v": 1}) + tx.put({"id": "bob", "v": 2}) + tx.put({"id": "carol", "v": 3}) + + assert table.count() == 3 + assert table.keys() == ["alice", "bob", "carol"] + + def test_commit_succeeds_when_stat_fails_after_write( + self, tmp_path: "Path", monkeypatch: "pytest.MonkeyPatch" + ) -> None: + """Verify commit succeeds even if stat fails after write. + + The implementation catches OSError when updating file stats after + a successful write, ensuring durability is preserved. + """ + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + # Track when file has content (write has occurred) + original_stat = Path.stat + + def stat_fails_if_file_has_content( + self: "Path", *, follow_symlinks: bool = True + ) -> "stat_result": + # First call stat to get the result + result = original_stat(self, follow_symlinks=follow_symlinks) + # If this is the table file and it has content, fail + if self == table_path and result.st_size > 0: + msg = "simulated stat failure" + raise OSError(msg) + return result + + monkeypatch.setattr(type(table_path), "stat", stat_fails_if_file_has_content) + + # Commit should succeed despite stat failure after write + with table.transaction() as tx: + tx.put({"id": "alice", "v": 1}) + + # Data should be written + assert table_path.read_text().strip().endswith('{"id":"alice","v":1}') + + +class TestTransactionAbort: + def test_abort_discards_writes(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + table = Table(table_path, key="id") + + tx = table.transaction() + tx.put({"id": "alice", "v": 2}) + tx.abort() + + # Table should still have original value + assert table.get("alice") == {"id": "alice", "v": 1} + + def test_abort_does_not_write_to_file(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + original_content = '{"id": "alice", "v": 1}\n' + _ = table_path.write_text(original_content) + table = Table(table_path, key="id") + + tx = table.transaction() + tx.put({"id": "bob", "v": 2}) + tx.abort() + + # File should be unchanged + content = table_path.read_text() + assert content == original_content + + +class TestTransactionContextManager: + def test_context_manager_commits_on_success(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + with table.transaction() as tx: + tx.put({"id": "alice", "v": 1}) + + assert table.get("alice") == {"id": "alice", "v": 1} + + def test_context_manager_aborts_on_exception(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + table = Table(table_path, key="id") + + class TestError(Exception): + pass + + def trigger_error() -> None: + raise TestError + + try: + with table.transaction() as tx: + tx.put({"id": "alice", "v": 2}) + trigger_error() + except TestError: + pass + + # Table should still have original value + assert table.get("alice") == {"id": "alice", "v": 1} + + def test_context_manager_does_not_suppress_exceptions( + self, tmp_path: "Path" + ) -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + class PropagateError(Exception): + pass + + def 
trigger_error() -> None: + raise PropagateError + + try: + with table.transaction(): + trigger_error() + except PropagateError: + pass # Expected - exception was not suppressed + + +class TestTransactionAfterCommitOrAbort: + def test_operations_fail_after_commit(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + tx = table.transaction() + tx.put({"id": "alice", "v": 1}) + tx.commit() + + with pytest.raises(TransactionError, match="already been committed"): + tx.put({"id": "bob", "v": 2}) + + def test_operations_fail_after_abort(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + tx = table.transaction() + tx.put({"id": "alice", "v": 1}) + tx.abort() + + with pytest.raises(TransactionError, match="already been committed"): + _ = tx.get("alice") + + def test_double_commit_fails(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + tx = table.transaction() + tx.commit() + + with pytest.raises(TransactionError, match="already been committed"): + tx.commit() + + def test_double_abort_fails(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + tx = table.transaction() + tx.abort() + + with pytest.raises(TransactionError, match="already been committed"): + tx.abort() + + def test_can_start_new_transaction_after_commit(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + with table.transaction() as tx1: + tx1.put({"id": "alice", "v": 1}) + + # Should be able to start a new transaction + with table.transaction() as tx2: + tx2.put({"id": "bob", "v": 2}) + + assert table.count() == 2 + + def test_exit_when_already_finalized_returns_false(self, tmp_path: "Path") -> None: + """__exit__ returns False immediately if transaction already finalized.""" + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + tx = table.transaction() + tx.commit() + + result = tx.__exit__(None, None, None) + assert result is False + + +class TestTransactionConflictDetection: + def test_conflict_same_key_update(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + table = Table(table_path, key="id") + + tx = table.transaction() + tx.put({"id": "alice", "v": 2}) + + # External modification + with table_path.open("a") as f: + _ = f.write('{"id": "alice", "v": 99}\n') + + with pytest.raises(ConflictError, match="conflict detected"): + tx.commit() + + def test_conflict_transaction_delete_vs_external_update( + self, tmp_path: "Path" + ) -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + table = Table(table_path, key="id") + + tx = table.transaction() + _ = tx.delete("alice") + + # External modification + with table_path.open("a") as f: + _ = f.write('{"id": "alice", "v": 99}\n') + + with pytest.raises(ConflictError, match="conflict detected"): + tx.commit() + + def test_conflict_transaction_update_vs_external_delete( + self, tmp_path: "Path" + ) -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + table = Table(table_path, key="id") + + tx = table.transaction() + tx.put({"id": "alice", "v": 2}) + + # External delete + with table_path.open("a") as f: + _ = f.write('{"id": "alice", "$deleted": true}\n') + + with 
pytest.raises(ConflictError, match="conflict detected"): + tx.commit() + + def test_conflict_both_create_same_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + tx = table.transaction() + tx.put({"id": "alice", "v": 1}) + + # External creation + with table_path.open("a") as f: + _ = f.write('{"id": "alice", "v": 99}\n') + + with pytest.raises(ConflictError, match="conflict detected"): + tx.commit() + + def test_conflict_both_delete_same_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + table = Table(table_path, key="id") + + tx = table.transaction() + _ = tx.delete("alice") + + # External delete + with table_path.open("a") as f: + _ = f.write('{"id": "alice", "$deleted": true}\n') + + with pytest.raises(ConflictError, match="conflict detected"): + tx.commit() + + def test_no_conflict_different_keys(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + content = '{"id": "alice", "v": 1}\n{"id": "bob", "v": 1}\n' + _ = table_path.write_text(content) + table = Table(table_path, key="id") + + tx = table.transaction() + tx.put({"id": "alice", "v": 2}) + + # External modification of different key + with table_path.open("a") as f: + _ = f.write('{"id": "bob", "v": 99}\n') + + # Should not raise + tx.commit() + + # Both changes should be visible + assert table.get("alice") == {"id": "alice", "v": 2} + assert table.get("bob") == {"id": "bob", "v": 99} + + def test_table_retains_external_state_on_conflict(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + table = Table(table_path, key="id") + + tx = table.transaction() + tx.put({"id": "alice", "v": 2}) + + # External modification + with table_path.open("a") as f: + _ = f.write('{"id": "alice", "v": 99}\n') + + with pytest.raises(ConflictError): + tx.commit() + + # Table should have external value + assert table.get("alice") == {"id": "alice", "v": 99} + + +class TestTransactionWithCompoundKeys: + def test_put_with_compound_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key=("org", "id")) + + with table.transaction() as tx: + tx.put({"org": "acme", "id": 1, "name": "alice"}) + assert tx.get(("acme", 1)) == {"org": "acme", "id": 1, "name": "alice"} + + def test_delete_with_compound_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"org": "acme", "id": 1, "name": "alice"}\n') + table = Table(table_path, key=("org", "id")) + + with table.transaction() as tx: + result = tx.delete(("acme", 1)) + assert result is True + assert tx.has(("acme", 1)) is False + + +class TestTransactionBufferDeduplication: + def test_multiple_puts_same_key_produces_single_line( + self, tmp_path: "Path" + ) -> None: + """Putting the same key multiple times should produce only one line per key.""" + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + with table.transaction() as tx: + tx.put({"id": "alice", "v": 1}) + tx.put({"id": "alice", "v": 2}) + tx.put({"id": "alice", "v": 3}) + + # Read raw file content and count lines + content = table_path.read_text() + lines = [line for line in content.split("\n") if line.strip()] + assert len(lines) == 1 + assert '"id":"alice"' in lines[0] + assert '"v":3' in lines[0] + + def test_multiple_puts_same_key_final_value_correct(self, tmp_path: 
"Path") -> None: + """The final value should be the last put for each key.""" + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + with table.transaction() as tx: + tx.put({"id": "alice", "v": 1}) + tx.put({"id": "alice", "v": 2}) + tx.put({"id": "alice", "v": 3}) + + assert table.get("alice") == {"id": "alice", "v": 3} + + def test_put_then_delete_same_key_produces_single_tombstone( + self, tmp_path: "Path" + ) -> None: + """Put then delete on same key should produce only one tombstone.""" + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + with table.transaction() as tx: + tx.put({"id": "alice", "v": 1}) + tx.put({"id": "alice", "v": 2}) + _ = tx.delete("alice") + + # Read raw file content + content = table_path.read_text() + lines = [line for line in content.split("\n") if line.strip()] + assert len(lines) == 1 + assert '"$deleted":true' in lines[0] + assert '"id":"alice"' in lines[0] + assert table.has("alice") is False + + def test_delete_then_put_same_key_produces_single_record( + self, tmp_path: "Path" + ) -> None: + """Delete then put on same key should produce only one record.""" + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + table = Table(table_path, key="id") + + with table.transaction() as tx: + _ = tx.delete("alice") + tx.put({"id": "alice", "v": 99}) + + # Read raw file content - should have original line + one new record + content = table_path.read_text() + lines = [line for line in content.split("\n") if line.strip()] + assert len(lines) == 2 # original + new record (not tombstone + record) + # The second line should be the record, not a tombstone + assert '"$deleted"' not in lines[1] + assert '"v":99' in lines[1] + assert table.get("alice") == {"id": "alice", "v": 99} + + def test_multiple_keys_produces_one_line_per_key(self, tmp_path: "Path") -> None: + """Multiple keys with multiple updates each should produce one line per key.""" + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + with table.transaction() as tx: + tx.put({"id": "alice", "v": 1}) + tx.put({"id": "bob", "v": 1}) + tx.put({"id": "alice", "v": 2}) + tx.put({"id": "bob", "v": 2}) + tx.put({"id": "alice", "v": 3}) + + content = table_path.read_text() + lines = [line for line in content.split("\n") if line.strip()] + assert len(lines) == 2 # One for alice, one for bob + assert table.get("alice") == {"id": "alice", "v": 3} + assert table.get("bob") == {"id": "bob", "v": 2} + + +class TestTransactionMagicMethods: + def test_len_returns_count(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "a"}\n{"id": "b"}\n{"id": "c"}\n') + table = Table(table_path, key="id") + + with table.transaction() as tx: + assert len(tx) == 3 + + def test_contains_with_existing_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice"}\n') + table = Table(table_path, key="id") + + with table.transaction() as tx: + assert "alice" in tx + + def test_contains_with_missing_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice"}\n') + table = Table(table_path, key="id") + + with table.transaction() as tx: + assert "bob" not in tx + + def test_contains_with_int_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": 1}\n') + table = Table(table_path, key="id") + 
+ with table.transaction() as tx: + assert 1 in tx + assert 2 not in tx + + def test_contains_with_tuple_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"a": 1, "b": "x"}\n') + table = Table(table_path, key=("a", "b")) + + with table.transaction() as tx: + assert (1, "x") in tx + assert (1, "y") not in tx + + def test_contains_with_invalid_type_returns_false(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice"}\n') + table = Table(table_path, key="id") + + with table.transaction() as tx: + # Non-key types should return False, not raise + assert 3.14 not in tx + assert None not in tx + assert ["list"] not in tx + assert {"dict": "value"} not in tx + + def test_contains_with_invalid_tuple_returns_false(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"a": 1, "b": "x"}\n') + table = Table(table_path, key=("a", "b")) + + with table.transaction() as tx: + # Tuple with invalid element types should return False + assert (1, 3.14) not in tx + assert (None, "x") not in tx + + def test_iter_yields_records_in_key_order(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + # Write in reverse order + _ = table_path.write_text('{"id": "c"}\n{"id": "a"}\n{"id": "b"}\n') + table = Table(table_path, key="id") + + with table.transaction() as tx: + records = list(tx) + + assert len(records) == 3 + assert records[0] == {"id": "a"} + assert records[1] == {"id": "b"} + assert records[2] == {"id": "c"} + + def test_iter_on_empty_transaction(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + with table.transaction() as tx: + records = list(tx) + assert records == [] + + def test_repr_active_transaction(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + tx = table.transaction() + try: + result = repr(tx) + assert "Transaction(" in result + # Use name to avoid Windows path separator issues + assert table_path.name in result + assert "key='id'" in result + assert "active" in result + finally: + tx.abort() + + def test_repr_finalized_transaction(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + tx = table.transaction() + tx.commit() + + result = repr(tx) + assert "Transaction(" in result + # Use name to avoid Windows path separator issues + assert table_path.name in result + assert "key='id'" in result + assert "finalized" in result + + def test_repr_with_tuple_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key=("org", "id")) + + tx = table.transaction() + try: + result = repr(tx) + assert "Transaction(" in result + assert "key=('org', 'id')" in result + finally: + tx.abort() + + +class TestTransactionItems: + def test_items_returns_key_value_pairs(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n{"id": "bob", "v": 2}\n') + table = Table(table_path, key="id") + + with table.transaction() as tx: + items = tx.items() + + assert len(items) == 2 + assert items[0] == ("alice", {"id": "alice", "v": 1}) + assert items[1] == ("bob", {"id": "bob", "v": 2}) + + def test_items_in_key_order(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + # Write in reverse order + _ = 
table_path.write_text('{"id": "c"}\n{"id": "a"}\n{"id": "b"}\n') + table = Table(table_path, key="id") + + with table.transaction() as tx: + items = tx.items() + assert [k for k, _ in items] == ["a", "b", "c"] + + def test_items_empty_transaction(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + with table.transaction() as tx: + items = tx.items() + assert items == [] + + def test_items_reflects_transaction_changes(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + table = Table(table_path, key="id") + + with table.transaction() as tx: + tx.put({"id": "bob", "v": 2}) + items = tx.items() + + assert len(items) == 2 + assert ("bob", {"id": "bob", "v": 2}) in items + + def test_items_on_finalized_transaction_raises(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + tx = table.transaction() + tx.commit() + + with pytest.raises(TransactionError, match="already been committed or aborted"): + _ = tx.items() + + +class TestTransactionEmptyTupleKeyRejection: + """Tests for empty tuple key rejection in transactions.""" + + def test_get_empty_tuple_raises(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice"}\n') + table = Table(table_path, key="id") + + with ( + table.transaction() as tx, + pytest.raises(InvalidKeyError, match="empty tuple"), + ): + _ = tx.get(()) + + def test_has_empty_tuple_raises(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice"}\n') + table = Table(table_path, key="id") + + with ( + table.transaction() as tx, + pytest.raises(InvalidKeyError, match="empty tuple"), + ): + _ = tx.has(()) + + def test_delete_empty_tuple_raises(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice"}\n') + table = Table(table_path, key="id") + + with ( + table.transaction() as tx, + pytest.raises(InvalidKeyError, match="arity mismatch"), + ): + _ = tx.delete(()) + + +class TestConflictErrorProperties: + def test_conflict_error_has_key_property(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + table = Table(table_path, key="id") + + tx = table.transaction() + tx.put({"id": "alice", "v": 2}) + + # Modify externally + _ = table_path.write_text('{"id": "alice", "v": 99}\n') + + with pytest.raises(ConflictError) as exc_info: + tx.commit() + + assert exc_info.value.key == "alice" + + def test_conflict_error_has_expected_property(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + table = Table(table_path, key="id") + + tx = table.transaction() + tx.put({"id": "alice", "v": 2}) + + # Modify externally + _ = table_path.write_text('{"id": "alice", "v": 99}\n') + + with pytest.raises(ConflictError) as exc_info: + tx.commit() + + assert exc_info.value.expected == {"id": "alice", "v": 1} + + def test_conflict_error_has_actual_property(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + table = Table(table_path, key="id") + + tx = table.transaction() + tx.put({"id": "alice", "v": 2}) + + # Modify externally + _ = table_path.write_text('{"id": "alice", "v": 99}\n') + + with 
pytest.raises(ConflictError) as exc_info: + tx.commit() + + assert exc_info.value.actual == {"id": "alice", "v": 99} + + def test_conflict_on_new_key_has_none_expected(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + table = Table(table_path, key="id") + + tx = table.transaction() + tx.put({"id": "alice", "v": 1}) + + # Create file with same key externally + _ = table_path.write_text('{"id": "alice", "v": 99}\n') + + with pytest.raises(ConflictError) as exc_info: + tx.commit() + + assert exc_info.value.key == "alice" + assert exc_info.value.expected is None + assert exc_info.value.actual == {"id": "alice", "v": 99} + + def test_conflict_on_deleted_key_has_none_actual(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + table = Table(table_path, key="id") + + tx = table.transaction() + _ = tx.delete("alice") + + # Delete externally (via tombstone) + _ = table_path.write_text( + '{"id": "alice", "v": 1}\n{"id": "alice", "$deleted": true}\n' + ) + + with pytest.raises(ConflictError) as exc_info: + tx.commit() + + assert exc_info.value.key == "alice" + assert exc_info.value.expected == {"id": "alice", "v": 1} + assert exc_info.value.actual is None + + def test_conflict_error_repr_shows_message_and_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"id": "alice", "v": 1}\n') + table = Table(table_path, key="id") + + tx = table.transaction() + tx.put({"id": "alice", "v": 2}) + + # Modify externally + _ = table_path.write_text('{"id": "alice", "v": 99}\n') + + with pytest.raises(ConflictError) as exc_info: + tx.commit() + + result = repr(exc_info.value) + assert "ConflictError(" in result + assert "key='alice'" in result + + def test_conflict_error_repr_with_tuple_key(self, tmp_path: "Path") -> None: + table_path = tmp_path / "test.jsonlt" + _ = table_path.write_text('{"org": "acme", "id": 1, "v": 1}\n') + table = Table(table_path, key=("org", "id")) + + tx = table.transaction() + tx.put({"org": "acme", "id": 1, "v": 2}) + + # Modify externally + _ = table_path.write_text('{"org": "acme", "id": 1, "v": 99}\n') + + with pytest.raises(ConflictError) as exc_info: + tx.commit() + + result = repr(exc_info.value) + assert "ConflictError(" in result + assert "key=('acme', 1)" in result diff --git a/tests/unit/test_writer.py b/tests/unit/test_writer.py new file mode 100644 index 0000000..e861e83 --- /dev/null +++ b/tests/unit/test_writer.py @@ -0,0 +1,228 @@ +from typing import TYPE_CHECKING + +import pytest + +from jsonlt._exceptions import FileError +from jsonlt._writer import append_line, append_lines, atomic_replace + +if TYPE_CHECKING: + from pathlib import Path + + +class TestAppendLine: + def test_appends_line_with_newline(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + _ = path.write_text("") + + append_line(path, '{"id":"alice"}') + + assert path.read_text() == '{"id":"alice"}\n' + + def test_appends_multiple_lines(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + _ = path.write_text("") + + append_line(path, '{"id":"alice"}') + append_line(path, '{"id":"bob"}') + + assert path.read_text() == '{"id":"alice"}\n{"id":"bob"}\n' + + def test_appends_to_existing_content(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + _ = path.write_text('{"id":"existing"}\n') + + append_line(path, '{"id":"new"}') + + assert path.read_text() == '{"id":"existing"}\n{"id":"new"}\n' + + def 
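test_appends_preserve_call_order(self, tmp_path: "Path") -> None: + """A small sketch: repeated appends land in call order, as implied by the + multi-line tests above (the loop and ids here are illustrative only).""" + path = tmp_path / "test.jsonlt" + _ = path.write_text("") + + for i in range(3): + append_line(path, f'{{"id":{i}}}') + + assert path.read_text() == '{"id":0}\n{"id":1}\n{"id":2}\n' + + def 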
test_creates_file_if_not_exists(self, tmp_path: "Path") -> None: + """Creates file if it doesn't exist.""" + path = tmp_path / "test.jsonlt" + + append_line(path, '{"id":"first"}') + + assert path.read_text() == '{"id":"first"}\n' + + def test_raises_file_error_on_failure(self, tmp_path: "Path") -> None: + path = tmp_path / "nonexistent" / "dir" / "test.jsonlt" + + with pytest.raises(FileError, match="cannot append to file"): + append_line(path, '{"id":"test"}') + + def test_appends_unicode_content(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + _ = path.write_text("") + + append_line(path, '{"name":"cafe\\u0301"}') + + assert path.read_text() == '{"name":"cafe\\u0301"}\n' + + def test_appends_empty_object(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + _ = path.write_text("") + + append_line(path, "{}") + + assert path.read_text() == "{}\n" + + def test_appends_header_line(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + _ = path.write_text("") + + append_line(path, '{"$jsonlt":{"version":1,"key":"id"}}') + + assert path.read_text() == '{"$jsonlt":{"version":1,"key":"id"}}\n' + + +class TestAppendLines: + def test_appends_multiple_lines(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + _ = path.write_text("") + + append_lines(path, ['{"id":"a"}', '{"id":"b"}', '{"id":"c"}']) + + assert path.read_text() == '{"id":"a"}\n{"id":"b"}\n{"id":"c"}\n' + + def test_empty_lines_is_noop(self, tmp_path: "Path") -> None: + """Empty list does nothing (early return).""" + path = tmp_path / "test.jsonlt" + _ = path.write_text("existing\n") + + append_lines(path, []) + + assert path.read_text() == "existing\n" + + def test_appends_to_existing_content(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + _ = path.write_text('{"id":"existing"}\n') + + append_lines(path, ['{"id":"new1"}', '{"id":"new2"}']) + + expected = '{"id":"existing"}\n{"id":"new1"}\n{"id":"new2"}\n' + assert path.read_text() == expected + + def test_raises_file_error_on_failure(self, tmp_path: "Path") -> None: + path = tmp_path / "nonexistent" / "dir" / "test.jsonlt" + + with pytest.raises(FileError, match="cannot append to file"): + append_lines(path, ['{"id":"test"}']) + + +class TestAtomicReplace: + def test_replaces_file_contents(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + _ = path.write_text('{"id":"old"}\n') + + atomic_replace(path, ['{"id":"new"}']) + + assert path.read_text() == '{"id":"new"}\n' + + def test_writes_multiple_lines(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + _ = path.write_text("") + + atomic_replace(path, ['{"$jsonlt":{"version":1}}', '{"id":"alice"}']) + + assert path.read_text() == '{"$jsonlt":{"version":1}}\n{"id":"alice"}\n' + + def test_creates_file_if_not_exists(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + + atomic_replace(path, ['{"id":"new"}']) + + assert path.read_text() == '{"id":"new"}\n' + + def test_empty_lines_creates_empty_file(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + _ = path.write_text('{"id":"old"}\n') + + atomic_replace(path, []) + + assert path.read_text() == "" + + def test_raises_file_error_on_failure(self, tmp_path: "Path") -> None: + path = tmp_path / "nonexistent" / "dir" / "test.jsonlt" + + with pytest.raises(FileError, match="cannot write file atomically"): + atomic_replace(path, ['{"id":"test"}']) + + def test_no_temp_file_left_on_success(self, tmp_path: "Path") -> None: + path = tmp_path / 
"test.jsonlt" + + atomic_replace(path, ['{"id":"test"}']) + + # Check no .tmp files exist + tmp_files = list(tmp_path.glob(".jsonlt_*.tmp")) + assert tmp_files == [] + + def test_overwrites_larger_file(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + # Write many lines + original_lines = [f'{{"id":"{i}"}}' for i in range(100)] + _ = path.write_text("\n".join(original_lines) + "\n") + + atomic_replace(path, ['{"id":"only"}']) + + assert path.read_text() == '{"id":"only"}\n' + + def test_replaces_with_larger_content(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + _ = path.write_text('{"id":"small"}\n') + + new_lines = [f'{{"id":"{i}"}}' for i in range(100)] + atomic_replace(path, new_lines) + + content = path.read_text() + lines = content.strip().split("\n") + assert len(lines) == 100 + + def test_writes_unicode_content(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + + atomic_replace(path, ['{"name":"cafe\\u0301"}', '{"emoji":"\\ud83d\\ude00"}']) + + content = path.read_text() + assert '{"name":"cafe\\u0301"}' in content + assert '{"emoji":"\\ud83d\\ude00"}' in content + + def test_file_remains_accessible_after_atomic_replace( + self, tmp_path: "Path" + ) -> None: + path = tmp_path / "test.jsonlt" + _ = path.write_text('{"id":"old"}\n') + + atomic_replace(path, ['{"id":"new"}']) + + # Verify file is still readable/writable + assert path.read_text() == '{"id":"new"}\n' + _ = path.write_text('{"id":"verify"}\n') + assert path.read_text() == '{"id":"verify"}\n' + + def test_single_line_write(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + + atomic_replace(path, ['{"single":"line"}']) + + assert path.read_text() == '{"single":"line"}\n' + + def test_header_only_write(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + + atomic_replace(path, ['{"$jsonlt":{"version":1,"key":"id"}}']) + + assert path.read_text() == '{"$jsonlt":{"version":1,"key":"id"}}\n' + + def test_no_partial_write_on_failure(self, tmp_path: "Path") -> None: + path = tmp_path / "test.jsonlt" + original_content = '{"id":"original"}\n' + _ = path.write_text(original_content) + + # Try to write to a path where the directory doesn't exist + # This will fail when trying to create the temp file + bad_path = tmp_path / "nonexistent_subdir" / "test.jsonlt" + + with pytest.raises(FileError): + atomic_replace(bad_path, ['{"id":"new"}']) + + # Original file should be unchanged + assert path.read_text() == original_content diff --git a/uv.lock b/uv.lock index 61aced1..3ba36c0 100644 --- a/uv.lock +++ b/uv.lock @@ -42,8 +42,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/95/eb/ae5cab15efa365e13d56b31b0d085a62600298bf398a7986f8388f73b598/aiohttp-3.13.2-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:b395bbca716c38bef3c764f187860e88c724b342c26275bc03e906142fc5964f", size = 1542025, upload-time = "2025-10-28T20:55:51.861Z" }, { url = "https://files.pythonhosted.org/packages/e9/2d/1683e8d67ec72d911397fe4e575688d2a9b8f6a6e03c8fdc9f3fd3d4c03f/aiohttp-3.13.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:204ffff2426c25dfda401ba08da85f9c59525cdc42bda26660463dd1cbcfec6f", size = 1714918, upload-time = "2025-10-28T20:55:53.515Z" }, { url = "https://files.pythonhosted.org/packages/99/a2/ffe8e0e1c57c5e542d47ffa1fcf95ef2b3ea573bf7c4d2ee877252431efc/aiohttp-3.13.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:05c4dd3c48fb5f15db31f57eb35374cb0c09afdde532e7fb70a75aede0ed30f6", size = 1656113, upload-time = 
"2025-10-28T20:55:55.438Z" }, - { url = "https://files.pythonhosted.org/packages/0d/42/d511aff5c3a2b06c09d7d214f508a4ad8ac7799817f7c3d23e7336b5e896/aiohttp-3.13.2-cp310-cp310-win32.whl", hash = "sha256:e574a7d61cf10351d734bcddabbe15ede0eaa8a02070d85446875dc11189a251", size = 432290, upload-time = "2025-10-28T20:55:56.96Z" }, - { url = "https://files.pythonhosted.org/packages/8b/ea/1c2eb7098b5bad4532994f2b7a8228d27674035c9b3234fe02c37469ef14/aiohttp-3.13.2-cp310-cp310-win_amd64.whl", hash = "sha256:364f55663085d658b8462a1c3f17b2b84a5c2e1ba858e1b79bff7b2e24ad1514", size = 455075, upload-time = "2025-10-28T20:55:58.373Z" }, { url = "https://files.pythonhosted.org/packages/35/74/b321e7d7ca762638cdf8cdeceb39755d9c745aff7a64c8789be96ddf6e96/aiohttp-3.13.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4647d02df098f6434bafd7f32ad14942f05a9caa06c7016fdcc816f343997dd0", size = 743409, upload-time = "2025-10-28T20:56:00.354Z" }, { url = "https://files.pythonhosted.org/packages/99/3d/91524b905ec473beaf35158d17f82ef5a38033e5809fe8742e3657cdbb97/aiohttp-3.13.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e3403f24bcb9c3b29113611c3c16a2a447c3953ecf86b79775e7be06f7ae7ccb", size = 497006, upload-time = "2025-10-28T20:56:01.85Z" }, { url = "https://files.pythonhosted.org/packages/eb/d3/7f68bc02a67716fe80f063e19adbd80a642e30682ce74071269e17d2dba1/aiohttp-3.13.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:43dff14e35aba17e3d6d5ba628858fb8cb51e30f44724a2d2f0c75be492c55e9", size = 493195, upload-time = "2025-10-28T20:56:03.314Z" }, @@ -59,8 +57,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/30/be/f6a7a426e02fc82781afd62016417b3948e2207426d90a0e478790d1c8a4/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:2ca6ffef405fc9c09a746cb5d019c1672cd7f402542e379afc66b370833170cf", size = 1595126, upload-time = "2025-10-28T20:56:20.836Z" }, { url = "https://files.pythonhosted.org/packages/e5/c7/8e22d5d28f94f67d2af496f14a83b3c155d915d1fe53d94b66d425ec5b42/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:47f438b1a28e926c37632bff3c44df7d27c9b57aaf4e34b1def3c07111fdb782", size = 1800665, upload-time = "2025-10-28T20:56:22.922Z" }, { url = "https://files.pythonhosted.org/packages/d1/11/91133c8b68b1da9fc16555706aa7276fdf781ae2bb0876c838dd86b8116e/aiohttp-3.13.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9acda8604a57bb60544e4646a4615c1866ee6c04a8edef9b8ee6fd1d8fa2ddc8", size = 1739532, upload-time = "2025-10-28T20:56:25.924Z" }, - { url = "https://files.pythonhosted.org/packages/17/6b/3747644d26a998774b21a616016620293ddefa4d63af6286f389aedac844/aiohttp-3.13.2-cp311-cp311-win32.whl", hash = "sha256:868e195e39b24aaa930b063c08bb0c17924899c16c672a28a65afded9c46c6ec", size = 431876, upload-time = "2025-10-28T20:56:27.524Z" }, - { url = "https://files.pythonhosted.org/packages/c3/63/688462108c1a00eb9f05765331c107f95ae86f6b197b865d29e930b7e462/aiohttp-3.13.2-cp311-cp311-win_amd64.whl", hash = "sha256:7fd19df530c292542636c2a9a85854fab93474396a52f1695e799186bbd7f24c", size = 456205, upload-time = "2025-10-28T20:56:29.062Z" }, { url = "https://files.pythonhosted.org/packages/29/9b/01f00e9856d0a73260e86dd8ed0c2234a466c5c1712ce1c281548df39777/aiohttp-3.13.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:b1e56bab2e12b2b9ed300218c351ee2a3d8c8fdab5b1ec6193e11a817767e47b", size = 737623, upload-time = "2025-10-28T20:56:30.797Z" }, { url = 
"https://files.pythonhosted.org/packages/5a/1b/4be39c445e2b2bd0aab4ba736deb649fabf14f6757f405f0c9685019b9e9/aiohttp-3.13.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:364e25edaabd3d37b1db1f0cbcee8c73c9a3727bfa262b83e5e4cf3489a2a9dc", size = 492664, upload-time = "2025-10-28T20:56:32.708Z" }, { url = "https://files.pythonhosted.org/packages/28/66/d35dcfea8050e131cdd731dff36434390479b4045a8d0b9d7111b0a968f1/aiohttp-3.13.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c5c94825f744694c4b8db20b71dba9a257cd2ba8e010a803042123f3a25d50d7", size = 491808, upload-time = "2025-10-28T20:56:34.57Z" }, @@ -76,8 +72,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/03/d3/ddd348f8a27a634daae39a1b8e291ff19c77867af438af844bf8b7e3231b/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:9eb3e33fdbe43f88c3c75fa608c25e7c47bbd80f48d012763cb67c47f39a7e16", size = 1555132, upload-time = "2025-10-28T20:56:52.568Z" }, { url = "https://files.pythonhosted.org/packages/39/b8/46790692dc46218406f94374903ba47552f2f9f90dad554eed61bfb7b64c/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9434bc0d80076138ea986833156c5a48c9c7a8abb0c96039ddbb4afc93184169", size = 1764802, upload-time = "2025-10-28T20:56:54.292Z" }, { url = "https://files.pythonhosted.org/packages/ba/e4/19ce547b58ab2a385e5f0b8aa3db38674785085abcf79b6e0edd1632b12f/aiohttp-3.13.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ff15c147b2ad66da1f2cbb0622313f2242d8e6e8f9b79b5206c84523a4473248", size = 1719512, upload-time = "2025-10-28T20:56:56.428Z" }, - { url = "https://files.pythonhosted.org/packages/70/30/6355a737fed29dcb6dfdd48682d5790cb5eab050f7b4e01f49b121d3acad/aiohttp-3.13.2-cp312-cp312-win32.whl", hash = "sha256:27e569eb9d9e95dbd55c0fc3ec3a9335defbf1d8bc1d20171a49f3c4c607b93e", size = 426690, upload-time = "2025-10-28T20:56:58.736Z" }, - { url = "https://files.pythonhosted.org/packages/0a/0d/b10ac09069973d112de6ef980c1f6bb31cb7dcd0bc363acbdad58f927873/aiohttp-3.13.2-cp312-cp312-win_amd64.whl", hash = "sha256:8709a0f05d59a71f33fd05c17fc11fcb8c30140506e13c2f5e8ee1b8964e1b45", size = 453465, upload-time = "2025-10-28T20:57:00.795Z" }, { url = "https://files.pythonhosted.org/packages/bf/78/7e90ca79e5aa39f9694dcfd74f4720782d3c6828113bb1f3197f7e7c4a56/aiohttp-3.13.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7519bdc7dfc1940d201651b52bf5e03f5503bda45ad6eacf64dda98be5b2b6be", size = 732139, upload-time = "2025-10-28T20:57:02.455Z" }, { url = "https://files.pythonhosted.org/packages/db/ed/1f59215ab6853fbaa5c8495fa6cbc39edfc93553426152b75d82a5f32b76/aiohttp-3.13.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:088912a78b4d4f547a1f19c099d5a506df17eacec3c6f4375e2831ec1d995742", size = 490082, upload-time = "2025-10-28T20:57:04.784Z" }, { url = "https://files.pythonhosted.org/packages/68/7b/fe0fe0f5e05e13629d893c760465173a15ad0039c0a5b0d0040995c8075e/aiohttp-3.13.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5276807b9de9092af38ed23ce120539ab0ac955547b38563a9ba4f5b07b95293", size = 489035, upload-time = "2025-10-28T20:57:06.894Z" }, @@ -93,8 +87,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/34/cb/90f15dd029f07cebbd91f8238a8b363978b530cd128488085b5703683594/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:550bf765101ae721ee1d37d8095f47b1f220650f85fe1af37a90ce75bab89d04", size = 1550093, upload-time = "2025-10-28T20:57:26.257Z" }, { url = 
"https://files.pythonhosted.org/packages/69/46/12dce9be9d3303ecbf4d30ad45a7683dc63d90733c2d9fe512be6716cd40/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:fe91b87fc295973096251e2d25a811388e7d8adf3bd2b97ef6ae78bc4ac6c476", size = 1758084, upload-time = "2025-10-28T20:57:28.349Z" }, { url = "https://files.pythonhosted.org/packages/f9/c8/0932b558da0c302ffd639fc6362a313b98fdf235dc417bc2493da8394df7/aiohttp-3.13.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e0c8e31cfcc4592cb200160344b2fb6ae0f9e4effe06c644b5a125d4ae5ebe23", size = 1716987, upload-time = "2025-10-28T20:57:30.233Z" }, - { url = "https://files.pythonhosted.org/packages/5d/8b/f5bd1a75003daed099baec373aed678f2e9b34f2ad40d85baa1368556396/aiohttp-3.13.2-cp313-cp313-win32.whl", hash = "sha256:0740f31a60848d6edb296a0df827473eede90c689b8f9f2a4cdde74889eb2254", size = 425859, upload-time = "2025-10-28T20:57:32.105Z" }, - { url = "https://files.pythonhosted.org/packages/5d/28/a8a9fc6957b2cee8902414e41816b5ab5536ecf43c3b1843c10e82c559b2/aiohttp-3.13.2-cp313-cp313-win_amd64.whl", hash = "sha256:a88d13e7ca367394908f8a276b89d04a3652044612b9a408a0bb22a5ed976a1a", size = 452192, upload-time = "2025-10-28T20:57:34.166Z" }, { url = "https://files.pythonhosted.org/packages/9b/36/e2abae1bd815f01c957cbf7be817b3043304e1c87bad526292a0410fdcf9/aiohttp-3.13.2-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:2475391c29230e063ef53a66669b7b691c9bfc3f1426a0f7bcdf1216bdbac38b", size = 735234, upload-time = "2025-10-28T20:57:36.415Z" }, { url = "https://files.pythonhosted.org/packages/ca/e3/1ee62dde9b335e4ed41db6bba02613295a0d5b41f74a783c142745a12763/aiohttp-3.13.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:f33c8748abef4d8717bb20e8fb1b3e07c6adacb7fd6beaae971a764cf5f30d61", size = 490733, upload-time = "2025-10-28T20:57:38.205Z" }, { url = "https://files.pythonhosted.org/packages/1a/aa/7a451b1d6a04e8d15a362af3e9b897de71d86feac3babf8894545d08d537/aiohttp-3.13.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:ae32f24bbfb7dbb485a24b30b1149e2f200be94777232aeadba3eecece4d0aa4", size = 491303, upload-time = "2025-10-28T20:57:40.122Z" }, @@ -110,8 +102,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/19/e2/19bd4c547092b773caeb48ff5ae4b1ae86756a0ee76c16727fcfd281404b/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:ec7534e63ae0f3759df3a1ed4fa6bc8f75082a924b590619c0dd2f76d7043caa", size = 1544395, upload-time = "2025-10-28T20:58:01.914Z" }, { url = "https://files.pythonhosted.org/packages/cf/87/860f2803b27dfc5ed7be532832a3498e4919da61299b4a1f8eb89b8ff44d/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5b927cf9b935a13e33644cbed6c8c4b2d0f25b713d838743f8fe7191b33829c4", size = 1742965, upload-time = "2025-10-28T20:58:03.972Z" }, { url = "https://files.pythonhosted.org/packages/67/7f/db2fc7618925e8c7a601094d5cbe539f732df4fb570740be88ed9e40e99a/aiohttp-3.13.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:88d6c017966a78c5265d996c19cdb79235be5e6412268d7e2ce7dee339471b7a", size = 1697585, upload-time = "2025-10-28T20:58:06.189Z" }, - { url = "https://files.pythonhosted.org/packages/0c/07/9127916cb09bb38284db5036036042b7b2c514c8ebaeee79da550c43a6d6/aiohttp-3.13.2-cp314-cp314-win32.whl", hash = "sha256:f7c183e786e299b5d6c49fb43a769f8eb8e04a2726a2bd5887b98b5cc2d67940", size = 431621, upload-time = "2025-10-28T20:58:08.636Z" }, - { url = 
"https://files.pythonhosted.org/packages/fb/41/554a8a380df6d3a2bba8a7726429a23f4ac62aaf38de43bb6d6cde7b4d4d/aiohttp-3.13.2-cp314-cp314-win_amd64.whl", hash = "sha256:fe242cd381e0fb65758faf5ad96c2e460df6ee5b2de1072fe97e4127927e00b4", size = 457627, upload-time = "2025-10-28T20:58:11Z" }, { url = "https://files.pythonhosted.org/packages/c7/8e/3824ef98c039d3951cb65b9205a96dd2b20f22241ee17d89c5701557c826/aiohttp-3.13.2-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:f10d9c0b0188fe85398c61147bbd2a657d616c876863bfeff43376e0e3134673", size = 767360, upload-time = "2025-10-28T20:58:13.358Z" }, { url = "https://files.pythonhosted.org/packages/a4/0f/6a03e3fc7595421274fa34122c973bde2d89344f8a881b728fa8c774e4f1/aiohttp-3.13.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:e7c952aefdf2460f4ae55c5e9c3e80aa72f706a6317e06020f80e96253b1accd", size = 504616, upload-time = "2025-10-28T20:58:15.339Z" }, { url = "https://files.pythonhosted.org/packages/c6/aa/ed341b670f1bc8a6f2c6a718353d13b9546e2cef3544f573c6a1ff0da711/aiohttp-3.13.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c20423ce14771d98353d2e25e83591fa75dfa90a3c1848f3d7c68243b4fbded3", size = 509131, upload-time = "2025-10-28T20:58:17.693Z" }, @@ -127,8 +117,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/e7/80/7368bd0d06b16b3aba358c16b919e9c46cf11587dc572091031b0e9e3ef0/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:94f05348c4406450f9d73d38efb41d669ad6cd90c7ee194810d0eefbfa875a7a", size = 1617548, upload-time = "2025-10-28T20:58:43.674Z" }, { url = "https://files.pythonhosted.org/packages/7d/4b/a6212790c50483cb3212e507378fbe26b5086d73941e1ec4b56a30439688/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:fa4dcb605c6f82a80c7f95713c2b11c3b8e9893b3ebd2bc9bde93165ed6107be", size = 1817240, upload-time = "2025-10-28T20:58:45.787Z" }, { url = "https://files.pythonhosted.org/packages/ff/f7/ba5f0ba4ea8d8f3c32850912944532b933acbf0f3a75546b89269b9b7dde/aiohttp-3.13.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cf00e5db968c3f67eccd2778574cf64d8b27d95b237770aa32400bd7a1ca4f6c", size = 1762334, upload-time = "2025-10-28T20:58:47.936Z" }, - { url = "https://files.pythonhosted.org/packages/7e/83/1a5a1856574588b1cad63609ea9ad75b32a8353ac995d830bf5da9357364/aiohttp-3.13.2-cp314-cp314t-win32.whl", hash = "sha256:d23b5fe492b0805a50d3371e8a728a9134d8de5447dce4c885f5587294750734", size = 464685, upload-time = "2025-10-28T20:58:50.642Z" }, - { url = "https://files.pythonhosted.org/packages/9f/4d/d22668674122c08f4d56972297c51a624e64b3ed1efaa40187607a7cb66e/aiohttp-3.13.2-cp314-cp314t-win_amd64.whl", hash = "sha256:ff0a7b0a82a7ab905cbda74006318d1b12e37c797eb1b0d4eb3e316cf47f658f", size = 498093, upload-time = "2025-10-28T20:58:52.782Z" }, ] [[package]] @@ -638,9 +626,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/40/90/25b201b9c015dbc999a5baf475a257010471a1fa8c200c843fd4abbee725/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:42145cd2748ca39f32801dad54aeea10039da6f86e303659db90db1c4b614c8c", size = 228785, upload-time = "2025-10-06T05:35:37.949Z" }, { url = "https://files.pythonhosted.org/packages/84/f4/b5bc148df03082f05d2dd30c089e269acdbe251ac9a9cf4e727b2dbb8a3d/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:e2de870d16a7a53901e41b64ffdf26f2fbb8917b3e6ebf398098d72c5b20bd7f", size = 230312, upload-time = "2025-10-06T05:35:39.178Z" }, { url = 
"https://files.pythonhosted.org/packages/db/4b/87e95b5d15097c302430e647136b7d7ab2398a702390cf4c8601975709e7/frozenlist-1.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:20e63c9493d33ee48536600d1a5c95eefc870cd71e7ab037763d1fbb89cc51e7", size = 217650, upload-time = "2025-10-06T05:35:40.377Z" }, - { url = "https://files.pythonhosted.org/packages/e5/70/78a0315d1fea97120591a83e0acd644da638c872f142fd72a6cebee825f3/frozenlist-1.8.0-cp310-cp310-win32.whl", hash = "sha256:adbeebaebae3526afc3c96fad434367cafbfd1b25d72369a9e5858453b1bb71a", size = 39659, upload-time = "2025-10-06T05:35:41.863Z" }, - { url = "https://files.pythonhosted.org/packages/66/aa/3f04523fb189a00e147e60c5b2205126118f216b0aa908035c45336e27e4/frozenlist-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:667c3777ca571e5dbeb76f331562ff98b957431df140b54c85fd4d52eea8d8f6", size = 43837, upload-time = "2025-10-06T05:35:43.205Z" }, - { url = "https://files.pythonhosted.org/packages/39/75/1135feecdd7c336938bd55b4dc3b0dfc46d85b9be12ef2628574b28de776/frozenlist-1.8.0-cp310-cp310-win_arm64.whl", hash = "sha256:80f85f0a7cc86e7a54c46d99c9e1318ff01f4687c172ede30fd52d19d1da1c8e", size = 39989, upload-time = "2025-10-06T05:35:44.596Z" }, { url = "https://files.pythonhosted.org/packages/bc/03/077f869d540370db12165c0aa51640a873fb661d8b315d1d4d67b284d7ac/frozenlist-1.8.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:09474e9831bc2b2199fad6da3c14c7b0fbdd377cce9d3d77131be28906cb7d84", size = 86912, upload-time = "2025-10-06T05:35:45.98Z" }, { url = "https://files.pythonhosted.org/packages/df/b5/7610b6bd13e4ae77b96ba85abea1c8cb249683217ef09ac9e0ae93f25a91/frozenlist-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:17c883ab0ab67200b5f964d2b9ed6b00971917d5d8a92df149dc2c9779208ee9", size = 50046, upload-time = "2025-10-06T05:35:47.009Z" }, { url = "https://files.pythonhosted.org/packages/6e/ef/0e8f1fe32f8a53dd26bdd1f9347efe0778b0fddf62789ea683f4cc7d787d/frozenlist-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa47e444b8ba08fffd1c18e8cdb9a75db1b6a27f17507522834ad13ed5922b93", size = 50119, upload-time = "2025-10-06T05:35:48.38Z" }, @@ -654,9 +639,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b9/49/ecccb5f2598daf0b4a1415497eba4c33c1e8ce07495eb07d2860c731b8d5/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:c8d1634419f39ea6f5c427ea2f90ca85126b54b50837f31497f3bf38266e853d", size = 241544, upload-time = "2025-10-06T05:35:59.719Z" }, { url = "https://files.pythonhosted.org/packages/53/4b/ddf24113323c0bbcc54cb38c8b8916f1da7165e07b8e24a717b4a12cbf10/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:1a7fa382a4a223773ed64242dbe1c9c326ec09457e6b8428efb4118c685c3dfd", size = 241806, upload-time = "2025-10-06T05:36:00.959Z" }, { url = "https://files.pythonhosted.org/packages/a7/fb/9b9a084d73c67175484ba2789a59f8eebebd0827d186a8102005ce41e1ba/frozenlist-1.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:11847b53d722050808926e785df837353bd4d75f1d494377e59b23594d834967", size = 229382, upload-time = "2025-10-06T05:36:02.22Z" }, - { url = "https://files.pythonhosted.org/packages/95/a3/c8fb25aac55bf5e12dae5c5aa6a98f85d436c1dc658f21c3ac73f9fa95e5/frozenlist-1.8.0-cp311-cp311-win32.whl", hash = "sha256:27c6e8077956cf73eadd514be8fb04d77fc946a7fe9f7fe167648b0b9085cc25", size = 39647, upload-time = "2025-10-06T05:36:03.409Z" }, - { url = 
"https://files.pythonhosted.org/packages/0a/f5/603d0d6a02cfd4c8f2a095a54672b3cf967ad688a60fb9faf04fc4887f65/frozenlist-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:ac913f8403b36a2c8610bbfd25b8013488533e71e62b4b4adce9c86c8cea905b", size = 44064, upload-time = "2025-10-06T05:36:04.368Z" }, - { url = "https://files.pythonhosted.org/packages/5d/16/c2c9ab44e181f043a86f9a8f84d5124b62dbcb3a02c0977ec72b9ac1d3e0/frozenlist-1.8.0-cp311-cp311-win_arm64.whl", hash = "sha256:d4d3214a0f8394edfa3e303136d0575eece0745ff2b47bd2cb2e66dd92d4351a", size = 39937, upload-time = "2025-10-06T05:36:05.669Z" }, { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" }, { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, @@ -670,9 +652,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820, upload-time = "2025-10-06T05:36:19.046Z" }, { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518, upload-time = "2025-10-06T05:36:20.763Z" }, { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096, upload-time = "2025-10-06T05:36:22.129Z" }, - { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985, upload-time = "2025-10-06T05:36:23.661Z" }, - { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591, upload-time = "2025-10-06T05:36:24.958Z" }, - { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102, upload-time = "2025-10-06T05:36:26.333Z" }, { url = 
"https://files.pythonhosted.org/packages/2d/40/0832c31a37d60f60ed79e9dfb5a92e1e2af4f40a16a29abcc7992af9edff/frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a", size = 85717, upload-time = "2025-10-06T05:36:27.341Z" }, { url = "https://files.pythonhosted.org/packages/30/ba/b0b3de23f40bc55a7057bd38434e25c34fa48e17f20ee273bbde5e0650f3/frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7", size = 49651, upload-time = "2025-10-06T05:36:28.855Z" }, { url = "https://files.pythonhosted.org/packages/0c/ab/6e5080ee374f875296c4243c381bbdef97a9ac39c6e3ce1d5f7d42cb78d6/frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40", size = 49417, upload-time = "2025-10-06T05:36:29.877Z" }, @@ -686,9 +665,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/7a/58/afd56de246cf11780a40a2c28dc7cbabbf06337cc8ddb1c780a2d97e88d8/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1", size = 237763, upload-time = "2025-10-06T05:36:41.355Z" }, { url = "https://files.pythonhosted.org/packages/cb/36/cdfaf6ed42e2644740d4a10452d8e97fa1c062e2a8006e4b09f1b5fd7d63/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8", size = 240110, upload-time = "2025-10-06T05:36:42.716Z" }, { url = "https://files.pythonhosted.org/packages/03/a8/9ea226fbefad669f11b52e864c55f0bd57d3c8d7eb07e9f2e9a0b39502e1/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed", size = 233717, upload-time = "2025-10-06T05:36:44.251Z" }, - { url = "https://files.pythonhosted.org/packages/1e/0b/1b5531611e83ba7d13ccc9988967ea1b51186af64c42b7a7af465dcc9568/frozenlist-1.8.0-cp313-cp313-win32.whl", hash = "sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496", size = 39628, upload-time = "2025-10-06T05:36:45.423Z" }, - { url = "https://files.pythonhosted.org/packages/d8/cf/174c91dbc9cc49bc7b7aab74d8b734e974d1faa8f191c74af9b7e80848e6/frozenlist-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231", size = 43882, upload-time = "2025-10-06T05:36:46.796Z" }, - { url = "https://files.pythonhosted.org/packages/c1/17/502cd212cbfa96eb1388614fe39a3fc9ab87dbbe042b66f97acb57474834/frozenlist-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62", size = 39676, upload-time = "2025-10-06T05:36:47.8Z" }, { url = "https://files.pythonhosted.org/packages/d2/5c/3bbfaa920dfab09e76946a5d2833a7cbdf7b9b4a91c714666ac4855b88b4/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94", size = 89235, upload-time = "2025-10-06T05:36:48.78Z" }, { url = "https://files.pythonhosted.org/packages/d2/d6/f03961ef72166cec1687e84e8925838442b615bd0b8854b54923ce5b7b8a/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c", size = 50742, upload-time = "2025-10-06T05:36:49.837Z" }, { url = 
"https://files.pythonhosted.org/packages/1e/bb/a6d12b7ba4c3337667d0e421f7181c82dda448ce4e7ad7ecd249a16fa806/frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52", size = 51725, upload-time = "2025-10-06T05:36:50.851Z" }, @@ -702,9 +678,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/9f/d0/2366d3c4ecdc2fd391e0afa6e11500bfba0ea772764d631bbf82f0136c9d/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e", size = 289901, upload-time = "2025-10-06T05:37:00.811Z" }, { url = "https://files.pythonhosted.org/packages/b8/94/daff920e82c1b70e3618a2ac39fbc01ae3e2ff6124e80739ce5d71c9b920/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0", size = 289395, upload-time = "2025-10-06T05:37:02.115Z" }, { url = "https://files.pythonhosted.org/packages/e3/20/bba307ab4235a09fdcd3cc5508dbabd17c4634a1af4b96e0f69bfe551ebd/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41", size = 283659, upload-time = "2025-10-06T05:37:03.711Z" }, - { url = "https://files.pythonhosted.org/packages/fd/00/04ca1c3a7a124b6de4f8a9a17cc2fcad138b4608e7a3fc5877804b8715d7/frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b", size = 43492, upload-time = "2025-10-06T05:37:04.915Z" }, - { url = "https://files.pythonhosted.org/packages/59/5e/c69f733a86a94ab10f68e496dc6b7e8bc078ebb415281d5698313e3af3a1/frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888", size = 48034, upload-time = "2025-10-06T05:37:06.343Z" }, - { url = "https://files.pythonhosted.org/packages/16/6c/be9d79775d8abe79b05fa6d23da99ad6e7763a1d080fbae7290b286093fd/frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042", size = 41749, upload-time = "2025-10-06T05:37:07.431Z" }, { url = "https://files.pythonhosted.org/packages/f1/c8/85da824b7e7b9b6e7f7705b2ecaf9591ba6f79c1177f324c2735e41d36a2/frozenlist-1.8.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0", size = 86127, upload-time = "2025-10-06T05:37:08.438Z" }, { url = "https://files.pythonhosted.org/packages/8e/e8/a1185e236ec66c20afd72399522f142c3724c785789255202d27ae992818/frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f", size = 49698, upload-time = "2025-10-06T05:37:09.48Z" }, { url = "https://files.pythonhosted.org/packages/a1/93/72b1736d68f03fda5fdf0f2180fb6caaae3894f1b854d006ac61ecc727ee/frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c", size = 49749, upload-time = "2025-10-06T05:37:10.569Z" }, @@ -718,9 +691,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5f/85/07bf3f5d0fb5414aee5f47d33c6f5c77bfe49aac680bfece33d4fdf6a246/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7", size = 237308, upload-time = "2025-10-06T05:37:20.969Z" }, { url = 
"https://files.pythonhosted.org/packages/11/99/ae3a33d5befd41ac0ca2cc7fd3aa707c9c324de2e89db0e0f45db9a64c26/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30", size = 238210, upload-time = "2025-10-06T05:37:22.252Z" }, { url = "https://files.pythonhosted.org/packages/b2/60/b1d2da22f4970e7a155f0adde9b1435712ece01b3cd45ba63702aea33938/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7", size = 231972, upload-time = "2025-10-06T05:37:23.5Z" }, - { url = "https://files.pythonhosted.org/packages/3f/ab/945b2f32de889993b9c9133216c068b7fcf257d8595a0ac420ac8677cab0/frozenlist-1.8.0-cp314-cp314-win32.whl", hash = "sha256:bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806", size = 40536, upload-time = "2025-10-06T05:37:25.581Z" }, - { url = "https://files.pythonhosted.org/packages/59/ad/9caa9b9c836d9ad6f067157a531ac48b7d36499f5036d4141ce78c230b1b/frozenlist-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0", size = 44330, upload-time = "2025-10-06T05:37:26.928Z" }, - { url = "https://files.pythonhosted.org/packages/82/13/e6950121764f2676f43534c555249f57030150260aee9dcf7d64efda11dd/frozenlist-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b", size = 40627, upload-time = "2025-10-06T05:37:28.075Z" }, { url = "https://files.pythonhosted.org/packages/c0/c7/43200656ecc4e02d3f8bc248df68256cd9572b3f0017f0a0c4e93440ae23/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d", size = 89238, upload-time = "2025-10-06T05:37:29.373Z" }, { url = "https://files.pythonhosted.org/packages/d1/29/55c5f0689b9c0fb765055629f472c0de484dcaf0acee2f7707266ae3583c/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed", size = 50738, upload-time = "2025-10-06T05:37:30.792Z" }, { url = "https://files.pythonhosted.org/packages/ba/7d/b7282a445956506fa11da8c2db7d276adcbf2b17d8bb8407a47685263f90/frozenlist-1.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930", size = 51739, upload-time = "2025-10-06T05:37:32.127Z" }, @@ -734,9 +704,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c1/15/ca1adae83a719f82df9116d66f5bb28bb95557b3951903d39135620ef157/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8", size = 289470, upload-time = "2025-10-06T05:37:47.946Z" }, { url = "https://files.pythonhosted.org/packages/ac/83/dca6dc53bf657d371fbc88ddeb21b79891e747189c5de990b9dfff2ccba1/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a", size = 289042, upload-time = "2025-10-06T05:37:49.499Z" }, { url = "https://files.pythonhosted.org/packages/96/52/abddd34ca99be142f354398700536c5bd315880ed0a213812bc491cff5e4/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e", size = 283148, upload-time = "2025-10-06T05:37:50.745Z" }, - { url = 
"https://files.pythonhosted.org/packages/af/d3/76bd4ed4317e7119c2b7f57c3f6934aba26d277acc6309f873341640e21f/frozenlist-1.8.0-cp314-cp314t-win32.whl", hash = "sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df", size = 44676, upload-time = "2025-10-06T05:37:52.222Z" }, - { url = "https://files.pythonhosted.org/packages/89/76/c615883b7b521ead2944bb3480398cbb07e12b7b4e4d073d3752eb721558/frozenlist-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd", size = 49451, upload-time = "2025-10-06T05:37:53.425Z" }, - { url = "https://files.pythonhosted.org/packages/e0/a3/5982da14e113d07b325230f95060e2169f5311b1017ea8af2a29b374c289/frozenlist-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79", size = 42507, upload-time = "2025-10-06T05:37:54.513Z" }, { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, ] @@ -777,7 +744,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b6/a8/15d0aa26c0036a15d2659175af00954aaaa5d0d66ba538345bd88013b4d7/greenlet-3.3.0-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7dee147740789a4632cace364816046e43310b59ff8fb79833ab043aefa72fd5", size = 586910, upload-time = "2025-12-04T14:25:59.705Z" }, { url = "https://files.pythonhosted.org/packages/e1/9b/68d5e3b7ccaba3907e5532cf8b9bf16f9ef5056a008f195a367db0ff32db/greenlet-3.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:39b28e339fc3c348427560494e28d8a6f3561c8d2bcf7d706e1c624ed8d822b9", size = 1547206, upload-time = "2025-12-04T15:04:21.027Z" }, { url = "https://files.pythonhosted.org/packages/66/bd/e3086ccedc61e49f91e2cfb5ffad9d8d62e5dc85e512a6200f096875b60c/greenlet-3.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b3c374782c2935cc63b2a27ba8708471de4ad1abaa862ffdb1ef45a643ddbb7d", size = 1613359, upload-time = "2025-12-04T14:27:26.548Z" }, - { url = "https://files.pythonhosted.org/packages/f4/6b/d4e73f5dfa888364bbf02efa85616c6714ae7c631c201349782e5b428925/greenlet-3.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:b49e7ed51876b459bd645d83db257f0180e345d3f768a35a85437a24d5a49082", size = 300740, upload-time = "2025-12-04T14:47:52.773Z" }, { url = "https://files.pythonhosted.org/packages/1f/cb/48e964c452ca2b92175a9b2dca037a553036cb053ba69e284650ce755f13/greenlet-3.3.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e29f3018580e8412d6aaf5641bb7745d38c85228dacf51a73bd4e26ddf2a6a8e", size = 274908, upload-time = "2025-12-04T14:23:26.435Z" }, { url = "https://files.pythonhosted.org/packages/28/da/38d7bff4d0277b594ec557f479d65272a893f1f2a716cad91efeb8680953/greenlet-3.3.0-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a687205fb22794e838f947e2194c0566d3812966b41c78709554aa883183fb62", size = 577113, upload-time = "2025-12-04T14:50:05.493Z" }, { url = "https://files.pythonhosted.org/packages/3c/f2/89c5eb0faddc3ff014f1c04467d67dee0d1d334ab81fadbf3744847f8a8a/greenlet-3.3.0-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4243050a88ba61842186cb9e63c7dfa677ec146160b0efd73b855a3d9c7fcf32", size = 590338, upload-time = "2025-12-04T14:57:41.136Z" }, @@ -785,7 +751,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/dc/a6/e959a127b630a58e23529972dbc868c107f9d583b5a9f878fb858c46bc1a/greenlet-3.3.0-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cb3a8ec3db4a3b0eb8a3c25436c2d49e3505821802074969db017b87bc6a948", size = 590206, upload-time = "2025-12-04T14:26:01.254Z" }, { url = "https://files.pythonhosted.org/packages/48/60/29035719feb91798693023608447283b266b12efc576ed013dd9442364bb/greenlet-3.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2de5a0b09eab81fc6a382791b995b1ccf2b172a9fec934747a7a23d2ff291794", size = 1550668, upload-time = "2025-12-04T15:04:22.439Z" }, { url = "https://files.pythonhosted.org/packages/0a/5f/783a23754b691bfa86bd72c3033aa107490deac9b2ef190837b860996c9f/greenlet-3.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4449a736606bd30f27f8e1ff4678ee193bc47f6ca810d705981cfffd6ce0d8c5", size = 1615483, upload-time = "2025-12-04T14:27:28.083Z" }, - { url = "https://files.pythonhosted.org/packages/1d/d5/c339b3b4bc8198b7caa4f2bd9fd685ac9f29795816d8db112da3d04175bb/greenlet-3.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:7652ee180d16d447a683c04e4c5f6441bae7ba7b17ffd9f6b3aff4605e9e6f71", size = 301164, upload-time = "2025-12-04T14:42:51.577Z" }, { url = "https://files.pythonhosted.org/packages/f8/0a/a3871375c7b9727edaeeea994bfff7c63ff7804c9829c19309ba2e058807/greenlet-3.3.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:b01548f6e0b9e9784a2c99c5651e5dc89ffcbe870bc5fb2e5ef864e9cc6b5dcb", size = 276379, upload-time = "2025-12-04T14:23:30.498Z" }, { url = "https://files.pythonhosted.org/packages/43/ab/7ebfe34dce8b87be0d11dae91acbf76f7b8246bf9d6b319c741f99fa59c6/greenlet-3.3.0-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:349345b770dc88f81506c6861d22a6ccd422207829d2c854ae2af8025af303e3", size = 597294, upload-time = "2025-12-04T14:50:06.847Z" }, { url = "https://files.pythonhosted.org/packages/a4/39/f1c8da50024feecd0793dbd5e08f526809b8ab5609224a2da40aad3a7641/greenlet-3.3.0-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e8e18ed6995e9e2c0b4ed264d2cf89260ab3ac7e13555b8032b25a74c6d18655", size = 607742, upload-time = "2025-12-04T14:57:42.349Z" }, @@ -793,7 +758,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/75/b0/6bde0b1011a60782108c01de5913c588cf51a839174538d266de15e4bf4d/greenlet-3.3.0-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:047ab3df20ede6a57c35c14bf5200fcf04039d50f908270d3f9a7a82064f543b", size = 609885, upload-time = "2025-12-04T14:26:02.368Z" }, { url = "https://files.pythonhosted.org/packages/49/0e/49b46ac39f931f59f987b7cd9f34bfec8ef81d2a1e6e00682f55be5de9f4/greenlet-3.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2d9ad37fc657b1102ec880e637cccf20191581f75c64087a549e66c57e1ceb53", size = 1567424, upload-time = "2025-12-04T15:04:23.757Z" }, { url = "https://files.pythonhosted.org/packages/05/f5/49a9ac2dff7f10091935def9165c90236d8f175afb27cbed38fb1d61ab6b/greenlet-3.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:83cd0e36932e0e7f36a64b732a6f60c2fc2df28c351bae79fbaf4f8092fe7614", size = 1636017, upload-time = "2025-12-04T14:27:29.688Z" }, - { url = "https://files.pythonhosted.org/packages/6c/79/3912a94cf27ec503e51ba493692d6db1e3cd8ac7ac52b0b47c8e33d7f4f9/greenlet-3.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a7a34b13d43a6b78abf828a6d0e87d3385680eaf830cd60d20d52f249faabf39", size = 301964, upload-time = "2025-12-04T14:36:58.316Z" }, { url = 
"https://files.pythonhosted.org/packages/02/2f/28592176381b9ab2cafa12829ba7b472d177f3acc35d8fbcf3673d966fff/greenlet-3.3.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:a1e41a81c7e2825822f4e068c48cb2196002362619e2d70b148f20a831c00739", size = 275140, upload-time = "2025-12-04T14:23:01.282Z" }, { url = "https://files.pythonhosted.org/packages/2c/80/fbe937bf81e9fca98c981fe499e59a3f45df2a04da0baa5c2be0dca0d329/greenlet-3.3.0-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9f515a47d02da4d30caaa85b69474cec77b7929b2e936ff7fb853d42f4bf8808", size = 599219, upload-time = "2025-12-04T14:50:08.309Z" }, { url = "https://files.pythonhosted.org/packages/c2/ff/7c985128f0514271b8268476af89aee6866df5eec04ac17dcfbc676213df/greenlet-3.3.0-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:7d2d9fd66bfadf230b385fdc90426fcd6eb64db54b40c495b72ac0feb5766c54", size = 610211, upload-time = "2025-12-04T14:57:43.968Z" }, @@ -801,7 +765,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/fd/8e/424b8c6e78bd9837d14ff7df01a9829fc883ba2ab4ea787d4f848435f23f/greenlet-3.3.0-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:087ea5e004437321508a8d6f20efc4cfec5e3c30118e1417ea96ed1d93950527", size = 612833, upload-time = "2025-12-04T14:26:03.669Z" }, { url = "https://files.pythonhosted.org/packages/b5/ba/56699ff9b7c76ca12f1cdc27a886d0f81f2189c3455ff9f65246780f713d/greenlet-3.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ab97cf74045343f6c60a39913fa59710e4bd26a536ce7ab2397adf8b27e67c39", size = 1567256, upload-time = "2025-12-04T15:04:25.276Z" }, { url = "https://files.pythonhosted.org/packages/1e/37/f31136132967982d698c71a281a8901daf1a8fbab935dce7c0cf15f942cc/greenlet-3.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5375d2e23184629112ca1ea89a53389dddbffcf417dad40125713d88eb5f96e8", size = 1636483, upload-time = "2025-12-04T14:27:30.804Z" }, - { url = "https://files.pythonhosted.org/packages/7e/71/ba21c3fb8c5dce83b8c01f458a42e99ffdb1963aeec08fff5a18588d8fd7/greenlet-3.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:9ee1942ea19550094033c35d25d20726e4f1c40d59545815e1128ac58d416d38", size = 301833, upload-time = "2025-12-04T14:32:23.929Z" }, { url = "https://files.pythonhosted.org/packages/d7/7c/f0a6d0ede2c7bf092d00bc83ad5bafb7e6ec9b4aab2fbdfa6f134dc73327/greenlet-3.3.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:60c2ef0f578afb3c8d92ea07ad327f9a062547137afe91f38408f08aacab667f", size = 275671, upload-time = "2025-12-04T14:23:05.267Z" }, { url = "https://files.pythonhosted.org/packages/44/06/dac639ae1a50f5969d82d2e3dd9767d30d6dbdbab0e1a54010c8fe90263c/greenlet-3.3.0-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0a5d554d0712ba1de0a6c94c640f7aeba3f85b3a6e1f2899c11c2c0428da9365", size = 646360, upload-time = "2025-12-04T14:50:10.026Z" }, { url = "https://files.pythonhosted.org/packages/e0/94/0fb76fe6c5369fba9bf98529ada6f4c3a1adf19e406a47332245ef0eb357/greenlet-3.3.0-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3a898b1e9c5f7307ebbde4102908e6cbfcb9ea16284a3abe15cab996bee8b9b3", size = 658160, upload-time = "2025-12-04T14:57:45.41Z" }, @@ -809,7 +772,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b8/14/bab308fc2c1b5228c3224ec2bf928ce2e4d21d8046c161e44a2012b5203e/greenlet-3.3.0-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:5773edda4dc00e173820722711d043799d3adb4f01731f40619e07ea2750b955", size = 660166, upload-time = "2025-12-04T14:26:05.099Z" }, { url = "https://files.pythonhosted.org/packages/4b/d2/91465d39164eaa0085177f61983d80ffe746c5a1860f009811d498e7259c/greenlet-3.3.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ac0549373982b36d5fd5d30beb8a7a33ee541ff98d2b502714a09f1169f31b55", size = 1615193, upload-time = "2025-12-04T15:04:27.041Z" }, { url = "https://files.pythonhosted.org/packages/42/1b/83d110a37044b92423084d52d5d5a3b3a73cafb51b547e6d7366ff62eff1/greenlet-3.3.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d198d2d977460358c3b3a4dc844f875d1adb33817f0613f663a656f463764ccc", size = 1683653, upload-time = "2025-12-04T14:27:32.366Z" }, - { url = "https://files.pythonhosted.org/packages/7c/9a/9030e6f9aa8fd7808e9c31ba4c38f87c4f8ec324ee67431d181fe396d705/greenlet-3.3.0-cp314-cp314-win_amd64.whl", hash = "sha256:73f51dd0e0bdb596fb0417e475fa3c5e32d4c83638296e560086b8d7da7c4170", size = 305387, upload-time = "2025-12-04T14:26:51.063Z" }, { url = "https://files.pythonhosted.org/packages/a0/66/bd6317bc5932accf351fc19f177ffba53712a202f9df10587da8df257c7e/greenlet-3.3.0-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:d6ed6f85fae6cdfdb9ce04c9bf7a08d666cfcfb914e7d006f44f840b46741931", size = 282638, upload-time = "2025-12-04T14:25:20.941Z" }, { url = "https://files.pythonhosted.org/packages/30/cf/cc81cb030b40e738d6e69502ccbd0dd1bced0588e958f9e757945de24404/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d9125050fcf24554e69c4cacb086b87b3b55dc395a8b3ebe6487b045b2614388", size = 651145, upload-time = "2025-12-04T14:50:11.039Z" }, { url = "https://files.pythonhosted.org/packages/9c/ea/1020037b5ecfe95ca7df8d8549959baceb8186031da83d5ecceff8b08cd2/greenlet-3.3.0-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:87e63ccfa13c0a0f6234ed0add552af24cc67dd886731f2261e46e241608bee3", size = 654236, upload-time = "2025-12-04T14:57:47.007Z" }, @@ -874,8 +836,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/df/37/4349484d7693324e40d51d0dc2f4d52b49b6765598e14af56642075ecdde/iterfzf-1.8.0.62.0-py3-none-manylinux_1_2_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15094a8ac72d0755d3657333fcd72329ef2d79ea6633c37a6be27be66671ae48", size = 1610003, upload-time = "2025-05-15T13:13:00.101Z" }, { url = "https://files.pythonhosted.org/packages/c9/85/520fedf1a01fe014366307ea4b1ce5527e29dfcf042155eb874972dcc1f7/iterfzf-1.8.0.62.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:32878d993fc6058641bdec50867a25e199b40736cebb0e28449b8a61b054bf3d", size = 1482981, upload-time = "2025-05-15T13:13:02.153Z" }, { url = "https://files.pythonhosted.org/packages/7b/08/e9fafe7bc4609f317d50df7cdb3c23a46ec65cedf255bd2ef55916d04e54/iterfzf-1.8.0.62.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b43e757b4c0141250aa48cb545083da25e41722d14b003fdc9bddb366a373479", size = 1604392, upload-time = "2025-05-15T13:13:04.102Z" }, - { url = "https://files.pythonhosted.org/packages/5b/0f/7169c67ba591917c9eb09ab5433cb6c8cc2346baee7b9abab9351da277a6/iterfzf-1.8.0.62.0-py3-none-win_amd64.whl", hash = "sha256:b7cb95f93d5c1d901e8b7ccc886870f48f8331ebbba230cbd49eaedf045f142e", size = 1833659, upload-time = "2025-05-15T13:13:06.39Z" }, - { url = 
"https://files.pythonhosted.org/packages/6b/b0/ff38147ebb94a9ad79781e7356f2b5e69bd79721a9f0653ccf25506c4cfe/iterfzf-1.8.0.62.0-py3-none-win_arm64.whl", hash = "sha256:9ae79840b14c090c6a4590add2eaa90f1e945319433d9c900badf0b588b576f8", size = 1689599, upload-time = "2025-05-15T13:13:08.463Z" }, ] [[package]] @@ -902,7 +862,7 @@ dependencies = [ dev = [ { name = "basedpyright" }, { name = "codespell" }, - { name = "cosmic-ray" }, + { name = "cosmic-ray", marker = "sys_platform != 'win32'" }, { name = "dirty-equals" }, { name = "faker" }, { name = "hypothesis" }, @@ -912,7 +872,7 @@ dev = [ { name = "pytest-codspeed" }, { name = "pytest-cov" }, { name = "pytest-examples" }, - { name = "pytest-memray" }, + { name = "pytest-memray", marker = "sys_platform != 'win32'" }, { name = "pytest-mock" }, { name = "pytest-test-groups" }, { name = "rich" }, @@ -932,7 +892,7 @@ requires-dist = [{ name = "typing-extensions", specifier = ">=4.15.0" }] dev = [ { name = "basedpyright", specifier = ">=1.36.1" }, { name = "codespell", specifier = ">=2.4.1" }, - { name = "cosmic-ray", specifier = ">=8.4.3" }, + { name = "cosmic-ray", marker = "sys_platform != 'win32'", specifier = ">=8.4.3" }, { name = "dirty-equals", specifier = ">=0.11" }, { name = "faker", specifier = ">=39.0.0" }, { name = "hypothesis", specifier = ">=6.148.7" }, @@ -942,7 +902,7 @@ dev = [ { name = "pytest-codspeed", specifier = ">=4.2.0" }, { name = "pytest-cov", specifier = ">=7.0.0" }, { name = "pytest-examples", specifier = ">=0.0.18" }, - { name = "pytest-memray", specifier = ">=1.8.0" }, + { name = "pytest-memray", marker = "sys_platform != 'win32'", specifier = ">=1.8.0" }, { name = "pytest-mock", specifier = ">=3.15.1" }, { name = "pytest-test-groups", specifier = ">=1.2.1" }, { name = "rich", specifier = ">=14.2.0" }, @@ -1190,9 +1150,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/cf/e3/9427a68c82728d0a88c50f890d0fc072a1484de2f3ac1ad0bfc1a7214fd5/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f", size = 21524, upload-time = "2025-09-27T18:36:11.324Z" }, { url = "https://files.pythonhosted.org/packages/bc/36/23578f29e9e582a4d0278e009b38081dbe363c5e7165113fad546918a232/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6", size = 20282, upload-time = "2025-09-27T18:36:12.573Z" }, { url = "https://files.pythonhosted.org/packages/56/21/dca11354e756ebd03e036bd8ad58d6d7168c80ce1fe5e75218e4945cbab7/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1", size = 20745, upload-time = "2025-09-27T18:36:13.504Z" }, - { url = "https://files.pythonhosted.org/packages/87/99/faba9369a7ad6e4d10b6a5fbf71fa2a188fe4a593b15f0963b73859a1bbd/markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa", size = 14571, upload-time = "2025-09-27T18:36:14.779Z" }, - { url = "https://files.pythonhosted.org/packages/d6/25/55dc3ab959917602c96985cb1253efaa4ff42f71194bddeb61eb7278b8be/markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8", size = 15056, upload-time = "2025-09-27T18:36:16.125Z" }, - { url = "https://files.pythonhosted.org/packages/d0/9e/0a02226640c255d1da0b8d12e24ac2aa6734da68bff14c05dd53b94a0fc3/markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = 
"sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1", size = 13932, upload-time = "2025-09-27T18:36:17.311Z" }, { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" }, { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" }, { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" }, @@ -1201,9 +1158,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" }, { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" }, { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" }, - { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" }, - { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" }, - { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" }, { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 
12020, upload-time = "2025-09-27T18:36:31.971Z" }, { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, @@ -1212,9 +1166,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, - { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, - { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, - { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" }, { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" }, { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" }, @@ -1223,9 +1174,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", 
size = 23784, upload-time = "2025-09-27T18:36:47.884Z" }, { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" }, { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" }, - { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" }, - { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" }, - { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" }, { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" }, { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" }, { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" }, @@ -1234,9 +1182,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" }, { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" }, { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" }, - { url = 
"https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, - { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, - { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, { url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" }, { url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" }, { url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" }, @@ -1245,9 +1190,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" }, { url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" }, { url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" }, - { url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" }, - { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" }, - { url = 
"https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" }, { url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" }, { url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" }, { url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" }, @@ -1256,9 +1198,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" }, { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" }, { url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" }, - { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" }, - { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" }, - { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, ] [[package]] @@ -1355,9 +1294,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b9/c3/e9d9e2f20c9474e7a8fcef28f863c5cbd29bb5adce6b70cebe8bdad0039d/multidict-6.7.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:df0e3bf7993bdbeca5ac25aa859cf40d39019e015c9c91809ba7093967f7a648", size = 248999, upload-time = "2025-10-06T14:48:46.703Z" }, { url = 
"https://files.pythonhosted.org/packages/b5/3f/df171b6efa3239ae33b97b887e42671cd1d94d460614bfb2c30ffdab3b95/multidict-6.7.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:661709cdcd919a2ece2234f9bae7174e5220c80b034585d7d8a755632d3e2111", size = 243711, upload-time = "2025-10-06T14:48:48.146Z" }, { url = "https://files.pythonhosted.org/packages/3c/2f/9b5564888c4e14b9af64c54acf149263721a283aaf4aa0ae89b091d5d8c1/multidict-6.7.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:096f52730c3fb8ed419db2d44391932b63891b2c5ed14850a7e215c0ba9ade36", size = 237504, upload-time = "2025-10-06T14:48:49.447Z" }, - { url = "https://files.pythonhosted.org/packages/6c/3a/0bd6ca0f7d96d790542d591c8c3354c1e1b6bfd2024d4d92dc3d87485ec7/multidict-6.7.0-cp310-cp310-win32.whl", hash = "sha256:afa8a2978ec65d2336305550535c9c4ff50ee527914328c8677b3973ade52b85", size = 41422, upload-time = "2025-10-06T14:48:50.789Z" }, - { url = "https://files.pythonhosted.org/packages/00/35/f6a637ea2c75f0d3b7c7d41b1189189acff0d9deeb8b8f35536bb30f5e33/multidict-6.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:b15b3afff74f707b9275d5ba6a91ae8f6429c3ffb29bbfd216b0b375a56f13d7", size = 46050, upload-time = "2025-10-06T14:48:51.938Z" }, - { url = "https://files.pythonhosted.org/packages/e7/b8/f7bf8329b39893d02d9d95cf610c75885d12fc0f402b1c894e1c8e01c916/multidict-6.7.0-cp310-cp310-win_arm64.whl", hash = "sha256:4b73189894398d59131a66ff157837b1fafea9974be486d036bb3d32331fdbf0", size = 43153, upload-time = "2025-10-06T14:48:53.146Z" }, { url = "https://files.pythonhosted.org/packages/34/9e/5c727587644d67b2ed479041e4b1c58e30afc011e3d45d25bbe35781217c/multidict-6.7.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:4d409aa42a94c0b3fa617708ef5276dfe81012ba6753a0370fcc9d0195d0a1fc", size = 76604, upload-time = "2025-10-06T14:48:54.277Z" }, { url = "https://files.pythonhosted.org/packages/17/e4/67b5c27bd17c085a5ea8f1ec05b8a3e5cba0ca734bfcad5560fb129e70ca/multidict-6.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14c9e076eede3b54c636f8ce1c9c252b5f057c62131211f0ceeec273810c9721", size = 44715, upload-time = "2025-10-06T14:48:55.445Z" }, { url = "https://files.pythonhosted.org/packages/4d/e1/866a5d77be6ea435711bef2a4291eed11032679b6b28b56b4776ab06ba3e/multidict-6.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c09703000a9d0fa3c3404b27041e574cc7f4df4c6563873246d0e11812a94b6", size = 44332, upload-time = "2025-10-06T14:48:56.706Z" }, @@ -1373,9 +1309,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/23/ef/43d1c3ba205b5dec93dc97f3fba179dfa47910fc73aaaea4f7ceb41cec2a/multidict-6.7.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:0a13fb8e748dfc94749f622de065dd5c1def7e0d2216dba72b1d8069a389c6ff", size = 253345, upload-time = "2025-10-06T14:49:12.331Z" }, { url = "https://files.pythonhosted.org/packages/6b/03/eaf95bcc2d19ead522001f6a650ef32811aa9e3624ff0ad37c445c7a588c/multidict-6.7.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e3aa16de190d29a0ea1b48253c57d99a68492c8dd8948638073ab9e74dc9410b", size = 246940, upload-time = "2025-10-06T14:49:13.821Z" }, { url = "https://files.pythonhosted.org/packages/e8/df/ec8a5fd66ea6cd6f525b1fcbb23511b033c3e9bc42b81384834ffa484a62/multidict-6.7.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a048ce45dcdaaf1defb76b2e684f997fb5abf74437b6cb7b22ddad934a964e34", size = 242229, upload-time = "2025-10-06T14:49:15.603Z" }, - { url = 
"https://files.pythonhosted.org/packages/8a/a2/59b405d59fd39ec86d1142630e9049243015a5f5291ba49cadf3c090c541/multidict-6.7.0-cp311-cp311-win32.whl", hash = "sha256:a90af66facec4cebe4181b9e62a68be65e45ac9b52b67de9eec118701856e7ff", size = 41308, upload-time = "2025-10-06T14:49:16.871Z" }, - { url = "https://files.pythonhosted.org/packages/32/0f/13228f26f8b882c34da36efa776c3b7348455ec383bab4a66390e42963ae/multidict-6.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:95b5ffa4349df2887518bb839409bcf22caa72d82beec453216802f475b23c81", size = 46037, upload-time = "2025-10-06T14:49:18.457Z" }, - { url = "https://files.pythonhosted.org/packages/84/1f/68588e31b000535a3207fd3c909ebeec4fb36b52c442107499c18a896a2a/multidict-6.7.0-cp311-cp311-win_arm64.whl", hash = "sha256:329aa225b085b6f004a4955271a7ba9f1087e39dcb7e65f6284a988264a63912", size = 43023, upload-time = "2025-10-06T14:49:19.648Z" }, { url = "https://files.pythonhosted.org/packages/c2/9e/9f61ac18d9c8b475889f32ccfa91c9f59363480613fc807b6e3023d6f60b/multidict-6.7.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:8a3862568a36d26e650a19bb5cbbba14b71789032aebc0423f8cc5f150730184", size = 76877, upload-time = "2025-10-06T14:49:20.884Z" }, { url = "https://files.pythonhosted.org/packages/38/6f/614f09a04e6184f8824268fce4bc925e9849edfa654ddd59f0b64508c595/multidict-6.7.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:960c60b5849b9b4f9dcc9bea6e3626143c252c74113df2c1540aebce70209b45", size = 45467, upload-time = "2025-10-06T14:49:22.054Z" }, { url = "https://files.pythonhosted.org/packages/b3/93/c4f67a436dd026f2e780c433277fff72be79152894d9fc36f44569cab1a6/multidict-6.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2049be98fb57a31b4ccf870bf377af2504d4ae35646a19037ec271e4c07998aa", size = 43834, upload-time = "2025-10-06T14:49:23.566Z" }, @@ -1391,9 +1324,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0d/2f/919258b43bb35b99fa127435cfb2d91798eb3a943396631ef43e3720dcf4/multidict-6.7.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8a19cdb57cd3df4cd865849d93ee14920fb97224300c88501f16ecfa2604b4e0", size = 263579, upload-time = "2025-10-06T14:49:39.502Z" }, { url = "https://files.pythonhosted.org/packages/31/22/a0e884d86b5242b5a74cf08e876bdf299e413016b66e55511f7a804a366e/multidict-6.7.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9b2fd74c52accced7e75de26023b7dccee62511a600e62311b918ec5c168fc2a", size = 259654, upload-time = "2025-10-06T14:49:41.32Z" }, { url = "https://files.pythonhosted.org/packages/b2/e5/17e10e1b5c5f5a40f2fcbb45953c9b215f8a4098003915e46a93f5fcaa8f/multidict-6.7.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3e8bfdd0e487acf992407a140d2589fe598238eaeffa3da8448d63a63cd363f8", size = 251511, upload-time = "2025-10-06T14:49:46.021Z" }, - { url = "https://files.pythonhosted.org/packages/e3/9a/201bb1e17e7af53139597069c375e7b0dcbd47594604f65c2d5359508566/multidict-6.7.0-cp312-cp312-win32.whl", hash = "sha256:dd32a49400a2c3d52088e120ee00c1e3576cbff7e10b98467962c74fdb762ed4", size = 41895, upload-time = "2025-10-06T14:49:48.718Z" }, - { url = "https://files.pythonhosted.org/packages/46/e2/348cd32faad84eaf1d20cce80e2bb0ef8d312c55bca1f7fa9865e7770aaf/multidict-6.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:92abb658ef2d7ef22ac9f8bb88e8b6c3e571671534e029359b6d9e845923eb1b", size = 46073, upload-time = "2025-10-06T14:49:50.28Z" }, - { url = "https://files.pythonhosted.org/packages/25/ec/aad2613c1910dce907480e0c3aa306905830f25df2e54ccc9dea450cb5aa/multidict-6.7.0-cp312-cp312-win_arm64.whl", 
hash = "sha256:490dab541a6a642ce1a9d61a4781656b346a55c13038f0b1244653828e3a83ec", size = 43226, upload-time = "2025-10-06T14:49:52.304Z" }, { url = "https://files.pythonhosted.org/packages/d2/86/33272a544eeb36d66e4d9a920602d1a2f57d4ebea4ef3cdfe5a912574c95/multidict-6.7.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bee7c0588aa0076ce77c0ea5d19a68d76ad81fcd9fe8501003b9a24f9d4000f6", size = 76135, upload-time = "2025-10-06T14:49:54.26Z" }, { url = "https://files.pythonhosted.org/packages/91/1c/eb97db117a1ebe46d457a3d235a7b9d2e6dcab174f42d1b67663dd9e5371/multidict-6.7.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7ef6b61cad77091056ce0e7ce69814ef72afacb150b7ac6a3e9470def2198159", size = 45117, upload-time = "2025-10-06T14:49:55.82Z" }, { url = "https://files.pythonhosted.org/packages/f1/d8/6c3442322e41fb1dd4de8bd67bfd11cd72352ac131f6368315617de752f1/multidict-6.7.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9c0359b1ec12b1d6849c59f9d319610b7f20ef990a6d454ab151aa0e3b9f78ca", size = 43472, upload-time = "2025-10-06T14:49:57.048Z" }, @@ -1409,9 +1339,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/8c/a4/a89abdb0229e533fb925e7c6e5c40201c2873efebc9abaf14046a4536ee6/multidict-6.7.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:7b022717c748dd1992a83e219587aabe45980d88969f01b316e78683e6285f64", size = 261254, upload-time = "2025-10-06T14:50:12.28Z" }, { url = "https://files.pythonhosted.org/packages/8d/aa/0e2b27bd88b40a4fb8dc53dd74eecac70edaa4c1dd0707eb2164da3675b3/multidict-6.7.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:9600082733859f00d79dee64effc7aef1beb26adb297416a4ad2116fd61374bd", size = 257967, upload-time = "2025-10-06T14:50:14.16Z" }, { url = "https://files.pythonhosted.org/packages/d0/8e/0c67b7120d5d5f6d874ed85a085f9dc770a7f9d8813e80f44a9fec820bb7/multidict-6.7.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:94218fcec4d72bc61df51c198d098ce2b378e0ccbac41ddbed5ef44092913288", size = 250085, upload-time = "2025-10-06T14:50:15.639Z" }, - { url = "https://files.pythonhosted.org/packages/ba/55/b73e1d624ea4b8fd4dd07a3bb70f6e4c7c6c5d9d640a41c6ffe5cdbd2a55/multidict-6.7.0-cp313-cp313-win32.whl", hash = "sha256:a37bd74c3fa9d00be2d7b8eca074dc56bd8077ddd2917a839bd989612671ed17", size = 41713, upload-time = "2025-10-06T14:50:17.066Z" }, - { url = "https://files.pythonhosted.org/packages/32/31/75c59e7d3b4205075b4c183fa4ca398a2daf2303ddf616b04ae6ef55cffe/multidict-6.7.0-cp313-cp313-win_amd64.whl", hash = "sha256:30d193c6cc6d559db42b6bcec8a5d395d34d60c9877a0b71ecd7c204fcf15390", size = 45915, upload-time = "2025-10-06T14:50:18.264Z" }, - { url = "https://files.pythonhosted.org/packages/31/2a/8987831e811f1184c22bc2e45844934385363ee61c0a2dcfa8f71b87e608/multidict-6.7.0-cp313-cp313-win_arm64.whl", hash = "sha256:ea3334cabe4d41b7ccd01e4d349828678794edbc2d3ae97fc162a3312095092e", size = 43077, upload-time = "2025-10-06T14:50:19.853Z" }, { url = "https://files.pythonhosted.org/packages/e8/68/7b3a5170a382a340147337b300b9eb25a9ddb573bcdfff19c0fa3f31ffba/multidict-6.7.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:ad9ce259f50abd98a1ca0aa6e490b58c316a0fce0617f609723e40804add2c00", size = 83114, upload-time = "2025-10-06T14:50:21.223Z" }, { url = "https://files.pythonhosted.org/packages/55/5c/3fa2d07c84df4e302060f555bbf539310980362236ad49f50eeb0a1c1eb9/multidict-6.7.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07f5594ac6d084cbb5de2df218d78baf55ef150b91f0ff8a21cc7a2e3a5a58eb", size = 48442, upload-time = 
"2025-10-06T14:50:22.871Z" }, { url = "https://files.pythonhosted.org/packages/fc/56/67212d33239797f9bd91962bb899d72bb0f4c35a8652dcdb8ed049bef878/multidict-6.7.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0591b48acf279821a579282444814a2d8d0af624ae0bc600aa4d1b920b6e924b", size = 46885, upload-time = "2025-10-06T14:50:24.258Z" }, @@ -1427,9 +1354,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/54/0a/4349d540d4a883863191be6eb9a928846d4ec0ea007d3dcd36323bb058ac/multidict-6.7.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:4ef089f985b8c194d341eb2c24ae6e7408c9a0e2e5658699c92f497437d88c3c", size = 252312, upload-time = "2025-10-06T14:50:41.612Z" }, { url = "https://files.pythonhosted.org/packages/26/64/d5416038dbda1488daf16b676e4dbfd9674dde10a0cc8f4fc2b502d8125d/multidict-6.7.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e93a0617cd16998784bf4414c7e40f17a35d2350e5c6f0bd900d3a8e02bd3762", size = 246935, upload-time = "2025-10-06T14:50:43.972Z" }, { url = "https://files.pythonhosted.org/packages/9f/8c/8290c50d14e49f35e0bd4abc25e1bc7711149ca9588ab7d04f886cdf03d9/multidict-6.7.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f0feece2ef8ebc42ed9e2e8c78fc4aa3cf455733b507c09ef7406364c94376c6", size = 243385, upload-time = "2025-10-06T14:50:45.648Z" }, - { url = "https://files.pythonhosted.org/packages/ef/a0/f83ae75e42d694b3fbad3e047670e511c138be747bc713cf1b10d5096416/multidict-6.7.0-cp313-cp313t-win32.whl", hash = "sha256:19a1d55338ec1be74ef62440ca9e04a2f001a04d0cc49a4983dc320ff0f3212d", size = 47777, upload-time = "2025-10-06T14:50:47.154Z" }, - { url = "https://files.pythonhosted.org/packages/dc/80/9b174a92814a3830b7357307a792300f42c9e94664b01dee8e457551fa66/multidict-6.7.0-cp313-cp313t-win_amd64.whl", hash = "sha256:3da4fb467498df97e986af166b12d01f05d2e04f978a9c1c680ea1988e0bc4b6", size = 53104, upload-time = "2025-10-06T14:50:48.851Z" }, - { url = "https://files.pythonhosted.org/packages/cc/28/04baeaf0428d95bb7a7bea0e691ba2f31394338ba424fb0679a9ed0f4c09/multidict-6.7.0-cp313-cp313t-win_arm64.whl", hash = "sha256:b4121773c49a0776461f4a904cdf6264c88e42218aaa8407e803ca8025872792", size = 45503, upload-time = "2025-10-06T14:50:50.16Z" }, { url = "https://files.pythonhosted.org/packages/e2/b1/3da6934455dd4b261d4c72f897e3a5728eba81db59959f3a639245891baa/multidict-6.7.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3bab1e4aff7adaa34410f93b1f8e57c4b36b9af0426a76003f441ee1d3c7e842", size = 75128, upload-time = "2025-10-06T14:50:51.92Z" }, { url = "https://files.pythonhosted.org/packages/14/2c/f069cab5b51d175a1a2cb4ccdf7a2c2dabd58aa5bd933fa036a8d15e2404/multidict-6.7.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:b8512bac933afc3e45fb2b18da8e59b78d4f408399a960339598374d4ae3b56b", size = 44410, upload-time = "2025-10-06T14:50:53.275Z" }, { url = "https://files.pythonhosted.org/packages/42/e2/64bb41266427af6642b6b128e8774ed84c11b80a90702c13ac0a86bb10cc/multidict-6.7.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:79dcf9e477bc65414ebfea98ffd013cb39552b5ecd62908752e0e413d6d06e38", size = 43205, upload-time = "2025-10-06T14:50:54.911Z" }, @@ -1445,9 +1369,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/8d/01/476d38fc73a212843f43c852b0eee266b6971f0e28329c2184a8df90c376/multidict-6.7.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:b6234e14f9314731ec45c42fc4554b88133ad53a09092cc48a88e771c125dadb", size = 258903, upload-time = "2025-10-06T14:51:12.466Z" }, { url = 
"https://files.pythonhosted.org/packages/49/6d/23faeb0868adba613b817d0e69c5f15531b24d462af8012c4f6de4fa8dc3/multidict-6.7.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:08d4379f9744d8f78d98c8673c06e202ffa88296f009c71bbafe8a6bf847d01f", size = 252333, upload-time = "2025-10-06T14:51:14.48Z" }, { url = "https://files.pythonhosted.org/packages/1e/cc/48d02ac22b30fa247f7dad82866e4b1015431092f4ba6ebc7e77596e0b18/multidict-6.7.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:9fe04da3f79387f450fd0061d4dd2e45a72749d31bf634aecc9e27f24fdc4b3f", size = 243411, upload-time = "2025-10-06T14:51:16.072Z" }, - { url = "https://files.pythonhosted.org/packages/4a/03/29a8bf5a18abf1fe34535c88adbdfa88c9fb869b5a3b120692c64abe8284/multidict-6.7.0-cp314-cp314-win32.whl", hash = "sha256:fbafe31d191dfa7c4c51f7a6149c9fb7e914dcf9ffead27dcfd9f1ae382b3885", size = 40940, upload-time = "2025-10-06T14:51:17.544Z" }, - { url = "https://files.pythonhosted.org/packages/82/16/7ed27b680791b939de138f906d5cf2b4657b0d45ca6f5dd6236fdddafb1a/multidict-6.7.0-cp314-cp314-win_amd64.whl", hash = "sha256:2f67396ec0310764b9222a1728ced1ab638f61aadc6226f17a71dd9324f9a99c", size = 45087, upload-time = "2025-10-06T14:51:18.875Z" }, - { url = "https://files.pythonhosted.org/packages/cd/3c/e3e62eb35a1950292fe39315d3c89941e30a9d07d5d2df42965ab041da43/multidict-6.7.0-cp314-cp314-win_arm64.whl", hash = "sha256:ba672b26069957ee369cfa7fc180dde1fc6f176eaf1e6beaf61fbebbd3d9c000", size = 42368, upload-time = "2025-10-06T14:51:20.225Z" }, { url = "https://files.pythonhosted.org/packages/8b/40/cd499bd0dbc5f1136726db3153042a735fffd0d77268e2ee20d5f33c010f/multidict-6.7.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:c1dcc7524066fa918c6a27d61444d4ee7900ec635779058571f70d042d86ed63", size = 82326, upload-time = "2025-10-06T14:51:21.588Z" }, { url = "https://files.pythonhosted.org/packages/13/8a/18e031eca251c8df76daf0288e6790561806e439f5ce99a170b4af30676b/multidict-6.7.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:27e0b36c2d388dc7b6ced3406671b401e84ad7eb0656b8f3a2f46ed0ce483718", size = 48065, upload-time = "2025-10-06T14:51:22.93Z" }, { url = "https://files.pythonhosted.org/packages/40/71/5e6701277470a87d234e433fb0a3a7deaf3bcd92566e421e7ae9776319de/multidict-6.7.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2a7baa46a22e77f0988e3b23d4ede5513ebec1929e34ee9495be535662c0dfe2", size = 46475, upload-time = "2025-10-06T14:51:24.352Z" }, @@ -1463,9 +1384,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/3d/b6/fed5ac6b8563ec72df6cb1ea8dac6d17f0a4a1f65045f66b6d3bf1497c02/multidict-6.7.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:5aa873cbc8e593d361ae65c68f85faadd755c3295ea2c12040ee146802f23b38", size = 248774, upload-time = "2025-10-06T14:51:46.836Z" }, { url = "https://files.pythonhosted.org/packages/6b/8d/b954d8c0dc132b68f760aefd45870978deec6818897389dace00fcde32ff/multidict-6.7.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:3d7b6ccce016e29df4b7ca819659f516f0bc7a4b3efa3bb2012ba06431b044f9", size = 242731, upload-time = "2025-10-06T14:51:48.541Z" }, { url = "https://files.pythonhosted.org/packages/16/9d/a2dac7009125d3540c2f54e194829ea18ac53716c61b655d8ed300120b0f/multidict-6.7.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:171b73bd4ee683d307599b66793ac80981b06f069b62eea1c9e29c9241aa66b0", size = 240193, upload-time = "2025-10-06T14:51:50.355Z" }, - { url = 
"https://files.pythonhosted.org/packages/39/ca/c05f144128ea232ae2178b008d5011d4e2cea86e4ee8c85c2631b1b94802/multidict-6.7.0-cp314-cp314t-win32.whl", hash = "sha256:b2d7f80c4e1fd010b07cb26820aae86b7e73b681ee4889684fb8d2d4537aab13", size = 48023, upload-time = "2025-10-06T14:51:51.883Z" }, - { url = "https://files.pythonhosted.org/packages/ba/8f/0a60e501584145588be1af5cc829265701ba3c35a64aec8e07cbb71d39bb/multidict-6.7.0-cp314-cp314t-win_amd64.whl", hash = "sha256:09929cab6fcb68122776d575e03c6cc64ee0b8fca48d17e135474b042ce515cd", size = 53507, upload-time = "2025-10-06T14:51:53.672Z" }, - { url = "https://files.pythonhosted.org/packages/7f/ae/3148b988a9c6239903e786eac19c889fab607c31d6efa7fb2147e5680f23/multidict-6.7.0-cp314-cp314t-win_arm64.whl", hash = "sha256:cc41db090ed742f32bd2d2c721861725e6109681eddf835d0a82bd3a5c382827", size = 44804, upload-time = "2025-10-06T14:51:55.415Z" }, { url = "https://files.pythonhosted.org/packages/b7/da/7d22601b625e241d4f23ef1ebff8acfc60da633c9e7e7922e24d10f592b3/multidict-6.7.0-py3-none-any.whl", hash = "sha256:394fc5c42a333c9ffc3e421a4c85e08580d990e08b99f6bf35b4132114c5dcb3", size = 12317, upload-time = "2025-10-06T14:52:29.272Z" }, ] @@ -1605,9 +1523,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/30/ee/ab4d727dd70806e5b4de96a798ae7ac6e4d42516f030ee60522474b6b332/propcache-0.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fd138803047fb4c062b1c1dd95462f5209456bfab55c734458f15d11da288f8f", size = 200113, upload-time = "2025-10-08T19:46:16.695Z" }, { url = "https://files.pythonhosted.org/packages/8a/0b/38b46208e6711b016aa8966a3ac793eee0d05c7159d8342aa27fc0bc365e/propcache-0.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:8c9b3cbe4584636d72ff556d9036e0c9317fa27b3ac1f0f558e7e84d1c9c5900", size = 200778, upload-time = "2025-10-08T19:46:18.023Z" }, { url = "https://files.pythonhosted.org/packages/cf/81/5abec54355ed344476bee711e9f04815d4b00a311ab0535599204eecc257/propcache-0.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f93243fdc5657247533273ac4f86ae106cc6445a0efacb9a1bfe982fcfefd90c", size = 193047, upload-time = "2025-10-08T19:46:19.449Z" }, - { url = "https://files.pythonhosted.org/packages/ec/b6/1f237c04e32063cb034acd5f6ef34ef3a394f75502e72703545631ab1ef6/propcache-0.4.1-cp310-cp310-win32.whl", hash = "sha256:a0ee98db9c5f80785b266eb805016e36058ac72c51a064040f2bc43b61101cdb", size = 38093, upload-time = "2025-10-08T19:46:20.643Z" }, - { url = "https://files.pythonhosted.org/packages/a6/67/354aac4e0603a15f76439caf0427781bcd6797f370377f75a642133bc954/propcache-0.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:1cdb7988c4e5ac7f6d175a28a9aa0c94cb6f2ebe52756a3c0cda98d2809a9e37", size = 41638, upload-time = "2025-10-08T19:46:21.935Z" }, - { url = "https://files.pythonhosted.org/packages/e0/e1/74e55b9fd1a4c209ff1a9a824bf6c8b3d1fc5a1ac3eabe23462637466785/propcache-0.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:d82ad62b19645419fe79dd63b3f9253e15b30e955c0170e5cebc350c1844e581", size = 38229, upload-time = "2025-10-08T19:46:23.368Z" }, { url = "https://files.pythonhosted.org/packages/8c/d4/4e2c9aaf7ac2242b9358f98dccd8f90f2605402f5afeff6c578682c2c491/propcache-0.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:60a8fda9644b7dfd5dece8c61d8a85e271cb958075bfc4e01083c148b61a7caf", size = 80208, upload-time = "2025-10-08T19:46:24.597Z" }, { url = "https://files.pythonhosted.org/packages/c2/21/d7b68e911f9c8e18e4ae43bdbc1e1e9bbd971f8866eb81608947b6f585ff/propcache-0.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:c30b53e7e6bda1d547cabb47c825f3843a0a1a42b0496087bb58d8fedf9f41b5", size = 45777, upload-time = "2025-10-08T19:46:25.733Z" }, { url = "https://files.pythonhosted.org/packages/d3/1d/11605e99ac8ea9435651ee71ab4cb4bf03f0949586246476a25aadfec54a/propcache-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6918ecbd897443087a3b7cd978d56546a812517dcaaca51b49526720571fa93e", size = 47647, upload-time = "2025-10-08T19:46:27.304Z" }, @@ -1620,9 +1535,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f2/26/7f00bd6bd1adba5aafe5f4a66390f243acab58eab24ff1a08bebb2ef9d40/propcache-0.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f10207adf04d08bec185bae14d9606a1444715bc99180f9331c9c02093e1959e", size = 212429, upload-time = "2025-10-08T19:46:38.398Z" }, { url = "https://files.pythonhosted.org/packages/84/89/fd108ba7815c1117ddca79c228f3f8a15fc82a73bca8b142eb5de13b2785/propcache-0.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:e9b0d8d0845bbc4cfcdcbcdbf5086886bc8157aa963c31c777ceff7846c77757", size = 216727, upload-time = "2025-10-08T19:46:39.732Z" }, { url = "https://files.pythonhosted.org/packages/79/37/3ec3f7e3173e73f1d600495d8b545b53802cbf35506e5732dd8578db3724/propcache-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:981333cb2f4c1896a12f4ab92a9cc8f09ea664e9b7dbdc4eff74627af3a11c0f", size = 205097, upload-time = "2025-10-08T19:46:41.025Z" }, - { url = "https://files.pythonhosted.org/packages/61/b0/b2631c19793f869d35f47d5a3a56fb19e9160d3c119f15ac7344fc3ccae7/propcache-0.4.1-cp311-cp311-win32.whl", hash = "sha256:f1d2f90aeec838a52f1c1a32fe9a619fefd5e411721a9117fbf82aea638fe8a1", size = 38084, upload-time = "2025-10-08T19:46:42.693Z" }, - { url = "https://files.pythonhosted.org/packages/f4/78/6cce448e2098e9f3bfc91bb877f06aa24b6ccace872e39c53b2f707c4648/propcache-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:364426a62660f3f699949ac8c621aad6977be7126c5807ce48c0aeb8e7333ea6", size = 41637, upload-time = "2025-10-08T19:46:43.778Z" }, - { url = "https://files.pythonhosted.org/packages/9c/e9/754f180cccd7f51a39913782c74717c581b9cc8177ad0e949f4d51812383/propcache-0.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:e53f3a38d3510c11953f3e6a33f205c6d1b001129f972805ca9b42fc308bc239", size = 38064, upload-time = "2025-10-08T19:46:44.872Z" }, { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, @@ -1635,9 +1547,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376, 
upload-time = "2025-10-08T19:46:59.067Z" }, { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988, upload-time = "2025-10-08T19:47:00.544Z" }, { url = "https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615, upload-time = "2025-10-08T19:47:01.968Z" }, - { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066, upload-time = "2025-10-08T19:47:03.503Z" }, - { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655, upload-time = "2025-10-08T19:47:04.973Z" }, - { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789, upload-time = "2025-10-08T19:47:06.077Z" }, { url = "https://files.pythonhosted.org/packages/bf/df/6d9c1b6ac12b003837dde8a10231a7344512186e87b36e855bef32241942/propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf", size = 77750, upload-time = "2025-10-08T19:47:07.648Z" }, { url = "https://files.pythonhosted.org/packages/8b/e8/677a0025e8a2acf07d3418a2e7ba529c9c33caf09d3c1f25513023c1db56/propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311", size = 44780, upload-time = "2025-10-08T19:47:08.851Z" }, { url = "https://files.pythonhosted.org/packages/89/a4/92380f7ca60f99ebae761936bc48a72a639e8a47b29050615eef757cb2a7/propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74", size = 46308, upload-time = "2025-10-08T19:47:09.982Z" }, @@ -1650,9 +1559,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f6/6c/f38ab64af3764f431e359f8baf9e0a21013e24329e8b85d2da32e8ed07ca/propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa", size = 203748, upload-time = "2025-10-08T19:47:21.338Z" }, { url = "https://files.pythonhosted.org/packages/d6/e3/fa846bd70f6534d647886621388f0a265254d30e3ce47e5c8e6e27dbf153/propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61", size = 205877, upload-time = "2025-10-08T19:47:23.059Z" }, { url = "https://files.pythonhosted.org/packages/e2/39/8163fc6f3133fea7b5f2827e8eba2029a0277ab2c5beee6c1db7b10fc23d/propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66", size = 199437, upload-time = "2025-10-08T19:47:24.445Z" }, - { url = 
"https://files.pythonhosted.org/packages/93/89/caa9089970ca49c7c01662bd0eeedfe85494e863e8043565aeb6472ce8fe/propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81", size = 37586, upload-time = "2025-10-08T19:47:25.736Z" }, - { url = "https://files.pythonhosted.org/packages/f5/ab/f76ec3c3627c883215b5c8080debb4394ef5a7a29be811f786415fc1e6fd/propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e", size = 40790, upload-time = "2025-10-08T19:47:26.847Z" }, - { url = "https://files.pythonhosted.org/packages/59/1b/e71ae98235f8e2ba5004d8cb19765a74877abf189bc53fc0c80d799e56c3/propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1", size = 37158, upload-time = "2025-10-08T19:47:27.961Z" }, { url = "https://files.pythonhosted.org/packages/83/ce/a31bbdfc24ee0dcbba458c8175ed26089cf109a55bbe7b7640ed2470cfe9/propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b", size = 81451, upload-time = "2025-10-08T19:47:29.445Z" }, { url = "https://files.pythonhosted.org/packages/25/9c/442a45a470a68456e710d96cacd3573ef26a1d0a60067e6a7d5e655621ed/propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566", size = 46374, upload-time = "2025-10-08T19:47:30.579Z" }, { url = "https://files.pythonhosted.org/packages/f4/bf/b1d5e21dbc3b2e889ea4327044fb16312a736d97640fb8b6aa3f9c7b3b65/propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835", size = 48396, upload-time = "2025-10-08T19:47:31.79Z" }, @@ -1665,9 +1571,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4a/65/3d4b61f36af2b4eddba9def857959f1016a51066b4f1ce348e0cf7881f58/propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874", size = 262739, upload-time = "2025-10-08T19:47:42.51Z" }, { url = "https://files.pythonhosted.org/packages/2a/42/26746ab087faa77c1c68079b228810436ccd9a5ce9ac85e2b7307195fd06/propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7", size = 263514, upload-time = "2025-10-08T19:47:43.927Z" }, { url = "https://files.pythonhosted.org/packages/94/13/630690fe201f5502d2403dd3cfd451ed8858fe3c738ee88d095ad2ff407b/propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1", size = 257781, upload-time = "2025-10-08T19:47:45.448Z" }, - { url = "https://files.pythonhosted.org/packages/92/f7/1d4ec5841505f423469efbfc381d64b7b467438cd5a4bbcbb063f3b73d27/propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717", size = 41396, upload-time = "2025-10-08T19:47:47.202Z" }, - { url = "https://files.pythonhosted.org/packages/48/f0/615c30622316496d2cbbc29f5985f7777d3ada70f23370608c1d3e081c1f/propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37", size = 44897, upload-time = "2025-10-08T19:47:48.336Z" }, - { url = 
"https://files.pythonhosted.org/packages/fd/ca/6002e46eccbe0e33dcd4069ef32f7f1c9e243736e07adca37ae8c4830ec3/propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a", size = 39789, upload-time = "2025-10-08T19:47:49.876Z" }, { url = "https://files.pythonhosted.org/packages/8e/5c/bca52d654a896f831b8256683457ceddd490ec18d9ec50e97dfd8fc726a8/propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12", size = 78152, upload-time = "2025-10-08T19:47:51.051Z" }, { url = "https://files.pythonhosted.org/packages/65/9b/03b04e7d82a5f54fb16113d839f5ea1ede58a61e90edf515f6577c66fa8f/propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c", size = 44869, upload-time = "2025-10-08T19:47:52.594Z" }, { url = "https://files.pythonhosted.org/packages/b2/fa/89a8ef0468d5833a23fff277b143d0573897cf75bd56670a6d28126c7d68/propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded", size = 46596, upload-time = "2025-10-08T19:47:54.073Z" }, @@ -1680,9 +1583,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a9/24/ef0d5fd1a811fb5c609278d0209c9f10c35f20581fcc16f818da959fc5b4/propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f", size = 202625, upload-time = "2025-10-08T19:48:06.213Z" }, { url = "https://files.pythonhosted.org/packages/f5/02/98ec20ff5546f68d673df2f7a69e8c0d076b5abd05ca882dc7ee3a83653d/propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49", size = 204209, upload-time = "2025-10-08T19:48:08.432Z" }, { url = "https://files.pythonhosted.org/packages/a0/87/492694f76759b15f0467a2a93ab68d32859672b646aa8a04ce4864e7932d/propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144", size = 197797, upload-time = "2025-10-08T19:48:09.968Z" }, - { url = "https://files.pythonhosted.org/packages/ee/36/66367de3575db1d2d3f3d177432bd14ee577a39d3f5d1b3d5df8afe3b6e2/propcache-0.4.1-cp314-cp314-win32.whl", hash = "sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f", size = 38140, upload-time = "2025-10-08T19:48:11.232Z" }, - { url = "https://files.pythonhosted.org/packages/0c/2a/a758b47de253636e1b8aef181c0b4f4f204bf0dd964914fb2af90a95b49b/propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153", size = 41257, upload-time = "2025-10-08T19:48:12.707Z" }, - { url = "https://files.pythonhosted.org/packages/34/5e/63bd5896c3fec12edcbd6f12508d4890d23c265df28c74b175e1ef9f4f3b/propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992", size = 38097, upload-time = "2025-10-08T19:48:13.923Z" }, { url = "https://files.pythonhosted.org/packages/99/85/9ff785d787ccf9bbb3f3106f79884a130951436f58392000231b4c737c80/propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f", size = 81455, upload-time = "2025-10-08T19:48:15.16Z" }, { url = 
"https://files.pythonhosted.org/packages/90/85/2431c10c8e7ddb1445c1f7c4b54d886e8ad20e3c6307e7218f05922cad67/propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393", size = 46372, upload-time = "2025-10-08T19:48:16.424Z" }, { url = "https://files.pythonhosted.org/packages/01/20/b0972d902472da9bcb683fa595099911f4d2e86e5683bcc45de60dd05dc3/propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0", size = 48411, upload-time = "2025-10-08T19:48:17.577Z" }, @@ -1695,9 +1595,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/18/ed/e7a9cfca28133386ba52278136d42209d3125db08d0a6395f0cba0c0285c/propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367", size = 262510, upload-time = "2025-10-08T19:48:28.65Z" }, { url = "https://files.pythonhosted.org/packages/f5/76/16d8bf65e8845dd62b4e2b57444ab81f07f40caa5652b8969b87ddcf2ef6/propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36", size = 263161, upload-time = "2025-10-08T19:48:30.133Z" }, { url = "https://files.pythonhosted.org/packages/e7/70/c99e9edb5d91d5ad8a49fa3c1e8285ba64f1476782fed10ab251ff413ba1/propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455", size = 257393, upload-time = "2025-10-08T19:48:31.567Z" }, - { url = "https://files.pythonhosted.org/packages/08/02/87b25304249a35c0915d236575bc3574a323f60b47939a2262b77632a3ee/propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85", size = 42546, upload-time = "2025-10-08T19:48:32.872Z" }, - { url = "https://files.pythonhosted.org/packages/cb/ef/3c6ecf8b317aa982f309835e8f96987466123c6e596646d4e6a1dfcd080f/propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1", size = 46259, upload-time = "2025-10-08T19:48:34.226Z" }, - { url = "https://files.pythonhosted.org/packages/c4/2d/346e946d4951f37eca1e4f55be0f0174c52cd70720f84029b02f296f4a38/propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9", size = 40428, upload-time = "2025-10-08T19:48:35.441Z" }, { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, ] @@ -2208,35 +2105,25 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/d7/ec/32ae09139f61bef3de3142e85c47abdee8db9a55af2bb438da54a4549263/sqlalchemy-2.0.45-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f02325709d1b1a1489f23a39b318e175a171497374149eae74d612634b234c0", size = 3232781, upload-time = "2025-12-09T22:09:54.435Z" }, { url = "https://files.pythonhosted.org/packages/ad/bd/bf7b869b6f5585eac34222e1cf4405f4ba8c3b85dd6b1af5d4ce8bca695f/sqlalchemy-2.0.45-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d2c3684fca8a05f0ac1d9a21c1f4a266983a7ea9180efb80ffeb03861ecd01a0", size = 3182096, upload-time = "2025-12-09T22:06:06.169Z" }, { url = 
"https://files.pythonhosted.org/packages/21/6a/c219720a241bb8f35c88815ccc27761f5af7fdef04b987b0e8a2c1a6dcaa/sqlalchemy-2.0.45-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:040f6f0545b3b7da6b9317fc3e922c9a98fc7243b2a1b39f78390fc0942f7826", size = 3205109, upload-time = "2025-12-09T22:09:55.969Z" }, - { url = "https://files.pythonhosted.org/packages/bd/c4/6ccf31b2bc925d5d95fab403ffd50d20d7c82b858cf1a4855664ca054dce/sqlalchemy-2.0.45-cp310-cp310-win32.whl", hash = "sha256:830d434d609fe7bfa47c425c445a8b37929f140a7a44cdaf77f6d34df3a7296a", size = 2114240, upload-time = "2025-12-09T21:29:54.007Z" }, - { url = "https://files.pythonhosted.org/packages/de/29/a27a31fca07316def418db6f7c70ab14010506616a2decef1906050a0587/sqlalchemy-2.0.45-cp310-cp310-win_amd64.whl", hash = "sha256:0209d9753671b0da74da2cfbb9ecf9c02f72a759e4b018b3ab35f244c91842c7", size = 2137615, upload-time = "2025-12-09T21:29:55.85Z" }, { url = "https://files.pythonhosted.org/packages/a2/1c/769552a9d840065137272ebe86ffbb0bc92b0f1e0a68ee5266a225f8cd7b/sqlalchemy-2.0.45-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e90a344c644a4fa871eb01809c32096487928bd2038bf10f3e4515cb688cc56", size = 2153860, upload-time = "2025-12-10T20:03:23.843Z" }, { url = "https://files.pythonhosted.org/packages/f3/f8/9be54ff620e5b796ca7b44670ef58bc678095d51b0e89d6e3102ea468216/sqlalchemy-2.0.45-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b8c8b41b97fba5f62349aa285654230296829672fc9939cd7f35aab246d1c08b", size = 3309379, upload-time = "2025-12-09T22:06:07.461Z" }, { url = "https://files.pythonhosted.org/packages/f6/2b/60ce3ee7a5ae172bfcd419ce23259bb874d2cddd44f67c5df3760a1e22f9/sqlalchemy-2.0.45-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:12c694ed6468333a090d2f60950e4250b928f457e4962389553d6ba5fe9951ac", size = 3309948, upload-time = "2025-12-09T22:09:57.643Z" }, { url = "https://files.pythonhosted.org/packages/a3/42/bac8d393f5db550e4e466d03d16daaafd2bad1f74e48c12673fb499a7fc1/sqlalchemy-2.0.45-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f7d27a1d977a1cfef38a0e2e1ca86f09c4212666ce34e6ae542f3ed0a33bc606", size = 3261239, upload-time = "2025-12-09T22:06:08.879Z" }, { url = "https://files.pythonhosted.org/packages/6f/12/43dc70a0528c59842b04ea1c1ed176f072a9b383190eb015384dd102fb19/sqlalchemy-2.0.45-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d62e47f5d8a50099b17e2bfc1b0c7d7ecd8ba6b46b1507b58cc4f05eefc3bb1c", size = 3284065, upload-time = "2025-12-09T22:09:59.454Z" }, - { url = "https://files.pythonhosted.org/packages/cf/9c/563049cf761d9a2ec7bc489f7879e9d94e7b590496bea5bbee9ed7b4cc32/sqlalchemy-2.0.45-cp311-cp311-win32.whl", hash = "sha256:3c5f76216e7b85770d5bb5130ddd11ee89f4d52b11783674a662c7dd57018177", size = 2113480, upload-time = "2025-12-09T21:29:57.03Z" }, - { url = "https://files.pythonhosted.org/packages/bc/fa/09d0a11fe9f15c7fa5c7f0dd26be3d235b0c0cbf2f9544f43bc42efc8a24/sqlalchemy-2.0.45-cp311-cp311-win_amd64.whl", hash = "sha256:a15b98adb7f277316f2c276c090259129ee4afca783495e212048daf846654b2", size = 2138407, upload-time = "2025-12-09T21:29:58.556Z" }, { url = "https://files.pythonhosted.org/packages/2d/c7/1900b56ce19bff1c26f39a4ce427faec7716c81ac792bfac8b6a9f3dca93/sqlalchemy-2.0.45-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b3ee2aac15169fb0d45822983631466d60b762085bc4535cd39e66bea362df5f", size = 3333760, upload-time = "2025-12-09T22:11:02.66Z" }, { url = 
"https://files.pythonhosted.org/packages/0a/93/3be94d96bb442d0d9a60e55a6bb6e0958dd3457751c6f8502e56ef95fed0/sqlalchemy-2.0.45-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba547ac0b361ab4f1608afbc8432db669bd0819b3e12e29fb5fa9529a8bba81d", size = 3348268, upload-time = "2025-12-09T22:13:49.054Z" }, { url = "https://files.pythonhosted.org/packages/48/4b/f88ded696e61513595e4a9778f9d3f2bf7332cce4eb0c7cedaabddd6687b/sqlalchemy-2.0.45-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:215f0528b914e5c75ef2559f69dca86878a3beeb0c1be7279d77f18e8d180ed4", size = 3278144, upload-time = "2025-12-09T22:11:04.14Z" }, { url = "https://files.pythonhosted.org/packages/ed/6a/310ecb5657221f3e1bd5288ed83aa554923fb5da48d760a9f7622afeb065/sqlalchemy-2.0.45-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:107029bf4f43d076d4011f1afb74f7c3e2ea029ec82eb23d8527d5e909e97aa6", size = 3313907, upload-time = "2025-12-09T22:13:50.598Z" }, - { url = "https://files.pythonhosted.org/packages/5c/39/69c0b4051079addd57c84a5bfb34920d87456dd4c90cf7ee0df6efafc8ff/sqlalchemy-2.0.45-cp312-cp312-win32.whl", hash = "sha256:0c9f6ada57b58420a2c0277ff853abe40b9e9449f8d7d231763c6bc30f5c4953", size = 2112182, upload-time = "2025-12-09T21:39:30.824Z" }, - { url = "https://files.pythonhosted.org/packages/f7/4e/510db49dd89fc3a6e994bee51848c94c48c4a00dc905e8d0133c251f41a7/sqlalchemy-2.0.45-cp312-cp312-win_amd64.whl", hash = "sha256:8defe5737c6d2179c7997242d6473587c3beb52e557f5ef0187277009f73e5e1", size = 2139200, upload-time = "2025-12-09T21:39:32.321Z" }, { url = "https://files.pythonhosted.org/packages/6a/c8/7cc5221b47a54edc72a0140a1efa56e0a2730eefa4058d7ed0b4c4357ff8/sqlalchemy-2.0.45-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fe187fc31a54d7fd90352f34e8c008cf3ad5d064d08fedd3de2e8df83eb4a1cf", size = 3277082, upload-time = "2025-12-09T22:11:06.167Z" }, { url = "https://files.pythonhosted.org/packages/0e/50/80a8d080ac7d3d321e5e5d420c9a522b0aa770ec7013ea91f9a8b7d36e4a/sqlalchemy-2.0.45-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:672c45cae53ba88e0dad74b9027dddd09ef6f441e927786b05bec75d949fbb2e", size = 3293131, upload-time = "2025-12-09T22:13:52.626Z" }, { url = "https://files.pythonhosted.org/packages/da/4c/13dab31266fc9904f7609a5dc308a2432a066141d65b857760c3bef97e69/sqlalchemy-2.0.45-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:470daea2c1ce73910f08caf10575676a37159a6d16c4da33d0033546bddebc9b", size = 3225389, upload-time = "2025-12-09T22:11:08.093Z" }, { url = "https://files.pythonhosted.org/packages/74/04/891b5c2e9f83589de202e7abaf24cd4e4fa59e1837d64d528829ad6cc107/sqlalchemy-2.0.45-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9c6378449e0940476577047150fd09e242529b761dc887c9808a9a937fe990c8", size = 3266054, upload-time = "2025-12-09T22:13:54.262Z" }, - { url = "https://files.pythonhosted.org/packages/f1/24/fc59e7f71b0948cdd4cff7a286210e86b0443ef1d18a23b0d83b87e4b1f7/sqlalchemy-2.0.45-cp313-cp313-win32.whl", hash = "sha256:4b6bec67ca45bc166c8729910bd2a87f1c0407ee955df110d78948f5b5827e8a", size = 2110299, upload-time = "2025-12-09T21:39:33.486Z" }, - { url = "https://files.pythonhosted.org/packages/c0/c5/d17113020b2d43073412aeca09b60d2009442420372123b8d49cc253f8b8/sqlalchemy-2.0.45-cp313-cp313-win_amd64.whl", hash = "sha256:afbf47dc4de31fa38fd491f3705cac5307d21d4bb828a4f020ee59af412744ee", size = 2136264, upload-time = "2025-12-09T21:39:36.801Z" }, { url 
= "https://files.pythonhosted.org/packages/3d/8d/bb40a5d10e7a5f2195f235c0b2f2c79b0bf6e8f00c0c223130a4fbd2db09/sqlalchemy-2.0.45-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:83d7009f40ce619d483d26ac1b757dfe3167b39921379a8bd1b596cf02dab4a6", size = 3521998, upload-time = "2025-12-09T22:13:28.622Z" }, { url = "https://files.pythonhosted.org/packages/75/a5/346128b0464886f036c039ea287b7332a410aa2d3fb0bb5d404cb8861635/sqlalchemy-2.0.45-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d8a2ca754e5415cde2b656c27900b19d50ba076aa05ce66e2207623d3fe41f5a", size = 3473434, upload-time = "2025-12-09T22:13:30.188Z" }, { url = "https://files.pythonhosted.org/packages/cc/64/4e1913772646b060b025d3fc52ce91a58967fe58957df32b455de5a12b4f/sqlalchemy-2.0.45-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7f46ec744e7f51275582e6a24326e10c49fbdd3fc99103e01376841213028774", size = 3272404, upload-time = "2025-12-09T22:11:09.662Z" }, { url = "https://files.pythonhosted.org/packages/b3/27/caf606ee924282fe4747ee4fd454b335a72a6e018f97eab5ff7f28199e16/sqlalchemy-2.0.45-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:883c600c345123c033c2f6caca18def08f1f7f4c3ebeb591a63b6fceffc95cce", size = 3277057, upload-time = "2025-12-09T22:13:56.213Z" }, { url = "https://files.pythonhosted.org/packages/85/d0/3d64218c9724e91f3d1574d12eb7ff8f19f937643815d8daf792046d88ab/sqlalchemy-2.0.45-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2c0b74aa79e2deade948fe8593654c8ef4228c44ba862bb7c9585c8e0db90f33", size = 3222279, upload-time = "2025-12-09T22:11:11.1Z" }, { url = "https://files.pythonhosted.org/packages/24/10/dd7688a81c5bc7690c2a3764d55a238c524cd1a5a19487928844cb247695/sqlalchemy-2.0.45-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8a420169cef179d4c9064365f42d779f1e5895ad26ca0c8b4c0233920973db74", size = 3244508, upload-time = "2025-12-09T22:13:57.932Z" }, - { url = "https://files.pythonhosted.org/packages/aa/41/db75756ca49f777e029968d9c9fee338c7907c563267740c6d310a8e3f60/sqlalchemy-2.0.45-cp314-cp314-win32.whl", hash = "sha256:e50dcb81a5dfe4b7b4a4aa8f338116d127cb209559124f3694c70d6cd072b68f", size = 2113204, upload-time = "2025-12-09T21:39:38.365Z" }, - { url = "https://files.pythonhosted.org/packages/89/a2/0e1590e9adb292b1d576dbcf67ff7df8cf55e56e78d2c927686d01080f4b/sqlalchemy-2.0.45-cp314-cp314-win_amd64.whl", hash = "sha256:4748601c8ea959e37e03d13dcda4a44837afcd1b21338e637f7c935b8da06177", size = 2138785, upload-time = "2025-12-09T21:39:39.503Z" }, { url = "https://files.pythonhosted.org/packages/42/39/f05f0ed54d451156bbed0e23eb0516bcad7cbb9f18b3bf219c786371b3f0/sqlalchemy-2.0.45-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd337d3526ec5298f67d6a30bbbe4ed7e5e68862f0bf6dd21d289f8d37b7d60b", size = 3522029, upload-time = "2025-12-09T22:13:32.09Z" }, { url = "https://files.pythonhosted.org/packages/54/0f/d15398b98b65c2bce288d5ee3f7d0a81f77ab89d9456994d5c7cc8b2a9db/sqlalchemy-2.0.45-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:9a62b446b7d86a3909abbcd1cd3cc550a832f99c2bc37c5b22e1925438b9367b", size = 3475142, upload-time = "2025-12-09T22:13:33.739Z" }, { url = "https://files.pythonhosted.org/packages/bf/e1/3ccb13c643399d22289c6a9786c1a91e3dcbb68bce4beb44926ac2c557bf/sqlalchemy-2.0.45-py3-none-any.whl", hash = "sha256:5225a288e4c8cc2308dbdd874edad6e7d0fd38eac1e9e5f23503425c8eee20d0", size = 1936672, 
upload-time = "2025-12-09T21:54:52.608Z" }, @@ -2426,9 +2313,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/57/2e/34c5b4eb9b07e16e873db5b182c71e5f06f9b5af388cdaa97736d79dd9a6/yarl-1.22.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f0d97c18dfd9a9af4490631905a3f131a8e4c9e80a39353919e2cfed8f00aedc", size = 359082, upload-time = "2025-10-06T14:09:01.936Z" }, { url = "https://files.pythonhosted.org/packages/31/71/fa7e10fb772d273aa1f096ecb8ab8594117822f683bab7d2c5a89914c92a/yarl-1.22.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:437840083abe022c978470b942ff832c3940b2ad3734d424b7eaffcd07f76737", size = 357811, upload-time = "2025-10-06T14:09:03.445Z" }, { url = "https://files.pythonhosted.org/packages/26/da/11374c04e8e1184a6a03cf9c8f5688d3e5cec83ed6f31ad3481b3207f709/yarl-1.22.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a899cbd98dce6f5d8de1aad31cb712ec0a530abc0a86bd6edaa47c1090138467", size = 351223, upload-time = "2025-10-06T14:09:05.401Z" }, - { url = "https://files.pythonhosted.org/packages/82/8f/e2d01f161b0c034a30410e375e191a5d27608c1f8693bab1a08b089ca096/yarl-1.22.0-cp310-cp310-win32.whl", hash = "sha256:595697f68bd1f0c1c159fcb97b661fc9c3f5db46498043555d04805430e79bea", size = 82118, upload-time = "2025-10-06T14:09:11.148Z" }, - { url = "https://files.pythonhosted.org/packages/62/46/94c76196642dbeae634c7a61ba3da88cd77bed875bf6e4a8bed037505aa6/yarl-1.22.0-cp310-cp310-win_amd64.whl", hash = "sha256:cb95a9b1adaa48e41815a55ae740cfda005758104049a640a398120bf02515ca", size = 86852, upload-time = "2025-10-06T14:09:12.958Z" }, - { url = "https://files.pythonhosted.org/packages/af/af/7df4f179d3b1a6dcb9a4bd2ffbc67642746fcafdb62580e66876ce83fff4/yarl-1.22.0-cp310-cp310-win_arm64.whl", hash = "sha256:b85b982afde6df99ecc996990d4ad7ccbdbb70e2a4ba4de0aecde5922ba98a0b", size = 82012, upload-time = "2025-10-06T14:09:14.664Z" }, { url = "https://files.pythonhosted.org/packages/4d/27/5ab13fc84c76a0250afd3d26d5936349a35be56ce5785447d6c423b26d92/yarl-1.22.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:1ab72135b1f2db3fed3997d7e7dc1b80573c67138023852b6efb336a5eae6511", size = 141607, upload-time = "2025-10-06T14:09:16.298Z" }, { url = "https://files.pythonhosted.org/packages/6a/a1/d065d51d02dc02ce81501d476b9ed2229d9a990818332242a882d5d60340/yarl-1.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:669930400e375570189492dc8d8341301578e8493aec04aebc20d4717f899dd6", size = 94027, upload-time = "2025-10-06T14:09:17.786Z" }, { url = "https://files.pythonhosted.org/packages/c1/da/8da9f6a53f67b5106ffe902c6fa0164e10398d4e150d85838b82f424072a/yarl-1.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:792a2af6d58177ef7c19cbf0097aba92ca1b9cb3ffdd9c7470e156c8f9b5e028", size = 94963, upload-time = "2025-10-06T14:09:19.662Z" }, @@ -2442,9 +2326,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/2c/d1/b49454411a60edb6fefdcad4f8e6dbba7d8019e3a508a1c5836cba6d0781/yarl-1.22.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:d5372ca1df0f91a86b047d1277c2aaf1edb32d78bbcefffc81b40ffd18f027ed", size = 385311, upload-time = "2025-10-06T14:09:34.634Z" }, { url = "https://files.pythonhosted.org/packages/87/e5/40d7a94debb8448c7771a916d1861d6609dddf7958dc381117e7ba36d9e8/yarl-1.22.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:51af598701f5299012b8416486b40fceef8c26fc87dc6d7d1f6fc30609ea0aa6", size = 381094, upload-time = "2025-10-06T14:09:36.268Z" }, { url = 
"https://files.pythonhosted.org/packages/35/d8/611cc282502381ad855448643e1ad0538957fc82ae83dfe7762c14069e14/yarl-1.22.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b266bd01fedeffeeac01a79ae181719ff848a5a13ce10075adbefc8f1daee70e", size = 370944, upload-time = "2025-10-06T14:09:37.872Z" }, - { url = "https://files.pythonhosted.org/packages/2d/df/fadd00fb1c90e1a5a8bd731fa3d3de2e165e5a3666a095b04e31b04d9cb6/yarl-1.22.0-cp311-cp311-win32.whl", hash = "sha256:a9b1ba5610a4e20f655258d5a1fdc7ebe3d837bb0e45b581398b99eb98b1f5ca", size = 81804, upload-time = "2025-10-06T14:09:39.359Z" }, - { url = "https://files.pythonhosted.org/packages/b5/f7/149bb6f45f267cb5c074ac40c01c6b3ea6d8a620d34b337f6321928a1b4d/yarl-1.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:078278b9b0b11568937d9509b589ee83ef98ed6d561dfe2020e24a9fd08eaa2b", size = 86858, upload-time = "2025-10-06T14:09:41.068Z" }, - { url = "https://files.pythonhosted.org/packages/2b/13/88b78b93ad3f2f0b78e13bfaaa24d11cbc746e93fe76d8c06bf139615646/yarl-1.22.0-cp311-cp311-win_arm64.whl", hash = "sha256:b6a6f620cfe13ccec221fa312139135166e47ae169f8253f72a0abc0dae94376", size = 81637, upload-time = "2025-10-06T14:09:42.712Z" }, { url = "https://files.pythonhosted.org/packages/75/ff/46736024fee3429b80a165a732e38e5d5a238721e634ab41b040d49f8738/yarl-1.22.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e340382d1afa5d32b892b3ff062436d592ec3d692aeea3bef3a5cfe11bbf8c6f", size = 142000, upload-time = "2025-10-06T14:09:44.631Z" }, { url = "https://files.pythonhosted.org/packages/5a/9a/b312ed670df903145598914770eb12de1bac44599549b3360acc96878df8/yarl-1.22.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f1e09112a2c31ffe8d80be1b0988fa6a18c5d5cad92a9ffbb1c04c91bfe52ad2", size = 94338, upload-time = "2025-10-06T14:09:46.372Z" }, { url = "https://files.pythonhosted.org/packages/ba/f5/0601483296f09c3c65e303d60c070a5c19fcdbc72daa061e96170785bc7d/yarl-1.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:939fe60db294c786f6b7c2d2e121576628468f65453d86b0fe36cb52f987bd74", size = 94909, upload-time = "2025-10-06T14:09:48.648Z" }, @@ -2458,9 +2339,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/25/e1/5302ff9b28f0c59cac913b91fe3f16c59a033887e57ce9ca5d41a3a94737/yarl-1.22.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:b790b39c7e9a4192dc2e201a282109ed2985a1ddbd5ac08dc56d0e121400a8f7", size = 382324, upload-time = "2025-10-06T14:10:02.756Z" }, { url = "https://files.pythonhosted.org/packages/bf/cd/4617eb60f032f19ae3a688dc990d8f0d89ee0ea378b61cac81ede3e52fae/yarl-1.22.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31f0b53913220599446872d757257be5898019c85e7971599065bc55065dc99d", size = 383803, upload-time = "2025-10-06T14:10:04.552Z" }, { url = "https://files.pythonhosted.org/packages/59/65/afc6e62bb506a319ea67b694551dab4a7e6fb7bf604e9bd9f3e11d575fec/yarl-1.22.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a49370e8f711daec68d09b821a34e1167792ee2d24d405cbc2387be4f158b520", size = 374220, upload-time = "2025-10-06T14:10:06.489Z" }, - { url = "https://files.pythonhosted.org/packages/e7/3d/68bf18d50dc674b942daec86a9ba922d3113d8399b0e52b9897530442da2/yarl-1.22.0-cp312-cp312-win32.whl", hash = "sha256:70dfd4f241c04bd9239d53b17f11e6ab672b9f1420364af63e8531198e3f5fe8", size = 81589, upload-time = "2025-10-06T14:10:09.254Z" }, - { url = "https://files.pythonhosted.org/packages/c8/9a/6ad1a9b37c2f72874f93e691b2e7ecb6137fb2b899983125db4204e47575/yarl-1.22.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:8884d8b332a5e9b88e23f60bb166890009429391864c685e17bd73a9eda9105c", size = 87213, upload-time = "2025-10-06T14:10:11.369Z" }, - { url = "https://files.pythonhosted.org/packages/44/c5/c21b562d1680a77634d748e30c653c3ca918beb35555cff24986fff54598/yarl-1.22.0-cp312-cp312-win_arm64.whl", hash = "sha256:ea70f61a47f3cc93bdf8b2f368ed359ef02a01ca6393916bc8ff877427181e74", size = 81330, upload-time = "2025-10-06T14:10:13.112Z" }, { url = "https://files.pythonhosted.org/packages/ea/f3/d67de7260456ee105dc1d162d43a019ecad6b91e2f51809d6cddaa56690e/yarl-1.22.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8dee9c25c74997f6a750cd317b8ca63545169c098faee42c84aa5e506c819b53", size = 139980, upload-time = "2025-10-06T14:10:14.601Z" }, { url = "https://files.pythonhosted.org/packages/01/88/04d98af0b47e0ef42597b9b28863b9060bb515524da0a65d5f4db160b2d5/yarl-1.22.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01e73b85a5434f89fc4fe27dcda2aff08ddf35e4d47bbbea3bdcd25321af538a", size = 93424, upload-time = "2025-10-06T14:10:16.115Z" }, { url = "https://files.pythonhosted.org/packages/18/91/3274b215fd8442a03975ce6bee5fe6aa57a8326b29b9d3d56234a1dca244/yarl-1.22.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:22965c2af250d20c873cdbee8ff958fb809940aeb2e74ba5f20aaf6b7ac8c70c", size = 93821, upload-time = "2025-10-06T14:10:17.993Z" }, @@ -2474,9 +2352,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ca/5a/09b7be3905962f145b73beb468cdd53db8aa171cf18c80400a54c5b82846/yarl-1.22.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c7044802eec4524fde550afc28edda0dd5784c4c45f0be151a2d3ba017daca7d", size = 382590, upload-time = "2025-10-06T14:10:33.352Z" }, { url = "https://files.pythonhosted.org/packages/aa/7f/59ec509abf90eda5048b0bc3e2d7b5099dffdb3e6b127019895ab9d5ef44/yarl-1.22.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:139718f35149ff544caba20fce6e8a2f71f1e39b92c700d8438a0b1d2a631a02", size = 385316, upload-time = "2025-10-06T14:10:35.034Z" }, { url = "https://files.pythonhosted.org/packages/e5/84/891158426bc8036bfdfd862fabd0e0fa25df4176ec793e447f4b85cf1be4/yarl-1.22.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e1b51bebd221006d3d2f95fbe124b22b247136647ae5dcc8c7acafba66e5ee67", size = 374431, upload-time = "2025-10-06T14:10:37.76Z" }, - { url = "https://files.pythonhosted.org/packages/bb/49/03da1580665baa8bef5e8ed34c6df2c2aca0a2f28bf397ed238cc1bbc6f2/yarl-1.22.0-cp313-cp313-win32.whl", hash = "sha256:d3e32536234a95f513bd374e93d717cf6b2231a791758de6c509e3653f234c95", size = 81555, upload-time = "2025-10-06T14:10:39.649Z" }, - { url = "https://files.pythonhosted.org/packages/9a/ee/450914ae11b419eadd067c6183ae08381cfdfcb9798b90b2b713bbebddda/yarl-1.22.0-cp313-cp313-win_amd64.whl", hash = "sha256:47743b82b76d89a1d20b83e60d5c20314cbd5ba2befc9cda8f28300c4a08ed4d", size = 86965, upload-time = "2025-10-06T14:10:41.313Z" }, - { url = "https://files.pythonhosted.org/packages/98/4d/264a01eae03b6cf629ad69bae94e3b0e5344741e929073678e84bf7a3e3b/yarl-1.22.0-cp313-cp313-win_arm64.whl", hash = "sha256:5d0fcda9608875f7d052eff120c7a5da474a6796fe4d83e152e0e4d42f6d1a9b", size = 81205, upload-time = "2025-10-06T14:10:43.167Z" }, { url = "https://files.pythonhosted.org/packages/88/fc/6908f062a2f77b5f9f6d69cecb1747260831ff206adcbc5b510aff88df91/yarl-1.22.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:719ae08b6972befcba4310e49edb1161a88cdd331e3a694b84466bd938a6ab10", size = 146209, upload-time = "2025-10-06T14:10:44.643Z" }, { url = 
"https://files.pythonhosted.org/packages/65/47/76594ae8eab26210b4867be6f49129861ad33da1f1ebdf7051e98492bf62/yarl-1.22.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:47d8a5c446df1c4db9d21b49619ffdba90e77c89ec6e283f453856c74b50b9e3", size = 95966, upload-time = "2025-10-06T14:10:46.554Z" }, { url = "https://files.pythonhosted.org/packages/ab/ce/05e9828a49271ba6b5b038b15b3934e996980dd78abdfeb52a04cfb9467e/yarl-1.22.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cfebc0ac8333520d2d0423cbbe43ae43c8838862ddb898f5ca68565e395516e9", size = 97312, upload-time = "2025-10-06T14:10:48.007Z" }, @@ -2490,9 +2365,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c2/ad/b77d7b3f14a4283bffb8e92c6026496f6de49751c2f97d4352242bba3990/yarl-1.22.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:59c189e3e99a59cf8d83cbb31d4db02d66cda5a1a4374e8a012b51255341abf5", size = 350996, upload-time = "2025-10-06T14:11:03.452Z" }, { url = "https://files.pythonhosted.org/packages/81/c8/06e1d69295792ba54d556f06686cbd6a7ce39c22307100e3fb4a2c0b0a1d/yarl-1.22.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:5a3bf7f62a289fa90f1990422dc8dff5a458469ea71d1624585ec3a4c8d6960f", size = 356047, upload-time = "2025-10-06T14:11:05.115Z" }, { url = "https://files.pythonhosted.org/packages/4b/b8/4c0e9e9f597074b208d18cef227d83aac36184bfbc6eab204ea55783dbc5/yarl-1.22.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:de6b9a04c606978fdfe72666fa216ffcf2d1a9f6a381058d4378f8d7b1e5de62", size = 342947, upload-time = "2025-10-06T14:11:08.137Z" }, - { url = "https://files.pythonhosted.org/packages/e0/e5/11f140a58bf4c6ad7aca69a892bff0ee638c31bea4206748fc0df4ebcb3a/yarl-1.22.0-cp313-cp313t-win32.whl", hash = "sha256:1834bb90991cc2999f10f97f5f01317f99b143284766d197e43cd5b45eb18d03", size = 86943, upload-time = "2025-10-06T14:11:10.284Z" }, - { url = "https://files.pythonhosted.org/packages/31/74/8b74bae38ed7fe6793d0c15a0c8207bbb819cf287788459e5ed230996cdd/yarl-1.22.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ff86011bd159a9d2dfc89c34cfd8aff12875980e3bd6a39ff097887520e60249", size = 93715, upload-time = "2025-10-06T14:11:11.739Z" }, - { url = "https://files.pythonhosted.org/packages/69/66/991858aa4b5892d57aef7ee1ba6b4d01ec3b7eb3060795d34090a3ca3278/yarl-1.22.0-cp313-cp313t-win_arm64.whl", hash = "sha256:7861058d0582b847bc4e3a4a4c46828a410bca738673f35a29ba3ca5db0b473b", size = 83857, upload-time = "2025-10-06T14:11:13.586Z" }, { url = "https://files.pythonhosted.org/packages/46/b3/e20ef504049f1a1c54a814b4b9bed96d1ac0e0610c3b4da178f87209db05/yarl-1.22.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:34b36c2c57124530884d89d50ed2c1478697ad7473efd59cfd479945c95650e4", size = 140520, upload-time = "2025-10-06T14:11:15.465Z" }, { url = "https://files.pythonhosted.org/packages/e4/04/3532d990fdbab02e5ede063676b5c4260e7f3abea2151099c2aa745acc4c/yarl-1.22.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:0dd9a702591ca2e543631c2a017e4a547e38a5c0f29eece37d9097e04a7ac683", size = 93504, upload-time = "2025-10-06T14:11:17.106Z" }, { url = "https://files.pythonhosted.org/packages/11/63/ff458113c5c2dac9a9719ac68ee7c947cb621432bcf28c9972b1c0e83938/yarl-1.22.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:594fcab1032e2d2cc3321bb2e51271e7cd2b516c7d9aee780ece81b07ff8244b", size = 94282, upload-time = "2025-10-06T14:11:19.064Z" }, @@ -2506,9 +2378,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/ec/2a/249405fd26776f8b13c067378ef4d7dd49c9098d1b6457cdd152a99e96a9/yarl-1.22.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:ebd4549b108d732dba1d4ace67614b9545b21ece30937a63a65dd34efa19732d", size = 381451, upload-time = "2025-10-06T14:11:35.136Z" }, { url = "https://files.pythonhosted.org/packages/67/a8/fb6b1adbe98cf1e2dd9fad71003d3a63a1bc22459c6e15f5714eb9323b93/yarl-1.22.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f87ac53513d22240c7d59203f25cc3beac1e574c6cd681bbfd321987b69f95fd", size = 383814, upload-time = "2025-10-06T14:11:37.094Z" }, { url = "https://files.pythonhosted.org/packages/d9/f9/3aa2c0e480fb73e872ae2814c43bc1e734740bb0d54e8cb2a95925f98131/yarl-1.22.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:22b029f2881599e2f1b06f8f1db2ee63bd309e2293ba2d566e008ba12778b8da", size = 370799, upload-time = "2025-10-06T14:11:38.83Z" }, - { url = "https://files.pythonhosted.org/packages/50/3c/af9dba3b8b5eeb302f36f16f92791f3ea62e3f47763406abf6d5a4a3333b/yarl-1.22.0-cp314-cp314-win32.whl", hash = "sha256:6a635ea45ba4ea8238463b4f7d0e721bad669f80878b7bfd1f89266e2ae63da2", size = 82990, upload-time = "2025-10-06T14:11:40.624Z" }, - { url = "https://files.pythonhosted.org/packages/ac/30/ac3a0c5bdc1d6efd1b41fa24d4897a4329b3b1e98de9449679dd327af4f0/yarl-1.22.0-cp314-cp314-win_amd64.whl", hash = "sha256:0d6e6885777af0f110b0e5d7e5dda8b704efed3894da26220b7f3d887b839a79", size = 88292, upload-time = "2025-10-06T14:11:42.578Z" }, - { url = "https://files.pythonhosted.org/packages/df/0a/227ab4ff5b998a1b7410abc7b46c9b7a26b0ca9e86c34ba4b8d8bc7c63d5/yarl-1.22.0-cp314-cp314-win_arm64.whl", hash = "sha256:8218f4e98d3c10d683584cb40f0424f4b9fd6e95610232dd75e13743b070ee33", size = 82888, upload-time = "2025-10-06T14:11:44.863Z" }, { url = "https://files.pythonhosted.org/packages/06/5e/a15eb13db90abd87dfbefb9760c0f3f257ac42a5cac7e75dbc23bed97a9f/yarl-1.22.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:45c2842ff0e0d1b35a6bf1cd6c690939dacb617a70827f715232b2e0494d55d1", size = 146223, upload-time = "2025-10-06T14:11:46.796Z" }, { url = "https://files.pythonhosted.org/packages/18/82/9665c61910d4d84f41a5bf6837597c89e665fa88aa4941080704645932a9/yarl-1.22.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:d947071e6ebcf2e2bee8fce76e10faca8f7a14808ca36a910263acaacef08eca", size = 95981, upload-time = "2025-10-06T14:11:48.845Z" }, { url = "https://files.pythonhosted.org/packages/5d/9a/2f65743589809af4d0a6d3aa749343c4b5f4c380cc24a8e94a3c6625a808/yarl-1.22.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:334b8721303e61b00019474cc103bdac3d7b1f65e91f0bfedeec2d56dfe74b53", size = 97303, upload-time = "2025-10-06T14:11:50.897Z" }, @@ -2522,9 +2391,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/86/a0/c2ab48d74599c7c84cb104ebd799c5813de252bea0f360ffc29d270c2caa/yarl-1.22.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:e4b582bab49ac33c8deb97e058cd67c2c50dac0dd134874106d9c774fd272529", size = 352400, upload-time = "2025-10-06T14:12:06.624Z" }, { url = "https://files.pythonhosted.org/packages/32/75/f8919b2eafc929567d3d8411f72bdb1a2109c01caaab4ebfa5f8ffadc15b/yarl-1.22.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:0b5bcc1a9c4839e7e30b7b30dd47fe5e7e44fb7054ec29b5bb8d526aa1041093", size = 357140, upload-time = "2025-10-06T14:12:08.362Z" }, { url = "https://files.pythonhosted.org/packages/cf/72/6a85bba382f22cf78add705d8c3731748397d986e197e53ecc7835e76de7/yarl-1.22.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = 
"sha256:c0232bce2170103ec23c454e54a57008a9a72b5d1c3105dc2496750da8cfa47c", size = 341473, upload-time = "2025-10-06T14:12:10.994Z" }, - { url = "https://files.pythonhosted.org/packages/35/18/55e6011f7c044dc80b98893060773cefcfdbf60dfefb8cb2f58b9bacbd83/yarl-1.22.0-cp314-cp314t-win32.whl", hash = "sha256:8009b3173bcd637be650922ac455946197d858b3630b6d8787aa9e5c4564533e", size = 89056, upload-time = "2025-10-06T14:12:13.317Z" }, - { url = "https://files.pythonhosted.org/packages/f9/86/0f0dccb6e59a9e7f122c5afd43568b1d31b8ab7dda5f1b01fb5c7025c9a9/yarl-1.22.0-cp314-cp314t-win_amd64.whl", hash = "sha256:9fb17ea16e972c63d25d4a97f016d235c78dd2344820eb35bc034bc32012ee27", size = 96292, upload-time = "2025-10-06T14:12:15.398Z" }, - { url = "https://files.pythonhosted.org/packages/48/b7/503c98092fb3b344a179579f55814b613c1fbb1c23b3ec14a7b008a66a6e/yarl-1.22.0-cp314-cp314t-win_arm64.whl", hash = "sha256:9f6d73c1436b934e3f01df1e1b21ff765cd1d28c77dfb9ace207f746d4610ee1", size = 85171, upload-time = "2025-10-06T14:12:16.935Z" }, { url = "https://files.pythonhosted.org/packages/73/ae/b48f95715333080afb75a4504487cbe142cae1268afc482d06692d605ae6/yarl-1.22.0-py3-none-any.whl", hash = "sha256:1380560bdba02b6b6c90de54133c81c9f2a453dee9912fe58c1dcced1edb7cff", size = 46814, upload-time = "2025-10-06T14:12:53.872Z" }, ]