Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .mypy.ini
Original file line number Diff line number Diff line change
Expand Up @@ -77,6 +77,7 @@ modules =
azul.service.user_controller,
azul.service.user_service,
scripts.pull_request,
scripts.claude_mv,


packages =
Expand Down
16 changes: 16 additions & 0 deletions CLAUDE.md
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,9 @@
path in the `modules` section of that file, or *implicitly*, by listing its
parent or ancestor package in the `packages` section

- When adding a module to the `modules` list in `.mypy.ini`, always append it
at the end of the list

- Prefer to use `git mv` when renaming or moving files

- Do not commit any changes unless explicitly asked to do so. However, it's OK
Expand All @@ -39,3 +42,16 @@
- Do not quote type hints in annotations. The project uses Python 3.14, which
defers evaluation of annotations by default (PEP 649), so forward references
and `TYPE_CHECKING`-guarded imports work without quotes

- When using `assert` with `R()` and at least one assertion in the function
needs line wrapping, wrap all of them consistently. The convention is:
```python
assert condition, R(
'message', value)
```
`R(` goes at the end of the `assert` line, the closing `)` at the end of
the following line

- For pairs of symmetric assignments like `a = foo(x)` and `b = foo(y)`, use
tuple assignment: `a, b = foo(x), foo(y)`. Do not apply this when it would
require wrapping the line
25 changes: 25 additions & 0 deletions UPGRADING.rst
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,31 @@ branch that does not have the listed changes, the steps would need to be
reverted. This is all fairly informal and loosely defined. Hopefully we won't
have too many entries in this file.


#7963 Disabled secrets cause OAuth clients to be flagged
========================================================

Everyone
--------

Make sure that the OAuth 2.0 client for each of your personal deployments has
exactly one secret, and that that secret is enabled. Any secrets that have
already been disabled for at least one hour can be immediately deleted. If
there is still more than one secret left, disable all but the newest secret,
wait one hour and ten minutes, and delete the previously disabled secrets.

If you're OK with potentially breaking the login functionality on the Swagger UI
of your personal deployment, and the login functionality of any Data Browser
instance backed by your deployment, you don't need to wait between disabling and
deleting a secret.

Operator
--------

Follow the steps above for all shared deployments. The waiting period between
disabling and deleting secrets should be observed.


#7927 Use MA mirror bucket for MA files
=======================================

Expand Down
320 changes: 320 additions & 0 deletions scripts/claude_mv.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,320 @@
"""
Update Claude Code's internal metadata (the context) in preparation of renaming
or moving projects. Move Claude Code sessions between projects.
"""
from abc import (
ABCMeta,
abstractmethod,
)
import argparse
import json
import logging
from pathlib import (
Path,
PurePath,
)
import re
import shutil
import sys
from typing import (
ClassVar,
Mapping,
Sequence,
)

import attrs

from azul.lib import (
R,
)
from azul.lib.types import (
AnyJSON,
AnyMutableJSON,
JSON,
MutableJSON,
json_list_of_dicts,
json_str,
)
from azul.logging import (
configure_script_logging,
)

log = logging.getLogger(__name__)


def main(argv: list[str]) -> None:
    """
    Parse *argv* and dispatch to the selected subcommand.
    """
    parser = argparse.ArgumentParser(description=__doc__)
    # Making the subparsers action required forces the user to name a command
    subparsers = parser.add_subparsers(dest='command', required=True)
    # Every concrete Command subclass registers its own subparser and binds
    # itself via set_defaults(command_class=...)
    for subcommand in Command.__subclasses__():
        subcommand.add_subparser(subparsers)
    args = parser.parse_args(argv)
    args.command_class(args).execute()


@attrs.define
class Command(metaclass=ABCMeta):
    """
    Base class for the subcommands of this script. A concrete subclass
    registers its own subparser and is instantiated with the parsed arguments.
    """
    # Claude Code keeps its state under ~/.claude, with one directory (the
    # "context") per project under ~/.claude/projects
    _claude_dir: ClassVar[Path] = Path.home() / '.claude'
    _contexts_dir: ClassVar[Path] = _claude_dir / 'projects'
    _args: argparse.Namespace

    @classmethod
    @abstractmethod
    def add_subparser(cls, subparsers: argparse._SubParsersAction) -> None:
        raise NotImplementedError

    @abstractmethod
    def execute(self) -> None:
        raise NotImplementedError

    def _encode_path(self, path: PurePath) -> str:
        """
        Encode an absolute path the way Claude Code names the context
        directory for a project: every slash and dot becomes a dash.
        """
        return re.sub(r'[/.]', '-', str(path))

    def _rewrite_session(self,
                         session_file: Path,
                         old_dir: Path,
                         new_dir: Path
                         ) -> None:
        """
        Replace every occurrence of the old project path with the new one in
        all string values of the given JSONL session file. The file is left
        untouched if it does not mention the old path at all.
        """
        old, new = str(old_dir), str(new_dir)
        text = session_file.read_text()
        if old in text:
            # Note that the original code rebound a str-annotated parameter to
            # the parsed JSON value; use distinct names for the raw line and
            # the parsed message instead
            def rewrite(line: str) -> str:
                message = self._replace_paths(json.loads(line), old, new)
                return json.dumps(message, ensure_ascii=False, separators=(',', ':'))

            lines = list(map(rewrite, text.splitlines()))
            session_file.write_text('\n'.join(lines) + '\n')

    def _replace_paths(self, obj: AnyJSON, old: str, new: str) -> AnyMutableJSON:
        """
        Return a copy of the given JSON value in which every occurrence of
        `old` in any string, at any level of nesting, is replaced by `new`.
        """
        # The str case must precede the Sequence case because str is itself a
        # Sequence
        if isinstance(obj, str):
            return obj.replace(old, new)
        elif isinstance(obj, Mapping):
            return {k: self._replace_paths(v, old, new) for k, v in obj.items()}
        elif isinstance(obj, Sequence):
            return [self._replace_paths(v, old, new) for v in obj]
        else:
            return obj


class MoveProjectCommand(Command):
    """
    Update Claude Code's context in preparation of renaming or moving a project
    directory. Does not actually move the project directory.
    """

    @classmethod
    def add_subparser(cls, subparsers: argparse._SubParsersAction) -> None:
        parser = subparsers.add_parser('project', help=cls.__doc__)
        parser.add_argument('old_project',
                            metavar='OLD',
                            help='The old path of the project directory')
        parser.add_argument('new_project',
                            metavar='NEW',
                            help='The new path of the project directory')
        # --merge and --clobber are alternative, mutually exclusive ways of
        # resolving a pre-existing context at the new location
        group = parser.add_mutually_exclusive_group()
        group.add_argument('--merge',
                           action='store_true',
                           default=False,
                           help='In the unexpected event that a context '
                                'already exists at the new location, combine '
                                'it with the one from the old location. This '
                                'action is not reversible.')
        group.add_argument('--clobber',
                           action='store_true',
                           default=False,
                           help='In the unexpected event that a context '
                                'already exists at the new location, discard '
                                'it and replace it with the one from the old '
                                'location. This action is not reversible.')
        parser.set_defaults(command_class=cls)

    def execute(self) -> None:
        """
        Resolve both locations, handle any conflicting context at the new
        location, move the context, and rewrite the paths embedded in its
        session files.
        """
        args = self._args
        old_dir, new_dir = Path(args.old_project).resolve(), Path(args.new_project).resolve()
        assert old_dir != new_dir, R(
            'Old and new project locations are the same', old_dir)

        old_key, new_key = self._encode_path(old_dir), self._encode_path(new_dir)
        old_context = self._contexts_dir / old_key
        new_context = self._contexts_dir / new_key

        conflicts = self._find_conflicts(new_key)
        if conflicts:
            assert args.merge or args.clobber, R(
                'A context for the new project location already exists. '
                'Use --merge or --clobber to resolve.', conflicts)

            if args.clobber:
                log.info('Removing existing context for %r at %r', str(new_dir), str(new_context))
                self._remove_context(new_key)

        log.info('Moving context for %r at %r to %r', str(old_dir), str(old_context), str(new_context))
        self._move_context(old_key, new_key)
        log.info('Rewriting session paths')
        self._rewrite_sessions(new_key, old_dir, new_dir)

    # The subdirectories of ~/.claude that hold per-project state
    _context_subdirs = ['projects']

    def _find_conflicts(self, key: str) -> list[Path]:
        """
        Return the existing context entries for the given key, one per context
        subdirectory in which such an entry exists.
        """
        return [
            entry
            for subdir in self._context_subdirs
            if (entry := self._claude_dir / subdir / key).exists()
        ]

    def _remove_context(self, key: str) -> None:
        """
        Delete the context entries for the given key, whether they are
        directories or plain files.
        """
        for subdir in self._context_subdirs:
            target = self._claude_dir / subdir / key
            if target.is_dir():
                shutil.rmtree(target)
            elif target.is_file():
                target.unlink()

    def _move_context(self, old_key: str, new_key: str) -> None:
        """
        Rename the context entries from the old key to the new one, merging
        directory contents when --merge was given and a destination directory
        already exists.
        """
        for subdir in self._context_subdirs:
            source = self._claude_dir / subdir / old_key
            target = self._claude_dir / subdir / new_key
            if source.is_dir():
                if target.is_dir() and self._args.merge:
                    # Move each child individually, then remove the now-empty
                    # source directory
                    for child in source.iterdir():
                        child.rename(target / child.name)
                    source.rmdir()
                else:
                    source.rename(target)
            elif source.is_file():
                source.rename(target)

    def _rewrite_sessions(self, key: str, old_dir: Path, new_dir: Path) -> None:
        """
        Rewrite the project paths embedded in every session file of the
        context identified by the given key.
        """
        for session_file in (self._contexts_dir / key).glob('*.jsonl'):
            self._rewrite_session(session_file, old_dir, new_dir)


class MoveSessionCommand(Command):
    """
    Move a session from one project to another.
    """

    @classmethod
    def add_subparser(cls, subparsers: argparse._SubParsersAction) -> None:
        parser = subparsers.add_parser('session', help=cls.__doc__)
        parser.add_argument('session_id',
                            metavar='SESSION_ID',
                            help='The UUID of the session to move')
        parser.add_argument('src_project',
                            metavar='SOURCE',
                            help='The path of the source project directory')
        parser.add_argument('dst_project',
                            metavar='DESTINATION',
                            help='The path of the destination project directory')
        parser.set_defaults(command_class=cls)

    def execute(self) -> None:
        """
        Move the session file and its companion directory from the source
        context to the destination context, transfer the session's entry
        between the two session indices, and finally rewrite the project paths
        embedded in the moved session file.
        """
        session_id = self._args.session_id
        src_project_dir = Path(self._args.src_project).resolve()
        dst_project_dir = Path(self._args.dst_project).resolve()
        # Both project directories and both of their Claude contexts must
        # already exist; this command only moves the session between them
        assert src_project_dir.is_dir(), R(
            'Source project directory does not exist', src_project_dir)
        assert dst_project_dir.is_dir(), R(
            'Destination project directory does not exist', dst_project_dir)

        src_key = self._encode_path(src_project_dir)
        dst_key = self._encode_path(dst_project_dir)
        src_context_dir = self._contexts_dir / src_key
        dst_context_dir = self._contexts_dir / dst_key
        assert src_context_dir.is_dir(), R(
            'No Claude context for source project', src_project_dir)
        assert dst_context_dir.is_dir(), R(
            'No Claude context for destination project', dst_project_dir)
        assert src_context_dir != dst_context_dir, R(
            'Source and destination projects are the same', src_project_dir)

        # A session consists of a <session_id>.jsonl file and, optionally, a
        # directory named after the session ID
        session_base_name = session_id + '.jsonl'
        src_session_file = src_context_dir / session_base_name
        dst_session_file = dst_context_dir / session_base_name

        if src_session_file.exists():
            log.info('Moving session file %r', str(src_session_file))
            src_session_file.rename(dst_session_file)

        src_session_dir = src_context_dir / session_id
        if src_session_dir.is_dir():
            log.info('Moving session directory %r', str(src_session_dir))
            src_session_dir.rename(dst_context_dir / session_id)

        log.info('Updating session index')
        entry = self._remove_from_session_index(src_context_dir, session_id)
        if entry is not None:
            self._add_to_session_index(dst_context_dir, entry, dst_project_dir)
        else:
            # The session was not listed in the source index. That's only
            # acceptable if the session file is already at the destination,
            # e.g. because a previous invocation was interrupted partway
            assert dst_session_file.exists(), R(
                'Session not found in source or destination project', session_id)
            log.info('Session already moved, skipping to path rewriting')

        log.info('Rewriting session paths')
        self._rewrite_session(dst_session_file, src_project_dir, dst_project_dir)

    def _read_session_index(self, context_dir: Path) -> MutableJSON | None:
        """
        Return the parsed session index of the given context, or None if the
        context has no index file.
        """
        index_file = context_dir / 'sessions-index.json'
        if index_file.exists():
            return json.loads(index_file.read_text())
        else:
            return None

    def _write_session_index(self, context_dir: Path, index: JSON) -> None:
        """
        Serialize the given index to the context's index file, compactly, with
        a trailing newline.
        """
        index_file = context_dir / 'sessions-index.json'
        index_file.write_text(json.dumps(index, ensure_ascii=False, separators=(',', ':')) + '\n')

    def _remove_from_session_index(self,
                                   context_dir: Path,
                                   session_id: str
                                   ) -> MutableJSON | None:
        """
        Remove the entry for the given session from the context's session
        index, write the index back, and return the removed entry. Return None
        without modifying anything if there is no index, the index has no
        'entries' key, or no entry matches the session.
        """
        index = self._read_session_index(context_dir)
        if index is None:
            return None
        else:
            try:
                entries = json_list_of_dicts(index['entries'])
            except KeyError:
                return None
            else:
                # Scan all entries rather than stopping at the first match so
                # that a corrupt index with duplicates is detected
                remove_at = None
                for i, entry in enumerate(entries):
                    if json_str(entry['sessionId']) == session_id:
                        assert remove_at is None, R(
                            'Duplicate session in index', session_id)
                        remove_at = i
                if remove_at is None:
                    return None
                else:
                    removed = entries.pop(remove_at)
                    self._write_session_index(context_dir, index)
                    return removed

    def _add_to_session_index(self,
                              context_dir: Path,
                              entry: MutableJSON,
                              dst_project: Path
                              ) -> None:
        """
        Insert the given entry, updated to reflect its new location, into the
        session index of the given context and write the index back. A no-op
        if the destination context has no index file.
        """
        index = self._read_session_index(context_dir)
        if index is not None:
            session_id = json_str(entry['sessionId'])
            # Point the entry at the session's new file and project locations
            entry['fullPath'] = str(context_dir / (session_id + '.jsonl'))
            entry['projectPath'] = str(dst_project)
            entries = json_list_of_dicts(index['entries'])
            # Replace an existing entry for the same session, if any …
            for i, e in enumerate(entries):
                if json_str(e['sessionId']) == session_id:
                    entries[i] = entry
                    break
            else:
                # … otherwise append (for-else: no break occurred)
                entries.append(entry)
            self._write_session_index(context_dir, index)


if __name__ == '__main__':
    # Set up logging for this script's logger before dispatching, then run
    # with the command-line arguments minus the program name
    configure_script_logging(log)
    main(sys.argv[1:])
1 change: 0 additions & 1 deletion scripts/stage_fixup.py → scripts/git_fixup.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
#!/usr/bin/env python3
"""
EXPERIMENTAL — written by Claude Code (claude-opus-4-6).

Expand Down
Loading
Loading