Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion vectorless/ask/prompts.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@ class NavigationParams:
- doc_card Show document-level overview (title, summary, sections)
- concepts List key concepts extracted from the document
- find_section <title> Find a section by exact title (case-insensitive)
- chain <name> Follow reasoning chains from a node (pre-computed links)
- compare <a> <b> Compare two nodes using LLM analysis (use node IDs)
- trace <name> Trace reasoning chain from a node using LLM
- summarize <name> Generate dynamic LLM summary of a node
Expand Down Expand Up @@ -202,7 +203,7 @@ def worker_dispatch(params: WorkerDispatchParams) -> tuple[str, str]:
f"cd .., back, cat, cat <name>, head <name>, find <keyword>, findtree <pattern>, grep <regex>, "
f"toc [depth], stats <name>, grep_node <node> <pattern>, similar <name>, overview <name>, "
f"siblings <name>, ancestors <name>, doc_card, concepts, find_section <title>, "
f"compare <a> <b>, trace <name>, summarize <name>, "
f"chain <name>, compare <a> <b>, trace <name>, summarize <name>, "
f"wc <name>, pwd, check, done\n"
f"\n"
f"SEARCH STRATEGY:\n"
Expand Down
72 changes: 63 additions & 9 deletions vectorless/ask/worker/agent.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,6 +82,13 @@ async def run(self) -> WorkerOutput:
pass
logger.info("Worker starting: doc=%s max_rounds=%d", doc_name, max_rounds)

# Phase 0.5: try intent routes for direct jump
intent_hint = ""
try:
intent_hint = await self._build_intent_hint(doc, query)
except Exception:
pass

try:
children = await doc.ls()
if children:
Expand All @@ -104,9 +111,15 @@ async def run(self) -> WorkerOutput:
except Exception:
pass

combined_hints = ""
if intent_hint:
combined_hints += intent_hint
if keyword_hints:
logger.info("Phase 1.5: keyword hints available, generating plan")
await self._generate_plan(doc, query, task, state, keyword_hints, llm)
combined_hints += keyword_hints

if combined_hints:
logger.info("Phase 1.5: hints available, generating plan")
await self._generate_plan(doc, query, task, state, combined_hints, llm)
if state.plan:
logger.info("Phase 1.5: plan generated — %s", state.plan[:150])

Expand Down Expand Up @@ -142,7 +155,7 @@ async def run(self) -> WorkerOutput:
visited_titles=visited_titles,
plan=state.plan,
intent_context=intent_context,
keyword_hints=keyword_hints,
keyword_hints=combined_hints,
shared_context=shared_context,
))

Expand Down Expand Up @@ -172,7 +185,7 @@ async def run(self) -> WorkerOutput:
f'"{raw_preview}"\n\n'
f"Please output exactly one command "
f"(ls, cd, cat, head, find, grep, toc, stats, similar, overview, "
f"siblings, ancestors, doc_card, concepts, find_section, "
f"siblings, ancestors, doc_card, concepts, find_section, chain, "
f"compare, trace, summarize, wc, pwd, check, or done)."
)
state.push_history("(unrecognized) \u2192 parse failure")
Expand Down Expand Up @@ -239,6 +252,39 @@ async def run(self) -> WorkerOutput:

return state.into_worker_output(doc_name)

async def _build_intent_hint(self, doc: NavigableDocument, query: str) -> str:
    """Assemble navigation hints from the document's pre-computed intent routes.

    When the routing table exposes shortcuts for this document's structure,
    the hint lists candidate jump targets so the agent can `cd` directly to
    them instead of exploring from the root.  Returns "" when routing is
    unsupported, no routes exist, or no route carries any target.
    """
    # Best-effort: documents without a routing table simply yield no hint.
    try:
        routes = await doc.intent_routes()
    except Exception:
        return ""
    if not routes:
        return ""

    hint_lines = ["Intent routes (pre-computed shortcuts — use cd to jump directly):"]
    # Cap fan-out: at most 5 routes, 3 targets each, to keep the prompt small.
    for route in routes[:5]:
        for target in getattr(route, "targets", [])[:3]:
            title = await doc.node_title(target.node_id)
            score = getattr(target, "relevance", 0.0)
            why = getattr(target, "reason", "")
            hint_lines.append(f" - {title} (relevance {score:.2f}: {why})")

    # Only the header was emitted: every route was empty, so no hint at all.
    if len(hint_lines) == 1:
        return ""

    logger.info("Intent routes: %d routes found", len(routes))
    return "\n".join(hint_lines) + "\n"

async def _build_keyword_hints(self, doc: NavigableDocument, query: str) -> str:
"""Build keyword hints from the document's reasoning index and acceleration data."""
keywords = extract_keywords(query)
Expand Down Expand Up @@ -276,15 +322,23 @@ async def _build_keyword_hints(self, doc: NavigableDocument, query: str) -> str:
pass

# Top evidence scores (pre-computed by ScorePass)
# Filter to nodes whose titles overlap with query keywords for relevance
score_hints = []
try:
scores = await doc.evidence_scores_ranked()
for s in scores[:5]:
kw_lower = {kw.lower() for kw in keywords}
for s in scores[:20]:
title = await doc.node_title(s.node_id)
score_hints.append(
f" - {title} (score {s.composite:.2f}: "
f"density={s.density:.2f} richness={s.data_richness:.2f})"
)
title_lower = title.lower()
# Include if title contains any query keyword or is top-3 by score
is_keyword_match = any(kw in title_lower for kw in kw_lower)
if is_keyword_match or len(score_hints) < 3:
score_hints.append(
f" - {title} (score {s.composite:.2f}: "
f"density={s.density:.2f} richness={s.data_richness:.2f})"
)
if len(score_hints) >= 8:
break
except Exception:
pass

Expand Down
57 changes: 57 additions & 0 deletions vectorless/ask/worker/commands.py
Original file line number Diff line number Diff line change
Expand Up @@ -202,6 +202,23 @@ async def handle_cat(

preview = content[:500] + "..." if len(content) > 500 else content
state.last_feedback = f"[{title}] collected as evidence:\n{preview}"

# Mark overlapping nodes as visited to avoid redundant reads
try:
overlaps = await doc.overlaps_for(node_id)
if overlaps:
for overlap in overlaps[:5]:
overlap_id = getattr(overlap, "node_id", None)
if overlap_id and overlap_id not in state.collected_nodes:
state.visited.add(overlap_id)
overlap_title = await doc.node_title(overlap_id)
logger.info(
"Overlap dedup: marking '%s' as visited (overlap of '%s')",
overlap_title, title,
)
except Exception:
pass

return Step(kind="continue")
except Exception as e:
state.last_feedback = f"Error reading node: {e}"
Expand Down Expand Up @@ -688,6 +705,45 @@ async def handle_find_section(
return Step(kind="continue")


async def handle_chain(
    command: Any, doc: NavigableDocument, state: WorkerState, query: str, llm: LLMClient,
) -> Step:
    """Follow reasoning chains from a node — uses pre-computed ChainIndex."""
    target = command.target
    # Empty target means "current node".
    node_id = await _resolve_target(doc, target or ".", state)
    if node_id is None:
        state.last_feedback = f"Node '{target}' not found."
        return Step(kind="continue")
    try:
        chains = await doc.chains_for(node_id)
        if not chains:
            title = await doc.node_title(node_id)
            state.last_feedback = f"No reasoning chains found for '{title}'."
            return Step(kind="continue")
        out = ["Reasoning chains:"]
        # Show at most 5 chains; each chain path is truncated to 6 hops.
        for ch in chains[:5]:
            name = getattr(ch, "title", "")
            summary = getattr(ch, "summary", "")
            members = getattr(ch, "nodes", [])
            if not members:
                # Chain carries no node path — fall back to its description.
                out.append(f" - {summary or name or '(unnamed chain)'}")
                continue
            path = " → ".join(getattr(m, "title", "?") for m in members[:6])
            hidden = len(members) - 6
            more = f" (+{hidden} more)" if hidden > 0 else ""
            out.append(f" - {name}: {path}{more}")
            if summary:
                out.append(f" {summary[:120]}")
        state.last_feedback = "\n".join(out)
    except Exception as e:
        state.last_feedback = f"chain error: {e}"
    return Step(kind="continue")


async def handle_check(
command: Any, doc: NavigableDocument, state: WorkerState, query: str, llm: LLMClient,
) -> Step:
Expand Down Expand Up @@ -758,6 +814,7 @@ async def handle_done(
"doc_card": handle_doc_card,
"concepts": handle_concepts,
"find_section": handle_find_section,
"chain": handle_chain,
"check": handle_check,
"done": handle_done,
}
Expand Down
3 changes: 3 additions & 0 deletions vectorless/ask/worker/parse.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,6 +138,9 @@ def parse_command(llm_output: str) -> Command:
elif cmd == "find_section":
target = _strip_quotes(" ".join(parts[1:])) if len(parts) > 1 else ""
return Command(kind="find_section", target=target)
elif cmd == "chain":
target = _strip_quotes(" ".join(parts[1:])) if len(parts) > 1 else ""
return Command(kind="chain", target=target)
else:
return Command(kind="ls") # fallback: re-observe

Expand Down
Loading