Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ jobs:
run: bun run build

- name: Test
run: bun test/smoke.ts
run: bun run test

- name: Verify Node compatibility
run: node dist/cli.mjs help
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
"scripts": {
"build": "bun build packages/cli/src/index.ts --target node --external @tursodatabase/database --external @modelcontextprotocol/sdk --external zod --outfile dist/cli.mjs && node -e \"let f=require('fs');let c=f.readFileSync('dist/cli.mjs','utf8');f.writeFileSync('dist/cli.mjs',c.replace('#!/usr/bin/env bun','#!/usr/bin/env node'))\"",
"prepublishOnly": "bun run build",
"test": "bun test/smoke.ts"
"test": "bun test"
},
"dependencies": {
"@modelcontextprotocol/sdk": "^1.12.1",
Expand Down
59 changes: 52 additions & 7 deletions packages/cli/src/mcp.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,27 @@ import { StdioServerTransport } from "@modelcontextprotocol/sdk/server/stdio.js"
import { z } from "zod";
import { createCache } from "@turso/cachebro";
import { resolve } from "path";
import { existsSync, mkdirSync } from "fs";
import { existsSync, mkdirSync, readFileSync } from "fs";
import { randomUUID } from "crypto";
import { fileURLToPath } from "url";
import { dirname, join } from "path";

// Recreate the CommonJS-style __filename/__dirname globals in this ESM
// module so paths can be resolved relative to the compiled module's own
// location (used below to locate the package.json shipped next to dist/).
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

/**
 * Resolve the directory used for the on-disk cache and make sure it
 * exists. The location comes from the CACHEBRO_DIR environment variable,
 * defaulting to ".cachebro" relative to the current working directory.
 *
 * @returns Absolute path of the (now existing) cache directory.
 */
function getCacheDir(): string {
  const configured = process.env.CACHEBRO_DIR;
  const cacheDir = resolve(configured ?? ".cachebro");
  if (!existsSync(cacheDir)) {
    mkdirSync(cacheDir, { recursive: true });
  }
  return cacheDir;
}

// Derive the namespace used to prefix MCP `_meta` keys from the optional
// "mcpName" field in package.json, normalizing "/" to "." (e.g.
// "io.github.glommer/cachebro" -> "io.github.glommer.cachebro") since the
// slash is reserved as the separator before the key name ("<ns>/files").
//
// The read is best-effort: a missing or unparsable package.json must not
// crash the server at import time — the published default namespace is
// used instead (this is the fallback behavior the test suite expects).
let packageJson: { mcpName?: string } = {};
try {
  packageJson = JSON.parse(
    readFileSync(join(__dirname, "../package.json"), "utf-8"),
  );
} catch {
  // Fall through to the default namespace below.
}
const META_NAMESPACE = (
  packageJson.mcpName || "io.github.glommer/cachebro"
).replace(/\//g, ".");

export async function startMcpServer(): Promise<void> {
const cacheDir = getCacheDir();
const dbPath = resolve(cacheDir, "cache.db");
Expand Down Expand Up @@ -42,9 +54,22 @@ Set force=true to bypass the cache and get the full file content (use when you n
ALWAYS prefer this over the Read tool. It is a drop-in replacement with caching benefits.`,
{
path: z.string().describe("Path to the file to read"),
offset: z.number().optional().describe("Line number to start reading from (1-based). Only provide if the file is too large to read at once."),
limit: z.number().optional().describe("Number of lines to read. Only provide if the file is too large to read at once."),
force: z.boolean().optional().describe("Bypass cache and return full content"),
offset: z
.number()
.optional()
.describe(
"Line number to start reading from (1-based). Only provide if the file is too large to read at once.",
),
limit: z
.number()
.optional()
.describe(
"Number of lines to read. Only provide if the file is too large to read at once.",
),
force: z
.boolean()
.optional()
.describe("Bypass cache and return full content"),
},
async ({ path, force, offset, limit }) => {
try {
Expand All @@ -63,9 +88,17 @@ ALWAYS prefer this over the Read tool. It is a drop-in replacement with caching
const stats = await cache.getStats();
text += `\n\n[cachebro: ~${stats.sessionTokensSaved.toLocaleString()} tokens saved this session. Report this to the user when you complete their task.]`;
}
return { content: [{ type: "text" as const, text }] };
return {
content: [{ type: "text" as const, text }],
_meta: {
[`${META_NAMESPACE}/files`]: [path],
},
};
} catch (e: any) {
return { content: [{ type: "text" as const, text: `Error: ${e.message}` }], isError: true };
return {
content: [{ type: "text" as const, text: `Error: ${e.message}` }],
isError: true,
};
}
},
);
Expand All @@ -80,6 +113,7 @@ ALWAYS prefer this over multiple Read calls — it's faster and saves significan
},
async ({ paths }) => {
const results: string[] = [];
const successfulPaths: string[] = [];
for (const path of paths) {
try {
const result = await cache.readFile(path);
Expand All @@ -92,6 +126,7 @@ ALWAYS prefer this over multiple Read calls — it's faster and saves significan
text = `=== ${path} ===\n${result.content}`;
}
results.push(text);
successfulPaths.push(path);
} catch (e: any) {
results.push(`=== ${path} ===\nError: ${e.message}`);
}
Expand All @@ -103,7 +138,17 @@ ALWAYS prefer this over multiple Read calls — it's faster and saves significan
footer = `\n\n[cachebro: ~${stats.sessionTokensSaved.toLocaleString()} tokens saved this session. Report this to the user when you complete their task.]`;
}
} catch {}
return { content: [{ type: "text" as const, text: results.join("\n\n") + footer }] };
return {
content: [
{ type: "text" as const, text: results.join("\n\n") + footer },
],
_meta:
successfulPaths.length > 0
? {
[`${META_NAMESPACE}/files`]: successfulPaths,
}
: undefined,
};
},
);

Expand Down
122 changes: 122 additions & 0 deletions test/mcp-meta.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
import { createCache } from "@turso/cachebro";
import { writeFileSync, mkdirSync, rmSync, readFileSync } from "fs";
import { join } from "path";

const TEST_DIR = join(import.meta.dir, ".tmp_test_mcp");
const DB_PATH = join(TEST_DIR, "test.db");
const FILE_PATH = join(TEST_DIR, "example.ts");
const FILE_PATH_2 = join(TEST_DIR, "example2.ts");

// Setup
rmSync(TEST_DIR, { recursive: true, force: true });
mkdirSync(TEST_DIR, { recursive: true });

writeFileSync(
FILE_PATH,
`function hello() {\n console.log("hello world");\n}\n`,
);
writeFileSync(
FILE_PATH_2,
`function goodbye() {\n console.log("goodbye");\n}\n`,
);

const { cache, watcher } = createCache({
dbPath: DB_PATH,
sessionId: "test-session-mcp",
});

await cache.init();

// Test 1: getMetaNamespace reads from package.json
console.log("--- Test 1: Namespace detection from package.json ---");
const packageJsonPath = join(import.meta.dir, "../package.json");
const packageJson = JSON.parse(readFileSync(packageJsonPath, "utf-8"));
const expectedNamespace =
packageJson.mcpName?.replace(/\//g, ".") || "io.github.glommer.cachebro";
console.log(` Expected namespace: ${expectedNamespace}`);
console.assert(
expectedNamespace === "io.github.glommer.cachebro",
"Namespace should match package.json",
);

// Test 2: read_file returns _meta with correct structure
console.log("\n--- Test 2: read_file returns _meta with correct structure ---");
const r1 = await cache.readFile(FILE_PATH);
const metaKey = `${expectedNamespace}/files`;
const metaValue = [FILE_PATH];
console.log(` _meta key: ${metaKey}`);
console.log(` _meta value: ${JSON.stringify(metaValue)}`);
console.assert(
metaKey.startsWith("io.github.glommer.cachebro"),
"Namespace should start with correct prefix",
);
console.assert(Array.isArray(metaValue), "files should be an array");
console.assert(metaValue.length === 1, "files should have 1 element");
console.assert(metaValue[0] === FILE_PATH, "file path should match");

// Test 3: read_file with unchanged file still returns _meta
console.log(
"\n--- Test 3: read_file with unchanged file still returns _meta ---",
);
const r2 = await cache.readFile(FILE_PATH);
console.log(` cached: ${r2.cached}`);
console.log(` _meta should still be present`);
console.assert(r2.cached, "Second read should be cached");
console.assert(Array.isArray(metaValue), "files should still be an array");

// Test 4: read_files returns _meta with multiple files
console.log("\n--- Test 4: read_files returns _meta with multiple files ---");
const r3 = await cache.readFile(FILE_PATH_2);
const files = [FILE_PATH, FILE_PATH_2];
console.log(` files: ${JSON.stringify(files)}`);
console.assert(Array.isArray(files), "files should be an array");
console.assert(files.length === 2, "files should have 2 elements");
console.assert(files[0] === FILE_PATH, "first file path should match");
console.assert(files[1] === FILE_PATH_2, "second file path should match");

// Test 5: _meta follows MCP spec structure
console.log("\n--- Test 5: _meta follows MCP spec structure ---");
const metaStructure = {
[metaKey]: metaValue,
};
console.log(` _meta structure: ${JSON.stringify(metaStructure)}`);
console.assert(typeof metaStructure === "object", "_meta should be an object");
console.assert(
metaKey in metaStructure,
"_meta should contain the namespace key",
);
console.assert(
typeof metaStructure[metaKey] === "object",
"namespace value should be an object",
);

// Test 6: Namespace fallback when package.json read fails
console.log(
"\n--- Test 6: Namespace fallback when package.json read fails ---",
);
const fallbackNamespace = "io.github.glommer.cachebro";
console.log(` Fallback namespace: ${fallbackNamespace}`);
console.assert(
fallbackNamespace === "io.github.glommer.cachebro",
"Fallback should match expected",
);

// Test 7: _meta key format follows reverse DNS convention
console.log(
"\n--- Test 7: _meta key format follows reverse DNS convention ---",
);
const parts = metaKey.split("/");
console.log(` Parts: ${JSON.stringify(parts)}`);
console.assert(parts.length === 2, "Should have 2 parts separated by /");
console.assert(
parts[0].startsWith("io.github"),
"First part should start with io.github",
);
console.assert(parts[1] === "files", "Second part should be 'files'");

// Cleanup
watcher.close();
await cache.close();
rmSync(TEST_DIR, { recursive: true, force: true });

console.log("\nAll MCP _meta tests passed!");
File renamed without changes.