-
{info.icon}
-
{info.label}
- {overallConfidence !== null && (
-
- Confidence: {(overallConfidence * 100).toFixed(0)}%
+
+
+
+ {info.icon}
+
+
+
{info.label}
+
+ Stage: {stage}
+
+ {overallConfidence !== null && (
+
+
+ {(overallConfidence * 100).toFixed(0)}%
+
+
+ Confidence
+
+
)}
-
- Stage: {stage}
-
-
{Number.isFinite(job?.fileCount) && (
-
- Files scanned: {job.fileCount}
+
+ Files scanned: {job.fileCount}
)}
{agentTrace.length > 0 && (
-
+
{agentTrace.map((result, index) => {
const confidence = Number(result?.confidence);
const score = Number.isFinite(confidence) ? confidence : 0;
@@ -68,9 +76,9 @@ export default function JobProgressBar({ job }) {
return (
- {label} {pct}%
+ {label} | {pct}%
);
})}
diff --git a/client/src/index.css b/client/src/index.css
index 1d2b0a2..449bfe4 100644
--- a/client/src/index.css
+++ b/client/src/index.css
@@ -6,9 +6,12 @@
@layer base {
:root {
/* Refined for Neumorphism - subtle mid-tones */
- --background: 240 242 245; /* #F0F2F5 */
- --foreground: 30 41 59; /* #1E293B */
- --card: 240 242 245; /* Same as background for neumorphism */
+ --background: 240 242 245;
+ /* #F0F2F5 */
+ --foreground: 30 41 59;
+ /* #1E293B */
+ --card: 240 242 245;
+ /* Same as background for neumorphism */
--card-foreground: 30 41 59;
--popover: 255 255 255;
--popover-foreground: 30 41 59;
@@ -16,18 +19,22 @@
--primary-foreground: 255 255 255;
--secondary: 100 116 139;
--secondary-foreground: 248 250 252;
- --muted: 226 232 240; /* #E2E8F0 */
+ --muted: 226 232 240;
+ /* #E2E8F0 */
--muted-foreground: 100 116 139;
--accent: 212 175 55;
--accent-foreground: 30 41 59;
--border: 210 214 220;
--input: 240 242 245;
--ring: 212 175 55;
- --radius: 1rem; /* Rounder for neumorphism */
+ --auth-panel: 11 11 11;
+ --radius: 1rem;
+ /* Rounder for neumorphism */
}
.dark {
- --background: 15 15 15; /* Deep Black */
+ --background: 15 15 15;
+ /* Deep Black */
--foreground: 241 245 249;
--card: 15 15 15;
--card-foreground: 241 245 249;
@@ -44,6 +51,7 @@
--border: 38 38 38;
--input: 15 15 15;
--ring: 212 175 55;
+ --auth-panel: 11 11 11;
}
}
@@ -58,11 +66,11 @@
--color-muted-foreground: rgb(var(--muted-foreground));
--color-border: rgb(var(--border));
--color-gold: #D4AF37;
-
+
--font-sans: 'Outfit', sans-serif;
--font-display: 'Sora', sans-serif;
--font-mono: 'JetBrains Mono', monospace;
-
+
--radius-xl: 1rem;
--radius-2xl: 1.5rem;
}
@@ -71,6 +79,7 @@
* {
border-color: rgb(var(--border));
}
+
body {
background-color: rgb(var(--background));
color: rgb(var(--foreground));
@@ -93,33 +102,161 @@
pointer-events: none;
background-image: url("data:image/svg+xml,%3Csvg viewBox='0 0 200 200' xmlns='http://www.w3.org/2000/svg'%3E%3Cfilter id='noiseFilter'%3E%3CfeTurbulence type='fractalNoise' baseFrequency='0.65' numOctaves='3' stitchTiles='stitch'/%3E%3C/filter%3E%3Crect width='100%25' height='100%25' filter='url(%23noiseFilter)'/%3E%3C/svg%3E");
}
+
+ /* Global Scrollbar Styling */
+ html {
+ scrollbar-width: thin;
+ scrollbar-color: rgba(var(--foreground) / 0.1) transparent;
+ }
+
+ /* Webkit Browsers (Chrome, Safari, Edge) */
+ ::-webkit-scrollbar {
+ width: 6px;
+ height: 6px;
+ }
+
+ ::-webkit-scrollbar-track {
+ background: transparent;
+ }
+
+ ::-webkit-scrollbar-thumb {
+ background: rgba(var(--foreground) / 0.1);
+ border-radius: 10px;
+ transition: background 0.3s var(--ease-out);
+ }
+
+ ::-webkit-scrollbar-thumb:hover {
+ background: rgba(var(--foreground) / 0.2);
+ }
+
+ .dark {
+ scrollbar-color: rgba(var(--foreground) / 0.15) transparent;
+ }
+
+ .dark ::-webkit-scrollbar-thumb {
+ background: rgba(var(--foreground) / 0.15);
+ }
+
+ .dark ::-webkit-scrollbar-thumb:hover {
+ background: rgba(var(--foreground) / 0.25);
+ }
}
@layer utilities {
.shadow-neu-inset {
- box-shadow: inset 4px 4px 8px rgba(0, 0, 0, 0.05), inset -4px -4px 8px rgba(255, 255, 255, 0.8);
+ box-shadow: inset 2px 2px 5px rgba(0, 0, 0, 0.05), inset -2px -2px 5px rgba(255, 255, 255, 0.7);
}
+
.dark .shadow-neu-inset {
- box-shadow: inset 4px 4px 8px rgba(0, 0, 0, 0.4), inset -2px -2px 4px rgba(255, 255, 255, 0.05);
+ box-shadow: inset 2px 2px 5px rgba(0, 0, 0, 0.3), inset -1px -1px 3px rgba(255, 255, 255, 0.03);
+ }
+
+ .shadow-neu-flat {
+ box-shadow: 4px 4px 8px rgba(0, 0, 0, 0.05), -4px -4px 8px rgba(255, 255, 255, 0.8);
}
-
+
+ .dark .shadow-neu-flat {
+ box-shadow: 4px 4px 8px rgba(0, 0, 0, 0.3), -2px -2px 4px rgba(255, 255, 255, 0.02);
+ }
+
.glass-premium {
background: rgba(var(--background) / 0.6);
backdrop-filter: blur(12px) saturate(180%);
-webkit-backdrop-filter: blur(12px) saturate(180%);
border: 1px solid rgba(255, 255, 255, 0.12);
}
-
+
.text-gradient-gold {
background: linear-gradient(135deg, #D4AF37 0%, #F1D382 50%, #D4AF37 100%);
-webkit-background-clip: text;
background-clip: text;
-webkit-text-fill-color: transparent;
}
+
+ /* Design Engineering: Custom Easings */
+ :root {
+ --ease-out: cubic-bezier(0.23, 1, 0.32, 1);
+ --ease-in-out: cubic-bezier(0.77, 0, 0.175, 1);
+ --ease-drawer: cubic-bezier(0.32, 0.72, 0, 1);
+ }
+
+ /* Active Scale Utility */
+
+ .active-scale:active {
+ transform: scale(0.97);
+ }
}
@layer utilities {
.text-balance {
text-wrap: balance;
}
+}
+
+.language-javascript .token.comment,
+.language-typescript .token.comment,
+.language-tsx .token.comment,
+.language-jsx .token.comment,
+.language-json .token.comment,
+.language-markup .token.comment,
+.language-python .token.comment,
+.language-bash .token.comment,
+.language-markdown .token.comment,
+.language-yaml .token.comment,
+.token.comment,
+.token.prolog,
+.token.doctype,
+.token.cdata {
+ color: #6b7280;
+}
+
+.token.punctuation {
+ color: #94a3b8;
+}
+
+.token.property,
+.token.tag,
+.token.constant,
+.token.symbol,
+.token.deleted {
+ color: #f43f5e;
+}
+
+.token.boolean,
+.token.number {
+ color: #fb7185;
+}
+
+.token.selector,
+.token.attr-name,
+.token.string,
+.token.char,
+.token.builtin,
+.token.inserted {
+ color: #22c55e;
+}
+
+.token.operator,
+.token.entity,
+.token.url,
+.language-css .token.string,
+.style .token.string {
+ color: #38bdf8;
+}
+
+.token.atrule,
+.token.attr-value,
+.token.keyword {
+ color: #f59e0b;
+}
+
+.token.function,
+.token.class-name {
+ color: #60a5fa;
+}
+
+.token.regex,
+.token.important,
+.token.variable {
+ color: #f97316;
}
\ No newline at end of file
diff --git a/docker-compose.yml b/docker-compose.yml
index 1a14cdc..781ecb9 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -37,7 +37,6 @@ services:
- /app/node_modules
command: sh -c "
until pg_isready -h postgres -p 5432; do sleep 1; done;
- if [ ! -d node_modules ]; then npm install; fi;
npm run migrate;
npm run dev
"
diff --git a/server/.env.example b/server/.env.example
index fc6d393..2b9a57b 100644
--- a/server/.env.example
+++ b/server/.env.example
@@ -44,8 +44,46 @@ CLIENT_URL=http://localhost:5173
# COOKIE_SECRET=your_cookie_secret
# REFRESH_TOKEN_SECRET=your_refresh_secret
+# ===============================
+# AI Provider (chat + summarization)
+# ===============================
+# Supported providers: openai-compatible, anthropic, gemini
+AI_PROVIDER=openai-compatible
+AI_API_KEY=sk-...
+AI_MODEL=gpt-4o-mini
+# Optional base URL for OpenAI-compatible providers (OpenRouter, Groq, Together, etc.)
+# AI_BASE_URL=https://api.openai.com/v1
+
+# Confidence controls (agentic responses)
+# If model confidence is below threshold, the agent re-runs the LLM call.
+AI_CONFIDENCE_RETRY_THRESHOLD=0.6
+AI_CONFIDENCE_MAX_RERUNS=1
+
+# Optional snippet-agent-specific overrides
+# AI_SNIPPET_CONFIDENCE_RETRY_THRESHOLD=0.6
+# AI_SNIPPET_CONFIDENCE_MAX_RERUNS=1
+
+# Optional provider-specific fallbacks if AI_API_KEY is not set
+# ANTHROPIC_API_KEY=sk-ant-...
+# GEMINI_API_KEY=AIza...
+
+# Backward-compatible OpenAI envs (still supported)
OPENAI_API_KEY=sk-...
-OPENAI_MODEL=gpt-4o-mini # cheapest model with good quality
+OPENAI_MODEL=gpt-4o-mini
+
+# ===============================
+# AI Embeddings (semantic search)
+# ===============================
+# Keep this openai-compatible so vector dimensions match DB expectations.
+AI_EMBEDDING_PROVIDER=openai-compatible
+AI_EMBEDDING_API_KEY=sk-...
+AI_EMBEDDING_MODEL=text-embedding-3-small
+# Optional base URL for embedding endpoint
+# AI_EMBEDDING_BASE_URL=https://api.openai.com/v1
+
+# Backward-compatible embedding env
+OPENAI_EMBEDDING_MODEL=text-embedding-3-small
+
AI_CACHE_TTL_SECONDS=3600 # cache explanations for 1 hour
REDIS_URL=redis://localhost:6379 # omit to use in-memory cache
REDIS_HOST=localhost # use redis when app runs inside Docker
diff --git a/server/index.js b/server/index.js
index e2f1997..f7585f9 100644
--- a/server/index.js
+++ b/server/index.js
@@ -2,6 +2,7 @@ import dotenv from 'dotenv';
import path from 'path';
import { fileURLToPath } from 'url';
import * as Sentry from '@sentry/node';
+import { startAnalysisWorker } from './src/queue/analysisQueue.js';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
@@ -20,6 +21,8 @@ const { default: app } = await import('./app.js');
const PORT = process.env.PORT || 5000;
+startAnalysisWorker();
+
app.listen(PORT, () => {
console.log(`[server] Running on http://localhost:${PORT} (${process.env.NODE_ENV || 'development'})`);
});
diff --git a/server/package-lock.json b/server/package-lock.json
index 57cf535..b0f784c 100644
--- a/server/package-lock.json
+++ b/server/package-lock.json
@@ -29,7 +29,9 @@
"passport-github2": "^0.1.12",
"passport-jwt": "^4.0.1",
"pg": "^8.20.0",
- "pgvector": "^0.2.1"
+ "pgvector": "^0.2.1",
+ "tree-sitter-wasms": "^0.1.13",
+ "web-tree-sitter": "^0.25.10"
},
"devDependencies": {
"@vitest/coverage-v8": "^4.0.8",
@@ -128,7 +130,6 @@
"dev": true,
"license": "MIT",
"optional": true,
- "peer": true,
"dependencies": {
"tslib": "^2.4.0"
}
@@ -190,6 +191,37 @@
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
}
},
+ "node_modules/@eslint/config-array/node_modules/balanced-match": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
+ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/@eslint/config-array/node_modules/brace-expansion": {
+ "version": "1.1.13",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz",
+ "integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "balanced-match": "^1.0.0",
+ "concat-map": "0.0.1"
+ }
+ },
+ "node_modules/@eslint/config-array/node_modules/minimatch": {
+ "version": "3.1.5",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz",
+ "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "brace-expansion": "^1.1.7"
+ },
+ "engines": {
+ "node": "*"
+ }
+ },
"node_modules/@eslint/config-helpers": {
"version": "0.4.2",
"resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.4.2.tgz",
@@ -240,6 +272,37 @@
"url": "https://opencollective.com/eslint"
}
},
+ "node_modules/@eslint/eslintrc/node_modules/balanced-match": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
+ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/@eslint/eslintrc/node_modules/brace-expansion": {
+ "version": "1.1.13",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz",
+ "integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "balanced-match": "^1.0.0",
+ "concat-map": "0.0.1"
+ }
+ },
+ "node_modules/@eslint/eslintrc/node_modules/minimatch": {
+ "version": "3.1.5",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz",
+ "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "brace-expansion": "^1.1.7"
+ },
+ "engines": {
+ "node": "*"
+ }
+ },
"node_modules/@eslint/js": {
"version": "9.39.4",
"resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.39.4.tgz",
@@ -278,9 +341,9 @@
}
},
"node_modules/@fastify/otel": {
- "version": "0.17.1",
- "resolved": "https://registry.npmjs.org/@fastify/otel/-/otel-0.17.1.tgz",
- "integrity": "sha512-K4wyxfUZx2ux5o+b6BtTqouYFVILohLZmSbA2tKUueJstNcBnoGPVhllCaOvbQ3ZrXdUxUC/fyrSWSCqHhdOPg==",
+ "version": "0.18.0",
+ "resolved": "https://registry.npmjs.org/@fastify/otel/-/otel-0.18.0.tgz",
+ "integrity": "sha512-3TASCATfw+ctICSb4ymrv7iCm0qJ0N9CarB+CZ7zIJ7KqNbwI5JjyDL1/sxoC0ccTO1Zyd1iQ+oqncPg5FJXaA==",
"funding": [
{
"type": "github",
@@ -331,27 +394,6 @@
"@opentelemetry/api": "^1.3.0"
}
},
- "node_modules/@fastify/otel/node_modules/balanced-match": {
- "version": "4.0.4",
- "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.4.tgz",
- "integrity": "sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==",
- "license": "MIT",
- "engines": {
- "node": "18 || 20 || >=22"
- }
- },
- "node_modules/@fastify/otel/node_modules/brace-expansion": {
- "version": "5.0.5",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.5.tgz",
- "integrity": "sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==",
- "license": "MIT",
- "dependencies": {
- "balanced-match": "^4.0.2"
- },
- "engines": {
- "node": "18 || 20 || >=22"
- }
- },
"node_modules/@fastify/otel/node_modules/import-in-the-middle": {
"version": "2.0.6",
"resolved": "https://registry.npmjs.org/import-in-the-middle/-/import-in-the-middle-2.0.6.tgz",
@@ -364,21 +406,6 @@
"module-details-from-path": "^1.0.4"
}
},
- "node_modules/@fastify/otel/node_modules/minimatch": {
- "version": "10.2.4",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.4.tgz",
- "integrity": "sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==",
- "license": "BlueOak-1.0.0",
- "dependencies": {
- "brace-expansion": "^5.0.2"
- },
- "engines": {
- "node": "18 || 20 || >=22"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
"node_modules/@humanfs/core": {
"version": "0.19.1",
"resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz",
@@ -580,14 +607,15 @@
"resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.1.tgz",
"integrity": "sha512-gLyJlPHPZYdAk1JENA9LeHejZe1Ti77/pTeFm/nMXmQH/HFZlcS/O2XJB+L8fkbrNSqhdtlvjBVjxwUYanNH5Q==",
"license": "Apache-2.0",
+ "peer": true,
"engines": {
"node": ">=8.0.0"
}
},
"node_modules/@opentelemetry/api-logs": {
- "version": "0.213.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/api-logs/-/api-logs-0.213.0.tgz",
- "integrity": "sha512-zRM5/Qj6G84Ej3F1yt33xBVY/3tnMxtL1fiDIxYbDWYaZ/eudVw3/PBiZ8G7JwUxXxjW8gU4g6LnOyfGKYHYgw==",
+ "version": "0.214.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/api-logs/-/api-logs-0.214.0.tgz",
+ "integrity": "sha512-40lSJeqYO8Uz2Yj7u94/SJWE/wONa7rmMKjI1ZcIjgf3MHNHv1OZUCrCETGuaRF62d5pQD1wKIW+L4lmSMTzZA==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/api": "^1.3.0"
@@ -601,6 +629,7 @@
"resolved": "https://registry.npmjs.org/@opentelemetry/context-async-hooks/-/context-async-hooks-2.6.1.tgz",
"integrity": "sha512-XHzhwRNkBpeP8Fs/qjGrAf9r9PRv67wkJQ/7ZPaBQQ68DYlTBBx5MF9LvPx7mhuXcDessKK2b+DcxqwpgkcivQ==",
"license": "Apache-2.0",
+ "peer": true,
"engines": {
"node": "^18.19.0 || >=20.6.0"
},
@@ -613,6 +642,7 @@
"resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.6.1.tgz",
"integrity": "sha512-8xHSGWpJP9wBxgBpnqGL0R3PbdWQndL1Qp50qrg71+B28zK5OQmUgcDKLJgzyAAV38t4tOyLMGDD60LneR5W8g==",
"license": "Apache-2.0",
+ "peer": true,
"dependencies": {
"@opentelemetry/semantic-conventions": "^1.29.0"
},
@@ -624,12 +654,12 @@
}
},
"node_modules/@opentelemetry/instrumentation": {
- "version": "0.213.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation/-/instrumentation-0.213.0.tgz",
- "integrity": "sha512-3i9NdkET/KvQomeh7UaR/F4r9P25Rx6ooALlWXPIjypcEOUxksCmVu0zA70NBJWlrMW1rPr/LRidFAflLI+s/w==",
+ "version": "0.214.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation/-/instrumentation-0.214.0.tgz",
+ "integrity": "sha512-MHqEX5Dk59cqVah5LiARMACku7jXSVk9iVDWOea4x3cr7VfdByeDCURK6o1lntT1JS/Tsovw01UJrBhN3/uC5w==",
"license": "Apache-2.0",
"dependencies": {
- "@opentelemetry/api-logs": "0.213.0",
+ "@opentelemetry/api-logs": "0.214.0",
"import-in-the-middle": "^3.0.0",
"require-in-the-middle": "^8.0.0"
},
@@ -641,13 +671,13 @@
}
},
"node_modules/@opentelemetry/instrumentation-amqplib": {
- "version": "0.60.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-amqplib/-/instrumentation-amqplib-0.60.0.tgz",
- "integrity": "sha512-q/B2IvoVXRm1M00MvhnzpMN6rKYOszPXVsALi6u0ss4AYHe+TidZEtLW9N1ZhrobI1dSriHnBqqtAOZVAv07sg==",
+ "version": "0.61.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-amqplib/-/instrumentation-amqplib-0.61.0.tgz",
+ "integrity": "sha512-mCKoyTGfRNisge4br0NpOFSy2Z1NnEW8hbCJdUDdJFHrPqVzc4IIBPA/vX0U+LUcQqrQvJX+HMIU0dbDRe0i0Q==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/core": "^2.0.0",
- "@opentelemetry/instrumentation": "^0.213.0",
+ "@opentelemetry/instrumentation": "^0.214.0",
"@opentelemetry/semantic-conventions": "^1.33.0"
},
"engines": {
@@ -658,13 +688,13 @@
}
},
"node_modules/@opentelemetry/instrumentation-connect": {
- "version": "0.56.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-connect/-/instrumentation-connect-0.56.0.tgz",
- "integrity": "sha512-PKp+sSZ7AfzMvGgO3VCyo1inwNu+q7A1k9X88WK4PQ+S6Hp7eFk8pie+sWHDTaARovmqq5V2osav3lQej2B0nw==",
+ "version": "0.57.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-connect/-/instrumentation-connect-0.57.0.tgz",
+ "integrity": "sha512-FMEBChnI4FLN5TE9DHwfH7QpNir1JzXno1uz/TAucVdLCyrG0jTrKIcNHt/i30A0M2AunNBCkcd8Ei26dIPKdg==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/core": "^2.0.0",
- "@opentelemetry/instrumentation": "^0.213.0",
+ "@opentelemetry/instrumentation": "^0.214.0",
"@opentelemetry/semantic-conventions": "^1.27.0",
"@types/connect": "3.4.38"
},
@@ -676,12 +706,12 @@
}
},
"node_modules/@opentelemetry/instrumentation-dataloader": {
- "version": "0.30.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-dataloader/-/instrumentation-dataloader-0.30.0.tgz",
- "integrity": "sha512-MXHP2Q38cd2OhzEBKAIXUi9uBlPEYzF6BNJbyjUXBQ6kLaf93kRC41vNMIz0Nl5mnuwK7fDvKT+/lpx7BXRwdg==",
+ "version": "0.31.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-dataloader/-/instrumentation-dataloader-0.31.0.tgz",
+ "integrity": "sha512-f654tZFQXS5YeLDNb9KySrwtg7SnqZN119FauD7acBoTzuLduaiGTNz88ixcVSOOMGZ+EjJu/RFtx5klObC95g==",
"license": "Apache-2.0",
"dependencies": {
- "@opentelemetry/instrumentation": "^0.213.0"
+ "@opentelemetry/instrumentation": "^0.214.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
@@ -691,13 +721,13 @@
}
},
"node_modules/@opentelemetry/instrumentation-express": {
- "version": "0.61.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-express/-/instrumentation-express-0.61.0.tgz",
- "integrity": "sha512-Xdmqo9RZuZlL29Flg8QdwrrX7eW1CZ7wFQPKHyXljNymgKhN1MCsYuqQ/7uxavhSKwAl7WxkTzKhnqpUApLMvQ==",
+ "version": "0.62.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-express/-/instrumentation-express-0.62.0.tgz",
+ "integrity": "sha512-Tvx+vgAZKEQxU3Rx+xWLiR0mLxHwmk69/8ya04+VsV9WYh8w6Lhx5hm5yAMvo1wy0KqWgFKBLwSeo3sHCwdOww==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/core": "^2.0.0",
- "@opentelemetry/instrumentation": "^0.213.0",
+ "@opentelemetry/instrumentation": "^0.214.0",
"@opentelemetry/semantic-conventions": "^1.27.0"
},
"engines": {
@@ -708,13 +738,13 @@
}
},
"node_modules/@opentelemetry/instrumentation-fs": {
- "version": "0.32.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-fs/-/instrumentation-fs-0.32.0.tgz",
- "integrity": "sha512-koR6apx0g0wX6RRiPpjA4AFQUQUbXrK16kq4/SZjVp7u5cffJhNkY4TnITxcGA4acGSPYAfx3NHRIv4Khn1axQ==",
+ "version": "0.33.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-fs/-/instrumentation-fs-0.33.0.tgz",
+ "integrity": "sha512-sCZWXGalQ01wr3tAhSR9ucqFJ0phidpAle6/17HVjD6gN8FLmZMK/8sKxdXYHy3PbnlV1P4zeiSVFNKpbFMNLA==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/core": "^2.0.0",
- "@opentelemetry/instrumentation": "^0.213.0"
+ "@opentelemetry/instrumentation": "^0.214.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
@@ -724,12 +754,12 @@
}
},
"node_modules/@opentelemetry/instrumentation-generic-pool": {
- "version": "0.56.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-generic-pool/-/instrumentation-generic-pool-0.56.0.tgz",
- "integrity": "sha512-fg+Jffs6fqrf0uQS0hom7qBFKsbtpBiBl8+Vkc63Gx8xh6pVh+FhagmiO6oM0m3vyb683t1lP7yGYq22SiDnqg==",
+ "version": "0.57.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-generic-pool/-/instrumentation-generic-pool-0.57.0.tgz",
+ "integrity": "sha512-orhmlaK+ZIW9hKU+nHTbXrCSXZcH83AescTqmpamHRobRmYSQwRbD0a1odc0yAzuzOtxYiHiXAnpnIpaSSY7Ow==",
"license": "Apache-2.0",
"dependencies": {
- "@opentelemetry/instrumentation": "^0.213.0"
+ "@opentelemetry/instrumentation": "^0.214.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
@@ -739,12 +769,12 @@
}
},
"node_modules/@opentelemetry/instrumentation-graphql": {
- "version": "0.61.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-graphql/-/instrumentation-graphql-0.61.0.tgz",
- "integrity": "sha512-pUiVASv6nh2XrerTvlbVHh7vKFzscpgwiQ/xvnZuAIzQ5lRjWVdRPUuXbvZJ/Yq79QsE81TZdJ7z9YsXiss1ew==",
+ "version": "0.62.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-graphql/-/instrumentation-graphql-0.62.0.tgz",
+ "integrity": "sha512-3YNuLVPUxafXkH1jBAbGsKNsP3XVzcFDhCDCE3OqBwCwShlqQbLMRMFh1T/d5jaVZiGVmSsfof+ICKD2iOV8xg==",
"license": "Apache-2.0",
"dependencies": {
- "@opentelemetry/instrumentation": "^0.213.0"
+ "@opentelemetry/instrumentation": "^0.214.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
@@ -754,13 +784,13 @@
}
},
"node_modules/@opentelemetry/instrumentation-hapi": {
- "version": "0.59.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-hapi/-/instrumentation-hapi-0.59.0.tgz",
- "integrity": "sha512-33wa4mEr+9+ztwdgLor1SeBu4Opz4IsmpcLETXAd3VmBrOjez8uQtrsOhPCa5Vhbm5gzDlMYTgFRLQzf8/YHFA==",
+ "version": "0.60.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-hapi/-/instrumentation-hapi-0.60.0.tgz",
+ "integrity": "sha512-aNljZKYrEa7obLAxd1bCEDxF7kzCLGXTuTJZ8lMR9rIVEjmuKBXN1gfqpm/OB//Zc2zP4iIve1jBp7sr3mQV6w==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/core": "^2.0.0",
- "@opentelemetry/instrumentation": "^0.213.0",
+ "@opentelemetry/instrumentation": "^0.214.0",
"@opentelemetry/semantic-conventions": "^1.27.0"
},
"engines": {
@@ -771,13 +801,13 @@
}
},
"node_modules/@opentelemetry/instrumentation-http": {
- "version": "0.213.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-http/-/instrumentation-http-0.213.0.tgz",
- "integrity": "sha512-B978Xsm5XEPGhm1P07grDoaOFLHapJPkOG9h016cJsyWWxmiLnPu2M/4Nrm7UCkHSiLnkXgC+zVGUAIahy8EEA==",
+ "version": "0.214.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-http/-/instrumentation-http-0.214.0.tgz",
+ "integrity": "sha512-FlkDhZDRjDJDcO2LcSCtjRpkal1NJ8y0fBqBhTvfAR3JSYY2jAIj1kSS5IjmEBt4c3aWv+u/lqLuoCDrrKCSKg==",
"license": "Apache-2.0",
"dependencies": {
- "@opentelemetry/core": "2.6.0",
- "@opentelemetry/instrumentation": "0.213.0",
+ "@opentelemetry/core": "2.6.1",
+ "@opentelemetry/instrumentation": "0.214.0",
"@opentelemetry/semantic-conventions": "^1.29.0",
"forwarded-parse": "2.1.2"
},
@@ -788,28 +818,13 @@
"@opentelemetry/api": "^1.3.0"
}
},
- "node_modules/@opentelemetry/instrumentation-http/node_modules/@opentelemetry/core": {
- "version": "2.6.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/core/-/core-2.6.0.tgz",
- "integrity": "sha512-HLM1v2cbZ4TgYN6KEOj+Bbj8rAKriOdkF9Ed3tG25FoprSiQl7kYc+RRT6fUZGOvx0oMi5U67GoFdT+XUn8zEg==",
- "license": "Apache-2.0",
- "dependencies": {
- "@opentelemetry/semantic-conventions": "^1.29.0"
- },
- "engines": {
- "node": "^18.19.0 || >=20.6.0"
- },
- "peerDependencies": {
- "@opentelemetry/api": ">=1.0.0 <1.10.0"
- }
- },
"node_modules/@opentelemetry/instrumentation-ioredis": {
- "version": "0.61.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-ioredis/-/instrumentation-ioredis-0.61.0.tgz",
- "integrity": "sha512-hsHDadUtAFbws1YSDc1XW0svGFKiUbqv2td1Cby+UAiwvojm1NyBo/taifH0t8CuFZ0x/2SDm0iuTwrM5pnVOg==",
+ "version": "0.62.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-ioredis/-/instrumentation-ioredis-0.62.0.tgz",
+ "integrity": "sha512-ZYt//zcPve8qklaZX+5Z4MkU7UpEkFRrxsf2cnaKYBitqDnsCN69CPAuuMOX6NYdW2rG9sFy7V/QWtBlP5XiNQ==",
"license": "Apache-2.0",
"dependencies": {
- "@opentelemetry/instrumentation": "^0.213.0",
+ "@opentelemetry/instrumentation": "^0.214.0",
"@opentelemetry/redis-common": "^0.38.2",
"@opentelemetry/semantic-conventions": "^1.33.0"
},
@@ -821,12 +836,12 @@
}
},
"node_modules/@opentelemetry/instrumentation-kafkajs": {
- "version": "0.22.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-kafkajs/-/instrumentation-kafkajs-0.22.0.tgz",
- "integrity": "sha512-wJU4IBQMUikdJAcTChLFqK5lo+flo7pahqd8DSLv7uMxsdOdAHj6RzKYAm8pPfUS6ItKYutYyuicwKaFwQKsoA==",
+ "version": "0.23.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-kafkajs/-/instrumentation-kafkajs-0.23.0.tgz",
+ "integrity": "sha512-4K+nVo+zI+aDz0Z85SObwbdixIbzS9moIuKJaYsdlzcHYnKOPtB7ya8r8Ezivy/GVIBHiKJVq4tv+BEkgOMLaQ==",
"license": "Apache-2.0",
"dependencies": {
- "@opentelemetry/instrumentation": "^0.213.0",
+ "@opentelemetry/instrumentation": "^0.214.0",
"@opentelemetry/semantic-conventions": "^1.30.0"
},
"engines": {
@@ -837,12 +852,12 @@
}
},
"node_modules/@opentelemetry/instrumentation-knex": {
- "version": "0.57.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-knex/-/instrumentation-knex-0.57.0.tgz",
- "integrity": "sha512-vMCSh8kolEm5rRsc+FZeTZymWmIJwc40hjIKnXH4O0Dv/gAkJJIRXCsPX5cPbe0c0j/34+PsENd0HqKruwhVYw==",
+ "version": "0.58.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-knex/-/instrumentation-knex-0.58.0.tgz",
+ "integrity": "sha512-Hc/o8fSsaWxZ8r1Yw4rNDLwTpUopTf4X32y4W6UhlHmW8Wizz8wfhgOKIelSeqFVTKBBPIDUOsQWuIMxBmu8Bw==",
"license": "Apache-2.0",
"dependencies": {
- "@opentelemetry/instrumentation": "^0.213.0",
+ "@opentelemetry/instrumentation": "^0.214.0",
"@opentelemetry/semantic-conventions": "^1.33.1"
},
"engines": {
@@ -853,13 +868,13 @@
}
},
"node_modules/@opentelemetry/instrumentation-koa": {
- "version": "0.61.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-koa/-/instrumentation-koa-0.61.0.tgz",
- "integrity": "sha512-lvrfWe9ShK/D2X4brmx8ZqqeWPfRl8xekU0FCn7C1dHm5k6+rTOOi36+4fnaHAP8lig9Ux6XQ1D4RNIpPCt1WQ==",
+ "version": "0.62.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-koa/-/instrumentation-koa-0.62.0.tgz",
+ "integrity": "sha512-uVip0VuGUQXZ+vFxkKxAUNq8qNl+VFlyHDh/U6IQ8COOEDfbEchdaHnpFrMYF3psZRUuoSIgb7xOeXj00RdwDA==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/core": "^2.0.0",
- "@opentelemetry/instrumentation": "^0.213.0",
+ "@opentelemetry/instrumentation": "^0.214.0",
"@opentelemetry/semantic-conventions": "^1.36.0"
},
"engines": {
@@ -870,12 +885,12 @@
}
},
"node_modules/@opentelemetry/instrumentation-lru-memoizer": {
- "version": "0.57.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-lru-memoizer/-/instrumentation-lru-memoizer-0.57.0.tgz",
- "integrity": "sha512-cEqpUocSKJfwDtLYTTJehRLWzkZ2eoePCxfVIgGkGkb83fMB71O+y4MvRHJPbeV2bdoWdOVrl8uO0+EynWhTEA==",
+ "version": "0.58.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-lru-memoizer/-/instrumentation-lru-memoizer-0.58.0.tgz",
+ "integrity": "sha512-6grM3TdMyHzlGY1cUA+mwoPueB1F3dYKgKtZIH6jOFXqfHAByyLTc+6PFjGM9tKh52CFBJaDwodNlL/Td39z7Q==",
"license": "Apache-2.0",
"dependencies": {
- "@opentelemetry/instrumentation": "^0.213.0"
+ "@opentelemetry/instrumentation": "^0.214.0"
},
"engines": {
"node": "^18.19.0 || >=20.6.0"
@@ -885,12 +900,12 @@
}
},
"node_modules/@opentelemetry/instrumentation-mongodb": {
- "version": "0.66.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mongodb/-/instrumentation-mongodb-0.66.0.tgz",
- "integrity": "sha512-d7m9QnAY+4TCWI4q1QRkfrc6fo/92VwssaB1DzQfXNRvu51b78P+HJlWP7Qg6N6nkwdb9faMZNBCZJfftmszkw==",
+ "version": "0.67.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mongodb/-/instrumentation-mongodb-0.67.0.tgz",
+ "integrity": "sha512-1WJp5N1lYfHq2IhECOTewFs5Tf2NfUOwQRqs/rZdXKTezArMlucxgzAaqcgp3A3YREXopXTpXHsxZTGHjNhMdQ==",
"license": "Apache-2.0",
"dependencies": {
- "@opentelemetry/instrumentation": "^0.213.0",
+ "@opentelemetry/instrumentation": "^0.214.0",
"@opentelemetry/semantic-conventions": "^1.33.0"
},
"engines": {
@@ -901,13 +916,13 @@
}
},
"node_modules/@opentelemetry/instrumentation-mongoose": {
- "version": "0.59.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mongoose/-/instrumentation-mongoose-0.59.0.tgz",
- "integrity": "sha512-6/jWU+c1NgznkVLDU/2y0bXV2nJo3o9FWZ9mZ9nN6T/JBNRoMnVXZl2FdBmgH+a5MwaWLs5kmRJTP5oUVGIkPw==",
+ "version": "0.60.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mongoose/-/instrumentation-mongoose-0.60.0.tgz",
+ "integrity": "sha512-8BahAZpKsOoc+lrZGb7Ofn4g3z8qtp5IxDfvAVpKXsEheQN7ONMH5djT5ihy6yf8yyeQJGS0gXFfpEAEeEHqQg==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/core": "^2.0.0",
- "@opentelemetry/instrumentation": "^0.213.0",
+ "@opentelemetry/instrumentation": "^0.214.0",
"@opentelemetry/semantic-conventions": "^1.33.0"
},
"engines": {
@@ -918,12 +933,12 @@
}
},
"node_modules/@opentelemetry/instrumentation-mysql": {
- "version": "0.59.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mysql/-/instrumentation-mysql-0.59.0.tgz",
- "integrity": "sha512-r+V/Fh0sm7Ga8/zk/TI5H5FQRAjwr0RrpfPf8kNIehlsKf12XnvIaZi8ViZkpX0gyPEpLXqzqWD6QHlgObgzZw==",
+ "version": "0.60.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mysql/-/instrumentation-mysql-0.60.0.tgz",
+ "integrity": "sha512-08pO8GFPEIz2zquKDGteBZDNmwketdgH8hTe9rVYgW9kCJXq1Psj3wPQGx+VaX4ZJKCfPeoLMYup9+cxHvZyVQ==",
"license": "Apache-2.0",
"dependencies": {
- "@opentelemetry/instrumentation": "^0.213.0",
+ "@opentelemetry/instrumentation": "^0.214.0",
"@opentelemetry/semantic-conventions": "^1.33.0",
"@types/mysql": "2.15.27"
},
@@ -935,12 +950,12 @@
}
},
"node_modules/@opentelemetry/instrumentation-mysql2": {
- "version": "0.59.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mysql2/-/instrumentation-mysql2-0.59.0.tgz",
- "integrity": "sha512-n9/xrVCRBfG9egVbffnlU1uhr+HX0vF4GgtAB/Bvm48wpFgRidqD8msBMiym1kRYzmpWvJqTxNT47u1MkgBEdw==",
+ "version": "0.60.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-mysql2/-/instrumentation-mysql2-0.60.0.tgz",
+ "integrity": "sha512-m/5d3bxQALllCzezYDk/6vajh0tj5OijMMvOZGr+qN1NMXm1dzMNwyJ0gNZW7Fo3YFRyj/jJMxIw+W7d525dlw==",
"license": "Apache-2.0",
"dependencies": {
- "@opentelemetry/instrumentation": "^0.213.0",
+ "@opentelemetry/instrumentation": "^0.214.0",
"@opentelemetry/semantic-conventions": "^1.33.0",
"@opentelemetry/sql-common": "^0.41.2"
},
@@ -952,13 +967,13 @@
}
},
"node_modules/@opentelemetry/instrumentation-pg": {
- "version": "0.65.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-pg/-/instrumentation-pg-0.65.0.tgz",
- "integrity": "sha512-W0zpHEIEuyZ8zvb3njaX9AAbHgPYOsSWVOoWmv1sjVRSF6ZpBqtlxBWbU+6hhq1TFWBeWJOXZ8nZS/PUFpLJYQ==",
+ "version": "0.66.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-pg/-/instrumentation-pg-0.66.0.tgz",
+ "integrity": "sha512-KxfLGXBb7k2ueaPJfq2GXBDXBly8P+SpR/4Mj410hhNgmQF3sCqwXvUBQxZQkDAmsdBAoenM+yV1LhtsMRamcA==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/core": "^2.0.0",
- "@opentelemetry/instrumentation": "^0.213.0",
+ "@opentelemetry/instrumentation": "^0.214.0",
"@opentelemetry/semantic-conventions": "^1.34.0",
"@opentelemetry/sql-common": "^0.41.2",
"@types/pg": "8.15.6",
@@ -972,12 +987,12 @@
}
},
"node_modules/@opentelemetry/instrumentation-redis": {
- "version": "0.61.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-redis/-/instrumentation-redis-0.61.0.tgz",
- "integrity": "sha512-JnPexA034/0UJRsvH96B0erQoNOqKJZjE2ZRSw9hiTSC23LzE0nJE/u6D+xqOhgUhRnhhcPHq4MdYtmUdYTF+Q==",
+ "version": "0.62.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-redis/-/instrumentation-redis-0.62.0.tgz",
+ "integrity": "sha512-y3pPpot7WzR/8JtHcYlTYsyY8g+pbFhAqbwAuG5bLPnR6v6pt1rQc0DpH0OlGP/9CZbWBP+Zhwp9yFoygf/ZXQ==",
"license": "Apache-2.0",
"dependencies": {
- "@opentelemetry/instrumentation": "^0.213.0",
+ "@opentelemetry/instrumentation": "^0.214.0",
"@opentelemetry/redis-common": "^0.38.2",
"@opentelemetry/semantic-conventions": "^1.27.0"
},
@@ -989,12 +1004,12 @@
}
},
"node_modules/@opentelemetry/instrumentation-tedious": {
- "version": "0.32.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-tedious/-/instrumentation-tedious-0.32.0.tgz",
- "integrity": "sha512-BQS6gG8RJ1foEqfEZ+wxoqlwfCAzb1ZVG0ad8Gfe4x8T658HJCLGLd4E4NaoQd8EvPfLqOXgzGaE/2U4ytDSWA==",
+ "version": "0.33.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-tedious/-/instrumentation-tedious-0.33.0.tgz",
+ "integrity": "sha512-Q6WQwAD01MMTub31GlejoiFACYNw26J426wyjvU7by7fDIr2nZXNW4vhTGs7i7F0TnXBO3xN688g1tdUgYwJ5w==",
"license": "Apache-2.0",
"dependencies": {
- "@opentelemetry/instrumentation": "^0.213.0",
+ "@opentelemetry/instrumentation": "^0.214.0",
"@opentelemetry/semantic-conventions": "^1.33.0",
"@types/tedious": "^4.0.14"
},
@@ -1006,13 +1021,13 @@
}
},
"node_modules/@opentelemetry/instrumentation-undici": {
- "version": "0.23.0",
- "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-undici/-/instrumentation-undici-0.23.0.tgz",
- "integrity": "sha512-LL0VySzKVR2cJSFVZaTYpZl1XTpBGnfzoQPe2W7McS2267ldsaEIqtQY6VXs2KCXN0poFjze5110PIpxHDaDGg==",
+ "version": "0.24.0",
+ "resolved": "https://registry.npmjs.org/@opentelemetry/instrumentation-undici/-/instrumentation-undici-0.24.0.tgz",
+ "integrity": "sha512-oKzZ3uvqP17sV0EsoQcJgjEfIp0kiZRbYu/eD8p13Cbahumf8lb/xpYeNr/hfAJ4owzEtIDcGIjprfLcYbIKBQ==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/core": "^2.0.0",
- "@opentelemetry/instrumentation": "^0.213.0",
+ "@opentelemetry/instrumentation": "^0.214.0",
"@opentelemetry/semantic-conventions": "^1.24.0"
},
"engines": {
@@ -1036,6 +1051,7 @@
"resolved": "https://registry.npmjs.org/@opentelemetry/resources/-/resources-2.6.1.tgz",
"integrity": "sha512-lID/vxSuKWXM55XhAKNoYXu9Cutoq5hFdkbTdI/zDKQktXzcWBVhNsOkiZFTMU9UtEWuGRNe0HUgmsFldIdxVA==",
"license": "Apache-2.0",
+ "peer": true,
"dependencies": {
"@opentelemetry/core": "2.6.1",
"@opentelemetry/semantic-conventions": "^1.29.0"
@@ -1052,6 +1068,7 @@
"resolved": "https://registry.npmjs.org/@opentelemetry/sdk-trace-base/-/sdk-trace-base-2.6.1.tgz",
"integrity": "sha512-r86ut4T1e8vNwB35CqCcKd45yzqH6/6Wzvpk2/cZB8PsPLlZFTvrh8yfOS3CYZYcUmAx4hHTZJ8AO8Dj8nrdhw==",
"license": "Apache-2.0",
+ "peer": true,
"dependencies": {
"@opentelemetry/core": "2.6.1",
"@opentelemetry/resources": "2.6.1",
@@ -1069,6 +1086,7 @@
"resolved": "https://registry.npmjs.org/@opentelemetry/semantic-conventions/-/semantic-conventions-1.40.0.tgz",
"integrity": "sha512-cifvXDhcqMwwTlTK04GBNeIe7yyo28Mfby85QXFe1Yk8nmi36Ab/5UQwptOx84SsoGNRg+EVSjwzfSZMy6pmlw==",
"license": "Apache-2.0",
+ "peer": true,
"engines": {
"node": ">=14"
}
@@ -1109,9 +1127,9 @@
}
},
"node_modules/@prisma/instrumentation": {
- "version": "7.4.2",
- "resolved": "https://registry.npmjs.org/@prisma/instrumentation/-/instrumentation-7.4.2.tgz",
- "integrity": "sha512-r9JfchJF1Ae6yAxcaLu/V1TGqBhAuSDe3mRNOssBfx1rMzfZ4fdNvrgUBwyb/TNTGXFxlH9AZix5P257x07nrg==",
+ "version": "7.6.0",
+ "resolved": "https://registry.npmjs.org/@prisma/instrumentation/-/instrumentation-7.6.0.tgz",
+ "integrity": "sha512-ZPW2gRiwpPzEfgeZgaekhqXrbW+Y2RJKHVqUmlhZhKzRNCcvR6DykzylDrynpArKKRQtLxoZy36fK7U0p3pdgQ==",
"license": "Apache-2.0",
"dependencies": {
"@opentelemetry/instrumentation": "^0.207.0"
@@ -1451,54 +1469,54 @@
}
},
"node_modules/@sentry/core": {
- "version": "10.46.0",
- "resolved": "https://registry.npmjs.org/@sentry/core/-/core-10.46.0.tgz",
- "integrity": "sha512-N3fj4zqBQOhXliS1Ne9euqIKuciHCGOJfPGQLwBoW9DNz03jF+NB8+dUKtrJ79YLoftjVgf8nbgwtADK7NR+2Q==",
+ "version": "10.47.0",
+ "resolved": "https://registry.npmjs.org/@sentry/core/-/core-10.47.0.tgz",
+ "integrity": "sha512-nsYRAx3EWezDut+Zl+UwwP07thh9uY7CfSAi2whTdcJl5hu1nSp2z8bba7Vq/MGbNLnazkd3A+GITBEML924JA==",
"license": "MIT",
"engines": {
"node": ">=18"
}
},
"node_modules/@sentry/node": {
- "version": "10.46.0",
- "resolved": "https://registry.npmjs.org/@sentry/node/-/node-10.46.0.tgz",
- "integrity": "sha512-vF+7FrUXEtmYWuVcnvBjlWKeyLw/kwHpwnGj9oUmO/a2uKjDmUr53ZVcapggNxCjivavGYr9uHOY64AGdeUyzA==",
- "license": "MIT",
- "dependencies": {
- "@fastify/otel": "0.17.1",
- "@opentelemetry/api": "^1.9.0",
- "@opentelemetry/context-async-hooks": "^2.6.0",
- "@opentelemetry/core": "^2.6.0",
- "@opentelemetry/instrumentation": "^0.213.0",
- "@opentelemetry/instrumentation-amqplib": "0.60.0",
- "@opentelemetry/instrumentation-connect": "0.56.0",
- "@opentelemetry/instrumentation-dataloader": "0.30.0",
- "@opentelemetry/instrumentation-express": "0.61.0",
- "@opentelemetry/instrumentation-fs": "0.32.0",
- "@opentelemetry/instrumentation-generic-pool": "0.56.0",
- "@opentelemetry/instrumentation-graphql": "0.61.0",
- "@opentelemetry/instrumentation-hapi": "0.59.0",
- "@opentelemetry/instrumentation-http": "0.213.0",
- "@opentelemetry/instrumentation-ioredis": "0.61.0",
- "@opentelemetry/instrumentation-kafkajs": "0.22.0",
- "@opentelemetry/instrumentation-knex": "0.57.0",
- "@opentelemetry/instrumentation-koa": "0.61.0",
- "@opentelemetry/instrumentation-lru-memoizer": "0.57.0",
- "@opentelemetry/instrumentation-mongodb": "0.66.0",
- "@opentelemetry/instrumentation-mongoose": "0.59.0",
- "@opentelemetry/instrumentation-mysql": "0.59.0",
- "@opentelemetry/instrumentation-mysql2": "0.59.0",
- "@opentelemetry/instrumentation-pg": "0.65.0",
- "@opentelemetry/instrumentation-redis": "0.61.0",
- "@opentelemetry/instrumentation-tedious": "0.32.0",
- "@opentelemetry/instrumentation-undici": "0.23.0",
- "@opentelemetry/resources": "^2.6.0",
- "@opentelemetry/sdk-trace-base": "^2.6.0",
+ "version": "10.47.0",
+ "resolved": "https://registry.npmjs.org/@sentry/node/-/node-10.47.0.tgz",
+ "integrity": "sha512-R+btqPepv88o635G6HtVewLjqCLUedBg5HBs7Nq1qbbKvyti01uArUF2f+3DsLenk5B9LUNiRlE+frZA44Ahmw==",
+ "license": "MIT",
+ "dependencies": {
+ "@fastify/otel": "0.18.0",
+ "@opentelemetry/api": "^1.9.1",
+ "@opentelemetry/context-async-hooks": "^2.6.1",
+ "@opentelemetry/core": "^2.6.1",
+ "@opentelemetry/instrumentation": "^0.214.0",
+ "@opentelemetry/instrumentation-amqplib": "0.61.0",
+ "@opentelemetry/instrumentation-connect": "0.57.0",
+ "@opentelemetry/instrumentation-dataloader": "0.31.0",
+ "@opentelemetry/instrumentation-express": "0.62.0",
+ "@opentelemetry/instrumentation-fs": "0.33.0",
+ "@opentelemetry/instrumentation-generic-pool": "0.57.0",
+ "@opentelemetry/instrumentation-graphql": "0.62.0",
+ "@opentelemetry/instrumentation-hapi": "0.60.0",
+ "@opentelemetry/instrumentation-http": "0.214.0",
+ "@opentelemetry/instrumentation-ioredis": "0.62.0",
+ "@opentelemetry/instrumentation-kafkajs": "0.23.0",
+ "@opentelemetry/instrumentation-knex": "0.58.0",
+ "@opentelemetry/instrumentation-koa": "0.62.0",
+ "@opentelemetry/instrumentation-lru-memoizer": "0.58.0",
+ "@opentelemetry/instrumentation-mongodb": "0.67.0",
+ "@opentelemetry/instrumentation-mongoose": "0.60.0",
+ "@opentelemetry/instrumentation-mysql": "0.60.0",
+ "@opentelemetry/instrumentation-mysql2": "0.60.0",
+ "@opentelemetry/instrumentation-pg": "0.66.0",
+ "@opentelemetry/instrumentation-redis": "0.62.0",
+ "@opentelemetry/instrumentation-tedious": "0.33.0",
+ "@opentelemetry/instrumentation-undici": "0.24.0",
+ "@opentelemetry/resources": "^2.6.1",
+ "@opentelemetry/sdk-trace-base": "^2.6.1",
"@opentelemetry/semantic-conventions": "^1.40.0",
- "@prisma/instrumentation": "7.4.2",
- "@sentry/core": "10.46.0",
- "@sentry/node-core": "10.46.0",
- "@sentry/opentelemetry": "10.46.0",
+ "@prisma/instrumentation": "7.6.0",
+ "@sentry/core": "10.47.0",
+ "@sentry/node-core": "10.47.0",
+ "@sentry/opentelemetry": "10.47.0",
"import-in-the-middle": "^3.0.0"
},
"engines": {
@@ -1506,13 +1524,13 @@
}
},
"node_modules/@sentry/node-core": {
- "version": "10.46.0",
- "resolved": "https://registry.npmjs.org/@sentry/node-core/-/node-core-10.46.0.tgz",
- "integrity": "sha512-gwLGXfkzmiCmUI1VWttyoZBaVp1ItpDKc8AV2mQblWPQGdLSD0c6uKV/FkU291yZA3rXsrLXVwcWoibwnjE2vw==",
+ "version": "10.47.0",
+ "resolved": "https://registry.npmjs.org/@sentry/node-core/-/node-core-10.47.0.tgz",
+ "integrity": "sha512-qv6LsqHbkQmd0aQEUox/svRSz26J+l4gGjFOUNEay2armZu9XLD+Ct89jpFgZD5oIPNAj2jraodTRqydXiwS5w==",
"license": "MIT",
"dependencies": {
- "@sentry/core": "10.46.0",
- "@sentry/opentelemetry": "10.46.0",
+ "@sentry/core": "10.47.0",
+ "@sentry/opentelemetry": "10.47.0",
"import-in-the-middle": "^3.0.0"
},
"engines": {
@@ -1522,6 +1540,7 @@
"@opentelemetry/api": "^1.9.0",
"@opentelemetry/context-async-hooks": "^1.30.1 || ^2.1.0",
"@opentelemetry/core": "^1.30.1 || ^2.1.0",
+ "@opentelemetry/exporter-trace-otlp-http": ">=0.57.0 <1",
"@opentelemetry/instrumentation": ">=0.57.1 <1",
"@opentelemetry/resources": "^1.30.1 || ^2.1.0",
"@opentelemetry/sdk-trace-base": "^1.30.1 || ^2.1.0",
@@ -1537,6 +1556,9 @@
"@opentelemetry/core": {
"optional": true
},
+ "@opentelemetry/exporter-trace-otlp-http": {
+ "optional": true
+ },
"@opentelemetry/instrumentation": {
"optional": true
},
@@ -1552,12 +1574,12 @@
}
},
"node_modules/@sentry/opentelemetry": {
- "version": "10.46.0",
- "resolved": "https://registry.npmjs.org/@sentry/opentelemetry/-/opentelemetry-10.46.0.tgz",
- "integrity": "sha512-dzzV2ovruGsx9jzusGGr6cNPvMgYRu2BIrF8aMZ3rkQ1OpPJjPStqtA1l1fw0aoxHOxIjFU7ml4emF+xdmMl3g==",
+ "version": "10.47.0",
+ "resolved": "https://registry.npmjs.org/@sentry/opentelemetry/-/opentelemetry-10.47.0.tgz",
+ "integrity": "sha512-f6Hw2lrpCjlOksiosP0Z2jK/+l+21SIdoNglVeG/sttMyx8C8ywONKh0Ha50sFsvB1VaB8n94RKzzf3hkh9V3g==",
"license": "MIT",
"dependencies": {
- "@sentry/core": "10.46.0"
+ "@sentry/core": "10.47.0"
},
"engines": {
"node": ">=18"
@@ -1871,6 +1893,7 @@
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.16.0.tgz",
"integrity": "sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==",
"license": "MIT",
+ "peer": true,
"bin": {
"acorn": "bin/acorn"
},
@@ -2007,11 +2030,13 @@
}
},
"node_modules/balanced-match": {
- "version": "1.0.2",
- "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
- "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
- "dev": true,
- "license": "MIT"
+ "version": "4.0.4",
+ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.4.tgz",
+ "integrity": "sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==",
+ "license": "MIT",
+ "engines": {
+ "node": "18 || 20 || >=22"
+ }
},
"node_modules/base64url": {
"version": "3.0.1",
@@ -2074,14 +2099,15 @@
}
},
"node_modules/brace-expansion": {
- "version": "1.1.13",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz",
- "integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==",
- "dev": true,
+ "version": "5.0.5",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.5.tgz",
+ "integrity": "sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==",
"license": "MIT",
"dependencies": {
- "balanced-match": "^1.0.0",
- "concat-map": "0.0.1"
+ "balanced-match": "^4.0.2"
+ },
+ "engines": {
+ "node": "18 || 20 || >=22"
}
},
"node_modules/braces": {
@@ -2686,6 +2712,37 @@
"url": "https://opencollective.com/eslint"
}
},
+ "node_modules/eslint/node_modules/balanced-match": {
+ "version": "1.0.2",
+ "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
+ "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/eslint/node_modules/brace-expansion": {
+ "version": "1.1.13",
+ "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.13.tgz",
+ "integrity": "sha512-9ZLprWS6EENmhEOpjCYW2c8VkmOvckIJZfkr7rBW6dObmfgJ/L1GpSYW5Hpo9lDz4D1+n0Ckz8rU7FwHDQiG/w==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "balanced-match": "^1.0.0",
+ "concat-map": "0.0.1"
+ }
+ },
+ "node_modules/eslint/node_modules/minimatch": {
+ "version": "3.1.5",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz",
+ "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==",
+ "dev": true,
+ "license": "ISC",
+ "dependencies": {
+ "brace-expansion": "^1.1.7"
+ },
+ "engines": {
+ "node": "*"
+ }
+ },
"node_modules/espree": {
"version": "10.4.0",
"resolved": "https://registry.npmjs.org/espree/-/espree-10.4.0.tgz",
@@ -4017,16 +4074,18 @@
}
},
"node_modules/minimatch": {
- "version": "3.1.5",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.5.tgz",
- "integrity": "sha512-VgjWUsnnT6n+NUk6eZq77zeFdpW2LWDzP6zFGrCbHXiYNul5Dzqk2HHQ5uFH2DNW5Xbp8+jVzaeNt94ssEEl4w==",
- "dev": true,
- "license": "ISC",
+ "version": "10.2.5",
+ "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.5.tgz",
+ "integrity": "sha512-MULkVLfKGYDFYejP07QOurDLLQpcjk7Fw+7jXS2R2czRQzR56yHRveU5NDJEOviH+hETZKSkIk5c+T23GjFUMg==",
+ "license": "BlueOak-1.0.0",
"dependencies": {
- "brace-expansion": "^1.1.7"
+ "brace-expansion": "^5.0.5"
},
"engines": {
- "node": "*"
+ "node": "18 || 20 || >=22"
+ },
+ "funding": {
+ "url": "https://github.com/sponsors/isaacs"
}
},
"node_modules/module-details-from-path": {
@@ -4177,29 +4236,6 @@
"url": "https://opencollective.com/nodemon"
}
},
- "node_modules/nodemon/node_modules/balanced-match": {
- "version": "4.0.4",
- "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-4.0.4.tgz",
- "integrity": "sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==",
- "dev": true,
- "license": "MIT",
- "engines": {
- "node": "18 || 20 || >=22"
- }
- },
- "node_modules/nodemon/node_modules/brace-expansion": {
- "version": "5.0.5",
- "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-5.0.5.tgz",
- "integrity": "sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==",
- "dev": true,
- "license": "MIT",
- "dependencies": {
- "balanced-match": "^4.0.2"
- },
- "engines": {
- "node": "18 || 20 || >=22"
- }
- },
"node_modules/nodemon/node_modules/has-flag": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
@@ -4210,22 +4246,6 @@
"node": ">=4"
}
},
- "node_modules/nodemon/node_modules/minimatch": {
- "version": "10.2.4",
- "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-10.2.4.tgz",
- "integrity": "sha512-oRjTw/97aTBN0RHbYCdtF1MQfvusSIBQM0IZEgzl6426+8jSC0nF1a/GmnVLpfB9yyr6g6FTqWqiZVbxrtaCIg==",
- "dev": true,
- "license": "BlueOak-1.0.0",
- "dependencies": {
- "brace-expansion": "^5.0.2"
- },
- "engines": {
- "node": "18 || 20 || >=22"
- },
- "funding": {
- "url": "https://github.com/sponsors/isaacs"
- }
- },
"node_modules/nodemon/node_modules/supports-color": {
"version": "5.5.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
@@ -4517,9 +4537,9 @@
}
},
"node_modules/path-to-regexp": {
- "version": "8.4.0",
- "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.4.0.tgz",
- "integrity": "sha512-PuseHIvAnz3bjrM2rGJtSgo1zjgxapTLZ7x2pjhzWwlp4SJQgK3f3iZIQwkpEnBaKz6seKBADpM4B4ySkuYypg==",
+ "version": "8.4.1",
+ "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-8.4.1.tgz",
+ "integrity": "sha512-fvU78fIjZ+SBM9YwCknCvKOUKkLVqtWDVctl0s7xIqfmfb38t2TT4ZU2gHm+Z8xGwgW+QWEU3oQSAzIbo89Ggw==",
"license": "MIT",
"funding": {
"type": "opencollective",
@@ -4543,6 +4563,7 @@
"resolved": "https://registry.npmjs.org/pg/-/pg-8.20.0.tgz",
"integrity": "sha512-ldhMxz2r8fl/6QkXnBD3CR9/xg694oT6DZQ2s6c/RI28OjtSOpxnPrUCGOBJ46RCUxcWdx3p6kw/xnDHjKvaRA==",
"license": "MIT",
+ "peer": true,
"dependencies": {
"pg-connection-string": "^2.12.0",
"pg-pool": "^3.13.0",
@@ -5317,6 +5338,7 @@
"integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==",
"dev": true,
"license": "MIT",
+ "peer": true,
"engines": {
"node": ">=12"
},
@@ -5366,6 +5388,15 @@
"nodetouch": "bin/nodetouch.js"
}
},
+ "node_modules/tree-sitter-wasms": {
+ "version": "0.1.13",
+ "resolved": "https://registry.npmjs.org/tree-sitter-wasms/-/tree-sitter-wasms-0.1.13.tgz",
+ "integrity": "sha512-wT+cR6DwaIz80/vho3AvSF0N4txuNx/5bcRKoXouOfClpxh/qqrF4URNLQXbbt8MaAxeksZcZd1j8gcGjc+QxQ==",
+ "license": "Unlicense",
+ "dependencies": {
+ "tree-sitter-wasms": "^0.1.11"
+ }
+ },
"node_modules/tslib": {
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/tslib/-/tslib-2.8.1.tgz",
@@ -5565,6 +5596,7 @@
"integrity": "sha512-xjR1dMTVHlFLh98JE3i/f/WePqJsah4A0FK9cc8Ehp9Udk0AZk6ccpIZhh1qJ/yxVWRZ+Q54ocnD8TXmkhspGg==",
"dev": true,
"license": "MIT",
+ "peer": true,
"dependencies": {
"@vitest/expect": "4.1.2",
"@vitest/mocker": "4.1.2",
@@ -5654,6 +5686,20 @@
"url": "https://github.com/sponsors/jonschlinkert"
}
},
+ "node_modules/web-tree-sitter": {
+ "version": "0.25.10",
+ "resolved": "https://registry.npmjs.org/web-tree-sitter/-/web-tree-sitter-0.25.10.tgz",
+ "integrity": "sha512-Y09sF44/13XvgVKgO2cNDw5rGk6s26MgoZPXLESvMXeefBf7i6/73eFurre0IsTW6E14Y0ArIzhUMmjoc7xyzA==",
+ "license": "MIT",
+ "peerDependencies": {
+ "@types/emscripten": "^1.40.0"
+ },
+ "peerDependenciesMeta": {
+ "@types/emscripten": {
+ "optional": true
+ }
+ }
+ },
"node_modules/which": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
diff --git a/server/package.json b/server/package.json
index 55fbbc0..3924a3d 100644
--- a/server/package.json
+++ b/server/package.json
@@ -9,9 +9,9 @@
"scripts": {
"start": "node index.js",
"dev": "nodemon index.js",
- "migrate": "psql \"$DATABASE_URL\" -v ON_ERROR_STOP=1 -f ./src/infrastructure/migrations/001_initial.sql && psql \"$DATABASE_URL\" -v ON_ERROR_STOP=1 -f ./src/infrastructure/migrations/002_function_nodes.sql && psql \"$DATABASE_URL\" -v ON_ERROR_STOP=1 -f ./src/infrastructure/migrations/003_share_tokens.sql && psql \"$DATABASE_URL\" -v ON_ERROR_STOP=1 -f ./src/infrastructure/migrations/004_analysis_jobs_metadata.sql",
+ "migrate": "psql \"$DATABASE_URL\" -v ON_ERROR_STOP=1 -f ./src/infrastructure/migrations/001_initial.sql && psql \"$DATABASE_URL\" -v ON_ERROR_STOP=1 -f ./src/infrastructure/migrations/002_function_nodes.sql && psql \"$DATABASE_URL\" -v ON_ERROR_STOP=1 -f ./src/infrastructure/migrations/003_share_tokens.sql && psql \"$DATABASE_URL\" -v ON_ERROR_STOP=1 -f ./src/infrastructure/migrations/004_analysis_jobs_metadata.sql && psql \"$DATABASE_URL\" -v ON_ERROR_STOP=1 -f ./src/infrastructure/migrations/006_contracts.sql",
"db:migrate": "npm run migrate",
- "test": "node --test test/ai.queries.test.js test/github.webhook.test.js test/parser.multilang.test.js test/pr-comment.test.js",
+ "test": "node --test test/ai.queries.test.js test/ai.snippet-impact.test.js test/ai.suggest-refactor.test.js test/graph.heatmap.test.js test/github.webhook.test.js test/parser.multilang.test.js test/pr-comment.test.js test/snippet.analyzer.confidence.test.js",
"test:ai-queries": "node --test test/ai.queries.test.js",
"test:unit": "vitest run --configLoader native --pool threads",
"test:coverage": "vitest run --coverage --configLoader native --pool threads"
@@ -37,7 +37,9 @@
"passport-github2": "^0.1.12",
"passport-jwt": "^4.0.1",
"pg": "^8.20.0",
- "pgvector": "^0.2.1"
+ "pgvector": "^0.2.1",
+ "tree-sitter-wasms": "^0.1.13",
+ "web-tree-sitter": "^0.25.10"
},
"devDependencies": {
"@vitest/coverage-v8": "^4.0.8",
diff --git a/server/src/agents/analysis/SnippetAnalyzerAgent.js b/server/src/agents/analysis/SnippetAnalyzerAgent.js
new file mode 100644
index 0000000..392f481
--- /dev/null
+++ b/server/src/agents/analysis/SnippetAnalyzerAgent.js
@@ -0,0 +1,489 @@
+import { BaseAgent } from '../core/BaseAgent.js';
+import { createChatClient } from '../../services/ai/llmProvider.js';
+import { AnalysisAgent } from './AnalysisAgent.js';
+
+const DEFAULT_MODEL = process.env.AI_MODEL || process.env.OPENAI_MODEL || 'gpt-4o-mini';
+const MAX_SNIPPET_CHARS = 4_000;
+const MAX_CONTEXT_FILES = 14;
+const DEFAULT_CONFIDENCE_RETRY_THRESHOLD = Number(
+ process.env.AI_SNIPPET_CONFIDENCE_RETRY_THRESHOLD || process.env.AI_CONFIDENCE_RETRY_THRESHOLD || 0.6,
+);
+const DEFAULT_CONFIDENCE_MAX_RERUNS = Number.parseInt(
+ process.env.AI_SNIPPET_CONFIDENCE_MAX_RERUNS || process.env.AI_CONFIDENCE_MAX_RERUNS || 1,
+ 10,
+);
+
+function normalizePath(value) {
+ return String(value || '').trim();
+}
+
+function normalizeSnippet(value) {
+ return String(value || '')
+ .replace(/\r\n/g, '\n')
+ .trim()
+ .slice(0, MAX_SNIPPET_CHARS);
+}
+
+function asPositiveInteger(value) {
+ const numeric = Number.parseInt(value, 10);
+ return Number.isInteger(numeric) && numeric > 0 ? numeric : null;
+}
+
+function clamp01(value) {
+ const numeric = Number(value);
+ if (!Number.isFinite(numeric)) return 0;
+ return Math.max(0, Math.min(1, numeric));
+}
+
+function confidenceLabel(score) {
+ if (score >= 0.85) return 'high';
+ if (score >= 0.65) return 'medium';
+ return 'low';
+}
+
+function confidenceScoreFromLabel(label) {
+ const normalized = String(label || '').trim().toLowerCase();
+ if (normalized === 'high') return 0.9;
+ if (normalized === 'medium') return 0.7;
+ return 0.5;
+}
+
+function parseConfidenceScore(value) {
+ if (typeof value === 'number' && Number.isFinite(value)) {
+ return value > 1 ? value / 100 : value;
+ }
+
+ const normalized = String(value || '').trim().toLowerCase();
+ if (!normalized) return null;
+
+ if (['high', 'medium', 'low'].includes(normalized)) {
+ return confidenceScoreFromLabel(normalized);
+ }
+
+ if (normalized.endsWith('%')) {
+ const pct = Number.parseFloat(normalized.slice(0, -1));
+ if (Number.isFinite(pct)) {
+ return pct / 100;
+ }
+ }
+
+ const numeric = Number.parseFloat(normalized);
+ if (Number.isFinite(numeric)) {
+ return numeric > 1 ? numeric / 100 : numeric;
+ }
+
+ return null;
+}
+
+function toUniquePaths(paths) {
+ if (!Array.isArray(paths)) return [];
+
+ const seen = new Set();
+ const output = [];
+
+ for (const path of paths) {
+ const normalized = normalizePath(path);
+ if (!normalized || seen.has(normalized)) continue;
+ seen.add(normalized);
+ output.push(normalized);
+ }
+
+ return output;
+}
+
+function buildReverseAdjacency(graph) {
+ const analysisAgent = new AnalysisAgent();
+ return analysisAgent.buildReverseAdjacency(graph);
+}
+
+function getImpactPaths(filePath, reverseAdjacency) {
+ const analysisAgent = new AnalysisAgent();
+ return analysisAgent.getImpactedFiles(filePath, reverseAdjacency);
+}
+
+function buildNodeContext(node) {
+ const declarations = Array.isArray(node?.declarations)
+ ? node.declarations
+ .map((entry) => entry?.name)
+ .filter(Boolean)
+ .slice(0, 10)
+ .join(', ')
+ : 'none';
+
+ return {
+ filePath: node?.file_path || null,
+ type: node?.file_type || 'module',
+ summary: node?.summary || 'No summary available',
+ inDegree: Number(node?.metrics?.inDegree || 0),
+ outDegree: Number(node?.metrics?.outDegree || 0),
+ loc: Number(node?.metrics?.loc || 0),
+ declarations,
+ };
+}
+
+function buildPrompt({ filePath, snippet, lineStart, lineEnd, contextFiles, directImpactedFiles, indirectImpactedFiles }) {
+ const lineRange = lineStart && lineEnd ? `${lineStart}-${lineEnd}` : 'unknown';
+
+ return [
+ 'You are SnippetAnalyzerAgent in an agentic code-analysis system.',
+ 'Your task: explain what this snippet does and its architectural impact based only on supplied repository graph context.',
+ 'Do not hallucinate hidden files or behavior. If uncertain, state assumptions briefly.',
+ '',
+ `File: ${filePath}`,
+ `Selected lines: ${lineRange}`,
+ '',
+ 'Snippet:',
+ '```',
+ snippet,
+ '```',
+ '',
+ `Direct impacted files (dependents): ${directImpactedFiles.join(', ') || 'none'}`,
+ `Indirect impacted files (transitive): ${indirectImpactedFiles.join(', ') || 'none'}`,
+ '',
+ 'Related file context:',
+ ...contextFiles.map(
+ (entry, index) =>
+ `${index + 1}. ${entry.filePath} | type=${entry.type} | inDegree=${entry.inDegree} | outDegree=${entry.outDegree} | loc=${entry.loc} | declarations=${entry.declarations} | summary=${entry.summary}`,
+ ),
+ '',
+ 'Return strictly valid JSON with this shape:',
+ '{',
+ ' "snippetPurpose": "concise explanation of what the snippet does",',
+ ' "fileImpact": "impact inside this file",',
+ ' "codebaseImpact": "impact on related files and overall architecture",',
+ ' "directImpactedFiles": ["paths that directly depend on this file/snippet"],',
+ ' "indirectImpactedFiles": ["paths with transitive impact"],',
+ ' "relatedFileFindings": [',
+ ' { "filePath": "path", "impact": "how it is affected", "risk": "low|medium|high" }',
+ ' ],',
+ ' "confidence": "high|medium|low",',
+ ' "confidenceScore": 0.0',
+ '}',
+ 'confidenceScore must be a number between 0 and 1 and confidence should align with the score.',
+ ].join('\n');
+}
+
+function buildRetryPrompt({ previousResponse, threshold }) {
+ return [
+ 'Your previous output had confidence below the required threshold.',
+ `Re-evaluate and respond again with stronger precision. Required confidenceScore >= ${threshold.toFixed(2)}.`,
+ 'Do not invent facts. Keep all claims grounded in the provided graph context.',
+ '',
+ 'Previous response JSON:',
+ String(previousResponse || '{}'),
+ ].join('\n');
+}
+
+function parseModelResponse(rawText, fallback) {
+ try {
+ const parsed = JSON.parse(String(rawText || '{}'));
+
+ const explicitScore = parseConfidenceScore(parsed?.confidenceScore);
+ const labelCandidate = String(parsed?.confidence || '').toLowerCase();
+ const label = ['high', 'medium', 'low'].includes(labelCandidate)
+ ? labelCandidate
+ : confidenceLabel(explicitScore ?? fallback.confidenceScore);
+ const score = clamp01(explicitScore ?? parseConfidenceScore(parsed?.confidence) ?? confidenceScoreFromLabel(label));
+
+ return {
+ snippetPurpose: String(parsed?.snippetPurpose || fallback.snippetPurpose).trim(),
+ fileImpact: String(parsed?.fileImpact || fallback.fileImpact).trim(),
+ codebaseImpact: String(parsed?.codebaseImpact || fallback.codebaseImpact).trim(),
+ directImpactedFiles: toUniquePaths(
+ Array.isArray(parsed?.directImpactedFiles) ? parsed.directImpactedFiles : fallback.directImpactedFiles,
+ ),
+ indirectImpactedFiles: toUniquePaths(
+ Array.isArray(parsed?.indirectImpactedFiles)
+ ? parsed.indirectImpactedFiles
+ : fallback.indirectImpactedFiles,
+ ),
+ relatedFileFindings: Array.isArray(parsed?.relatedFileFindings)
+ ? parsed.relatedFileFindings
+ .map((item) => ({
+ filePath: normalizePath(item?.filePath),
+ impact: String(item?.impact || '').trim(),
+ risk: ['low', 'medium', 'high'].includes(String(item?.risk || '').toLowerCase())
+ ? String(item.risk).toLowerCase()
+ : 'medium',
+ }))
+ .filter((item) => item.filePath && item.impact)
+ .slice(0, MAX_CONTEXT_FILES)
+ : [],
+ confidence: label,
+ confidenceScore: score,
+ };
+ } catch {
+ return fallback;
+ }
+}
+
+export class SnippetAnalyzerAgent extends BaseAgent {
+ agentId = 'snippet-analyzer-agent';
+ maxRetries = 1;
+ timeoutMs = 90_000;
+
+ constructor({ db, llmClient } = {}) {
+ super();
+ this.db = db;
+ this.llmClient = llmClient || createChatClient();
+ this.model = DEFAULT_MODEL;
+ this.confidenceRetryThreshold = clamp01(DEFAULT_CONFIDENCE_RETRY_THRESHOLD);
+ this.confidenceMaxReruns = Math.max(0, Number.isInteger(DEFAULT_CONFIDENCE_MAX_RERUNS) ? DEFAULT_CONFIDENCE_MAX_RERUNS : 1);
+ }
+
+ async process(input = {}, context = {}) {
+ const start = Date.now();
+ const errors = [];
+ const warnings = [];
+
+ const jobId = normalizePath(input.jobId || context.jobId);
+ const filePath = normalizePath(input.filePath);
+ const snippet = normalizeSnippet(input.snippet);
+ const lineStart = asPositiveInteger(input.lineStart);
+ const lineEnd = asPositiveInteger(input.lineEnd);
+
+ if (!jobId || !filePath || !snippet) {
+ return this.buildResult({
+ jobId: context?.jobId || jobId,
+ status: 'failed',
+ confidence: 0,
+ data: {},
+ errors: [{ code: 400, message: 'SnippetAnalyzerAgent requires jobId, filePath, and snippet.' }],
+ warnings,
+ metrics: {},
+ processingTimeMs: Date.now() - start,
+ });
+ }
+
+ if (!this.llmClient.isConfigured()) {
+ return this.buildResult({
+ jobId,
+ status: 'failed',
+ confidence: 0,
+ data: {},
+ errors: [{ code: 503, message: 'AI provider is not configured.' }],
+ warnings,
+ metrics: {},
+ processingTimeMs: Date.now() - start,
+ });
+ }
+
+ try {
+ const [nodesResult, edgesResult] = await Promise.all([
+ this.db.query(
+ `
+ SELECT file_path, file_type, declarations, metrics, summary
+ FROM graph_nodes
+ WHERE job_id = $1
+ `,
+ [jobId],
+ ),
+ this.db.query(
+ `
+ SELECT source_path, target_path
+ FROM graph_edges
+ WHERE job_id = $1
+ `,
+ [jobId],
+ ),
+ ]);
+
+ if (nodesResult.rowCount === 0) {
+ return this.buildResult({
+ jobId,
+ status: 'failed',
+ confidence: 0,
+ data: {},
+ errors: [{ code: 404, message: 'No graph data found for this job.' }],
+ warnings,
+ metrics: {},
+ processingTimeMs: Date.now() - start,
+ });
+ }
+
+ const nodesByPath = new Map();
+ for (const row of nodesResult.rows) {
+ nodesByPath.set(row.file_path, row);
+ }
+
+ if (!nodesByPath.has(filePath)) {
+ return this.buildResult({
+ jobId,
+ status: 'failed',
+ confidence: 0,
+ data: {},
+ errors: [{ code: 404, message: 'filePath not found in this job graph.' }],
+ warnings,
+ metrics: {},
+ processingTimeMs: Date.now() - start,
+ });
+ }
+
+ const depsBySource = new Map();
+ const graph = {};
+
+ for (const row of edgesResult.rows) {
+ if (!depsBySource.has(row.source_path)) depsBySource.set(row.source_path, []);
+ depsBySource.get(row.source_path).push(row.target_path);
+ }
+
+ for (const row of nodesResult.rows) {
+ graph[row.file_path] = {
+ deps: depsBySource.get(row.file_path) || [],
+ type: row.file_type,
+ declarations: row.declarations || [],
+ metrics: row.metrics || {},
+ summary: row.summary || null,
+ };
+ }
+
+ const reverseAdjacency = buildReverseAdjacency(graph);
+ const allImpacted = getImpactPaths(filePath, reverseAdjacency);
+ const directImpactedFiles = Array.isArray(reverseAdjacency[filePath])
+ ? toUniquePaths(reverseAdjacency[filePath])
+ : [];
+ const indirectImpactedFiles = toUniquePaths(
+ allImpacted.filter((impactedPath) => !directImpactedFiles.includes(impactedPath)),
+ );
+
+ const sourceDependencies = Array.isArray(graph[filePath]?.deps) ? graph[filePath].deps : [];
+ const neighborhood = toUniquePaths([
+ filePath,
+ ...sourceDependencies,
+ ...directImpactedFiles,
+ ...indirectImpactedFiles,
+ ]).slice(0, MAX_CONTEXT_FILES);
+
+ const contextFiles = neighborhood
+ .map((path) => nodesByPath.get(path))
+ .filter(Boolean)
+ .map(buildNodeContext);
+
+ const prompt = buildPrompt({
+ filePath,
+ snippet,
+ lineStart,
+ lineEnd,
+ contextFiles,
+ directImpactedFiles,
+ indirectImpactedFiles,
+ });
+
+ const fallback = {
+ snippetPurpose: 'Unable to confidently summarize this snippet from available graph context.',
+ fileImpact: 'Impact inside this file could not be fully determined from metadata.',
+ codebaseImpact: 'Potential impact exists on dependent files listed in direct/indirect sets.',
+ directImpactedFiles,
+ indirectImpactedFiles,
+ relatedFileFindings: contextFiles
+ .filter((entry) => entry.filePath !== filePath)
+ .slice(0, 6)
+ .map((entry) => ({
+ filePath: entry.filePath,
+ impact: `Related to ${filePath} through dependency graph adjacency.`,
+ risk: 'medium',
+ })),
+ confidence: 'medium',
+ confidenceScore: 0.7,
+ };
+
+ let attemptIndex = 0;
+ let totalCompletionTokens = 0;
+ let rerunTriggered = false;
+ let parsed = fallback;
+ let lastRawContent = '{}';
+
+ while (attemptIndex <= this.confidenceMaxReruns) {
+ const messages = [{ role: 'user', content: prompt }];
+
+ if (attemptIndex > 0) {
+ messages.push({
+ role: 'user',
+ content: buildRetryPrompt({
+ previousResponse: lastRawContent,
+ threshold: this.confidenceRetryThreshold,
+ }),
+ });
+ }
+
+ const completion = await this.llmClient.createChatCompletion({
+ model: this.model,
+ temperature: attemptIndex > 0 ? 0 : 0.1,
+ maxTokens: 700,
+ responseFormat: { type: 'json_object' },
+ messages,
+ });
+
+ const completionTokens = Number(completion?.usage?.completion_tokens || completion?.usage?.output_tokens || 0);
+ totalCompletionTokens += completionTokens;
+ lastRawContent = completion?.content || '{}';
+
+ parsed = parseModelResponse(lastRawContent, fallback);
+ if (parsed.confidenceScore >= this.confidenceRetryThreshold) {
+ break;
+ }
+
+ if (attemptIndex < this.confidenceMaxReruns) {
+ rerunTriggered = true;
+ warnings.push({
+ code: 299,
+ message: `Low-confidence snippet analysis (${parsed.confidenceScore.toFixed(2)}) triggered re-run.`,
+ });
+ }
+
+ attemptIndex += 1;
+ }
+
+ const numericConfidence = clamp01(parsed.confidenceScore);
+ const confidenceBucket = confidenceLabel(numericConfidence);
+
+ return this.buildResult({
+ jobId,
+ status: 'success',
+ confidence: numericConfidence,
+ data: {
+ filePath,
+ snippet,
+ lineStart,
+ lineEnd,
+ snippetPurpose: parsed.snippetPurpose,
+ fileImpact: parsed.fileImpact,
+ codebaseImpact: parsed.codebaseImpact,
+ directImpactedFiles: parsed.directImpactedFiles,
+ indirectImpactedFiles: parsed.indirectImpactedFiles,
+ relatedFileFindings: parsed.relatedFileFindings,
+ relatedFilesScanned: contextFiles.map((entry) => entry.filePath),
+ confidence: confidenceBucket,
+ confidenceScore: numericConfidence,
+ rerunTriggered,
+ attemptsUsed: attemptIndex + 1,
+ confidenceThreshold: this.confidenceRetryThreshold,
+ },
+ errors,
+ warnings,
+ metrics: {
+ snippetChars: snippet.length,
+ contextFileCount: contextFiles.length,
+ directImpactedCount: parsed.directImpactedFiles.length,
+ indirectImpactedCount: parsed.indirectImpactedFiles.length,
+ completionTokens: totalCompletionTokens,
+ attemptsUsed: attemptIndex + 1,
+ },
+ processingTimeMs: Date.now() - start,
+ });
+ } catch (error) {
+ errors.push({ code: error?.statusCode || error?.status || 500, message: error.message });
+
+ return this.buildResult({
+ jobId,
+ status: 'failed',
+ confidence: 0,
+ data: {},
+ errors,
+ warnings,
+ metrics: {},
+ processingTimeMs: Date.now() - start,
+ });
+ }
+ }
+}
\ No newline at end of file
diff --git a/server/src/agents/core/SupervisorAgent.js b/server/src/agents/core/SupervisorAgent.js
index bd06417..be5c498 100644
--- a/server/src/agents/core/SupervisorAgent.js
+++ b/server/src/agents/core/SupervisorAgent.js
@@ -1,8 +1,9 @@
import { IngestionAgent } from '../ingestion/IngestionAgent.js';
import { ScannerAgent } from '../scanner/ScannerAgent.js';
-import { ParserAgent } from '../parser/ParserAgent.js';
+import { PolyglotParserAgent } from '../parser/PolyglotParserAgent.js';
import { GraphBuilderAgent } from '../graph/GraphBuilderAgent.js';
import { EnrichmentAgent } from '../enrichment/EnrichmentAgent.js';
+import { ContractInferenceAgent } from '../enrichment/ContractInferenceAgent.js';
import { EmbeddingAgent } from '../embedding/EmbeddingAgent.js';
import { PersistenceAgent } from '../persistence/PersistenceAgent.js';
import { AuditLogger } from './AuditLogger.js';
@@ -28,9 +29,10 @@ export class SupervisorAgent {
this.agents = {
ingestion: new IngestionAgent(),
scanner: new ScannerAgent(),
- parser: new ParserAgent(),
+ parser: new PolyglotParserAgent(),
graphBuilder: new GraphBuilderAgent(),
enrichment: new EnrichmentAgent(),
+ contractInference: new ContractInferenceAgent(),
embedding: new EmbeddingAgent(),
persistence: new PersistenceAgent({ db }),
};
@@ -92,6 +94,16 @@ export class SupervisorAgent {
agentTrace.push(enrichmentResult);
Object.assign(pipelineData, enrichmentResult.data);
+ await this._updateJobStatus(jobId, 'inferring-contracts');
+ const contractResult = await this._runWithSupervision(
+ this.agents.contractInference,
+ { graph: pipelineData.graph, extractedPath: pipelineData.extractedPath },
+ context,
+ { abortOnCritical: false },
+ );
+ agentTrace.push(contractResult);
+ Object.assign(pipelineData, contractResult.data);
+
await this._updateJobStatus(jobId, 'embedding');
const embeddingResult = await this._runWithSupervision(
this.agents.embedding,
@@ -116,6 +128,7 @@ export class SupervisorAgent {
edges: pipelineData.edges,
functionNodes: pipelineData.functionNodes,
enriched: pipelineData.enriched,
+ contracts: pipelineData.contracts,
embeddings: pipelineData.embeddings,
topology: pipelineData.topology,
},
@@ -325,6 +338,7 @@ export class SupervisorAgent {
const prNumber = input?.github?.prNumber;
const owner = input?.github?.owner;
const repo = input?.github?.repo;
+ const sha = input?.github?.headSha;
if (!prNumber || !owner || !repo) return;
if (!GitHubPRService.isConfigured()) {
@@ -359,6 +373,17 @@ export class SupervisorAgent {
}
console.log(`[SupervisorAgent] PR comment posted to ${owner}/${repo}#${prNumber}`);
+
+ // Create a check run for PR status
+ if (sha) {
+ const conclusion = impactedFiles.size > 10 ? 'failure' : 'neutral';
+ await GitHubPRService.createCheckRun(owner, repo, sha, {
+ conclusion,
+ title: `${impactedFiles.size} files potentially impacted`,
+ summary: `${changedFiles.length} changed files affect ${impactedFiles.size} dependent files.`,
+ detailsUrl: graphUrl,
+ });
+ }
} catch (err) {
// PR comment failure must never abort the main pipeline.
console.error('[SupervisorAgent] Failed to post PR comment:', err.message);
diff --git a/server/src/agents/core/__tests__/confidence.test.js b/server/src/agents/core/__tests__/confidence.test.js
index c727508..e9f2c75 100644
--- a/server/src/agents/core/__tests__/confidence.test.js
+++ b/server/src/agents/core/__tests__/confidence.test.js
@@ -30,7 +30,7 @@ describe('scoreParser', () => {
describe('computeOverallConfidence', () => {
it('applies parser weight and drags overall confidence down for low parser score', () => {
const trace = [
- { agentId: 'parser-agent', confidence: 0.3 },
+ { agentId: 'polyglot-parser-agent', confidence: 0.3 },
{ agentId: 'graph-builder-agent', confidence: 0.95 },
{ agentId: 'persistence-agent', confidence: 1.0 },
];
diff --git a/server/src/agents/core/confidence.js b/server/src/agents/core/confidence.js
index 6020395..84795ce 100644
--- a/server/src/agents/core/confidence.js
+++ b/server/src/agents/core/confidence.js
@@ -23,7 +23,7 @@ export const CONFIDENCE_THRESHOLDS = {
export const DEFAULT_AGENT_WEIGHTS = {
'ingestion-agent': 0.1,
'scanner-agent': 0.1,
- 'parser-agent': 0.25,
+ 'polyglot-parser-agent': 0.25,
'graph-builder-agent': 0.25,
'enrichment-agent': 0.1,
'embedding-agent': 0.1,
@@ -77,6 +77,19 @@ export function scoreParser({ totalAttempted = 0, successCount = 0, failedCount
return round3(parseRate * (1 - errorPenalty));
}
+export function scorePolyglotParser({
+ totalAttempted = 0,
+ successCount = 0,
+ failedCount = 0,
+ languageBreakdown = {},
+} = {}) {
+ const parseRate = safeDiv(successCount, Math.max(totalAttempted, 1), 0);
+ const errorPenalty = Math.min(0.3, safeDiv(failedCount, Math.max(totalAttempted, 1), 0));
+ const langCount = Object.keys(languageBreakdown || {}).length;
+ const langBonus = Math.min(0.05, langCount * 0.005);
+ return round3(parseRate * (1 - errorPenalty) + langBonus);
+}
+
export function scoreGraphBuilder({
resolvedEdges = 0,
resolvedLocalEdges = resolvedEdges,
diff --git a/server/src/agents/embedding/EmbeddingAgent.js b/server/src/agents/embedding/EmbeddingAgent.js
index 279b618..8c45388 100644
--- a/server/src/agents/embedding/EmbeddingAgent.js
+++ b/server/src/agents/embedding/EmbeddingAgent.js
@@ -1,8 +1,9 @@
-import OpenAI from 'openai';
import { BaseAgent } from '../core/BaseAgent.js';
import { scoreEmbedding } from '../core/confidence.js';
+import { createEmbeddingClient } from '../../services/ai/llmProvider.js';
-const DEFAULT_EMBEDDING_MODEL = process.env.OPENAI_EMBEDDING_MODEL || 'text-embedding-3-small';
+const DEFAULT_EMBEDDING_MODEL =
+ process.env.AI_EMBEDDING_MODEL || process.env.OPENAI_EMBEDDING_MODEL || 'text-embedding-3-small';
const EMBEDDING_BATCH_SIZE = 100;
function formatDeclarationNames(declarations) {
@@ -34,13 +35,9 @@ export class EmbeddingAgent extends BaseAgent {
maxRetries = 2;
timeoutMs = 180_000;
- constructor({ openaiClient } = {}) {
+ constructor({ embeddingClient } = {}) {
super();
- this.openai =
- openaiClient ||
- (process.env.OPENAI_API_KEY
- ? new OpenAI({ apiKey: process.env.OPENAI_API_KEY })
- : null);
+ this.embeddingClient = embeddingClient || createEmbeddingClient();
this.model = DEFAULT_EMBEDDING_MODEL;
}
@@ -66,13 +63,13 @@ export class EmbeddingAgent extends BaseAgent {
});
}
- if (!this.openai) {
+ if (!this.embeddingClient.isConfigured()) {
return this.buildResult({
jobId: context?.jobId,
status: 'failed',
confidence: 0,
data: {},
- errors: [{ code: 500, message: 'OPENAI_API_KEY is missing for EmbeddingAgent.' }],
+ errors: [{ code: 500, message: 'Embedding provider is not configured for EmbeddingAgent.' }],
warnings,
metrics: {
attempted: entries.length,
@@ -98,7 +95,7 @@ export class EmbeddingAgent extends BaseAgent {
const batch = payload.slice(idx, idx + EMBEDDING_BATCH_SIZE);
try {
- const response = await this.openai.embeddings.create({
+ const response = await this.embeddingClient.createEmbedding({
model: this.model,
input: batch.map((item) => item.text),
});
diff --git a/server/src/agents/enrichment/ContractInferenceAgent.js b/server/src/agents/enrichment/ContractInferenceAgent.js
new file mode 100644
index 0000000..f0c3c6d
--- /dev/null
+++ b/server/src/agents/enrichment/ContractInferenceAgent.js
@@ -0,0 +1,212 @@
+import { readFile } from 'fs/promises';
+import path from 'path';
+import crypto from 'crypto';
+import pLimit from 'p-limit';
+import { BaseAgent } from '../core/BaseAgent.js';
+import { createChatClient } from '../../services/ai/llmProvider.js';
+import { redisClient } from '../../infrastructure/connections.js';
+
+const CACHE_TTL = Number(process.env.AI_CACHE_TTL_SECONDS || 3600);
+const CONCURRENCY = Number(process.env.CONTRACT_CONCURRENCY || 3);
+
+const ROUTE_INDICATORS = [
+ /\.(?:router|routes|controller|handler|api)\./i,
+ /(?:router|app|bp)\.(get|post|put|delete|patch)\s*\(/,
+ /@(?:Get|Post|Put|Delete|Patch|Request)Mapping/,
+ /@app\.route/,
+ /def\s+\w+_view\s*\(/,
+ /fastapi|flask\.Blueprint/i,
+];
+
+function normalizeConcurrency(value, fallback) {
+ return Number.isInteger(value) && value > 0 ? value : fallback;
+}
+
+function isRouteFile(filePath, content) {
+ return ROUTE_INDICATORS.some((re) => re.test(filePath) || re.test(content.slice(0, 2000)));
+}
+
+function buildContractPrompt(filePath, content) {
+ const snippet = content.slice(0, 10_000);
+ return [
+ 'You are an API contract analyser. Inspect the route handler file below.',
+ 'Return ONLY valid JSON. No markdown. No preamble.',
+ '',
+ `File: ${filePath}`,
+ '---',
+ snippet,
+ '---',
+ '',
+ 'Return this exact schema:',
+ '{',
+ ' "routes": [',
+ ' {',
+ ' "method": "GET|POST|PUT|DELETE|PATCH",',
+ ' "path": "/api/example",',
+ ' "requestBody": { "type": "object", "properties": {} },',
+ ' "responseBody": { "type": "object", "properties": {} },',
+ ' "queryParams": ["param1"],',
+ ' "pathParams": [":id"],',
+ ' "confidenceScore": 0.0',
+ ' }',
+ ' ],',
+ ' "envDependencies": ["OPENAI_API_KEY"],',
+ ' "externalServices": ["stripe.com", "redis"],',
+ ' "cachingPatterns": ["redis.set", "lru-cache"]',
+ '}',
+ ].join('\n');
+}
+
+function normalizeContract(payload) {
+ return {
+ routes: Array.isArray(payload?.routes) ? payload.routes : [],
+ envDependencies: Array.isArray(payload?.envDependencies) ? payload.envDependencies : [],
+ externalServices: Array.isArray(payload?.externalServices) ? payload.externalServices : [],
+ cachingPatterns: Array.isArray(payload?.cachingPatterns) ? payload.cachingPatterns : [],
+ };
+}
+
+function cacheKeyFor(filePath, content) {
+ const hash = crypto.createHash('sha256');
+ hash.update(filePath);
+ hash.update('\u0000');
+ hash.update(content);
+ return `contract:${hash.digest('hex')}`;
+}
+
+export class ContractInferenceAgent extends BaseAgent {
+ agentId = 'contract-inference-agent';
+ maxRetries = 1;
+ timeoutMs = 180_000;
+
+ constructor({ redis } = {}) {
+ super();
+ this.chatClient = createChatClient();
+ this.redis = redis || redisClient;
+ this.concurrency = normalizeConcurrency(CONCURRENCY, 3);
+ this.cacheTtlSeconds = Number.isFinite(CACHE_TTL) && CACHE_TTL > 0 ? CACHE_TTL : 3600;
+ }
+
+ async process(input, context) {
+ const start = Date.now();
+ const graph = input?.graph || {};
+ const extractedPath = input?.extractedPath || '';
+ const warnings = [];
+ const errors = [];
+ const contracts = {};
+
+ if (!this.chatClient.isConfigured()) {
+ return this.buildResult({
+ jobId: context?.jobId,
+ status: 'failed',
+ confidence: 0,
+ data: {},
+ errors: [{ code: 500, message: 'AI provider is not configured. Set AI_API_KEY (or OPENAI_API_KEY) in your environment.' }],
+ warnings,
+ metrics: {},
+ processingTimeMs: Date.now() - start,
+ });
+ }
+
+ const entries = Object.entries(graph);
+ if (!extractedPath || entries.length === 0) {
+ return this.buildResult({
+ jobId: context?.jobId,
+ status: 'partial',
+ confidence: 0.5,
+ data: { contracts, stats: { attempted: 0, succeeded: 0, skipped: 0 } },
+ errors,
+ warnings: [...warnings, 'Contract inference skipped due to missing graph or extractedPath.'],
+ metrics: { routeFilesFound: 0, succeeded: 0, skipped: 0 },
+ processingTimeMs: Date.now() - start,
+ });
+ }
+
+ const limit = pLimit(this.concurrency);
+ let attempted = 0;
+ let succeeded = 0;
+ let skipped = 0;
+
+ await Promise.all(
+ entries.map(([filePath]) =>
+ limit(async () => {
+ const absolute = path.join(extractedPath, filePath);
+ let content = '';
+
+ try {
+ content = await readFile(absolute, 'utf8');
+ } catch {
+ skipped += 1;
+ return;
+ }
+
+ if (!isRouteFile(filePath, content)) {
+ skipped += 1;
+ return;
+ }
+
+ const key = cacheKeyFor(filePath, content);
+ const cached = await this._readCache(key);
+ if (cached) {
+ contracts[filePath] = cached;
+ succeeded += 1;
+ return;
+ }
+
+ attempted += 1;
+
+ try {
+ const result = await this.chatClient.createChatCompletion({
+ temperature: 0.0,
+ maxTokens: 600,
+ responseFormat: { type: 'json_object' },
+ messages: [{ role: 'user', content: buildContractPrompt(filePath, content) }],
+ });
+
+ const raw = result?.content || '{}';
+ const parsed = JSON.parse(String(raw).replace(/```json|```/g, '').trim());
+ const normalized = normalizeContract(parsed);
+
+ contracts[filePath] = normalized;
+ succeeded += 1;
+ await this._writeCache(key, normalized);
+ } catch (error) {
+ warnings.push(`Contract inference failed for ${filePath}: ${error.message}`);
+ }
+ }),
+ ),
+ );
+
+ const confidence = attempted === 0 ? 0.5 : succeeded / Math.max(attempted, 1);
+
+ return this.buildResult({
+ jobId: context?.jobId,
+ status: 'success',
+ confidence: Math.max(0.4, confidence),
+ data: { contracts, stats: { attempted, succeeded, skipped } },
+ errors,
+ warnings,
+ metrics: { routeFilesFound: attempted, succeeded, skipped },
+ processingTimeMs: Date.now() - start,
+ });
+ }
+
+ async _readCache(key) {
+ if (!this.redis?.get) return null;
+ try {
+ const value = await this.redis.get(key);
+ return value ? JSON.parse(value) : null;
+ } catch {
+ return null;
+ }
+ }
+
+ async _writeCache(key, value) {
+ if (!this.redis?.set) return;
+ try {
+ await this.redis.set(key, JSON.stringify(value), 'EX', this.cacheTtlSeconds);
+ } catch {
+ // Best-effort cache write.
+ }
+ }
+}
diff --git a/server/src/agents/enrichment/EnrichmentAgent.js b/server/src/agents/enrichment/EnrichmentAgent.js
index 901f25e..fa76711 100644
--- a/server/src/agents/enrichment/EnrichmentAgent.js
+++ b/server/src/agents/enrichment/EnrichmentAgent.js
@@ -1,13 +1,13 @@
import crypto from 'crypto';
import path from 'path';
import { readFile } from 'fs/promises';
-import OpenAI from 'openai';
import pLimit from 'p-limit';
import { BaseAgent } from '../core/BaseAgent.js';
import { scoreEnrichment } from '../core/confidence.js';
import { redisClient } from '../../infrastructure/connections.js';
+import { createChatClient } from '../../services/ai/llmProvider.js';
-const DEFAULT_MODEL = process.env.OPENAI_MODEL || 'gpt-4o-mini';
+const DEFAULT_MODEL = process.env.AI_MODEL || process.env.OPENAI_MODEL || 'gpt-4o-mini';
const FILE_LINE_THRESHOLD = Number(process.env.ENRICHMENT_FILE_LINE_THRESHOLD || 50);
const CACHE_TTL_SECONDS = Number(process.env.AI_CACHE_TTL_SECONDS || 3600);
const ENRICHMENT_CONCURRENCY = Number(process.env.ENRICHMENT_CONCURRENCY || 4);
@@ -80,14 +80,10 @@ export class EnrichmentAgent extends BaseAgent {
maxRetries = 2;
timeoutMs = 240_000;
- constructor({ redis, openaiClient } = {}) {
+ constructor({ redis, llmClient } = {}) {
super();
this.redis = redis || redisClient;
- this.openai =
- openaiClient ||
- (process.env.OPENAI_API_KEY
- ? new OpenAI({ apiKey: process.env.OPENAI_API_KEY })
- : null);
+ this.llmClient = llmClient || createChatClient();
this.model = DEFAULT_MODEL;
this.lineThreshold = Number.isFinite(FILE_LINE_THRESHOLD) ? FILE_LINE_THRESHOLD : 50;
this.cacheTtlSeconds = Number.isFinite(CACHE_TTL_SECONDS) ? CACHE_TTL_SECONDS : 3600;
@@ -115,8 +111,8 @@ export class EnrichmentAgent extends BaseAgent {
});
}
- if (!this.openai) {
- warnings.push('OPENAI_API_KEY is missing. Falling back to heuristic summaries only.');
+ if (!this.llmClient.isConfigured()) {
+ warnings.push('AI provider is not configured. Falling back to heuristic summaries only.');
}
const enriched = {};
@@ -147,7 +143,7 @@ export class EnrichmentAgent extends BaseAgent {
return;
}
- if (!this.openai) {
+ if (!this.llmClient.isConfigured()) {
enriched[filePath] = {
summary: cheapSummary(filePath, node),
architecturalRole: node?.type || 'module',
@@ -188,18 +184,18 @@ export class EnrichmentAgent extends BaseAgent {
try {
const prompt = buildPrompt({ filePath, node, content });
- const completion = await this.openai.chat.completions.create({
+ const completion = await this.llmClient.createChatCompletion({
model: this.model,
temperature: 0.1,
- max_tokens: 220,
- response_format: { type: 'json_object' },
+ maxTokens: 220,
+ responseFormat: { type: 'json_object' },
messages: [{ role: 'user', content: prompt }],
});
- const message = completion?.choices?.[0]?.message?.content || '{}';
+ const message = completion?.content || '{}';
const usage = completion?.usage || {};
- totalPromptTokens += Number(usage.prompt_tokens || 0);
- totalCompletionTokens += Number(usage.completion_tokens || 0);
+ totalPromptTokens += Number(usage.prompt_tokens || usage.input_tokens || 0);
+ totalCompletionTokens += Number(usage.completion_tokens || usage.output_tokens || 0);
let parsed;
try {
diff --git a/server/src/agents/parser/PolyglotParserAgent.js b/server/src/agents/parser/PolyglotParserAgent.js
new file mode 100644
index 0000000..162ef68
--- /dev/null
+++ b/server/src/agents/parser/PolyglotParserAgent.js
@@ -0,0 +1,196 @@
+import path from 'path';
+import os from 'os';
+import { readFile } from 'fs/promises';
+import { Worker } from 'worker_threads';
+import pLimit from 'p-limit';
+import { BaseAgent } from '../core/BaseAgent.js';
+import { scorePolyglotParser } from '../core/confidence.js';
+import { parseSql } from './sqlParser.js';
+
+const BABEL_EXTS = new Set(['.js', '.ts', '.jsx', '.tsx']);
+const TREESITTER_EXTS = new Set(['.py', '.java', '.go', '.rs', '.rb', '.cs', '.kt', '.kts', '.php']);
+const SQL_EXTS = new Set(['.sql']);
+
+function langFromExt(ext) {
+ const map = {
+ '.py': 'python',
+ '.java': 'java',
+ '.go': 'go',
+ '.rs': 'rust',
+ '.rb': 'ruby',
+ '.cs': 'c_sharp',
+ '.kt': 'kotlin',
+ '.kts': 'kotlin',
+ '.php': 'php',
+ };
+
+ return map[ext] || null;
+}
+
+function normalizeRelative(filePath, rootDir) {
+ return path.relative(rootDir, filePath).replace(/\\/g, '/');
+}
+
+function parseConcurrency() {
+ const configured = Number(process.env.PARSER_WORKER_CONCURRENCY);
+ if (Number.isInteger(configured) && configured > 0) return configured;
+ return Math.max(1, os.cpus().length - 1);
+}
+
+function extensionGroup(ext) {
+ if (BABEL_EXTS.has(ext)) return 'babel';
+ if (TREESITTER_EXTS.has(ext)) return langFromExt(ext) || 'treesitter';
+ if (SQL_EXTS.has(ext)) return 'sql';
+ return 'unsupported';
+}
+
+function emptyParseResult(relativePath, parseError) {
+ return {
+ relativePath,
+ imports: [],
+ declarations: [],
+ functionNodes: [],
+ metrics: {},
+ parseError,
+ };
+}
+
+export class PolyglotParserAgent extends BaseAgent {
+ agentId = 'polyglot-parser-agent';
+ maxRetries = 2;
+ timeoutMs = 300_000;
+
+ async process(input, context) {
+ const start = Date.now();
+ const errors = [];
+ const warnings = [];
+
+ const rootDir = input?.extractedPath || input?.rootDir;
+ const manifest = Array.isArray(input?.manifest)
+ ? input.manifest
+ : (input?.files || []).map((absolutePath) => ({
+ absolutePath,
+ relativePath: rootDir ? normalizeRelative(absolutePath, rootDir) : absolutePath,
+ }));
+
+ if (!rootDir || manifest.length === 0) {
+ return this.buildResult({
+ jobId: context?.jobId,
+ status: 'failed',
+ confidence: 0,
+ data: {},
+ errors: [{ code: 400, message: 'PolyglotParserAgent requires extractedPath and manifest.' }],
+ warnings,
+ metrics: {},
+ processingTimeMs: Date.now() - start,
+ });
+ }
+
+ const languageBreakdown = {};
+ for (const file of manifest) {
+ const ext = path.extname(file.absolutePath).toLowerCase();
+ const key = extensionGroup(ext);
+ languageBreakdown[key] = (languageBreakdown[key] || 0) + 1;
+ }
+
+ const limit = pLimit(parseConcurrency());
+ const parsedFiles = await Promise.all(
+ manifest.map((file) => limit(() => this._parseFile(file))),
+ );
+
+ let successCount = 0;
+ let failedCount = 0;
+
+ for (const parsed of parsedFiles) {
+ if (parsed.parseError) {
+ failedCount += 1;
+ warnings.push(`Parse error in ${parsed.relativePath}: ${parsed.parseError}`);
+ } else {
+ successCount += 1;
+ }
+ }
+
+ const summary = {
+ totalAttempted: manifest.length,
+ successCount,
+ failedCount,
+ languageBreakdown,
+ };
+
+ const confidence = scorePolyglotParser(summary);
+ const status = failedCount === manifest.length ? 'failed' : failedCount > 0 ? 'partial' : 'success';
+
+ return this.buildResult({
+ jobId: context?.jobId,
+ status,
+ confidence,
+ data: {
+ parsedFiles,
+ summary,
+ },
+ errors,
+ warnings,
+ metrics: {
+ totalAttempted: manifest.length,
+ successCount,
+ failedCount,
+ },
+ processingTimeMs: Date.now() - start,
+ });
+ }
+
+ async _parseFile(file) {
+ const ext = path.extname(file.absolutePath).toLowerCase();
+
+ if (SQL_EXTS.has(ext)) {
+ return this._parseSqlFile(file);
+ }
+
+ if (BABEL_EXTS.has(ext)) {
+ return this._runWorker('parseWorker.js', {
+ filePath: file.absolutePath,
+ relativePath: file.relativePath,
+ });
+ }
+
+ if (TREESITTER_EXTS.has(ext)) {
+ return this._runWorker('treesitterWorker.js', {
+ filePath: file.absolutePath,
+ relativePath: file.relativePath,
+ language: langFromExt(ext),
+ });
+ }
+
+ return emptyParseResult(file.relativePath, `Unsupported extension: ${ext}`);
+ }
+
+ async _parseSqlFile(file) {
+ try {
+ const content = await readFile(file.absolutePath, 'utf8');
+ return parseSql(content, file.relativePath);
+ } catch (error) {
+ return emptyParseResult(file.relativePath, error.message);
+ }
+ }
+
+ _runWorker(workerFile, workerData) {
+ return new Promise((resolve) => {
+ const worker = new Worker(new URL(workerFile, import.meta.url), { workerData });
+
+ const timeout = setTimeout(() => {
+ worker.terminate();
+ resolve(emptyParseResult(workerData.relativePath, `Worker timeout (${workerFile})`));
+ }, 30_000);
+
+ worker.once('message', (message) => {
+ clearTimeout(timeout);
+ resolve(message);
+ });
+
+ worker.once('error', (error) => {
+ clearTimeout(timeout);
+ resolve(emptyParseResult(workerData.relativePath, error.message));
+ });
+ });
+ }
+}
diff --git a/server/src/agents/parser/sqlParser.js b/server/src/agents/parser/sqlParser.js
new file mode 100644
index 0000000..a7ed913
--- /dev/null
+++ b/server/src/agents/parser/sqlParser.js
@@ -0,0 +1,49 @@
+const CREATE_TABLE_RE = /CREATE\s+(?:OR\s+REPLACE\s+)?TABLE\s+(?:IF\s+NOT\s+EXISTS\s+)?(?:\w+\.)?(\w+)/gi;
+const FROM_RE = /\bFROM\s+(?:\w+\.)?(\w+)/gi;
+const JOIN_RE = /\bJOIN\s+(?:\w+\.)?(\w+)/gi;
+const INSERT_RE = /\bINSERT\s+INTO\s+(?:\w+\.)?(\w+)/gi;
+const UPDATE_RE = /\bUPDATE\s+(?:\w+\.)?(\w+)/gi;
+const COLUMN_RE = /\b(\w+)\s+(?:INT|TEXT|VARCHAR|BOOLEAN|NUMERIC|TIMESTAMPTZ|UUID|JSONB|SERIAL|BIGINT|FLOAT|REAL|DATE|CHAR)\b/gi;
+
+function matchAll(re, str) {
+ const matches = [];
+ const local = new RegExp(re.source, re.flags);
+ let current;
+ while ((current = local.exec(str)) !== null) {
+ matches.push(current[1]);
+ }
+ return matches;
+}
+
+export function parseSql(content, relativePath) {
+ const tables = new Set([
+ ...matchAll(CREATE_TABLE_RE, content),
+ ...matchAll(FROM_RE, content),
+ ...matchAll(JOIN_RE, content),
+ ...matchAll(INSERT_RE, content),
+ ...matchAll(UPDATE_RE, content),
+ ]);
+
+ const columns = new Set(matchAll(COLUMN_RE, content));
+
+ const imports = [...tables].map((table) => `table:${table}`);
+
+ const declarations = [
+ ...[...tables].map((name) => ({ name, kind: 'table' })),
+ ...[...columns].map((name) => ({ name, kind: 'column' })),
+ ];
+
+ return {
+ relativePath,
+ imports,
+ declarations,
+ functionNodes: [],
+ metrics: {
+ loc: content.split(/\r?\n/).length,
+ tableCount: tables.size,
+ columnCount: columns.size,
+ importCount: tables.size,
+ },
+ parseError: null,
+ };
+}
diff --git a/server/src/agents/parser/treesitterWorker.js b/server/src/agents/parser/treesitterWorker.js
new file mode 100644
index 0000000..d1bc5ba
--- /dev/null
+++ b/server/src/agents/parser/treesitterWorker.js
@@ -0,0 +1,188 @@
+import { existsSync } from 'fs';
+import { readFile } from 'fs/promises';
+import path from 'path';
+import { parentPort, workerData } from 'worker_threads';
+import { Language, Parser, Query } from 'web-tree-sitter';
+import { fileURLToPath } from 'url';
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+
+const QUERIES = {
+ python: {
+ imports: `
+ (import_statement (dotted_name) @import)
+ (import_from_statement module_name: (dotted_name) @import)
+ `,
+ declarations: `
+ (function_definition name: (identifier) @name) @fn
+ (class_definition name: (identifier) @name) @cls
+ `,
+ },
+ java: {
+ imports: `(import_declaration (scoped_identifier) @import)`,
+ declarations: `
+ (method_declaration name: (identifier) @name) @fn
+ (class_declaration name: (identifier) @name) @cls
+ (interface_declaration name: (identifier) @name) @iface
+ `,
+ },
+ go: {
+ imports: `(import_spec path: (interpreted_string_literal) @import)`,
+ declarations: `
+ (function_declaration name: (identifier) @name) @fn
+ (type_declaration (type_spec name: (type_identifier) @name)) @type
+ `,
+ },
+ rust: {
+ imports: `(use_declaration argument: (_) @import)`,
+ declarations: `
+ (function_item name: (identifier) @name) @fn
+ (struct_item name: (type_identifier) @name) @struct
+ (enum_item name: (type_identifier) @name) @enum
+ `,
+ },
+ ruby: {
+ imports: `(call method: (identifier) @method (#match? @method "^require")) @import`,
+ declarations: `
+ (method name: (identifier) @name) @fn
+ (singleton_method name: (identifier) @name) @fn
+ (class name: (constant) @name) @cls
+ `,
+ },
+ c_sharp: {
+ imports: `(using_directive (identifier) @import)`,
+ declarations: `
+ (method_declaration name: (identifier) @name) @fn
+ (class_declaration name: (identifier) @name) @cls
+ (interface_declaration name: (identifier) @name) @iface
+ `,
+ },
+ kotlin: {
+ imports: `(import_header (identifier) @import)`,
+ declarations: `
+ (function_declaration (simple_identifier) @name) @fn
+ (class_declaration (type_identifier) @name) @cls
+ `,
+ },
+ php: {
+ imports: `(include_expression (string) @import)`,
+ declarations: `
+ (function_definition name: (name) @name) @fn
+ (class_declaration name: (name) @name) @cls
+ `,
+ },
+};
+
+function emptyResult(relativePath, parseError) {
+ return {
+ relativePath,
+ imports: [],
+ declarations: [],
+ functionNodes: [],
+ metrics: {},
+ parseError,
+ };
+}
+
+function declarationKindFromCaptures(captures) {
+ const marker = captures.find((capture) => capture.name !== 'name' && capture.name !== 'import');
+ return marker?.name || 'fn';
+}
+
+function resolveWasmPath(language) {
+ const wasmFile = `tree-sitter-${language}.wasm`;
+ const configuredWasmDir = process.env.PARSER_WASM_DIR;
+
+ const candidates = [
+ configuredWasmDir ? path.resolve(configuredWasmDir, wasmFile) : null,
+ path.resolve(__dirname, '../../../wasm', wasmFile),
+ path.resolve(__dirname, '../../../node_modules/tree-sitter-wasms/out', wasmFile),
+ path.resolve(process.cwd(), 'wasm', wasmFile),
+ path.resolve(process.cwd(), 'node_modules/tree-sitter-wasms/out', wasmFile),
+ ].filter(Boolean);
+
+ const located = candidates.find((candidate) => existsSync(candidate));
+ if (located) return located;
+
+ throw new Error(
+ `Missing Tree-sitter WASM for ${language}. Looked in: ${candidates.join(', ')}`,
+ );
+}
+
+async function run() {
+ const { filePath, relativePath, language } = workerData;
+
+ if (!filePath || !relativePath || !language) {
+ parentPort.postMessage(emptyResult(relativePath || 'unknown', 'Worker missing required filePath, relativePath, or language.'));
+ return;
+ }
+
+ const queries = QUERIES[language] || { imports: '', declarations: '' };
+
+ await Parser.init();
+
+ const wasmPath = resolveWasmPath(language);
+ const lang = await Language.load(wasmPath);
+
+ const parser = new Parser();
+ parser.setLanguage(lang);
+
+ const source = await readFile(filePath, 'utf8');
+ const tree = parser.parse(source);
+ const root = tree.rootNode;
+
+ const imports = [];
+ const declarations = [];
+ const functionNodes = [];
+ const seenImports = new Set();
+ const seenDecls = new Set();
+
+ if (queries.imports) {
+ const query = new Query(lang, queries.imports);
+ for (const match of query.matches(root)) {
+ for (const capture of match.captures) {
+ if (capture.name !== 'import') continue;
+ const value = capture.node.text.replace(/['"]/g, '');
+ if (!value || seenImports.has(value)) continue;
+ seenImports.add(value);
+ imports.push(value);
+ }
+ }
+ }
+
+ if (queries.declarations) {
+ const query = new Query(lang, queries.declarations);
+ for (const match of query.matches(root)) {
+ const kind = declarationKindFromCaptures(match.captures);
+ for (const capture of match.captures) {
+ if (capture.name !== 'name') continue;
+
+ const name = capture.node.text;
+ const key = `${kind}:${name}`;
+ if (!name || seenDecls.has(key)) continue;
+
+ seenDecls.add(key);
+ declarations.push({ name, kind });
+ functionNodes.push({ name, kind, calls: [], loc: null });
+ }
+ }
+ }
+
+ parentPort.postMessage({
+ relativePath,
+ imports,
+ declarations,
+ functionNodes,
+ metrics: {
+ loc: source.split(/\r?\n/).length,
+ importCount: imports.length,
+ declarationCount: declarations.length,
+ },
+ parseError: null,
+ });
+}
+
+run().catch((error) => {
+ parentPort.postMessage(emptyResult(workerData?.relativePath || 'unknown', error.message));
+});
diff --git a/server/src/agents/persistence/PersistenceAgent.js b/server/src/agents/persistence/PersistenceAgent.js
index 031d55f..62832ff 100644
--- a/server/src/agents/persistence/PersistenceAgent.js
+++ b/server/src/agents/persistence/PersistenceAgent.js
@@ -39,6 +39,7 @@ export class PersistenceAgent extends BaseAgent {
const functionNodes = input?.functionNodes || {};
const embeddings = input?.embeddings || {};
const enriched = input?.enriched || {};
+ const contracts = input?.contracts || {};
const topology = input?.topology || {};
if (!jobId) {
@@ -116,11 +117,26 @@ export class PersistenceAgent extends BaseAgent {
}
}
+ const contractPaths = [];
+ const contractRoutes = [];
+ const contractEnvDeps = [];
+ const contractExtServices = [];
+ const contractCaching = [];
+
+ for (const [filePath, contract] of Object.entries(contracts)) {
+ contractPaths.push(filePath);
+ contractRoutes.push(toJson(contract?.routes, []));
+ contractEnvDeps.push(toJson(contract?.envDependencies, []));
+ contractExtServices.push(toJson(contract?.externalServices, []));
+ contractCaching.push(toJson(contract?.cachingPatterns, []));
+ }
+
const recordsAttempted =
nodePaths.length +
edgeSourcePaths.length +
embeddingPaths.length +
- functionNodePaths.length;
+ functionNodePaths.length +
+ contractPaths.length;
let recordsWritten = 0;
let client;
@@ -256,6 +272,45 @@ export class PersistenceAgent extends BaseAgent {
recordsWritten += functionNodeResult.rowCount || 0;
}
+ await client.query('SAVEPOINT after_function_nodes');
+
+ if (contractPaths.length > 0) {
+ const contractResult = await client.query(
+ `
+ INSERT INTO api_contracts (
+ job_id,
+ file_path,
+ routes,
+ env_deps,
+ ext_services,
+ caching
+ )
+ SELECT
+ $1,
+ unnest($2::text[]),
+ unnest($3::jsonb[]),
+ unnest($4::jsonb[]),
+ unnest($5::jsonb[]),
+ unnest($6::jsonb[])
+ ON CONFLICT (job_id, file_path) DO UPDATE
+ SET routes = EXCLUDED.routes,
+ env_deps = EXCLUDED.env_deps,
+ ext_services = EXCLUDED.ext_services,
+ caching = EXCLUDED.caching
+ `,
+ [
+ jobId,
+ contractPaths,
+ contractRoutes,
+ contractEnvDeps,
+ contractExtServices,
+ contractCaching,
+ ],
+ );
+
+ recordsWritten += contractResult.rowCount || 0;
+ }
+
await client.query('COMMIT');
const confidence = scorePersistence({
@@ -273,6 +328,7 @@ export class PersistenceAgent extends BaseAgent {
edges: edgeSourcePaths.length,
embeddings: embeddingPaths.length,
functionNodes: functionNodePaths.length,
+ contracts: contractPaths.length,
},
durationMs: Date.now() - start,
},
diff --git a/server/src/agents/query/QueryAgent.js b/server/src/agents/query/QueryAgent.js
index 09d4a12..cf2870f 100644
--- a/server/src/agents/query/QueryAgent.js
+++ b/server/src/agents/query/QueryAgent.js
@@ -1,10 +1,11 @@
import crypto from 'crypto';
-import OpenAI from 'openai';
import { BaseAgent } from '../core/BaseAgent.js';
import { pgPool, redisClient } from '../../infrastructure/connections.js';
+import { createChatClient, createEmbeddingClient } from '../../services/ai/llmProvider.js';
-const DEFAULT_MODEL = process.env.OPENAI_MODEL || 'gpt-4o-mini';
-const DEFAULT_EMBEDDING_MODEL = process.env.OPENAI_EMBEDDING_MODEL || 'text-embedding-3-small';
+const DEFAULT_MODEL = process.env.AI_MODEL || process.env.OPENAI_MODEL || 'gpt-4o-mini';
+const DEFAULT_EMBEDDING_MODEL =
+ process.env.AI_EMBEDDING_MODEL || process.env.OPENAI_EMBEDDING_MODEL || 'text-embedding-3-small';
const CACHE_TTL_SECONDS = Number(process.env.AI_CACHE_TTL_SECONDS || 3600);
const SEMANTIC_CANDIDATE_LIMIT = 20;
const CONTEXT_LIMIT = 8;
@@ -120,15 +121,12 @@ export class QueryAgent extends BaseAgent {
maxRetries = 1;
timeoutMs = 90_000;
- constructor({ db, redis, openaiClient } = {}) {
+ constructor({ db, redis, llmClient, embeddingClient } = {}) {
super();
this.db = db || pgPool;
this.redis = redis || redisClient;
- this.openai =
- openaiClient ||
- (process.env.OPENAI_API_KEY
- ? new OpenAI({ apiKey: process.env.OPENAI_API_KEY })
- : null);
+ this.llmClient = llmClient || createChatClient();
+ this.embeddingClient = embeddingClient || createEmbeddingClient();
this.model = DEFAULT_MODEL;
this.embeddingModel = DEFAULT_EMBEDDING_MODEL;
this.cacheTtlSeconds = Number.isFinite(CACHE_TTL_SECONDS) ? CACHE_TTL_SECONDS : 3600;
@@ -156,13 +154,26 @@ export class QueryAgent extends BaseAgent {
});
}
- if (!this.openai) {
+ if (!this.llmClient.isConfigured()) {
return this.buildResult({
jobId,
status: 'failed',
confidence: 0,
data: {},
- errors: [{ code: 500, message: 'OPENAI_API_KEY is missing for QueryAgent.' }],
+ errors: [{ code: 500, message: 'AI provider is not configured for QueryAgent.' }],
+ warnings,
+ metrics: {},
+ processingTimeMs: Date.now() - start,
+ });
+ }
+
+ if (!this.embeddingClient.isConfigured()) {
+ return this.buildResult({
+ jobId,
+ status: 'failed',
+ confidence: 0,
+ data: {},
+ errors: [{ code: 500, message: 'Embedding provider is not configured for QueryAgent.' }],
warnings,
metrics: {},
processingTimeMs: Date.now() - start,
@@ -208,7 +219,7 @@ export class QueryAgent extends BaseAgent {
});
}
- const embeddingResponse = await this.openai.embeddings.create({
+ const embeddingResponse = await this.embeddingClient.createEmbedding({
model: this.embeddingModel,
input: question,
});
@@ -246,15 +257,15 @@ export class QueryAgent extends BaseAgent {
const reranked = keywordRerank(question, candidates);
const topFiles = reranked.slice(0, CONTEXT_LIMIT);
- const completion = await this.openai.chat.completions.create({
+ const completion = await this.llmClient.createChatCompletion({
model: this.model,
temperature: 0.1,
- max_tokens: 320,
- response_format: { type: 'json_object' },
+ maxTokens: 320,
+ responseFormat: { type: 'json_object' },
messages: [{ role: 'user', content: buildAnswerPrompt(question, topFiles) }],
});
- const rawMessage = completion?.choices?.[0]?.message?.content || '{}';
+ const rawMessage = completion?.content || '{}';
let parsed;
try {
parsed = JSON.parse(rawMessage);
@@ -275,7 +286,7 @@ export class QueryAgent extends BaseAgent {
confidence: llmConfidence,
retrievedFiles: topFiles.length,
queryEmbeddingTokens: Number(embeddingResponse?.usage?.total_tokens || 0),
- completionTokens: Number(completion?.usage?.completion_tokens || 0),
+ completionTokens: Number(completion?.usage?.completion_tokens || completion?.usage?.output_tokens || 0),
};
await this._saveQuery({
diff --git a/server/src/agents/scanner/ScannerAgent.js b/server/src/agents/scanner/ScannerAgent.js
index 0912b84..12816eb 100644
--- a/server/src/agents/scanner/ScannerAgent.js
+++ b/server/src/agents/scanner/ScannerAgent.js
@@ -17,12 +17,16 @@ const DEFAULT_SKIP_DIRS = new Set([
]);
const ALLOWED_EXTENSIONS = new Set([
- '.js',
- '.ts',
- '.jsx',
- '.tsx',
+ '.js', '.ts', '.jsx', '.tsx',
'.py',
+ '.java',
'.go',
+ '.rs',
+ '.rb',
+ '.cs',
+ '.kt', '.kts',
+ '.php',
+ '.sql',
]);
function normalizeRelative(filePath, rootDir) {
diff --git a/server/src/analyze/controllers/analyze.controller.js b/server/src/analyze/controllers/analyze.controller.js
index abe403e..e470dd8 100644
--- a/server/src/analyze/controllers/analyze.controller.js
+++ b/server/src/analyze/controllers/analyze.controller.js
@@ -6,11 +6,15 @@ import {
pickLocalDirectory,
} from '../services/localPicker.service.js';
import {
+ fetchRepoFileContent,
+ fetchRepoContents,
fetchOwnedRepositories,
fetchRepoBranches,
fetchRepoDetails,
+ fetchRepoTree,
parseGitHubRepoUrl,
resolvePublicRepository,
+ updateRepoFileContent,
} from '../services/githubApi.service.js';
import { pgPool, redisClient } from '../../infrastructure/connections.js';
import {
@@ -518,3 +522,233 @@ export async function listBranchesController(req, res, next) {
return next(err);
}
}
+
/**
 * Derive the target repository coordinates from a request's query string.
 *
 * Accepts either explicit `owner`/`repo` params or a GitHub `url` to parse as
 * a fallback. The GitHub token cookie is only attached for `source=owned`.
 *
 * @throws {Error} statusCode 400 when neither owner/repo nor a usable URL is given.
 */
function resolveRepoFromQuery(req) {
  const isOwned = req.query.source === 'owned';
  const asTrimmed = (value) => (typeof value === 'string' ? value.trim() : '');

  let owner = asTrimmed(req.query.owner);
  let repo = asTrimmed(req.query.repo);

  // Fall back to parsing a full GitHub URL when owner/repo are not explicit.
  if ((!owner || !repo) && typeof req.query.url === 'string') {
    const parsed = parseGitHubRepoUrl(req.query.url);
    owner = parsed.owner;
    repo = parsed.repo;
  }

  if (!owner || !repo) {
    const err = new Error('Repository lookup requires owner/repo or a valid GitHub URL.');
    err.statusCode = 400;
    throw err;
  }

  return {
    source: isOwned ? 'owned' : 'public',
    token: isOwned ? req.cookies?.github_token : undefined,
    owner,
    repo,
    branch: asTrimmed(req.query.branch),
  };
}
+
/**
 * GET handler: summarize a repository's top level as directory buckets plus
 * root-level files, built from the recursive git tree.
 *
 * Each directory bucket carries the total count of blobs anywhere beneath it
 * and the set of its immediate second-level child names.
 */
export async function listRepositoryStructureController(req, res, next) {
  try {
    const { token, owner, repo, branch } = resolveRepoFromQuery(req);

    const [repoDetails, repoTree] = await Promise.all([
      fetchRepoDetails({ owner, repo, token }),
      fetchRepoTree({ owner, repo, ref: branch, token }),
    ]);

    const dirIndex = new Map();
    const rootFiles = new Map();

    for (const entry of repoTree.tree) {
      const entryPath = String(entry?.path || '').trim();
      if (!entryPath) continue;

      const segments = entryPath.split('/').filter(Boolean);
      if (segments.length === 0) continue;

      const [head, second] = segments;

      // A blob directly at the root is a plain file, not a directory bucket.
      if (segments.length === 1 && entry.type === 'blob') {
        rootFiles.set(head, {
          name: head,
          path: head,
          size: Number.isFinite(entry?.size) ? entry.size : 0,
          type: 'file',
        });
        continue;
      }

      let bucket = dirIndex.get(head);
      if (!bucket) {
        bucket = { name: head, path: head, fileCount: 0, subdirectories: new Set() };
        dirIndex.set(head, bucket);
      }

      if (entry.type === 'blob') {
        bucket.fileCount += 1;
      }
      // NOTE(review): this records the *second path segment* regardless of its
      // type, so direct child files also appear in `subdirectories` — matches
      // the original behavior; confirm whether that is intended.
      if (second !== undefined) {
        bucket.subdirectories.add(second);
      }
    }

    const directories = [...dirIndex.values()]
      .map(({ name, path, fileCount, subdirectories }) => ({
        name,
        path,
        fileCount,
        subdirectories: [...subdirectories].filter(Boolean).sort((a, b) => a.localeCompare(b)),
      }))
      .sort((a, b) => a.name.localeCompare(b.name));

    const files = [...rootFiles.values()].sort((a, b) => a.name.localeCompare(b.name));

    return res.status(200).json({
      repository: {
        owner: repoDetails.owner,
        repo: repoDetails.repo,
        fullName: repoDetails.fullName,
        branch: branch || repoDetails.defaultBranch || null,
        defaultBranch: repoDetails.defaultBranch,
        htmlUrl: `https://github.com/${repoDetails.owner}/${repoDetails.repo}`,
      },
      truncated: repoTree.truncated,
      directories,
      files,
    });
  } catch (err) {
    return next(err);
  }
}
+
/**
 * GET handler: list the entries of a single repository directory.
 * The `path` query param is normalized by stripping leading/trailing slashes.
 */
export async function listRepositoryDirectoryController(req, res, next) {
  try {
    const { token, owner, repo, branch } = resolveRepoFromQuery(req);

    const rawPath = typeof req.query.path === 'string' ? req.query.path : '';
    const requestedPath = rawPath.trim().replace(/^\/+/, '').replace(/\/+$/, '');

    const [repoDetails, entries] = await Promise.all([
      fetchRepoDetails({ owner, repo, token }),
      fetchRepoContents({ owner, repo, path: requestedPath, ref: branch, token }),
    ]);

    return res.status(200).json({
      repository: {
        owner: repoDetails.owner,
        repo: repoDetails.repo,
        fullName: repoDetails.fullName,
        branch: branch || repoDetails.defaultBranch || null,
        defaultBranch: repoDetails.defaultBranch,
        htmlUrl: `https://github.com/${repoDetails.owner}/${repoDetails.repo}`,
      },
      path: requestedPath,
      entries,
    });
  } catch (err) {
    return next(err);
  }
}
+
/**
 * GET handler: fetch a single repository file's decoded content.
 * Requires a non-empty `path`; `canEdit` is true only for owned-source requests.
 */
export async function getRepositoryFileController(req, res, next) {
  try {
    const { token, owner, repo, branch } = resolveRepoFromQuery(req);

    const rawPath = typeof req.query.path === 'string' ? req.query.path : '';
    const requestedPath = rawPath.trim().replace(/^\/+/, '').replace(/\/+$/, '');

    if (!requestedPath) {
      const err = new Error('File path is required to load repository file content.');
      err.statusCode = 400;
      throw err;
    }

    const [repoDetails, file] = await Promise.all([
      fetchRepoDetails({ owner, repo, token }),
      fetchRepoFileContent({ owner, repo, path: requestedPath, ref: branch, token }),
    ]);

    return res.status(200).json({
      repository: {
        owner: repoDetails.owner,
        repo: repoDetails.repo,
        fullName: repoDetails.fullName,
        branch: branch || repoDetails.defaultBranch || null,
        defaultBranch: repoDetails.defaultBranch,
        htmlUrl: `https://github.com/${repoDetails.owner}/${repoDetails.repo}`,
      },
      file,
      canEdit: req.query.source === 'owned',
    });
  } catch (err) {
    return next(err);
  }
}
+
/**
 * PUT handler: commit an updated file to an owned repository.
 *
 * Resolution mirrors the GET controllers (explicit owner/repo with a URL
 * fallback), but writes are rejected with 403 unless `source` is 'owned'.
 * Guard order is significant: a missing repo yields 400 before the 403 check.
 */
export async function updateRepositoryFileController(req, res, next) {
  try {
    const isOwned = req.body.source === 'owned';
    const token = isOwned ? req.cookies?.github_token : undefined;

    let owner = req.body.owner || '';
    let repo = req.body.repo || '';

    if ((!owner || !repo) && typeof req.body.url === 'string') {
      const parsed = parseGitHubRepoUrl(req.body.url);
      owner = parsed.owner;
      repo = parsed.repo;
    }

    if (!owner || !repo) {
      const err = new Error('Repository update requires owner/repo or a valid GitHub URL.');
      err.statusCode = 400;
      throw err;
    }

    if (!isOwned) {
      const err = new Error('Editing files is only supported for authenticated owned repositories.');
      err.statusCode = 403;
      throw err;
    }

    const updated = await updateRepoFileContent({
      owner,
      repo,
      path: req.body.path,
      ref: req.body.branch,
      token,
      content: req.body.content,
      sha: req.body.sha,
      message: req.body.message,
    });

    return res.status(200).json({
      file: {
        path: updated.path,
        sha: updated.sha,
        htmlUrl: updated.htmlUrl,
        commitSha: updated.commitSha,
      },
    });
  } catch (err) {
    return next(err);
  }
}
diff --git a/server/src/analyze/middleware/validate.middleware.js b/server/src/analyze/middleware/validate.middleware.js
index e12c03e..4ba461e 100644
--- a/server/src/analyze/middleware/validate.middleware.js
+++ b/server/src/analyze/middleware/validate.middleware.js
@@ -95,3 +95,91 @@ export function validateBranchQuery(req, _res, next) {
return next();
}
+
/**
 * Middleware: validate and normalize query params for repository browsing.
 * Requires either a GitHub URL or an owner/repo pair; trims string params and
 * strips leading/trailing slashes from `path`.
 */
export function validateRepoBrowserQuery(req, _res, next) {
  const ownerOk = isNonEmptyString(req.query?.owner);
  const repoOk = isNonEmptyString(req.query?.repo);
  const urlOk = isNonEmptyString(req.query?.url);

  if (!urlOk && (!ownerOk || !repoOk)) {
    return fail(next, 'Repository query requires owner/repo or a valid GitHub URL.');
  }

  if (ownerOk) req.query.owner = req.query.owner.trim();
  if (repoOk) req.query.repo = req.query.repo.trim();
  if (urlOk) req.query.url = req.query.url.trim();

  if (isNonEmptyString(req.query?.branch)) {
    req.query.branch = req.query.branch.trim();
  }
  if (isNonEmptyString(req.query?.path)) {
    req.query.path = req.query.path.trim().replace(/^\/+/, '').replace(/\/+$/, '');
  }

  return next();
}
+
/**
 * Middleware: validate and normalize query params for a single-file lookup.
 * A non-empty `path` is mandatory and is checked before the repo identifiers.
 */
export function validateRepoFileQuery(req, _res, next) {
  const ownerOk = isNonEmptyString(req.query?.owner);
  const repoOk = isNonEmptyString(req.query?.repo);
  const urlOk = isNonEmptyString(req.query?.url);

  if (!isNonEmptyString(req.query?.path)) {
    return fail(next, 'Repository file query requires a non-empty "path" value.');
  }

  if (!urlOk && (!ownerOk || !repoOk)) {
    return fail(next, 'Repository file query requires owner/repo or a valid GitHub URL.');
  }

  if (ownerOk) req.query.owner = req.query.owner.trim();
  if (repoOk) req.query.repo = req.query.repo.trim();
  if (urlOk) req.query.url = req.query.url.trim();
  req.query.path = req.query.path.trim().replace(/^\/+/, '').replace(/\/+$/, '');

  if (isNonEmptyString(req.query?.branch)) {
    req.query.branch = req.query.branch.trim();
  }

  return next();
}
+
/**
 * Middleware: validate a file-update body and replace `req.body` with a
 * sanitized copy containing only the whitelisted, trimmed fields.
 * `content` is kept verbatim (it is the file payload); `sha` is required so
 * the GitHub update is a compare-and-swap.
 */
export function validateRepoFileUpdateBody(req, _res, next) {
  const body = req.body ?? {};

  const ownerOk = isNonEmptyString(body.owner);
  const repoOk = isNonEmptyString(body.repo);
  const urlOk = isNonEmptyString(body.url);

  if (!urlOk && (!ownerOk || !repoOk)) {
    return fail(next, 'Repository file update requires owner/repo or a valid GitHub URL.');
  }
  if (!isNonEmptyString(body.path)) {
    return fail(next, 'Repository file update requires a non-empty "path" string.');
  }
  if (typeof body.content !== 'string') {
    return fail(next, 'Repository file update requires "content" as a string.');
  }
  if (!isNonEmptyString(body.sha)) {
    return fail(next, 'Repository file update requires a non-empty "sha" string.');
  }

  const sanitized = { source: body.source === 'owned' ? 'owned' : 'public' };
  if (ownerOk) sanitized.owner = body.owner.trim();
  if (repoOk) sanitized.repo = body.repo.trim();
  if (urlOk) sanitized.url = body.url.trim();
  sanitized.path = body.path.trim().replace(/^\/+/, '').replace(/\/+$/, '');
  sanitized.content = body.content;
  sanitized.sha = body.sha.trim();
  if (isNonEmptyString(body.branch)) sanitized.branch = body.branch.trim();
  if (isNonEmptyString(body.message)) sanitized.message = body.message.trim();

  req.body = sanitized;
  return next();
}
diff --git a/server/src/analyze/routes/analyze.routes.js b/server/src/analyze/routes/analyze.routes.js
index 94a64ad..8f117bf 100644
--- a/server/src/analyze/routes/analyze.routes.js
+++ b/server/src/analyze/routes/analyze.routes.js
@@ -5,15 +5,22 @@ import {
validateBranchQuery,
validateLocalPathBody,
validatePublicRepoBody,
+ validateRepoBrowserQuery,
+ validateRepoFileQuery,
+ validateRepoFileUpdateBody,
} from '../middleware/validate.middleware.js';
import {
analyzeController,
browseLocalPathController,
+ getRepositoryFileController,
listAnalysisHistoryController,
listBranchesController,
+ listRepositoryDirectoryController,
+ listRepositoryStructureController,
listOwnedReposController,
localPickerCapabilitiesController,
resolvePublicRepoController,
+ updateRepositoryFileController,
validateLocalPathController,
} from '../controllers/analyze.controller.js';
@@ -35,5 +42,9 @@ router.post('/local/validate', analyzeLimiter, validateLocalPathBody, validateLo
router.post('/github/public/resolve', analyzeLimiter, validatePublicRepoBody, resolvePublicRepoController);
router.get('/github/repos', analyzeLimiter, listOwnedReposController);
router.get('/github/branches', analyzeLimiter, validateBranchQuery, listBranchesController);
+router.get('/github/structure', analyzeLimiter, validateRepoBrowserQuery, listRepositoryStructureController);
+router.get('/github/contents', analyzeLimiter, validateRepoBrowserQuery, listRepositoryDirectoryController);
+router.get('/github/file', analyzeLimiter, validateRepoFileQuery, getRepositoryFileController);
+router.put('/github/file', analyzeLimiter, validateRepoFileUpdateBody, updateRepositoryFileController);
export default router;
diff --git a/server/src/analyze/services/githubApi.service.js b/server/src/analyze/services/githubApi.service.js
index 370d26a..6086231 100644
--- a/server/src/analyze/services/githubApi.service.js
+++ b/server/src/analyze/services/githubApi.service.js
@@ -68,15 +68,18 @@ function parseGitHubRateLimitError(response, context = 'GitHub API request') {
throw err;
}
-async function githubFetchRaw(urlOrPath, { token, headers = {} } = {}) {
+async function githubFetchRaw(urlOrPath, { token, headers = {}, method = 'GET', body } = {}) {
const targetUrl = urlOrPath.startsWith('http') ? urlOrPath : `${GITHUB_API_BASE}${urlOrPath}`;
return fetch(targetUrl, {
+ method,
headers: {
Accept: 'application/vnd.github+json',
'User-Agent': 'codegraph-ai',
+ ...(body ? { 'Content-Type': 'application/json' } : {}),
...(token ? { Authorization: `Bearer ${token}` } : {}),
...headers,
},
+ ...(body ? { body: JSON.stringify(body) } : {}),
});
}
@@ -177,6 +180,176 @@ export async function fetchRepoBranches({ owner, repo, token }) {
}));
}
/**
 * Build a GitHub contents-API path for a repo, optional sub-path and ref.
 * Each path segment is trimmed and URI-encoded individually so slashes keep
 * their routing meaning; empty segments are dropped.
 */
function buildRepoContentsPath({ owner, repo, path: repoPath = '', ref = '' }) {
  const segments = String(repoPath || '')
    .split('/')
    .map((piece) => piece.trim())
    .filter((piece) => piece.length > 0)
    .map((piece) => encodeURIComponent(piece));

  let apiPath = `/repos/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}/contents`;
  if (segments.length > 0) {
    apiPath += `/${segments.join('/')}`;
  }

  return ref ? `${apiPath}?ref=${encodeURIComponent(ref)}` : apiPath;
}
+
/**
 * Map a raw GitHub contents-API entry to the internal camelCase shape,
 * defaulting every missing field so callers never see undefined.
 */
function normalizeContentEntry(entry) {
  const source = entry ?? {};
  return {
    name: source.name || '',
    path: source.path || '',
    type: source.type || 'file',
    size: Number.isFinite(source.size) ? source.size : 0,
    sha: source.sha || null,
    htmlUrl: source.html_url || null,
    downloadUrl: source.download_url || null,
  };
}
+
/**
 * List a repository directory via the contents API.
 * Returns [] when the API responds with a single-file object instead of an
 * array. Entries are normalized and sorted directories-first, then by name.
 */
export async function fetchRepoContents({ owner, repo, path = '', ref = '', token }) {
  const data = await githubFetch(buildRepoContentsPath({ owner, repo, path, ref }), { token });

  if (!Array.isArray(data)) {
    return [];
  }

  const byDirThenName = (a, b) => {
    const aIsDir = a.type === 'dir';
    const bIsDir = b.type === 'dir';
    if (aIsDir !== bIsDir) {
      return aIsDir ? -1 : 1;
    }
    return a.name.localeCompare(b.name);
  };

  return data.map(normalizeContentEntry).sort(byDirThenName);
}
+
/**
 * Fetch the recursive git tree for a repo at the given ref (HEAD by default).
 * Returns normalized entries plus GitHub's `truncated` flag, which signals the
 * tree was too large to list fully.
 */
export async function fetchRepoTree({ owner, repo, ref = '', token }) {
  const treeUrl =
    `/repos/${encodeURIComponent(owner)}/${encodeURIComponent(repo)}` +
    `/git/trees/${encodeURIComponent(ref || 'HEAD')}?recursive=1`;
  const data = await githubFetch(treeUrl, { token });

  const rawTree = Array.isArray(data?.tree) ? data.tree : [];
  const tree = rawTree.map((entry) => ({
    path: entry?.path || '',
    type: entry?.type || 'blob',
    size: Number.isFinite(entry?.size) ? entry.size : 0,
    sha: entry?.sha || null,
  }));

  return { truncated: Boolean(data?.truncated), tree };
}
+}
+
/**
 * Decode a base64 payload from the GitHub contents API into UTF-8 text.
 * Any decode failure is surfaced as a 422 error.
 */
function decodeGitHubBase64Content(content) {
  const encoded = String(content || '');
  try {
    return Buffer.from(encoded, 'base64').toString('utf8');
  } catch {
    const decodeError = new Error('Failed to decode GitHub file content.');
    decodeError.statusCode = 422;
    throw decodeError;
  }
}
+
/**
 * Encode UTF-8 text as base64 for the GitHub contents-update API.
 * Nullish/falsy input encodes as the empty string.
 */
function encodeGitHubBase64Content(content) {
  const raw = String(content || '');
  return Buffer.from(raw, 'utf8').toString('base64');
}
+
/**
 * Fetch a single file's metadata and decoded UTF-8 content.
 * Rejects directory responses (400) and non-base64 encodings (422).
 * GitHub wraps base64 content with newlines, which are stripped before decode.
 */
export async function fetchRepoFileContent({ owner, repo, path, ref = '', token }) {
  const data = await githubFetch(buildRepoContentsPath({ owner, repo, path, ref }), { token });

  const isSingleFile = !Array.isArray(data) && data?.type === 'file';
  if (!isSingleFile) {
    const err = new Error('Requested path is not a file.');
    err.statusCode = 400;
    throw err;
  }

  const encoding = data?.encoding || 'base64';
  if (encoding !== 'base64') {
    const err = new Error(`Unsupported GitHub file encoding: ${encoding}.`);
    err.statusCode = 422;
    throw err;
  }

  const content = decodeGitHubBase64Content(String(data?.content || '').replace(/\n/g, ''));

  return {
    name: data?.name || path.split('/').pop() || 'unknown-file',
    path: data?.path || path,
    sha: data?.sha || null,
    size: Number.isFinite(data?.size) ? data.size : 0,
    htmlUrl: data?.html_url || null,
    downloadUrl: data?.download_url || null,
    content,
    encoding: 'utf8',
  };
}
+
/**
 * Commit new content for an existing file via the GitHub contents API.
 *
 * Requires an auth token (401) and the file's current SHA (400) so the write
 * is a compare-and-swap. Non-OK responses are routed through
 * parseGitHubRateLimitError for error shaping.
 */
export async function updateRepoFileContent({
  owner,
  repo,
  path,
  ref = '',
  token,
  content,
  sha,
  message,
}) {
  if (!token) {
    const err = new Error('GitHub authentication required to update files.');
    err.statusCode = 401;
    throw err;
  }
  if (!sha) {
    const err = new Error('A file SHA is required to update file content.');
    err.statusCode = 400;
    throw err;
  }

  const payload = {
    message: message || `Update ${path} via CodeGraph AI`,
    content: encodeGitHubBase64Content(content),
    sha,
  };
  if (ref) {
    payload.branch = ref;
  }

  const response = await githubFetchRaw(buildRepoContentsPath({ owner, repo, path }), {
    token,
    method: 'PUT',
    body: payload,
  });

  if (!response.ok) {
    // NOTE(review): assumes this helper throws for every non-OK status, not
    // just rate limits — confirm, otherwise we fall through to .json() below.
    parseGitHubRateLimitError(response, `GitHub file update (${path})`);
  }

  const data = await response.json();
  const committed = data?.content;

  return {
    path: committed?.path || path,
    sha: committed?.sha || null,
    htmlUrl: committed?.html_url || `https://github.com/${owner}/${repo}/blob/${ref || 'main'}/${path}`,
    commitSha: data?.commit?.sha || null,
  };
}
+
export async function fetchOwnedRepositories({ token }) {
if (!token) {
const err = new Error('GitHub authentication required. Please log in with GitHub.');
diff --git a/server/src/api/ai/routes/ai.routes.js b/server/src/api/ai/routes/ai.routes.js
index 0927901..b879ccc 100644
--- a/server/src/api/ai/routes/ai.routes.js
+++ b/server/src/api/ai/routes/ai.routes.js
@@ -1,15 +1,16 @@
import { Router } from 'express';
import jwt from 'jsonwebtoken';
import rateLimit from 'express-rate-limit';
-import OpenAI from 'openai';
import { QueryAgent } from '../../../agents/query/QueryAgent.js';
import { AnalysisAgent } from '../../../agents/analysis/AnalysisAgent.js';
+import { SnippetAnalyzerAgent } from '../../../agents/analysis/SnippetAnalyzerAgent.js';
import { pgPool, redisClient } from '../../../infrastructure/connections.js';
+import { requirePlan } from '../../../middleware/planGuard.middleware.js';
+import { createChatClient } from '../../../services/ai/llmProvider.js';
const router = Router();
-const openaiClient = process.env.OPENAI_API_KEY
- ? new OpenAI({ apiKey: process.env.OPENAI_API_KEY })
- : null;
+const chatClient = createChatClient();
+const defaultChatModel = process.env.AI_MODEL || process.env.OPENAI_MODEL || 'gpt-4o-mini';
const aiLimiter = rateLimit({
windowMs: 60 * 1000,
@@ -136,6 +137,91 @@ function toGraphFromRows(nodeRows = [], edgeRows = []) {
router.use(aiLimiter);
// POST /suggest-refactor — plan-gated: ask the LLM for refactoring advice on a
// single analyzed file, grounded in its stored graph metrics and summary.
router.post('/suggest-refactor', requirePlan(), async (req, res, next) => {
  const jobId = String(req.body?.jobId || '').trim();
  const filePath = String(req.body?.filePath || '').trim();

  if (!jobId || !filePath) {
    return res.status(400).json({ error: 'jobId and filePath are required.' });
  }

  try {
    const lookup = await pgPool.query(
      `
      SELECT file_path, file_type, declarations, metrics, summary
      FROM graph_nodes
      WHERE job_id = $1 AND file_path = $2
      LIMIT 1
      `,
      [jobId, filePath],
    );

    if (lookup.rowCount === 0) {
      return res.status(404).json({ error: 'File not found.' });
    }

    if (!chatClient.isConfigured()) {
      return res.status(503).json({ error: 'AI provider is not configured.' });
    }

    const node = lookup.rows[0];
    const exportedNames = (node.declarations || [])
      .map((declaration) => declaration?.name)
      .filter(Boolean);

    const prompt = `You are a senior software architect reviewing a file in a dependency graph analysis.

File: ${node.file_path}
Type: ${node.file_type}
Lines of code: ${node.metrics?.loc || 'unknown'}
In-degree (files that import this): ${node.metrics?.inDegree || 0}
Out-degree (files this imports): ${node.metrics?.outDegree || 0}
Exports: ${exportedNames.join(', ') || 'none'}
Summary: ${node.summary || 'no summary available'}

Respond with a JSON object:
{
  "concerns": ["list of specific architectural concerns"],
  "suggestions": ["list of concrete refactoring steps"],
  "priority": "high | medium | low",
  "estimatedEffort": "hours estimate as a string, e.g. '2-4 hours'"
}
Only respond with the JSON object.`;

    const completion = await chatClient.createChatCompletion({
      model: defaultChatModel,
      maxTokens: 400,
      temperature: 0.2,
      messages: [{ role: 'user', content: prompt }],
    });

    const replyText = completion?.content?.trim() || '';

    // If the model did not return valid JSON, degrade gracefully: treat the
    // raw text as a single suggestion rather than failing the request.
    let analysis;
    try {
      analysis = JSON.parse(replyText);
    } catch {
      analysis = {
        concerns: [],
        suggestions: replyText ? [replyText] : [],
        priority: 'medium',
        estimatedEffort: 'unknown',
      };
    }

    const effort =
      typeof analysis?.estimatedEffort === 'string' && analysis.estimatedEffort.trim()
        ? analysis.estimatedEffort.trim()
        : 'unknown';

    return res.status(200).json({
      filePath,
      concerns: Array.isArray(analysis?.concerns) ? analysis.concerns : [],
      suggestions: Array.isArray(analysis?.suggestions) ? analysis.suggestions : [],
      priority: ['high', 'medium', 'low'].includes(analysis?.priority) ? analysis.priority : 'medium',
      estimatedEffort: effort,
    });
  } catch (error) {
    return next(error);
  }
});
+
router.get('/queries', async (req, res, next) => {
const authUser = getAuthUser(req);
if (!authUser?.id) {
@@ -253,21 +339,15 @@ router.post('/explain/stream', async (req, res, next) => {
return res.status(400).json({ error: 'question and jobId are required.' });
}
- if (!openaiClient) {
- return res.status(503).json({ error: 'OpenAI is not configured for streaming.' });
+ if (!chatClient.isConfigured()) {
+ return res.status(503).json({ error: 'AI provider is not configured for streaming.' });
}
let clientClosed = false;
- let stream = null;
+ let streamSession = null;
const closeStream = () => {
- if (typeof stream?.abort === 'function') {
- stream.abort();
- }
-
- if (typeof stream?.controller?.abort === 'function') {
- stream.controller.abort();
- }
+ streamSession?.cancel?.();
};
const writeEvent = (payload) => {
@@ -309,25 +389,18 @@ router.post('/explain/stream', async (req, res, next) => {
res.flushHeaders();
}
- stream = await openaiClient.chat.completions.stream({
- model: process.env.OPENAI_MODEL || 'gpt-4o-mini',
- max_tokens: 500,
- messages: [
- {
- role: 'user',
- content: question,
- },
- ],
+ streamSession = await chatClient.createStream({
+ model: defaultChatModel,
+ maxTokens: 500,
+ messages: [{ role: 'user', content: question }],
+ onText: (text) => {
+ if (!clientClosed) {
+ writeEvent({ text });
+ }
+ },
});
- for await (const chunk of stream) {
- if (clientClosed) break;
-
- const text = chunk?.choices?.[0]?.delta?.content || '';
- if (text) {
- writeEvent({ text });
- }
- }
+ await streamSession.consume();
if (!clientClosed) {
res.write('data: [DONE]\n\n');
@@ -417,4 +490,66 @@ router.post('/impact', async (req, res, next) => {
}
});
// POST /snippet-impact — run the snippet analyzer for an authenticated user's
// own job. Ownership is verified before any analysis work happens.
router.post('/snippet-impact', async (req, res, next) => {
  const authUser = getAuthUser(req);
  if (!authUser?.id) {
    return res.status(401).json({ error: 'Authentication required.' });
  }

  const jobId = String(req.body?.jobId || '').trim();
  const filePath = String(req.body?.filePath || '').trim();
  const snippet = String(req.body?.snippet || '').trim();
  // NOTE(review): NaN line bounds are forwarded to the agent unchecked —
  // presumably it tolerates them; confirm.
  const lineStart = Number.parseInt(req.body?.lineStart, 10);
  const lineEnd = Number.parseInt(req.body?.lineEnd, 10);

  if (!jobId || !filePath || !snippet) {
    return res.status(400).json({ error: 'jobId, filePath, and snippet are required.' });
  }

  try {
    const userId = await resolveDatabaseUserId(authUser);
    if (!userId) {
      return res.status(500).json({ error: 'Failed to resolve authenticated user.' });
    }

    const ownership = await pgPool.query(
      `
      SELECT 1
      FROM analysis_jobs
      WHERE id = $1 AND user_id = $2
      LIMIT 1
      `,
      [jobId, userId],
    );

    if (ownership.rowCount === 0) {
      return res.status(404).json({ error: 'Analysis job not found for this user.' });
    }

    const analyzer = new SnippetAnalyzerAgent({ db: pgPool });
    const outcome = await analyzer.process(
      { jobId, filePath, snippet, lineStart, lineEnd },
      { jobId },
    );

    if (outcome.status === 'failed') {
      const statusCode = Number(outcome.errors?.[0]?.code) || 400;
      return res.status(statusCode).json({
        error: outcome.errors?.[0]?.message || 'Unable to analyze snippet impact.',
        details: outcome.errors || [],
      });
    }

    return res.status(200).json(outcome.data);
  } catch (error) {
    return next(error);
  }
});
+
export default router;
diff --git a/server/src/api/graph/routes/graph.routes.js b/server/src/api/graph/routes/graph.routes.js
index 087fda7..3bf2e6a 100644
--- a/server/src/api/graph/routes/graph.routes.js
+++ b/server/src/api/graph/routes/graph.routes.js
@@ -161,6 +161,40 @@ router.post('/:jobId/share', shareLimiter, async (req, res, next) => {
}
});
+router.get('/:jobId/heatmap', async (req, res, next) => { // TODO(review): no auth/ownership check — any jobId's heatmap is readable (IDOR); mirror the /snippet-impact ownership guard
+ const { jobId } = req.params;
+
+ if (!jobId) {
+ return res.status(400).json({ error: 'jobId is required.' });
+ }
+
+ try {
+ const result = await pgPool.query(
+ `
+ SELECT file_path, file_type, metrics,
+ COALESCE((metrics->>'inDegree')::int, 0) * COALESCE((metrics->>'complexity')::numeric, 1) AS risk_score
+ FROM graph_nodes
+ WHERE job_id = $1
+ ORDER BY risk_score DESC
+ LIMIT 50
+ `,
+ [jobId],
+ );
+
+ return res.status(200).json({
+ hotspots: result.rows.map((row) => ({
+ filePath: row.file_path,
+ type: row.file_type,
+ riskScore: Number.parseFloat(row.risk_score) || 0,
+ inDegree: Number(row.metrics?.inDegree) || 0,
+ loc: Number(row.metrics?.loc) || 0,
+ })),
+ });
+ } catch (error) {
+ return next(error);
+ }
+});
+
router.get('/:jobId', async (req, res, next) => {
const { jobId } = req.params;
diff --git a/server/src/api/webhooks/github.webhook.js b/server/src/api/webhooks/github.webhook.js
index 21bb6d4..bd866f5 100644
--- a/server/src/api/webhooks/github.webhook.js
+++ b/server/src/api/webhooks/github.webhook.js
@@ -2,10 +2,6 @@ import crypto from 'node:crypto';
import express from 'express';
import { Router } from 'express';
import rateLimit from 'express-rate-limit';
-import { pgPool } from '../../infrastructure/connections.js';
-import { enqueueAnalysisJob } from '../../queue/analysisQueue.js';
-
-const router = Router();
const webhookLimiter = rateLimit({
windowMs: 60 * 1000,
@@ -51,170 +47,197 @@ function logWebhookEvent(level, message, context = {}) {
}
}
-router.post('/github', webhookLimiter, express.raw({ type: 'application/json' }), async (req, res, next) => {
- const startTime = Date.now();
- const signature = req.headers['x-github-signature-256'];
- const event = String(req.headers['x-github-event'] || '').trim();
- const deliveryId = req.headers['x-github-delivery'];
- const secret = process.env.GITHUB_WEBHOOK_SECRET;
-
- if (!secret) {
- logWebhookEvent('warn', 'Webhook secret not configured', {
- event,
- deliveryId,
- });
- return res.status(503).json({ error: 'Webhook secret is not configured.' });
- }
+export function createGitHubWebhookRouter({ db, enqueueJob } = {}) {
+ const router = Router();
- const rawBody = Buffer.isBuffer(req.body)
- ? req.body
- : Buffer.from(typeof req.body === 'string' ? req.body : JSON.stringify(req.body || {}));
-
- if (!verifySignature(rawBody, signature, secret)) {
- logWebhookEvent('warn', 'Invalid signature', {
- event,
- deliveryId,
- signatureLength: String(signature || '').length,
- });
- return res.status(401).send('Invalid signature');
- }
+ let resolvedDb = db;
+ let resolvedEnqueueJob = enqueueJob;
- let payload;
- try {
- payload = JSON.parse(rawBody.toString('utf8'));
- } catch (parseErr) {
- logWebhookEvent('error', 'Failed to parse JSON payload', {
- event,
- deliveryId,
- error: parseErr.message,
- });
- return res.status(400).send('Invalid JSON payload');
- }
+ async function resolveDependencies() {
+ if (!resolvedDb) {
+ const { pgPool } = await import('../../infrastructure/connections.js');
+ resolvedDb = pgPool;
+ }
- if (event !== 'pull_request') {
- logWebhookEvent('info', `Ignoring non-PR event`, {
- event,
- deliveryId,
- });
- return res.status(200).send('Ignored');
+ if (!resolvedEnqueueJob) {
+ const { enqueueAnalysisJob } = await import('../../queue/analysisQueue.js');
+ resolvedEnqueueJob = enqueueAnalysisJob;
+ }
}
- const action = payload?.action;
- if (!['opened', 'synchronize'].includes(action)) {
- logWebhookEvent('info', `Ignoring PR action: ${action}`, {
- event,
- deliveryId,
- action,
- });
- return res.status(200).send('Ignored');
- }
+ router.post('/github', webhookLimiter, express.raw({ type: 'application/json' }), async (req, res, next) => {
+ const startTime = Date.now();
+ const signature = req.headers['x-hub-signature-256']; // GitHub sends X-Hub-Signature-256 (was x-github-signature-256, which real deliveries never set)
+ const event = String(req.headers['x-github-event'] || '').trim();
+ const deliveryId = req.headers['x-github-delivery'];
+ const secret = process.env.GITHUB_WEBHOOK_SECRET;
+
+ if (!secret) {
+ logWebhookEvent('warn', 'Webhook secret not configured', {
+ event,
+ deliveryId,
+ });
+ return res.status(503).json({ error: 'Webhook secret is not configured.' });
+ }
+
+ const rawBody = Buffer.isBuffer(req.body)
+ ? req.body
+ : Buffer.from(typeof req.body === 'string' ? req.body : JSON.stringify(req.body || {}));
+
+ if (!verifySignature(rawBody, signature, secret)) {
+ logWebhookEvent('warn', 'Invalid signature', {
+ event,
+ deliveryId,
+ signatureLength: String(signature || '').length,
+ });
+ return res.status(401).send('Invalid signature');
+ }
+
+ let payload;
+ try {
+ payload = JSON.parse(rawBody.toString('utf8'));
+ } catch (parseErr) {
+ logWebhookEvent('error', 'Failed to parse JSON payload', {
+ event,
+ deliveryId,
+ error: parseErr.message,
+ });
+ return res.status(400).send('Invalid JSON payload');
+ }
+
+ if (event !== 'pull_request') {
+ logWebhookEvent('info', `Ignoring non-PR event`, {
+ event,
+ deliveryId,
+ });
+ return res.status(200).send('Ignored');
+ }
- try {
- const owner = payload?.repository?.owner?.login;
- const repo = payload?.repository?.name;
- const branch = payload?.pull_request?.head?.ref;
- const prNumber = payload?.pull_request?.number;
- const prTitle = payload?.pull_request?.title;
-
- logWebhookEvent('info', `Processing PR ${action}`, {
- event,
- deliveryId,
- action,
- owner,
- repo,
- branch,
- prNumber,
- prTitle,
- });
-
- if (!owner || !repo || !branch) {
- logWebhookEvent('warn', 'Invalid PR payload structure', {
+ const action = payload?.action;
+ if (!['opened', 'synchronize'].includes(action)) {
+ logWebhookEvent('info', `Ignoring PR action: ${action}`, {
event,
deliveryId,
action,
- owner: owner ? 'โ' : 'โ',
- repo: repo ? 'โ' : 'โ',
- branch: branch ? 'โ' : 'โ',
});
- return res.status(400).json({ error: 'Invalid pull request payload.' });
+ return res.status(200).send('Ignored');
}
- const repoResult = await pgPool.query(
- `
- SELECT id, owner_id
- FROM repositories
- WHERE github_owner = $1 AND github_repo = $2
- LIMIT 1
- `,
- [owner, repo],
- );
-
- if (repoResult.rowCount === 0) {
- logWebhookEvent('info', 'Repository not tracked in CodeGraph', {
+ try {
+ await resolveDependencies();
+
+ const owner = payload?.repository?.owner?.login;
+ const repo = payload?.repository?.name;
+ const branch = payload?.pull_request?.head?.ref;
+ const prNumber = payload?.pull_request?.number;
+ const prTitle = payload?.pull_request?.title;
+ const headSha = payload?.pull_request?.head?.sha;
+
+ logWebhookEvent('info', `Processing PR ${action}`, {
event,
deliveryId,
+ action,
owner,
repo,
branch,
+ prNumber,
+ prTitle,
});
- return res.status(200).send('Repository not tracked');
- }
- const { id: repositoryId, owner_id: userId } = repoResult.rows[0];
-
- const jobResult = await pgPool.query(
- `
- INSERT INTO analysis_jobs (repository_id, user_id, branch, status, metadata)
- VALUES ($1, $2, $3, 'queued', $4)
- RETURNING id
- `,
- [repositoryId, userId, branch, JSON.stringify({ prNumber, prTitle })],
- );
-
- const jobId = jobResult.rows[0].id;
-
- await enqueueAnalysisJob({
- jobId,
- input: {
- source: 'github',
- github: {
+ if (!owner || !repo || !branch) {
+ logWebhookEvent('warn', 'Invalid PR payload structure', {
+ event,
+ deliveryId,
+ action,
+ owner: owner ? 'yes' : 'no',
+ repo: repo ? 'yes' : 'no',
+ branch: branch ? 'yes' : 'no',
+ });
+ return res.status(400).json({ error: 'Invalid pull request payload.' });
+ }
+
+ const repoResult = await resolvedDb.query(
+ `
+ SELECT id, owner_id
+ FROM repositories
+ WHERE github_owner = $1 AND github_repo = $2
+ LIMIT 1
+ `,
+ [owner, repo],
+ );
+
+ if (repoResult.rowCount === 0) {
+ logWebhookEvent('info', 'Repository not tracked in CodeGraph', {
+ event,
+ deliveryId,
owner,
repo,
branch,
- prNumber,
- prTitle,
+ });
+ return res.status(200).send('Repository not tracked');
+ }
+
+ const { id: repositoryId, owner_id: userId } = repoResult.rows[0];
+
+ const jobResult = await resolvedDb.query(
+ `
+ INSERT INTO analysis_jobs (repository_id, user_id, branch, status, metadata)
+ VALUES ($1, $2, $3, 'queued', $4)
+ RETURNING id
+ `,
+ [repositoryId, userId, branch, JSON.stringify({ prNumber, prTitle })],
+ );
+
+ const jobId = jobResult.rows[0].id;
+
+ await resolvedEnqueueJob({
+ jobId,
+ input: {
+ source: 'github',
+ github: {
+ owner,
+ repo,
+ branch,
+ prNumber,
+ prTitle,
+ headSha,
+ },
+ repositoryId,
+ userId,
},
- repositoryId,
- userId,
- },
- });
-
- const processingTime = Date.now() - startTime;
- logWebhookEvent('info', `Analysis job queued successfully`, {
- event,
- deliveryId,
- action,
- jobId,
- owner,
- repo,
- branch,
- prNumber,
- processingTimeMs: processingTime,
- });
-
- return res.status(200).send('Queued');
- } catch (error) {
- const processingTime = Date.now() - startTime;
- logWebhookEvent('error', `Failed to process webhook: ${error.message}`, {
- event,
- deliveryId,
- action,
- error: error.message,
- processingTimeMs: processingTime,
- stack: error.stack,
- });
- return next(error);
- }
-});
+ });
+
+ const processingTime = Date.now() - startTime;
+ logWebhookEvent('info', `Analysis job queued successfully`, {
+ event,
+ deliveryId,
+ action,
+ jobId,
+ owner,
+ repo,
+ branch,
+ prNumber,
+ processingTimeMs: processingTime,
+ });
+
+ return res.status(200).send('Queued');
+ } catch (error) {
+ const processingTime = Date.now() - startTime;
+ logWebhookEvent('error', `Failed to process webhook: ${error.message}`, {
+ event,
+ deliveryId,
+ action,
+ error: error.message,
+ processingTimeMs: processingTime,
+ stack: error.stack,
+ });
+ return next(error);
+ }
+ });
+
+ return router;
+}
+
+const router = createGitHubWebhookRouter();
export default router;
diff --git a/server/src/api/webhooks/pr-comment.routes.js b/server/src/api/webhooks/pr-comment.routes.js
index 79e25aa..0b79f93 100644
--- a/server/src/api/webhooks/pr-comment.routes.js
+++ b/server/src/api/webhooks/pr-comment.routes.js
@@ -1,6 +1,5 @@
import { Router } from 'express';
import rateLimit from 'express-rate-limit';
-import { pgPool } from '../../infrastructure/connections.js';
import GitHubPRService from '../../services/GitHubPRService.js';
import ImpactAnalysisService from '../../services/ImpactAnalysisService.js';
@@ -17,10 +16,20 @@ const prCommentLimiter = rateLimit({
* When called without arguments it falls back to the production singletons.
*/
export function createPrCommentRouter({
- db = pgPool,
- gitHubPRService = GitHubPRService,
+ db,
+ gitHubPRService,
} = {}) {
const router = Router();
+ let resolvedDb = db;
+ const resolvedGitHubPRService =
+ gitHubPRService || (typeof GitHubPRService === 'function' ? new GitHubPRService() : GitHubPRService);
+
+ async function resolveDb() {
+ if (!resolvedDb) {
+ const { pgPool } = await import('../../infrastructure/connections.js');
+ resolvedDb = pgPool;
+ }
+ }
/**
* POST /api/webhooks/github/pr-comment
@@ -38,8 +47,10 @@ export function createPrCommentRouter({
}
try {
+ await resolveDb();
+
// Fetch job metadata and PR info
- const jobResult = await db.query(
+ const jobResult = await resolvedDb.query(
`
SELECT aj.id, aj.status, aj.branch,
r.id as repositoryId, r.github_owner, r.github_repo,
@@ -65,7 +76,7 @@ export function createPrCommentRouter({
}
// Check if GitHub token is configured
- if (!gitHubPRService.isConfigured()) {
+ if (!resolvedGitHubPRService.isConfigured()) {
console.warn('GitHub token not configured, skipping PR comment');
return res.status(200).json({ message: 'GitHub token not configured' });
}
@@ -73,14 +84,14 @@ export function createPrCommentRouter({
// Get PR diff
let diff;
try {
- diff = await gitHubPRService.getPRDiff(owner, repo, parseInt(prNumber, 10));
+ diff = await resolvedGitHubPRService.getPRDiff(owner, repo, parseInt(prNumber, 10));
} catch (err) {
console.error('Failed to fetch PR diff:', err.message);
return res.status(200).json({ message: 'Failed to fetch PR diff', error: err.message });
}
// Parse changed files from diff
- const changedFiles = gitHubPRService.parseDiff(diff).map((f) => f.file);
+ const changedFiles = resolvedGitHubPRService.parseDiff(diff).map((f) => f.file);
if (changedFiles.length === 0) {
console.log('No changed files found in diff');
@@ -98,12 +109,16 @@ export function createPrCommentRouter({
// Format impact comment
const graphUrl = `${process.env.CLIENT_URL || 'http://localhost:5173'}/?jobId=${jobId}`;
- const comment = gitHubPRService.formatImpactComment(changedFiles, impactedFiles, graphUrl);
+ const comment = resolvedGitHubPRService.formatImpactComment(changedFiles, impactedFiles, graphUrl);
// Check if comment already exists
let existingComment;
try {
- existingComment = await gitHubPRService.findExistingComment(owner, repo, parseInt(prNumber, 10));
+ existingComment = await resolvedGitHubPRService.findExistingComment(
+ owner,
+ repo,
+ parseInt(prNumber, 10),
+ );
} catch (err) {
console.error('Failed to find existing comment:', err.message);
}
@@ -112,10 +127,20 @@ export function createPrCommentRouter({
let result;
try {
if (existingComment) {
- result = await gitHubPRService.updatePRComment(owner, repo, existingComment.id, comment);
+ result = await resolvedGitHubPRService.updatePRComment(
+ owner,
+ repo,
+ existingComment.id,
+ comment,
+ );
console.log(`Updated PR comment #${existingComment.id} on ${owner}/${repo}#${prNumber}`);
} else {
- result = await gitHubPRService.postPRComment(owner, repo, parseInt(prNumber, 10), comment);
+ result = await resolvedGitHubPRService.postPRComment(
+ owner,
+ repo,
+ parseInt(prNumber, 10),
+ comment,
+ );
console.log(`Posted PR comment on ${owner}/${repo}#${prNumber}`);
}
} catch (err) {
@@ -127,7 +152,7 @@ export function createPrCommentRouter({
}
// Log the event
- await db.query(
+ await resolvedDb.query(
`
INSERT INTO audit_logs (job_id, event_type, message, metadata)
VALUES ($1, $2, $3, $4)
@@ -170,11 +195,15 @@ export function createPrCommentRouter({
}
try {
- if (!gitHubPRService.isConfigured()) {
+ if (!resolvedGitHubPRService.isConfigured()) {
return res.status(503).json({ error: 'GitHub token not configured' });
}
- const existing = await gitHubPRService.findExistingComment(owner, repo, parseInt(prNumber, 10));
+ const existing = await resolvedGitHubPRService.findExistingComment(
+ owner,
+ repo,
+ parseInt(prNumber, 10),
+ );
return res.json({
hasComment: !!existing,
diff --git a/server/src/infrastructure/connections.js b/server/src/infrastructure/connections.js
index 28d1119..dd2ef29 100644
--- a/server/src/infrastructure/connections.js
+++ b/server/src/infrastructure/connections.js
@@ -19,13 +19,24 @@ pgPool.on('error', (err) => {
const redisHost = process.env.REDIS_HOST || '127.0.0.1';
const redisPort = Number(process.env.REDIS_PORT || 6379);
+const isTestRuntime = process.argv.includes('--test') || Boolean(process.env.VITEST);
+
+const redisOptions = {
+ maxRetriesPerRequest: null,
+ lazyConnect: true,
+ ...(isTestRuntime
+ ? {
+ retryStrategy: () => null,
+ }
+ : {}),
+};
export const redisClient = process.env.REDIS_URL
- ? new Redis(process.env.REDIS_URL, { maxRetriesPerRequest: null })
+ ? new Redis(process.env.REDIS_URL, redisOptions)
: new Redis({
host: redisHost,
port: redisPort,
- maxRetriesPerRequest: null,
+ ...redisOptions,
});
redisClient.on('connect', () => {
diff --git a/server/src/infrastructure/migrations/006_contracts.sql b/server/src/infrastructure/migrations/006_contracts.sql
new file mode 100644
index 0000000..ab53717
--- /dev/null
+++ b/server/src/infrastructure/migrations/006_contracts.sql
@@ -0,0 +1,25 @@
+DO $$
+BEGIN
+ IF NOT EXISTS (
+ SELECT 1
+ FROM pg_enum e
+ JOIN pg_type t ON e.enumtypid = t.oid
+ WHERE t.typname = 'job_status' AND e.enumlabel = 'inferring-contracts'
+ ) THEN
+ ALTER TYPE job_status ADD VALUE 'inferring-contracts'; -- NOTE: disallowed inside a transaction block on PostgreSQL < 12; run this migration non-transactionally there
+ END IF;
+END $$;
+
+CREATE TABLE IF NOT EXISTS api_contracts (
+ id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
+ job_id UUID NOT NULL REFERENCES analysis_jobs(id) ON DELETE CASCADE,
+ file_path TEXT NOT NULL,
+ routes JSONB NOT NULL DEFAULT '[]',
+ env_deps JSONB NOT NULL DEFAULT '[]',
+ ext_services JSONB NOT NULL DEFAULT '[]',
+ caching JSONB NOT NULL DEFAULT '[]',
+ created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
+ UNIQUE (job_id, file_path)
+);
+
+CREATE INDEX IF NOT EXISTS idx_contracts_job ON api_contracts(job_id);
diff --git a/server/src/middleware/planGuard.middleware.js b/server/src/middleware/planGuard.middleware.js
index 13512a6..d3be67b 100644
--- a/server/src/middleware/planGuard.middleware.js
+++ b/server/src/middleware/planGuard.middleware.js
@@ -81,6 +81,9 @@ async function resolveDatabaseUserId(authUser) {
return upserted.rows[0]?.id || null;
}
+// TODO: Enforce allowedPlans when paid tiers are introduced.
+// Currently every authenticated user is treated as 'free' regardless
+// of the plan list passed to this middleware.
export function requirePlan(..._allowedPlans) {
return async (req, res, next) => {
try {
diff --git a/server/src/queue/analysisQueue.js b/server/src/queue/analysisQueue.js
index fbbc7ec..cf30e4b 100644
--- a/server/src/queue/analysisQueue.js
+++ b/server/src/queue/analysisQueue.js
@@ -4,34 +4,71 @@ import { pgPool, redisClient } from '../infrastructure/connections.js';
const queueConcurrency = Number(process.env.QUEUE_CONCURRENCY || 3);
-export const analysisQueue = new Queue('code-analysis', {
- connection: redisClient,
- defaultJobOptions: {
- attempts: 1,
- removeOnComplete: 100,
- removeOnFail: 200,
- },
-});
-
-export const analysisWorker = new Worker(
- 'code-analysis',
- async (job) => {
- const supervisor = new SupervisorAgent({
- db: pgPool,
- redis: redisClient,
- });
-
- return supervisor.runPipeline(job.data.jobId, job.data.input);
- },
- {
+let analysisQueue;
+let analysisWorker;
+
+function buildQueue() {
+ return new Queue('code-analysis', {
connection: redisClient,
- concurrency: Number.isInteger(queueConcurrency) && queueConcurrency > 0 ? queueConcurrency : 3,
- },
-);
+ defaultJobOptions: {
+ attempts: 1,
+ removeOnComplete: 100,
+ removeOnFail: 200,
+ },
+ });
+}
+
+function buildWorker() {
+ const worker = new Worker(
+ 'code-analysis',
+ async (job) => {
+ const supervisor = new SupervisorAgent({
+ db: pgPool,
+ redis: redisClient,
+ });
+
+ return supervisor.runPipeline(job.data.jobId, job.data.input);
+ },
+ {
+ connection: redisClient,
+ concurrency: Number.isInteger(queueConcurrency) && queueConcurrency > 0 ? queueConcurrency : 3,
+ },
+ );
+
+ worker.on('failed', (job, err) => {
+ console.error(`[Queue] Job ${job?.id} failed:`, err.message);
+ });
+
+ return worker;
+}
-analysisWorker.on('failed', (job, err) => {
- console.error(`[Queue] Job ${job?.id} failed:`, err.message);
-});
+export function getAnalysisQueue() {
+ if (!analysisQueue) {
+ analysisQueue = buildQueue();
+ }
+
+ return analysisQueue;
+}
+
+export function startAnalysisWorker() {
+ if (!analysisWorker) {
+ analysisWorker = buildWorker();
+ }
+
+ return analysisWorker;
+}
+
+export async function closeAnalysisQueueResources() {
+ if (analysisWorker) {
+ await analysisWorker.close();
+ analysisWorker = undefined;
+ }
+
+ if (analysisQueue) {
+ await analysisQueue.close();
+ analysisQueue = undefined;
+ }
+}
export async function enqueueAnalysisJob({ jobId, input }) {
if (!jobId) {
@@ -40,7 +77,7 @@ export async function enqueueAnalysisJob({ jobId, input }) {
throw err;
}
- return analysisQueue.add(
+ return getAnalysisQueue().add(
'analyze',
{ jobId, input },
{
diff --git a/server/src/services/GitHubPRService.js b/server/src/services/GitHubPRService.js
index 62a2b45..163f02d 100644
--- a/server/src/services/GitHubPRService.js
+++ b/server/src/services/GitHubPRService.js
@@ -228,6 +228,41 @@ ${impactedList}
throw new Error(`Failed to fetch PR metadata: ${err.message}`);
}
}
+
+ /**
+ * Create a check run for PR status
+ * @param {string} owner - Repository owner
+ * @param {string} repo - Repository name
+ * @param {string} sha - Commit SHA
+ * @param {Object} options - Check run options
+ * @param {string} options.conclusion - 'success' | 'failure' | 'neutral'
+ * @param {string} options.title - Check title
+ * @param {string} options.summary - Check summary
+ * @param {string} options.detailsUrl - URL to details
+ * @returns {Promise<{id: number, conclusion: string}|null|undefined>} undefined when no token is configured, null when the GitHub API call fails
+ */
+ async createCheckRun(owner, repo, sha, { conclusion, title, summary, detailsUrl }) {
+ if (!this.isConfigured()) return;
+
+ try {
+ const response = await this.client.post(`/repos/${owner}/${repo}/check-runs`, {
+ name: 'CodeGraph Impact Analysis',
+ head_sha: sha,
+ status: 'completed',
+ conclusion,
+ details_url: detailsUrl,
+ output: { title, summary },
+ });
+
+ return {
+ id: response.data.id,
+ conclusion: response.data.conclusion,
+ };
+ } catch (err) {
+ console.error('Failed to create check run:', err.message);
+ return null;
+ }
+ }
}
export { GitHubPRService };
diff --git a/server/src/services/ai/llmProvider.js b/server/src/services/ai/llmProvider.js
new file mode 100644
index 0000000..3f72831
--- /dev/null
+++ b/server/src/services/ai/llmProvider.js
@@ -0,0 +1,341 @@
+import axios from 'axios';
+import OpenAI from 'openai';
+
+const DEFAULT_CHAT_MODEL = process.env.AI_MODEL || process.env.OPENAI_MODEL || 'gpt-4o-mini';
+const DEFAULT_EMBEDDING_MODEL =
+ process.env.AI_EMBEDDING_MODEL || process.env.OPENAI_EMBEDDING_MODEL || 'text-embedding-3-small';
+
+function normalizeProvider(value) {
+ const provider = String(value || 'openai-compatible').trim().toLowerCase();
+
+ if (['openai', 'compatible', 'openai-compatible'].includes(provider)) {
+ return 'openai-compatible';
+ }
+
+ if (['google', 'gemini'].includes(provider)) {
+ return 'gemini';
+ }
+
+ if (['anthropic', 'claude'].includes(provider)) {
+ return 'anthropic';
+ }
+
+ return provider;
+}
+
+function resolveChatApiKey(provider) {
+ return (
+ process.env.AI_API_KEY
+ || (provider === 'anthropic' ? process.env.ANTHROPIC_API_KEY : null)
+ || (provider === 'gemini' ? process.env.GEMINI_API_KEY || process.env.GOOGLE_API_KEY : null)
+ || process.env.OPENAI_API_KEY
+ || null
+ );
+}
+
+function resolveChatBaseUrl(provider) {
+ if (process.env.AI_BASE_URL) return process.env.AI_BASE_URL;
+ if (process.env.OPENAI_BASE_URL) return process.env.OPENAI_BASE_URL;
+
+ if (provider === 'anthropic') return 'https://api.anthropic.com/v1/messages';
+ if (provider === 'gemini') return 'https://generativelanguage.googleapis.com/v1beta';
+ return null;
+}
+
+function resolveEmbeddingProvider() {
+ return normalizeProvider(process.env.AI_EMBEDDING_PROVIDER || process.env.AI_PROVIDER || 'openai-compatible');
+}
+
+function resolveEmbeddingApiKey(provider) {
+ return (
+ process.env.AI_EMBEDDING_API_KEY
+ || process.env.AI_API_KEY
+ || process.env.OPENAI_API_KEY
+ || (provider === 'gemini' ? process.env.GEMINI_API_KEY || process.env.GOOGLE_API_KEY : null)
+ || null
+ );
+}
+
+function resolveEmbeddingBaseUrl() {
+ return process.env.AI_EMBEDDING_BASE_URL || process.env.AI_BASE_URL || process.env.OPENAI_BASE_URL || null;
+}
+
+function normalizeMessageText(content) {
+ if (Array.isArray(content)) {
+ return content
+ .map((part) => {
+ if (typeof part === 'string') return part;
+ if (typeof part?.text === 'string') return part.text;
+ return '';
+ })
+ .join('\n')
+ .trim();
+ }
+
+ return String(content || '').trim();
+}
+
+function toAnthropicPayload(messages = []) {
+ const systemPrompts = [];
+ const mappedMessages = [];
+
+ for (const message of messages) {
+ const role = String(message?.role || 'user').trim().toLowerCase();
+ const text = normalizeMessageText(message?.content);
+ if (!text) continue;
+
+ if (role === 'system') {
+ systemPrompts.push(text);
+ continue;
+ }
+
+ mappedMessages.push({
+ role: role === 'assistant' ? 'assistant' : 'user',
+ content: text,
+ });
+ }
+
+ return {
+ system: systemPrompts.join('\n\n').trim() || undefined,
+ messages: mappedMessages,
+ };
+}
+
+function toGeminiPayload(messages = []) {
+ const contents = [];
+
+ for (const message of messages) {
+ const role = String(message?.role || 'user').trim().toLowerCase();
+ if (role === 'system') continue;
+
+ const text = normalizeMessageText(message?.content);
+ if (!text) continue;
+
+ contents.push({
+ role: role === 'assistant' ? 'model' : 'user',
+ parts: [{ text }],
+ });
+ }
+
+ return contents;
+}
+
+export class ChatClient {
+ constructor() {
+ this.provider = normalizeProvider(process.env.AI_PROVIDER || 'openai-compatible');
+ this.apiKey = resolveChatApiKey(this.provider);
+ this.baseUrl = resolveChatBaseUrl(this.provider);
+ this.model = DEFAULT_CHAT_MODEL;
+
+ this.openai =
+ this.provider === 'openai-compatible' && this.apiKey
+ ? new OpenAI({
+ apiKey: this.apiKey,
+ baseURL: this.baseUrl || undefined,
+ })
+ : null;
+ }
+
+ isConfigured() {
+ if (!this.apiKey) return false;
+ if (!this.model) return false;
+ return true;
+ }
+
+ async createChatCompletion({ messages, model, temperature, maxTokens, responseFormat } = {}) {
+ if (!this.isConfigured()) {
+ throw new Error('AI provider is not configured. Set AI_API_KEY and AI_MODEL (or OPENAI_* fallbacks).');
+ }
+
+ const selectedModel = model || this.model;
+
+ if (this.provider === 'openai-compatible') {
+ const response = await this.openai.chat.completions.create({
+ model: selectedModel,
+ temperature,
+ max_tokens: maxTokens,
+ response_format: responseFormat,
+ messages,
+ });
+
+ const content = response?.choices?.[0]?.message?.content;
+ return {
+ content: normalizeMessageText(content),
+ usage: response?.usage || {},
+ raw: response,
+ };
+ }
+
+ if (this.provider === 'anthropic') {
+ const payload = toAnthropicPayload(messages);
+
+ const response = await axios.post(
+ this.baseUrl,
+ {
+ model: selectedModel,
+ max_tokens: maxTokens || 512,
+ temperature,
+ system: payload.system,
+ messages: payload.messages,
+ },
+ {
+ headers: {
+ 'x-api-key': this.apiKey,
+ 'anthropic-version': process.env.ANTHROPIC_VERSION || '2023-06-01',
+ 'content-type': 'application/json',
+ },
+ timeout: 60_000,
+ },
+ );
+
+ const blocks = Array.isArray(response?.data?.content) ? response.data.content : [];
+ const content = blocks
+ .map((block) => (block?.type === 'text' ? block.text : ''))
+ .filter(Boolean)
+ .join('\n')
+ .trim();
+
+ return {
+ content,
+ usage: response?.data?.usage || {},
+ raw: response?.data,
+ };
+ }
+
+ if (this.provider === 'gemini') {
+ const base = this.baseUrl.replace(/\/$/, '');
+ const endpoint = `${base}/models/${encodeURIComponent(selectedModel)}:generateContent`;
+ const contents = toGeminiPayload(messages);
+
+ const generationConfig = {
+ temperature,
+ maxOutputTokens: maxTokens,
+ };
+
+ if (responseFormat?.type === 'json_object') {
+ generationConfig.responseMimeType = 'application/json';
+ }
+
+ const response = await axios.post(
+ endpoint,
+ {
+ contents,
+ generationConfig,
+ },
+ {
+ params: { key: this.apiKey },
+ headers: { 'content-type': 'application/json' },
+ timeout: 60_000,
+ },
+ );
+
+ const parts = response?.data?.candidates?.[0]?.content?.parts || [];
+ const content = parts.map((part) => part?.text || '').filter(Boolean).join('\n').trim();
+
+ return {
+ content,
+ usage: response?.data?.usageMetadata || {},
+ raw: response?.data,
+ };
+ }
+
+ throw new Error(
+ `Unsupported AI_PROVIDER '${this.provider}'. Supported providers: openai-compatible, anthropic, gemini.`,
+ );
+ }
+
+ async createStream({ messages, model, maxTokens, temperature, onText } = {}) {
+ if (!this.isConfigured()) {
+ throw new Error('AI provider is not configured. Set AI_API_KEY and AI_MODEL (or OPENAI_* fallbacks).');
+ }
+
+ if (this.provider === 'openai-compatible') {
+ const stream = await this.openai.chat.completions.stream({ // NOTE(review): confirm the installed openai SDK exposes chat.completions.stream (older v4 releases only had beta.chat.completions.stream)
+ model: model || this.model,
+ max_tokens: maxTokens,
+ temperature,
+ messages,
+ });
+
+ return {
+ cancel: () => {
+ if (typeof stream?.abort === 'function') {
+ stream.abort();
+ }
+
+ if (typeof stream?.controller?.abort === 'function') {
+ stream.controller.abort();
+ }
+ },
+ consume: async () => {
+ for await (const chunk of stream) {
+ const text = chunk?.choices?.[0]?.delta?.content || '';
+ if (text) onText?.(text);
+ }
+ },
+ };
+ }
+
+ // Fallback for providers without native stream handling in this adapter.
+ return {
+ cancel: () => undefined,
+ consume: async () => {
+ const completion = await this.createChatCompletion({
+ messages,
+ model,
+ maxTokens,
+ temperature,
+ });
+ if (completion?.content) {
+ onText?.(completion.content);
+ }
+ },
+ };
+ }
+}
+
+export class EmbeddingClient {
+ constructor() {
+ this.provider = resolveEmbeddingProvider();
+ this.apiKey = resolveEmbeddingApiKey(this.provider);
+ this.baseUrl = resolveEmbeddingBaseUrl();
+ this.model = DEFAULT_EMBEDDING_MODEL;
+
+ this.openai =
+ this.provider === 'openai-compatible' && this.apiKey
+ ? new OpenAI({
+ apiKey: this.apiKey,
+ baseURL: this.baseUrl || undefined,
+ })
+ : null;
+ }
+
+ isConfigured() {
+ return Boolean(this.apiKey && this.model);
+ }
+
+ async createEmbedding({ input, model } = {}) {
+ if (!this.isConfigured()) {
+ throw new Error('Embedding provider is not configured. Set AI_EMBEDDING_API_KEY and AI_EMBEDDING_MODEL.');
+ }
+
+ if (this.provider !== 'openai-compatible') {
+ throw new Error(
+ `AI_EMBEDDING_PROVIDER '${this.provider}' is not supported. Use openai-compatible for embeddings.`,
+ );
+ }
+
+ return this.openai.embeddings.create({
+ model: model || this.model,
+ input,
+ });
+ }
+}
+
+export function createChatClient() {
+ return new ChatClient();
+}
+
+export function createEmbeddingClient() {
+ return new EmbeddingClient();
+}
diff --git a/server/test/ai.snippet-impact.test.js b/server/test/ai.snippet-impact.test.js
new file mode 100644
index 0000000..091587d
--- /dev/null
+++ b/server/test/ai.snippet-impact.test.js
@@ -0,0 +1,196 @@
+import { after, before, test } from 'node:test';
+import assert from 'node:assert/strict';
+import jwt from 'jsonwebtoken';
+
+process.env.JWT_SECRET = process.env.JWT_SECRET || 'test-secret';
+process.env.DATABASE_URL =
+ process.env.DATABASE_URL || 'postgres://postgres:postgres@localhost:5433/codegraph';
+process.env.REDIS_URL = process.env.REDIS_URL || 'redis://localhost:6379';
+delete process.env.OPENAI_API_KEY;
+delete process.env.AI_API_KEY;
+
+let app;
+let pgPool;
+let redisClient;
+let server;
+let baseUrl;
+
+async function settleWithTimeout(promise, timeoutMs = 3000) {
+ let timer;
+
+ try {
+ await Promise.race([
+ promise.catch(() => undefined),
+ new Promise((resolve) => {
+ timer = setTimeout(resolve, timeoutMs);
+ timer.unref?.();
+ }),
+ ]);
+ } finally {
+ if (timer) {
+ clearTimeout(timer);
+ }
+ }
+}
+
+before(async () => {
+ ({ default: app } = await import('../app.js'));
+ ({ pgPool, redisClient } = await import('../src/infrastructure/connections.js'));
+
+ await new Promise((resolve) => {
+ server = app.listen(0, resolve);
+ });
+
+ const address = server.address();
+ baseUrl = `http://127.0.0.1:${address.port}`;
+});
+
+after(async () => {
+ await settleWithTimeout(
+ new Promise((resolve, reject) => {
+ server.close((error) => {
+ if (error) return reject(error);
+ return resolve();
+ });
+ }),
+ );
+
+ await settleWithTimeout(redisClient.quit());
+ await settleWithTimeout(pgPool.end());
+});
+
+test('POST /api/ai/snippet-impact requires authentication', async () => {
+ const response = await fetch(`${baseUrl}/api/ai/snippet-impact`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ jobId: 'job-1',
+ filePath: 'src/file.js',
+ snippet: 'const x = 1;',
+ }),
+ });
+
+ assert.equal(response.status, 401);
+ const payload = await response.json();
+ assert.equal(payload.error, 'Authentication required.');
+});
+
+test('POST /api/ai/snippet-impact validates required fields', async () => {
+ const userId = '63a501be-d0e4-4570-a32d-7d8c61b65f31';
+ const token = jwt.sign({ id: userId, username: 'snippet-user' }, process.env.JWT_SECRET, {
+ expiresIn: '1h',
+ });
+
+ await pgPool.query(
+ `
+ INSERT INTO users (id, username, email)
+ VALUES ($1, $2, $3)
+ ON CONFLICT (id) DO NOTHING
+ `,
+ [userId, 'snippet-user', 'snippet@example.com'],
+ );
+
+ try {
+ const response = await fetch(`${baseUrl}/api/ai/snippet-impact`, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ Authorization: `Bearer ${token}`,
+ },
+ body: JSON.stringify({
+ jobId: 'job-1',
+ filePath: 'src/file.js',
+ }),
+ });
+
+ assert.equal(response.status, 400);
+ const payload = await response.json();
+ assert.equal(payload.error, 'jobId, filePath, and snippet are required.');
+ } finally {
+ await pgPool.query('DELETE FROM users WHERE id = $1', [userId]);
+ }
+});
+
+test('POST /api/ai/snippet-impact returns 503 when AI provider is not configured', async () => {
+ const userId = '9db9aabd-6818-445a-a361-6203c2f39c85';
+ const repositoryId = 'dc9e2364-7a26-4f3f-9496-9f9070ec748f';
+ const jobId = '38759f8c-e63e-49c4-ab95-c9fce4f31550';
+ const token = jwt.sign({ id: userId, username: 'snippet-user-2' }, process.env.JWT_SECRET, {
+ expiresIn: '1h',
+ });
+
+ await pgPool.query(
+ `
+ INSERT INTO users (id, username, email)
+ VALUES ($1, $2, $3)
+ ON CONFLICT (id) DO NOTHING
+ `,
+ [userId, 'snippet-user-2', 'snippet2@example.com'],
+ );
+
+ await pgPool.query(
+ `
+ INSERT INTO repositories (id, owner_id, source, full_name)
+ VALUES ($1, $2, 'local', 'snippet/repo')
+ ON CONFLICT (owner_id, full_name) DO UPDATE
+ SET full_name = EXCLUDED.full_name
+ `,
+ [repositoryId, userId],
+ );
+
+ await pgPool.query(
+ `
+ INSERT INTO analysis_jobs (id, repository_id, user_id, status)
+ VALUES ($1, $2, $3, 'completed')
+ ON CONFLICT (id) DO NOTHING
+ `,
+ [jobId, repositoryId, userId],
+ );
+
+ await pgPool.query(
+ `
+ INSERT INTO graph_nodes (job_id, file_path, file_type, declarations, metrics, summary)
+ VALUES
+ ($1, 'src/file-a.js', 'module', '[{"name":"runA"}]'::jsonb, '{"loc": 20, "inDegree": 1, "outDegree": 1}'::jsonb, 'Main unit'),
+ ($1, 'src/file-b.js', 'module', '[{"name":"runB"}]'::jsonb, '{"loc": 42, "inDegree": 0, "outDegree": 0}'::jsonb, 'Dependent unit')
+ ON CONFLICT (job_id, file_path) DO NOTHING
+ `,
+ [jobId],
+ );
+
+ await pgPool.query(
+ `
+ INSERT INTO graph_edges (job_id, source_path, target_path, edge_type)
+ VALUES ($1, 'src/file-b.js', 'src/file-a.js', 'import')
+ ON CONFLICT (job_id, source_path, target_path) DO NOTHING
+ `,
+ [jobId],
+ );
+
+ try {
+ const response = await fetch(`${baseUrl}/api/ai/snippet-impact`, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ Authorization: `Bearer ${token}`,
+ },
+ body: JSON.stringify({
+ jobId,
+ filePath: 'src/file-a.js',
+ snippet: 'export function runA() { return 1; }',
+ lineStart: 1,
+ lineEnd: 1,
+ }),
+ });
+
+ assert.equal(response.status, 503);
+ const payload = await response.json();
+ assert.equal(payload.error, 'AI provider is not configured.');
+ } finally {
+ await pgPool.query('DELETE FROM graph_edges WHERE job_id = $1', [jobId]);
+ await pgPool.query('DELETE FROM graph_nodes WHERE job_id = $1', [jobId]);
+ await pgPool.query('DELETE FROM analysis_jobs WHERE id = $1', [jobId]);
+ await pgPool.query('DELETE FROM repositories WHERE id = $1', [repositoryId]);
+ await pgPool.query('DELETE FROM users WHERE id = $1', [userId]);
+ }
+});
\ No newline at end of file
diff --git a/server/test/ai.suggest-refactor.test.js b/server/test/ai.suggest-refactor.test.js
new file mode 100644
index 0000000..1d1a2e4
--- /dev/null
+++ b/server/test/ai.suggest-refactor.test.js
@@ -0,0 +1,226 @@
+import { after, before, test } from 'node:test';
+import assert from 'node:assert/strict';
+import jwt from 'jsonwebtoken';
+
+process.env.JWT_SECRET = process.env.JWT_SECRET || 'test-secret';
+process.env.DATABASE_URL =
+ process.env.DATABASE_URL || 'postgres://postgres:postgres@localhost:5433/codegraph';
+process.env.REDIS_URL = process.env.REDIS_URL || 'redis://localhost:6379';
+delete process.env.OPENAI_API_KEY;
+
+let app;
+let pgPool;
+let redisClient;
+let server;
+let baseUrl;
+
+async function settleWithTimeout(promise, timeoutMs = 3000) {
+ let timer;
+
+ try {
+ await Promise.race([
+ promise.catch(() => undefined),
+ new Promise((resolve) => {
+ timer = setTimeout(resolve, timeoutMs);
+ timer.unref?.();
+ }),
+ ]);
+ } finally {
+ if (timer) {
+ clearTimeout(timer);
+ }
+ }
+}
+
+before(async () => {
+ ({ default: app } = await import('../app.js'));
+ ({ pgPool, redisClient } = await import('../src/infrastructure/connections.js'));
+
+ await new Promise((resolve) => {
+ server = app.listen(0, resolve);
+ });
+
+ const address = server.address();
+ baseUrl = `http://127.0.0.1:${address.port}`;
+});
+
+after(async () => {
+ await settleWithTimeout(
+ new Promise((resolve, reject) => {
+ server.close((error) => {
+ if (error) return reject(error);
+ return resolve();
+ });
+ }),
+ );
+
+ await settleWithTimeout(redisClient.quit());
+ await settleWithTimeout(pgPool.end());
+});
+
+test('POST /api/ai/suggest-refactor requires authentication', async () => {
+ const response = await fetch(`${baseUrl}/api/ai/suggest-refactor`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({ jobId: 'x', filePath: 'src/file.js' }),
+ });
+
+ assert.equal(response.status, 401);
+ const payload = await response.json();
+ assert.equal(payload.error, 'Authentication required.');
+});
+
+test('POST /api/ai/suggest-refactor validates required fields', async () => {
+ const userId = '7a28f1e2-4477-449b-8c89-963b6c4f7111';
+ const token = jwt.sign({ id: userId, username: 'refactor-user' }, process.env.JWT_SECRET, {
+ expiresIn: '1h',
+ });
+
+ await pgPool.query(
+ `
+ INSERT INTO users (id, username, email)
+ VALUES ($1, $2, $3)
+ ON CONFLICT (id) DO NOTHING
+ `,
+ [userId, 'refactor-user', 'refactor@example.com'],
+ );
+
+ try {
+ const response = await fetch(`${baseUrl}/api/ai/suggest-refactor`, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ Authorization: `Bearer ${token}`,
+ },
+ body: JSON.stringify({ jobId: 'job-only' }),
+ });
+
+ assert.equal(response.status, 400);
+ const payload = await response.json();
+ assert.equal(payload.error, 'jobId and filePath are required.');
+ } finally {
+ await pgPool.query('DELETE FROM users WHERE id = $1', [userId]);
+ }
+});
+
+test('POST /api/ai/suggest-refactor returns 404 when file is not part of the graph job', async () => {
+ const userId = 'd12f12de-94f9-4330-9414-25f4a4f07222';
+ const repositoryId = 'a0f9d493-fb2a-4e89-9f4d-48448a7e4333';
+ const jobId = '97ab3bb2-4ef6-4c3a-8df0-f4bf8f91a444';
+ const token = jwt.sign({ id: userId, username: 'refactor-user-2' }, process.env.JWT_SECRET, {
+ expiresIn: '1h',
+ });
+
+ await pgPool.query(
+ `
+ INSERT INTO users (id, username, email)
+ VALUES ($1, $2, $3)
+ ON CONFLICT (id) DO NOTHING
+ `,
+ [userId, 'refactor-user-2', 'refactor2@example.com'],
+ );
+
+ await pgPool.query(
+ `
+ INSERT INTO repositories (id, owner_id, source, full_name)
+ VALUES ($1, $2, 'local', 'refactor/repo')
+ ON CONFLICT (owner_id, full_name) DO UPDATE
+ SET full_name = EXCLUDED.full_name
+ `,
+ [repositoryId, userId],
+ );
+
+ await pgPool.query(
+ `
+ INSERT INTO analysis_jobs (id, repository_id, user_id, status)
+ VALUES ($1, $2, $3, 'completed')
+ ON CONFLICT (id) DO NOTHING
+ `,
+ [jobId, repositoryId, userId],
+ );
+
+ try {
+ const response = await fetch(`${baseUrl}/api/ai/suggest-refactor`, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ Authorization: `Bearer ${token}`,
+ },
+ body: JSON.stringify({ jobId, filePath: 'src/missing-file.js' }),
+ });
+
+ assert.equal(response.status, 404);
+ const payload = await response.json();
+ assert.equal(payload.error, 'File not found.');
+ } finally {
+ await pgPool.query('DELETE FROM analysis_jobs WHERE id = $1', [jobId]);
+ await pgPool.query('DELETE FROM repositories WHERE id = $1', [repositoryId]);
+ await pgPool.query('DELETE FROM users WHERE id = $1', [userId]);
+ }
+});
+
+test('POST /api/ai/suggest-refactor returns 503 when AI provider is not configured', async () => {
+ const userId = '97bcd6cf-eb90-40dc-a429-11eb45422555';
+ const repositoryId = '5cd42d0d-e6d3-40dd-966e-5a2d4b0d3666';
+ const jobId = '8085b4cb-1718-428e-9694-9e22cfb76777';
+ const token = jwt.sign({ id: userId, username: 'refactor-user-3' }, process.env.JWT_SECRET, {
+ expiresIn: '1h',
+ });
+
+ await pgPool.query(
+ `
+ INSERT INTO users (id, username, email)
+ VALUES ($1, $2, $3)
+ ON CONFLICT (id) DO NOTHING
+ `,
+ [userId, 'refactor-user-3', 'refactor3@example.com'],
+ );
+
+ await pgPool.query(
+ `
+ INSERT INTO repositories (id, owner_id, source, full_name)
+ VALUES ($1, $2, 'local', 'refactor/repo-3')
+ ON CONFLICT (owner_id, full_name) DO UPDATE
+ SET full_name = EXCLUDED.full_name
+ `,
+ [repositoryId, userId],
+ );
+
+ await pgPool.query(
+ `
+ INSERT INTO analysis_jobs (id, repository_id, user_id, status)
+ VALUES ($1, $2, $3, 'completed')
+ ON CONFLICT (id) DO NOTHING
+ `,
+ [jobId, repositoryId, userId],
+ );
+
+ await pgPool.query(
+ `
+ INSERT INTO graph_nodes (job_id, file_path, file_type, declarations, metrics, summary)
+ VALUES ($1, 'src/high-risk.js', 'service', '[{"name":"runRisk"}]'::jsonb, '{"loc": 200, "inDegree": 8, "outDegree": 5}'::jsonb, 'Hot path orchestration')
+ ON CONFLICT (job_id, file_path) DO NOTHING
+ `,
+ [jobId],
+ );
+
+ try {
+ const response = await fetch(`${baseUrl}/api/ai/suggest-refactor`, {
+ method: 'POST',
+ headers: {
+ 'Content-Type': 'application/json',
+ Authorization: `Bearer ${token}`,
+ },
+ body: JSON.stringify({ jobId, filePath: 'src/high-risk.js' }),
+ });
+
+ assert.equal(response.status, 503);
+ const payload = await response.json();
+ assert.equal(payload.error, 'AI provider is not configured.');
+ } finally {
+ await pgPool.query('DELETE FROM graph_nodes WHERE job_id = $1', [jobId]);
+ await pgPool.query('DELETE FROM analysis_jobs WHERE id = $1', [jobId]);
+ await pgPool.query('DELETE FROM repositories WHERE id = $1', [repositoryId]);
+ await pgPool.query('DELETE FROM users WHERE id = $1', [userId]);
+ }
+});
diff --git a/server/test/github.webhook.test.js b/server/test/github.webhook.test.js
index b9e7cc7..202692b 100644
--- a/server/test/github.webhook.test.js
+++ b/server/test/github.webhook.test.js
@@ -3,8 +3,7 @@ import assert from 'node:assert/strict';
import crypto from 'node:crypto';
import express from 'express';
import request from 'supertest';
-import githubWebhookRouter from '../src/api/webhooks/github.webhook.js';
-import * as Queue from 'bullmq';
+import { createGitHubWebhookRouter } from '../src/api/webhooks/github.webhook.js';
// Mock dependencies
const mockEnqueueAnalysisJob = async ({ jobId, input }) => {
@@ -57,8 +56,13 @@ describe('GitHub Webhook Integration', () => {
app.use('/api/webhooks/github', express.raw({ type: 'application/json' }));
app.use(express.json());
- // Mock the module dependencies by patching the router's dependencies
- app.use('/api/webhooks', githubWebhookRouter);
+ app.use(
+ '/api/webhooks',
+ createGitHubWebhookRouter({
+ db: mockPgPool,
+ enqueueJob: mockEnqueueAnalysisJob,
+ }),
+ );
});
it('accepts valid pull_request webhook with correct signature', async () => {
@@ -78,7 +82,7 @@ describe('GitHub Webhook Integration', () => {
const response = await request(app)
.post('/api/webhooks/github')
.set('x-github-event', 'pull_request')
- .set('x-hub-signature-256', signature)
+ .set('x-github-signature-256', signature)
.set('Content-Type', 'application/json')
.send(body);
@@ -103,7 +107,7 @@ describe('GitHub Webhook Integration', () => {
const response = await request(app)
.post('/api/webhooks/github')
.set('x-github-event', 'pull_request')
- .set('x-hub-signature-256', invalidSignature)
+ .set('x-github-signature-256', invalidSignature)
.set('Content-Type', 'application/json')
.send(body);
@@ -130,7 +134,7 @@ describe('GitHub Webhook Integration', () => {
const response = await request(app)
.post('/api/webhooks/github')
.set('x-github-event', 'pull_request')
- .set('x-hub-signature-256', signature)
+ .set('x-github-signature-256', signature)
.set('Content-Type', 'application/json')
.send(body);
@@ -150,7 +154,7 @@ describe('GitHub Webhook Integration', () => {
const response = await request(app)
.post('/api/webhooks/github')
.set('x-github-event', 'push')
- .set('x-hub-signature-256', signature)
+ .set('x-github-signature-256', signature)
.set('Content-Type', 'application/json')
.send(body);
@@ -175,7 +179,7 @@ describe('GitHub Webhook Integration', () => {
const response = await request(app)
.post('/api/webhooks/github')
.set('x-github-event', 'pull_request')
- .set('x-hub-signature-256', signature)
+ .set('x-github-signature-256', signature)
.set('Content-Type', 'application/json')
.send(body);
@@ -193,7 +197,7 @@ describe('GitHub Webhook Integration', () => {
const response = await request(app)
.post('/api/webhooks/github')
.set('x-github-event', 'pull_request')
- .set('x-hub-signature-256', signature)
+ .set('x-github-signature-256', signature)
.set('Content-Type', 'application/json')
.send(invalidBody);
@@ -217,7 +221,7 @@ describe('GitHub Webhook Integration', () => {
const response = await request(app)
.post('/api/webhooks/github')
.set('x-github-event', 'pull_request')
- .set('x-hub-signature-256', signature)
+ .set('x-github-signature-256', signature)
.set('Content-Type', 'application/json')
.send(body);
@@ -242,7 +246,7 @@ describe('GitHub Webhook Integration', () => {
const response = await request(app)
.post('/api/webhooks/github')
.set('x-github-event', 'pull_request')
- .set('x-hub-signature-256', signature)
+ .set('x-github-signature-256', signature)
.set('Content-Type', 'application/json')
.send(body);
@@ -267,7 +271,7 @@ describe('GitHub Webhook Integration', () => {
const response = await request(app)
.post('/api/webhooks/github')
.set('x-github-event', 'pull_request')
- .set('x-hub-signature-256', signature)
+ .set('x-github-signature-256', signature)
.set('Content-Type', 'application/json')
.send(body);
@@ -294,7 +298,7 @@ describe('GitHub Webhook Integration', () => {
const response = await request(app)
.post('/api/webhooks/github')
.set('x-github-event', 'pull_request')
- .set('x-hub-signature-256', shortSignature)
+ .set('x-github-signature-256', shortSignature)
.set('Content-Type', 'application/json')
.send(body);
diff --git a/server/test/graph.heatmap.test.js b/server/test/graph.heatmap.test.js
new file mode 100644
index 0000000..1595e68
--- /dev/null
+++ b/server/test/graph.heatmap.test.js
@@ -0,0 +1,124 @@
+import { after, before, test } from 'node:test';
+import assert from 'node:assert/strict';
+
+process.env.JWT_SECRET = process.env.JWT_SECRET || 'test-secret';
+process.env.DATABASE_URL =
+ process.env.DATABASE_URL || 'postgres://postgres:postgres@localhost:5433/codegraph';
+process.env.REDIS_URL = process.env.REDIS_URL || 'redis://localhost:6379';
+
+let app;
+let pgPool;
+let redisClient;
+let server;
+let baseUrl;
+
+async function settleWithTimeout(promise, timeoutMs = 3000) {
+ let timer;
+
+ try {
+ await Promise.race([
+ promise.catch(() => undefined),
+ new Promise((resolve) => {
+ timer = setTimeout(resolve, timeoutMs);
+ timer.unref?.();
+ }),
+ ]);
+ } finally {
+ if (timer) {
+ clearTimeout(timer);
+ }
+ }
+}
+
+before(async () => {
+ ({ default: app } = await import('../app.js'));
+ ({ pgPool, redisClient } = await import('../src/infrastructure/connections.js'));
+
+ await new Promise((resolve) => {
+ server = app.listen(0, resolve);
+ });
+
+ const address = server.address();
+ baseUrl = `http://127.0.0.1:${address.port}`;
+});
+
+after(async () => {
+ await settleWithTimeout(
+ new Promise((resolve, reject) => {
+ server.close((error) => {
+ if (error) return reject(error);
+ return resolve();
+ });
+ }),
+ );
+
+ await settleWithTimeout(redisClient.quit());
+ await settleWithTimeout(pgPool.end());
+});
+
+test('GET /api/graph/:jobId/heatmap returns nodes ordered by risk score', async () => {
+ const userId = '9e4f6d7a-31e1-4d9f-8575-b9e5428eb111';
+ const repositoryId = '2c4ef2f5-019e-41fd-b6f1-9652d4a7c222';
+ const jobId = '14882a4f-f885-4488-8afb-7b15a2c3d333';
+
+ await pgPool.query(
+ `
+ INSERT INTO users (id, username, email)
+ VALUES ($1, $2, $3)
+ ON CONFLICT (id) DO NOTHING
+ `,
+ [userId, 'heatmap-user', 'heatmap@example.com'],
+ );
+
+ await pgPool.query(
+ `
+ INSERT INTO repositories (id, owner_id, source, full_name)
+ VALUES ($1, $2, 'local', 'heatmap/repo')
+ ON CONFLICT (owner_id, full_name) DO UPDATE
+ SET full_name = EXCLUDED.full_name
+ `,
+ [repositoryId, userId],
+ );
+
+ await pgPool.query(
+ `
+ INSERT INTO analysis_jobs (id, repository_id, user_id, status)
+ VALUES ($1, $2, $3, 'completed')
+ ON CONFLICT (id) DO NOTHING
+ `,
+ [jobId, repositoryId, userId],
+ );
+
+ await pgPool.query(
+ `
+ INSERT INTO graph_nodes (job_id, file_path, file_type, declarations, metrics)
+ VALUES
+ ($1, 'src/high-risk.js', 'service', '[]'::jsonb, '{"inDegree": 4, "complexity": 7, "loc": 240}'::jsonb),
+ ($1, 'src/medium-risk.js', 'module', '[]'::jsonb, '{"inDegree": 3, "complexity": 3, "loc": 150}'::jsonb),
+ ($1, 'src/low-risk.js', 'util', '[]'::jsonb, '{"inDegree": 1, "complexity": 1, "loc": 40}'::jsonb)
+ ON CONFLICT (job_id, file_path) DO NOTHING
+ `,
+ [jobId],
+ );
+
+ try {
+ const response = await fetch(`${baseUrl}/api/graph/${jobId}/heatmap`);
+ assert.equal(response.status, 200);
+
+ const payload = await response.json();
+ assert.equal(Array.isArray(payload.hotspots), true);
+ assert.equal(payload.hotspots.length, 3);
+
+ assert.equal(payload.hotspots[0].filePath, 'src/high-risk.js');
+ assert.equal(payload.hotspots[0].riskScore, 28);
+ assert.equal(payload.hotspots[1].filePath, 'src/medium-risk.js');
+ assert.equal(payload.hotspots[1].riskScore, 9);
+ assert.equal(payload.hotspots[2].filePath, 'src/low-risk.js');
+ assert.equal(payload.hotspots[2].riskScore, 1);
+ } finally {
+ await pgPool.query('DELETE FROM graph_nodes WHERE job_id = $1', [jobId]);
+ await pgPool.query('DELETE FROM analysis_jobs WHERE id = $1', [jobId]);
+ await pgPool.query('DELETE FROM repositories WHERE id = $1', [repositoryId]);
+ await pgPool.query('DELETE FROM users WHERE id = $1', [userId]);
+ }
+});
diff --git a/server/test/parser.multilang.test.js b/server/test/parser.multilang.test.js
index 682fad6..485eff7 100644
--- a/server/test/parser.multilang.test.js
+++ b/server/test/parser.multilang.test.js
@@ -3,7 +3,7 @@ import assert from 'node:assert/strict';
import { mkdtemp, rm, writeFile, mkdir } from 'fs/promises';
import os from 'os';
import path from 'path';
-import { ParserAgent } from '../src/agents/parser/ParserAgent.js';
+import { PolyglotParserAgent } from '../src/agents/parser/PolyglotParserAgent.js';
const tempDirs = [];
@@ -13,7 +13,7 @@ after(async () => {
}
});
-test('ParserAgent parses Python and Go files via language workers', async () => {
+test('PolyglotParserAgent parses Python and Go files via tree-sitter worker', async () => {
const rootDir = await mkdtemp(path.join(os.tmpdir(), 'codegraph-parser-'));
tempDirs.push(rootDir);
@@ -49,14 +49,14 @@ test('ParserAgent parses Python and Go files via language workers', async () =>
'',
'type Service struct {}',
'',
- 'func (s Service) Handle() {',
+ 'func Handle() {',
' fmt.Println("ok")',
'}',
].join('\n'),
'utf8',
);
- const parser = new ParserAgent();
+ const parser = new PolyglotParserAgent();
const result = await parser.process(
{
@@ -75,14 +75,14 @@ test('ParserAgent parses Python and Go files via language workers', async () =>
const pyResult = result.data.parsedFiles.find((file) => file.relativePath === 'service.py');
assert.ok(pyResult);
assert.equal(pyResult.parseError, null);
- assert.deepEqual(pyResult.imports, ['./pkg', 'requests']);
- assert.equal(pyResult.declarations.some((entry) => entry.name === 'login' && entry.kind === 'function'), true);
- assert.equal(pyResult.declarations.some((entry) => entry.name === 'AuthService' && entry.kind === 'class'), true);
+ assert.equal(pyResult.imports.includes('requests'), true);
+ assert.equal(pyResult.declarations.some((entry) => entry.name === 'login' && entry.kind === 'fn'), true);
+ assert.equal(pyResult.declarations.some((entry) => entry.name === 'AuthService' && entry.kind === 'cls'), true);
const goResult = result.data.parsedFiles.find((file) => file.relativePath === 'service.go');
assert.ok(goResult);
assert.equal(goResult.parseError, null);
assert.deepEqual(goResult.imports, ['fmt', 'net/http']);
- assert.equal(goResult.declarations.some((entry) => entry.name === 'Handle' && entry.kind === 'function'), true);
- assert.equal(goResult.declarations.some((entry) => entry.name === 'Service' && entry.kind === 'struct'), true);
+ assert.equal(goResult.declarations.some((entry) => entry.name === 'Handle' && entry.kind === 'fn'), true);
+ assert.equal(goResult.declarations.some((entry) => entry.name === 'Service' && entry.kind === 'type'), true);
});
diff --git a/server/test/snippet.analyzer.confidence.test.js b/server/test/snippet.analyzer.confidence.test.js
new file mode 100644
index 0000000..85c9380
--- /dev/null
+++ b/server/test/snippet.analyzer.confidence.test.js
@@ -0,0 +1,147 @@
+import assert from 'node:assert/strict';
+import test from 'node:test';
+import { SnippetAnalyzerAgent } from '../src/agents/analysis/SnippetAnalyzerAgent.js';
+
+function createMockDb() {
+ return {
+ async query(sql) {
+ if (String(sql).includes('FROM graph_nodes')) {
+ return {
+ rowCount: 3,
+ rows: [
+ {
+ file_path: 'src/a.js',
+ file_type: 'module',
+ declarations: [{ name: 'runA' }],
+ metrics: { inDegree: 1, outDegree: 1, loc: 10 },
+ summary: 'Core entry file',
+ },
+ {
+ file_path: 'src/b.js',
+ file_type: 'module',
+ declarations: [{ name: 'runB' }],
+ metrics: { inDegree: 0, outDegree: 1, loc: 14 },
+ summary: 'Depends on a',
+ },
+ {
+ file_path: 'src/c.js',
+ file_type: 'module',
+ declarations: [{ name: 'runC' }],
+ metrics: { inDegree: 1, outDegree: 0, loc: 20 },
+ summary: 'Transitively impacted',
+ },
+ ],
+ };
+ }
+
+ if (String(sql).includes('FROM graph_edges')) {
+ return {
+ rowCount: 2,
+ rows: [
+ { source_path: 'src/b.js', target_path: 'src/a.js' },
+ { source_path: 'src/c.js', target_path: 'src/b.js' },
+ ],
+ };
+ }
+
+ return { rowCount: 0, rows: [] };
+ },
+ };
+}
+
+test('SnippetAnalyzerAgent re-runs when confidence is below threshold', async () => {
+ let callCount = 0;
+
+ const llmClient = {
+ isConfigured: () => true,
+ createChatCompletion: async () => {
+ callCount += 1;
+
+ if (callCount === 1) {
+ return {
+ content: JSON.stringify({
+ snippetPurpose: 'Low confidence first pass.',
+ fileImpact: 'Uncertain file impact.',
+ codebaseImpact: 'Potentially broad impact.',
+ directImpactedFiles: ['src/b.js'],
+ indirectImpactedFiles: ['src/c.js'],
+ relatedFileFindings: [{ filePath: 'src/b.js', impact: 'Depends on a', risk: 'medium' }],
+ confidence: 'low',
+ confidenceScore: 0.45,
+ }),
+ usage: { completion_tokens: 120 },
+ };
+ }
+
+ return {
+ content: JSON.stringify({
+ snippetPurpose: 'Exports a function used by downstream modules.',
+ fileImpact: 'Affects exported behavior in src/a.js.',
+ codebaseImpact: 'Changes propagate to direct and transitive dependents.',
+ directImpactedFiles: ['src/b.js'],
+ indirectImpactedFiles: ['src/c.js'],
+ relatedFileFindings: [{ filePath: 'src/c.js', impact: 'Reads output from src/b.js', risk: 'medium' }],
+ confidence: 'high',
+ confidenceScore: 0.92,
+ }),
+ usage: { completion_tokens: 160 },
+ };
+ },
+ };
+
+ const agent = new SnippetAnalyzerAgent({ db: createMockDb(), llmClient });
+
+ const result = await agent.process({
+ jobId: 'job-1',
+ filePath: 'src/a.js',
+ snippet: 'export function runA() { return 1; }',
+ lineStart: 1,
+ lineEnd: 1,
+ });
+
+ assert.equal(result.status, 'success');
+ assert.equal(callCount, 2);
+ assert.equal(result.data.rerunTriggered, true);
+ assert.equal(result.data.attemptsUsed, 2);
+ assert.equal(result.data.confidence, 'high');
+ assert.equal(result.data.confidenceScore, 0.92);
+ assert.equal(result.metrics.attemptsUsed, 2);
+ assert.equal(result.metrics.completionTokens, 280);
+});
+
+test('SnippetAnalyzerAgent accepts numeric-string confidence and skips rerun when >= threshold', async () => {
+ let callCount = 0;
+
+ const llmClient = {
+ isConfigured: () => true,
+ createChatCompletion: async () => {
+ callCount += 1;
+ return {
+ content: JSON.stringify({
+ snippetPurpose: 'Initializes and returns stable value.',
+ fileImpact: 'Local helper behavior only.',
+ codebaseImpact: 'Limited to direct consumers.',
+ directImpactedFiles: ['src/b.js'],
+ indirectImpactedFiles: [],
+ relatedFileFindings: [{ filePath: 'src/b.js', impact: 'Imports helper output', risk: 'low' }],
+ confidenceScore: '0.82',
+ }),
+ usage: { output_tokens: 90 },
+ };
+ },
+ };
+
+ const agent = new SnippetAnalyzerAgent({ db: createMockDb(), llmClient });
+ const result = await agent.process({
+ jobId: 'job-1',
+ filePath: 'src/a.js',
+ snippet: 'export function runA() { return 1; }',
+ });
+
+ assert.equal(result.status, 'success');
+ assert.equal(callCount, 1);
+ assert.equal(result.data.rerunTriggered, false);
+ assert.equal(result.data.attemptsUsed, 1);
+ assert.equal(result.data.confidence, 'medium');
+ assert.equal(result.data.confidenceScore, 0.82);
+});
diff --git a/vscode-extension/.gitignore b/vscode-extension/.gitignore
new file mode 100644
index 0000000..f5959f6
--- /dev/null
+++ b/vscode-extension/.gitignore
@@ -0,0 +1,6 @@
+node_modules
+dist
+*.vsix
+.DS_Store
+.env
+.env.local
diff --git a/vscode-extension/.vscodeignore b/vscode-extension/.vscodeignore
new file mode 100644
index 0000000..74303fe
--- /dev/null
+++ b/vscode-extension/.vscodeignore
@@ -0,0 +1,10 @@
+.git
+node_modules
+dist/**/*.map
+src/**
+**/*.ts
+**/*.md
+tsconfig.json
+.prettierrc
+.eslintignore
+.eslintrc.json
diff --git a/vscode-extension/README.md b/vscode-extension/README.md
new file mode 100644
index 0000000..8ec0d11
--- /dev/null
+++ b/vscode-extension/README.md
@@ -0,0 +1,160 @@
+# CodeGraph AI โ VS Code Extension
+
+Visualize your codebase dependencies and impact analysis directly in VS Code. See file relationships, dependencies, and AI-powered insights without leaving your editor.
+
+## Features
+
+- ๐ **Dependency Graph Visualization** โ Explore your codebase structure in an interactive graph
+- ๐ก **Hover Intelligence** โ See file summaries, dependencies, and usage information on hover
+- ๐ **Impact Analysis** โ Understand which files are affected by your changes
+- ๐ค **AI Refactor Suggestions** โ Get actionable refactoring recommendations
+- ๐จ **Dark Mode Support** โ Matches your VS Code theme
+
+## Installation
+
+1. **From VS Code Marketplace:**
+ - Open VS Code
+ - Go to Extensions (Ctrl/Cmd + Shift + X)
+ - Search for "CodeGraph AI"
+ - Click Install
+
+2. **From Source (Development):**
+ ```bash
+ git clone https://github.com/codegraph-ai/codegraph-ai.git
+ cd vscode-extension
+ npm install
+ npm run esbuild
+ # Open vscode-extension folder in VS Code
+ # Press F5 to launch the extension in development mode
+ ```
+
+## Configuration
+
+Add to your VS Code settings (`.vscode/settings.json` or globally):
+
+```json
+{
+ "codegraphAi.serverUrl": "http://localhost:5000",
+ "codegraphAi.apiToken": "your-jwt-token-here"
+}
+```
+
+### Settings
+
+- **`codegraphAi.serverUrl`** โ URL of your CodeGraph AI server (default: `http://localhost:5000`)
+- **`codegraphAi.apiToken`** โ JWT token for authentication with the server (optional)
+
+## Usage
+
+### Opening the Graph
+
+1. Open a supported file (`.js`, `.ts`, `.jsx`, `.tsx`, `.py`, `.go`)
+2. Run the command **"CodeGraph AI: Open Graph"** from the Command Palette (Ctrl/Cmd + Shift + P)
+3. A new panel opens showing your dependency graph
+
+### Hover Information
+
+Hover over any file in your editor to see:
+- File summary and description
+- Number of direct dependencies
+- Number of files that depend on it
+- Quick link to open the full graph
+
+### Keyboard Shortcuts
+
+- `Ctrl/Cmd + Shift + P` โ Open Command Palette
+- Type "CodeGraph AI" to find available commands
+
+## Supported Languages
+
+- JavaScript / TypeScript
+- JSX / TSX
+- Python
+- Go
+
+## Requirements
+
+- VS Code 1.85 or later
+- CodeGraph AI server running (local or remote)
+- Valid JWT authentication token (if server requires authentication)
+
+## Development
+
+### Build
+
+```bash
+npm run esbuild # Build once
+npm run esbuild-watch # Watch mode for development
+npm run vscode:prepublish # Production build with minification
+```
+
+### Type Checking
+
+```bash
+npm run typecheck
+```
+
+### Package for Distribution
+
+```bash
+npm install -g @vscode/vsce
+vsce package # Generates .vsix file
+vsce publish # Publish to VS Code Marketplace (requires credentials)
+```
+
+## Architecture
+
+### Files
+
+- **`src/extension.ts`** โ Extension activation and command registration
+- **`src/HoverProvider.ts`** โ VS Code hover provider implementation
+- **`src/GraphPanel.ts`** โ WebviewPanel managing the graph visualization
+- **`src/ApiClient.ts`** โ HTTP client for CodeGraph backend communication
+- **`media/main.js`** โ Webview script for graph rendering
+- **`media/main.css`** โ Webview styles
+
+### Communication Flow
+
+```
+VS Code Extension
+ โ
+ApiClient (HTTP)
+ โ
+CodeGraph Backend API
+ โ
+Graph Visualization (Webview)
+```
+
+## Troubleshooting
+
+### "No workspace folder open"
+- Open a folder in VS Code (File โ Open Folder)
+
+### "Connection refused" / "Failed to fetch graph"
+- Verify the CodeGraph server is running
+- Check `codegraphAi.serverUrl` setting
+- Ensure firewall allows the connection
+
+### Hover information not showing
+- The file must be from an analyzed repository
+- Set a valid Job ID from a completed analysis
+- Verify network connectivity to the server
+
+## Contributing
+
+Found a bug or have a feature request? Open an issue on GitHub:
+https://github.com/codegraph-ai/codegraph-ai/issues
+
+## License
+
+MIT โ See LICENSE file in the repository
+
+## Support
+
+- ๐ [Documentation](https://github.com/codegraph-ai/codegraph-ai#readme)
+- ๐ฌ [Discussions](https://github.com/codegraph-ai/codegraph-ai/discussions)
+- ๐ [Issues](https://github.com/codegraph-ai/codegraph-ai/issues)
+
+---
+
+**Developed by the CodeGraph AI team**
diff --git a/vscode-extension/media/main.css b/vscode-extension/media/main.css
new file mode 100644
index 0000000..50abac4
--- /dev/null
+++ b/vscode-extension/media/main.css
@@ -0,0 +1,176 @@
+/*
+ * Webview stylesheet for the CodeGraph AI graph panel.
+ * All colors come from VS Code theme variables (--vscode-*) so the panel
+ * follows the active editor theme in both light and dark modes.
+ */
+:root {
+  --color-bg: var(--vscode-editor-background);
+  --color-fg: var(--vscode-editor-foreground);
+  --color-border: var(--vscode-panel-border);
+  --color-primary: var(--vscode-focusBorder);
+}
+
+body {
+  color: var(--color-fg);
+  background-color: var(--color-bg);
+  font-family: var(--vscode-font-family);
+  font-size: var(--vscode-font-size);
+  line-height: 1.6;
+  padding: 16px;
+}
+
+#root {
+  width: 100%;
+  height: 100%;
+}
+
+/* Top-level layout: header, messages, stats, graph stacked vertically. */
+.container {
+  display: flex;
+  flex-direction: column;
+  gap: 16px;
+}
+
+.header {
+  display: flex;
+  justify-content: space-between;
+  align-items: center;
+  padding-bottom: 12px;
+  border-bottom: 1px solid var(--color-border);
+}
+
+.header h1 {
+  font-size: 16px;
+  font-weight: 600;
+  margin: 0;
+}
+
+.controls {
+  display: flex;
+  gap: 8px;
+}
+
+button {
+  padding: 6px 12px;
+  background-color: var(--vscode-button-background);
+  color: var(--vscode-button-foreground);
+  border: none;
+  border-radius: 2px;
+  cursor: pointer;
+  font-size: inherit;
+  font-family: inherit;
+  transition: background-color 0.2s;
+}
+
+button:hover {
+  background-color: var(--vscode-button-hoverBackground);
+}
+
+button:active {
+  /* NOTE(review): identical to :hover — confirm a distinct pressed state
+     was not intended. */
+  background-color: var(--vscode-button-hoverBackground);
+}
+
+button:disabled {
+  opacity: 0.5;
+  cursor: not-allowed;
+}
+
+.select {
+  padding: 6px 8px;
+  background-color: var(--vscode-input-background);
+  color: var(--vscode-input-foreground);
+  border: 1px solid var(--vscode-input-border);
+  border-radius: 2px;
+  font-size: inherit;
+  font-family: inherit;
+}
+
+.graph-container {
+  flex: 1;
+  border: 1px solid var(--color-border);
+  border-radius: 4px;
+  overflow: hidden;
+  background-color: var(--vscode-editor-background);
+}
+
+.loading {
+  display: flex;
+  align-items: center;
+  justify-content: center;
+  padding: 32px;
+  color: var(--vscode-foreground);
+}
+
+/* Validation-style message banners reuse VS Code's input-validation colors. */
+.error {
+  padding: 12px 16px;
+  background-color: var(--vscode-inputValidation-errorBackground);
+  color: var(--vscode-inputValidation-errorForeground);
+  border: 1px solid var(--vscode-inputValidation-errorBorder);
+  border-radius: 4px;
+  margin-bottom: 12px;
+}
+
+.info {
+  padding: 12px 16px;
+  background-color: var(--vscode-inputValidation-infoBackground);
+  color: var(--vscode-inputValidation-infoForeground);
+  border: 1px solid var(--vscode-inputValidation-infoBorder);
+  border-radius: 4px;
+  margin-bottom: 12px;
+}
+
+.stats {
+  display: grid;
+  grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
+  gap: 12px;
+  padding: 12px;
+  background-color: var(--vscode-editor-background);
+  border: 1px solid var(--color-border);
+  border-radius: 4px;
+}
+
+.stat-item {
+  padding: 8px;
+  border-left: 3px solid var(--color-primary);
+  padding-left: 12px;
+}
+
+.stat-label {
+  font-size: 12px;
+  opacity: 0.7;
+  text-transform: uppercase;
+  letter-spacing: 0.5px;
+}
+
+.stat-value {
+  font-size: 18px;
+  font-weight: 600;
+  margin-top: 4px;
+}
+
+.list {
+  max-height: 300px;
+  overflow-y: auto;
+  border: 1px solid var(--color-border);
+  border-radius: 4px;
+}
+
+.list-item {
+  padding: 8px 12px;
+  border-bottom: 1px solid var(--color-border);
+  cursor: pointer;
+  transition: background-color 0.2s;
+}
+
+.list-item:hover {
+  background-color: var(--vscode-list-hoverBackground);
+}
+
+.list-item:last-child {
+  border-bottom: none;
+}
+
+.file-path {
+  font-family: var(--vscode-editor-font-family);
+  font-size: 12px;
+}
+
+.metadata {
+  font-size: 11px;
+  opacity: 0.6;
+  margin-top: 2px;
+}
diff --git a/vscode-extension/media/main.js b/vscode-extension/media/main.js
new file mode 100644
index 0000000..36a7bc9
--- /dev/null
+++ b/vscode-extension/media/main.js
@@ -0,0 +1,117 @@
+// This will be loaded in the webview
+// It handles communication between the extension and the webview content
+
+(function () {
+  // NOTE(review): webviews normally obtain this handle via acquireVsCodeApi();
+  // presumably an inline bootstrap script in the panel HTML assigns
+  // window.vscode — confirm, since the panel HTML appears stripped here.
+  const vscode = window.vscode;
+
+  // Initialize the webview
+  const root = document.getElementById('root');
+
+  // Create a simple UI for now until React graph is built
+  // NOTE(review): the template below appears corrupted — it contains no HTML
+  // tags, yet the code queries #refreshBtn, #settingsBtn, #infoMsg, #errorMsg,
+  // #content, #stats and #graphContainer. Restore the markup from VCS.
+  const html = `
+
+
+
+
+ No graph loaded. Run an analysis on your repository to get started.
+
+
+
+
+
+
+
+
 Loading graph visualization...
+
+
+
+ `;
+
+  root.innerHTML = html;
+
+  // Setup event listeners
+  document.getElementById('refreshBtn').addEventListener('click', () => {
+    vscode.postMessage({ command: 'refresh' });
+  });
+
+  document.getElementById('settingsBtn').addEventListener('click', () => {
+    vscode.postMessage({ command: 'openSettings' });
+  });
+
+  // Listen for messages from the extension
+  // (counterpart: GraphPanel posts graphLoaded/error/refactorSuggestions).
+  window.addEventListener('message', (event) => {
+    const message = event.data;
+    console.log('Webview received message:', message);
+
+    switch (message.command) {
+      case 'graphLoaded':
+        handleGraphLoaded(message.data);
+        break;
+      case 'error':
+        showError(message.message);
+        break;
+      case 'refactorSuggestions':
+        showRefactorSuggestions(message.data);
+        break;
+    }
+  });
+
+  // Swap the "no graph" banner for content and render summary stats.
+  function handleGraphLoaded(data) {
+    const infoMsg = document.getElementById('infoMsg');
+    const content = document.getElementById('content');
+    const stats = document.getElementById('stats');
+
+    infoMsg.style.display = 'none';
+    content.style.display = 'flex';
+    content.style.flexDirection = 'column';
+
+    // Display basic stats
+    const nodeCount = Object.keys(data.graph || {}).length;
+    const edgeCount = data.edges ? data.edges.length : 0;
+
+    // NOTE(review): this template is also tag-stripped; a stat-item for
+    // nodeCount (files) may have been lost alongside the markup.
+    stats.innerHTML = `
+
+
+
 Dependencies
+
 ${edgeCount}
+
+
+
 Job ID
+
 ${data.jobId.slice(0, 12)}...
+
+ `;
+
+    // TODO: Render actual graph visualization here
+    document.getElementById('graphContainer').innerHTML = `
+
+ Graph visualization coming soon. Your codebase has ${nodeCount} files with ${edgeCount} dependencies.
+
+ `;
+  }
+
+  // Show an error banner and hide the main content area.
+  function showError(message) {
+    const errorMsg = document.getElementById('errorMsg');
+    const content = document.getElementById('content');
+
+    errorMsg.textContent = message;
+    errorMsg.style.display = 'block';
+    content.style.display = 'none';
+  }
+
+  // Placeholder: suggestions are only logged until a panel exists for them.
+  function showRefactorSuggestions(data) {
+    console.log('Refactor suggestions:', data);
+    // TODO: Display suggestions in a panel
+  }
+
+  // Signal that webview is ready
+  // (GraphPanel responds to 'webviewReady' by sending the graph data).
+  vscode.postMessage({ command: 'webviewReady' });
+})();
diff --git a/vscode-extension/media/reset.css b/vscode-extension/media/reset.css
new file mode 100644
index 0000000..0d322f1
--- /dev/null
+++ b/vscode-extension/media/reset.css
@@ -0,0 +1,117 @@
+/*
+ * Minimal element reset for the webview (Meyer-style): zero out default
+ * margins/paddings and normalize form-control fonts before main.css applies.
+ */
+html {
+  box-sizing: border-box;
+  font-size: 13px;
+}
+
+*,
+*:before,
+*:after {
+  box-sizing: inherit;
+}
+
+body,
+div,
+dl,
+dt,
+dd,
+ul,
+ol,
+li,
+h1,
+h2,
+h3,
+h4,
+h5,
+h6,
+pre,
+form,
+fieldset,
+legend,
+input,
+textarea,
+p,
+blockquote,
+th,
+td {
+  margin: 0;
+  padding: 0;
+}
+
+table {
+  border-collapse: collapse;
+  border-spacing: 0;
+}
+
+fieldset,
+img {
+  border: 0;
+}
+
+address,
+caption,
+cite,
+code,
+dfn,
+em,
+strong,
+th,
+var {
+  font-style: normal;
+  font-weight: normal;
+}
+
+li {
+  list-style: none;
+}
+
+caption,
+th {
+  text-align: left;
+}
+
+h1,
+h2,
+h3,
+h4,
+h5,
+h6 {
+  font-size: 100%;
+  font-weight: normal;
+}
+
+q:before,
+q:after {
+  content: '';
+}
+
+abbr,
+acronym {
+  border: 0;
+  font-variant: normal;
+}
+
+sup {
+  vertical-align: text-top;
+}
+
+sub {
+  vertical-align: text-bottom;
+}
+
+input,
+textarea,
+select {
+  font-family: inherit;
+  font-size: inherit;
+  font-weight: inherit;
+}
+
+/* NOTE(review): duplicates the selector above; font-size: 100% overrides the
+   'inherit' just set — confirm which is intended. */
+input,
+textarea,
+select {
+  font-size: 100%;
+}
+
+legend {
+  color: #000;
+}
diff --git a/vscode-extension/package-lock.json b/vscode-extension/package-lock.json
new file mode 100644
index 0000000..714838f
--- /dev/null
+++ b/vscode-extension/package-lock.json
@@ -0,0 +1,489 @@
+{
+ "name": "codegraph-ai",
+ "version": "0.0.1",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {
+ "": {
+ "name": "codegraph-ai",
+ "version": "0.0.1",
+ "devDependencies": {
+ "@types/node": "^20.0.0",
+ "@types/vscode": "^1.85.0",
+ "esbuild": "^0.19.0",
+ "typescript": "^5.3.0"
+ },
+ "engines": {
+ "vscode": "^1.85.0"
+ }
+ },
+ "node_modules/@esbuild/aix-ppc64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.19.12.tgz",
+ "integrity": "sha512-bmoCYyWdEL3wDQIVbcyzRyeKLgk2WtWLTWz1ZIAZF/EGbNOwSA6ew3PftJ1PqMiOOGu0OyFMzG53L0zqIpPeNA==",
+ "cpu": [
+ "ppc64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "aix"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/android-arm": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.19.12.tgz",
+ "integrity": "sha512-qg/Lj1mu3CdQlDEEiWrlC4eaPZ1KztwGJ9B6J+/6G+/4ewxJg7gqj8eVYWvao1bXrqGiW2rsBZFSX3q2lcW05w==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/android-arm64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.19.12.tgz",
+ "integrity": "sha512-P0UVNGIienjZv3f5zq0DP3Nt2IE/3plFzuaS96vihvD0Hd6H/q4WXUGpCxD/E8YrSXfNyRPbpTq+T8ZQioSuPA==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/android-x64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.19.12.tgz",
+ "integrity": "sha512-3k7ZoUW6Q6YqhdhIaq/WZ7HwBpnFBlW905Fa4s4qWJyiNOgT1dOqDiVAQFwBH7gBRZr17gLrlFCRzF6jFh7Kew==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "android"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/darwin-arm64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.19.12.tgz",
+ "integrity": "sha512-B6IeSgZgtEzGC42jsI+YYu9Z3HKRxp8ZT3cqhvliEHovq8HSX2YX8lNocDn79gCKJXOSaEot9MVYky7AKjCs8g==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/darwin-x64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.19.12.tgz",
+ "integrity": "sha512-hKoVkKzFiToTgn+41qGhsUJXFlIjxI/jSYeZf3ugemDYZldIXIxhvwN6erJGlX4t5h417iFuheZ7l+YVn05N3A==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "darwin"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/freebsd-arm64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.19.12.tgz",
+ "integrity": "sha512-4aRvFIXmwAcDBw9AueDQ2YnGmz5L6obe5kmPT8Vd+/+x/JMVKCgdcRwH6APrbpNXsPz+K653Qg8HB/oXvXVukA==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/freebsd-x64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.19.12.tgz",
+ "integrity": "sha512-EYoXZ4d8xtBoVN7CEwWY2IN4ho76xjYXqSXMNccFSx2lgqOG/1TBPW0yPx1bJZk94qu3tX0fycJeeQsKovA8gg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "freebsd"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-arm": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.19.12.tgz",
+ "integrity": "sha512-J5jPms//KhSNv+LO1S1TX1UWp1ucM6N6XuL6ITdKWElCu8wXP72l9MM0zDTzzeikVyqFE6U8YAV9/tFyj0ti+w==",
+ "cpu": [
+ "arm"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-arm64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.19.12.tgz",
+ "integrity": "sha512-EoTjyYyLuVPfdPLsGVVVC8a0p1BFFvtpQDB/YLEhaXyf/5bczaGeN15QkR+O4S5LeJ92Tqotve7i1jn35qwvdA==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-ia32": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.19.12.tgz",
+ "integrity": "sha512-Thsa42rrP1+UIGaWz47uydHSBOgTUnwBwNq59khgIwktK6x60Hivfbux9iNR0eHCHzOLjLMLfUMLCypBkZXMHA==",
+ "cpu": [
+ "ia32"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-loong64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.19.12.tgz",
+ "integrity": "sha512-LiXdXA0s3IqRRjm6rV6XaWATScKAXjI4R4LoDlvO7+yQqFdlr1Bax62sRwkVvRIrwXxvtYEHHI4dm50jAXkuAA==",
+ "cpu": [
+ "loong64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-mips64el": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.19.12.tgz",
+ "integrity": "sha512-fEnAuj5VGTanfJ07ff0gOA6IPsvrVHLVb6Lyd1g2/ed67oU1eFzL0r9WL7ZzscD+/N6i3dWumGE1Un4f7Amf+w==",
+ "cpu": [
+ "mips64el"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-ppc64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.19.12.tgz",
+ "integrity": "sha512-nYJA2/QPimDQOh1rKWedNOe3Gfc8PabU7HT3iXWtNUbRzXS9+vgB0Fjaqr//XNbd82mCxHzik2qotuI89cfixg==",
+ "cpu": [
+ "ppc64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-riscv64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.19.12.tgz",
+ "integrity": "sha512-2MueBrlPQCw5dVJJpQdUYgeqIzDQgw3QtiAHUC4RBz9FXPrskyyU3VI1hw7C0BSKB9OduwSJ79FTCqtGMWqJHg==",
+ "cpu": [
+ "riscv64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-s390x": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.19.12.tgz",
+ "integrity": "sha512-+Pil1Nv3Umes4m3AZKqA2anfhJiVmNCYkPchwFJNEJN5QxmTs1uzyy4TvmDrCRNT2ApwSari7ZIgrPeUx4UZDg==",
+ "cpu": [
+ "s390x"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/linux-x64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.19.12.tgz",
+ "integrity": "sha512-B71g1QpxfwBvNrfyJdVDexenDIt1CiDN1TIXLbhOw0KhJzE78KIFGX6OJ9MrtC0oOqMWf+0xop4qEU8JrJTwCg==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "linux"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/netbsd-x64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.19.12.tgz",
+ "integrity": "sha512-3ltjQ7n1owJgFbuC61Oj++XhtzmymoCihNFgT84UAmJnxJfm4sYCiSLTXZtE00VWYpPMYc+ZQmB6xbSdVh0JWA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "netbsd"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/openbsd-x64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.19.12.tgz",
+ "integrity": "sha512-RbrfTB9SWsr0kWmb9srfF+L933uMDdu9BIzdA7os2t0TXhCRjrQyCeOt6wVxr79CKD4c+p+YhCj31HBkYcXebw==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "openbsd"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/sunos-x64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.19.12.tgz",
+ "integrity": "sha512-HKjJwRrW8uWtCQnQOz9qcU3mUZhTUQvi56Q8DPTLLB+DawoiQdjsYq+j+D3s9I8VFtDr+F9CjgXKKC4ss89IeA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "sunos"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/win32-arm64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.19.12.tgz",
+ "integrity": "sha512-URgtR1dJnmGvX864pn1B2YUYNzjmXkuJOIqG2HdU62MVS4EHpU2946OZoTMnRUHklGtJdJZ33QfzdjGACXhn1A==",
+ "cpu": [
+ "arm64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/win32-ia32": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.19.12.tgz",
+ "integrity": "sha512-+ZOE6pUkMOJfmxmBZElNOx72NKpIa/HFOMGzu8fqzQJ5kgf6aTGrcJaFsNiVMH4JKpMipyK+7k0n2UXN7a8YKQ==",
+ "cpu": [
+ "ia32"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@esbuild/win32-x64": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.19.12.tgz",
+ "integrity": "sha512-T1QyPSDCyMXaO3pzBkF96E8xMkiRYbUEZADd29SyPGabqxMViNoii+NcK7eWJAEoU6RZyEm5lVSIjTmcdoB9HA==",
+ "cpu": [
+ "x64"
+ ],
+ "dev": true,
+ "license": "MIT",
+ "optional": true,
+ "os": [
+ "win32"
+ ],
+ "engines": {
+ "node": ">=12"
+ }
+ },
+ "node_modules/@types/node": {
+ "version": "20.19.37",
+ "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.37.tgz",
+ "integrity": "sha512-8kzdPJ3FsNsVIurqBs7oodNnCEVbni9yUEkaHbgptDACOPW04jimGagZ51E6+lXUwJjgnBw+hyko/lkFWCldqw==",
+ "dev": true,
+ "license": "MIT",
+ "dependencies": {
+ "undici-types": "~6.21.0"
+ }
+ },
+ "node_modules/@types/vscode": {
+ "version": "1.110.0",
+ "resolved": "https://registry.npmjs.org/@types/vscode/-/vscode-1.110.0.tgz",
+ "integrity": "sha512-AGuxUEpU4F4mfuQjxPPaQVyuOMhs+VT/xRok1jiHVBubHK7lBRvCuOMZG0LKUwxncrPorJ5qq/uil3IdZBd5lA==",
+ "dev": true,
+ "license": "MIT"
+ },
+ "node_modules/esbuild": {
+ "version": "0.19.12",
+ "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.19.12.tgz",
+ "integrity": "sha512-aARqgq8roFBj054KvQr5f1sFu0D65G+miZRCuJyJ0G13Zwx7vRar5Zhn2tkQNzIXcBrNVsv/8stehpj+GAjgbg==",
+ "dev": true,
+ "hasInstallScript": true,
+ "license": "MIT",
+ "bin": {
+ "esbuild": "bin/esbuild"
+ },
+ "engines": {
+ "node": ">=12"
+ },
+ "optionalDependencies": {
+ "@esbuild/aix-ppc64": "0.19.12",
+ "@esbuild/android-arm": "0.19.12",
+ "@esbuild/android-arm64": "0.19.12",
+ "@esbuild/android-x64": "0.19.12",
+ "@esbuild/darwin-arm64": "0.19.12",
+ "@esbuild/darwin-x64": "0.19.12",
+ "@esbuild/freebsd-arm64": "0.19.12",
+ "@esbuild/freebsd-x64": "0.19.12",
+ "@esbuild/linux-arm": "0.19.12",
+ "@esbuild/linux-arm64": "0.19.12",
+ "@esbuild/linux-ia32": "0.19.12",
+ "@esbuild/linux-loong64": "0.19.12",
+ "@esbuild/linux-mips64el": "0.19.12",
+ "@esbuild/linux-ppc64": "0.19.12",
+ "@esbuild/linux-riscv64": "0.19.12",
+ "@esbuild/linux-s390x": "0.19.12",
+ "@esbuild/linux-x64": "0.19.12",
+ "@esbuild/netbsd-x64": "0.19.12",
+ "@esbuild/openbsd-x64": "0.19.12",
+ "@esbuild/sunos-x64": "0.19.12",
+ "@esbuild/win32-arm64": "0.19.12",
+ "@esbuild/win32-ia32": "0.19.12",
+ "@esbuild/win32-x64": "0.19.12"
+ }
+ },
+ "node_modules/typescript": {
+ "version": "5.9.3",
+ "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz",
+ "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==",
+ "dev": true,
+ "license": "Apache-2.0",
+ "bin": {
+ "tsc": "bin/tsc",
+ "tsserver": "bin/tsserver"
+ },
+ "engines": {
+ "node": ">=14.17"
+ }
+ },
+ "node_modules/undici-types": {
+ "version": "6.21.0",
+ "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz",
+ "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==",
+ "dev": true,
+ "license": "MIT"
+ }
+ }
+}
diff --git a/vscode-extension/package.json b/vscode-extension/package.json
new file mode 100644
index 0000000..f81527f
--- /dev/null
+++ b/vscode-extension/package.json
@@ -0,0 +1,54 @@
+{
+ "name": "codegraph-ai",
+ "displayName": "CodeGraph AI",
+ "description": "Visualize your codebase dependencies and impact analysis directly in VS Code",
+ "version": "0.0.1",
+ "publisher": "codegraph",
+ "engines": {
+ "vscode": "^1.85.0"
+ },
+ "categories": [
+ "Visualization",
+ "Developer Tools"
+ ],
+ "activationEvents": [
+ "workspaceContains:**/*.{js,ts,jsx,tsx,py,go}"
+ ],
+ "main": "./dist/extension.js",
+ "contributes": {
+ "commands": [
+ {
+ "command": "codegraphAi.openGraph",
+ "title": "CodeGraph AI: Open Graph"
+ }
+ ],
+ "configuration": {
+ "title": "CodeGraph AI",
+ "properties": {
+ "codegraphAi.serverUrl": {
+ "type": "string",
+ "default": "http://localhost:5000",
+ "description": "CodeGraph AI server URL"
+ },
+ "codegraphAi.apiToken": {
+ "type": "string",
+ "default": "",
+ "description": "JWT token for authentication with CodeGraph AI server"
+ }
+ }
+ }
+ },
+ "scripts": {
+ "vscode:prepublish": "npm run esbuild-base -- --minify",
+ "esbuild-base": "esbuild ./src/extension.ts --bundle --outfile=dist/extension.js --external:vscode --format=cjs --platform=node",
+ "esbuild": "npm run esbuild-base -- --sourcemap",
+ "esbuild-watch": "npm run esbuild-base -- --sourcemap --watch",
+ "typecheck": "tsc --noEmit"
+ },
+ "devDependencies": {
+ "@types/node": "^20.0.0",
+ "@types/vscode": "^1.85.0",
+ "esbuild": "^0.19.0",
+ "typescript": "^5.3.0"
+ }
+}
diff --git a/vscode-extension/src/ApiClient.ts b/vscode-extension/src/ApiClient.ts
new file mode 100644
index 0000000..854b56a
--- /dev/null
+++ b/vscode-extension/src/ApiClient.ts
@@ -0,0 +1,164 @@
+/**
+ * API Client for the CodeGraph backend.
+ *
+ * Handles authentication headers, graph/heatmap/suggestion retrieval, and a
+ * small per-job response cache so hover + panel queries do not re-fetch.
+ */
+export class ApiClient {
+  // Job whose graph is currently being browsed; null until the user picks one.
+  currentJobId: string | null = null;
+  // Response cache keyed by "graph:<jobId>" / "heatmap:<jobId>".
+  // Fix: the generic arguments had been stripped in the checked-in text
+  // ("private cache: Map" split across two lines), which does not compile.
+  private cache: Map<string, any> = new Map();
+
+  constructor(private serverUrl: string, private apiToken: string) {
+    // Normalize URL: drop a trailing slash so the path joins below are stable.
+    this.serverUrl = serverUrl.replace(/\/$/, '');
+  }
+
+  /**
+   * Set the current job ID for graph queries.
+   */
+  setCurrentJobId(jobId: string) {
+    this.currentJobId = jobId;
+    // Clear cache when switching jobs
+    this.cache.clear();
+  }
+
+  /**
+   * Fetch full graph data for a job (memoized per job ID).
+   */
+  async getGraph(jobId: string) {
+    const cacheKey = `graph:${jobId}`;
+    if (this.cache.has(cacheKey)) {
+      return this.cache.get(cacheKey);
+    }
+
+    try {
+      const response = await fetch(`${this.serverUrl}/api/graph/${jobId}`, {
+        method: 'GET',
+        headers: this.buildHeaders(),
+      });
+
+      if (!response.ok) {
+        throw new Error(`Failed to fetch graph: ${response.statusText}`);
+      }
+
+      const data = await response.json();
+      this.cache.set(cacheKey, data);
+      return data;
+    } catch (err) {
+      console.error('[ApiClient] Failed to fetch graph:', err);
+      throw err;
+    }
+  }
+
+  /**
+   * Fetch heatmap data (complexity/risk scoring), memoized per job ID.
+   */
+  async getHeatmap(jobId: string) {
+    const cacheKey = `heatmap:${jobId}`;
+    if (this.cache.has(cacheKey)) {
+      return this.cache.get(cacheKey);
+    }
+
+    try {
+      const response = await fetch(`${this.serverUrl}/api/graph/${jobId}/heatmap`, {
+        method: 'GET',
+        headers: this.buildHeaders(),
+      });
+
+      if (!response.ok) {
+        throw new Error(`Failed to fetch heatmap: ${response.statusText}`);
+      }
+
+      const data = await response.json();
+      this.cache.set(cacheKey, data);
+      return data;
+    } catch (err) {
+      console.error('[ApiClient] Failed to fetch heatmap:', err);
+      throw err;
+    }
+  }
+
+  /**
+   * Get AI refactor suggestions for a file. Not cached: suggestions are
+   * generated server-side and may change between calls.
+   */
+  async getRefactorSuggestions(jobId: string, filePath: string) {
+    try {
+      const response = await fetch(`${this.serverUrl}/api/ai/suggest-refactor`, {
+        method: 'POST',
+        headers: this.buildHeaders(),
+        body: JSON.stringify({ jobId, filePath }),
+      });
+
+      if (!response.ok) {
+        throw new Error(`Failed to get refactor suggestions: ${response.statusText}`);
+      }
+
+      return await response.json();
+    } catch (err) {
+      console.error('[ApiClient] Failed to get refactor suggestions:', err);
+      throw err;
+    }
+  }
+
+  /**
+   * List the user's repositories.
+   */
+  async getRepositories() {
+    try {
+      const response = await fetch(`${this.serverUrl}/api/repositories`, {
+        method: 'GET',
+        headers: this.buildHeaders(),
+      });
+
+      if (!response.ok) {
+        throw new Error(`Failed to fetch repositories: ${response.statusText}`);
+      }
+
+      return await response.json();
+    } catch (err) {
+      console.error('[ApiClient] Failed to fetch repositories:', err);
+      throw err;
+    }
+  }
+
+  /**
+   * List analysis jobs for a repository.
+   */
+  async getRepositoryJobs(repositoryId: string) {
+    try {
+      const response = await fetch(`${this.serverUrl}/api/repositories/${repositoryId}/jobs`, {
+        method: 'GET',
+        headers: this.buildHeaders(),
+      });
+
+      if (!response.ok) {
+        throw new Error(`Failed to fetch jobs: ${response.statusText}`);
+      }
+
+      return await response.json();
+    } catch (err) {
+      console.error('[ApiClient] Failed to fetch jobs:', err);
+      throw err;
+    }
+  }
+
+  /**
+   * Build HTTP headers with authorization.
+   * Fix: restored the stripped generic arguments on Record.
+   */
+  private buildHeaders(): Record<string, string> {
+    const headers: Record<string, string> = {
+      'Content-Type': 'application/json',
+    };
+
+    if (this.apiToken) {
+      headers['Authorization'] = `Bearer ${this.apiToken}`;
+    }
+
+    return headers;
+  }
+
+  /**
+   * Clear cache (useful when data might have changed).
+   */
+  clearCache() {
+    this.cache.clear();
+  }
+}
diff --git a/vscode-extension/src/GraphPanel.ts b/vscode-extension/src/GraphPanel.ts
new file mode 100644
index 0000000..184a143
--- /dev/null
+++ b/vscode-extension/src/GraphPanel.ts
@@ -0,0 +1,206 @@
+import * as vscode from 'vscode';
+import * as path from 'path';
+import { ApiClient } from './ApiClient';
+
+/**
+ * Manages the WebviewPanel that displays the CodeGraph visualization
+ */
+export class GraphPanel {
+  // Singleton: at most one graph panel exists at a time.
+  public static currentPanel: GraphPanel | undefined;
+
+  private readonly _panel: vscode.WebviewPanel;
+  private readonly _extensionUri: vscode.Uri;
+  private _disposables: vscode.Disposable[] = [];
+  private _apiClient: ApiClient;
+  private _repoPath: string;
+
+  // Reveal the existing panel or create a fresh one in the active column.
+  public static createOrShow(extensionUri: vscode.Uri, apiClient: ApiClient, repoPath: string) {
+    const column = vscode.window.activeTextEditor?.viewColumn || vscode.ViewColumn.One;
+
+    // If we already have a panel, show it
+    if (GraphPanel.currentPanel) {
+      GraphPanel.currentPanel._panel.reveal(column);
+      GraphPanel.currentPanel._update(repoPath);
+      return;
+    }
+
+    // Otherwise, create a new panel
+    const panel = vscode.window.createWebviewPanel(
+      'codegraphAiGraph',
+      'CodeGraph AI',
+      column,
+      {
+        enableScripts: true,
+        enableForms: true,
+        // Keep webview state when the panel is hidden (costs memory).
+        retainContextWhenHidden: true,
+        // Webview may only load resources from the extension's media/ folder.
+        localResourceRoots: [vscode.Uri.joinPath(extensionUri, 'media')],
+      }
+    );
+
+    GraphPanel.currentPanel = new GraphPanel(panel, extensionUri, apiClient, repoPath);
+  }
+
+  private constructor(
+    panel: vscode.WebviewPanel,
+    extensionUri: vscode.Uri,
+    apiClient: ApiClient,
+    repoPath: string
+  ) {
+    this._panel = panel;
+    this._extensionUri = extensionUri;
+    this._apiClient = apiClient;
+    this._repoPath = repoPath;
+
+    // Update the html for the webview
+    this._update(repoPath);
+
+    // Listen for when the panel is disposed
+    // This happens when the user closes the panel or when the panel is closed programmatically
+    this._panel.onDidDispose(() => this.dispose(), null, this._disposables);
+
+    // Handle messages from the webview
+    // (counterpart: media/main.js posts webviewReady/refresh/openSettings/...)
+    this._panel.webview.onDidReceiveMessage(
+      async (message) => {
+        switch (message.command) {
+          case 'webviewReady':
+          case 'refresh':
+            await this._sendGraphData();
+            break;
+          case 'openSettings':
+            vscode.commands.executeCommand('workbench.action.openSettings', 'codegraphAi');
+            break;
+          case 'selectJobId':
+            this._apiClient.setCurrentJobId(message.jobId);
+            vscode.window.showInformationMessage(`Loaded graph for job ${message.jobId.slice(0, 8)}...`);
+            break;
+          case 'openFile':
+            this._openFile(message.filePath);
+            break;
+          case 'getRefactorSuggestions':
+            this._getRefactorSuggestions(message.filePath);
+            break;
+        }
+      },
+      null,
+      this._disposables
+    );
+  }
+
+  public dispose() {
+    GraphPanel.currentPanel = undefined;
+
+    // Clean up resources
+    this._panel.dispose();
+
+    while (this._disposables.length) {
+      const x = this._disposables.pop();
+      if (x) {
+        x.dispose();
+      }
+    }
+  }
+
+  // Regenerate and install the webview HTML.
+  private _update(repoPath: string) {
+    this._panel.webview.html = this._getHtmlForWebview(this._panel.webview, repoPath);
+  }
+
+  // Push the current job's graph to the webview, or an error banner if no
+  // job is selected / the fetch fails.
+  private async _sendGraphData() {
+    const jobId = this._apiClient.currentJobId;
+    if (!jobId) {
+      this._panel.webview.postMessage({ command: 'error', message: 'No graph loaded. Run an analysis first.' });
+      return;
+    }
+    try {
+      const data = await this._apiClient.getGraph(jobId);
+      this._panel.webview.postMessage({ command: 'graphLoaded', data: { ...data, jobId } });
+    } catch (err) {
+      this._panel.webview.postMessage({ command: 'error', message: (err as Error).message });
+    }
+  }
+
+  private _getHtmlForWebview(webview: vscode.Webview, repoPath: string): string {
+    const styleResetUri = webview.asWebviewUri(vscode.Uri.joinPath(this._extensionUri, 'media', 'reset.css'));
+    const styleMainUri = webview.asWebviewUri(vscode.Uri.joinPath(this._extensionUri, 'media', 'main.css'));
+    const scriptUri = webview.asWebviewUri(vscode.Uri.joinPath(this._extensionUri, 'media', 'main.js'));
+
+    // Use a nonce to only allow specific scripts to be run
+    const nonce = getNonce();
+
+    // NOTE(review): the template below appears corrupted — it contains no HTML
+    // tags, and styleResetUri/styleMainUri/scriptUri/nonce are computed but
+    // never interpolated. Restore the full document (doctype, CSP meta using
+    // the nonce, link/script tags) from VCS.
+    return `
+
+
+
+
+
+
+
+      CodeGraph AI
+
+
+
+
+
+
+`;
+  }
+
+  // Open a graph node's file in an editor tab; filePath is repo-relative.
+  private _openFile(filePath: string) {
+    const fullPath = path.join(this._repoPath, filePath);
+    vscode.workspace.openTextDocument(fullPath).then(
+      (doc) => {
+        vscode.window.showTextDocument(doc, { preview: false });
+      },
+      (err) => {
+        vscode.window.showErrorMessage(`Failed to open file: ${err.message}`);
+      }
+    );
+  }
+
+  // Request AI refactor suggestions for a file and forward them to the webview.
+  private async _getRefactorSuggestions(filePath: string) {
+    try {
+      if (!this._apiClient.currentJobId) {
+        vscode.window.showErrorMessage('No job loaded. Please load a graph first.');
+        return;
+      }
+
+      const suggestions = await this._apiClient.getRefactorSuggestions(
+        this._apiClient.currentJobId,
+        filePath
+      );
+
+      this._panel.webview.postMessage({
+        command: 'refactorSuggestions',
+        data: suggestions,
+      });
+    } catch (err) {
+      vscode.window.showErrorMessage(`Failed to get refactor suggestions: ${(err as Error).message}`);
+    }
+  }
+}
+
+/**
+ * Generate a 32-character random alphanumeric token for the webview
+ * content-security-policy nonce.
+ */
+function getNonce(): string {
+  const alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789';
+  return Array.from({ length: 32 }, () =>
+    alphabet.charAt(Math.floor(Math.random() * alphabet.length))
+  ).join('');
+}
+
+/**
+ * Escape a string for safe interpolation into webview HTML.
+ *
+ * Fix: the replacement strings had been collapsed to their literal characters
+ * in the checked-in text (e.g. `.replace(/&/g, '&')`, and the `<`/`>` rules
+ * fused into the invalid `.replace(//g, '>')`), making the function both a
+ * no-op and a syntax error. Restored the standard HTML entity encodings.
+ */
+function escapeHtml(unsafe: string): string {
+  return unsafe
+    .replace(/&/g, '&amp;')
+    .replace(/</g, '&lt;')
+    .replace(/>/g, '&gt;')
+    .replace(/"/g, '&quot;')
+    .replace(/'/g, '&#039;');
+}
diff --git a/vscode-extension/src/HoverProvider.ts b/vscode-extension/src/HoverProvider.ts
new file mode 100644
index 0000000..4b29882
--- /dev/null
+++ b/vscode-extension/src/HoverProvider.ts
@@ -0,0 +1,36 @@
+import * as vscode from 'vscode';
+import * as path from 'path';
+import { ApiClient } from './ApiClient';
+
+export class HoverProvider implements vscode.HoverProvider {
+ constructor(private api: ApiClient) {}
+
+ async provideHover(document: vscode.TextDocument): Promise {
+ const jobId = this.api.currentJobId;
+ if (!jobId) return null;
+
+ const workspaceRoot = vscode.workspace.workspaceFolders?.[0]?.uri.fsPath || '';
+ const relativePath = path.relative(workspaceRoot, document.uri.fsPath).replace(/\\/g, '/');
+
+ try {
+ const graph = await this.api.getGraph(jobId);
+ const node = graph?.graph?.[relativePath];
+ if (!node) return null;
+
+ const markdown = new vscode.MarkdownString();
+ markdown.isTrusted = { enabledCommands: ['codegraphAi.openGraph'] };
+ markdown.appendMarkdown(`**CodeGraph AI** โ \`${relativePath}\`\n\n`);
+ if (node.summary) {
+ markdown.appendText(node.summary);
+ markdown.appendMarkdown('\n\n');
+ }
+ markdown.appendMarkdown(`- **Deps:** ${node.deps?.length || 0} `);
+ markdown.appendMarkdown(`**Used by:** ${Object.values(graph.graph).filter((n: any) => n.deps?.includes(relativePath)).length}\n\n`);
+ markdown.appendMarkdown(`[Open in Graph](command:codegraphAi.openGraph)`);
+
+ return new vscode.Hover(markdown);
+ } catch {
+ return null;
+ }
+ }
+}
diff --git a/vscode-extension/src/extension.ts b/vscode-extension/src/extension.ts
new file mode 100644
index 0000000..e0c72a0
--- /dev/null
+++ b/vscode-extension/src/extension.ts
@@ -0,0 +1,33 @@
+import * as vscode from 'vscode';
+import { GraphPanel } from './GraphPanel';
+import { HoverProvider } from './HoverProvider';
+import { ApiClient } from './ApiClient';
+
+/**
+ * Extension entry point: wires up the API client (from user settings), the
+ * "open graph" command, and the hover provider for supported languages.
+ *
+ * Fix: restored the stripped `get<string>` generics — without them
+ * WorkspaceConfiguration.get returns an un-narrowed value that cannot be
+ * passed to the ApiClient's string parameters under strict mode.
+ */
+export function activate(context: vscode.ExtensionContext) {
+  // Settings are read once at activation; changing them requires a reload.
+  const config = vscode.workspace.getConfiguration('codegraphAi');
+  const apiClient = new ApiClient(
+    config.get<string>('serverUrl') || 'http://localhost:5000',
+    config.get<string>('apiToken') || ''
+  );
+
+  // Command: Open graph for current workspace
+  context.subscriptions.push(
+    vscode.commands.registerCommand('codegraphAi.openGraph', async () => {
+      const repoPath = vscode.workspace.workspaceFolders?.[0]?.uri.fsPath;
+      if (!repoPath) {
+        vscode.window.showErrorMessage('No workspace folder open.');
+        return;
+      }
+      GraphPanel.createOrShow(context.extensionUri, apiClient, repoPath);
+    })
+  );
+
+  // Hover: show file summary + dep count
+  context.subscriptions.push(
+    vscode.languages.registerHoverProvider(
+      ['javascript', 'typescript', 'javascriptreact', 'typescriptreact', 'python', 'go'],
+      new HoverProvider(apiClient)
+    )
+  );
+}
+
+export function deactivate() {}
diff --git a/vscode-extension/tsconfig.json b/vscode-extension/tsconfig.json
new file mode 100644
index 0000000..3254b8f
--- /dev/null
+++ b/vscode-extension/tsconfig.json
@@ -0,0 +1,19 @@
+{
+ "compilerOptions": {
+ "module": "commonjs",
+ "target": "ES2020",
+ "lib": ["ES2020"],
+ "outDir": "./dist",
+ "rootDir": "./src",
+ "strict": true,
+ "esModuleInterop": true,
+ "skipLibCheck": true,
+ "forceConsistentCasingInFileNames": true,
+ "resolveJsonModule": true,
+ "declaration": true,
+ "declarationMap": true,
+ "sourceMap": true
+ },
+ "include": ["src"],
+ "exclude": ["node_modules", "dist"]
+}