diff --git a/.env.example b/.env.example new file mode 100644 index 00000000..340f3d84 --- /dev/null +++ b/.env.example @@ -0,0 +1,22 @@ +# Mike — Docker Compose environment +# Copy to .env and fill in the values before running `docker compose up -d` + +# ---- Required ---- +JWT_SECRET=change-me-to-a-long-random-secret-string +POSTGRES_PASSWORD=change-me-to-a-strong-password + +# ---- LLM API keys (set at least one) ---- +ANTHROPIC_API_KEY= +GEMINI_API_KEY= +OPENAI_API_KEY= +# Local LLM via Ollama (leave empty to disable) +OLLAMA_BASE_URL= + +# ---- Optional overrides ---- +# Public URL at which the backend is reachable FROM the browser. +# Change this if you expose Mike behind a reverse proxy. +NEXT_PUBLIC_API_BASE_URL=http://localhost:3001 +BACKEND_URL=http://localhost:3001 +FRONTEND_URL=http://localhost:3000 +BACKEND_PORT=3001 +FRONTEND_PORT=3000 diff --git a/backend/.env.example b/backend/.env.example index 1db370a9..0258a32e 100644 --- a/backend/.env.example +++ b/backend/.env.example @@ -1,14 +1,44 @@ PORT=3001 FRONTEND_URL=http://localhost:3000 + +# --------------------------------------------------------------------------- +# Auth & database +# --------------------------------------------------------------------------- +# MODE A — Supabase (default, cloud-hosted) SUPABASE_URL=https://your-project.supabase.co SUPABASE_SECRET_KEY=your-supabase-service-role-key -R2_ENDPOINT_URL=https://your-account-id.r2.cloudflarestorage.com -R2_ACCESS_KEY_ID=your-r2-access-key -R2_SECRET_ACCESS_KEY=your-r2-secret-key -R2_BUCKET_NAME=mike +# MODE B — Local PostgreSQL (set AUTH_MODE=local and DATABASE_URL instead) +# AUTH_MODE=local +# DATABASE_URL=postgres://mike:secret@localhost:5432/mike +# JWT_SECRET=change-me-to-a-long-random-string +# JWT_EXPIRY_DAYS=30 + +# --------------------------------------------------------------------------- +# File storage +# --------------------------------------------------------------------------- +# By default files are stored locally (no external storage needed). 
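+# Stored files are served back through expiring signed links under BACKEND_URL/local-files/.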
+# LOCAL_STORAGE_PATH=./uploads +# BACKEND_URL=http://localhost:3001 + +# Optional: Cloudflare R2 (takes priority over local storage when configured) +# R2_ENDPOINT_URL=https://your-account-id.r2.cloudflarestorage.com +# R2_ACCESS_KEY_ID=your-r2-access-key +# R2_SECRET_ACCESS_KEY=your-r2-secret-key +# R2_BUCKET_NAME=mike +# --------------------------------------------------------------------------- +# LLM API keys +# --------------------------------------------------------------------------- GEMINI_API_KEY=your-gemini-key ANTHROPIC_API_KEY=your-anthropic-key OPENROUTER_API_KEY=your-openrouter-key RESEND_API_KEY=your-resend-key + +# Local LLM via Ollama (OpenAI-compatible) +OLLAMA_BASE_URL=http://localhost:11434/v1 +OLLAMA_API_KEY= + +# Local LLM via llama.cpp server (OpenAI-compatible) +LLAMACPP_BASE_URL=http://localhost:8080/v1 +LLAMACPP_API_KEY= diff --git a/backend/Dockerfile b/backend/Dockerfile new file mode 100644 index 00000000..374a7f2c --- /dev/null +++ b/backend/Dockerfile @@ -0,0 +1,24 @@ +FROM node:22-slim AS deps +WORKDIR /app +COPY package*.json ./ +RUN npm ci --omit=dev + +FROM node:22-slim AS build +WORKDIR /app +COPY package*.json tsconfig.json ./ +RUN npm ci +COPY src ./src +RUN npm run build + +FROM node:22-slim AS runtime +# libreoffice is needed for DOCX→PDF conversion +RUN apt-get update && apt-get install -y --no-install-recommends \ + libreoffice \ + && rm -rf /var/lib/apt/lists/* +WORKDIR /app +COPY --from=deps /app/node_modules ./node_modules +COPY --from=build /app/dist ./dist +COPY package.json ./ +ENV NODE_ENV=production +EXPOSE 3001 +CMD ["node", "dist/index.js"] diff --git a/backend/package-lock.json b/backend/package-lock.json index effa2ade..ead3c697 100644 --- a/backend/package-lock.json +++ b/backend/package-lock.json @@ -7,12 +7,14 @@ "": { "name": "mike-backend", "version": "1.0.0", + "license": "AGPL-3.0-only", "dependencies": { "@anthropic-ai/sdk": "^0.90.0", "@aws-sdk/client-s3": "^3.787.0", "@aws-sdk/s3-request-presigner": "^3.787.0", "@google/genai": "^1.50.1", "@supabase/supabase-js": "^2.49.4", + "bcrypt": "^6.0.0", "cors": "^2.8.5", "docx": "^9.5.0", "dotenv": "^17.4.1", @@ -21,18 +23,23 @@ "fast-diff": "^1.3.0", "fast-xml-parser": "^5.7.1", "helmet": "^8.1.0", + "jsonwebtoken": "^9.0.3", "jszip": "^3.10.1", "libreoffice-convert": "^1.6.0", "mammoth": "^1.9.0", "multer": "^1.4.5-lts.2", "pdfjs-dist": "^4.10.38", + "pg": "^8.20.0", "resend": "^4.5.1" }, "devDependencies": { + "@types/bcrypt": "^6.0.0", "@types/cors": "^2.8.17", "@types/express": "^4.17.21", + "@types/jsonwebtoken": "^9.0.10", "@types/multer": "^1.4.12", "@types/node": "^22.14.1", + "@types/pg": "^8.20.0", "prettier": "^3.8.1", "tsx": "^4.19.3", "typescript": "^5.8.3" @@ -2599,6 +2606,16 @@ "node": ">=20.0.0" } }, + "node_modules/@types/bcrypt": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/@types/bcrypt/-/bcrypt-6.0.0.tgz", + "integrity": "sha512-/oJGukuH3D2+D+3H4JWLaAsJ/ji86dhRidzZ/Od7H/i8g+aCmvkeCc6Ni/f9uxGLSQVCRZkX2/lqEFG2BvWtlQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, "node_modules/@types/body-parser": { "version": "1.19.6", "resolved": "https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.6.tgz", @@ -2663,6 +2680,17 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/jsonwebtoken": { + "version": "9.0.10", + "resolved": "https://registry.npmjs.org/@types/jsonwebtoken/-/jsonwebtoken-9.0.10.tgz", + "integrity": 
"sha512-asx5hIG9Qmf/1oStypjanR7iKTv0gXQ1Ov/jfrX6kS/EO0OFni8orbmGCn0672NHR3kXHwpAwR+B368ZGN/2rA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/ms": "*", + "@types/node": "*" + } + }, "node_modules/@types/mime": { "version": "1.3.5", "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", @@ -2670,6 +2698,13 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/ms": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@types/ms/-/ms-2.1.0.tgz", + "integrity": "sha512-GsCCIZDE/p3i96vtEqx+7dBUGXrc7zeSK3wwPHIaRThS+9OhWIXRqzs4d6k1SVU8g91DrNRWxWUGhp5KXQb2VA==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/multer": { "version": "1.4.13", "resolved": "https://registry.npmjs.org/@types/multer/-/multer-1.4.13.tgz", @@ -2689,6 +2724,18 @@ "undici-types": "~6.21.0" } }, + "node_modules/@types/pg": { + "version": "8.20.0", + "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.20.0.tgz", + "integrity": "sha512-bEPFOaMAHTEP1EzpvHTbmwR8UsFyHSKsRisLIHVMXnpNefSbGA1bD6CVy+qKjGSqmZqNqBDV2azOBo8TgkcVow==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "pg-protocol": "*", + "pg-types": "^2.2.0" + } + }, "node_modules/@types/qs": { "version": "6.15.0", "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.15.0.tgz", @@ -2829,6 +2876,20 @@ ], "license": "MIT" }, + "node_modules/bcrypt": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/bcrypt/-/bcrypt-6.0.0.tgz", + "integrity": "sha512-cU8v/EGSrnH+HnxV2z0J7/blxH8gq7Xh2JFT6Aroax7UohdmiJJlxApMxtKfuI7z68NvvVcmR78k2LbT6efhRg==", + "hasInstallScript": true, + "license": "MIT", + "dependencies": { + "node-addon-api": "^8.3.0", + "node-gyp-build": "^4.8.4" + }, + "engines": { + "node": ">= 18" + } + }, "node_modules/bignumber.js": { "version": "9.3.1", "resolved": "https://registry.npmjs.org/bignumber.js/-/bignumber.js-9.3.1.tgz", @@ -3848,6 +3909,34 @@ "node": ">=16" } }, + "node_modules/jsonwebtoken": { + "version": "9.0.3", + "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.3.tgz", + "integrity": "sha512-MT/xP0CrubFRNLNKvxJ2BYfy53Zkm++5bX9dtuPbqAeQpTVe0MQTFhao8+Cp//EmJp244xt6Drw/GVEGCUj40g==", + "license": "MIT", + "dependencies": { + "jws": "^4.0.1", + "lodash.includes": "^4.3.0", + "lodash.isboolean": "^3.0.3", + "lodash.isinteger": "^4.0.4", + "lodash.isnumber": "^3.0.3", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", + "lodash.once": "^4.0.0", + "ms": "^2.1.1", + "semver": "^7.5.4" + }, + "engines": { + "node": ">=12", + "npm": ">=6" + } + }, + "node_modules/jsonwebtoken/node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "license": "MIT" + }, "node_modules/jszip": { "version": "3.10.1", "resolved": "https://registry.npmjs.org/jszip/-/jszip-3.10.1.tgz", @@ -3912,6 +4001,48 @@ "immediate": "~3.0.5" } }, + "node_modules/lodash.includes": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", + "integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==", + "license": "MIT" + }, + "node_modules/lodash.isboolean": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", + "integrity": 
"sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==", + "license": "MIT" + }, + "node_modules/lodash.isinteger": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz", + "integrity": "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==", + "license": "MIT" + }, + "node_modules/lodash.isnumber": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz", + "integrity": "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==", + "license": "MIT" + }, + "node_modules/lodash.isplainobject": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", + "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==", + "license": "MIT" + }, + "node_modules/lodash.isstring": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", + "integrity": "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==", + "license": "MIT" + }, + "node_modules/lodash.once": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", + "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==", + "license": "MIT" + }, "node_modules/long": { "version": "5.3.2", "resolved": "https://registry.npmjs.org/long/-/long-5.3.2.tgz", @@ -4101,6 +4232,15 @@ "node": ">= 0.6" } }, + "node_modules/node-addon-api": { + "version": "8.7.0", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-8.7.0.tgz", + "integrity": "sha512-9MdFxmkKaOYVTV+XVRG8ArDwwQ77XIgIPyKASB1k3JPq3M8fGQQQE3YpMOrKm6g//Ktx8ivZr8xo1Qmtqub+GA==", + "license": "MIT", + "engines": { + "node": "^18 || ^20 || >= 21" + } + }, "node_modules/node-domexception": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/node-domexception/-/node-domexception-1.0.0.tgz", @@ -4139,6 +4279,17 @@ "url": "https://opencollective.com/node-fetch" } }, + "node_modules/node-gyp-build": { + "version": "4.8.4", + "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.8.4.tgz", + "integrity": "sha512-LA4ZjwlnUblHVgq0oBF3Jl/6h/Nvs5fzBLwdEF4nuxnFdsfajde4WfxtJr3CaiH+F6ewcIB/q4jQ4UzPyid+CQ==", + "license": "MIT", + "bin": { + "node-gyp-build": "bin.js", + "node-gyp-build-optional": "optional.js", + "node-gyp-build-test": "build-test.js" + } + }, "node_modules/object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", @@ -4270,6 +4421,134 @@ "url": "https://ko-fi.com/killymxi" } }, + "node_modules/pg": { + "version": "8.20.0", + "resolved": "https://registry.npmjs.org/pg/-/pg-8.20.0.tgz", + "integrity": "sha512-ldhMxz2r8fl/6QkXnBD3CR9/xg694oT6DZQ2s6c/RI28OjtSOpxnPrUCGOBJ46RCUxcWdx3p6kw/xnDHjKvaRA==", + "license": "MIT", + "dependencies": { + "pg-connection-string": "^2.12.0", + "pg-pool": "^3.13.0", + "pg-protocol": "^1.13.0", + "pg-types": "2.2.0", + "pgpass": "1.0.5" + }, + "engines": { + "node": ">= 16.0.0" + }, + "optionalDependencies": { + "pg-cloudflare": "^1.3.0" + }, + "peerDependencies": { + "pg-native": ">=3.0.1" + }, + "peerDependenciesMeta": { + "pg-native": { + "optional": true + } + } + }, + "node_modules/pg-cloudflare": { + "version": 
"1.3.0", + "resolved": "https://registry.npmjs.org/pg-cloudflare/-/pg-cloudflare-1.3.0.tgz", + "integrity": "sha512-6lswVVSztmHiRtD6I8hw4qP/nDm1EJbKMRhf3HCYaqud7frGysPv7FYJ5noZQdhQtN2xJnimfMtvQq21pdbzyQ==", + "license": "MIT", + "optional": true + }, + "node_modules/pg-connection-string": { + "version": "2.12.0", + "resolved": "https://registry.npmjs.org/pg-connection-string/-/pg-connection-string-2.12.0.tgz", + "integrity": "sha512-U7qg+bpswf3Cs5xLzRqbXbQl85ng0mfSV/J0nnA31MCLgvEaAo7CIhmeyrmJpOr7o+zm0rXK+hNnT5l9RHkCkQ==", + "license": "MIT" + }, + "node_modules/pg-int8": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/pg-int8/-/pg-int8-1.0.1.tgz", + "integrity": "sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw==", + "license": "ISC", + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/pg-pool": { + "version": "3.13.0", + "resolved": "https://registry.npmjs.org/pg-pool/-/pg-pool-3.13.0.tgz", + "integrity": "sha512-gB+R+Xud1gLFuRD/QgOIgGOBE2KCQPaPwkzBBGC9oG69pHTkhQeIuejVIk3/cnDyX39av2AxomQiyPT13WKHQA==", + "license": "MIT", + "peerDependencies": { + "pg": ">=8.0" + } + }, + "node_modules/pg-protocol": { + "version": "1.13.0", + "resolved": "https://registry.npmjs.org/pg-protocol/-/pg-protocol-1.13.0.tgz", + "integrity": "sha512-zzdvXfS6v89r6v7OcFCHfHlyG/wvry1ALxZo4LqgUoy7W9xhBDMaqOuMiF3qEV45VqsN6rdlcehHrfDtlCPc8w==", + "license": "MIT" + }, + "node_modules/pg-types": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/pg-types/-/pg-types-2.2.0.tgz", + "integrity": "sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA==", + "license": "MIT", + "dependencies": { + "pg-int8": "1.0.1", + "postgres-array": "~2.0.0", + "postgres-bytea": "~1.0.0", + "postgres-date": "~1.0.4", + "postgres-interval": "^1.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/pgpass": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/pgpass/-/pgpass-1.0.5.tgz", + "integrity": "sha512-FdW9r/jQZhSeohs1Z3sI1yxFQNFvMcnmfuj4WBMUTxOrAyLMaTcE1aAMBiTlbMNaXvBCQuVi0R7hd8udDSP7ug==", + "license": "MIT", + "dependencies": { + "split2": "^4.1.0" + } + }, + "node_modules/postgres-array": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/postgres-array/-/postgres-array-2.0.0.tgz", + "integrity": "sha512-VpZrUqU5A69eQyW2c5CA1jtLecCsN2U/bD6VilrFDWq5+5UIEVO7nazS3TEcHf1zuPYO/sqGvUvW62g86RXZuA==", + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/postgres-bytea": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/postgres-bytea/-/postgres-bytea-1.0.1.tgz", + "integrity": "sha512-5+5HqXnsZPE65IJZSMkZtURARZelel2oXUEO8rH83VS/hxH5vv1uHquPg5wZs8yMAfdv971IU+kcPUczi7NVBQ==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-date": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/postgres-date/-/postgres-date-1.0.7.tgz", + "integrity": "sha512-suDmjLVQg78nMK2UZ454hAG+OAW+HQPZ6n++TNDUX+L0+uUlLywnoxJKDou51Zm+zTCjrCl0Nq6J9C5hP9vK/Q==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/postgres-interval": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/postgres-interval/-/postgres-interval-1.2.0.tgz", + "integrity": "sha512-9ZhXKM/rw350N1ovuWHbGxnGh/SNJ4cnxHiM0rxE4VN41wsg8P8zWn9hv/buK00RP4WvlOyr/RBDiptyxVbkZQ==", + "license": "MIT", + "dependencies": { + "xtend": "^4.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, 
"node_modules/prettier": { "version": "3.8.1", "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.8.1.tgz", @@ -4505,6 +4784,18 @@ "url": "https://ko-fi.com/killymxi" } }, + "node_modules/semver": { + "version": "7.8.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.8.0.tgz", + "integrity": "sha512-AcM7dV/5ul4EekoQ29Agm5vri8JNqRyj39o0qpX6vDF2GZrtutZl5RwgD1XnZjiTAfncsJhMI48QQH3sN87YNA==", + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/send": { "version": "0.19.2", "resolved": "https://registry.npmjs.org/send/-/send-0.19.2.tgz", @@ -4634,6 +4925,15 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/split2": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/split2/-/split2-4.2.0.tgz", + "integrity": "sha512-UcjcJOWknrNkF6PLX83qcHM6KHgVKNkV62Y8a5uYDVv9ydGQVwAHMKqHdJje1VTWpljG0WYpCDhrCdAOYH4TWg==", + "license": "ISC", + "engines": { + "node": ">= 10.x" + } + }, "node_modules/sprintf-js": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", diff --git a/backend/package.json b/backend/package.json index 8451ab8b..f0a875f5 100644 --- a/backend/package.json +++ b/backend/package.json @@ -13,6 +13,7 @@ "@aws-sdk/s3-request-presigner": "^3.787.0", "@google/genai": "^1.50.1", "@supabase/supabase-js": "^2.49.4", + "bcrypt": "^6.0.0", "cors": "^2.8.5", "docx": "^9.5.0", "dotenv": "^17.4.1", @@ -21,18 +22,23 @@ "fast-diff": "^1.3.0", "fast-xml-parser": "^5.7.1", "helmet": "^8.1.0", + "jsonwebtoken": "^9.0.3", "jszip": "^3.10.1", "libreoffice-convert": "^1.6.0", "mammoth": "^1.9.0", "multer": "^1.4.5-lts.2", "pdfjs-dist": "^4.10.38", + "pg": "^8.20.0", "resend": "^4.5.1" }, "devDependencies": { + "@types/bcrypt": "^6.0.0", "@types/cors": "^2.8.17", "@types/express": "^4.17.21", + "@types/jsonwebtoken": "^9.0.10", "@types/multer": "^1.4.12", "@types/node": "^22.14.1", + "@types/pg": "^8.20.0", "prettier": "^3.8.1", "tsx": "^4.19.3", "typescript": "^5.8.3" diff --git a/backend/schema.sql b/backend/schema.sql index cb505e63..c52a4d40 100644 --- a/backend/schema.sql +++ b/backend/schema.sql @@ -62,7 +62,7 @@ create trigger on_auth_user_created create table if not exists public.user_api_keys ( id uuid primary key default gen_random_uuid(), user_id uuid not null references auth.users(id) on delete cascade, - provider text not null check (provider in ('claude', 'gemini', 'openai')), + provider text not null check (provider in ('claude', 'gemini', 'openai', 'ollama')), encrypted_key text not null, iv text not null, auth_tag text not null, @@ -263,6 +263,7 @@ create table if not exists public.chat_messages ( role text not null, content jsonb, files jsonb, + workflow jsonb, annotations jsonb, created_at timestamptz not null default now() ); diff --git a/backend/src/index.ts b/backend/src/index.ts index 07b3b849..1fd99770 100644 --- a/backend/src/index.ts +++ b/backend/src/index.ts @@ -11,6 +11,8 @@ import { tabularRouter } from "./routes/tabular"; import { workflowsRouter } from "./routes/workflows"; import { userRouter } from "./routes/user"; import { downloadsRouter } from "./routes/downloads"; +import { localFilesRouter } from "./routes/localFiles"; +import { localAuthRouter } from "./routes/localAuth"; const app = express(); const PORT = process.env.PORT ?? 
3001; @@ -118,6 +120,10 @@ app.use("/workflows", workflowsRouter); app.use("/user", userRouter); app.use("/users", userRouter); app.use("/download", downloadsRouter); +app.use("/local-files", localFilesRouter); +if (process.env.AUTH_MODE === "local") { + app.use("/auth", localAuthRouter); +} app.get("/health", (_req, res) => res.json({ ok: true })); diff --git a/backend/src/lib/access.ts b/backend/src/lib/access.ts index 6f2c869e..66905cf6 100644 --- a/backend/src/lib/access.ts +++ b/backend/src/lib/access.ts @@ -11,9 +11,9 @@ * owner-only (delete, rename, member management). */ -import type { createServerSupabase } from "./supabase"; +import { createDb, DbClient } from "./db"; -type Db = ReturnType<typeof createServerSupabase>; +type Db = DbClient; export type ProjectAccess = | { @@ -135,7 +135,7 @@ export async function listAccessibleProjectIds( ? db .from("projects") .select("id") - .contains("shared_with", [userEmail]) + .filter("shared_with", "cs", JSON.stringify([userEmail])) .neq("user_id", userId) : Promise.resolve({ data: [] as { id: string }[] }), ]); diff --git a/backend/src/lib/chatTools.ts b/backend/src/lib/chatTools.ts index 6d85c6aa..89db0764 100644 --- a/backend/src/lib/chatTools.ts +++ b/backend/src/lib/chatTools.ts @@ -6,7 +6,7 @@ import { uploadFile, } from "./storage"; import { convertedPdfKey } from "./convert"; -import { createServerSupabase } from "./supabase"; +import { createDb, DbClient } from "./db"; import { applyTrackedEdits, extractDocxBodyText, @@ -544,7 +544,7 @@ function citationReminder(docLabel: string, filename: string): string { export async function enrichWithPriorEvents( messages: ChatMessage[], chatId: string | null | undefined, - db: ReturnType<typeof createServerSupabase>, + db: DbClient, docIndex: DocIndex, ): Promise<ChatMessage[]> { if (!chatId) return messages; @@ -731,7 +731,7 @@ export async function generateDocx( title: string, sections: unknown[], userId: string, - db: ReturnType<typeof createServerSupabase>, + db: DbClient, options?: { landscape?: boolean; projectId?: string | null }, ) { try { @@ -1253,7 +1253,7 @@ */ export async function loadCurrentVersionBytes( documentId: string, - db: ReturnType<typeof createServerSupabase>, + db: DbClient, ): Promise<{ bytes: Buffer; storage_path: string } | null> { const active = await loadActiveVersion(documentId, db); if (!active) return null; @@ -1271,7 +1271,7 @@ export async function runEditDocument(params: { documentId: string; userId: string; edits: EditInput[]; - db: ReturnType<typeof createServerSupabase>; + db: DbClient; /** * If provided, append these edits to the existing turn-scoped version * (overwrites the file at storagePath and reuses the document_versions @@ -1483,7 +1483,7 @@ async function readDocumentContent( docStore: DocStore, write: (s: string) => void, docIndex?: DocIndex, - db?: ReturnType<typeof createServerSupabase>, + db?: DbClient, opts?: { emitEvents?: boolean }, ): Promise<void> { const emitEvents = opts?.emitEvents ?? true;
@@ -1669,7 +1669,7 @@ async function findInDocumentContent(params: { docStore: DocStore; write: (s: string) => void; docIndex?: DocIndex; - db?: ReturnType<typeof createServerSupabase>; + db?: DbClient; }): Promise<void> { const { docLabel, @@ -1838,7 +1838,7 @@ export async function runToolCalls( toolCalls: ToolCall[], docStore: DocStore, userId: string, - db: ReturnType<typeof createServerSupabase>, + db: DbClient, write: (s: string) => void, workflowStore?: WorkflowStore, tabularStore?: TabularCellStore, @@ -2715,7 +2715,7 @@ export async function runLLMStream(params: { docStore: DocStore; docIndex: DocIndex; userId: string; - db: ReturnType<typeof createServerSupabase>; + db: DbClient; write: (s: string) => void; extraTools?: unknown[]; workflowStore?: WorkflowStore; @@ -3049,7 +3049,7 @@ export function extractAnnotations( export async function buildDocContext( messages: ChatMessage[], userId: string, - db: ReturnType<typeof createServerSupabase>, + db: DbClient, chatId?: string | null, ): Promise<{ docIndex: DocIndex; docStore: DocStore }> { const docIndex: DocIndex = {}; @@ -3138,7 +3138,7 @@ export async function buildProjectDocContext( projectId: string, _userId: string, - db: ReturnType<typeof createServerSupabase>, + db: DbClient, ): Promise<{ docIndex: DocIndex; docStore: DocStore; @@ -3232,7 +3232,7 @@ export async function buildWorkflowStore( userId: string, userEmail: string | null | undefined, - db: ReturnType<typeof createServerSupabase>, + db: DbClient, ): Promise<WorkflowStore> { const { BUILTIN_WORKFLOWS } = await import("./builtinWorkflows"); const store: WorkflowStore = new Map(); diff --git a/backend/src/lib/db.ts b/backend/src/lib/db.ts new file mode 100644 index 00000000..341a87ab --- /dev/null +++ b/backend/src/lib/db.ts @@ -0,0 +1,85 @@ +/** + * DB client factory. + * + * AUTH_MODE=supabase (default): returns the Supabase PostgREST client. + * AUTH_MODE=local: returns a PgAdapter backed by DATABASE_URL. + * + * Both expose the same .from(table).select/insert/update/delete/upsert chain + * so all route files work without modification. + */ + +import { Pool } from "pg"; +import { PgAdapter } from "./pgAdapter"; +import { createServerSupabase } from "./supabase"; + +// Loose interface so both Supabase client and PgAdapter satisfy it without +// TypeScript trying to intersect their complex generic return types. +// Route code already casts `data` to specific types, so `any` here is safe. +export interface DbClient { + // eslint-disable-next-line @typescript-eslint/no-explicit-any + from(table: string): any; +} + +let _pool: Pool | null = null; + +function getPgPool(): Pool { + if (!_pool) { + const url = process.env.DATABASE_URL; + if (!url) throw new Error("DATABASE_URL must be set when AUTH_MODE=local"); + _pool = new Pool({ connectionString: url }); + } + return _pool; +} + +export function createDb(): DbClient { + if (process.env.AUTH_MODE === "local") { + return new PgAdapter(getPgPool()); + } + return createServerSupabase(); +} + +// --------------------------------------------------------------------------- +// Auth-level user helpers — deletion, plus listing (needed by the workflow +// sharing feature to resolve user emails). In Supabase mode these call the +// admin auth API; in local mode they query the users table directly.
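+// +// A minimal usage sketch (hypothetical caller): +//   const users = await listAuthUsers(); +//   const emailById = new Map(users.map((u) => [u.id, u.email]));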
+// --------------------------------------------------------------------------- + +export async function deleteAuthUser(userId: string): Promise<{ error: { message: string } | null }> { + if (process.env.AUTH_MODE === "local") { + const pool = getPgPool(); + await pool.query("DELETE FROM users WHERE id = $1", [userId]); + return { error: null }; + } + try { + const { createClient } = await import("@supabase/supabase-js"); + const admin = createClient( + process.env.SUPABASE_URL ?? "", + process.env.SUPABASE_SECRET_KEY ?? "", + { auth: { autoRefreshToken: false, persistSession: false } }, + ); + const { error } = await admin.auth.admin.deleteUser(userId); + return { error: error ? { message: error.message } : null }; + } catch (err) { + return { error: { message: err instanceof Error ? err.message : String(err) } }; + } +} + +export async function listAuthUsers(): Promise<{ id: string; email: string }[]> { + if (process.env.AUTH_MODE === "local") { + const pool = getPgPool(); + const res = await pool.query("SELECT id, email FROM users"); + return res.rows as { id: string; email: string }[]; + } + try { + const { createClient } = await import("@supabase/supabase-js"); + const admin = createClient( + process.env.SUPABASE_URL ?? "", + process.env.SUPABASE_SECRET_KEY ?? "", + { auth: { autoRefreshToken: false, persistSession: false } }, + ); + const { data } = await admin.auth.admin.listUsers({ perPage: 1000 }); + return (data?.users ?? []).map((u) => ({ id: u.id, email: u.email ?? "" })); + } catch { + return []; + } +} diff --git a/backend/src/lib/documentVersions.ts b/backend/src/lib/documentVersions.ts index 83c2ac45..aa40f1bb 100644 --- a/backend/src/lib/documentVersions.ts +++ b/backend/src/lib/documentVersions.ts @@ -1,6 +1,6 @@ -import type { createServerSupabase } from "./supabase"; +import { createDb, DbClient } from "./db"; -type Supa = ReturnType<typeof createServerSupabase>; +type Supa = DbClient; interface DocRow { id: string; diff --git a/backend/src/lib/llm/index.ts b/backend/src/lib/llm/index.ts index 4b5e9793..ad83bda0 100644 --- a/backend/src/lib/llm/index.ts +++ b/backend/src/lib/llm/index.ts @@ -1,6 +1,7 @@ import { streamClaude, completeClaudeText } from "./claude"; import { streamGemini, completeGeminiText } from "./gemini"; import { streamOpenAI, completeOpenAIText } from "./openai"; +import { streamOllama, completeOllamaText } from "./ollama"; import { providerForModel } from "./models"; import type { StreamChatParams, StreamChatResult, UserApiKeys } from "./types"; @@ -13,6 +14,7 @@ export async function streamChatWithTools( const provider = providerForModel(params.model); if (provider === "claude") return streamClaude(params); if (provider === "openai") return streamOpenAI(params); + if (provider === "ollama") return streamOllama(params); return streamGemini(params); } @@ -26,5 +28,6 @@ export async function completeText(params: { const provider = providerForModel(params.model); if (provider === "claude") return completeClaudeText(params); if (provider === "openai") return completeOpenAIText(params); + if (provider === "ollama") return completeOllamaText(params); return completeGeminiText(params); } diff --git a/backend/src/lib/llm/models.ts b/backend/src/lib/llm/models.ts index ed4872ef..4537eab5 100644 --- a/backend/src/lib/llm/models.ts +++ b/backend/src/lib/llm/models.ts @@ -22,6 +22,13 @@ export const CLAUDE_LOW_MODELS = ["claude-haiku-4-5"] as const; export const GEMINI_LOW_MODELS = ["gemini-3.1-flash-lite-preview"] as const; export const OPENAI_LOW_MODELS = ["gpt-5.4-nano"] as const; +// Local / self-hosted models via Ollama or llama.cpp
+// Any model name can be used at runtime — the lists below are sensible defaults +// that appear in the UI model picker. +export const OLLAMA_MAIN_MODELS = ["local-gemma-26b", "local-llama3.3", "local-deepseek-r1"] as const; +export const OLLAMA_MID_MODELS = ["local-gemma-26b"] as const; +export const OLLAMA_LOW_MODELS = ["local-gemma-26b"] as const; + export const DEFAULT_MAIN_MODEL = "gemini-3-flash-preview"; export const DEFAULT_TITLE_MODEL = "gemini-3.1-flash-lite-preview"; export const DEFAULT_TABULAR_MODEL = "gemini-3-flash-preview"; @@ -36,6 +43,9 @@ const ALL_MODELS = new Set([ ...CLAUDE_LOW_MODELS, ...GEMINI_LOW_MODELS, ...OPENAI_LOW_MODELS, + ...OLLAMA_MAIN_MODELS, + ...OLLAMA_MID_MODELS, + ...OLLAMA_LOW_MODELS, ]); // --------------------------------------------------------------------------- @@ -46,10 +56,18 @@ export function providerForModel(model: string): Provider { if (model.startsWith("claude")) return "claude"; if (model.startsWith("gemini")) return "gemini"; if (model.startsWith("gpt-")) return "openai"; + if (model.startsWith("local-")) return "ollama"; + // Fallback: if OLLAMA_BASE_URL is set, route any model name to Ollama. + // This lets users type any Ollama model name directly into the UI. + const ollamaBase = process.env.OLLAMA_BASE_URL?.trim() || process.env.LLAMACPP_BASE_URL?.trim(); + if (ollamaBase) return "ollama"; throw new Error(`Unknown model id: ${model}`); } export function resolveModel(id: string | null | undefined, fallback: string): string { - if (id && ALL_MODELS.has(id)) return id; + if (!id) return fallback; + if (ALL_MODELS.has(id)) return id; + // Allow any local-* model name — the user may have typed a custom one. + if (id.startsWith("local-")) return id; return fallback; } diff --git a/backend/src/lib/llm/ollama.ts b/backend/src/lib/llm/ollama.ts new file mode 100644 index 00000000..74b77c90 --- /dev/null +++ b/backend/src/lib/llm/ollama.ts @@ -0,0 +1,317 @@ +import type { + StreamChatParams, + StreamChatResult, + NormalizedToolCall, + NormalizedToolResult, + OpenAIToolSchema, +} from "./types"; + +const DEFAULT_OLLAMA_BASE_URL = "http://localhost:11434/v1"; + +function baseUrl(): string { + return ( + process.env.OLLAMA_BASE_URL?.trim() || + process.env.LLAMACPP_BASE_URL?.trim() || + DEFAULT_OLLAMA_BASE_URL + ); +} + +function envApiKey(): string | undefined { + return ( + process.env.OLLAMA_API_KEY?.trim() || + process.env.LLAMACPP_API_KEY?.trim() || + undefined + ); +} + +/** + * Resolve the real model name to send to Ollama/llama.cpp. + * The UI uses a "local-" prefix to distinguish local models from cloud + * providers; strip it before forwarding to the API.
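+ * + * For example, resolveModelName("local-llama3.3") returns "llama3.3".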
+ */ +function resolveModelName(model: string): string { + if (model.startsWith("local-")) { + return model.slice("local-".length); + } + return model; +} + +type ToolCallPart = { + id: string; + type: "function"; + function: { name: string; arguments: string }; +}; + +type ApiMessage = + | { role: "system"; content: string } + | { role: "user"; content: string } + | { role: "assistant"; content: string | null; tool_calls?: ToolCallPart[] } + | { role: "tool"; tool_call_id: string; content: string }; + +type ApiTool = { + type: "function"; + function: { + name: string; + description?: string; + parameters: Record<string, unknown>; + }; +}; + +type ChatCompletionChunk = { + choices?: { + delta: { + content?: string | null; + tool_calls?: ToolCallPart[]; + reasoning_content?: string; + }; + finish_reason?: string | null; + }[]; +}; + +function toApiMessages( + messages: StreamChatParams["messages"], + systemPrompt: string, +): ApiMessage[] { + const result: ApiMessage[] = [{ role: "system", content: systemPrompt }]; + for (const m of messages) { + result.push({ role: m.role, content: m.content }); + } + return result; +} + +function toApiTools(tools: OpenAIToolSchema[]): ApiTool[] { + return tools.map((t) => ({ + type: "function" as const, + function: { + name: t.function.name, + description: t.function.description, + parameters: t.function.parameters, + }, + })); +} + +function parseToolCalls( + toolCalls: ToolCallPart[] | undefined, +): NormalizedToolCall[] { + if (!toolCalls || toolCalls.length === 0) return []; + return toolCalls.map((tc) => { + let input: Record<string, unknown> = {}; + try { + const parsed = JSON.parse(tc.function.arguments); + if (parsed && typeof parsed === "object" && !Array.isArray(parsed)) { + input = parsed as Record<string, unknown>; + } + } catch { + input = {}; + } + return { + id: tc.id, + name: tc.function.name, + input, + }; + }); +} + +export async function streamOllama( + params: StreamChatParams, +): Promise<StreamChatResult> { + const { + model, + systemPrompt, + tools = [], + callbacks = {}, + runTools, + } = params; + const maxIter = params.maxIterations ?? 10;
+ const url = `${baseUrl()}/chat/completions`; + const key = params.apiKeys?.ollama?.trim() || envApiKey(); + const headers: Record<string, string> = { + "Content-Type": "application/json", + }; + if (key) headers["Authorization"] = `Bearer ${key}`; + + let messages = toApiMessages(params.messages, systemPrompt); + const apiTools = toApiTools(tools); + const hasTools = apiTools.length > 0; + let fullText = ""; + + for (let iter = 0; iter < maxIter; iter++) { + const body: Record<string, unknown> = { + model: resolveModelName(model), + messages, + stream: true, + max_tokens: 16384, + }; + if (hasTools) body.tools = apiTools; + + const response = await fetch(url, { + method: "POST", + headers, + body: JSON.stringify(body), + }); + + if (!response.ok) { + const text = await response.text().catch(() => ""); + throw new Error( + `Ollama/llama.cpp request failed (${response.status}): ${text || response.statusText}`, + ); + } + + if (!response.body) { + throw new Error("Ollama/llama.cpp response had no body"); + } + + const reader = response.body.getReader(); + const decoder = new TextDecoder(); + const toolCalls: NormalizedToolCall[] = []; + const toolCallParts: ToolCallPart[] = []; + let currentToolCall: ToolCallPart | null = null; + let buffer = ""; + let pendingContent = ""; + let sawReasoning = false; + + while (true) { + const { done, value } = await reader.read(); + if (done) break; + + buffer += decoder.decode(value, { stream: true }); + const lines = buffer.split("\n"); + buffer = lines.pop() ?? ""; + + for (const line of lines) { + const trimmed = line.trim(); + if (!trimmed || !trimmed.startsWith("data:")) continue; + const data = trimmed.slice(5).trim(); + if (!data || data === "[DONE]") continue; + + try { + const chunk = JSON.parse(data) as ChatCompletionChunk; + const choice = chunk.choices?.[0]; + if (!choice) continue; + + const delta = choice.delta; + + if (delta.reasoning_content) { + sawReasoning = true; + callbacks.onReasoningDelta?.(delta.reasoning_content); + } + + if (delta.content) { + if (hasTools) { + pendingContent += delta.content; + } else { + fullText += delta.content; + callbacks.onContentDelta?.(delta.content); + } + } + + if (delta.tool_calls) { + for (const tc of delta.tool_calls) { + if (tc.id) { + currentToolCall = { id: tc.id, type: "function", function: { name: tc.function.name, arguments: tc.function.arguments || "" } }; + toolCallParts.push(currentToolCall); + } else if (currentToolCall && tc.function?.arguments) { + currentToolCall.function.arguments += tc.function.arguments; + } + } + } + + if (choice.finish_reason === "tool_calls" && toolCallParts.length > 0) { + const calls = parseToolCalls(toolCallParts); + for (const c of calls) { + callbacks.onToolCallStart?.(c); + toolCalls.push(c); + } + toolCallParts.length = 0; + currentToolCall = null; + } + } catch { + // skip malformed JSON chunks + } + } + } + + if (sawReasoning) callbacks.onReasoningBlockEnd?.(); + + if (!toolCalls.length || !runTools) { + if (pendingContent) { + fullText += pendingContent; + callbacks.onContentDelta?.(pendingContent); + } + break; + } + + const results = await runTools(toolCalls); + + const assistantContent = pendingContent || null; + const assistantMsg: ApiMessage = { + role: "assistant", + content: assistantContent, + tool_calls: toolCalls.map((tc) => ({ + id: tc.id, + type: "function" as const, + function: { name: tc.name, arguments: JSON.stringify(tc.input) }, + })), + }; + messages.push(assistantMsg); + + for (const r of results) { + messages.push({ + role: "tool", + tool_call_id: r.tool_use_id,
+ content: r.content, + }); + } + + pendingContent = ""; + toolCalls.length = 0; + } + + return { fullText }; +} + +export async function completeOllamaText(params: { + model: string; + systemPrompt?: string; + user: string; + maxTokens?: number; + apiKeys?: { ollama?: string | null }; +}): Promise<string> { + const url = `${baseUrl()}/chat/completions`; + const key = params.apiKeys?.ollama?.trim() || envApiKey(); + const headers: Record<string, string> = { + "Content-Type": "application/json", + }; + if (key) headers["Authorization"] = `Bearer ${key}`; + + const messages: ApiMessage[] = []; + if (params.systemPrompt) { + messages.push({ role: "system", content: params.systemPrompt }); + } + messages.push({ role: "user", content: params.user }); + + const response = await fetch(url, { + method: "POST", + headers, + body: JSON.stringify({ + model: resolveModelName(params.model), + messages, + max_tokens: params.maxTokens ?? 512, + stream: false, + }), + }); + + if (!response.ok) { + const text = await response.text().catch(() => ""); + throw new Error( + `Ollama/llama.cpp completion failed (${response.status}): ${text || response.statusText}`, + ); + } + + const json = (await response.json()) as { + choices?: { message?: { content?: string } }[]; + }; + return json.choices?.[0]?.message?.content ?? ""; +} + +export type { NormalizedToolResult }; diff --git a/backend/src/lib/llm/types.ts b/backend/src/lib/llm/types.ts index a8409d80..af95cb88 100644 --- a/backend/src/lib/llm/types.ts +++ b/backend/src/lib/llm/types.ts @@ -2,7 +2,7 @@ // Callers always speak OpenAI-style tools + { role, content } messages; each // provider translates internally. -export type Provider = "claude" | "gemini" | "openai"; +export type Provider = "claude" | "gemini" | "openai" | "ollama"; export type OpenAIToolSchema = { type: "function"; @@ -40,6 +40,7 @@ export type UserApiKeys = { claude?: string | null; gemini?: string | null; openai?: string | null; + ollama?: string | null; }; export type StreamChatParams = { diff --git a/backend/src/lib/localSignedTokens.ts b/backend/src/lib/localSignedTokens.ts new file mode 100644 index 00000000..44910d86 --- /dev/null +++ b/backend/src/lib/localSignedTokens.ts @@ -0,0 +1,68 @@ +import crypto from "crypto"; + +function getSecret(): string { + return ( + process.env.DOWNLOAD_SIGNING_SECRET ?? + process.env.SUPABASE_SECRET_KEY ??
+ "dev-secret" + ); +} + +function b64urlEncode(buf: Buffer): string { + return buf + .toString("base64") + .replace(/\+/g, "-") + .replace(/\//g, "_") + .replace(/=+$/g, ""); +} + +function b64urlDecode(s: string): Buffer { + let t = s.replace(/-/g, "+").replace(/_/g, "/"); + while (t.length % 4) t += "="; + return Buffer.from(t, "base64"); +} + +function timingSafeEqStr(a: string, b: string): boolean { + if (a.length !== b.length) return false; + return crypto.timingSafeEqual(Buffer.from(a), Buffer.from(b)); +} + +export function signLocalFile( + key: string, + filename: string, + expiresIn: number, +): string { + const exp = Math.floor(Date.now() / 1000) + expiresIn; + const payload = JSON.stringify({ p: key, f: filename, exp }); + const enc = b64urlEncode(Buffer.from(payload, "utf8")); + const sig = crypto + .createHmac("sha256", getSecret()) + .update(enc) + .digest(); + return `${enc}.${b64urlEncode(sig)}`; +} + +export function verifyLocalFile( + token: string, +): { key: string; filename: string } | null { + const parts = token.split("."); + if (parts.length !== 2) return null; + const [enc, sigEnc] = parts; + const expected = crypto + .createHmac("sha256", getSecret()) + .update(enc) + .digest(); + if (!timingSafeEqStr(sigEnc, b64urlEncode(expected))) return null; + try { + const parsed = JSON.parse(b64urlDecode(enc).toString("utf8")) as { + p: string; + f: string; + exp: number; + }; + if (!parsed?.p || !parsed?.f || !parsed?.exp) return null; + if (Math.floor(Date.now() / 1000) > parsed.exp) return null; + return { key: parsed.p, filename: parsed.f }; + } catch { + return null; + } +} diff --git a/backend/src/lib/pgAdapter.ts b/backend/src/lib/pgAdapter.ts new file mode 100644 index 00000000..dfa349da --- /dev/null +++ b/backend/src/lib/pgAdapter.ts @@ -0,0 +1,370 @@ +/** + * Minimal PostgreSQL query builder that mirrors the subset of the + * Supabase JS client API used throughout the backend routes. + * + * Returns `{ data, error }` (and `count` for COUNT queries) in the same + * shape as PostgREST, so route code needs zero changes once the db + * factory returns a PgAdapter instead of the Supabase client. + */ + +import { Pool } from "pg"; + +type Row = Record; +type QueryResult = { data: T; error: null; count?: number | null } | { data: null; error: { message: string }; count?: null }; +type Operation = "select" | "insert" | "update" | "delete" | "upsert"; + +// Conditions store sql with '?' placeholders plus the corresponding values. +// resolveParams() replaces each '?' with the correct $n at query-build time. 
+interface Condition { + sql: string; + params: unknown[]; +} + +function resolveParams(conditions: Condition[]): { clauses: string[]; params: unknown[] } { + const params: unknown[] = []; + const clauses: string[] = []; + for (const cond of conditions) { + let sql = cond.sql; + for (const p of cond.params) { + params.push(p); + sql = sql.replace("?", `$${params.length}`); + } + clauses.push(sql); + } + return { clauses, params }; +} + +// PostgREST OR filter syntax: "col.op.val,col.op.(v1,v2)" +function splitOrTokens(filter: string): string[] { + const tokens: string[] = []; + let depth = 0; + let start = 0; + for (let i = 0; i < filter.length; i++) { + if (filter[i] === "(") depth++; + else if (filter[i] === ")") depth--; + else if (filter[i] === "," && depth === 0) { + tokens.push(filter.slice(start, i)); + start = i + 1; + } + } + tokens.push(filter.slice(start)); + return tokens; +} + +function parseOrFilter(filter: string): Condition { + const tokens = splitOrTokens(filter); + const parts: Condition[] = []; + for (const token of tokens) { + const firstDot = token.indexOf("."); + const col = token.slice(0, firstDot); + const rest = token.slice(firstDot + 1); + const secondDot = rest.indexOf("."); + const op = rest.slice(0, secondDot); + const val = rest.slice(secondDot + 1); + if (op === "eq") { + parts.push({ sql: `${col} = ?`, params: [val] }); + } else if (op === "in") { + const inner = val.slice(1, -1); + const vals = inner ? inner.split(",") : []; + if (vals.length === 0) { + parts.push({ sql: "FALSE", params: [] }); + } else { + const placeholders = vals.map(() => "?").join(", "); + parts.push({ sql: `${col} IN (${placeholders})`, params: vals }); + } + } + } + return { + sql: `(${parts.map((p) => p.sql).join(" OR ")})`, + params: parts.flatMap((p) => p.params), + }; +} + +export class PgQueryBuilder { + private _pool: Pool; + private _table: string; + private _op: Operation = "select"; + private _cols = "*"; + private _countMode = false; + private _conditions: Condition[] = []; + private _orderClauses: string[] = []; + private _limitVal?: number; + private _singleMode = false; + private _maybeSingleMode = false; + private _insertData?: Row | Row[]; + private _updateData?: Row; + private _upsertConflict?: string; + private _ignoreDuplicates = false; + private _withReturning = false; + private _orCond?: Condition; + + constructor(pool: Pool, table: string) { + this._pool = pool; + this._table = table; + } + + select(cols = "*", opts?: { count?: "exact"; head?: boolean }): this { + if (this._op === "insert" || this._op === "update" || this._op === "upsert") { + // Chained after a mutation: add RETURNING clause + this._withReturning = true; + this._cols = cols; + } else { + this._op = "select"; + this._countMode = opts?.count === "exact"; + this._cols = cols; + } + return this; + } + + insert(data: Row | Row[]): this { + this._op = "insert"; + this._insertData = data; + return this; + } + + update(data: Row): this { + this._op = "update"; + this._updateData = data; + return this; + } + + delete(): this { + this._op = "delete"; + return this; + } + + upsert(data: Row | Row[], opts?: { onConflict?: string; ignoreDuplicates?: boolean }): this { + this._op = "upsert"; + this._insertData = data; + this._upsertConflict = opts?.onConflict; + this._ignoreDuplicates = opts?.ignoreDuplicates ?? 
false; + return this; + } + + eq(col: string, val: unknown): this { + if (val === null) { + this._conditions.push({ sql: `${col} IS NULL`, params: [] }); + } else { + this._conditions.push({ sql: `${col} = ?`, params: [val] }); + } + return this; + } + + neq(col: string, val: unknown): this { + this._conditions.push({ sql: `${col} != ?`, params: [val] }); + return this; + } + + in(col: string, vals: unknown[]): this { + if (vals.length === 0) { + this._conditions.push({ sql: "FALSE", params: [] }); + } else { + const placeholders = vals.map(() => "?").join(", "); + this._conditions.push({ sql: `${col} IN (${placeholders})`, params: vals }); + } + return this; + } + + is(col: string, val: null | boolean): this { + if (val === null) { + this._conditions.push({ sql: `${col} IS NULL`, params: [] }); + } else { + this._conditions.push({ sql: `${col} IS ${val ? "TRUE" : "FALSE"}`, params: [] }); + } + return this; + } + + filter(col: string, op: string, val: unknown): this { + if (op === "cs") { + this._conditions.push({ sql: `${col} @> ?::jsonb`, params: [val] }); + } + return this; + } + + not(col: string, op: string, val: unknown): this { + if (op === "is") { + this._conditions.push({ sql: `${col} IS NOT NULL`, params: [] }); + } else if (op === "eq") { + this._conditions.push({ sql: `${col} != ?`, params: [val] }); + } else if (op === "in") { + const vals = Array.isArray(val) ? val : [val]; + const placeholders = vals.map(() => "?").join(", "); + this._conditions.push({ sql: `${col} NOT IN (${placeholders})`, params: vals }); + } + return this; + } + + or(filter: string): this { + this._orCond = parseOrFilter(filter); + return this; + } + + order(col: string, opts?: { ascending?: boolean }): this { + const dir = opts?.ascending === false ? "DESC" : "ASC"; + this._orderClauses.push(`${col} ${dir}`); + return this; + } + + limit(n: number): this { + this._limitVal = n; + return this; + } + + // Make the builder awaitable for array-result selects. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + then<R>(onfulfilled: (value: any) => R, onrejected?: (reason: unknown) => R): Promise<R> { + return this._execute().then(onfulfilled, onrejected); + } + + async single(): Promise<QueryResult<Row>> { + this._singleMode = true; + return this._execute() as Promise<QueryResult<Row>>; + } + + async maybeSingle(): Promise<QueryResult<Row | null>> { + this._maybeSingleMode = true; + return this._execute() as Promise<QueryResult<Row | null>>; + } + + private _buildWhere(): { where: string; params: unknown[] } { + const allConds: Condition[] = [...this._conditions]; + if (this._orCond) allConds.push(this._orCond); + if (allConds.length === 0) return { where: "", params: [] }; + const { clauses, params } = resolveParams(allConds); + return { where: `WHERE ${clauses.join(" AND ")}`, params }; + } + + private async _execute(): Promise<QueryResult<unknown>> { + try { + return await this._run(); + } catch (err) { + return { data: null, error: { message: err instanceof Error ?
err.message : String(err) } }; + } + } + + private async _run(): Promise<QueryResult<unknown>> { + const table = `"${this._table}"`; + + if (this._op === "select") { + const { where, params } = this._buildWhere(); + if (this._countMode) { + const sql = `SELECT COUNT(*) FROM ${table} ${where}`.trim(); + const res = await this._pool.query(sql, params); + return { data: null, error: null, count: parseInt(res.rows[0].count, 10) }; + } + const parts: string[] = [`SELECT ${this._cols} FROM ${table}`]; + if (where) parts.push(where); + if (this._orderClauses.length) parts.push(`ORDER BY ${this._orderClauses.join(", ")}`); + if (this._limitVal !== undefined) parts.push(`LIMIT ${this._limitVal}`); + const res = await this._pool.query(parts.join(" "), params); + if (this._singleMode) { + if (res.rows.length === 0) return { data: null, error: { message: "No rows found" } }; + return { data: res.rows[0], error: null }; + } + if (this._maybeSingleMode) { + return { data: res.rows[0] ?? null, error: null }; + } + return { data: res.rows, error: null }; + } + + if (this._op === "insert" || this._op === "upsert") { + const rows = Array.isArray(this._insertData) ? this._insertData : [this._insertData!]; + const cols = Object.keys(rows[0]); + const params: unknown[] = []; + const valueSets = rows.map((row) => { + const placeholders = cols.map((col) => { + // pg formats JS arrays as PostgreSQL array literals {a,b} which is invalid + // for jsonb columns. JSON.stringify produces valid JSON for PostgreSQL to parse. + const val = row[col]; + params.push(Array.isArray(val) ? JSON.stringify(val) : val); + return `$${params.length}`; + }); + return `(${placeholders.join(", ")})`; + }); + + const colList = cols.map((c) => `"${c}"`).join(", "); + let sql = `INSERT INTO ${table} (${colList}) VALUES ${valueSets.join(", ")}`; + + if (this._op === "upsert" && this._upsertConflict) { + const conflictCols = this._upsertConflict.split(",").map((c) => `"${c.trim()}"`).join(", "); + const updateCols = cols.filter((c) => !this._upsertConflict!.split(",").map((x) => x.trim()).includes(c)); + if (this._ignoreDuplicates || updateCols.length === 0) { + sql += ` ON CONFLICT (${conflictCols}) DO NOTHING`; + } else { + const setClauses = updateCols.map((c) => `"${c}" = EXCLUDED."${c}"`).join(", "); + sql += ` ON CONFLICT (${conflictCols}) DO UPDATE SET ${setClauses}`; + } + } + + if (this._withReturning) { + sql += ` RETURNING ${this._cols === "*" ? "*" : this._cols}`; + } + + const res = await this._pool.query(sql, params); + if (this._withReturning) { + if (this._singleMode) { + return { data: res.rows[0] ?? null, error: null }; + } + return { data: res.rows, error: null }; + } + return { data: null, error: null }; + } + + if (this._op === "update") { + const entries = Object.entries(this._updateData!); + const setParams: unknown[] = []; + const setClauses = entries.map(([col, val]) => { + setParams.push(Array.isArray(val) ? JSON.stringify(val) : val); + return `"${col}" = $${setParams.length}`; + }); + + // WHERE placeholders must be offset by setParams.length + const allConds: Condition[] = [...this._conditions]; + if (this._orCond) allConds.push(this._orCond); + const offset = setParams.length; + const condParams: unknown[] = []; + const condClauses: string[] = []; + for (const cond of allConds) { + let sql = cond.sql; + for (const p of cond.params) { + condParams.push(p); + sql = sql.replace("?", `$${offset + condParams.length}`); + } + condClauses.push(sql); + } + + const where = condClauses.length > 0 ?
`WHERE ${condClauses.join(" AND ")}` : ""; + const returning = this._withReturning ? ` RETURNING ${this._cols === "*" ? "*" : this._cols}` : ""; + const sql = `UPDATE ${table} SET ${setClauses.join(", ")} ${where}${returning}`.trim(); + const allParams = [...setParams, ...condParams]; + const res = await this._pool.query(sql, allParams); + + if (this._withReturning) { + if (this._singleMode) return { data: res.rows[0] ?? null, error: null }; + return { data: res.rows, error: null }; + } + return { data: null, error: null }; + } + + if (this._op === "delete") { + const { where, params } = this._buildWhere(); + const sql = `DELETE FROM ${table} ${where}`.trim(); + await this._pool.query(sql, params); + return { data: null, error: null }; + } + + return { data: null, error: { message: `Unsupported operation: ${this._op}` } }; + } +} + +export class PgAdapter { + private _pool: Pool; + + constructor(pool: Pool) { + this._pool = pool; + } + + from(table: string): PgQueryBuilder { + return new PgQueryBuilder(this._pool, table); + } +} diff --git a/backend/src/lib/storage.ts b/backend/src/lib/storage.ts index 6b4f7492..d32e5cfe 100644 --- a/backend/src/lib/storage.ts +++ b/backend/src/lib/storage.ts @@ -1,12 +1,24 @@ /** - * Cloudflare R2 storage utilities for Mike document management. - * R2 is S3-compatible — uses @aws-sdk/client-s3. + * Storage layer for Mike document management. * - * Required env vars: - * R2_ENDPOINT_URL — https://<account-id>.r2.cloudflarestorage.com - * R2_ACCESS_KEY_ID — R2 API token (Access Key ID) - * R2_SECRET_ACCESS_KEY — R2 API token (Secret Access Key) - * R2_BUCKET_NAME — bucket name (default: "mike") + * Backends (in priority order): + * 1. Cloudflare R2 (S3-compatible) — when R2_* env vars are set + * 2. Supabase Storage — opt-in via SUPABASE_STORAGE_BUCKET when R2 is absent + * 3. Local filesystem — default, always available + * + * R2 env vars: + * R2_ENDPOINT_URL — https://<account-id>.r2.cloudflarestorage.com + * R2_ACCESS_KEY_ID — R2 API token (Access Key ID) + * R2_SECRET_ACCESS_KEY — R2 API token (Secret Access Key) + * R2_BUCKET_NAME — bucket name (default: "mike") + * + * Supabase Storage (opt-in; setting SUPABASE_STORAGE_BUCKET enables it): + * SUPABASE_URL / SUPABASE_SECRET_KEY (already required for auth) + * SUPABASE_STORAGE_BUCKET — bucket name, e.g. "mike-documents" + * + * Local filesystem (default when no cloud storage is configured): + * LOCAL_STORAGE_PATH — directory for stored files (default: "./uploads") + * BACKEND_URL — used to build signed URLs (default: "http://localhost:3001") */ import { @@ -16,8 +28,53 @@ DeleteObjectCommand, } from "@aws-sdk/client-s3"; import { getSignedUrl as awsGetSignedUrl } from "@aws-sdk/s3-request-presigner"; +import { createClient } from "@supabase/supabase-js"; +import fs from "fs/promises"; +import path from "path"; +import { signLocalFile } from "./localSignedTokens"; + +// --------------------------------------------------------------------------- +// Driver detection +// --------------------------------------------------------------------------- + +function isR2Configured(): boolean { + const url = process.env.R2_ENDPOINT_URL?.trim() ?? ""; + const key = process.env.R2_ACCESS_KEY_ID?.trim() ?? ""; + const secret = process.env.R2_SECRET_ACCESS_KEY?.trim() ??
""; + // Reject placeholder values shipped with .env.example + if (url.includes("your-account-id") || key.startsWith("your-") || secret.startsWith("your-")) return false; + return Boolean(url && key && secret); +} + +const USE_R2 = isR2Configured(); +// Supabase Storage requires an explicit opt-in via SUPABASE_STORAGE_BUCKET, +// because SUPABASE_URL/SUPABASE_SECRET_KEY are also used for Auth and would +// otherwise accidentally enable cloud storage. +const USE_SUPABASE_STORAGE = !USE_R2 && Boolean( + process.env.SUPABASE_STORAGE_BUCKET?.trim(), +); +const USE_LOCAL = !USE_R2 && !USE_SUPABASE_STORAGE; + +const BUCKET = process.env.R2_BUCKET_NAME ?? "mike"; +const SUPABASE_BUCKET = process.env.SUPABASE_STORAGE_BUCKET ?? "mike-documents"; +const LOCAL_STORAGE_PATH = path.resolve( + process.env.LOCAL_STORAGE_PATH ?? "./uploads", +); +const BACKEND_URL = (process.env.BACKEND_URL ?? "http://localhost:3001").replace(/\/$/, ""); -function getClient(): S3Client { +// Storage is always enabled (local filesystem is always available). +export const storageEnabled = true; + +if (USE_LOCAL) { + // Ensure the local storage directory exists at startup. + fs.mkdir(LOCAL_STORAGE_PATH, { recursive: true }).catch(() => {}); +} + +// --------------------------------------------------------------------------- +// R2 client +// --------------------------------------------------------------------------- + +function getR2Client(): S3Client { return new S3Client({ region: "auto", endpoint: process.env.R2_ENDPOINT_URL!, @@ -28,13 +85,36 @@ function getClient(): S3Client { }); } -const BUCKET = process.env.R2_BUCKET_NAME ?? "mike"; +// --------------------------------------------------------------------------- +// Supabase Storage client +// --------------------------------------------------------------------------- -export const storageEnabled = Boolean( - process.env.R2_ENDPOINT_URL && - process.env.R2_ACCESS_KEY_ID && - process.env.R2_SECRET_ACCESS_KEY, -); +function getSupabaseClient() { + return createClient( + process.env.SUPABASE_URL!, + process.env.SUPABASE_SECRET_KEY!, + { auth: { persistSession: false } }, + ); +} + +async function ensureSupabaseBucket(): Promise { + const sb = getSupabaseClient(); + const { data: buckets } = await sb.storage.listBuckets(); + const exists = (buckets ?? 
+ +// --------------------------------------------------------------------------- +// R2 client +// --------------------------------------------------------------------------- + +function getR2Client(): S3Client { return new S3Client({ region: "auto", endpoint: process.env.R2_ENDPOINT_URL!, @@ -28,13 +85,36 @@ function getClient(): S3Client { }); } -const BUCKET = process.env.R2_BUCKET_NAME ?? "mike"; +// --------------------------------------------------------------------------- +// Supabase Storage client +// --------------------------------------------------------------------------- -export const storageEnabled = Boolean( - process.env.R2_ENDPOINT_URL && - process.env.R2_ACCESS_KEY_ID && - process.env.R2_SECRET_ACCESS_KEY, -); +function getSupabaseClient() { + return createClient( + process.env.SUPABASE_URL!, + process.env.SUPABASE_SECRET_KEY!, + { auth: { persistSession: false } }, + ); +} + +async function ensureSupabaseBucket(): Promise<void> { + const sb = getSupabaseClient(); + const { data: buckets } = await sb.storage.listBuckets(); + const exists = (buckets ?? []).some((b) => b.name === SUPABASE_BUCKET); + if (!exists) { + await sb.storage.createBucket(SUPABASE_BUCKET, { public: false }); + } +} + +let bucketEnsured = false; +async function withBucket(): Promise<ReturnType<typeof getSupabaseClient>> { + const sb = getSupabaseClient(); + if (!bucketEnsured) { + await ensureSupabaseBucket(); + bucketEnsured = true; + } + return sb; +} // --------------------------------------------------------------------------- // Upload @@ -45,15 +125,32 @@ export async function uploadFile( content: ArrayBuffer, contentType: string, ): Promise<void> { - const client = getClient(); - await client.send( - new PutObjectCommand({ - Bucket: BUCKET, - Key: key, - Body: Buffer.from(content), - ContentType: contentType, - }), - ); + if (USE_R2) { + const client = getR2Client(); + await client.send( + new PutObjectCommand({ + Bucket: BUCKET, + Key: key, + Body: Buffer.from(content), + ContentType: contentType, + }), + ); + return; + } + + if (USE_SUPABASE_STORAGE) { + const sb = await withBucket(); + const { error } = await sb.storage + .from(SUPABASE_BUCKET) + .upload(key, Buffer.from(content), { contentType, upsert: true }); + if (error) throw new Error(`Supabase storage upload failed: ${error.message}`); + return; + } + + // Local filesystem + const filePath = path.join(LOCAL_STORAGE_PATH, key); + await fs.mkdir(path.dirname(filePath), { recursive: true }); + await fs.writeFile(filePath, Buffer.from(content)); } // --------------------------------------------------------------------------- @@ -61,15 +158,28 @@ export async function uploadFile( // --------------------------------------------------------------------------- export async function downloadFile(key: string): Promise<ArrayBuffer | null> { - if (!storageEnabled) return null; try { - const client = getClient(); - const response = await client.send( - new GetObjectCommand({ Bucket: BUCKET, Key: key }), - ); - if (!response.Body) return null; - const bytes = await response.Body.transformToByteArray(); - return bytes.buffer as ArrayBuffer; + if (USE_R2) { + const client = getR2Client(); + const response = await client.send( + new GetObjectCommand({ Bucket: BUCKET, Key: key }), + ); + if (!response.Body) return null; + const bytes = await response.Body.transformToByteArray(); + return bytes.buffer as ArrayBuffer; + } + + if (USE_SUPABASE_STORAGE) { + const sb = await withBucket(); + const { data, error } = await sb.storage.from(SUPABASE_BUCKET).download(key); + if (error || !data) return null; + return await data.arrayBuffer(); + } + + // Local filesystem + const filePath = path.join(LOCAL_STORAGE_PATH, key); + const buf = await fs.readFile(filePath); + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength) as ArrayBuffer; } catch { return null; } @@ -80,9 +190,21 @@ export async function downloadFile(key: string): Promise<ArrayBuffer | null> { // --------------------------------------------------------------------------- export async function deleteFile(key: string): Promise<void> { - if (!storageEnabled) return; - const client = getClient(); - await client.send(new DeleteObjectCommand({ Bucket: BUCKET, Key: key })); + if (USE_R2) { + const client = getR2Client(); + await client.send(new DeleteObjectCommand({ Bucket: BUCKET, Key: key })); + return; + } + + if (USE_SUPABASE_STORAGE) { + const sb = await withBucket(); + await sb.storage.from(SUPABASE_BUCKET).remove([key]); + return; + } + + // Local filesystem + const filePath = path.join(LOCAL_STORAGE_PATH, key); + await fs.unlink(filePath).catch(() => {}); }
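Note: all three drivers sit behind the same byte-oriented contract (ArrayBuffer in, ArrayBuffer | null out), so callers never branch on the backend. A minimal round-trip smoke test (illustrative only, not part of the commit):

import { uploadFile, downloadFile, deleteFile } from "./storage";

async function storageSmokeTest(): Promise<void> {
  const key = "diagnostics/hello.txt";
  const bytes = new TextEncoder().encode("hello");
  await uploadFile(key, bytes.buffer as ArrayBuffer, "text/plain");
  const back = await downloadFile(key); // null signals any failure, per the catch above
  const ok = back !== null && new TextDecoder().decode(back) === "hello";
  console.log("storage round-trip ok:", ok);
  await deleteFile(key); // best-effort on the local backend (unlink errors are swallowed)
}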
// --------------------------------------------------------------------------- @@ -94,22 +216,35 @@ export async function getSignedUrl( expiresIn = 3600, downloadFilename?: string, ): Promise<string | null> { - if (!storageEnabled) return null; try { - const client = getClient(); - // Override the response Content-Disposition so the browser uses this - // filename on download, instead of the last path segment of the R2 key - // (which includes the document UUID). The `download` attribute on <a> - // is ignored for cross-origin URLs, so we have to set it server-side. - const responseContentDisposition = downloadFilename - ? buildContentDisposition("attachment", downloadFilename) - : undefined; - const command = new GetObjectCommand({ - Bucket: BUCKET, - Key: key, - ResponseContentDisposition: responseContentDisposition, - }); - return await awsGetSignedUrl(client, command, { expiresIn }); + if (USE_R2) { + const client = getR2Client(); + const responseContentDisposition = downloadFilename + ? buildContentDisposition("attachment", downloadFilename) + : undefined; + const command = new GetObjectCommand({ + Bucket: BUCKET, + Key: key, + ResponseContentDisposition: responseContentDisposition, + }); + return await awsGetSignedUrl(client, command, { expiresIn }); + } + + if (USE_SUPABASE_STORAGE) { + const sb = await withBucket(); + const { data, error } = await sb.storage + .from(SUPABASE_BUCKET) + .createSignedUrl(key, expiresIn, { + download: downloadFilename ? normalizeDownloadFilename(downloadFilename) : false, + }); + if (error || !data) return null; + return data.signedUrl; + } + + // Local filesystem: generate a time-limited HMAC token served by /local-files + const filename = downloadFilename ?? path.basename(key); + const token = signLocalFile(key, filename, expiresIn); + return `${BACKEND_URL}/local-files/${token}`; } catch { return null; }
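Note: signLocalFile/verifyLocalFile live in backend/src/lib/localSignedTokens.ts, which this diff does not include. Judging only from the call sites (key and filename in, opaque token out, verified later by the /local-files route), a plausible implementation is an HMAC over an expiring payload; the sketch below is that guess, not the project's actual code:

import crypto from "crypto";

const SECRET = process.env.JWT_SECRET ?? "dev-secret"; // assumed secret source

export function signLocalFile(key: string, filename: string, expiresIn: number): string {
  const payload = Buffer.from(
    JSON.stringify({ key, filename, exp: Date.now() + expiresIn * 1000 }),
  ).toString("base64url");
  const mac = crypto.createHmac("sha256", SECRET).update(payload).digest("base64url");
  return `${payload}.${mac}`;
}

export function verifyLocalFile(token: string): { key: string; filename: string } | null {
  const [payload, mac] = token.split(".");
  if (!payload || !mac) return null;
  const expected = crypto.createHmac("sha256", SECRET).update(payload).digest("base64url");
  if (mac.length !== expected.length) return null;
  if (!crypto.timingSafeEqual(Buffer.from(mac), Buffer.from(expected))) return null;
  const data = JSON.parse(Buffer.from(payload, "base64url").toString()) as {
    key: string;
    filename: string;
    exp: number;
  };
  return Date.now() < data.exp ? { key: data.key, filename: data.filename } : null;
}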
@@ -185,3 +320,13 @@ function storageExtension(filename: string, fallback: string): string { const ext = filename.slice(lastDot).toLowerCase(); return /^\.[a-z0-9]{1,16}$/.test(ext) ? ext : fallback; } + +// --------------------------------------------------------------------------- +// Diagnostics +// --------------------------------------------------------------------------- + +export function activeStorageBackend(): "r2" | "supabase" | "local" { + if (USE_R2) return "r2"; + if (USE_SUPABASE_STORAGE) return "supabase"; + return "local"; +} diff --git a/backend/src/lib/userApiKeys.ts b/backend/src/lib/userApiKeys.ts index 4355c939..734e5063 100644 --- a/backend/src/lib/userApiKeys.ts +++ b/backend/src/lib/userApiKeys.ts @@ -1,9 +1,9 @@ import crypto from "crypto"; -import { createServerSupabase } from "./supabase"; +import { createDb, DbClient } from "./db"; import type { UserApiKeys } from "./llm"; -type Db = ReturnType<typeof createServerSupabase>; -export type ApiKeyProvider = "claude" | "gemini" | "openai"; +type Db = DbClient; +export type ApiKeyProvider = "claude" | "gemini" | "openai" | "ollama"; export type ApiKeySource = "user" | "env" | null; export type ApiKeyStatus = Record<ApiKeyProvider, boolean> & { sources: Record<ApiKeyProvider, ApiKeySource>; }; @@ -16,7 +16,7 @@ type EncryptedKeyRow = { auth_tag: string; }; -const PROVIDERS: ApiKeyProvider[] = ["claude", "gemini", "openai"]; +const PROVIDERS: ApiKeyProvider[] = ["claude", "gemini", "openai", "ollama"]; function envApiKey(provider: ApiKeyProvider): string | null { if (provider === "claude") { @@ -29,9 +29,28 @@ function envApiKey(provider: ApiKeyProvider): string | null { if (provider === "openai") { return process.env.OPENAI_API_KEY?.trim() || null; } + if (provider === "ollama") { + return process.env.OLLAMA_API_KEY?.trim() || + process.env.LLAMACPP_API_KEY?.trim() || + null; + } return process.env.GEMINI_API_KEY?.trim() || null; } +/** + * Returns true if the Ollama provider is available via env config + * (either OLLAMA_BASE_URL or an API key). Used separately from + * envApiKey because Ollama may not require an API key at all. + */ +export function hasEnvOllama(): boolean { + return !!( + process.env.OLLAMA_API_KEY?.trim() || + process.env.LLAMACPP_API_KEY?.trim() || + process.env.OLLAMA_BASE_URL?.trim() || + process.env.LLAMACPP_BASE_URL?.trim() + ); +} + export function hasEnvApiKey(provider: ApiKeyProvider): boolean { return !!envApiKey(provider); } @@ -40,6 +59,7 @@ function encryptionKey(): Buffer { const secret = process.env.USER_API_KEYS_ENCRYPTION_SECRET || process.env.API_KEYS_ENCRYPTION_SECRET || + process.env.JWT_SECRET || process.env.SUPABASE_SECRET_KEY; if (!secret) { throw new Error("API key encryption secret is not configured"); } @@ -93,31 +113,39 @@ export function normalizeApiKeyProvider(value: string): ApiKeyProvider | null { export async function getUserApiKeyStatus( userId: string, - db: Db = createServerSupabase(), + db: Db = createDb(), ): Promise<ApiKeyStatus> { const status: ApiKeyStatus = { claude: false, gemini: false, openai: false, + ollama: false, sources: { claude: null, gemini: null, openai: null, + ollama: null, }, }; for (const provider of PROVIDERS) { - if (hasEnvApiKey(provider)) { + // Ollama is available via env if the base URL or API key is set, + // even if no actual API key is provided. + if (provider === "ollama") { + if (hasEnvOllama()) { + status[provider] = true; + status.sources[provider] = "env"; + } + } else if (hasEnvApiKey(provider)) { status[provider] = true; status.sources[provider] = "env"; } } - const { data, error } = await db + const { data } = await db .from("user_api_keys") .select("provider") .eq("user_id", userId); - if (error) throw error; for (const row of data ?? 
[]) { + const provider = normalizeApiKeyProvider(String(row.provider)); @@ -132,19 +160,19 @@ export async function getUserApiKeyStatus( export async function getUserApiKeys( userId: string, - db: Db = createServerSupabase(), + db: Db = createDb(), ): Promise<UserApiKeys> { const apiKeys: UserApiKeys = { claude: envApiKey("claude"), gemini: envApiKey("gemini"), openai: envApiKey("openai"), + ollama: envApiKey("ollama"), }; - const { data, error } = await db + const { data } = await db .from("user_api_keys") .select("provider, encrypted_key, iv, auth_tag") .eq("user_id", userId); - if (error) throw error; for (const row of (data ?? []) as EncryptedKeyRow[]) { const provider = normalizeApiKeyProvider(row.provider); @@ -160,9 +188,10 @@ export async function saveUserApiKey( userId: string, provider: ApiKeyProvider, value: string | null, - db: Db = createServerSupabase(), + db: Db = createDb(), ): Promise<void> { const normalized = value?.trim() || null; + if (!normalized) { const { error } = await db .from("user_api_keys")
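Note: the { encrypted_key, iv, auth_tag } row shape points at authenticated encryption, almost certainly AES-256-GCM given the Node crypto import and the dedicated auth-tag column. The encrypt/decrypt helpers fall outside these hunks, so this is a sketch of the likely pattern, with key derivation reduced to a SHA-256 of the configured secret for brevity:

import crypto from "crypto";

const key = crypto
  .createHash("sha256")
  .update(process.env.USER_API_KEYS_ENCRYPTION_SECRET ?? "dev-secret")
  .digest(); // 32 bytes -> AES-256

function encryptApiKey(plain: string) {
  const iv = crypto.randomBytes(12);
  const cipher = crypto.createCipheriv("aes-256-gcm", key, iv);
  const encrypted = Buffer.concat([cipher.update(plain, "utf8"), cipher.final()]);
  return {
    encrypted_key: encrypted.toString("base64"),
    iv: iv.toString("base64"),
    auth_tag: cipher.getAuthTag().toString("base64"),
  };
}

function decryptApiKey(row: { encrypted_key: string; iv: string; auth_tag: string }): string {
  const decipher = crypto.createDecipheriv("aes-256-gcm", key, Buffer.from(row.iv, "base64"));
  decipher.setAuthTag(Buffer.from(row.auth_tag, "base64"));
  return Buffer.concat([
    decipher.update(Buffer.from(row.encrypted_key, "base64")),
    decipher.final(),
  ]).toString("utf8");
}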
diff --git a/backend/src/lib/userSettings.ts b/backend/src/lib/userSettings.ts index bfbeb0fd..5b574339 100644 --- a/backend/src/lib/userSettings.ts +++ b/backend/src/lib/userSettings.ts @@ -1,12 +1,13 @@ -import { createServerSupabase } from "./supabase"; +import { createDb, DbClient } from "./db"; import { resolveModel, DEFAULT_TITLE_MODEL, DEFAULT_TABULAR_MODEL, OPENAI_LOW_MODELS, + OLLAMA_LOW_MODELS, type UserApiKeys, } from "./llm"; -import { getUserApiKeys as getStoredUserApiKeys } from "./userApiKeys"; +import { getUserApiKeys as getStoredUserApiKeys, hasEnvOllama } from "./userApiKeys"; export type UserModelSettings = { title_model: string; @@ -16,38 +17,46 @@ export type UserModelSettings = { // Title generation is a lightweight task — always routed to the cheapest model // of whichever provider the user has keys for: Gemini Flash Lite if Gemini is -// available, otherwise OpenAI nano, otherwise Claude Haiku. With no user keys -// set, defaults to Gemini (the dev-mode env fallback). -function resolveTitleModel(apiKeys: UserApiKeys): string { +// available, otherwise OpenAI nano, otherwise Claude Haiku, otherwise the +// cheapest local model. With no user keys set, defaults to Gemini (the dev-mode +// env fallback). +// When Ollama is the only available provider, reuse the user's tabular model +// (which we know is installed) rather than a hardcoded default that may not exist. +function resolveTitleModel(apiKeys: UserApiKeys, tabularModel: string): string { if (apiKeys.gemini?.trim()) return DEFAULT_TITLE_MODEL; if (apiKeys.openai?.trim()) return OPENAI_LOW_MODELS[0]; if (apiKeys.claude?.trim()) return "claude-haiku-4-5"; + const ollamaAvailable = !!(apiKeys.ollama?.trim()) || hasEnvOllama(); + if (ollamaAvailable) { + return tabularModel.startsWith("local-") ? tabularModel : OLLAMA_LOW_MODELS[0]; + } return DEFAULT_TITLE_MODEL; } export async function getUserModelSettings( userId: string, - db?: ReturnType<typeof createServerSupabase>, + db?: DbClient, ): Promise<UserModelSettings> { - const client = db ?? createServerSupabase(); + const client = db ?? createDb(); const { data } = await client .from("user_profiles") .select("tabular_model") .eq("user_id", userId) .single(); const api_keys = await getStoredUserApiKeys(userId, client); + const tabular_model = resolveModel(data?.tabular_model, DEFAULT_TABULAR_MODEL); return { - title_model: resolveTitleModel(api_keys), - tabular_model: resolveModel(data?.tabular_model, DEFAULT_TABULAR_MODEL), + title_model: resolveTitleModel(api_keys, tabular_model), + tabular_model, api_keys, }; } export async function getUserApiKeys( userId: string, - db?: ReturnType<typeof createServerSupabase>, + db?: DbClient, ): Promise<UserApiKeys> { - const client = db ?? createServerSupabase(); + const client = db ?? createDb(); return getStoredUserApiKeys(userId, client); }
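Note: the routing in resolveTitleModel collapses to a small decision table. A pure restatement for illustration (resolveTitleModel itself is not exported; the literal model ids below are stand-ins for constants that live in lib/llm.ts, outside this diff):

type Keys = Partial<Record<"gemini" | "openai" | "claude" | "ollama", string | null>>;

function pickTitleModel(keys: Keys, ollamaEnv: boolean, tabularModel: string): string {
  if (keys.gemini?.trim()) return "gemini-flash-lite"; // DEFAULT_TITLE_MODEL stand-in
  if (keys.openai?.trim()) return "openai-nano";       // OPENAI_LOW_MODELS[0] stand-in
  if (keys.claude?.trim()) return "claude-haiku-4-5";
  if (keys.ollama?.trim() || ollamaEnv)
    return tabularModel.startsWith("local-") ? tabularModel : "local-small"; // OLLAMA_LOW_MODELS[0] stand-in
  return "gemini-flash-lite"; // dev-mode env fallback
}

console.log(pickTitleModel({}, true, "local-llama3.1:8b")); // -> "local-llama3.1:8b"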
"").toLowerCase(); + res.locals.token = token; + next(); } diff --git a/backend/src/routes/chat.ts b/backend/src/routes/chat.ts index fe272c67..53fef8f5 100644 --- a/backend/src/routes/chat.ts +++ b/backend/src/routes/chat.ts @@ -1,6 +1,6 @@ import { Router } from "express"; import { requireAuth } from "../middleware/auth"; -import { createServerSupabase } from "../lib/supabase"; +import { createDb, DbClient } from "../lib/db"; import { buildDocContext, buildMessages, @@ -16,7 +16,7 @@ import { checkProjectAccess } from "../lib/access"; export const chatRouter = Router(); -type Db = ReturnType; +type Db = DbClient; const isDev = process.env.NODE_ENV !== "production"; const devLog = (...args: Parameters) => { if (isDev) console.log(...args); @@ -140,7 +140,7 @@ async function getAccessibleChat( // listed per-project via GET /projects/:projectId/chats. chatRouter.get("/", requireAuth, async (req, res) => { const userId = res.locals.userId as string; - const db = createServerSupabase(); + const db = createDb(); const { data: ownProjects, error: projErr } = await db .from("projects") @@ -174,7 +174,7 @@ chatRouter.post("/create", requireAuth, async (req, res) => { return void res.status(400).json({ detail: parsedProjectId.detail }); } const projectId = parsedProjectId.projectId; - const db = createServerSupabase(); + const db = createDb(); const projectAccess = await validateAccessibleProjectId( projectId, userId, @@ -201,7 +201,7 @@ chatRouter.get("/:chatId", requireAuth, async (req, res) => { const userId = res.locals.userId as string; const userEmail = res.locals.userEmail as string | undefined; const { chatId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const chat = await getAccessibleChat(chatId, userId, userEmail, db); if (!chat) @@ -224,7 +224,7 @@ chatRouter.get("/:chatId", requireAuth, async (req, res) => { // EditCards render with the real state. async function hydrateEditStatuses( messages: Record[], - db: ReturnType, + db: DbClient, ): Promise[]> { const editIds = new Set(); const versionIds = new Set(); @@ -342,7 +342,7 @@ chatRouter.patch("/:chatId", requireAuth, async (req, res) => { if (!title) return void res.status(400).json({ detail: "title is required" }); - const db = createServerSupabase(); + const db = createDb(); const { data, error } = await db .from("chats") .update({ title }) @@ -360,7 +360,7 @@ chatRouter.patch("/:chatId", requireAuth, async (req, res) => { chatRouter.delete("/:chatId", requireAuth, async (req, res) => { const userId = res.locals.userId as string; const { chatId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const { error } = await db .from("chats") .delete() @@ -381,7 +381,7 @@ chatRouter.post("/:chatId/generate-title", requireAuth, async (req, res) => { if (!message) return void res.status(400).json({ detail: "message is required" }); - const db = createServerSupabase(); + const db = createDb(); const chat = await getAccessibleChat(chatId, userId, userEmail, db); if (!chat) return void res.status(404).json({ detail: "Chat not found" }); @@ -449,7 +449,7 @@ chatRouter.post("/", requireAuth, async (req, res) => { }); const userEmail = res.locals.userEmail as string | undefined; - const db = createServerSupabase(); + const db = createDb(); let chatId = chat_id ?? 
diff --git a/backend/src/routes/chat.ts b/backend/src/routes/chat.ts index fe272c67..53fef8f5 100644 --- a/backend/src/routes/chat.ts +++ b/backend/src/routes/chat.ts @@ -1,6 +1,6 @@ import { Router } from "express"; import { requireAuth } from "../middleware/auth"; -import { createServerSupabase } from "../lib/supabase"; +import { createDb, DbClient } from "../lib/db"; import { buildDocContext, buildMessages, @@ -16,7 +16,7 @@ import { checkProjectAccess } from "../lib/access"; export const chatRouter = Router(); -type Db = ReturnType<typeof createServerSupabase>; +type Db = DbClient; const isDev = process.env.NODE_ENV !== "production"; const devLog = (...args: Parameters<typeof console.log>) => { if (isDev) console.log(...args); }; @@ -140,7 +140,7 @@ async function getAccessibleChat( // listed per-project via GET /projects/:projectId/chats. chatRouter.get("/", requireAuth, async (req, res) => { const userId = res.locals.userId as string; - const db = createServerSupabase(); + const db = createDb(); const { data: ownProjects, error: projErr } = await db .from("projects") @@ -174,7 +174,7 @@ chatRouter.post("/create", requireAuth, async (req, res) => { return void res.status(400).json({ detail: parsedProjectId.detail }); } const projectId = parsedProjectId.projectId; - const db = createServerSupabase(); + const db = createDb(); const projectAccess = await validateAccessibleProjectId( projectId, userId, @@ -201,7 +201,7 @@ chatRouter.get("/:chatId", requireAuth, async (req, res) => { const userId = res.locals.userId as string; const userEmail = res.locals.userEmail as string | undefined; const { chatId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const chat = await getAccessibleChat(chatId, userId, userEmail, db); if (!chat) @@ -224,7 +224,7 @@ chatRouter.get("/:chatId", requireAuth, async (req, res) => { // EditCards render with the real state. async function hydrateEditStatuses( messages: Record<string, unknown>[], - db: ReturnType<typeof createServerSupabase>, + db: DbClient, ): Promise<Record<string, unknown>[]> { const editIds = new Set(); const versionIds = new Set(); @@ -342,7 +342,7 @@ chatRouter.patch("/:chatId", requireAuth, async (req, res) => { if (!title) return void res.status(400).json({ detail: "title is required" }); - const db = createServerSupabase(); + const db = createDb(); const { data, error } = await db .from("chats") .update({ title }) @@ -360,7 +360,7 @@ chatRouter.patch("/:chatId", requireAuth, async (req, res) => { chatRouter.delete("/:chatId", requireAuth, async (req, res) => { const userId = res.locals.userId as string; const { chatId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const { error } = await db .from("chats") .delete() @@ -381,7 +381,7 @@ chatRouter.post("/:chatId/generate-title", requireAuth, async (req, res) => { if (!message) return void res.status(400).json({ detail: "message is required" }); - const db = createServerSupabase(); + const db = createDb(); const chat = await getAccessibleChat(chatId, userId, userEmail, db); if (!chat) return void res.status(404).json({ detail: "Chat not found" }); @@ -449,7 +449,7 @@ chatRouter.post("/", requireAuth, async (req, res) => { }); const userEmail = res.locals.userEmail as string | undefined; - const db = createServerSupabase(); + const db = createDb(); let chatId = chat_id ?? null; let chatTitle: string | null = null; let resolvedProjectId: string | null = parsedProjectId.projectId; @@ -505,10 +505,16 @@ chatRouter.post("/", requireAuth, async (req, res) => { const lastUser = [...messages].reverse().find((m) => m.role === "user"); if (lastUser) { + // content is a jsonb column: plain strings must be JSON-encoded so PostgreSQL + // can parse them as valid JSON (e.g. "ciao" → '"ciao"'). Objects/arrays are + // handled by PgAdapter already. + const contentVal = typeof lastUser.content === "string" + ? JSON.stringify(lastUser.content) + : lastUser.content; await db.from("chat_messages").insert({ chat_id: chatId, role: "user", - content: lastUser.content, + content: contentVal, files: lastUser.files ?? null, workflow: lastUser.workflow ?? null, }); }
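Note: the JSON.stringify step exists because node-postgres serializes object parameters to JSON automatically but passes strings through verbatim, and a bare string such as ciao is not valid jsonb input. Standalone illustration against a hypothetical table t(doc jsonb):

import { Pool } from "pg";

const pool = new Pool({ connectionString: process.env.DATABASE_URL });

await pool.query("INSERT INTO t (doc) VALUES ($1)", [{ role: "user" }]); // ok: object is JSON-encoded by pg
// await pool.query("INSERT INTO t (doc) VALUES ($1)", ["ciao"]);        // fails: invalid input syntax for type json
await pool.query("INSERT INTO t (doc) VALUES ($1)", [JSON.stringify("ciao")]); // ok: stores the JSON string "ciao"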
diff --git a/backend/src/routes/documents.ts b/backend/src/routes/documents.ts index 32f4b881..bf27de03 100644 --- a/backend/src/routes/documents.ts +++ b/backend/src/routes/documents.ts @@ -1,6 +1,6 @@ import { Router } from "express"; import { requireAuth } from "../middleware/auth"; -import { createServerSupabase } from "../lib/supabase"; +import { createDb, DbClient } from "../lib/db"; import { buildContentDisposition, downloadFile, @@ -30,7 +30,7 @@ const ALLOWED_TYPES = new Set(["pdf", "docx", "doc"]); // GET /single-documents documentsRouter.get("/", requireAuth, async (req, res) => { const userId = res.locals.userId as string; - const db = createServerSupabase(); + const db = createDb(); const { data, error } = await db .from("documents") .select("*") @@ -54,7 +54,7 @@ documentsRouter.post( singleFileUpload("file"), async (req, res) => { const userId = res.locals.userId as string; - const db = createServerSupabase(); + const db = createDb(); await handleDocumentUpload(req, res, userId, null, db); }, ); @@ -63,7 +63,7 @@ documentsRouter.delete("/:documentId", requireAuth, async (req, res) => { const userId = res.locals.userId as string; const { documentId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const { data: doc, error } = await db .from("documents") @@ -100,7 +100,7 @@ documentsRouter.get("/:documentId/display", requireAuth, async (req, res) => { const { documentId } = req.params; const versionIdParam = typeof req.query.version_id === "string" ? req.query.version_id : null; - const db = createServerSupabase(); + const db = createDb(); const { data: doc } = await db .from("documents") @@ -161,7 +161,7 @@ documentsRouter.post("/download-zip", requireAuth, async (req, res) => { if (!Array.isArray(document_ids) || document_ids.length === 0) return void res.status(400).json({ detail: "document_ids is required" }); - const db = createServerSupabase(); + const db = createDb(); const { data: rawDocs, error } = await db .from("documents") .select("id, filename, file_type, current_version_id, user_id, project_id") @@ -213,7 +213,7 @@ documentsRouter.get("/:documentId/url", requireAuth, async (req, res) => { const userEmail = res.locals.userEmail as string | undefined; const { documentId } = req.params; const versionIdParam = typeof req.query.version_id === "string" ? req.query.version_id : null; - const db = createServerSupabase(); + const db = createDb(); const { data: doc, error } = await db .from("documents") @@ -264,7 +264,7 @@ documentsRouter.get("/:documentId/docx", requireAuth, async (req, res) => { const userEmail = res.locals.userEmail as string | undefined; const { documentId } = req.params; const versionIdParam = typeof req.query.version_id === "string" ? req.query.version_id : null; - const db = createServerSupabase(); + const db = createDb(); const { data: doc, error } = await db .from("documents") @@ -347,7 +347,7 @@ documentsRouter.get("/:documentId/versions", requireAuth, async (req, res) => { const userId = res.locals.userId as string; const userEmail = res.locals.userEmail as string | undefined; const { documentId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const { data: doc } = await db .from("documents") @@ -384,7 +384,7 @@ documentsRouter.post( const userId = res.locals.userId as string; const userEmail = res.locals.userEmail as string | undefined; const { documentId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const file = req.file; if (!file) @@ -547,7 +547,7 @@ documentsRouter.patch( const userId = res.locals.userId as string; const userEmail = res.locals.userEmail as string | undefined; const { documentId, versionId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const { data: doc } = await db .from("documents") @@ -592,7 +592,7 @@ documentsRouter.get( const { documentId } = req.params; const versionIdParam = typeof req.query.version_id === "string" ? req.query.version_id : null; - const db = createServerSupabase(); + const db = createDb(); const { data: doc } = await db .from("documents") @@ -630,7 +630,7 @@ async function handleEditResolution( const userId = res.locals.userId as string; const userEmail = res.locals.userEmail as string | undefined; const { documentId, editId } = req.params; - const db = createServerSupabase(); + const db = createDb(); console.log(`[edit-resolution] incoming ${mode}`, { userId, @@ -835,7 +835,7 @@ async function handleDocumentUpload( res: import("express").Response, userId: string, projectId: string | null, - db: ReturnType<typeof createServerSupabase>, + db: DbClient, ) { const file = req.file; if (!file) return void res.status(400).json({ detail: "file is required" }); diff --git a/backend/src/routes/downloads.ts b/backend/src/routes/downloads.ts index 0b374e62..9fe9ae7a 100644 --- a/backend/src/routes/downloads.ts +++ b/backend/src/routes/downloads.ts @@ -1,6 +1,6 @@ import { Router } from "express"; import { requireAuth } from "../middleware/auth"; -import { createServerSupabase } from "../lib/supabase"; +import { createDb, DbClient } from "../lib/db"; import { buildContentDisposition, downloadFile } from "../lib/storage"; import { verifyDownload } from "../lib/downloadTokens"; import { ensureDocAccess } from "../lib/access"; @@ -25,7 +25,7 @@ downloadsRouter.get("/:token", requireAuth, async (req, res) => { if (!info) return void res.status(404).json({ detail: "Invalid link" }); - const db = createServerSupabase(); + const db = createDb(); let version: | { id: string; diff --git a/backend/src/routes/localAuth.ts b/backend/src/routes/localAuth.ts new file mode 100644 index 00000000..81075c6b --- /dev/null +++ b/backend/src/routes/localAuth.ts @@ -0,0 +1,117 @@ +/** + * Local authentication endpoints — only active when AUTH_MODE=local.
+ * + * POST /auth/register { email, password } → { access_token, user } + * POST /auth/login { email, password } → { access_token, user } + * + * JWTs are signed with JWT_SECRET and expire in JWT_EXPIRY_DAYS (default 30). + */ + +import { Router } from "express"; +import bcrypt from "bcrypt"; +import jwt from "jsonwebtoken"; +import { Pool } from "pg"; + +export const localAuthRouter = Router(); + +function getPool(): Pool { + const url = process.env.DATABASE_URL; + if (!url) throw new Error("DATABASE_URL must be set when AUTH_MODE=local"); + // db.ts keeps its own pool; importing it from here would create a circular + // dependency, so this module keeps a separate lightweight pool, created once per process. + return new Pool({ connectionString: url, max: 5 }); +} + +let _pool: Pool | null = null; +function pool(): Pool { + if (!_pool) _pool = getPool(); + return _pool; +} + +function jwtSecret(): string { + const s = process.env.JWT_SECRET; + if (!s) throw new Error("JWT_SECRET must be set when AUTH_MODE=local"); + return s; +} + +function jwtExpirySeconds(): number { + const days = parseInt(process.env.JWT_EXPIRY_DAYS ?? "30", 10); + return (isFinite(days) && days > 0 ? days : 30) * 86400; +} + +function makeToken(userId: string, email: string): string { + return jwt.sign( + { sub: userId, email }, + jwtSecret(), + { expiresIn: jwtExpirySeconds() }, + ); +} + +localAuthRouter.post("/register", async (req, res) => { + const { email, password } = req.body ?? {}; + if (typeof email !== "string" || typeof password !== "string") { + return void res.status(400).json({ detail: "email and password are required" }); + } + const trimmedEmail = email.trim().toLowerCase(); + if (!trimmedEmail || !password) { + return void res.status(400).json({ detail: "email and password are required" }); + } + if (password.length < 6) { + return void res.status(400).json({ detail: "Password must be at least 6 characters" }); + } + + try { + const existing = await pool().query( + "SELECT id FROM users WHERE email = $1", + [trimmedEmail], + ); + if ((existing.rowCount ?? 0) > 0) { + return void res.status(409).json({ detail: "Email already registered" }); + } + + const hash = await bcrypt.hash(password, 12); + const result = await pool().query( + "INSERT INTO users (email, password_hash) VALUES ($1, $2) RETURNING id, email", + [trimmedEmail, hash], + ); + const user = result.rows[0] as { id: string; email: string }; + // Create user_profiles row + await pool().query( + "INSERT INTO user_profiles (user_id) VALUES ($1) ON CONFLICT (user_id) DO NOTHING", + [user.id], + ); + const access_token = makeToken(user.id, user.email); + res.status(201).json({ access_token, user: { id: user.id, email: user.email } }); + } catch (err) { + console.error("[local-auth] register error", err); + res.status(500).json({ detail: "Registration failed" }); + } +}); + +localAuthRouter.post("/login", async (req, res) => { + const { email, password } = req.body ?? {}; + if (typeof email !== "string" || typeof password !== "string") { + return void res.status(400).json({ detail: "email and password are required" }); + } + const trimmedEmail = email.trim().toLowerCase(); + + try { + const result = await pool().query( + "SELECT id, email, password_hash FROM users WHERE email = $1", + [trimmedEmail], + ); + const user = result.rows[0] as { id: string; email: string; password_hash: string } | undefined; + if (!user) { + return void res.status(401).json({ detail: "Invalid credentials" }); + } + const valid = await bcrypt.compare(password, user.password_hash); + if (!valid) { + return void res.status(401).json({ detail: "Invalid credentials" }); + } + const access_token = makeToken(user.id, user.email); + res.json({ access_token, user: { id: user.id, email: user.email } }); + } catch (err) { + console.error("[local-auth] login error", err); + res.status(500).json({ detail: "Login failed" }); + } +});
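Note: end to end, the two endpoints behave as below (sketch; Node 18+ global fetch, paths as documented in the route comments here and in documents.ts):

const base = process.env.BACKEND_URL ?? "http://localhost:3001";

const reg = await fetch(`${base}/auth/register`, {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({ email: "me@example.com", password: "secret123" }),
});
const { access_token } = (await reg.json()) as { access_token: string };

// Every protected route then goes through requireAuth with the Bearer token:
await fetch(`${base}/single-documents`, {
  headers: { Authorization: `Bearer ${access_token}` },
});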
diff --git a/backend/src/routes/localFiles.ts b/backend/src/routes/localFiles.ts new file mode 100644 index 00000000..b5963ea0 --- /dev/null +++ b/backend/src/routes/localFiles.ts @@ -0,0 +1,41 @@ +import { Router } from "express"; +import path from "path"; +import { verifyLocalFile } from "../lib/localSignedTokens"; +import { downloadFile } from "../lib/storage"; +import { buildContentDisposition } from "../lib/storage"; + +export const localFilesRouter = Router(); + +function contentTypeFor(filename: string): string { + const lower = filename.toLowerCase(); + if (lower.endsWith(".pdf")) return "application/pdf"; + if (lower.endsWith(".docx")) + return "application/vnd.openxmlformats-officedocument.wordprocessingml.document"; + if (lower.endsWith(".doc")) return "application/msword"; + if (lower.endsWith(".xlsx")) + return "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"; + return "application/octet-stream"; +} + +// GET /local-files/:token +// No authentication required — the HMAC-signed, time-limited token is the credential. +localFilesRouter.get("/:token", async (req, res) => { + const info = verifyLocalFile(req.params.token); + if (!info) return void res.status(404).json({ detail: "Invalid or expired link" }); + + // Prevent path traversal: storage keys must not escape LOCAL_STORAGE_PATH. + const normalized = path.normalize(info.key); + if (normalized.startsWith("..") || path.isAbsolute(normalized)) { + return void res.status(400).json({ detail: "Invalid key" }); + } + + const raw = await downloadFile(info.key); + if (!raw) return void res.status(404).json({ detail: "File not found" }); + + res.setHeader("Content-Type", contentTypeFor(info.filename)); + res.setHeader( + "Content-Disposition", + buildContentDisposition("attachment", info.filename), + ); + res.send(Buffer.from(raw)); +}); diff --git a/backend/src/routes/projectChat.ts b/backend/src/routes/projectChat.ts index 5e299615..6a3db37e 100644 --- a/backend/src/routes/projectChat.ts +++ b/backend/src/routes/projectChat.ts @@ -1,6 +1,6 @@ import { Router } from "express"; import { requireAuth } from "../middleware/auth"; -import { createServerSupabase } from "../lib/supabase"; +import { createDb, DbClient } from "../lib/db"; import { buildProjectDocContext, buildMessages, @@ -38,7 +38,7 @@ projectChatRouter.post("/", requireAuth, async (req, res) => { attached_documents?: { filename: string; document_id: string }[]; }; - const db = createServerSupabase(); + const db = createDb(); // Verify the user has access to the project (owner or shared member). 
const projectAccess = await checkProjectAccess( @@ -80,10 +80,13 @@ projectChatRouter.post("/", requireAuth, async (req, res) => { const lastUser = [...messages].reverse().find((m) => m.role === "user"); if (lastUser) { + const contentVal = typeof lastUser.content === "string" + ? JSON.stringify(lastUser.content) + : lastUser.content; await db.from("chat_messages").insert({ chat_id: chatId, role: "user", - content: lastUser.content, + content: contentVal, files: lastUser.files ?? null, workflow: lastUser.workflow ?? null, }); diff --git a/backend/src/routes/projects.ts b/backend/src/routes/projects.ts index da6c9fce..046d78d8 100644 --- a/backend/src/routes/projects.ts +++ b/backend/src/routes/projects.ts @@ -1,7 +1,6 @@ import { Router } from "express"; import { requireAuth } from "../middleware/auth"; -import { createServerSupabase } from "../lib/supabase"; -import { createClient } from "@supabase/supabase-js"; +import { createDb, DbClient, listAuthUsers } from "../lib/db"; import { attachActiveVersionPaths, attachLatestVersionNumbers, @@ -18,7 +17,7 @@ const ALLOWED_TYPES = new Set(["pdf", "docx", "doc"]); projectsRouter.get("/", requireAuth, async (req, res) => { const userId = res.locals.userId as string; const userEmail = res.locals.userEmail as string; - const db = createServerSupabase(); + const db = createDb(); const { data: ownProjects, error: ownError } = await db .from("projects") @@ -27,11 +26,14 @@ projectsRouter.get("/", requireAuth, async (req, res) => { .order("created_at", { ascending: false }); if (ownError) return void res.status(500).json({ detail: ownError.message }); + // .contains() sends PostgreSQL array syntax {value} instead of JSON ["value"] + // for jsonb columns, causing "invalid input syntax for type json". + // Use .filter() with explicit JSON.stringify() to get the correct format. const { data: sharedProjects, error: sharedError } = userEmail ? await db .from("projects") .select("*") - .contains("shared_with", [userEmail]) + .filter("shared_with", "cs", JSON.stringify([userEmail])) .neq("user_id", userId) .order("created_at", { ascending: false }) : { data: [], error: null }; @@ -82,7 +84,7 @@ projectsRouter.post("/", requireAuth, async (req, res) => { if (!name?.trim()) return void res.status(400).json({ detail: "name is required" }); - const db = createServerSupabase(); + const db = createDb(); const { data, error } = await db .from("projects") .insert({ @@ -102,7 +104,7 @@ projectsRouter.get("/:projectId", requireAuth, async (req, res) => { const userId = res.locals.userId as string; const userEmail = res.locals.userEmail as string; const { projectId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const { data: project, error } = await db .from("projects") @@ -146,7 +148,7 @@ projectsRouter.get("/:projectId/people", requireAuth, async (req, res) => { const userId = res.locals.userId as string; const userEmail = res.locals.userEmail as string | undefined; const { projectId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const { data: project } = await db .from("projects") @@ -169,8 +171,7 @@ projectsRouter.get("/:projectId/people", requireAuth, async (req, res) => { // Pull every auth user (matching the lookup endpoint's pattern). For // larger deployments this should page or be replaced with a bulk-by-id // RPC, but it keeps things simple while user counts are modest. - const { data: usersData } = await db.auth.admin.listUsers({ perPage: 1000 }); - const allUsers = usersData?.users ?? 
[]; + const allUsers = await listAuthUsers(); const userByEmail = new Map(); const userById = new Map(); for (const u of allUsers) { @@ -247,7 +248,7 @@ projectsRouter.patch("/:projectId", requireAuth, async (req, res) => { updates.shared_with = cleaned; } - const db = createServerSupabase(); + const db = createDb(); const { data, error } = await db .from("projects") .update({ ...updates, updated_at: new Date().toISOString() }) @@ -274,7 +275,7 @@ projectsRouter.patch("/:projectId", requireAuth, async (req, res) => { projectsRouter.delete("/:projectId", requireAuth, async (req, res) => { const userId = res.locals.userId as string; const { projectId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const { error } = await db .from("projects") .delete() @@ -289,7 +290,7 @@ projectsRouter.get("/:projectId/documents", requireAuth, async (req, res) => { const userId = res.locals.userId as string; const userEmail = res.locals.userEmail as string | undefined; const { projectId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const access = await checkProjectAccess(projectId, userId, userEmail, db); if (!access.ok) @@ -316,7 +317,7 @@ projectsRouter.post( const userId = res.locals.userId as string; const userEmail = res.locals.userEmail as string | undefined; const { projectId, documentId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const access = await checkProjectAccess(projectId, userId, userEmail, db); if (!access.ok) @@ -446,7 +447,7 @@ projectsRouter.post( const userId = res.locals.userId as string; const userEmail = res.locals.userEmail as string | undefined; const { projectId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const access = await checkProjectAccess(projectId, userId, userEmail, db); if (!access.ok) @@ -465,7 +466,7 @@ projectsRouter.get("/:projectId/chats", requireAuth, async (req, res) => { const userId = res.locals.userId as string; const userEmail = res.locals.userEmail as string | undefined; const { projectId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const access = await checkProjectAccess(projectId, userId, userEmail, db); if (!access.ok) @@ -490,7 +491,7 @@ projectsRouter.post("/:projectId/folders", requireAuth, async (req, res) => { const { name, parent_folder_id } = req.body as { name: string; parent_folder_id?: string | null }; if (!name?.trim()) return void res.status(400).json({ detail: "name is required" }); - const db = createServerSupabase(); + const db = createDb(); const access = await checkProjectAccess(projectId, userId, userEmail, db); if (!access.ok) return void res.status(404).json({ detail: "Project not found" }); @@ -517,7 +518,7 @@ projectsRouter.patch("/:projectId/folders/:folderId", requireAuth, async (req, r const { projectId, folderId } = req.params; const body = req.body as { name?: string; parent_folder_id?: string | null }; - const db = createServerSupabase(); + const db = createDb(); const access = await checkProjectAccess(projectId, userId, userEmail, db); if (!access.ok) return void res.status(404).json({ detail: "Project not found" }); @@ -553,7 +554,7 @@ projectsRouter.delete("/:projectId/folders/:folderId", requireAuth, async (req, const userId = res.locals.userId as string; const userEmail = res.locals.userEmail as string | undefined; const { projectId, folderId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const access = await 
checkProjectAccess(projectId, userId, userEmail, db); if (!access.ok) return void res.status(404).json({ detail: "Project not found" }); @@ -577,7 +578,7 @@ projectsRouter.patch("/:projectId/documents/:documentId/folder", requireAuth, as const { projectId, documentId } = req.params; const { folder_id } = req.body as { folder_id: string | null }; - const db = createServerSupabase(); + const db = createDb(); const access = await checkProjectAccess(projectId, userId, userEmail, db); if (!access.ok) return void res.status(404).json({ detail: "Project not found" }); @@ -595,7 +596,7 @@ projectsRouter.patch("/:projectId/documents/:documentId/folder", requireAuth, as }); async function loadProjectFolder( - db: ReturnType<typeof createServerSupabase>, + db: DbClient, projectId: string, folderId: string, ): Promise<{ id: string; parent_folder_id: string | null } | null> { @@ -613,7 +614,7 @@ export async function handleDocumentUpload( res: import("express").Response, userId: string, projectId: string | null, - db: ReturnType<typeof createServerSupabase>, + db: DbClient, ) { const file = req.file; if (!file) return void res.status(400).json({ detail: "file is required" }); diff --git a/backend/src/routes/tabular.ts b/backend/src/routes/tabular.ts index 2b4f6db9..ce05ae97 100644 --- a/backend/src/routes/tabular.ts +++ b/backend/src/routes/tabular.ts @@ -1,6 +1,6 @@ import { Router } from "express"; import { requireAuth } from "../middleware/auth"; -import { createServerSupabase } from "../lib/supabase"; +import { createDb, DbClient, listAuthUsers } from "../lib/db"; import { downloadFile } from "../lib/storage"; import { loadActiveVersion } from "../lib/documentVersions"; import { normalizeDocxZipPaths } from "../lib/convert"; @@ -49,7 +49,7 @@ export const tabularRouter = Router(); tabularRouter.get("/", requireAuth, async (req, res) => { const userId = res.locals.userId as string; const userEmail = res.locals.userEmail as string | undefined; - const db = createServerSupabase(); + const db = createDb(); // Optional ?project_id= scopes results to a single project. Project-page // callers pass it; the global tabular-reviews page omits it. We still @@ -104,7 +104,7 @@ tabularRouter.get("/", requireAuth, async (req, res) => { ? 
db .from("tabular_reviews") .select("*") - .contains("shared_with", JSON.stringify([userEmail])) + .filter("shared_with", "cs", JSON.stringify([userEmail])) .neq("user_id", userId) .order("created_at", { ascending: false }) : Promise.resolve({ @@ -181,7 +181,7 @@ tabularRouter.post("/", requireAuth, async (req, res) => { project_id?: string; }; - const db = createServerSupabase(); + const db = createDb(); if (project_id) { const access = await checkProjectAccess( project_id, @@ -297,7 +297,7 @@ tabularRouter.get("/:reviewId", requireAuth, async (req, res) => { const userId = res.locals.userId as string; const userEmail = res.locals.userEmail as string | undefined; const { reviewId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const { data: review, error } = await db .from("tabular_reviews") @@ -344,7 +344,7 @@ tabularRouter.get("/:reviewId/people", requireAuth, async (req, res) => { const userId = res.locals.userId as string; const userEmail = res.locals.userEmail as string | undefined; const { reviewId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const { data: review } = await db .from("tabular_reviews") @@ -365,10 +365,7 @@ tabularRouter.get("/:reviewId/people", requireAuth, async (req, res) => { // Same pattern as /projects/:id/people: walk auth.users to map emails // to user_ids, then pull display_names from user_profiles by user_id. - const { data: usersData } = await db.auth.admin.listUsers({ - perPage: 1000, - }); - const allUsers = usersData?.users ?? []; + const allUsers = await listAuthUsers(); const userByEmail = new Map(); const userById = new Map(); for (const u of allUsers) { @@ -445,7 +442,7 @@ tabularRouter.patch("/:reviewId", requireAuth, async (req, res) => { } updates.updated_at = new Date().toISOString(); - const db = createServerSupabase(); + const db = createDb(); const { data: existingReview, error: reviewError } = await db .from("tabular_reviews") .select("*") @@ -522,8 +519,8 @@ tabularRouter.patch("/:reviewId", requireAuth, async (req, res) => { } else { // No document change — derive from existing cells documentIds = [ - ...new Set( - (existingCells ?? []).map((cell) => cell.document_id), + ...new Set<string>( + (existingCells ?? 
[]).map((cell) => cell.document_id as string), ), ]; if (documentIds.length === 0 && existingReview.project_id) { @@ -570,7 +567,7 @@ tabularRouter.patch("/:reviewId", requireAuth, async (req, res) => { tabularRouter.delete("/:reviewId", requireAuth, async (req, res) => { const userId = res.locals.userId as string; const { reviewId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const { error } = await db .from("tabular_reviews") .delete() @@ -594,7 +591,7 @@ tabularRouter.post("/:reviewId/clear-cells", requireAuth, async (req, res) => { .status(400) .json({ detail: "document_ids is required" }); - const db = createServerSupabase(); + const db = createDb(); const { data: review, error: reviewError } = await db .from("tabular_reviews") .select("id, user_id, project_id") @@ -633,7 +630,7 @@ tabularRouter.post( .status(400) .json({ detail: "document_id and column_index are required" }); - const db = createServerSupabase(); + const db = createDb(); const { data: review, error: reviewError } = await db .from("tabular_reviews") .select("*") @@ -731,7 +728,7 @@ tabularRouter.post("/:reviewId/generate", requireAuth, async (req, res) => { const userId = res.locals.userId as string; const userEmail = res.locals.userEmail as string | undefined; const { reviewId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const { data: review, error: reviewError } = await db .from("tabular_reviews") @@ -911,7 +908,7 @@ tabularRouter.get("/:reviewId/chats", requireAuth, async (req, res) => { const userId = res.locals.userId as string; const userEmail = res.locals.userEmail as string | undefined; const { reviewId } = req.params; - const db = createServerSupabase(); + const db = createDb(); // Verify access (owner or shared-project member). const { data: review, error } = await db @@ -943,7 +940,7 @@ tabularRouter.delete( async (req, res) => { const userId = res.locals.userId as string; const { chatId } = req.params; - const db = createServerSupabase(); + const db = createDb(); // Owner-only delete — sibling collaborators shouldn't be able to wipe // each other's threads. const { error } = await db @@ -964,7 +961,7 @@ tabularRouter.get( const userId = res.locals.userId as string; const userEmail = res.locals.userEmail as string | undefined; const { reviewId, chatId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const { data: review } = await db .from("tabular_reviews") @@ -1120,7 +1117,7 @@ tabularRouter.post("/:reviewId/chat", requireAuth, async (req, res) => { .json({ detail: "messages must include a user message" }); } - const db = createServerSupabase(); + const db = createDb(); const { data: review, error } = await db .from("tabular_reviews") .select("*") @@ -1204,10 +1201,13 @@ tabularRouter.post("/:reviewId/chat", requireAuth, async (req, res) => { // Persist user message if (chatId) { + const contentVal = typeof lastUser.content === "string" + ? 
JSON.stringify(lastUser.content) + : lastUser.content; await db.from("tabular_review_chat_messages").insert({ chat_id: chatId, role: "user", - content: lastUser.content, + content: contentVal, }); } diff --git a/backend/src/routes/user.ts b/backend/src/routes/user.ts index 0df2021d..b2c45e3c 100644 --- a/backend/src/routes/user.ts +++ b/backend/src/routes/user.ts @@ -1,6 +1,6 @@ import { Router } from "express"; import { requireAuth } from "../middleware/auth"; -import { createServerSupabase } from "../lib/supabase"; +import { createDb, DbClient, deleteAuthUser } from "../lib/db"; import { DEFAULT_TABULAR_MODEL, resolveModel } from "../lib/llm"; import { type ApiKeyStatus, @@ -102,7 +102,7 @@ function validateProfilePayload(body: unknown): } async function ensureProfileRow( - db: ReturnType<typeof createServerSupabase>, + db: DbClient, userId: string, ) { const { error } = await db @@ -115,7 +115,7 @@ async function ensureProfileRow( } async function loadProfile( - db: ReturnType<typeof createServerSupabase>, + db: DbClient, userId: string, options: { repairMissing?: boolean } = {}, ) { @@ -174,7 +174,7 @@ async function loadProfile( // POST /user/profile userRouter.post("/profile", requireAuth, async (_req, res) => { const userId = res.locals.userId as string; - const db = createServerSupabase(); + const db = createDb(); const error = await ensureProfileRow(db, userId); if (error) return void res.status(500).json({ detail: error.message }); res.json({ ok: true }); }); @@ -183,7 +183,7 @@ userRouter.post("/profile", requireAuth, async (_req, res) => { // GET /user/profile userRouter.get("/profile", requireAuth, async (_req, res) => { const userId = res.locals.userId as string; - const db = createServerSupabase(); + const db = createDb(); const { data, error } = await loadProfile(db, userId, { repairMissing: true, }); @@ -198,7 +198,7 @@ userRouter.patch("/profile", requireAuth, async (req, res) => { const parsed = validateProfilePayload(req.body); if (!parsed.ok) return void res.status(400).json({ detail: parsed.detail }); - const db = createServerSupabase(); + const db = createDb(); const ensureError = await ensureProfileRow(db, userId); if (ensureError) return void res.status(500).json({ detail: ensureError.message }); @@ -219,7 +219,7 @@ userRouter.patch("/profile", requireAuth, async (req, res) => { // GET /user/api-keys userRouter.get("/api-keys", requireAuth, async (_req, res) => { const userId = res.locals.userId as string; - const db = createServerSupabase(); + const db = createDb(); const status = await getUserApiKeyStatus(userId, db); res.json(status); }); @@ -233,9 +233,11 @@ userRouter.put("/api-keys/:provider", requireAuth, async (req, res) => { const apiKey = typeof req.body?.api_key === "string" ? req.body.api_key : null; - const db = createServerSupabase(); + const db = createDb(); try { - if (hasEnvApiKey(provider)) { + // Ollama is env-configured when the base URL is set — but the API key + // is optional, so we always allow the browser to save/clear it. 
+ if (provider !== "ollama" && hasEnvApiKey(provider)) { return void res.status(409).json({ detail: "This provider is configured by the server environment and cannot be changed from the browser.", @@ -253,11 +255,38 @@ userRouter.put("/api-keys/:provider", requireAuth, async (req, res) => { } }); +// GET /user/ollama/models — lists models available on the configured Ollama server +userRouter.get("/ollama/models", requireAuth, async (_req, res) => { + const baseUrl = + process.env.OLLAMA_BASE_URL?.trim() || + process.env.LLAMACPP_BASE_URL?.trim(); + if (!baseUrl) return void res.json({ models: [] }); + + try { + const headers: Record<string, string> = {}; + const key = + process.env.OLLAMA_API_KEY?.trim() || + process.env.LLAMACPP_API_KEY?.trim(); + if (key) headers["Authorization"] = `Bearer ${key}`; + + const response = await fetch(`${baseUrl}/models`, { + headers, + signal: AbortSignal.timeout(5000), + }); + if (!response.ok) return void res.json({ models: [] }); + + const data = (await response.json()) as { data?: { id: string }[] }; + const models = (data.data ?? []).map((m) => `local-${m.id}`); + res.json({ models }); + } catch { + res.json({ models: [] }); + } +});
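Note: ${baseUrl}/models is the OpenAI-compatible model listing (GET /v1/models), which both Ollama and the llama.cpp server expose; the local- prefix appears to be how Mike namespaces local model ids (see the local- checks in userSettings.ts). A standalone equivalent of what the route does:

const baseUrl = process.env.OLLAMA_BASE_URL ?? "http://localhost:11434/v1";
const res = await fetch(`${baseUrl}/models`, { signal: AbortSignal.timeout(5000) });
const body = (await res.json()) as { data?: { id: string }[] };
console.log((body.data ?? []).map((m) => `local-${m.id}`));
// e.g. ["local-llama3.1:8b", "local-qwen2.5:7b"], depending on which models are pulled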
+ // DELETE /user/account userRouter.delete("/account", requireAuth, async (_req, res) => { const userId = res.locals.userId as string; - const db = createServerSupabase(); - const { error } = await db.auth.admin.deleteUser(userId); + const { error } = await deleteAuthUser(userId); if (error) return void res.status(500).json({ detail: error.message }); res.status(204).send(); }); diff --git a/backend/src/routes/workflows.ts b/backend/src/routes/workflows.ts index 2ea97285..694f45b8 100644 --- a/backend/src/routes/workflows.ts +++ b/backend/src/routes/workflows.ts @@ -1,19 +1,10 @@ import { Router } from "express"; -import { createClient } from "@supabase/supabase-js"; import { requireAuth } from "../middleware/auth"; -import { createServerSupabase } from "../lib/supabase"; - -function getAdminClient() { - return createClient( - process.env.NEXT_PUBLIC_SUPABASE_URL ?? "", - process.env.SUPABASE_SECRET_KEY ?? "", - { auth: { autoRefreshToken: false, persistSession: false } }, - ); -} +import { createDb, DbClient, listAuthUsers } from "../lib/db"; export const workflowsRouter = Router(); -type Db = ReturnType<typeof createServerSupabase>; +type Db = DbClient; type WorkflowRecord = { id: string; @@ -78,7 +69,7 @@ workflowsRouter.get("/", requireAuth, async (req, res) => { const userId = res.locals.userId as string; const userEmail = res.locals.userEmail as string; const { type } = req.query as { type?: string }; - const db = createServerSupabase(); + const db = createDb(); // Own workflows let ownQuery = db @@ -112,10 +103,8 @@ workflowsRouter.get("/", requireAuth, async (req, res) => { ? await db.from("user_profiles").select("user_id, display_name").in("user_id", sharerIds) : { data: [] }; - // Fetch sharer emails via admin client - const admin = getAdminClient(); - const { data: authData } = await admin.auth.admin.listUsers({ perPage: 1000 }); - const authUsers = authData?.users ?? []; + // Fetch sharer emails (works in both Supabase and local mode) + const authUsers = await listAuthUsers(); sharedWorkflows = wfs.map((wf) => { const share = shares.find((s) => s.workflow_id === wf.id); @@ -155,7 +144,7 @@ workflowsRouter.post("/", requireAuth, async (req, res) => { .status(400) .json({ detail: "type must be 'assistant' or 'tabular'" }); - const db = createServerSupabase(); + const db = createDb(); const { data, error } = await db .from("workflows") .insert({ @@ -184,7 +173,7 @@ async function handleWorkflowUpdate(req: import("express").Request, res: import( updates.columns_config = req.body.columns_config; if ("practice" in req.body) updates.practice = req.body.practice ?? null; - const db = createServerSupabase(); + const db = createDb(); const access = await resolveWorkflowAccess(workflowId, userId, userEmail, db); if (!access || access.workflow.is_system || !access.allowEdit) { return void res @@ -220,7 +209,7 @@ workflowsRouter.patch("/:workflowId", requireAuth, handleWorkflowUpdate); workflowsRouter.delete("/:workflowId", requireAuth, async (req, res) => { const userId = res.locals.userId as string; const { workflowId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const { error } = await db .from("workflows") .delete() @@ -234,7 +223,7 @@ workflowsRouter.delete("/:workflowId", requireAuth, async (req, res) => { // GET /workflows/hidden workflowsRouter.get("/hidden", requireAuth, async (req, res) => { const userId = res.locals.userId as string; - const db = createServerSupabase(); + const db = createDb(); const { data, error } = await db .from("hidden_workflows") .select("workflow_id") @@ -249,7 +238,7 @@ workflowsRouter.post("/hidden", requireAuth, async (req, res) => { const { workflow_id } = req.body as { workflow_id: string }; if (!workflow_id?.trim()) return void res.status(400).json({ detail: "workflow_id is required" }); - const db = createServerSupabase(); + const db = createDb(); const { error } = await db .from("hidden_workflows") .upsert({ user_id: userId, workflow_id }, { onConflict: "user_id,workflow_id" }); @@ -261,7 +250,7 @@ workflowsRouter.post("/hidden", requireAuth, async (req, res) => { workflowsRouter.delete("/hidden/:workflowId", requireAuth, async (req, res) => { const userId = res.locals.userId as string; const { workflowId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const { error } = await db .from("hidden_workflows") .delete() @@ -276,7 +265,7 @@ workflowsRouter.get("/:workflowId", requireAuth, async (req, res) => { const userId = res.locals.userId as string; const userEmail = res.locals.userEmail as string | undefined; const { workflowId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const access = await resolveWorkflowAccess(workflowId, userId, userEmail, db); if (!access) return void res.status(404).json({ detail: "Workflow not found" }); @@ -292,7 +281,7 @@ workflowsRouter.get("/:workflowId", requireAuth, async (req, res) => { workflowsRouter.get("/:workflowId/shares", requireAuth, async (req, res) => { const userId = res.locals.userId as string; const { workflowId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const { data: wf } = await db .from("workflows") @@ -317,7 +306,7 @@ workflowsRouter.get("/:workflowId/shares", requireAuth, async (req, res) => { workflowsRouter.delete("/:workflowId/shares/:shareId", requireAuth, async (req, res) => { const userId = res.locals.userId as string; const { workflowId, 
shareId } = req.params; - const db = createServerSupabase(); + const db = createDb(); const { data: wf } = await db .from("workflows") @@ -339,7 +328,7 @@ workflowsRouter.post("/:workflowId/share", requireAuth, async (req, res) => { if (!emails?.length) return void res.status(400).json({ detail: "emails is required" }); - const db = createServerSupabase(); + const db = createDb(); // Verify ownership const { data: wf } = await db .from("workflows") diff --git a/backend/tsconfig.json b/backend/tsconfig.json index a4b3abf6..b73d34a6 100644 --- a/backend/tsconfig.json +++ b/backend/tsconfig.json @@ -7,6 +7,7 @@ "outDir": "./dist", "rootDir": "./src", "strict": true, + "noImplicitAny": false, "esModuleInterop": true, "skipLibCheck": true, "resolveJsonModule": true, diff --git a/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/1e6bff24-3674-49cb-b3da-b26d25b6fd17/source.pdf b/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/1e6bff24-3674-49cb-b3da-b26d25b6fd17/source.pdf new file mode 100644 index 00000000..53b2c87d Binary files /dev/null and b/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/1e6bff24-3674-49cb-b3da-b26d25b6fd17/source.pdf differ diff --git a/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/2baeef48-2a21-4c57-8899-7072da1429dc/source.pdf b/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/2baeef48-2a21-4c57-8899-7072da1429dc/source.pdf new file mode 100644 index 00000000..c0bd3ffc Binary files /dev/null and b/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/2baeef48-2a21-4c57-8899-7072da1429dc/source.pdf differ diff --git a/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/45d9b052-3ac5-402f-a205-1ef1a008a1e4/source.pdf b/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/45d9b052-3ac5-402f-a205-1ef1a008a1e4/source.pdf new file mode 100644 index 00000000..5a292a0f Binary files /dev/null and b/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/45d9b052-3ac5-402f-a205-1ef1a008a1e4/source.pdf differ diff --git a/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/4db7226c-46c4-4424-8cf5-a5134838aa58/source.pdf b/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/4db7226c-46c4-4424-8cf5-a5134838aa58/source.pdf new file mode 100644 index 00000000..c0bd3ffc Binary files /dev/null and b/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/4db7226c-46c4-4424-8cf5-a5134838aa58/source.pdf differ diff --git a/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/5167ab67-dae7-4be3-99d6-a9aa37f9f3d2/source.pdf b/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/5167ab67-dae7-4be3-99d6-a9aa37f9f3d2/source.pdf new file mode 100644 index 00000000..59340675 Binary files /dev/null and b/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/5167ab67-dae7-4be3-99d6-a9aa37f9f3d2/source.pdf differ diff --git a/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/685053a0-b20e-4d4b-a5cc-904f68d1b58a/source.pdf b/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/685053a0-b20e-4d4b-a5cc-904f68d1b58a/source.pdf new file mode 100644 index 00000000..5a292a0f Binary files /dev/null and b/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/685053a0-b20e-4d4b-a5cc-904f68d1b58a/source.pdf differ diff --git a/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/6c66232a-fba7-4dad-9e35-cc3acb947309/source.pdf 
b/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/6c66232a-fba7-4dad-9e35-cc3acb947309/source.pdf new file mode 100644 index 00000000..59340675 Binary files /dev/null and b/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/6c66232a-fba7-4dad-9e35-cc3acb947309/source.pdf differ diff --git a/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/72eff259-dbd0-4b83-be00-0f3071cf6e12/source.pdf b/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/72eff259-dbd0-4b83-be00-0f3071cf6e12/source.pdf new file mode 100644 index 00000000..2589a3e1 Binary files /dev/null and b/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/72eff259-dbd0-4b83-be00-0f3071cf6e12/source.pdf differ diff --git a/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/7bb3229b-e5c3-488c-93ce-ed37c3f194ae/source.pdf b/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/7bb3229b-e5c3-488c-93ce-ed37c3f194ae/source.pdf new file mode 100644 index 00000000..d67d6c56 Binary files /dev/null and b/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/7bb3229b-e5c3-488c-93ce-ed37c3f194ae/source.pdf differ diff --git a/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/abddb691-874e-4e3e-8ce6-d53034e8c9cc/source.pdf b/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/abddb691-874e-4e3e-8ce6-d53034e8c9cc/source.pdf new file mode 100644 index 00000000..2589a3e1 Binary files /dev/null and b/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/abddb691-874e-4e3e-8ce6-d53034e8c9cc/source.pdf differ diff --git a/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/ff55fc63-85e7-4082-a945-8c23cfa5c793/source.pdf b/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/ff55fc63-85e7-4082-a945-8c23cfa5c793/source.pdf new file mode 100644 index 00000000..53b2c87d Binary files /dev/null and b/backend/uploads/documents/2425b18a-1c11-4ddf-9b80-7f61d0cdda98/ff55fc63-85e7-4082-a945-8c23cfa5c793/source.pdf differ diff --git a/compose.yml b/compose.yml new file mode 100644 index 00000000..a30fd196 --- /dev/null +++ b/compose.yml @@ -0,0 +1,68 @@ +services: + postgres: + image: postgres:16-alpine + restart: unless-stopped + environment: + POSTGRES_DB: mike + POSTGRES_USER: mike + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-mike_secret} + volumes: + - postgres_data:/var/lib/postgresql/data + - ./docker/postgres/init.sql:/docker-entrypoint-initdb.d/init.sql:ro + healthcheck: + test: ["CMD-SHELL", "pg_isready -U mike -d mike"] + interval: 5s + timeout: 5s + retries: 10 + ports: + - "5432:5432" + + backend: + build: + context: ./backend + dockerfile: Dockerfile + restart: unless-stopped + depends_on: + postgres: + condition: service_healthy + environment: + # Auth & database + AUTH_MODE: local + DATABASE_URL: postgres://mike:${POSTGRES_PASSWORD:-mike_secret}@postgres:5432/mike + JWT_SECRET: ${JWT_SECRET:?JWT_SECRET must be set} + # Storage (local filesystem inside the container) + LOCAL_STORAGE_PATH: /app/uploads + BACKEND_URL: ${BACKEND_URL:-http://localhost:3001} + # CORS + FRONTEND_URL: ${FRONTEND_URL:-http://localhost:3000} + PORT: 3001 + # API keys (set the ones you use) + ANTHROPIC_API_KEY: ${ANTHROPIC_API_KEY:-} + GEMINI_API_KEY: ${GEMINI_API_KEY:-} + OPENAI_API_KEY: ${OPENAI_API_KEY:-} + OLLAMA_BASE_URL: ${OLLAMA_BASE_URL:-http://localhost:11434/v1} + # Key encryption (defaults to JWT_SECRET when not set) + USER_API_KEYS_ENCRYPTION_SECRET: ${USER_API_KEYS_ENCRYPTION_SECRET:-} + volumes: + - 
uploads_data:/app/uploads + ports: + - "${BACKEND_PORT:-3001}:3001" + + frontend: + build: + context: ./frontend + dockerfile: Dockerfile + args: + NEXT_PUBLIC_AUTH_MODE: local + NEXT_PUBLIC_API_BASE_URL: ${NEXT_PUBLIC_API_BASE_URL:-http://localhost:3001} + restart: unless-stopped + depends_on: + - backend + environment: + NODE_ENV: production + ports: + - "${FRONTEND_PORT:-3000}:3000" + +volumes: + postgres_data: + uploads_data: diff --git a/docker/postgres/init.sql b/docker/postgres/init.sql new file mode 100644 index 00000000..fdd16308 --- /dev/null +++ b/docker/postgres/init.sql @@ -0,0 +1,287 @@ +-- Mike local PostgreSQL schema +-- Used when AUTH_MODE=local (Docker Compose self-hosted setup). +-- No Supabase auth schema, no RLS — the backend enforces access control. + +create extension if not exists "pgcrypto"; + +-- --------------------------------------------------------------------------- +-- Users (replaces Supabase auth.users) +-- --------------------------------------------------------------------------- + +create table if not exists public.users ( + id uuid primary key default gen_random_uuid(), + email text not null unique, + password_hash text not null, + created_at timestamptz not null default now() +); + +create index if not exists idx_users_email on public.users(email); + +-- --------------------------------------------------------------------------- +-- User profiles +-- --------------------------------------------------------------------------- + +create table if not exists public.user_profiles ( + id uuid primary key default gen_random_uuid(), + user_id uuid not null unique references public.users(id) on delete cascade, + display_name text, + organisation text, + tier text not null default 'Free', + message_credits_used integer not null default 0, + credits_reset_date timestamptz not null default (now() + interval '30 days'), + tabular_model text not null default 'gemini-3-flash-preview', + created_at timestamptz not null default now(), + updated_at timestamptz not null default now() +); + +create index if not exists idx_user_profiles_user on public.user_profiles(user_id); + +create table if not exists public.user_api_keys ( + id uuid primary key default gen_random_uuid(), + user_id uuid not null references public.users(id) on delete cascade, + provider text not null check (provider in ('claude', 'gemini', 'openai', 'ollama')), + encrypted_key text not null, + iv text not null, + auth_tag text not null, + created_at timestamptz not null default now(), + updated_at timestamptz not null default now(), + unique(user_id, provider) +); + +create index if not exists idx_user_api_keys_user on public.user_api_keys(user_id); + +-- --------------------------------------------------------------------------- +-- Projects and documents +-- --------------------------------------------------------------------------- + +create table if not exists public.projects ( + id uuid primary key default gen_random_uuid(), + user_id text not null, + name text not null, + cm_number text, + visibility text not null default 'private', + shared_with jsonb not null default '[]'::jsonb, + created_at timestamptz not null default now(), + updated_at timestamptz not null default now() +); + +create index if not exists idx_projects_user on public.projects(user_id); +create index if not exists projects_shared_with_idx on public.projects using gin (shared_with); + +create table if not exists public.project_subfolders ( + id uuid primary key default gen_random_uuid(), + project_id uuid not null references 
public.projects(id) on delete cascade, + user_id text not null, + name text not null, + parent_folder_id uuid references public.project_subfolders(id) on delete cascade, + created_at timestamptz not null default now(), + updated_at timestamptz not null default now() +); + +create index if not exists idx_project_subfolders_project on public.project_subfolders(project_id); + +create table if not exists public.documents ( + id uuid primary key default gen_random_uuid(), + project_id uuid references public.projects(id) on delete cascade, + user_id text not null, + filename text not null, + file_type text, + size_bytes integer not null default 0, + page_count integer, + structure_tree jsonb, + status text not null default 'pending', + folder_id uuid references public.project_subfolders(id) on delete set null, + current_version_id uuid, + created_at timestamptz not null default now(), + updated_at timestamptz not null default now() +); + +create index if not exists idx_documents_user_project on public.documents(user_id, project_id); +create index if not exists idx_documents_project_folder on public.documents(project_id, folder_id); + +create table if not exists public.document_versions ( + id uuid primary key default gen_random_uuid(), + document_id uuid not null references public.documents(id) on delete cascade, + storage_path text not null, + pdf_storage_path text, + source text not null default 'upload', + version_number integer, + display_name text, + created_at timestamptz not null default now(), + constraint document_versions_source_check + check (source = any (array[ + 'upload'::text, + 'user_upload'::text, + 'assistant_edit'::text, + 'user_accept'::text, + 'user_reject'::text, + 'generated'::text + ])) +); + +create index if not exists document_versions_document_id_idx + on public.document_versions(document_id, created_at desc); +create index if not exists document_versions_doc_vnum_idx + on public.document_versions(document_id, version_number); + +alter table public.documents + add constraint documents_current_version_fk + foreign key (current_version_id) + references public.document_versions(id) on delete set null + not valid; + +create table if not exists public.document_edits ( + id uuid primary key default gen_random_uuid(), + document_id uuid not null references public.documents(id) on delete cascade, + chat_message_id uuid, + version_id uuid not null references public.document_versions(id) on delete cascade, + change_id text not null, + del_w_id text, + ins_w_id text, + deleted_text text not null default '', + inserted_text text not null default '', + context_before text, + context_after text, + status text not null default 'pending' + check (status = any (array['pending'::text, 'accepted'::text, 'rejected'::text])), + created_at timestamptz not null default now(), + resolved_at timestamptz +); + +create index if not exists document_edits_document_id_idx on public.document_edits(document_id, created_at desc); +create index if not exists document_edits_message_id_idx on public.document_edits(chat_message_id); +create index if not exists document_edits_version_id_idx on public.document_edits(version_id); + +-- --------------------------------------------------------------------------- +-- Workflows +-- --------------------------------------------------------------------------- + +create table if not exists public.workflows ( + id uuid primary key default gen_random_uuid(), + user_id text, + title text not null, + type text not null, + prompt_md text, + columns_config jsonb, + 
practice text, + is_system boolean not null default false, + created_at timestamptz not null default now() +); + +create index if not exists idx_workflows_user on public.workflows(user_id); + +create table if not exists public.hidden_workflows ( + id uuid primary key default gen_random_uuid(), + user_id text not null, + workflow_id text not null, + created_at timestamptz not null default now(), + unique(user_id, workflow_id) +); + +create index if not exists idx_hidden_workflows_user on public.hidden_workflows(user_id); + +create table if not exists public.workflow_shares ( + id uuid primary key default gen_random_uuid(), + workflow_id uuid not null references public.workflows(id) on delete cascade, + shared_by_user_id text not null, + shared_with_email text not null, + allow_edit boolean not null default false, + created_at timestamptz not null default now(), + constraint workflow_shares_workflow_email_unique unique(workflow_id, shared_with_email) +); + +create index if not exists workflow_shares_workflow_id_idx on public.workflow_shares(workflow_id); +create index if not exists workflow_shares_email_idx on public.workflow_shares(shared_with_email); + +-- --------------------------------------------------------------------------- +-- Chats +-- --------------------------------------------------------------------------- + +create table if not exists public.chats ( + id uuid primary key default gen_random_uuid(), + project_id uuid references public.projects(id) on delete cascade, + user_id text not null, + title text, + created_at timestamptz not null default now() +); + +create index if not exists idx_chats_user on public.chats(user_id); +create index if not exists idx_chats_project on public.chats(project_id); + +create table if not exists public.chat_messages ( + id uuid primary key default gen_random_uuid(), + chat_id uuid not null references public.chats(id) on delete cascade, + role text not null, + content jsonb, + files jsonb, + workflow jsonb, + annotations jsonb, + created_at timestamptz not null default now() +); + +create index if not exists idx_chat_messages_chat on public.chat_messages(chat_id); + +alter table public.document_edits + add constraint document_edits_chat_message_id_fkey + foreign key (chat_message_id) + references public.chat_messages(id) + on delete set null + not valid; + +-- --------------------------------------------------------------------------- +-- Tabular reviews +-- --------------------------------------------------------------------------- + +create table if not exists public.tabular_reviews ( + id uuid primary key default gen_random_uuid(), + project_id uuid references public.projects(id) on delete cascade, + user_id text not null, + title text, + columns_config jsonb, + workflow_id uuid references public.workflows(id) on delete set null, + practice text, + shared_with jsonb not null default '[]'::jsonb, + created_at timestamptz not null default now(), + updated_at timestamptz not null default now() +); + +create index if not exists idx_tabular_reviews_user on public.tabular_reviews(user_id); +create index if not exists idx_tabular_reviews_project on public.tabular_reviews(project_id); +create index if not exists tabular_reviews_shared_with_idx on public.tabular_reviews using gin (shared_with); + +create table if not exists public.tabular_cells ( + id uuid primary key default gen_random_uuid(), + review_id uuid not null references public.tabular_reviews(id) on delete cascade, + document_id uuid not null references public.documents(id) on delete 
cascade, + column_index integer not null, + content text, + citations jsonb, + status text not null default 'pending', + created_at timestamptz not null default now() +); + +create index if not exists idx_tabular_cells_review on public.tabular_cells(review_id, document_id, column_index); + +create table if not exists public.tabular_review_chats ( + id uuid primary key default gen_random_uuid(), + review_id uuid not null references public.tabular_reviews(id) on delete cascade, + user_id text not null, + title text, + created_at timestamptz not null default now(), + updated_at timestamptz not null default now() +); + +create index if not exists tabular_review_chats_review_idx on public.tabular_review_chats(review_id, updated_at desc); +create index if not exists tabular_review_chats_user_idx on public.tabular_review_chats(user_id); + +create table if not exists public.tabular_review_chat_messages ( + id uuid primary key default gen_random_uuid(), + chat_id uuid not null references public.tabular_review_chats(id) on delete cascade, + role text not null, + content jsonb, + annotations jsonb, + created_at timestamptz not null default now() +); + +create index if not exists tabular_review_chat_messages_chat_idx + on public.tabular_review_chat_messages(chat_id, created_at); diff --git a/frontend/Dockerfile b/frontend/Dockerfile new file mode 100644 index 00000000..2da10694 --- /dev/null +++ b/frontend/Dockerfile @@ -0,0 +1,31 @@ +FROM node:22-slim AS deps +WORKDIR /app +COPY package*.json ./ +# --legacy-peer-deps: @opennextjs/cloudflare has a peer dep on next versions +# that don't exactly match the version pinned in this repo +RUN npm ci --legacy-peer-deps + +FROM node:22-slim AS build +WORKDIR /app +COPY --from=deps /app/node_modules ./node_modules +COPY . .
+# Build-time env vars are baked into the Next.js bundle +ARG NEXT_PUBLIC_AUTH_MODE=local +ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:3001 +ARG NEXT_PUBLIC_SUPABASE_URL="" +ARG NEXT_PUBLIC_SUPABASE_PUBLISHABLE_DEFAULT_KEY="" +ENV NEXT_PUBLIC_AUTH_MODE=$NEXT_PUBLIC_AUTH_MODE +ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL +ENV NEXT_PUBLIC_SUPABASE_URL=$NEXT_PUBLIC_SUPABASE_URL +ENV NEXT_PUBLIC_SUPABASE_PUBLISHABLE_DEFAULT_KEY=$NEXT_PUBLIC_SUPABASE_PUBLISHABLE_DEFAULT_KEY +RUN npm run build + +FROM node:22-slim AS runtime +WORKDIR /app +COPY --from=deps /app/node_modules ./node_modules +COPY --from=build /app/.next ./.next +COPY --from=build /app/public ./public +COPY package.json next.config.ts tsconfig.json ./ +ENV NODE_ENV=production +EXPOSE 3000 +CMD ["npm", "start"] diff --git a/frontend/package-lock.json b/frontend/package-lock.json index decbac1f..d1544e19 100644 --- a/frontend/package-lock.json +++ b/frontend/package-lock.json @@ -7,6 +7,7 @@ "": { "name": "mike", "version": "0.1.0", + "license": "AGPL-3.0-only", "dependencies": { "@aws-sdk/client-s3": "^3.1025.0", "@aws-sdk/s3-request-presigner": "^3.1025.0", @@ -1889,7 +1890,6 @@ "version": "1.9.2", "resolved": "https://registry.npmjs.org/@emnapi/runtime/-/runtime-1.9.2.tgz", "integrity": "sha512-3U4+MIWHImeyu1wnmVygh5WlgfYDtyf0k8AbLhMFxOipihf6nrWC4syIm/SwEeec0mNSafiiNnMJwbza/Is6Lw==", - "dev": true, "license": "MIT", "optional": true, "dependencies": { @@ -2616,7 +2616,6 @@ "cpu": [ "arm64" ], - "dev": true, "license": "Apache-2.0", "optional": true, "os": [ @@ -2639,7 +2638,6 @@ "cpu": [ "x64" ], - "dev": true, "license": "Apache-2.0", "optional": true, "os": [ @@ -2662,7 +2660,6 @@ "cpu": [ "arm64" ], - "dev": true, "license": "LGPL-3.0-or-later", "optional": true, "os": [ @@ -2679,7 +2676,6 @@ "cpu": [ "x64" ], - "dev": true, "license": "LGPL-3.0-or-later", "optional": true, "os": [ @@ -2696,7 +2692,6 @@ "cpu": [ "arm" ], - "dev": true, "license": "LGPL-3.0-or-later", "optional": true, "os": [ @@ -2713,7 +2708,6 @@ "cpu": [ "arm64" ], - "dev": true, "license": "LGPL-3.0-or-later", "optional": true, "os": [ @@ -2730,7 +2724,6 @@ "cpu": [ "ppc64" ], - "dev": true, "license": "LGPL-3.0-or-later", "optional": true, "os": [ @@ -2747,7 +2740,6 @@ "cpu": [ "riscv64" ], - "dev": true, "license": "LGPL-3.0-or-later", "optional": true, "os": [ @@ -2764,7 +2756,6 @@ "cpu": [ "s390x" ], - "dev": true, "license": "LGPL-3.0-or-later", "optional": true, "os": [ @@ -2781,7 +2772,6 @@ "cpu": [ "x64" ], - "dev": true, "license": "LGPL-3.0-or-later", "optional": true, "os": [ @@ -2798,7 +2788,6 @@ "cpu": [ "arm64" ], - "dev": true, "license": "LGPL-3.0-or-later", "optional": true, "os": [ @@ -2815,7 +2804,6 @@ "cpu": [ "x64" ], - "dev": true, "license": "LGPL-3.0-or-later", "optional": true, "os": [ @@ -2832,7 +2820,6 @@ "cpu": [ "arm" ], - "dev": true, "license": "Apache-2.0", "optional": true, "os": [ @@ -2855,7 +2842,6 @@ "cpu": [ "arm64" ], - "dev": true, "license": "Apache-2.0", "optional": true, "os": [ @@ -2878,7 +2864,6 @@ "cpu": [ "ppc64" ], - "dev": true, "license": "Apache-2.0", "optional": true, "os": [ @@ -2901,7 +2886,6 @@ "cpu": [ "riscv64" ], - "dev": true, "license": "Apache-2.0", "optional": true, "os": [ @@ -2924,7 +2908,6 @@ "cpu": [ "s390x" ], - "dev": true, "license": "Apache-2.0", "optional": true, "os": [ @@ -2947,7 +2930,6 @@ "cpu": [ "x64" ], - "dev": true, "license": "Apache-2.0", "optional": true, "os": [ @@ -2970,7 +2952,6 @@ "cpu": [ "arm64" ], - "dev": true, "license": "Apache-2.0", "optional": true, 
"os": [ @@ -2993,7 +2974,6 @@ "cpu": [ "x64" ], - "dev": true, "license": "Apache-2.0", "optional": true, "os": [ @@ -3016,7 +2996,6 @@ "cpu": [ "wasm32" ], - "dev": true, "license": "Apache-2.0 AND LGPL-3.0-or-later AND MIT", "optional": true, "dependencies": { @@ -3036,7 +3015,6 @@ "cpu": [ "arm64" ], - "dev": true, "license": "Apache-2.0 AND LGPL-3.0-or-later", "optional": true, "os": [ @@ -3056,7 +3034,6 @@ "cpu": [ "ia32" ], - "dev": true, "license": "Apache-2.0 AND LGPL-3.0-or-later", "optional": true, "os": [ @@ -3076,7 +3053,6 @@ "cpu": [ "x64" ], - "dev": true, "license": "Apache-2.0 AND LGPL-3.0-or-later", "optional": true, "os": [ diff --git a/frontend/src/app/(pages)/account/models/page.tsx b/frontend/src/app/(pages)/account/models/page.tsx index c83d6814..2c2e5344 100644 --- a/frontend/src/app/(pages)/account/models/page.tsx +++ b/frontend/src/app/(pages)/account/models/page.tsx @@ -14,7 +14,7 @@ import { } from "@/components/ui/dropdown-menu"; import { useUserProfile } from "@/contexts/UserProfileContext"; import type { ApiKeyState } from "@/app/lib/mikeApi"; -import { MODELS } from "@/app/components/assistant/ModelToggle"; +import { MODELS, modelLabel, type ModelOption } from "@/app/components/assistant/ModelToggle"; import { isModelAvailable, modelGroupToProvider, @@ -37,10 +37,15 @@ const API_KEY_FIELDS = [ label: "OpenAI API Key", placeholder: "sk-…", }, + { + provider: "ollama", + label: "Ollama / Llama.cpp API Key (optional)", + placeholder: "Ollama key or leave empty", + }, ] as const; export default function ModelsAndApiKeysPage() { - const { profile, updateModelPreference, updateApiKey } = useUserProfile(); + const { profile, updateModelPreference, updateApiKey, ollamaModels } = useUserProfile(); return (
@@ -66,6 +71,7 @@ export default function ModelsAndApiKeysPage() { "gemini-3-flash-preview" } apiKeys={profile?.apiKeys} + localModels={ollamaModels} onChange={(id) => updateModelPreference("tabularModel", id) } @@ -124,20 +130,35 @@ function TabularModelDropdown({ value, onChange, apiKeys, + localModels = [], }: { value: string; onChange: (id: string) => void; apiKeys?: ApiKeyState; + localModels?: ModelOption[]; }) { const [isOpen, setIsOpen] = useState(false); - const selected = MODELS.find((m) => m.id === value); - const selectedAvailable = apiKeys ? isModelAvailable(value, apiKeys) : true; - const groups: ("Anthropic" | "Google" | "OpenAI")[] = [ - "Anthropic", - "Google", - "OpenAI", + + const allLocalModels = [ + ...MODELS.filter((m) => m.group === "Local"), + ...localModels.filter((m) => !MODELS.some((s) => s.id === m.id)), ]; + const selected = + MODELS.find((m) => m.id === value) ?? + allLocalModels.find((m) => m.id === value); + const selectedAvailable = apiKeys ? isModelAvailable(value, apiKeys) : true; + + type GroupKey = ModelOption["group"]; + const groups: { key: GroupKey; items: ModelOption[] }[] = ( + [ + { key: "Anthropic" as GroupKey, items: MODELS.filter((m) => m.group === "Anthropic") }, + { key: "Google" as GroupKey, items: MODELS.filter((m) => m.group === "Google") }, + { key: "OpenAI" as GroupKey, items: MODELS.filter((m) => m.group === "OpenAI") }, + { key: "Local" as GroupKey, items: allLocalModels }, + ] as { key: GroupKey; items: ModelOption[] }[] + ).filter((g) => g.items.length > 0); + return ( @@ -150,7 +171,7 @@ function TabularModelDropdown({ )} - {selected?.label ?? "Select a model"} + {selected?.label ?? modelLabel(value)} - {groups.map((group, gi) => { - const items = MODELS.filter((m) => m.group === group); - if (items.length === 0) return null; + {groups.map(({ key: group, items }, gi) => { return (
{gi > 0 && } diff --git a/frontend/src/app/components/assistant/AssistantMessage.tsx b/frontend/src/app/components/assistant/AssistantMessage.tsx index f33dfb04..35aead5c 100644 --- a/frontend/src/app/components/assistant/AssistantMessage.tsx +++ b/frontend/src/app/components/assistant/AssistantMessage.tsx @@ -16,7 +16,7 @@ import type { } from "../shared/types"; import { EditCard, applyOptimisticResolution } from "./EditCard"; import { PreResponseWrapper } from "../shared/PreResponseWrapper"; -import { supabase } from "@/lib/supabase"; +import { getSessionToken } from "@/lib/supabase"; function toolCallLabel(name: string): string { if (name === "generate_docx") return "Creating document..."; @@ -88,10 +88,7 @@ function BulkEditActions({ setBusy(verb); setProgress({ done: 0, total: pending.length }); try { - const { - data: { session }, - } = await supabase.auth.getSession(); - const token = session?.access_token; + const token = await getSessionToken(); const apiBase = process.env.NEXT_PUBLIC_API_BASE_URL ?? "http://localhost:3001"; @@ -633,10 +630,7 @@ function DocDownloadBlock({ if (busy || isReloading || !href) return; setBusy(true); try { - const { - data: { session }, - } = await supabase.auth.getSession(); - const token = session?.access_token; + const token = await getSessionToken(); const resp = await fetch(href, { headers: token ? { Authorization: `Bearer ${token}` } : {}, }); diff --git a/frontend/src/app/components/assistant/ChatInput.tsx b/frontend/src/app/components/assistant/ChatInput.tsx index 18914cc8..a21bba28 100644 --- a/frontend/src/app/components/assistant/ChatInput.tsx +++ b/frontend/src/app/components/assistant/ChatInput.tsx @@ -66,7 +66,7 @@ export const ChatInput = forwardRef(function ChatInput( title: string; } | null>(null); const [model, setModel] = useSelectedModel(); - const { profile } = useUserProfile(); + const { profile, ollamaModels } = useUserProfile(); const apiKeys = profile?.apiKeys; const textareaRef = useRef(null); const [docSelectorOpen, setDocSelectorOpen] = useState(false); @@ -275,6 +275,7 @@ export const ChatInput = forwardRef(function ChatInput( value={model} onChange={setModel} apiKeys={apiKeys} + localModels={ollamaModels} /> - + {GROUP_ORDER.map((group, gi) => { - const items = MODELS.filter((m) => m.group === group); + const items = allModels.filter((m) => m.group === group); if (items.length === 0) return null; return (
@@ -106,6 +181,13 @@ export function ModelToggle({ value, onChange, apiKeys }: Props) { ); })} + {/* Inline custom-model input for the Local group */} + {group === "Local" && ( + + )}
); })} diff --git a/frontend/src/app/components/projects/ProjectsOverview.tsx b/frontend/src/app/components/projects/ProjectsOverview.tsx index b84d32e5..8ecc6b4d 100644 --- a/frontend/src/app/components/projects/ProjectsOverview.tsx +++ b/frontend/src/app/components/projects/ProjectsOverview.tsx @@ -41,13 +41,20 @@ export function ProjectsOverview() { const actionsRef = useRef(null); const router = useRouter(); const { user } = useAuth(); + const [loadError, setLoadError] = useState<string | null>(null); useEffect(() => { + if (!user) return; + setLoading(true); + setLoadError(null); listProjects() .then(setProjects) - .catch(() => setProjects([])) + .catch((err: unknown) => { + setProjects([]); + setLoadError(err instanceof Error ? err.message : "Failed to load projects"); + }) .finally(() => setLoading(false)); - }, []); + }, [user?.id]); useEffect(() => { setSelectedIds([]); @@ -263,6 +270,14 @@ export function ProjectsOverview() {
          ))}
+        ) : loadError ? (
+          <div className="…">
+            <p className="…">{loadError}</p>
+            <button className="…" onClick={…}>…</button>
+          </div>
        ) : filtered.length === 0 ? (
{activeTab === "all" || activeTab === "mine" ? ( diff --git a/frontend/src/app/components/shared/DocPanel.tsx b/frontend/src/app/components/shared/DocPanel.tsx index 049f19d3..12ef0130 100644 --- a/frontend/src/app/components/shared/DocPanel.tsx +++ b/frontend/src/app/components/shared/DocPanel.tsx @@ -2,7 +2,7 @@ import { useCallback, useEffect, useMemo, useState } from "react"; import { Download, Loader2 } from "lucide-react"; -import { supabase } from "@/lib/supabase"; +import { getSessionToken } from "@/lib/supabase"; import { applyOptimisticResolution } from "../assistant/EditCard"; import { DocView } from "./DocView"; import { DocxView } from "./DocxView"; @@ -355,10 +355,7 @@ function EditResolveButtons({ ); } try { - const { - data: { session }, - } = await supabase.auth.getSession(); - const token = session?.access_token; + const token = await getSessionToken(); const apiBase = process.env.NEXT_PUBLIC_API_BASE_URL ?? "http://localhost:3001"; @@ -457,10 +454,7 @@ function DownloadButton({ if (busy || isReloading) return; setBusy(true); try { - const { - data: { session }, - } = await supabase.auth.getSession(); - const token = session?.access_token; + const token = await getSessionToken(); const apiBase = process.env.NEXT_PUBLIC_API_BASE_URL ?? "http://localhost:3001"; const qs = versionId diff --git a/frontend/src/app/components/shared/DocxView.tsx b/frontend/src/app/components/shared/DocxView.tsx index 1fc81156..b096abdd 100644 --- a/frontend/src/app/components/shared/DocxView.tsx +++ b/frontend/src/app/components/shared/DocxView.tsx @@ -3,7 +3,7 @@ import { useEffect, useMemo, useRef } from "react"; import { MikeIcon } from "@/components/chat/mike-icon"; import { useFetchDocxBytes } from "@/app/hooks/useFetchDocxBytes"; -import { supabase } from "@/lib/supabase"; +import { getSessionToken } from "@/lib/supabase"; import { clearDocxQuoteHighlights, highlightDocxQuote, @@ -144,10 +144,7 @@ async function tagWIdsOnRenderedDom( versionId: string | null | undefined, ): Promise { try { - const { - data: { session }, - } = await supabase.auth.getSession(); - const token = session?.access_token; + const token = await getSessionToken(); const apiBase = process.env.NEXT_PUBLIC_API_BASE_URL ?? "http://localhost:3001"; const qs = versionId diff --git a/frontend/src/app/components/tabular/TRChatPanel.tsx b/frontend/src/app/components/tabular/TRChatPanel.tsx index e066486b..852dc615 100644 --- a/frontend/src/app/components/tabular/TRChatPanel.tsx +++ b/frontend/src/app/components/tabular/TRChatPanel.tsx @@ -448,6 +448,7 @@ function TRChatInput({ model, onModelChange, apiKeys, + localModels, onHeightChange, }: { isLoading: boolean; @@ -456,6 +457,7 @@ function TRChatInput({ model: string; onModelChange: (id: string) => void; apiKeys?: ApiKeyState; + localModels?: import("@/app/components/assistant/ModelToggle").ModelOption[]; onHeightChange: (height: number) => void; }) { const [value, setValue] = useState(""); @@ -532,6 +534,7 @@ function TRChatInput({ value={model} onChange={onModelChange} apiKeys={apiKeys} + localModels={localModels} />