diff --git a/.env.example b/.env.example
index e4256d7..476ffb6 100644
--- a/.env.example
+++ b/.env.example
@@ -29,6 +29,14 @@ OPENAI_API_KEY='your-openai-api-key-here'
# Google Gemini API key for AI text generation
GOOGLE_GENERATIVE_AI_API_KEY='your-gemini-api-key-here'
+# Supabase Storage (for image uploads)
+# Project URL: https://supabase.com/dashboard/project/_/settings/api
+SUPABASE_URL=https://your-project-ref.supabase.co
+# WARNING: `SUPABASE_SERVICE_ROLE_KEY` is highly privileged and must only be used server-side.
+# Never use it in Expo/browser code and never expose it via `EXPO_PUBLIC_*` or any other client-exposed env var mechanism.
+SUPABASE_SERVICE_ROLE_KEY=your_service_role_key_here
+SUPABASE_STORAGE_BUCKET=images
+
# Expo app API URL (for local development, set to localhost:3000)
EXPO_PUBLIC_API_URL=http://localhost:3000
diff --git a/apps/expo/src/app/(tabs)/feed.tsx b/apps/expo/src/app/(tabs)/feed.tsx
index eca6593..24a3f7a 100644
--- a/apps/expo/src/app/(tabs)/feed.tsx
+++ b/apps/expo/src/app/(tabs)/feed.tsx
@@ -120,11 +120,11 @@ export default function FeedScreen() {
{item.title}
- {/* Hybrid Image Display - prioritize AI-generated imageUri */}
- {item.imageUri ? (
+ {/* Image display - prioritize AI-generated imageUrl */}
+ {item.imageUrl ? (
diff --git a/apps/expo/src/app/(tabs)/index.tsx b/apps/expo/src/app/(tabs)/index.tsx
index bd9ea89..ef1ba53 100644
--- a/apps/expo/src/app/(tabs)/index.tsx
+++ b/apps/expo/src/app/(tabs)/index.tsx
@@ -39,7 +39,7 @@ interface ContentCard {
type: "bill" | "government_content" | "court_case" | "general";
isAIGenerated: boolean;
thumbnailUrl?: string;
- imageUri?: string;
+ imageUrl?: string;
}
const _TYPE_LABELS: Record = {
@@ -137,10 +137,10 @@ const ContentCardComponent = ({
{/* Thumbnail */}
- {(item.imageUri ?? item.thumbnailUrl) ? (
+ {(item.imageUrl ?? item.thumbnailUrl) ? (
diff --git a/apps/scraper/src/utils/db/video-operations.ts b/apps/scraper/src/utils/db/video-operations.ts
index d4ccca2..079ef08 100644
--- a/apps/scraper/src/utils/db/video-operations.ts
+++ b/apps/scraper/src/utils/db/video-operations.ts
@@ -5,6 +5,7 @@
import { db } from '@acme/db/client';
import { Video } from '@acme/db/schema';
+import { uploadImage } from '@acme/db/storage';
import { and, eq } from '@acme/db';
import { generateMarketingCopy } from '../ai/marketing-generation.js';
import { generateImage, convertToJpeg } from '../ai/image-generation.js';
@@ -68,12 +69,11 @@ export async function generateVideoForContent(
// Generate marketing copy
const marketingCopy = await generateMarketingCopy(title, fullText, contentType);
- // Generate and convert image
- let imageData: Buffer | null = null;
- let imageMimeType = 'image/jpeg';
+ // Generate and convert image (upload happens after DB write to avoid orphans)
+ let jpegData: Buffer | null = null;
const generatedImage = await generateImage(marketingCopy.imagePrompt);
if (generatedImage) {
- imageData = await convertToJpeg(generatedImage.data);
+ jpegData = await convertToJpeg(generatedImage.data);
}
// Random engagement metrics (same as current video.ts)
@@ -83,7 +83,7 @@ export async function generateVideoForContent(
shares: Math.floor(Math.random() * 1000) + 10,
};
- // Upsert video with hybrid image support
+ // Upsert video first (without image URL)
try {
await db
.insert(Video)
@@ -92,11 +92,7 @@ export async function generateVideoForContent(
contentId,
title: marketingCopy.title,
description: marketingCopy.description,
- imageData,
- imageMimeType,
- imageWidth: imageData ? 1024 : null,
- imageHeight: imageData ? 1024 : null,
- thumbnailUrl: thumbnailUrl ?? undefined, // Add URL-based thumbnail support
+ thumbnailUrl: thumbnailUrl ?? undefined,
author,
engagementMetrics,
sourceContentHash: contentHash,
@@ -106,18 +102,11 @@ export async function generateVideoForContent(
set: {
title: marketingCopy.title,
description: marketingCopy.description,
- imageData,
- imageMimeType,
- imageWidth: imageData ? 1024 : null,
- imageHeight: imageData ? 1024 : null,
- thumbnailUrl: thumbnailUrl ?? undefined, // Update thumbnail URL on conflict
+ thumbnailUrl: thumbnailUrl ?? undefined,
sourceContentHash: contentHash,
updatedAt: new Date(),
},
});
-
- incrementVideosGenerated();
- logger.success(`Video generated for ${contentType}:${contentId}`);
} catch (error) {
// Sanitize error to avoid logging raw image data
const sanitizedError = error instanceof Error
@@ -126,4 +115,38 @@ export async function generateVideoForContent(
logger.error(`Failed to insert video for ${contentType}:${contentId}: ${sanitizedError}`);
throw error;
}
+
+ // Upload image after successful DB write, then update the row
+ if (jpegData) {
+ const storagePath = `videos/${contentType}/${contentId}.jpg`;
+ let imageUrl: string | undefined;
+ try {
+ imageUrl = await uploadImage(storagePath, jpegData);
+ } catch (error) {
+ logger.warn(`Image upload failed for ${contentType}:${contentId}, video saved without image: ${error instanceof Error ? error.message : String(error)}`);
+ }
+ if (imageUrl) {
+ try {
+ await db
+ .update(Video)
+ .set({
+ imageUrl,
+ imageData: null,
+ imageMimeType: null,
+ imageWidth: null,
+ imageHeight: null,
+ })
+ .where(and(eq(Video.contentType, contentType), eq(Video.contentId, contentId)));
+ logger.debug(`Uploaded image to ${storagePath}`);
+ } catch (error) {
+ // Don't delete the uploaded file — it lives at a deterministic path that
+ // may already be referenced by a previous imageUrl, and will be
+ // overwritten on the next successful run.
+ logger.warn(`DB update for imageUrl failed for ${contentType}:${contentId}, image uploaded but URL not saved: ${error instanceof Error ? error.message : String(error)}`);
+ }
+ }
+ }
+
+ incrementVideosGenerated();
+ logger.success(`Video generated for ${contentType}:${contentId}`);
}
diff --git a/docs/IMAGE_INTEGRATION.md b/docs/IMAGE_INTEGRATION.md
deleted file mode 100644
index 0503198..0000000
--- a/docs/IMAGE_INTEGRATION.md
+++ /dev/null
@@ -1,218 +0,0 @@
-# Image Integration for Article System
-
-## Overview
-
-This implementation adds relevant photo search and integration to the article generation system. Instead of AI-generated images, it uses the Pexels API to find high-quality, relevant stock photos that fit each article. Pexels provides instant API access (no approval wait) with generous rate limits.
-
-## Features
-
-- **Automatic Image Search**: When articles are generated, the system automatically searches for relevant photos
-- **AI-Powered Keywords**: Uses GPT-4o-mini to generate optimal search keywords from article content
-- **Thumbnail Support**: Each article gets a primary thumbnail image
-- **Multiple Images**: Articles can have up to 3 relevant images with proper attribution
-- **Source Attribution**: All images include photographer credit and source links
-
-## Database Schema Changes
-
-Added to `Bill`, `GovernmentContent`, and `CourtCase` tables:
-- `thumbnailUrl`: Text field for the primary thumbnail image URL
-- `images`: JSONB array containing image objects with:
- - `url`: Direct URL to the image
- - `alt`: Alt text description
- - `source`: Attribution text (e.g., "Photo by John Doe on Unsplash")
- - `sourceUrl`: Link to the original source page
-
-## Setup
-
-### 1. Get Pexels API Key (INSTANT - No Approval Wait!)
-
-1. Sign up at [Pexels API](https://www.pexels.com/api/)
-2. Your API key is displayed immediately after signup
-3. Copy your API Key
-
-### 2. Set Environment Variable
-
-Add to your `.env` file:
-
-```bash
-PEXELS_API_KEY=your_api_key_here
-```
-
-### 3. Run Database Migration
-
-```bash
-cd packages/db
-# If using a migration tool, run the migration
-# Or apply manually:
-psql -d your_database < migrations/add_image_fields.sql
-```
-
-### 4. Install Dependencies
-
-The scraper already has the necessary dependencies. Just ensure you have:
-- `ai` package (already installed)
-- `@ai-sdk/openai` (already installed)
-
-## How It Works
-
-### 1. Image Search Process
-
-When an article is generated in `apps/scraper/src/utils/db.ts`:
-
-1. **Keyword Generation**: AI analyzes the title and content to extract visual concepts
- - Example: "Infrastructure Bill" → "highway construction bridge"
-
-2. **Image Search**: Queries Unsplash API with generated keywords
- - Filters for landscape orientation
- - Ensures high content quality filter
-
-3. **Storage**: Saves thumbnail URL and image array to database
-
-### 2. Image Search Utility
-
-Located at `apps/scraper/src/utils/image-search.ts`:
-
-```typescript
-// Search for images
-const images = await searchImages('renewable energy solar panels', 3);
-
-// Get just a thumbnail
-const thumbnail = await getThumbnailImage('healthcare hospital');
-
-// Generate search keywords from content
-const keywords = await generateImageSearchKeywords(title, content, type);
-```
-
-### 3. API Integration
-
-The tRPC API endpoints in `packages/api/src/router/content.ts` now include:
-
-- `thumbnailUrl` in content card responses (for list views)
-- `images` array in detailed content responses (for article pages)
-
-## Usage in Frontend
-
-### Content Cards (List View)
-
-```typescript
-// Thumbnails are available in list responses
-const { data } = trpc.content.getAll.useQuery();
-
-data.forEach(item => {
- if (item.thumbnailUrl) {
- // Display thumbnail
-
- }
-});
-```
-
-### Article Detail View
-
-```typescript
-// Full image array available in detail view
-const { data } = trpc.content.getById.useQuery({ id });
-
-if (data.images && data.images.length > 0) {
- data.images.forEach(image => {
-
-
-
- {image.source}
-
-
- });
-}
-```
-
-## Fallback Behavior
-
-The system gracefully handles cases where images aren't available:
-
-- **No API Key**: Logs warning and continues without images
-- **No Results**: Articles work fine without images
-- **API Errors**: Logs error and continues processing
-- **Rate Limits**: Respects Unsplash's free tier limits (50 requests/hour)
-
-## Customization
-
-### Change Number of Images
-
-In `apps/scraper/src/utils/db.ts`:
-
-```typescript
-// Get more or fewer images
-images = await searchImages(searchQuery, 5); // Get 5 instead of 3
-```
-
-### Different Image Source
-
-Replace `apps/scraper/src/utils/image-search.ts` with a different API:
-
-- **Pexels**: Free, no attribution required
-- **Pixabay**: Free, no attribution required
-- **Getty Images**: Premium, requires license
-
-### Customize Search Keywords
-
-Modify the AI prompt in `generateImageSearchKeywords()` to adjust keyword generation:
-
-```typescript
-prompt: `Generate keywords focusing on [your specific requirements]...`
-```
-
-## Rate Limits
-
-**Unsplash Free Tier**:
-- 50 requests per hour
-- 5,000 total requests per month
-
-For higher volume, consider:
-1. Upgrading to Unsplash paid tier
-2. Caching image search results
-3. Using multiple image APIs with fallback
-
-## Testing
-
-To test image search without running the full scraper:
-
-```bash
-cd apps/scraper
-
-# Test image search
-node -e "
-import('./src/utils/image-search.ts').then(async ({ searchImages }) => {
- const images = await searchImages('congress capitol building', 3);
- console.log(images);
-});
-"
-```
-
-## Troubleshooting
-
-### No images appearing
-
-1. Check `UNSPLASH_ACCESS_KEY` is set correctly
-2. Verify you haven't hit rate limits
-3. Check console logs for errors
-4. Test API key manually: `curl -H "Authorization: Client-ID YOUR_KEY" "https://api.unsplash.com/photos/random"`
-
-### Images not relevant
-
-1. Review generated keywords in logs
-2. Adjust keyword generation prompt
-3. Consider using different search terms or manual keywords
-
-### Database errors
-
-1. Ensure migration was applied
-2. Check that columns exist: `\d bill` in psql
-3. Verify JSONB type is supported in your PostgreSQL version
-
-## Future Enhancements
-
-- [ ] Image caching to reduce API calls
-- [ ] Multiple image source fallbacks
-- [ ] Image optimization and CDN integration
-- [ ] User-selectable images from search results
-- [ ] Image relevance scoring
-- [ ] Automatic image cropping for thumbnails
diff --git a/packages/api/src/router/content.ts b/packages/api/src/router/content.ts
index 41e68d1..ccdd1e8 100644
--- a/packages/api/src/router/content.ts
+++ b/packages/api/src/router/content.ts
@@ -49,7 +49,7 @@ const ContentCardSchema = z.object({
type: z.enum(["bill", "government_content", "court_case", "general"]),
isAIGenerated: z.boolean(),
thumbnailUrl: z.string().optional(),
- imageUri: z.string().optional(), // Add support for AI-generated data URIs
+ imageUrl: z.string().optional(),
});
export type ContentCard = z.infer;
diff --git a/packages/api/src/router/video.ts b/packages/api/src/router/video.ts
index 02919e0..48e12dc 100644
--- a/packages/api/src/router/video.ts
+++ b/packages/api/src/router/video.ts
@@ -7,7 +7,6 @@ import { Video } from "@acme/db/schema";
import { publicProcedure } from "../trpc";
-// Schema for video/feed post (from Video table) - Hybrid image support
export const VideoPostSchema = z.object({
id: z.string(),
title: z.string().max(100),
@@ -18,10 +17,9 @@ export const VideoPostSchema = z.object({
shares: z.number(),
type: z.enum(["bill", "government_content", "court_case", "general"]),
articlePreview: z.string(),
- // Hybrid image support - use whichever is available
- imageUri: z.string().optional(), // Data URI from Video.imageData (AI-generated)
- thumbnailUrl: z.string().optional(), // URL from source content (scraped)
- originalContentId: z.string(), // Reference to source content
+ imageUrl: z.string().optional(),
+ thumbnailUrl: z.string().optional(),
+ originalContentId: z.string(),
});
export type VideoPost = z.infer;
@@ -45,15 +43,8 @@ export const videoRouter = {
.limit(limit)
.offset(cursor);
- // Transform to feed format with hybrid image support
+ // Transform to feed format
const feedPosts = videos.map((video) => {
- // Handle AI-generated binary images (convert to data URI)
- let imageUri: string | undefined;
- if (video.imageData && video.imageMimeType) {
- const base64 = video.imageData.toString("base64");
- imageUri = `data:${video.imageMimeType};base64,${base64}`;
- }
-
const metrics = video.engagementMetrics as {
likes: number;
comments: number;
@@ -80,10 +71,15 @@ export const videoRouter = {
comments: metrics.comments,
shares: metrics.shares,
type,
- articlePreview: video.description, // Marketing description as preview
- imageUri, // AI-generated data URI (if exists)
- thumbnailUrl: video.thumbnailUrl ?? undefined, // URL-based thumbnail (if exists)
- originalContentId: video.contentId, // For "Read Full Article" navigation
+ articlePreview: video.description,
+ imageUrl:
+ video.imageUrl ??
+ // Fallback: serve legacy imageData as data-URI until migration completes
+ (video.imageData
+ ? `data:${video.imageMimeType ?? "image/jpeg"};base64,${Buffer.from(video.imageData).toString("base64")}`
+ : undefined),
+ thumbnailUrl: video.thumbnailUrl ?? undefined,
+ originalContentId: video.contentId,
};
});
diff --git a/packages/db/eslint.config.ts b/packages/db/eslint.config.ts
index f54f34c..93660c4 100644
--- a/packages/db/eslint.config.ts
+++ b/packages/db/eslint.config.ts
@@ -4,7 +4,7 @@ import { baseConfig } from "@acme/eslint-config/base";
export default defineConfig(
{
- ignores: ["dist/**", "migrate-images.ts"],
+ ignores: ["dist/**", "migrate-images.ts", "migrate-images-to-storage.ts"],
},
baseConfig,
);
diff --git a/packages/db/migrate-images-to-storage.ts b/packages/db/migrate-images-to-storage.ts
new file mode 100644
index 0000000..b39cf8a
--- /dev/null
+++ b/packages/db/migrate-images-to-storage.ts
@@ -0,0 +1,127 @@
+/**
+ * Migration: move Video.imageData (bytea) blobs to Supabase Storage.
+ *
+ * For each Video row that has imageData but no imageUrl:
+ * 1. Upload the buffer to Supabase Storage
+ * 2. Write the public URL back to imageUrl
+ * 3. Null out imageData to free space
+ *
+ * After running this and verifying, drop the imageData column:
+ * ALTER TABLE video DROP COLUMN image_data;
+ * ALTER TABLE video DROP COLUMN image_mime_type;
+ * ALTER TABLE video DROP COLUMN image_width;
+ * ALTER TABLE video DROP COLUMN image_height;
+ *
+ * Usage: pnpm with-env tsx migrate-images-to-storage.ts [--dry-run]
+ */
+
+import { dirname, join } from "path";
+import { fileURLToPath } from "url";
+import { config } from "dotenv";
+import pg from "pg";
+
+import { uploadImage } from "./src/storage.js";
+
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = dirname(__filename);
+
+config({ path: join(__dirname, "../../.env") });
+
+const BATCH_SIZE = 50;
+const dryRun = process.argv.includes("--dry-run");
+
+async function migrate() {
+ const pool = new pg.Pool({ connectionString: process.env.POSTGRES_URL });
+
+ console.log(`${dryRun ? "[DRY RUN] " : ""}Starting image migration...\n`);
+
+ const client = await pool.connect();
+
+ try {
+ let migrated = 0;
+ let errors = 0;
+ let lastId: string | null = null;
+
+ // Migrate in batches using an ID cursor to keep memory bounded
+ while (true) {
+ const { rows } = await client.query<{
+ id: string;
+ content_type: string;
+ content_id: string;
+ image_data: Buffer;
+ image_mime_type: string | null;
+ }>(
+ `SELECT id, content_type, content_id, image_data, image_mime_type
+ FROM video
+ WHERE image_data IS NOT NULL AND image_url IS NULL
+ ${lastId ? "AND id > $2" : ""}
+ ORDER BY id
+ LIMIT $1`,
+ lastId ? [BATCH_SIZE, lastId] : [BATCH_SIZE],
+ );
+
+ if (rows.length === 0) break;
+
+ for (const row of rows) {
+ const storagePath = `videos/${row.content_type}/${row.content_id}.jpg`;
+ const mimeType = row.image_mime_type ?? "image/jpeg";
+
+ try {
+ if (dryRun) {
+ console.log(
+ ` [DRY RUN] Would upload ${storagePath} (${row.image_data.length} bytes)`,
+ );
+ migrated++;
+ continue;
+ }
+
+ // Upload via shared storage abstraction
+ const publicUrl = await uploadImage(
+ storagePath,
+ row.image_data,
+ mimeType,
+ );
+
+ // Write URL back and clear blob
+ await client.query(
+ `UPDATE video
+ SET image_url = $1, image_data = NULL, image_mime_type = NULL,
+ image_width = NULL, image_height = NULL
+ WHERE id = $2`,
+ [publicUrl, row.id],
+ );
+
+ migrated++;
+ console.log(` Migrated: ${storagePath}`);
+ } catch (err) {
+ errors++;
+ console.error(
+ ` Failed: ${storagePath} — ${err instanceof Error ? err.message : err}`,
+ );
+ }
+ }
+
+ lastId = rows[rows.length - 1]!.id;
+ }
+
+ console.log(`\nDone: ${migrated} migrated, ${errors} errors`);
+
+ if (!dryRun && migrated > 0 && errors === 0) {
+ console.log(
+ "\nAll blobs migrated. You can now drop the old columns:\n" +
+ " ALTER TABLE video DROP COLUMN image_data;\n" +
+ " ALTER TABLE video DROP COLUMN image_mime_type;\n" +
+ " ALTER TABLE video DROP COLUMN image_width;\n" +
+ " ALTER TABLE video DROP COLUMN image_height;",
+ );
+ }
+ } finally {
+ client.release();
+ await pool.end();
+ }
+}
+
+migrate().catch((err) => {
+ console.error("Migration failed:", err);
+ process.exit(1);
+});
diff --git a/packages/db/migrations/add_imageurl_to_video.sql b/packages/db/migrations/add_imageurl_to_video.sql
new file mode 100644
index 0000000..f275ae1
--- /dev/null
+++ b/packages/db/migrations/add_imageurl_to_video.sql
@@ -0,0 +1,4 @@
+-- Add image_url column to video table for object storage URLs
+-- This stores the public URL of AI-generated images uploaded to Supabase Storage / S3
+
+ALTER TABLE video ADD COLUMN IF NOT EXISTS image_url TEXT;
diff --git a/packages/db/package.json b/packages/db/package.json
index cef5270..9305219 100644
--- a/packages/db/package.json
+++ b/packages/db/package.json
@@ -14,6 +14,10 @@
"./schema": {
"types": "./dist/schema.d.ts",
"default": "./src/schema.ts"
+ },
+ "./storage": {
+ "types": "./dist/storage.d.ts",
+ "default": "./src/storage.ts"
}
},
"license": "MIT",
@@ -29,6 +33,7 @@
"with-env": "dotenv -e ../../.env --"
},
"dependencies": {
+ "@supabase/supabase-js": "^2.101.1",
"@vercel/postgres": "^0.10.0",
"drizzle-orm": "^0.45.2",
"drizzle-zod": "^0.8.3",
diff --git a/packages/db/src/schema.ts b/packages/db/src/schema.ts
index 88bf389..d6e8fdc 100644
--- a/packages/db/src/schema.ts
+++ b/packages/db/src/schema.ts
@@ -176,12 +176,15 @@ export const Video = pgTable(
title: t.varchar({ length: 25 }).notNull(), // Max 25 chars
description: t.text().notNull(), // 50-word catchy headline
- // Hybrid image storage: Binary AI-generated images OR URL-based scraped thumbnails
- imageData: bytea("image_data"), // Raw JPEG bytes (AI-generated)
- imageMimeType: t.varchar("image_mime_type", { length: 50 }), // "image/jpeg"
+ // Image storage: uploaded image URL and source thumbnail URL (scraped)
+ imageUrl: t.text(), // Public URL of uploaded image
+ thumbnailUrl: t.text(), // URL from source content (scraped)
+
+ // Deprecated: binary image storage (pending migration removal)
+ imageData: bytea("image_data"),
+ imageMimeType: t.varchar("image_mime_type", { length: 50 }),
imageWidth: t.integer("image_width"),
imageHeight: t.integer("image_height"),
- thumbnailUrl: t.text(), // URL from source content (scraped)
// Metadata
author: t.varchar({ length: 100 }), // "govtrack.com", "whitehouse.gov", etc.
diff --git a/packages/db/src/storage.ts b/packages/db/src/storage.ts
new file mode 100644
index 0000000..7df55e4
--- /dev/null
+++ b/packages/db/src/storage.ts
@@ -0,0 +1,58 @@
+import { createClient } from "@supabase/supabase-js";
+
+function getBucket() {
+ return process.env.SUPABASE_STORAGE_BUCKET ?? "images";
+}
+
+function getClient() {
+ const url = process.env.SUPABASE_URL;
+ const key = process.env.SUPABASE_SERVICE_ROLE_KEY;
+ if (!url || !key) {
+ throw new Error(
+ "Missing SUPABASE_URL or SUPABASE_SERVICE_ROLE_KEY for storage",
+ );
+ }
+ return createClient(url, key);
+}
+
+/**
+ * Upload an image buffer to object storage.
+ * @param path - Storage path (e.g. "videos/abc-123.jpg")
+ * @param data - Raw image bytes
+ * @param mimeType - MIME type (default "image/jpeg")
+ * @returns Public URL of the uploaded image
+ */
+export async function uploadImage(
+ path: string,
+ data: Buffer,
+ mimeType = "image/jpeg",
+): Promise {
+ const supabase = getClient();
+
+ const { error } = await supabase.storage.from(getBucket()).upload(path, data, {
+ contentType: mimeType,
+ upsert: true,
+ });
+
+ if (error) {
+ throw new Error(`Storage upload failed for ${path}: ${error.message}`);
+ }
+
+ const {
+ data: { publicUrl },
+ } = supabase.storage.from(getBucket()).getPublicUrl(path);
+
+ return publicUrl;
+}
+
+/**
+ * Delete an image from object storage.
+ * @param path - Storage path to delete
+ */
+export async function deleteImage(path: string): Promise {
+ const supabase = getClient();
+ const { error } = await supabase.storage.from(getBucket()).remove([path]);
+ if (error) {
+ throw new Error(`Storage delete failed for ${path}: ${error.message}`);
+ }
+}
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index a8ded77..10c029f 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -509,6 +509,9 @@ importers:
packages/db:
dependencies:
+ '@supabase/supabase-js':
+ specifier: ^2.101.1
+ version: 2.101.1(bufferutil@4.1.0)(utf-8-validate@6.0.4)
'@vercel/postgres':
specifier: ^0.10.0
version: 0.10.0(utf-8-validate@6.0.4)
@@ -3549,6 +3552,33 @@ packages:
'@standard-schema/spec@1.1.0':
resolution: {integrity: sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==}
+ '@supabase/auth-js@2.101.1':
+ resolution: {integrity: sha512-Kd0Wey+RkFHgyVep7adS6UOE2pN6MJ3mZ32PAXSvfw6IjUkFRC7IQpdZZjUOcUe5pXr1ejufCRgF6lsGINe4Tw==}
+ engines: {node: '>=20.0.0'}
+
+ '@supabase/functions-js@2.101.1':
+ resolution: {integrity: sha512-OZWU7YtaG+NNNFZK8p/FuJ6gpq7pFyrG2fLOopP73HAIDHDGpOttPJapvO8ADu3RkqfQfkwrB354vPkSBbZ20A==}
+ engines: {node: '>=20.0.0'}
+
+ '@supabase/phoenix@0.4.0':
+ resolution: {integrity: sha512-RHSx8bHS02xwfHdAbX5Lpbo6PXbgyf7lTaXTlwtFDPwOIw64NnVRwFAXGojHhjtVYI+PEPNSWwkL90f4agN3bw==}
+
+ '@supabase/postgrest-js@2.101.1':
+ resolution: {integrity: sha512-UW1RajH5jbZoK+ldAJ1I6VZ+HWwZ2oaKjEQ6Gn+AQ67CHQVxGl8wNQoLYyumbyaExm41I+wn7arulcY1eHeZJw==}
+ engines: {node: '>=20.0.0'}
+
+ '@supabase/realtime-js@2.101.1':
+ resolution: {integrity: sha512-Oa6dno0OB9I+hv5do5zsZHbFu41ViZnE9IWjmkeeF/8fPmB5fWoHGqeTYEC3/0DAgtpUoFJa4FpvzFH0SBHo1Q==}
+ engines: {node: '>=20.0.0'}
+
+ '@supabase/storage-js@2.101.1':
+ resolution: {integrity: sha512-WhTaUOBgeEvnKLy95Cdlp6+D5igSF/65yC727w1olxbet5nzUvMlajKUWyzNtQu2efrz2cQ7FcdVBdQqgT9YKQ==}
+ engines: {node: '>=20.0.0'}
+
+ '@supabase/supabase-js@2.101.1':
+ resolution: {integrity: sha512-Jnhm3LfuACwjIzvk2pfUbGQn7pa7hi6MFzfSyPrRYWVCCu69RPLCFyHSBl7HSBwadbQ3UZOznnD3gPca3ePrRA==}
+ engines: {node: '>=20.0.0'}
+
'@swc/helpers@0.5.15':
resolution: {integrity: sha512-JQ5TuMi45Owi4/BIMAJBoSQoOJu12oOk/gADqlcUL9JEdHB8vyjUSsxqeNXnmXHjYKMi2WcYtezGEEhqUI/E2g==}
@@ -3855,6 +3885,9 @@ packages:
'@types/turndown@5.0.6':
resolution: {integrity: sha512-ru00MoyeeouE5BX4gRL+6m/BsDfbRayOskWqUvh7CLGW+UXxHQItqALa38kKnOiZPqJrtzJUgAC2+F0rL1S4Pg==}
+ '@types/ws@8.18.1':
+ resolution: {integrity: sha512-ThVF6DCVhA8kUGy+aazFQ4kXQ7E1Ty7A3ypFOe0IcJV8O/M511G99AW24irKrW56Wt44yG9+ij8FaqoBGkuBXg==}
+
'@types/yargs-parser@21.0.3':
resolution: {integrity: sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==}
@@ -5758,6 +5791,10 @@ packages:
hyphenate-style-name@1.1.0:
resolution: {integrity: sha512-WDC/ui2VVRrz3jOVi+XtjqkDjiVjTtFaAGiW37k6b+ohyQ5wYDOGkvCZa8+H0nx3gyvv0+BST9xuOgIyGQ00gw==}
+ iceberg-js@0.8.1:
+ resolution: {integrity: sha512-1dhVQZXhcHje7798IVM+xoo/1ZdVfzOMIc8/rgVSijRK38EDqOJoGula9N/8ZI5RD8QTxNQtK/Gozpr+qUqRRA==}
+ engines: {node: '>=20.0.0'}
+
iconv-lite@0.6.3:
resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==}
engines: {node: '>=0.10.0'}
@@ -11404,6 +11441,46 @@ snapshots:
'@standard-schema/spec@1.1.0': {}
+ '@supabase/auth-js@2.101.1':
+ dependencies:
+ tslib: 2.8.1
+
+ '@supabase/functions-js@2.101.1':
+ dependencies:
+ tslib: 2.8.1
+
+ '@supabase/phoenix@0.4.0': {}
+
+ '@supabase/postgrest-js@2.101.1':
+ dependencies:
+ tslib: 2.8.1
+
+ '@supabase/realtime-js@2.101.1(bufferutil@4.1.0)(utf-8-validate@6.0.4)':
+ dependencies:
+ '@supabase/phoenix': 0.4.0
+ '@types/ws': 8.18.1
+ tslib: 2.8.1
+ ws: 8.20.0(bufferutil@4.1.0)(utf-8-validate@6.0.4)
+ transitivePeerDependencies:
+ - bufferutil
+ - utf-8-validate
+
+ '@supabase/storage-js@2.101.1':
+ dependencies:
+ iceberg-js: 0.8.1
+ tslib: 2.8.1
+
+ '@supabase/supabase-js@2.101.1(bufferutil@4.1.0)(utf-8-validate@6.0.4)':
+ dependencies:
+ '@supabase/auth-js': 2.101.1
+ '@supabase/functions-js': 2.101.1
+ '@supabase/postgrest-js': 2.101.1
+ '@supabase/realtime-js': 2.101.1(bufferutil@4.1.0)(utf-8-validate@6.0.4)
+ '@supabase/storage-js': 2.101.1
+ transitivePeerDependencies:
+ - bufferutil
+ - utf-8-validate
+
'@swc/helpers@0.5.15':
dependencies:
tslib: 2.8.1
@@ -11666,6 +11743,10 @@ snapshots:
'@types/turndown@5.0.6': {}
+ '@types/ws@8.18.1':
+ dependencies:
+ '@types/node': 25.5.0
+
'@types/yargs-parser@21.0.3': {}
'@types/yargs@17.0.35':
@@ -14013,6 +14094,8 @@ snapshots:
hyphenate-style-name@1.1.0: {}
+ iceberg-js@0.8.1: {}
+
iconv-lite@0.6.3:
dependencies:
safer-buffer: 2.1.2
diff --git a/social-media-agent/src/agent.ts b/social-media-agent/src/agent.ts
index 61a2cbe..4f23c2f 100644
--- a/social-media-agent/src/agent.ts
+++ b/social-media-agent/src/agent.ts
@@ -36,7 +36,7 @@ export interface ContentItem {
type: string;
isAIGenerated?: boolean;
thumbnailUrl?: string;
- imageUri?: string;
+ imageUrl?: string;
}
export class SocialMediaAgent {
diff --git a/turbo.json b/turbo.json
index dd1677a..a045ef2 100644
--- a/turbo.json
+++ b/turbo.json
@@ -51,7 +51,10 @@
"AUTH_DISCORD_SECRET",
"AUTH_REDIRECT_PROXY_URL",
"AUTH_SECRET",
- "PORT"
+ "PORT",
+ "SUPABASE_URL",
+ "SUPABASE_SERVICE_ROLE_KEY",
+ "SUPABASE_STORAGE_BUCKET"
],
"globalPassThroughEnv": [
"NODE_ENV",