diff --git a/README.md b/README.md
index 25a345fe..6a2f1523 100644
--- a/README.md
+++ b/README.md
@@ -54,6 +54,7 @@ Before running the project setup, you'll need to configure the following environ
- **Testnet** (safe testing with mock tokens): `https://v6-pegasus-node-02.origin-trail.network:8900`
- **Mainnet** (production DKG interactions): `https://positron.origin-trail.network`
- **Local development**: `http://localhost:8900` (default)
+- **`DKG_NODE_CUSTOM_RPC`**: Optional custom blockchain RPC URL. Leave unset to use the default RPC behavior.
- **`PORT`**: Server port (default: `9200`)
- **`EXPO_PUBLIC_APP_URL`**: Public app URL (default: `http://localhost:9200`)
- **`EXPO_PUBLIC_MCP_URL`**: MCP server URL (default: `http://localhost:9200`)
@@ -71,8 +72,22 @@ The setup script will:
- Prompt for required environment variables
- Create `.env` and `.env.development.local` files
- Set up the SQLite database with migrations
+- Optionally enable async publishing and provision the Publisher database using the Engine-derived MySQL password
- Create an admin user (username: `admin`, password: `admin123`)
+
+If you enable async publishing during setup, the Agent writes the consolidated Publisher settings into `apps/agent/.env`:
+
+- `ASYNC_PUBLISHING_ENABLED=true`
+- `DKGP_DATABASE_URL`
+- `REDIS_URL`
+
+The setup stores `DKG_PUBLISH_WALLET` in `apps/agent/.env` without a `0x`
+prefix for compatibility with existing node env files, and Publisher wallet
+records are seeded into MySQL using the same bare 64-hex format.
+
+The Agent server only loads `@dkg/plugin-dkg-publisher` when `ASYNC_PUBLISHING_ENABLED=true`.
+
### 4. Start Development
```bash
diff --git a/apps/agent/README.md b/apps/agent/README.md
index 9792c5ed..5536ad13 100644
--- a/apps/agent/README.md
+++ b/apps/agent/README.md
@@ -23,6 +23,11 @@
npm run script:setup
```
+ The setup flow can also enable async publishing. When enabled, it reuses the
+ Engine MySQL password from `dkg-engine/current/.env`, writes the consolidated
+ Publisher settings into `apps/agent/.env`, and the Agent loads the Publisher
+ plugin only when `ASYNC_PUBLISHING_ENABLED=true`.
+
Now you can run the app in development mode using:
```bash
diff --git a/apps/agent/env.d.ts b/apps/agent/env.d.ts
index 4a4a9c1d..14a94bb5 100644
--- a/apps/agent/env.d.ts
+++ b/apps/agent/env.d.ts
@@ -14,6 +14,15 @@ declare global {
DKG_PUBLISH_WALLET: string;
DKG_BLOCKCHAIN: string;
DKG_OTNODE_URL: string;
+ DKG_NODE_CUSTOM_RPC?: string;
+ ASYNC_PUBLISHING_ENABLED?: string;
+ DKGP_DATABASE_URL?: string;
+ REDIS_URL?: string;
+ WORKER_COUNT?: string;
+ POLL_FREQUENCY?: string;
+ STORAGE_TYPE?: string;
+ STORAGE_PATH?: string;
+ STORAGE_BASE_URL?: string;
SMTP_HOST: string;
SMTP_PORT: string;
SMTP_USER: string;
diff --git a/apps/agent/original_dkg_node b/apps/agent/original_dkg_node
deleted file mode 100644
index 2ca01577..00000000
Binary files a/apps/agent/original_dkg_node and /dev/null differ
diff --git a/apps/agent/package.json b/apps/agent/package.json
index 9563affb..161a0b5f 100644
--- a/apps/agent/package.json
+++ b/apps/agent/package.json
@@ -17,6 +17,7 @@
"dev:server": "tsup src/server/*.ts --format esm,cjs --watch --onSuccess=\"node dist/index.js --dev\"",
"drizzle:studio": "drizzle-kit studio",
"script:setup": "node dist/scripts/setup.js",
+ "script:publisher": "node dist/scripts/publisher.js",
"script:createUser": "node dist/scripts/createUser.js",
"script:createToken": "node dist/scripts/createToken.js",
"test": "PLAYWRIGHT_JUNIT_OUTPUT_NAME=DKG_Node_UI_Tests.xml npx playwright test spec/testUI.spec.js --grep '@gh_actions' --reporter=list,html,junit",
@@ -32,6 +33,7 @@
"dependencies": {
"@dkg/expo-forcegraph": "^0.0.0",
"@dkg/plugin-dkg-essentials": "^0.0.3",
+ "@dkg/plugin-dkg-publisher": "^1.0.0",
"@dkg/plugin-example": "^0.0.3",
"@dkg/plugin-oauth": "^0.0.2",
"@dkg/plugin-swagger": "^0.0.2",
diff --git a/apps/agent/src/server/helpers.ts b/apps/agent/src/server/helpers.ts
index 321a385f..9cca9a3a 100644
--- a/apps/agent/src/server/helpers.ts
+++ b/apps/agent/src/server/helpers.ts
@@ -34,6 +34,10 @@ export async function createFileWithContent(filePath: string, content: string) {
}
}
+export async function writeFileWithContent(filePath: string, content: string) {
+ await fs.writeFile(filePath, content, { encoding: "utf8" });
+}
+
export function configEnv() {
dotenv.config();
if (process.argv.includes("--dev")) {
diff --git a/apps/agent/src/server/index.ts b/apps/agent/src/server/index.ts
index f78f2ffc..def22735 100644
--- a/apps/agent/src/server/index.ts
+++ b/apps/agent/src/server/index.ts
@@ -1,109 +1,94 @@
import path from "path";
-import { createPluginServer, defaultPlugin } from "@dkg/plugins";
+import {
+ createPluginServer,
+ defaultPlugin,
+ type DkgPlugin,
+} from "@dkg/plugins";
import { authorized, createOAuthPlugin } from "@dkg/plugin-oauth";
import dkgEssentialsPlugin from "@dkg/plugin-dkg-essentials";
import createFsBlobStorage from "@dkg/plugin-dkg-essentials/createFsBlobStorage";
import examplePlugin from "@dkg/plugin-example";
import swaggerPlugin from "@dkg/plugin-swagger";
-//@ts-expect-error No types for dkg.js ...
+// @ts-expect-error No types for dkg.js ...
import DKG from "dkg.js";
import { eq } from "drizzle-orm";
+import { getTestMessageUrl } from "nodemailer";
import { userCredentialsSchema } from "@/shared/auth";
import { processStreamingCompletion } from "@/shared/chat";
import { verify } from "@node-rs/argon2";
-import { configDatabase, configEnv } from "./helpers";
-import webInterfacePlugin from "./webInterfacePlugin";
import createAccountManagementPlugin from "./accountManagementPlugin";
import {
- users,
- SqliteOAuthStorageProvider,
SqliteAccountManagementProvider,
+ SqliteOAuthStorageProvider,
+ users,
} from "./database/sqlite";
+import { configDatabase, configEnv } from "./helpers";
import mailer from "./mailer";
-import { getTestMessageUrl } from "nodemailer";
-
-configEnv();
-const db = configDatabase();
-
-const version = "1.0.0";
-
-const { oauthPlugin, openapiSecurityScheme } = createOAuthPlugin({
- storage: new SqliteOAuthStorageProvider(db),
- issuerUrl: new URL(process.env.EXPO_PUBLIC_MCP_URL),
- scopesSupported: [
- "mcp",
- "llm",
- "scope123",
- "blob",
- "epcis.read",
- "epcis.write",
- ],
- loginPageUrl: new URL(process.env.EXPO_PUBLIC_APP_URL + "/login"),
- schema: userCredentialsSchema,
- async login(credentials) {
- const user = await db
- .select()
- .from(users)
- .where(eq(users.email, credentials.email))
- .then((r) => r.at(0));
- if (!user) throw new Error("Invalid credentials");
+import webInterfacePlugin from "./webInterfacePlugin";
- const isValid = await verify(user.password, credentials.password);
- if (!isValid) throw new Error("Invalid credentials");
+async function main() {
+ configEnv();
+ const db = configDatabase();
+ const version = "1.0.0";
- return { scopes: user.scope.split(" "), extra: { userId: user.id } };
- },
-});
+ const { oauthPlugin, openapiSecurityScheme } = createOAuthPlugin({
+ storage: new SqliteOAuthStorageProvider(db),
+ issuerUrl: new URL(process.env.EXPO_PUBLIC_MCP_URL),
+ scopesSupported: [
+ "mcp",
+ "llm",
+ "scope123",
+ "blob",
+ "epcis.read",
+ "epcis.write",
+ ],
+ loginPageUrl: new URL(process.env.EXPO_PUBLIC_APP_URL + "/login"),
+ schema: userCredentialsSchema,
+ async login(credentials) {
+ const user = await db
+ .select()
+ .from(users)
+ .where(eq(users.email, credentials.email))
+ .then((results) => results.at(0));
+ if (!user) throw new Error("Invalid credentials");
-const accountManagementPlugin = createAccountManagementPlugin({
- provider: new SqliteAccountManagementProvider(db),
- async sendMail(toEmail, code) {
- const m = await mailer();
- if (!m) throw new Error("No SMTP transport available");
+ const isValid = await verify(user.password, credentials.password);
+ if (!isValid) throw new Error("Invalid credentials");
- await m
- .sendMail({
- to: toEmail,
- subject: "Password reset request | DKG Node",
- text:
- `Your password reset code is ${code}.` +
- `Link: ${process.env.EXPO_PUBLIC_APP_URL}/password-reset?code=${code}`,
- html:
- `
Your password reset code is ${code}.
` +
- `Please click here to reset your password.
`,
- })
- .then((info) => {
- console.debug(info);
- console.debug(getTestMessageUrl(info));
- });
- },
-});
+ return { scopes: user.scope.split(" "), extra: { userId: user.id } };
+ },
+ });
-const blobStorage = createFsBlobStorage(path.join(__dirname, "../data"));
+ const accountManagementPlugin = createAccountManagementPlugin({
+ provider: new SqliteAccountManagementProvider(db),
+ async sendMail(toEmail, code) {
+ const transport = await mailer();
+ if (!transport) throw new Error("No SMTP transport available");
-const otnodeUrl = new URL(process.env.DKG_OTNODE_URL);
+ await transport
+ .sendMail({
+ to: toEmail,
+ subject: "Password reset request | DKG Node",
+ text:
+ `Your password reset code is ${code}.` +
+ `Link: ${process.env.EXPO_PUBLIC_APP_URL}/password-reset?code=${code}`,
+ html:
+              `<p>Your password reset code is ${code}.</p>` +
+              `<p><a href="${process.env.EXPO_PUBLIC_APP_URL}/password-reset?code=${code}">Please click here</a> to reset your password.</p>`,
+ })
+ .then((info) => {
+ console.debug(info);
+ console.debug(getTestMessageUrl(info));
+ });
+ },
+ });
-const app = createPluginServer({
- name: "DKG API",
- version,
- context: {
- blob: blobStorage,
- dkg: new DKG({
- endpoint: `${otnodeUrl.protocol}//${otnodeUrl.hostname}`,
- port: otnodeUrl.port || "8900",
- blockchain: {
- name: process.env.DKG_BLOCKCHAIN,
- privateKey: process.env.DKG_PUBLISH_WALLET,
- },
- maxNumberOfRetries: 300,
- frequency: 2,
- contentType: "all",
- nodeApiVersion: "/v1",
- }),
- },
- plugins: [
+ const blobStorage = createFsBlobStorage(path.join(__dirname, "../data"));
+ const otnodeUrl = new URL(process.env.DKG_OTNODE_URL);
+ const dkgCustomRpc = process.env.DKG_NODE_CUSTOM_RPC?.trim();
+ const plugins: DkgPlugin[] = [
defaultPlugin,
oauthPlugin,
(_, __, api) => {
@@ -119,7 +104,6 @@ const app = createPluginServer({
api.use("/change-password", authorized([]));
api.use("/profile", authorized([]));
},
-    // Streaming LLM middleware — intercepts SSE requests before Expo Router
(_, __, api) => {
api.post("/llm", (req, res, next) => {
if (!req.headers.accept?.includes("text/event-stream")) return next();
@@ -128,8 +112,18 @@ const app = createPluginServer({
},
accountManagementPlugin,
dkgEssentialsPlugin,
+ ];
+
+ if (process.env.ASYNC_PUBLISHING_ENABLED === "true") {
+ const { default: dkgPublisherPlugin } = await import(
+ "@dkg/plugin-dkg-publisher"
+ );
+ plugins.push(dkgPublisherPlugin);
+ }
+
+ plugins.push(
examplePlugin.withNamespace("protected", {
- middlewares: [authorized(["scope123"])], // Allow only users with the "scope123" scope
+ middlewares: [authorized(["scope123"])],
}),
swaggerPlugin({
version,
@@ -145,28 +139,55 @@ const app = createPluginServer({
],
}),
webInterfacePlugin(path.join(__dirname, "./app")),
- ],
-});
-
-const port = process.env.PORT || 9200;
-const server = app.listen(port, (err) => {
- if (err) {
- console.error(err);
- process.exit(1);
- }
- console.log(`Server running at http://localhost:${port}/`);
+ );
- process.on("SIGINT", () => {
- server.close();
- process.exit(0);
+ const app = createPluginServer({
+ name: "DKG API",
+ version,
+ context: {
+ blob: blobStorage,
+ dkg: new DKG({
+ endpoint: `${otnodeUrl.protocol}//${otnodeUrl.hostname}`,
+ port: otnodeUrl.port || "8900",
+ blockchain: {
+ name: process.env.DKG_BLOCKCHAIN,
+ privateKey: process.env.DKG_PUBLISH_WALLET,
+ ...(dkgCustomRpc && { rpc: dkgCustomRpc }),
+ },
+ maxNumberOfRetries: 300,
+ frequency: 2,
+ contentType: "all",
+ nodeApiVersion: "/v1",
+ }),
+ },
+ plugins,
});
- process.on("SIGTERM", () => {
- server.close((err) => {
- if (err) {
- console.error(err);
- process.exit(1);
- }
+
+ const port = process.env.PORT || 9200;
+ const server = app.listen(port, (error) => {
+ if (error) {
+ console.error(error);
+ process.exit(1);
+ }
+ console.log(`Server running at http://localhost:${port}/`);
+
+ process.on("SIGINT", () => {
+ server.close();
process.exit(0);
});
+ process.on("SIGTERM", () => {
+ server.close((closeError) => {
+ if (closeError) {
+ console.error(closeError);
+ process.exit(1);
+ }
+ process.exit(0);
+ });
+ });
});
+}
+
+main().catch((error) => {
+ console.error(error);
+ process.exit(1);
});
diff --git a/apps/agent/src/server/scripts/publisher.ts b/apps/agent/src/server/scripts/publisher.ts
new file mode 100644
index 00000000..4298577d
--- /dev/null
+++ b/apps/agent/src/server/scripts/publisher.ts
@@ -0,0 +1,733 @@
+import prompts from "prompts";
+import {
+ addPublisherWallets,
+ buildPublisherDefaults,
+ isValidMysqlIdentifier,
+ isValidPrivateKey,
+ listPublisherWallets,
+ readAgentEnv,
+ resolveEngineMysqlPassword,
+ resolvePublisherConfigFromAgentEnv,
+ resetPublisherDatabase,
+ setPublisherWalletActive,
+ stripPrivateKeyPrefix,
+ type AgentEnvState,
+ upsertAgentEnvValues,
+} from "../setupPublisher";
+
+const promptOptions = {
+ onCancel: () => {
+ throw new Error("DKG Publisher plugin setup cancelled by user");
+ },
+};
+
+const styles = {
+ reset: "\x1b[0m",
+ bold: "\x1b[1m",
+ cyan: "\x1b[36m",
+ blue: "\x1b[34m",
+ yellow: "\x1b[33m",
+};
+
+const LOCK_STALE_THRESHOLD_MS = 30 * 60 * 1000;
+
+function printBanner() {
+ const lines = [
+ "+------------------------------------------------------------+",
+ "| DKG Publisher Plugin Management |",
+ "| |",
+ "| Configure DKG Publisher plugin for async publishing. |",
+ "+------------------------------------------------------------+",
+ ];
+
+ console.log(
+ `\n${styles.bold}${styles.blue}${lines.join("\n")}${styles.reset}\n`,
+ );
+}
+
+function printSection(title: string, description?: string) {
+ console.log(
+ `\n${styles.bold}${styles.cyan}=== ${title} ===${styles.reset}`,
+ );
+ if (description) {
+ console.log(`${styles.yellow}${description}${styles.reset}`);
+ }
+}
+
+function getRecommendedWorkerCount(walletCount: number) {
+ return Math.max(1, Math.min(Math.ceil(walletCount / 10), 5));
+}
+
+function isValidUrl(value: string) {
+ try {
+ new URL(value);
+ return true;
+ } catch {
+ return false;
+ }
+}
+
+function isStaleLock(lockedAt: Date | null) {
+ if (!lockedAt) {
+ return false;
+ }
+ return Date.now() - lockedAt.getTime() >= LOCK_STALE_THRESHOLD_MS;
+}
+
+function resolvePublisherConfigSafely(
+  envValues: Record<string, string>,
+ appUrl: string,
+ options: { silent?: boolean } = {},
+) {
+ try {
+ return resolvePublisherConfigFromAgentEnv(envValues, appUrl);
+ } catch (error: any) {
+ if (!options.silent) {
+ console.warn(
+ `Existing DKG Publisher plugin configuration could not be parsed (${error.message}).`,
+ );
+ }
+ return null;
+ }
+}
+
+async function collectAdditionalWallets(options?: {
+ promptForFirstConfirmation?: boolean;
+}) {
+ const wallets: string[] = [];
+
+ let shouldAdd = options?.promptForFirstConfirmation === false;
+ if (!shouldAdd) {
+ const addWalletsResponse = await prompts(
+ {
+ type: "confirm",
+ name: "addWallets",
+ message: "Add additional publishing wallets?",
+ initial: false,
+ },
+ promptOptions,
+ );
+ shouldAdd = addWalletsResponse.addWallets === true;
+ }
+
+ while (shouldAdd) {
+ const walletResponse = await prompts(
+ {
+ type: "text",
+ name: "privateKey",
+ message: `Additional publish wallet private key #${wallets.length + 1}`,
+ validate: (value) => {
+ if (!value.length) return "Required";
+ return (
+ isValidPrivateKey(value) ||
+ "Private key must be 64 hexadecimal characters, with or without a 0x prefix"
+ );
+ },
+ },
+ promptOptions,
+ );
+
+ wallets.push(walletResponse.privateKey);
+
+ const continueResponse = await prompts(
+ {
+ type: "confirm",
+ name: "addAnother",
+ message: "Add another publishing wallet?",
+ initial: false,
+ },
+ promptOptions,
+ );
+ shouldAdd = continueResponse.addAnother === true;
+ }
+
+ return wallets;
+}
+
+async function ensurePrimaryWallet(envPublishWallet: string | undefined) {
+ if (envPublishWallet && isValidPrivateKey(envPublishWallet)) {
+ return envPublishWallet;
+ }
+
+ const response = await prompts(
+ {
+ type: "text",
+ name: "primaryWallet",
+ message:
+ "Primary publish wallet private key (required for publisher provisioning)",
+ validate: (value) =>
+ isValidPrivateKey(value) ||
+ "Private key must be 64 hexadecimal characters, with or without a 0x prefix",
+ },
+ promptOptions,
+ );
+
+ return response.primaryWallet as string;
+}
+
+async function runUpdateConfigMode() {
+ const envState = await readAgentEnv();
+ const enginePassword = await resolveEngineMysqlPassword();
+ const appUrl = envState.values.EXPO_PUBLIC_MCP_URL || "http://localhost:9200";
+ const publisherConfig = resolvePublisherConfigSafely(
+ envState.values,
+ appUrl,
+ { silent: true },
+ );
+
+ printSection(
+ "Update DKG Publisher Plugin Config",
+ "Update MySQL/Redis settings and seed wallets for async publishing.",
+ );
+
+ const mysqlPasswordFromEnv = publisherConfig?.mysql.password || "";
+ const mysqlPasswordFromEngine = enginePassword.mysqlPassword || "";
+ const mysqlPasswordDefault = mysqlPasswordFromEnv || mysqlPasswordFromEngine;
+ const hasDefaultMysqlPassword = mysqlPasswordDefault.length > 0;
+
+ const connectionResponse = await prompts(
+ [
+ {
+ type: "text",
+ name: "mysqlHost",
+ message: "Publisher MySQL host",
+ initial: publisherConfig?.mysql.host || "localhost",
+ },
+ {
+ type: "number",
+ name: "mysqlPort",
+ message: "Publisher MySQL port",
+ initial: publisherConfig?.mysql.port || 3306,
+ min: 0,
+ },
+ {
+ type: "text",
+ name: "mysqlUser",
+ message: "Publisher MySQL username",
+ initial: publisherConfig?.mysql.user || "root",
+ },
+ {
+ type: "password",
+ name: "mysqlPassword",
+ message: hasDefaultMysqlPassword
+ ? "Publisher MySQL password (leave blank to use the password set during dkg-node install)"
+ : "Publisher MySQL password",
+ validate: (value) =>
+ hasDefaultMysqlPassword || value.length > 0
+ ? true
+ : "Required for async publishing",
+ },
+ {
+ type: "text",
+ name: "mysqlDatabase",
+ message: "Publisher MySQL database name",
+ initial: publisherConfig?.mysql.database || "dkg_publisher_db",
+ validate: (value) =>
+ isValidMysqlIdentifier(value) ||
+ "Use letters, numbers, and underscores only",
+ },
+ {
+ type: "text",
+ name: "redisHost",
+ message: "Publisher Redis host",
+ initial: publisherConfig?.redis.host || "localhost",
+ },
+ {
+ type: "number",
+ name: "redisPort",
+ message: "Publisher Redis port",
+ initial: publisherConfig?.redis.port || 6379,
+ min: 0,
+ },
+ {
+ type: "password",
+ name: "redisPassword",
+ message: "Publisher Redis password (leave blank if no password)",
+ initial: publisherConfig?.redis.password || "",
+ },
+ ],
+ promptOptions,
+ );
+
+ const primaryWallet = await ensurePrimaryWallet(envState.values.DKG_PUBLISH_WALLET);
+ const normalizedPrimaryWalletForEnv = stripPrivateKeyPrefix(primaryWallet);
+ const additionalWallets = await collectAdditionalWallets();
+ const walletCount = 1 + additionalWallets.length;
+
+ const advancedResponse = await prompts(
+ {
+ type: "confirm",
+ name: "advanced",
+ message: "Configure advanced worker and storage overrides?",
+ initial: false,
+ },
+ promptOptions,
+ );
+
+ const recommendedWorkerCount = getRecommendedWorkerCount(walletCount);
+ const workerDefaultsResponse: {
+ workerCount: number;
+ pollFrequency?: number;
+ storagePath?: string;
+ storageBaseUrl?: string;
+ } = advancedResponse.advanced
+ ? await prompts(
+ [
+ {
+ type: "number",
+ name: "workerCount",
+ message: `Publisher worker count (default: ${recommendedWorkerCount}, based on wallet count; concurrency auto-balances)`,
+ initial:
+ publisherConfig?.workerCount ||
+ recommendedWorkerCount,
+ min: 1,
+ },
+ {
+ type: "number",
+ name: "pollFrequency",
+ message: "Publisher poll frequency (ms)",
+ initial: publisherConfig?.pollFrequency || 2000,
+ min: 100,
+ },
+ {
+ type: "text",
+ name: "storagePath",
+ message: "Publisher storage path",
+ initial: publisherConfig?.storagePath || "./data/publisher",
+ },
+ {
+ type: "text",
+ name: "storageBaseUrl",
+ message: "Publisher storage base URL",
+ initial:
+ publisherConfig?.storageBaseUrl ||
+ new URL("/storage", appUrl).toString().replace(/\/$/, ""),
+ validate: (value) =>
+ isValidUrl(value) || "Provide a valid URL",
+ },
+ ],
+ promptOptions,
+ )
+ : {
+ workerCount: publisherConfig?.workerCount || recommendedWorkerCount,
+ };
+
+ const mysqlPassword =
+ connectionResponse.mysqlPassword?.trim() || mysqlPasswordDefault;
+ if (!mysqlPassword) {
+ throw new Error("Async publishing requires a MySQL password");
+ }
+
+ const publisherDefaults = buildPublisherDefaults(appUrl, mysqlPassword, {
+ mysqlHost: connectionResponse.mysqlHost,
+ mysqlPort: connectionResponse.mysqlPort,
+ mysqlUser: connectionResponse.mysqlUser,
+ mysqlDatabase: connectionResponse.mysqlDatabase,
+ redisHost: connectionResponse.redisHost,
+ redisPort: connectionResponse.redisPort,
+ redisPassword: connectionResponse.redisPassword,
+ workerCount: workerDefaultsResponse.workerCount,
+ pollFrequency: workerDefaultsResponse.pollFrequency,
+ storagePath: workerDefaultsResponse.storagePath,
+ storageBaseUrl: workerDefaultsResponse.storageBaseUrl,
+ });
+
+ const blockchain = envState.values.DKG_BLOCKCHAIN || "hardhat1:31337";
+ const walletSeeds = [
+ { privateKey: primaryWallet, blockchain },
+ ...additionalWallets.map((privateKey) => ({ privateKey, blockchain })),
+ ];
+
+ const provisionResult = await addPublisherWallets(
+ publisherDefaults.databaseUrl,
+ walletSeeds,
+ );
+
+ await upsertAgentEnvValues(
+ {
+ DKGP_DATABASE_URL: publisherDefaults.databaseUrl,
+ REDIS_URL: publisherDefaults.redisUrl,
+ WORKER_COUNT: workerDefaultsResponse.workerCount,
+ ASYNC_PUBLISHING_ENABLED: true,
+ DKG_PUBLISH_WALLET: normalizedPrimaryWalletForEnv,
+ ...(advancedResponse.advanced
+ ? {
+ POLL_FREQUENCY: publisherDefaults.pollFrequency,
+ STORAGE_TYPE: "filesystem",
+ STORAGE_PATH: publisherDefaults.storagePath,
+ STORAGE_BASE_URL: publisherDefaults.storageBaseUrl,
+ }
+ : {
+ POLL_FREQUENCY: null,
+ STORAGE_TYPE: null,
+ STORAGE_PATH: null,
+ STORAGE_BASE_URL: null,
+ }),
+ },
+ envState.envPath,
+ );
+
+ console.log(
+ `DKG Publisher plugin configuration updated. walletsInserted=${provisionResult.walletsInserted}`,
+ );
+}
+
+function printWalletTable(
+ wallets: Array<{
+ id: number;
+ address: string;
+ blockchain: string;
+ isActive: boolean;
+ isLocked: boolean;
+ totalUses: number;
+ }>,
+) {
+ if (!wallets.length) {
+ console.log("No wallets found in the DKG Publisher plugin database.");
+ return;
+ }
+
+ console.log("");
+ console.log("ID Active Locked Uses Blockchain Address");
+ for (const wallet of wallets) {
+ const id = wallet.id.toString().padEnd(3, " ");
+ const active = (wallet.isActive ? "yes" : "no").padEnd(7, " ");
+ const locked = (wallet.isLocked ? "yes" : "no").padEnd(7, " ");
+ const uses = wallet.totalUses.toString().padEnd(5, " ");
+ const chain = wallet.blockchain.padEnd(15, " ");
+ console.log(
+ `${id}${active}${locked}${uses}${chain}${wallet.address}`,
+ );
+ }
+}
+
+async function runWalletManagementMode() {
+ printSection(
+ "Manage DKG Publisher Plugin Wallets",
+ "List, add, deactivate, or reactivate wallets in the DKG Publisher plugin database.",
+ );
+
+ const envState = await readAgentEnv();
+ const appUrl = envState.values.EXPO_PUBLIC_MCP_URL || "http://localhost:9200";
+ const publisherConfig = resolvePublisherConfigSafely(
+ envState.values,
+ appUrl,
+ );
+
+ if (!publisherConfig) {
+ throw new Error(
+ "DKGP_DATABASE_URL is missing in apps/agent/.env. Run DKG Publisher plugin config update first.",
+ );
+ }
+
+ const blockchain = envState.values.DKG_BLOCKCHAIN || "hardhat1:31337";
+ let keepRunning = true;
+
+ while (keepRunning) {
+ const wallets = await listPublisherWallets(publisherConfig.databaseUrl);
+
+ const actionResponse = await prompts(
+ {
+ type: "select",
+ name: "action",
+ message: "Wallet management action",
+ choices: [
+ { title: "List wallets", value: "list" },
+ { title: "Add wallet(s)", value: "add" },
+ { title: "Deactivate wallet", value: "deactivate" },
+ { title: "Reactivate wallet", value: "reactivate" },
+ { title: "Back", value: "back" },
+ ],
+ initial: 0,
+ },
+ promptOptions,
+ );
+
+ if (actionResponse.action === "back") {
+ keepRunning = false;
+ continue;
+ }
+
+ if (actionResponse.action === "list") {
+ printWalletTable(wallets);
+ continue;
+ }
+
+ if (actionResponse.action === "add") {
+ const additionalWallets = await collectAdditionalWallets({
+ promptForFirstConfirmation: false,
+ });
+ if (!additionalWallets.length) {
+ console.log("No wallets were added.");
+ continue;
+ }
+
+ const result = await addPublisherWallets(
+ publisherConfig.databaseUrl,
+ additionalWallets.map((privateKey) => ({ privateKey, blockchain })),
+ );
+ console.log(`Wallet add complete. walletsInserted=${result.walletsInserted}`);
+ continue;
+ }
+
+ if (actionResponse.action === "deactivate") {
+ const activeWallets = wallets.filter((wallet) => wallet.isActive);
+ if (!activeWallets.length) {
+ console.log("No active wallets available to deactivate.");
+ continue;
+ }
+
+ const targetResponse = await prompts(
+ {
+ type: "select",
+ name: "walletId",
+ message: "Choose wallet to deactivate",
+ choices: activeWallets.map((wallet) => ({
+ title: `#${wallet.id} ${wallet.address} (${wallet.isLocked ? "locked" : "available"})`,
+ value: wallet.id,
+ })),
+ },
+ promptOptions,
+ );
+
+ const selected = wallets.find((wallet) => wallet.id === targetResponse.walletId);
+ if (!selected) {
+ console.log("Wallet not found.");
+ continue;
+ }
+
+ let forceUnlock = false;
+ if (selected.isLocked) {
+ if (!isStaleLock(selected.lockedAt)) {
+ console.log(
+ `Wallet #${selected.id} is currently locked. Wait for the active publish to finish before deactivating.`,
+ );
+ continue;
+ }
+
+ const unlockResponse = await prompts(
+ {
+ type: "confirm",
+ name: "forceUnlock",
+ message:
+ "Wallet appears locked for over 30 minutes. Force unlock and deactivate?",
+ initial: false,
+ },
+ promptOptions,
+ );
+ forceUnlock = unlockResponse.forceUnlock === true;
+ if (!forceUnlock) {
+ console.log("Deactivation cancelled.");
+ continue;
+ }
+ }
+
+ const result = await setPublisherWalletActive(
+ publisherConfig.databaseUrl,
+ selected.id,
+ false,
+ { forceUnlock },
+ );
+ console.log(
+ `Wallet #${result.id} deactivated${result.forcedUnlock ? " (forced unlock applied)" : ""}.`,
+ );
+ continue;
+ }
+
+ if (actionResponse.action === "reactivate") {
+ const inactiveWallets = wallets.filter((wallet) => !wallet.isActive);
+ if (!inactiveWallets.length) {
+ console.log("No inactive wallets available to reactivate.");
+ continue;
+ }
+
+ const targetResponse = await prompts(
+ {
+ type: "select",
+ name: "walletId",
+ message: "Choose wallet to reactivate",
+ choices: inactiveWallets.map((wallet) => ({
+ title: `#${wallet.id} ${wallet.address}`,
+ value: wallet.id,
+ })),
+ },
+ promptOptions,
+ );
+
+ const result = await setPublisherWalletActive(
+ publisherConfig.databaseUrl,
+ targetResponse.walletId,
+ true,
+ );
+ console.log(`Wallet #${result.id} reactivated.`);
+ }
+ }
+}
+
+async function runFreshResetMode() {
+ const envState = await readAgentEnv();
+ const appUrl = envState.values.EXPO_PUBLIC_MCP_URL || "http://localhost:9200";
+ const publisherConfig = resolvePublisherConfigSafely(
+ envState.values,
+ appUrl,
+ );
+
+ if (!publisherConfig) {
+ throw new Error(
+ "DKGP_DATABASE_URL is missing in apps/agent/.env. Run DKG Publisher plugin config update first.",
+ );
+ }
+
+ printSection(
+ "Fresh Setup (Will Delete Existing Data)",
+ "This will drop DKG Publisher plugin tables and recreate them. Existing async publishing queue/status history will be removed.",
+ );
+
+ const confirmationResponse = await prompts(
+ {
+ type: "text",
+ name: "confirmation",
+ message: 'Type "RESET DKG PUBLISHER DATA" to continue',
+ validate: (value) =>
+ value === "RESET DKG PUBLISHER DATA" ||
+ 'Type exactly: RESET DKG PUBLISHER DATA',
+ },
+ promptOptions,
+ );
+
+ if (confirmationResponse.confirmation !== "RESET DKG PUBLISHER DATA") {
+ console.log("DKG Publisher plugin fresh setup cancelled.");
+ return;
+ }
+
+ const primaryWallet = await ensurePrimaryWallet(envState.values.DKG_PUBLISH_WALLET);
+ const normalizedPrimaryWalletForEnv = stripPrivateKeyPrefix(primaryWallet);
+ const additionalWallets = await collectAdditionalWallets();
+ const blockchain = envState.values.DKG_BLOCKCHAIN || "hardhat1:31337";
+ const walletSeeds = [
+ { privateKey: primaryWallet, blockchain },
+ ...additionalWallets.map((privateKey) => ({ privateKey, blockchain })),
+ ];
+
+ const result = await resetPublisherDatabase(
+ publisherConfig.databaseUrl,
+ walletSeeds,
+ );
+
+ await upsertAgentEnvValues(
+ {
+ ASYNC_PUBLISHING_ENABLED: true,
+ DKG_PUBLISH_WALLET: normalizedPrimaryWalletForEnv,
+ },
+ envState.envPath,
+ );
+
+ console.log(
+ `DKG Publisher plugin fresh setup complete. droppedTables=${result.droppedTables.length}, walletsInserted=${result.walletsInserted}`,
+ );
+}
+
+async function shouldContinueManaging() {
+ const response = await prompts(
+ {
+ type: "confirm",
+ name: "continueManaging",
+ message: "Run another DKG Publisher plugin action?",
+ initial: false,
+ },
+ promptOptions,
+ );
+
+ return response.continueManaging === true;
+}
+
+async function main() {
+ printBanner();
+ let envState: AgentEnvState;
+ try {
+ envState = await readAgentEnv();
+ } catch (error: any) {
+ if (error?.code === "ENOENT") {
+ throw new Error(
+ "apps/agent/.env was not found. Run npm run script:setup first.",
+ );
+ }
+ throw error;
+ }
+ const appUrl = envState.values.EXPO_PUBLIC_MCP_URL || "http://localhost:9200";
+ let publisherConfig = resolvePublisherConfigSafely(
+ envState.values,
+ appUrl,
+ );
+
+ if (!publisherConfig) {
+ console.log(
+ "DKG Publisher plugin config was not found in apps/agent/.env. Starting config update mode.",
+ );
+ await runUpdateConfigMode();
+ const continueAfterAutoSetup = await shouldContinueManaging();
+ if (!continueAfterAutoSetup) {
+ console.log("DKG Publisher plugin management finished.");
+ return;
+ }
+ envState = await readAgentEnv();
+ publisherConfig = resolvePublisherConfigSafely(
+ envState.values,
+ envState.values.EXPO_PUBLIC_MCP_URL || "http://localhost:9200",
+ );
+ }
+
+ if (!publisherConfig) {
+ throw new Error(
+ "DKG Publisher plugin configuration is still missing after setup. Verify DKGP_DATABASE_URL in apps/agent/.env.",
+ );
+ }
+
+ let done = false;
+ while (!done) {
+ const response = await prompts(
+ {
+ type: "select",
+ name: "mode",
+ message: "Choose DKG Publisher plugin management mode",
+ choices: [
+ { title: "Update DKG Publisher plugin config", value: "config" },
+ { title: "Manage wallets", value: "wallets" },
+ { title: "Fresh setup (will delete existing data)", value: "reset" },
+ { title: "Exit", value: "exit" },
+ ],
+ initial: 0,
+ },
+ promptOptions,
+ );
+
+ if (response.mode === "exit") {
+ done = true;
+ continue;
+ }
+
+ try {
+ if (response.mode === "config") {
+ await runUpdateConfigMode();
+ } else if (response.mode === "wallets") {
+ await runWalletManagementMode();
+ } else if (response.mode === "reset") {
+ await runFreshResetMode();
+ }
+
+ const continueManaging = await shouldContinueManaging();
+ if (!continueManaging) {
+ done = true;
+ }
+ } catch (error: any) {
+ console.error(`DKG Publisher plugin management error: ${error.message}`);
+ }
+ }
+
+ console.log("DKG Publisher plugin management finished.");
+}
+
+main().catch((error) => {
+ console.error("Error occurred during DKG Publisher plugin management:", error);
+ process.exit(1);
+});
diff --git a/apps/agent/src/server/scripts/setup.ts b/apps/agent/src/server/scripts/setup.ts
index 9a4fc915..06670bf0 100644
--- a/apps/agent/src/server/scripts/setup.ts
+++ b/apps/agent/src/server/scripts/setup.ts
@@ -1,19 +1,259 @@
import path from "path";
import prompts from "prompts";
+
+import { getLLMProviderApiKeyEnvName, LLMProvider } from "@/shared/chat";
+import { DEFAULT_SYSTEM_PROMPT } from "@/shared/prompts/defaultSystemPrompt";
+
import {
configDatabase,
configEnv,
createFileWithContent,
createUser,
+ writeFileWithContent,
} from "../helpers";
import {
- getLLMProviderApiKeyEnvName,
- LLMProvider,
-} from "@/shared/chat";
-import { DEFAULT_SYSTEM_PROMPT } from "@/shared/prompts/defaultSystemPrompt";
+ buildPublisherDefaults,
+ isValidPrivateKey,
+ isValidMysqlIdentifier,
+ provisionAsyncPublishing,
+ resolveEngineMysqlPassword,
+ stripPrivateKeyPrefix,
+} from "../setupPublisher";
+
+function formatEnvValue(value: string) {
+ return `"${value.replace(/\\/g, "\\\\").replace(/"/g, '\\"')}"`;
+}
+
+const promptOptions = {
+ onCancel: () => {
+ throw new Error("Setup cancelled by user");
+ },
+};
+
+const styles = {
+ reset: "\x1b[0m",
+ bold: "\x1b[1m",
+ cyan: "\x1b[36m",
+ blue: "\x1b[34m",
+ yellow: "\x1b[33m",
+};
+
+function printBanner() {
+ const lines = [
+ "+------------------------------------------------------------+",
+ "| DKG Agent Setup |",
+ "| |",
+ "| This script will help you configure your DKG Agent. |",
+ "+------------------------------------------------------------+",
+ ];
+
+ console.log(
+ `\n${styles.bold}${styles.blue}${lines.join("\n")}${styles.reset}\n`,
+ );
+}
+
+function printSection(title: string, description?: string) {
+ console.log(
+ `\n${styles.bold}${styles.cyan}=== ${title} ===${styles.reset}`,
+ );
+ if (description) {
+ console.log(`${styles.yellow}${description}${styles.reset}`);
+ }
+}
+
+function getRecommendedWorkerCount(walletCount: number) {
+ return Math.max(1, Math.min(Math.ceil(walletCount / 10), 5));
+}
+
+function isValidUrl(value: string) {
+ try {
+ new URL(value);
+ return true;
+ } catch {
+ return false;
+ }
+}
+
+async function collectAdditionalPublisherWallets() {
+ const additionalWallets: string[] = [];
+ const addWalletsResponse = await prompts(
+ {
+ type: "confirm",
+ name: "addMoreWallets",
+ message:
+ "Add more publishing wallets now? The primary wallet will already be included.",
+ initial: false,
+ },
+ promptOptions,
+ );
+
+ let shouldAddWallet = addWalletsResponse.addMoreWallets === true;
+ while (shouldAddWallet) {
+ const walletResponse = await prompts(
+ {
+ type: "text",
+ name: "privateKey",
+ message: `Additional publish wallet private key #${additionalWallets.length + 1}`,
+ validate: (value) => {
+ if (!value.length) return "Required";
+ return (
+ isValidPrivateKey(value) ||
+ "Private key must be 64 hexadecimal characters, with or without a 0x prefix"
+ );
+ },
+ },
+ promptOptions,
+ );
+
+ additionalWallets.push(walletResponse.privateKey);
+
+ const continueResponse = await prompts(
+ {
+ type: "confirm",
+ name: "addAnotherWallet",
+ message: "Add another publishing wallet?",
+ initial: false,
+ },
+ promptOptions,
+ );
+ shouldAddWallet = continueResponse.addAnotherWallet === true;
+ }
+
+ return additionalWallets;
+}
+
+async function collectAdvancedPublisherOptions(
+ hasEngineDefaultMysqlPassword: boolean,
+) {
+ const connectionResponse = await prompts(
+ [
+ {
+ type: "text",
+ name: "mysqlHost",
+ message: "Publisher MySQL host",
+ initial: "localhost",
+ },
+ {
+ type: "number",
+ name: "mysqlPort",
+ message: "Publisher MySQL port",
+ initial: 3306,
+ min: 0,
+ },
+ {
+ type: "text",
+ name: "mysqlUser",
+ message: "Publisher MySQL username",
+ initial: "root",
+ },
+ {
+ type: "password",
+ name: "mysqlPassword",
+ message: hasEngineDefaultMysqlPassword
+ ? "Publisher MySQL password (leave blank to use the password set during dkg-node install)"
+ : "Publisher MySQL password",
+ validate: (value) =>
+ hasEngineDefaultMysqlPassword || value.length > 0
+ ? true
+ : "Required for async publishing",
+ },
+ {
+ type: "text",
+ name: "mysqlDatabase",
+ message: "Publisher MySQL database name",
+ initial: "dkg_publisher_db",
+ validate: (value) =>
+ isValidMysqlIdentifier(value) ||
+ "Use letters, numbers, and underscores only",
+ },
+ {
+ type: "text",
+ name: "redisHost",
+ message: "Publisher Redis host",
+ initial: "localhost",
+ },
+ {
+ type: "number",
+ name: "redisPort",
+ message: "Publisher Redis port",
+ initial: 6379,
+ min: 0,
+ },
+ {
+ type: "password",
+ name: "redisPassword",
+ message: "Publisher Redis password (leave blank if no password)",
+ },
+ ],
+ promptOptions,
+ );
+
+ const additionalWallets = await collectAdditionalPublisherWallets();
+ const totalWalletCount = 1 + additionalWallets.length;
+ const recommendedWorkerCount = getRecommendedWorkerCount(totalWalletCount);
+ const workerDefaultsResponse = await prompts(
+ [
+ {
+ type: "number",
+ name: "workerCount",
+ message: `Publisher worker count (default: ${recommendedWorkerCount}, based on wallet count; concurrency auto-balances)`,
+ initial: recommendedWorkerCount,
+ min: 1,
+ },
+ {
+ type: "number",
+ name: "pollFrequency",
+ message: "Publisher poll frequency (ms)",
+ initial: 2000,
+ min: 100,
+ },
+ {
+ type: "text",
+ name: "storagePath",
+ message: "Publisher storage path",
+ initial: "./data/publisher",
+ },
+ {
+ type: "text",
+ name: "storageBaseUrl",
+ message: "Publisher storage base URL",
+ initial: "http://localhost:9200/storage",
+ },
+ ],
+ promptOptions,
+ );
+
+ return {
+ ...connectionResponse,
+ ...workerDefaultsResponse,
+ additionalWallets,
+ };
+}
async function setup() {
- const r = await prompts([
+ const enginePassword = await resolveEngineMysqlPassword();
+ printBanner();
+
+ const asyncPublishingChoices =
+ enginePassword.status === "found"
+ ? [
+ {
+ title: "Yes (Recommended)",
+ value: "recommended",
+ },
+ { title: "No", value: "disabled" },
+ { title: "Yes, with advanced configuration", value: "advanced" },
+ ]
+ : [
+ { title: "No (Recommended)", value: "disabled" },
+ { title: "Yes, with advanced configuration", value: "advanced" },
+ ];
+
+ printSection(
+ "DKG Agent LLM Configuration",
+ "Choose the language model provider, credentials, model, and default system prompt for your DKG Agent.",
+ );
+ const llmResponse = await prompts([
{
type: "select",
name: "llmProvider",
@@ -31,13 +271,13 @@ async function setup() {
{
type: "text",
name: "llmModel",
- message: (prev) => "Model name",
- validate: (val) => val.length || "Model name is required",
+ message: "Model name",
+ validate: (value) => value.length || "Model name is required",
},
{
type: "number",
name: "llmTemperature",
- message: (prev) => "Temperature",
+ message: "Temperature",
initial: 1,
min: 0,
max: 1,
@@ -46,29 +286,54 @@ async function setup() {
{
type: "text",
name: "llmSystemPrompt",
- message: (prev) => "System prompt",
+ message: "System prompt",
initial: DEFAULT_SYSTEM_PROMPT,
- format: (val) => (val === DEFAULT_SYSTEM_PROMPT ? "" : val.trim()),
+ format: (value) => (value === DEFAULT_SYSTEM_PROMPT ? "" : value.trim()),
},
+ ], promptOptions);
+
+ printSection(
+ "Document Processing",
+ "Choose how the agent should convert uploaded documents for downstream use.",
+ );
+ const documentResponse = await prompts([
{
type: "select",
name: "docConversionProvider",
message: "Document conversion provider",
choices: [
- { title: "unpdf ā basic PDF only", value: "unpdf" },
- { title: "Mistral OCR ā complex PDF/DOCX/PPTX", value: "mistral" },
+ { title: "unpdf - basic PDF only", value: "unpdf" },
+ { title: "Mistral OCR - complex PDF/DOCX/PPTX", value: "mistral" },
],
initial: 0,
},
{
- type: (_, a) =>
- a.docConversionProvider === "mistral" && a.llmProvider !== "mistralai"
+ type: (_, answers) =>
+ answers.docConversionProvider === "mistral" &&
+ llmResponse.llmProvider !== "mistralai"
? "text"
: null,
name: "mistralApiKey",
message: "MISTRAL_API_KEY",
- validate: (val) => val.length || "Required for Mistral OCR provider",
+ validate: (value) => value.length || "Required for Mistral OCR provider",
},
+ ], promptOptions);
+
+ printSection(
+ "DKG Interaction",
+ "Configure the DKG network, blockchain, and publishing setup for your DKG Agent.",
+ );
+ if (enginePassword.status === "missing-file") {
+ console.log(
+ `DKG Publisher plugin MySQL defaults were not found at ${enginePassword.envPath}. Use advanced async setup if you want to provide them manually.`,
+ );
+ } else if (enginePassword.status === "missing-key") {
+ console.log(
+ `DKG Publisher plugin MySQL defaults are incomplete in ${enginePassword.envPath} because REPOSITORY_PASSWORD is missing. Use advanced async setup if you want to provide the password manually.`,
+ );
+ }
+
+ const publishingResponse = await prompts([
{
type: "select",
name: "dkgEnv",
@@ -80,12 +345,13 @@ async function setup() {
],
},
{
- type: (_, a) => (a.dkgEnv === "development" ? "text" : "select"),
+ type: (_, answers) => (answers.dkgEnv === "development" ? "text" : "select"),
name: "dkgBlockchain",
message: "DKG blockchain",
- initial: (_, a) => (a.dkgEnv === "development" ? "hardhat1:31337" : ""),
- choices: (prev) =>
- prev === "mainnet"
+ initial: (_, answers) =>
+ answers.dkgEnv === "development" ? "hardhat1:31337" : "",
+ choices: (previous) =>
+ previous === "mainnet"
? [
{ title: "NeuroWeb", value: "otp:2043" },
{ title: "Base", value: "base:8453" },
@@ -97,16 +363,56 @@ async function setup() {
{ title: "Gnosis Chiado", value: "gnosis:10200" },
],
},
+ {
+ type: "text",
+ name: "dkgCustomRpc",
+ message: "Custom blockchain RPC (leave blank to use default RPC)",
+ format: (value) => value.trim(),
+ validate: (value) =>
+ !value.trim() ||
+ isValidUrl(value.trim()) ||
+ "Provide a valid URL or leave blank to use the default RPC",
+ },
{
type: "text",
name: "dkgPublishWallet",
message: "Publish wallet private key",
- initial: (_, a) =>
- a.dkgEnv === "development"
- ? "0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80"
+ initial: (_, answers) =>
+ answers.dkgEnv === "development"
+ ? "ac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80"
: "",
- validate: (val) => val.length || "Required",
+ validate: (value) => {
+ if (!value.length) return "Required";
+ return (
+ isValidPrivateKey(value) ||
+ "Private key must be 64 hexadecimal characters, with or without a 0x prefix"
+ );
+ },
},
+ {
+ type: "select",
+ name: "asyncPublishingMode",
+ message:
+ "Enable async publishing (DKG Publisher plugin) on the DKG for smoother publishing, easier status tracking, and queue management?",
+ choices: asyncPublishingChoices,
+ initial: 0,
+ },
+ ], promptOptions);
+
+ const advancedPublisherResponse =
+ publishingResponse.asyncPublishingMode === "advanced"
+ ? (printSection(
+ "Advanced Async Publishing",
+ "Provide MySQL, Redis, wallet, worker, and storage overrides for the DKG Publisher plugin (async publishing).",
+ ),
+ await collectAdvancedPublisherOptions(enginePassword.status === "found"))
+ : null;
+
+ printSection(
+ "Email Configuration",
+ "Configure SMTP settings for password reset emails and other notifications.",
+ );
+ const emailResponse = await prompts([
{
type: "confirm",
name: "smtpEnabled",
@@ -114,105 +420,239 @@ async function setup() {
initial: true,
},
{
- type: (_, a) => (a.smtpEnabled ? "text" : null),
+ type: (_, answers) => (answers.smtpEnabled ? "text" : null),
name: "smtpHost",
message: "SMTP Host",
- validate: (val) => val.length || "Required",
+ validate: (value) => value.length || "Required",
},
{
- type: (_, a) => (a.smtpEnabled ? "number" : null),
+ type: (_, answers) => (answers.smtpEnabled ? "number" : null),
name: "smtpPort",
message: "SMTP Port",
initial: 587,
min: 0,
},
{
- type: (_, a) => (a.smtpEnabled ? "text" : null),
+ type: (_, answers) => (answers.smtpEnabled ? "text" : null),
name: "smtpUsername",
message: "SMTP Username",
},
{
- type: (_, a) => (a.smtpEnabled ? "password" : null),
+ type: (_, answers) => (answers.smtpEnabled ? "password" : null),
name: "smtpPassword",
message: "SMTP Password",
},
{
- type: (_, a) => (a.smtpEnabled ? "confirm" : null),
+ type: (_, answers) => (answers.smtpEnabled ? "confirm" : null),
name: "smtpSecure",
message: "SMTP Secure",
initial: true,
},
{
- type: (_, a) => (a.smtpEnabled ? "text" : null),
+ type: (_, answers) => (answers.smtpEnabled ? "text" : null),
name: "smtpFrom",
message: "SMTP Sender email",
initial: "noreply@example.com",
},
+ ], promptOptions);
+
+ printSection(
+ "DKG Agent Database",
+ "Choose the local SQLite database file used by the DKG Agent.",
+ );
+ const finalResponse = await prompts([
{
type: "text",
name: "dbFilename",
message: "Database filename (e.g. example.db)",
- validate: (val) => val.length || "Required",
- format: (val) => (val.endsWith(".db") ? val : `${val}.db`),
+ validate: (value) => value.length || "Required",
+ format: (value) => (value.endsWith(".db") ? value : `${value}.db`),
},
- ]);
+ ], promptOptions);
- console.log("\nCreating .env file...");
- await createFileWithContent(
+ const response = {
+ ...llmResponse,
+ ...documentResponse,
+ ...publishingResponse,
+ ...(advancedPublisherResponse || {}),
+ ...emailResponse,
+ ...finalResponse,
+ };
+
+ const requestedAsyncPublishing = response.asyncPublishingMode !== "disabled";
+ const appUrl = "http://localhost:9200";
+ const customRpc = response.dkgCustomRpc?.trim() || "";
+ const envPublishWallet = stripPrivateKeyPrefix(response.dkgPublishWallet);
+ const mysqlPassword =
+ response.mysqlPassword?.trim() || enginePassword.mysqlPassword || "";
+ const publisherDefaults =
+ requestedAsyncPublishing && mysqlPassword
+ ? buildPublisherDefaults(appUrl, mysqlPassword, {
+ mysqlHost: response.mysqlHost,
+ mysqlPort: response.mysqlPort,
+ mysqlUser: response.mysqlUser,
+ mysqlDatabase: response.mysqlDatabase,
+ redisHost: response.redisHost,
+ redisPort: response.redisPort,
+ redisPassword: response.redisPassword,
+ workerCount: response.workerCount,
+ pollFrequency: response.pollFrequency,
+ storagePath: response.storagePath,
+ storageBaseUrl: response.storageBaseUrl,
+ })
+ : null;
+
+ if (requestedAsyncPublishing && !publisherDefaults) {
+ throw new Error("Async publishing requires a MySQL password");
+ }
+
+ const additionalPublisherWallets = advancedPublisherResponse?.additionalWallets || [];
+
+ let publisherProvisionResult:
+ | { databaseCreated: boolean; walletsInserted: number }
+ | null = null;
+ let publisherProvisionError: string | null = null;
+
+ if (publisherDefaults) {
+ try {
+ publisherProvisionResult = await provisionAsyncPublishing(
+ publisherDefaults.databaseUrl,
+ [
+ {
+ privateKey: response.dkgPublishWallet,
+ blockchain: response.dkgBlockchain,
+ },
+ ...additionalPublisherWallets.map((privateKey) => ({
+ privateKey,
+ blockchain: response.dkgBlockchain,
+ })),
+ ],
+ );
+ } catch (error: any) {
+ publisherProvisionError = error.message;
+ console.warn(
+ `DKG Publisher plugin provisioning warning: ${publisherProvisionError}`,
+ );
+ }
+ }
+
+ const asyncPublishingEnabled =
+ requestedAsyncPublishing && publisherProvisionError === null;
+
+ const envLines = [
+ "PORT=9200",
+ `EXPO_PUBLIC_MCP_URL=${formatEnvValue(appUrl)}`,
+ `EXPO_PUBLIC_APP_URL=${formatEnvValue(appUrl)}`,
+ `DATABASE_URL=${formatEnvValue(response.dbFilename)}`,
+ `LLM_PROVIDER=${formatEnvValue(response.llmProvider)}`,
+ `LLM_MODEL=${formatEnvValue(response.llmModel)}`,
+ `LLM_TEMPERATURE=${formatEnvValue(String(response.llmTemperature))}`,
+ `LLM_SYSTEM_PROMPT=${formatEnvValue(response.llmSystemPrompt)}`,
+ `${getLLMProviderApiKeyEnvName(response.llmProvider)}=${formatEnvValue(response.llmApiKey)}`,
+ `DKG_PUBLISH_WALLET=${formatEnvValue(envPublishWallet)}`,
+ `DKG_BLOCKCHAIN=${formatEnvValue(response.dkgBlockchain)}`,
+ 'DKG_OTNODE_URL="http://localhost:8900"',
+ `ASYNC_PUBLISHING_ENABLED=${asyncPublishingEnabled ? "true" : "false"}`,
+ `SMTP_HOST=${formatEnvValue(response.smtpHost || "")}`,
+ `SMTP_PORT=${formatEnvValue(String(response.smtpPort || ""))}`,
+ `SMTP_USER=${formatEnvValue(response.smtpUsername || "")}`,
+ `SMTP_PASS=${formatEnvValue(response.smtpPassword || "")}`,
+ `SMTP_SECURE=${response.smtpSecure === undefined ? "true" : response.smtpSecure}`,
+ `SMTP_FROM=${formatEnvValue(response.smtpFrom || "")}`,
+ `DOCUMENT_CONVERSION_PROVIDER=${formatEnvValue(response.docConversionProvider)}`,
+ ];
+
+ if (customRpc) {
+ envLines.push(`DKG_NODE_CUSTOM_RPC=${formatEnvValue(customRpc)}`);
+ }
+
+ if (
+ response.docConversionProvider === "mistral" &&
+ response.llmProvider !== "mistralai"
+ ) {
+ envLines.push(`MISTRAL_API_KEY=${formatEnvValue(response.mistralApiKey)}`);
+ }
+
+ if (publisherDefaults) {
+ envLines.push(`DKGP_DATABASE_URL=${formatEnvValue(publisherDefaults.databaseUrl)}`);
+ envLines.push(`REDIS_URL=${formatEnvValue(publisherDefaults.redisUrl)}`);
+ envLines.push(`WORKER_COUNT=${formatEnvValue(String(publisherDefaults.workerCount))}`);
+
+ if (response.asyncPublishingMode === "advanced") {
+ envLines.push(
+ `POLL_FREQUENCY=${formatEnvValue(String(publisherDefaults.pollFrequency))}`,
+ );
+ envLines.push(`STORAGE_TYPE="filesystem"`);
+ envLines.push(`STORAGE_PATH=${formatEnvValue(publisherDefaults.storagePath)}`);
+ envLines.push(
+ `STORAGE_BASE_URL=${formatEnvValue(publisherDefaults.storageBaseUrl)}`,
+ );
+ }
+ }
+
+ console.log("\nWriting .env file...");
+ await writeFileWithContent(
path.resolve(process.cwd(), ".env"),
- `PORT=9200
-EXPO_PUBLIC_MCP_URL="http://localhost:9200"
-EXPO_PUBLIC_APP_URL="http://localhost:9200"
-DATABASE_URL="${r.dbFilename}"
-LLM_PROVIDER="${r.llmProvider}"
-LLM_MODEL="${r.llmModel}"
-LLM_TEMPERATURE="${r.llmTemperature}"
-LLM_SYSTEM_PROMPT="${r.llmSystemPrompt}"
-${getLLMProviderApiKeyEnvName(r.llmProvider)}="${r.llmApiKey}"
-DKG_PUBLISH_WALLET="${r.dkgPublishWallet}"
-DKG_BLOCKCHAIN="${r.dkgBlockchain}"
-DKG_OTNODE_URL="http://localhost:8900"
-SMTP_HOST="${r.smtpHost || ""}"
-SMTP_PORT="${r.smtpPort || ""}"
-SMTP_USER="${r.smtpUsername || ""}"
-SMTP_PASS="${r.smtpPassword || ""}"
-SMTP_SECURE=${r.smtpSecure === undefined ? "true" : r.smtpSecure}
-SMTP_FROM="${r.smtpFrom || ""}"
-DOCUMENT_CONVERSION_PROVIDER="${r.docConversionProvider}"
-${r.docConversionProvider === "mistral" && r.llmProvider !== "mistralai" ? `MISTRAL_API_KEY="${r.mistralApiKey}"\n` : ""}`,
+ `${envLines.join("\n")}\n`,
);
- console.log("Creating .env.development.local file...");
+ console.log("Ensuring .env.development.local exists...");
await createFileWithContent(
path.resolve(process.cwd(), ".env.development.local"),
- `# These values will override the .env file during the development
+ `# These values will override the .env file during development
EXPO_PUBLIC_APP_URL="http://localhost:8081"
`,
);
configEnv();
+ process.env.DATABASE_URL = response.dbFilename;
+ process.env.EXPO_PUBLIC_MCP_URL = appUrl;
+ process.env.EXPO_PUBLIC_APP_URL = appUrl;
console.log("Configuring database...");
console.log("Running migrations...");
const db = configDatabase();
console.log("Creating admin user...");
- const userId = await createUser(
- db,
- {
- email: "admin@example.com",
- password: "admin123",
- },
- ["mcp", "llm", "blob", "scope123"],
- );
- console.log(`Created admin user:
- ID: ${userId}
+ try {
+ const user = await createUser(
+ db,
+ {
+ email: "admin@example.com",
+ password: "admin123",
+ },
+ ["mcp", "llm", "blob", "scope123"],
+ );
+
+ console.log(`Created admin user:
+ ID: ${user.id}
Email: admin@example.com
Password: admin123
Scope: mcp, llm, blob, scope123
To create new users, run 'npm run script:createUser' inside of the agent directory.
`);
+ } catch (error: any) {
+ if (error.message?.includes("already exists")) {
+ console.log("Admin user already exists. Skipping creation.");
+ } else {
+ throw error;
+ }
+ }
+
+ console.log(`Async publishing: ${asyncPublishingEnabled ? "enabled" : "disabled"}`);
+ if (publisherDefaults) {
+ if (publisherProvisionResult) {
+ console.log(
+ `DKG Publisher plugin DB ready: created=${publisherProvisionResult.databaseCreated}, walletsInserted=${publisherProvisionResult.walletsInserted}`,
+ );
+ }
+ if (publisherProvisionError) {
+ console.log(
+ `DKG Publisher plugin provisioning warning: ${publisherProvisionError}. Async publishing was left disabled in apps/agent/.env.`,
+ );
+ }
+ }
}
setup()
diff --git a/apps/agent/src/server/setupPublisher.ts b/apps/agent/src/server/setupPublisher.ts
new file mode 100644
index 00000000..afad1bfc
--- /dev/null
+++ b/apps/agent/src/server/setupPublisher.ts
@@ -0,0 +1,553 @@
+import path from "path";
+import { promises as fs } from "fs";
+import dotenv from "dotenv";
+import mysql from "mysql2/promise";
+import { provisionPublisherDatabase } from "@dkg/plugin-dkg-publisher/provision";
+import {
+ isValidPrivateKey,
+ normalizePrivateKey,
+ stripPrivateKeyPrefix,
+} from "@dkg/plugin-dkg-publisher/privateKey";
+
+export type AsyncPublishingMode = "disabled" | "recommended" | "advanced";
+
+export interface EnginePasswordResolution {
+ envPath: string;
+ mysqlPassword: string | null;
+ status: "found" | "missing-file" | "missing-key";
+}
+
+export interface PublisherAdvancedOptions {
+ mysqlHost?: string;
+ mysqlPort?: number;
+ mysqlUser?: string;
+ mysqlDatabase?: string;
+ redisHost?: string;
+ redisPort?: number;
+ redisPassword?: string;
+ redisUrl?: string;
+ workerCount?: number;
+ pollFrequency?: number;
+ storagePath?: string;
+ storageBaseUrl?: string;
+}
+
+export interface AgentEnvState {
+ envPath: string;
+ content: string;
+ values: Record<string, string>;
+}
+
+export interface PublisherMysqlConfig {
+ host: string;
+ port: number;
+ user: string;
+ password: string;
+ database: string;
+}
+
+export interface PublisherRedisConfig {
+ host: string;
+ port: number;
+ password: string;
+}
+
+export interface PublisherResolvedConfig {
+ databaseUrl: string;
+ redisUrl: string;
+ workerCount: number;
+ pollFrequency: number;
+ storagePath: string;
+ storageBaseUrl: string;
+ mysql: PublisherMysqlConfig;
+ redis: PublisherRedisConfig;
+}
+
+export interface PublisherWalletRecord {
+ id: number;
+ address: string;
+ blockchain: string;
+ isActive: boolean;
+ isLocked: boolean;
+ lockedAt: Date | null;
+ lastUsedAt: Date | null;
+ totalUses: number;
+ successfulUses: number;
+ failedUses: number;
+ createdAt: Date | null;
+}
+
+export interface PublisherWalletActivationResult {
+ id: number;
+ address: string;
+ isActive: boolean;
+ wasLocked: boolean;
+ forcedUnlock: boolean;
+}
+
+export interface PublisherResetResult {
+ droppedTables: string[];
+ databaseCreated: boolean;
+ walletsInserted: number;
+}
+
+const MYSQL_IDENTIFIER_PATTERN = /^[A-Za-z0-9_]+$/;
+const ENV_KEY_PATTERN = /^\s*([A-Za-z_][A-Za-z0-9_]*)\s*=/;
+const DEFAULT_APP_URL = "http://localhost:9200";
+const DEFAULT_STORAGE_PATH = "./data/publisher";
+const DEFAULT_POLL_FREQUENCY = 2000;
+const PUBLISHER_TABLES = [
+ "__drizzle_migrations",
+ "wallet_metrics",
+ "publishing_attempts",
+ "assets",
+ "wallets",
+ "batches",
+ "metrics_hourly",
+] as const;
+
+export function resolveNodeRoot(currentWorkingDirectory = process.cwd()) {
+ return path.resolve(currentWorkingDirectory, "../..");
+}
+
+export function resolveAgentEnvPath(currentWorkingDirectory = process.cwd()) {
+ return path.resolve(currentWorkingDirectory, ".env");
+}
+
+function parsePositiveInt(value: string | undefined, fallback: number) {
+ const parsed = Number.parseInt(value || "", 10);
+ return Number.isFinite(parsed) && parsed > 0 ? parsed : fallback;
+}
+
+function serializeEnvValue(value: string | number | boolean) {
+ if (typeof value === "number" || typeof value === "boolean") {
+ return `${value}`;
+ }
+
+ return `"${value.replace(/\\/g, "\\\\").replace(/"/g, '\\"')}"`;
+}
+
+function parseMysqlConnectionString(databaseUrl: string): PublisherMysqlConfig {
+ const parsed = new URL(databaseUrl);
+ const database = decodeURIComponent(parsed.pathname.replace(/^\//, ""));
+
+ if (!database) {
+ throw new Error("DKGP_DATABASE_URL must include a database name");
+ }
+
+ if (!MYSQL_IDENTIFIER_PATTERN.test(database)) {
+ throw new Error(
+ "DKGP_DATABASE_URL contains an invalid database name. Use letters, numbers, and underscores only.",
+ );
+ }
+
+ return {
+ host: parsed.hostname || "localhost",
+ port: parsePositiveInt(parsed.port, 3306),
+ user: decodeURIComponent(parsed.username || "root"),
+ password: decodeURIComponent(parsed.password || ""),
+ database,
+ };
+}
+
+function parseRedisConnectionString(redisUrl: string): PublisherRedisConfig {
+ const parsed = new URL(redisUrl);
+ return {
+ host: parsed.hostname || "localhost",
+ port: parsePositiveInt(parsed.port, 6379),
+ password: decodeURIComponent(parsed.password || ""),
+ };
+}
+
+function normalizeRedisUrlCandidate(value: string) {
+ const trimmed = value.trim();
+ if (!trimmed) {
+ return "";
+ }
+
+ if (trimmed.startsWith("redis://") || trimmed.startsWith("rediss://")) {
+ return trimmed;
+ }
+
+ if (!trimmed.includes("://")) {
+ return `redis://${trimmed}`;
+ }
+
+ return trimmed;
+}
+
+async function withPublisherConnection<T>(
+ databaseUrl: string,
+ operation: (connection: mysql.Connection) => Promise<T>,
+) {
+ const connection = await mysql.createConnection(databaseUrl);
+ try {
+ return await operation(connection);
+ } finally {
+ await connection.end();
+ }
+}
+
+export async function resolveEngineMysqlPassword(): Promise<EnginePasswordResolution> {
+ const envPath =
+ process.env.DKG_ENGINE_ENV_PATH ||
+ path.join(resolveNodeRoot(), "dkg-engine/current/.env");
+
+ try {
+ const envFile = await fs.readFile(envPath, "utf8");
+ const parsedEnv = dotenv.parse(envFile);
+ const mysqlPassword = parsedEnv.REPOSITORY_PASSWORD?.trim();
+
+ if (!mysqlPassword) {
+ return {
+ envPath,
+ mysqlPassword: null,
+ status: "missing-key",
+ };
+ }
+
+ return {
+ envPath,
+ mysqlPassword,
+ status: "found",
+ };
+ } catch (error: any) {
+ if (error?.code === "ENOENT") {
+ return {
+ envPath,
+ mysqlPassword: null,
+ status: "missing-file",
+ };
+ }
+
+ throw error;
+ }
+}
+
+export function buildPublisherDatabaseUrl(
+ mysqlPassword: string,
+ options: PublisherAdvancedOptions = {},
+) {
+ const mysqlUser = options.mysqlUser || "root";
+ const mysqlHost = options.mysqlHost || "localhost";
+ const mysqlPort = options.mysqlPort || 3306;
+ const mysqlDatabase = options.mysqlDatabase || "dkg_publisher_db";
+
+ if (!MYSQL_IDENTIFIER_PATTERN.test(mysqlDatabase)) {
+ throw new Error(
+ "Publisher MySQL database name may contain only letters, numbers, and underscores",
+ );
+ }
+
+ const encodedUser = encodeURIComponent(mysqlUser);
+ const encodedPassword = encodeURIComponent(mysqlPassword);
+
+ return `mysql://${encodedUser}:${encodedPassword}@${mysqlHost}:${mysqlPort}/${mysqlDatabase}`;
+}
+
+export function isValidMysqlIdentifier(value: string) {
+ return MYSQL_IDENTIFIER_PATTERN.test(value);
+}
+
+export { isValidPrivateKey, normalizePrivateKey, stripPrivateKeyPrefix };
+
+export async function readAgentEnv(
+ envPath = resolveAgentEnvPath(),
+): Promise<AgentEnvState> {
+ const content = await fs.readFile(envPath, "utf8");
+ return {
+ envPath,
+ content,
+ values: dotenv.parse(content),
+ };
+}
+
+export async function upsertAgentEnvValues(
+ updates: Record<string, string | number | boolean | null | undefined>,
+ envPath = resolveAgentEnvPath(),
+) {
+ let existingContent = "";
+ try {
+ existingContent = await fs.readFile(envPath, "utf8");
+ } catch (error: any) {
+ if (error?.code !== "ENOENT") {
+ throw error;
+ }
+ }
+
+ const lines = existingContent.length
+ ? existingContent.split(/\r?\n/)
+ : ([] as string[]);
+ const pending = new Map<string, string | null>();
+
+ for (const [key, value] of Object.entries(updates)) {
+ if (value === undefined) {
+ continue;
+ }
+ if (value === null) {
+ pending.set(key, null);
+ continue;
+ }
+ pending.set(key, `${key}=${serializeEnvValue(value)}`);
+ }
+
+ const nextLines: string[] = [];
+ const seenEnvKeys = new Set<string>();
+ for (const line of lines) {
+ const match = line.match(ENV_KEY_PATTERN);
+ if (!match) {
+ nextLines.push(line);
+ continue;
+ }
+
+ const key = match[1];
+ if (!key) {
+ nextLines.push(line);
+ continue;
+ }
+
+ if (seenEnvKeys.has(key)) {
+ // Keep only the first occurrence for each env key to avoid ambiguous duplicates.
+ continue;
+ }
+ seenEnvKeys.add(key);
+
+ const replacement = pending.get(key);
+ if (replacement !== undefined) {
+ if (replacement !== null) {
+ nextLines.push(replacement);
+ }
+ pending.delete(key);
+ continue;
+ }
+
+ nextLines.push(line);
+ }
+
+ for (const replacement of pending.values()) {
+ if (replacement !== null) {
+ nextLines.push(replacement);
+ }
+ }
+
+ const nextContent = `${nextLines.join("\n").replace(/\n*$/, "")}\n`;
+ await fs.writeFile(envPath, nextContent, "utf8");
+ return readAgentEnv(envPath);
+}
+
+export function buildRedisUrl(options: PublisherAdvancedOptions = {}) {
+ if (options.redisUrl) {
+ return options.redisUrl;
+ }
+
+ const redisHost = options.redisHost || "localhost";
+ const redisPort = options.redisPort || 6379;
+ const redisPassword = options.redisPassword?.trim();
+ const auth = redisPassword ? `:${encodeURIComponent(redisPassword)}@` : "";
+
+ return `redis://${auth}${redisHost}:${redisPort}`;
+}
+
+export function buildPublisherDefaults(
+ appUrl: string,
+ mysqlPassword: string,
+ options: PublisherAdvancedOptions = {},
+) {
+ return {
+ databaseUrl: buildPublisherDatabaseUrl(mysqlPassword, options),
+ redisUrl: buildRedisUrl(options),
+ workerCount: options.workerCount || 1,
+ pollFrequency: options.pollFrequency || 2000,
+ storagePath: options.storagePath || "./data/publisher",
+ storageBaseUrl:
+ options.storageBaseUrl ||
+ new URL("/storage", appUrl).toString().replace(/\/$/, ""),
+ };
+}
+
+export function resolvePublisherConfigFromAgentEnv(
+ envValues: Record<string, string>,
+ appUrl = DEFAULT_APP_URL,
+): PublisherResolvedConfig | null {
+ const databaseUrl = envValues.DKGP_DATABASE_URL;
+ if (!databaseUrl) {
+ return null;
+ }
+
+ const mysql = parseMysqlConnectionString(databaseUrl);
+ const fallbackRedisUrl = `redis://${envValues.REDIS_PASSWORD ? `:${encodeURIComponent(envValues.REDIS_PASSWORD)}@` : ""}${envValues.REDIS_HOST || "localhost"}:${envValues.REDIS_PORT || "6379"}`;
+ const redisUrlCandidate = normalizeRedisUrlCandidate(envValues.REDIS_URL || "");
+
+ let redisUrl = fallbackRedisUrl;
+ let redis: PublisherRedisConfig;
+ try {
+ if (redisUrlCandidate) {
+ redis = parseRedisConnectionString(redisUrlCandidate);
+ redisUrl = redisUrlCandidate;
+ } else {
+ redis = parseRedisConnectionString(fallbackRedisUrl);
+ }
+ } catch {
+ // Recover from malformed REDIS_URL in existing env by falling back to host/port/password.
+ redis = parseRedisConnectionString(fallbackRedisUrl);
+ redisUrl = fallbackRedisUrl;
+ }
+
+ return {
+ databaseUrl,
+ redisUrl,
+ workerCount: parsePositiveInt(envValues.WORKER_COUNT, 1),
+ pollFrequency: parsePositiveInt(
+ envValues.POLL_FREQUENCY,
+ DEFAULT_POLL_FREQUENCY,
+ ),
+ storagePath: envValues.STORAGE_PATH || DEFAULT_STORAGE_PATH,
+ storageBaseUrl:
+ envValues.STORAGE_BASE_URL ||
+ new URL("/storage", appUrl).toString().replace(/\/$/, ""),
+ mysql,
+ redis,
+ };
+}
+
+export async function provisionAsyncPublishing(
+ databaseUrl: string,
+ walletSeeds: Array<{
+ privateKey: string;
+ blockchain: string;
+ }> = [],
+) {
+ return provisionPublisherDatabase(databaseUrl, walletSeeds);
+}
+
+export async function listPublisherWallets(
+ databaseUrl: string,
+): Promise<PublisherWalletRecord[]> {
+ return withPublisherConnection(databaseUrl, async (connection) => {
+ const [rows] = await connection.query<mysql.RowDataPacket[]>(
+ `SELECT
+ id,
+ address,
+ blockchain,
+ is_active AS isActive,
+ is_locked AS isLocked,
+ locked_at AS lockedAt,
+ last_used_at AS lastUsedAt,
+ total_uses AS totalUses,
+ successful_uses AS successfulUses,
+ failed_uses AS failedUses,
+ created_at AS createdAt
+ FROM wallets
+ ORDER BY created_at ASC, id ASC`,
+ );
+
+ return rows.map((row) => ({
+ id: Number(row.id),
+ address: row.address,
+ blockchain: row.blockchain,
+ isActive: Boolean(row.isActive),
+ isLocked: Boolean(row.isLocked),
+ lockedAt: row.lockedAt ? new Date(row.lockedAt) : null,
+ lastUsedAt: row.lastUsedAt ? new Date(row.lastUsedAt) : null,
+ totalUses: Number(row.totalUses || 0),
+ successfulUses: Number(row.successfulUses || 0),
+ failedUses: Number(row.failedUses || 0),
+ createdAt: row.createdAt ? new Date(row.createdAt) : null,
+ }));
+ });
+}
+
+export async function addPublisherWallets(
+ databaseUrl: string,
+ walletSeeds: Array<{ privateKey: string; blockchain: string }>,
+) {
+ if (!walletSeeds.length) {
+ return { walletsInserted: 0 };
+ }
+
+ const result = await provisionPublisherDatabase(databaseUrl, walletSeeds);
+ return { walletsInserted: result.walletsInserted };
+}
+
+export async function setPublisherWalletActive(
+ databaseUrl: string,
+ walletId: number,
+ isActive: boolean,
+ options: { forceUnlock?: boolean } = {},
+): Promise<PublisherWalletActivationResult> {
+ return withPublisherConnection(databaseUrl, async (connection) => {
+ const [rows] = await connection.query<mysql.RowDataPacket[]>(
+ `SELECT id, address, is_locked AS isLocked FROM wallets WHERE id = ? LIMIT 1`,
+ [walletId],
+ );
+
+ if (!rows.length) {
+ throw new Error(`Wallet with id ${walletId} was not found`);
+ }
+
+ const row = rows[0];
+ const wasLocked = Boolean(row.isLocked);
+
+ if (!isActive && wasLocked && !options.forceUnlock) {
+ throw new Error(
+ `Wallet ${walletId} is currently locked and cannot be deactivated without force unlock`,
+ );
+ }
+
+ let forcedUnlock = false;
+ if (!isActive && wasLocked && options.forceUnlock) {
+ await connection.execute(
+ `UPDATE wallets SET is_locked = FALSE, locked_at = NULL WHERE id = ?`,
+ [walletId],
+ );
+ forcedUnlock = true;
+ }
+
+ await connection.execute(`UPDATE wallets SET is_active = ? WHERE id = ?`, [
+ isActive,
+ walletId,
+ ]);
+
+ return {
+ id: Number(row.id),
+ address: row.address,
+ isActive,
+ wasLocked,
+ forcedUnlock,
+ };
+ });
+}
+
+export async function resetPublisherDatabase(
+ databaseUrl: string,
+ walletSeeds: Array<{
+ privateKey: string;
+ blockchain: string;
+ }> = [],
+): Promise<PublisherResetResult> {
+ try {
+ await withPublisherConnection(databaseUrl, async (connection) => {
+ await connection.execute("SET FOREIGN_KEY_CHECKS = 0");
+ try {
+ for (const tableName of PUBLISHER_TABLES) {
+ await connection.execute(`DROP TABLE IF EXISTS \`${tableName}\``);
+ }
+ } finally {
+ await connection.execute("SET FOREIGN_KEY_CHECKS = 1");
+ }
+ });
+ } catch (error: any) {
+ // Database may not exist yet; provisioning below will create it.
+ if (error?.code !== "ER_BAD_DB_ERROR") {
+ throw error;
+ }
+ }
+
+ const provisionResult = await provisionPublisherDatabase(databaseUrl, walletSeeds);
+
+ return {
+ droppedTables: [...PUBLISHER_TABLES],
+ databaseCreated: provisionResult.databaseCreated,
+ walletsInserted: provisionResult.walletsInserted,
+ };
+}
diff --git a/apps/agent/tests/integration/dkg-publisher.env b/apps/agent/tests/integration/dkg-publisher.env
index a447b702..360ce81e 100644
--- a/apps/agent/tests/integration/dkg-publisher.env
+++ b/apps/agent/tests/integration/dkg-publisher.env
@@ -5,12 +5,10 @@
DKGP_DATABASE_URL=sqlite://./test-dkg-publisher.db
# Redis (test instance - will be started by test environment)
-REDIS_HOST=localhost
-REDIS_PORT=6379
-# REDIS_PASSWORD= (empty for test Redis)
+REDIS_URL=redis://localhost:6379
# DKG Network (testnet)
-DKG_ENDPOINT=http://localhost:8900
+DKG_OTNODE_URL=http://localhost:8900
DKG_BLOCKCHAIN=otp:20430
# Storage (test directory)
diff --git a/apps/agent/tests/integration/setup/test-server.ts b/apps/agent/tests/integration/setup/test-server.ts
index 0d79917d..aad91b28 100644
--- a/apps/agent/tests/integration/setup/test-server.ts
+++ b/apps/agent/tests/integration/setup/test-server.ts
@@ -133,7 +133,7 @@ export async function createTestServer(config: TestServerConfig = {}): Promise<{
port: "8900",
blockchain: {
name: process.env.DKG_BLOCKCHAIN || "hardhat1:31337",
- privateKey: process.env.DKG_PUBLISH_WALLET || "0x" + "a".repeat(64),
+ privateKey: process.env.DKG_PUBLISH_WALLET || "a".repeat(64),
},
maxNumberOfRetries: 300,
frequency: 2,
diff --git a/apps/agent/tests/unit/setupPublisher.spec.ts b/apps/agent/tests/unit/setupPublisher.spec.ts
new file mode 100644
index 00000000..28f404c6
--- /dev/null
+++ b/apps/agent/tests/unit/setupPublisher.spec.ts
@@ -0,0 +1,140 @@
+import { expect } from "chai";
+import os from "os";
+import path from "path";
+import { mkdtemp, readFile, rm, writeFile } from "fs/promises";
+
+import {
+ resolvePublisherConfigFromAgentEnv,
+ upsertAgentEnvValues,
+} from "../../src/server/setupPublisher";
+
+describe("setupPublisher helpers", () => {
+ it("resolves publisher config from consolidated agent env values", () => {
+ const resolved = resolvePublisherConfigFromAgentEnv(
+ {
+ DKGP_DATABASE_URL:
+ "mysql://root:my%21pass@localhost:3306/dkg_publisher_db",
+ REDIS_URL: "redis://:redis%23secret@localhost:6379",
+ WORKER_COUNT: "3",
+ POLL_FREQUENCY: "1500",
+ STORAGE_PATH: "./publisher-data",
+ STORAGE_BASE_URL: "http://localhost:9200/storage",
+ },
+ "http://localhost:9200",
+ );
+
+ expect(resolved).to.not.equal(null);
+ expect(resolved!.mysql.user).to.equal("root");
+ expect(resolved!.mysql.password).to.equal("my!pass");
+ expect(resolved!.mysql.database).to.equal("dkg_publisher_db");
+ expect(resolved!.redis.password).to.equal("redis#secret");
+ expect(resolved!.workerCount).to.equal(3);
+ expect(resolved!.pollFrequency).to.equal(1500);
+ expect(resolved!.storagePath).to.equal("./publisher-data");
+ });
+
+ it("resolves redis settings from host/port/password when REDIS_URL is missing", () => {
+ const resolved = resolvePublisherConfigFromAgentEnv(
+ {
+ DKGP_DATABASE_URL:
+ "mysql://root:password@localhost:3306/dkg_publisher_db",
+ REDIS_HOST: "redis.internal",
+ REDIS_PORT: "6380",
+ REDIS_PASSWORD: "redis#secret",
+ },
+ "http://localhost:9200",
+ );
+
+ expect(resolved).to.not.equal(null);
+ expect(resolved!.redisUrl).to.equal(
+ "redis://:redis%23secret@redis.internal:6380",
+ );
+ expect(resolved!.redis.host).to.equal("redis.internal");
+ expect(resolved!.redis.port).to.equal(6380);
+ expect(resolved!.redis.password).to.equal("redis#secret");
+ });
+
+ it("recovers from legacy REDIS_URL format without protocol", () => {
+ const resolved = resolvePublisherConfigFromAgentEnv(
+ {
+ DKGP_DATABASE_URL:
+ "mysql://root:password@localhost:3306/dkg_publisher_db",
+ REDIS_URL: "localhost:6379",
+ },
+ "http://localhost:9200",
+ );
+
+ expect(resolved).to.not.equal(null);
+ expect(resolved!.redisUrl).to.equal("redis://localhost:6379");
+ expect(resolved!.redis.host).to.equal("localhost");
+ expect(resolved!.redis.port).to.equal(6379);
+ });
+
+ it("updates existing keys and appends missing keys when writing agent env", async () => {
+ const tempDir = await mkdtemp(path.join(os.tmpdir(), "dkg-agent-env-"));
+ const envPath = path.join(tempDir, ".env");
+
+ try {
+ await writeFile(
+ envPath,
+ [
+ 'ASYNC_PUBLISHING_ENABLED="false"',
+ 'DKG_BLOCKCHAIN="otp:20430"',
+ ].join("\n"),
+ "utf8",
+ );
+
+ await upsertAgentEnvValues(
+ {
+ ASYNC_PUBLISHING_ENABLED: true,
+ DKGP_DATABASE_URL: "mysql://root:password@localhost:3306/dkg_publisher_db",
+ },
+ envPath,
+ );
+
+ const updated = await readFile(envPath, "utf8");
+ expect(updated).to.contain("ASYNC_PUBLISHING_ENABLED=true");
+ expect(updated).to.contain('DKG_BLOCKCHAIN="otp:20430"');
+ expect(updated).to.contain(
+ 'DKGP_DATABASE_URL="mysql://root:password@localhost:3306/dkg_publisher_db"',
+ );
+ } finally {
+ await rm(tempDir, { recursive: true, force: true });
+ }
+ });
+
+ it("removes keys and collapses duplicate entries when writing agent env", async () => {
+ const tempDir = await mkdtemp(path.join(os.tmpdir(), "dkg-agent-env-"));
+ const envPath = path.join(tempDir, ".env");
+
+ try {
+ await writeFile(
+ envPath,
+ [
+ 'POLL_FREQUENCY="5000"',
+ 'POLL_FREQUENCY="9000"',
+ 'ASYNC_PUBLISHING_ENABLED="false"',
+ ].join("\n"),
+ "utf8",
+ );
+
+ await upsertAgentEnvValues(
+ {
+ ASYNC_PUBLISHING_ENABLED: true,
+ POLL_FREQUENCY: null,
+ },
+ envPath,
+ );
+
+ const updated = await readFile(envPath, "utf8");
+ const asyncMatches = updated.match(/^ASYNC_PUBLISHING_ENABLED=/gm) || [];
+ const pollMatches = updated.match(/^POLL_FREQUENCY=/gm) || [];
+
+ expect(asyncMatches.length).to.equal(1);
+ expect(updated).to.contain("ASYNC_PUBLISHING_ENABLED=true");
+ expect(pollMatches.length).to.equal(0);
+ } finally {
+ await rm(tempDir, { recursive: true, force: true });
+ }
+ });
+});
diff --git a/docs/build-a-dkg-node-ai-agent/plugins/epcis-plugin.md b/docs/build-a-dkg-node-ai-agent/plugins/epcis-plugin.md
index 6fcdd38f..e2525b06 100644
--- a/docs/build-a-dkg-node-ai-agent/plugins/epcis-plugin.md
+++ b/docs/build-a-dkg-node-ai-agent/plugins/epcis-plugin.md
@@ -25,11 +25,11 @@ It provides both HTTP endpoints and MCP tools for:
1. Enable EPCIS + publisher plugins in server plugin registration (this is not enabled by default in this repo):
- `apps/agent/src/server/index.ts` should include `epcisPlugin` in the `plugins` array.
- - `apps/agent/src/server/index.ts` should include `dkgPublisherPlugin` in the `plugins` array.
+ - `apps/agent/src/server/index.ts` loads `dkgPublisherPlugin` when `ASYNC_PUBLISHING_ENABLED=true`.
- If you want route-level EPCIS scope enforcement, apply `applyEpcisHttpScopeGuards(api, authorized)` in the auth middleware plugin.
-2. Run publisher plugin setup:
- - `cd packages/plugin-dkg-publisher && npm run setup`
- - This initializes publisher configuration (including `.env.publisher`) for the publisher flow.
+2. Run the main Agent setup and enable async publishing when prompted:
+ - `cd apps/agent && npm run script:setup`
+ - This initializes the consolidated Agent + Publisher configuration in `apps/agent/.env`.
3. Configure runtime environment:
- `EXPO_PUBLIC_MCP_URL=http://localhost:9200` (local same-host setup)
4. Create a token with EPCIS scopes:
diff --git a/package-lock.json b/package-lock.json
index ca2e6cc8..648a017b 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -52,6 +52,7 @@
"dependencies": {
"@dkg/expo-forcegraph": "^0.0.0",
"@dkg/plugin-dkg-essentials": "^0.0.3",
+ "@dkg/plugin-dkg-publisher": "^1.0.0",
"@dkg/plugin-example": "^0.0.3",
"@dkg/plugin-oauth": "^0.0.2",
"@dkg/plugin-swagger": "^0.0.2",
diff --git a/packages/plugin-dkg-essentials/src/plugins/dkg-tools.ts b/packages/plugin-dkg-essentials/src/plugins/dkg-tools.ts
index f409de48..436e4ff1 100644
--- a/packages/plugin-dkg-essentials/src/plugins/dkg-tools.ts
+++ b/packages/plugin-dkg-essentials/src/plugins/dkg-tools.ts
@@ -1,6 +1,6 @@
import consumers from "stream/consumers";
import { defineDkgPlugin } from "@dkg/plugins";
-import { z } from "@dkg/plugins/helpers";
+import { openAPIRoute, z } from "@dkg/plugin-swagger";
import {
CompleteResourceTemplateCallback,
ResourceTemplate,
@@ -9,8 +9,12 @@ import {
import { BLOCKCHAIN_IDS } from "dkg.js/constants";
import { getExplorerUrl, validateSparqlQuery } from "../utils";
-export default defineDkgPlugin((ctx, mcp) => {
-
+type SupportedQueryType = "SELECT" | "CONSTRUCT";
+type SparqlValidationResult =
+ | { valid: true; queryType: SupportedQueryType }
+ | { valid: false; error: string };
+
+export default defineDkgPlugin((ctx, mcp, api) => {
async function publishJsonLdAsset(
jsonldRaw: string,
privacy: "private" | "public",
@@ -31,6 +35,81 @@ export default defineDkgPlugin((ctx, mcp) => {
}
}
+ async function resolveJsonLdInput(jsonld: string): Promise<string> {
+ if (!jsonld) {
+ console.error("No JSON-LD content provided after file read.");
+ throw new Error("No JSON-LD content provided.");
+ }
+
+ if (jsonld.startsWith("{") || jsonld.startsWith("[")) {
+ return jsonld;
+ }
+
+ const blob = await ctx.blob.get(jsonld);
+ if (!blob) {
+ console.error(`File with id "${jsonld}" not found`);
+ throw new Error(`File with id "${jsonld}" not found`);
+ }
+
+ return consumers.text(blob.data);
+ }
+
+ function validateSparqlInput(query: string): SparqlValidationResult {
+ const validation = validateSparqlQuery(query);
+ if (!validation.valid) {
+ return {
+ valid: false,
+ error: validation.error || "Invalid SPARQL query",
+ };
+ }
+
+ if (!validation.queryType) {
+ throw new Error(
+ "Internal error: queryType missing after successful validation",
+ );
+ }
+
+ return {
+ valid: true,
+ queryType: validation.queryType as SupportedQueryType,
+ };
+ }
+
+ async function runSparqlQuery(
+ query: string,
+ queryType: SupportedQueryType,
+ ) {
+ return ctx.dkg.graph.query(query, queryType);
+ }
+
+ function formatSparqlResult(queryResult: unknown, queryType: SupportedQueryType) {
+ const isConstructQuery = queryType === "CONSTRUCT";
+ const hasDataProperty =
+ typeof queryResult === "object" &&
+ queryResult !== null &&
+ "data" in queryResult;
+
+ if (
+ isConstructQuery &&
+ hasDataProperty &&
+ typeof queryResult.data === "string"
+ ) {
+ return {
+ codeBlockLang: "ntriples",
+ resultText: queryResult.data,
+ };
+ }
+
+ return {
+ codeBlockLang: "json",
+ resultText: JSON.stringify(queryResult, null, 2),
+ };
+ }
+
+ async function getAssetByUal(ual: string) {
+ return ctx.dkg.asset.get(ual);
+ }
+
const ualCompleteOptions: Record<string, CompleteResourceTemplateCallback> = {
blockchainName: (val) =>
(Object.values(BLOCKCHAIN_IDS) as string[]).reduce(
@@ -119,8 +198,9 @@ export default defineDkgPlugin((ctx, mcp) => {
{
title: "DKG Knowledge Asset create tool",
description:
- "A tool for creating and publishing Knowledge Assets on OriginTrail Decentralized Knowledge Graph (DKG), " +
- "taking either a single JSON-LD string or a single file id as input. " +
+ "A synchronous direct-publish tool for creating and publishing Knowledge Assets on OriginTrail Decentralized Knowledge Graph (DKG). " +
+ "Use this when you want immediate publish execution and direct UAL output. " +
+ "Takes either a single JSON-LD string or a single file id as input. " +
"Optionally, you can specify privacy as 'private' or 'public' (default: 'private').",
inputSchema: {
jsonld: z
@@ -130,21 +210,8 @@ export default defineDkgPlugin((ctx, mcp) => {
},
},
async (input) => {
- if (!input.jsonld) {
- console.error("No JSON-LD content provided after file read.");
- throw new Error("No JSON-LD content provided.");
- }
const privacy = input.privacy || "private";
- const content =
- input.jsonld.startsWith("{") || input.jsonld.startsWith("[")
- ? input.jsonld
- : await ctx.blob.get(input.jsonld).then((r) => {
- if (!r) {
- console.error(`File with id "${input.jsonld}" not found`);
- throw new Error(`File with id "${input.jsonld}" not found`);
- }
- return consumers.text(r.data);
- });
+ const content = await resolveJsonLdInput(input.jsonld);
const { ual, error } = await publishJsonLdAsset(content, privacy);
if (error) {
@@ -176,9 +243,7 @@ export default defineDkgPlugin((ctx, mcp) => {
},
},
async ({ query }) => {
- // Validate query syntax
- const validation = validateSparqlQuery(query);
-
+ const validation = validateSparqlInput(query);
if (!validation.valid) {
console.error("Invalid SPARQL query:", validation.error);
return {
@@ -191,34 +256,13 @@ export default defineDkgPlugin((ctx, mcp) => {
};
}
- // Use validated query type (must be defined after successful validation)
- if (!validation.queryType) {
- throw new Error("Internal error: queryType missing after successful validation");
- }
- const queryType = validation.queryType;
-
try {
- console.log(`Executing SPARQL ${queryType} query...`);
- const queryResult = await ctx.dkg.graph.query(query, queryType);
-
- // Format result based on query type:
- // - SELECT returns { data: [...bindings] }
- // - CONSTRUCT returns { data: "" }
- const isConstructQuery = queryType === "CONSTRUCT";
- const hasDataProperty = typeof queryResult === "object" && queryResult !== null && "data" in queryResult;
-
- let resultText: string;
- let codeBlockLang: string;
-
- if (isConstructQuery && hasDataProperty && typeof queryResult.data === "string") {
- // CONSTRUCT: extract N-triples string directly
- resultText = queryResult.data;
- codeBlockLang = "ntriples";
- } else {
- // SELECT or fallback: JSON format
- resultText = JSON.stringify(queryResult, null, 2);
- codeBlockLang = "json";
- }
+ console.log(`Executing SPARQL ${validation.queryType} query...`);
+ const queryResult = await runSparqlQuery(query, validation.queryType);
+ const { codeBlockLang, resultText } = formatSparqlResult(
+ queryResult,
+ validation.queryType,
+ );
return {
content: [
@@ -250,10 +294,16 @@ export default defineDkgPlugin((ctx, mcp) => {
title: "DKG Knowledge Asset get tool",
description:
"Retrieve a specific Knowledge Asset from the DKG by its UAL (Unique Asset Locator). ",
- inputSchema: { ual: z.string().describe("The UAL (Unique Asset Locator) in format: did:dkg:{blockchainName}:{blockchainId}/{blockchainAddress}/{collectionId}/{assetId} or did:dkg:{blockchainName}:{blockchainId}/{blockchainAddress}/{collectionId}") },
+ inputSchema: {
+ ual: z
+ .string()
+ .describe(
+ "The UAL (Unique Asset Locator) in format: did:dkg:{blockchainName}:{blockchainId}/{blockchainAddress}/{collectionId}/{assetId} or did:dkg:{blockchainName}:{blockchainId}/{blockchainAddress}/{collectionId}",
+ ),
+ },
},
async ({ ual }) => {
- const getAssetResult = await ctx.dkg.asset.get(ual);
+ const getAssetResult = await getAssetByUal(ual);
return {
content: [
{ type: "text", text: JSON.stringify(getAssetResult, null, 2) },
@@ -262,4 +312,186 @@ export default defineDkgPlugin((ctx, mcp) => {
},
);
+ api.post(
+ "/api/dkg/create",
+ openAPIRoute(
+ {
+ tag: "DKG Publishing",
+ summary: "Create and Publish DKG Asset",
+ description:
+ "Synchronously create and publish a Knowledge Asset on DKG from JSON-LD content or uploaded blob id.",
+ body: z.object({
+ jsonld: z
+ .string()
+ .describe("JSON-LD content or ID of an uploaded file"),
+ privacy: z.enum(["private", "public"]).optional().default("private"),
+ }),
+ response: {
+ schema: z.object({
+ success: z.boolean(),
+ data: z
+ .object({
+ ual: z.string(),
+ explorerLink: z.string(),
+ message: z.string(),
+ })
+ .optional(),
+ error: z.string().optional(),
+ }),
+ },
+ finalizeRouteConfig: (config) => ({
+ ...config,
+ security: [],
+ }),
+ },
+ async (req, res) => {
+ try {
+ const privacy = req.body.privacy || "private";
+ const content = await resolveJsonLdInput(req.body.jsonld);
+
+ const { ual, error } = await publishJsonLdAsset(content, privacy);
+ if (error) {
+ console.error("Error creating asset:", error);
+ return res.status(500).json({
+ success: false,
+ error: "Failed to create asset: " + error,
+ });
+ }
+
+ if (!ual) {
+ return res.status(500).json({
+ success: false,
+ error: "Failed to create asset: missing UAL in response",
+ });
+ }
+
+ const explorerLink = getExplorerUrl(ual);
+ const message = `Knowledge Asset collection successfully created.\n\nUAL: ${ual}\nDKG Explorer link: ${explorerLink}`;
+
+ return res.json({
+ success: true,
+ data: {
+ ual,
+ explorerLink,
+ message,
+ },
+ });
+ } catch (error) {
+ const errorMessage =
+ error instanceof Error ? error.message : String(error);
+ return res.status(500).json({
+ success: false,
+ error: errorMessage,
+ });
+ }
+ },
+ ),
+ );
+
+ api.post(
+ "/api/dkg/query",
+ openAPIRoute(
+ {
+ tag: "DKG Retrieval",
+ summary: "Execute SPARQL Query",
+ description: "Execute a SPARQL query on the DKG network",
+ body: z.object({
+ query: z.string().min(1, "Query cannot be empty"),
+ queryType: z
+ .enum(["SELECT", "CONSTRUCT"])
+ .optional()
+ .default("SELECT"),
+ validate: z.boolean().optional().default(true),
+ }),
+ response: {
+ schema: z.object({
+ success: z.boolean(),
+ data: z.any().optional(),
+ error: z.string().optional(),
+ validation: z
+ .object({
+ valid: z.boolean(),
+ error: z.string().optional(),
+ })
+ .optional(),
+ }),
+ },
+ finalizeRouteConfig: (config) => ({
+ ...config,
+ security: [],
+ }),
+ },
+ async (req, res) => {
+ try {
+ let queryType: SupportedQueryType;
+ if (req.body.validate !== false) {
+ const validation = validateSparqlInput(req.body.query);
+ if (!validation.valid) {
+ return res.status(400).json({
+ success: false,
+ error: validation.error,
+ validation: {
+ valid: false,
+ error: validation.error,
+ },
+ });
+ }
+ queryType = validation.queryType;
+ } else {
+ queryType = req.body.queryType || "SELECT";
+ }
+
+ const queryResult = await runSparqlQuery(req.body.query, queryType);
+ return res.json({
+ success: true,
+ data: queryResult,
+ });
+ } catch (error: any) {
+ return res.status(500).json({
+ success: false,
+ error: error.message,
+ });
+ }
+ },
+ ),
+ );
+
+ api.get(
+ "/api/dkg/get",
+ openAPIRoute(
+ {
+ tag: "DKG Retrieval",
+ summary: "Get DKG Asset",
+ description: "Retrieve an asset from DKG by UAL",
+ query: z.object({
+ ual: z.string(),
+ }),
+ response: {
+ schema: z.object({
+ success: z.boolean(),
+ data: z.any().optional(),
+ error: z.string().optional(),
+ }),
+ },
+ finalizeRouteConfig: (config) => ({
+ ...config,
+ security: [],
+ }),
+ },
+ async (req, res) => {
+ try {
+ const asset = await getAssetByUal(req.query.ual);
+ return res.json({
+ success: true,
+ data: asset,
+ });
+ } catch (error: any) {
+ return res.status(500).json({
+ success: false,
+ error: error.message,
+ });
+ }
+ },
+ ),
+ );
});
diff --git a/packages/plugin-dkg-essentials/src/plugins/document-to-markdown/index.ts b/packages/plugin-dkg-essentials/src/plugins/document-to-markdown/index.ts
index 9007d612..e26521db 100644
--- a/packages/plugin-dkg-essentials/src/plugins/document-to-markdown/index.ts
+++ b/packages/plugin-dkg-essentials/src/plugins/document-to-markdown/index.ts
@@ -134,7 +134,7 @@ export function createDocumentToMarkdownPlugin(
// REST endpoint for document-to-markdown conversion
api.post(
- "/document-to-markdown",
+ "/api/document-to-markdown",
openAPIRoute(
{
summary: "Convert document to Markdown",
diff --git a/packages/plugin-dkg-essentials/tests/dkg-tools.spec.ts b/packages/plugin-dkg-essentials/tests/dkg-tools.spec.ts
index d512bb58..9c257921 100644
--- a/packages/plugin-dkg-essentials/tests/dkg-tools.spec.ts
+++ b/packages/plugin-dkg-essentials/tests/dkg-tools.spec.ts
@@ -3,6 +3,7 @@
import { describe, it, beforeEach, afterEach } from "mocha";
import { expect } from "chai";
import sinon from "sinon";
+import request from "supertest";
import { dkgToolsPlugin } from "../dist/index.js";
import {
getExplorerUrl,
@@ -47,6 +48,12 @@ mockDkgContext.dkg.asset = {
UAL: "did:dkg:otp:20430/0x123456/12345",
}),
};
+mockDkgContext.dkg.graph = {
+ query: (query: string, queryType: string) =>
+ Promise.resolve({
+ data: [{ query, queryType, result: "ok" }],
+ }),
+};
describe("@dkg/plugin-dkg-essentials checks", () => {
let mockMcpServer: McpServer;
@@ -84,10 +91,16 @@ describe("@dkg/plugin-dkg-essentials checks", () => {
expect(tools.some((t) => t.name === "dkg-create")).to.equal(true);
});
- it("should register exactly 2 tools", async () => {
+ it("should register the dkg-sparql-query tool", async () => {
+ const tools = await mockMcpClient.listTools().then((t) => t.tools);
+
+ expect(tools.some((t) => t.name === "dkg-sparql-query")).to.equal(true);
+ });
+
+ it("should register exactly 3 tools", async () => {
const tools = await mockMcpClient.listTools().then((t) => t.tools);
- expect(tools.length).to.equal(2);
+ expect(tools.length).to.equal(3);
});
it("should have correct dkg-get tool configuration", async () => {
@@ -96,7 +109,7 @@ describe("@dkg/plugin-dkg-essentials checks", () => {
expect(dkgGetTool).to.not.equal(undefined);
expect(dkgGetTool!.title).to.equal("DKG Knowledge Asset get tool");
- expect(dkgGetTool!.description).to.include("GET operation");
+ expect(dkgGetTool!.description).to.include("Retrieve a specific");
expect(dkgGetTool!.description).to.include("UAL");
expect(dkgGetTool!.inputSchema).to.not.equal(undefined);
});
@@ -412,6 +425,108 @@ describe("@dkg/plugin-dkg-essentials checks", () => {
// Restore original mock
mockDkgContext.dkg.asset.get = originalGet;
});
+
+ it("should handle DKG service errors in SPARQL tool", async () => {
+ const originalQuery = mockDkgContext.dkg.graph.query;
+ try {
+ mockDkgContext.dkg.graph.query = () => {
+ throw new Error("DKG query unavailable");
+ };
+
+ const result = await mockMcpClient.callTool({
+ name: "dkg-sparql-query",
+ arguments: { query: "SELECT * WHERE { ?s ?p ?o } LIMIT 1" },
+ });
+
+ expect((result.content as any[])[0].text).to.include(
+ "Error executing SPARQL query",
+ );
+ expect((result.content as any[])[0].text).to.include(
+ "DKG query unavailable",
+ );
+ } finally {
+ mockDkgContext.dkg.graph.query = originalQuery;
+ }
+ });
+ });
+
+ describe("HTTP API Routes", () => {
+ it("should expose POST /api/dkg/create from Essentials", async () => {
+ const response = await request(app)
+ .post("/api/dkg/create")
+ .send({
+ jsonld: JSON.stringify({
+ "@context": "https://schema.org",
+ "@type": "Organization",
+ name: "HTTP Create Test Organization",
+ }),
+ privacy: "private",
+ })
+ .expect(200);
+
+ expect(response.body.success).to.equal(true);
+ expect(response.body.data.ual).to.equal("did:dkg:otp:20430/0x123456/12345");
+ expect(response.body.data.explorerLink).to.include(
+ "https://dkg-testnet.origintrail.io/explore?ual=",
+ );
+ });
+
+ it("should return 400 for invalid POST /api/dkg/create input", async () => {
+ await request(app)
+ .post("/api/dkg/create")
+ .send({
+ privacy: "private",
+ })
+ .expect(400);
+ });
+
+ it("should expose POST /api/dkg/query from Essentials", async () => {
+ const response = await request(app)
+ .post("/api/dkg/query")
+ .send({
+ query: "SELECT * WHERE { ?s ?p ?o } LIMIT 1",
+ queryType: "SELECT",
+ })
+ .expect(200);
+
+ expect(response.body.success).to.equal(true);
+ expect(response.body.data.data[0].result).to.equal("ok");
+ });
+
+ it("should return 400 for invalid POST /api/dkg/query input", async () => {
+ const response = await request(app)
+ .post("/api/dkg/query")
+ .send({
+ query: "bad query",
+ queryType: "SELECT",
+ })
+ .expect(400);
+
+ expect(response.body.success).to.equal(false);
+ expect(response.body.error).to.be.a("string");
+ });
+
+ it("should return 400 for unsupported POST /api/dkg/query queryType", async () => {
+ await request(app)
+ .post("/api/dkg/query")
+ .send({
+ query: "SELECT * WHERE { ?s ?p ?o } LIMIT 1",
+ queryType: "ASK",
+ })
+ .expect(400);
+ });
+
+ it("should expose GET /api/dkg/get from Essentials", async () => {
+ const response = await request(app)
+ .get("/api/dkg/get")
+ .query({ ual: "did:dkg:otp:20430/0x123456/12345" })
+ .expect(200);
+
+ expect(response.body.success).to.equal(true);
+ expect(response.body.data.metadata.UAL).to.equal(
+ "did:dkg:otp:20430/0x123456/12345",
+ );
+ });
});
describe("Utility Functions", () => {
@@ -512,7 +627,7 @@ describe("@dkg/plugin-dkg-essentials checks", () => {
expect(result.text).to.include(
"https://dkg-testnet.origintrail.io/explore?ual=did:dkg:test/123",
);
- expect(result.description).to.be.a("string");
+ expect((result as any)._meta.description).to.be.a("string");
});
});
diff --git a/packages/plugin-dkg-publisher/README.md b/packages/plugin-dkg-publisher/README.md
index 91ff8f2e..2d47c57c 100644
--- a/packages/plugin-dkg-publisher/README.md
+++ b/packages/plugin-dkg-publisher/README.md
@@ -22,16 +22,23 @@ The DKG Publisher plugin provides a **production-ready, scalable system** for pu
### 1. Setup
```bash
-npm run setup
+cd apps/agent
+npm run script:setup
```
-The setup script will guide you through:
+The standard node setup now owns Publisher configuration too. Choose async
+publishing during the Agent setup flow to write the consolidated Publisher
+runtime env into `apps/agent/.env` and provision the Publisher MySQL database.
+In advanced mode, setup can also seed additional publishing wallets into the
+Publisher wallet pool.
-- Database configuration (MySQL)
-- Redis configuration
-- DKG network settings
-- Wallet pool setup
-- Worker configuration
+For post-setup operations (update Publisher config, manage wallets, or reset
+Publisher data), use:
+
+```bash
+cd apps/agent
+npm run script:publisher
+```
### 2. Start the Application
pending → queued → assigned → publishing → published →
**During setup:**
```bash
-npm run setup
-# Choose option 3: "Add wallets only"
+cd apps/agent
+npm run script:setup
+# Choose "Yes, with advanced configuration" for async publishing
+# The primary wallet is seeded automatically and setup can add more wallets
+```
+
+**After setup:**
+
+```bash
+cd apps/agent
+npm run script:publisher
+# Choose "Manage wallets"
```
**After 5 minutes**, workers automatically restart with new concurrency. No application restart needed!
diff --git a/packages/plugin-dkg-publisher/[LOCAL] DKG Node - Publisher.postman_collection.json b/packages/plugin-dkg-publisher/[LOCAL] DKG Node - Publisher.postman_collection.json
index f8dd0504..58699329 100644
--- a/packages/plugin-dkg-publisher/[LOCAL] DKG Node - Publisher.postman_collection.json
+++ b/packages/plugin-dkg-publisher/[LOCAL] DKG Node - Publisher.postman_collection.json
@@ -150,9 +150,9 @@
"method": "GET",
"header": [],
"url": {
- "raw": "{{endpoint}}/api/dkg/assets?ual=did:dkg:otp:20430/0xcdb28e93ed340ec10a71bba00a31dbfcf1bd5d37/347048",
+ "raw": "{{endpoint}}/api/dkg/get?ual=did:dkg:otp:20430/0xcdb28e93ed340ec10a71bba00a31dbfcf1bd5d37/347048",
"host": ["{{endpoint}}"],
- "path": ["api", "dkg", "assets"],
+ "path": ["api", "dkg", "get"],
"query": [
{
"key": "ual",
@@ -160,7 +160,7 @@
}
]
},
- "description": "Generated from cURL: curl -X GET \"http://localhost:9200/api/dkg/assets/did:dkg:otp:20430/0x1234567890abcdef/123456?contentType=all&includeMetadata=true\""
+ "description": "Generated from cURL: curl -X GET \"http://localhost:9200/api/dkg/get?ual=did:dkg:otp:20430/0x1234567890abcdef/123456\""
},
"response": []
}
diff --git a/packages/plugin-dkg-publisher/package-addition.json b/packages/plugin-dkg-publisher/package-addition.json
index f1da997f..5481597d 100644
--- a/packages/plugin-dkg-publisher/package-addition.json
+++ b/packages/plugin-dkg-publisher/package-addition.json
@@ -1,6 +1,5 @@
{
"scripts": {
- "km:setup": "node setup.js",
"km:migrate": "npm run db:migrate",
"km:worker": "npm run worker",
"km:dashboard": "npm run dashboard",
diff --git a/packages/plugin-dkg-publisher/package.json b/packages/plugin-dkg-publisher/package.json
index 9950e434..01cb9016 100644
--- a/packages/plugin-dkg-publisher/package.json
+++ b/packages/plugin-dkg-publisher/package.json
@@ -5,13 +5,29 @@
"main": "./dist/index.js",
"module": "./dist/index.mjs",
"types": "./dist/index.d.ts",
+ "exports": {
+ ".": {
+ "types": "./dist/index.d.ts",
+ "import": "./dist/index.mjs",
+ "require": "./dist/index.js"
+ },
+ "./provision": {
+ "types": "./dist/provision.d.ts",
+ "import": "./dist/provision.mjs",
+ "require": "./dist/provision.js"
+ },
+ "./privateKey": {
+ "types": "./dist/privateKey.d.ts",
+ "import": "./dist/privateKey.mjs",
+ "require": "./dist/privateKey.js"
+ }
+ },
"scripts": {
- "dev": "tsup src/*.ts --format cjs,esm --dts --watch",
- "build": "tsup src/*.ts --format cjs,esm --dts",
+ "dev": "tsup src/*.ts --format cjs,esm --dts --watch --shims",
+ "build": "tsup src/*.ts --format cjs,esm --dts --shims",
"check-types": "tsc --noEmit",
"lint": "eslint . --max-warnings 0",
"test": "mocha --loader ../../node_modules/tsx/dist/loader.mjs 'tests/**/*.spec.ts'",
- "setup": "node setup.js",
"db:generate": "drizzle-kit generate:mysql",
"db:migrate": "node migrate.js",
"db:push": "drizzle-kit push:mysql",
diff --git a/packages/plugin-dkg-publisher/scripts/check-queue-status.js b/packages/plugin-dkg-publisher/scripts/check-queue-status.js
index efc900d8..7c2b062f 100644
--- a/packages/plugin-dkg-publisher/scripts/check-queue-status.js
+++ b/packages/plugin-dkg-publisher/scripts/check-queue-status.js
@@ -6,11 +6,16 @@ const IORedis = require("ioredis");
async function checkQueueStatus() {
console.log("🔍 Checking BullMQ queue status...\n");
+ const redisUrl =
+ process.env.REDIS_URL ||
+ `redis://${process.env.REDIS_HOST || "localhost"}:${process.env.REDIS_PORT || "6379"}`;
+ const parsedRedisUrl = new URL(redisUrl);
+
// Connect to Redis (same config as the service)
const redis = new IORedis({
- host: process.env.REDIS_HOST || "localhost",
- port: process.env.REDIS_PORT || 6379,
- password: process.env.REDIS_PASSWORD,
+ host: parsedRedisUrl.hostname || "localhost",
+ port: Number(parsedRedisUrl.port || 6379),
+ password: parsedRedisUrl.password || process.env.REDIS_PASSWORD,
maxRetriesPerRequest: 3,
retryStrategy: (times) => Math.min(times * 50, 2000),
});
diff --git a/packages/plugin-dkg-publisher/scripts/clear-failed-jobs.js b/packages/plugin-dkg-publisher/scripts/clear-failed-jobs.js
index e7d99e11..33eb79dd 100644
--- a/packages/plugin-dkg-publisher/scripts/clear-failed-jobs.js
+++ b/packages/plugin-dkg-publisher/scripts/clear-failed-jobs.js
@@ -6,11 +6,16 @@ const IORedis = require("ioredis");
async function clearFailedJobs() {
console.log("🧹 Clearing failed jobs from BullMQ...");
+ const redisUrl =
+ process.env.REDIS_URL ||
+ `redis://${process.env.REDIS_HOST || "localhost"}:${process.env.REDIS_PORT || "6379"}`;
+ const parsedRedisUrl = new URL(redisUrl);
+
// Connect to Redis (same config as the service)
const redis = new IORedis({
- host: process.env.REDIS_HOST || "localhost",
- port: process.env.REDIS_PORT || 6379,
- password: process.env.REDIS_PASSWORD,
+ host: parsedRedisUrl.hostname || "localhost",
+ port: Number(parsedRedisUrl.port || 6379),
+ password: parsedRedisUrl.password || process.env.REDIS_PASSWORD,
maxRetriesPerRequest: 3,
retryStrategy: (times) => Math.min(times * 50, 2000),
});
diff --git a/packages/plugin-dkg-publisher/setup.js b/packages/plugin-dkg-publisher/setup.js
deleted file mode 100644
index f7847fad..00000000
--- a/packages/plugin-dkg-publisher/setup.js
+++ /dev/null
@@ -1,1125 +0,0 @@
-#!/usr/bin/env node
-
-/**
- * DKG Publisher Plugin Setup Script
- *
- * This script configures the DKG Publisher plugin for any DKG-Node project.
- * It handles database creation, migrations, wallet setup, and environment configuration.
- */
-
-const fs = require("fs").promises;
-const path = require("path");
-const readline = require("readline");
-const mysql = require("mysql2/promise");
-const crypto = require("crypto");
-const CryptoJS = require("crypto-js");
-const { Wallet } = require("ethers");
-
-// Colors for console output
-const colors = {
- green: "\x1b[32m",
- red: "\x1b[31m",
- yellow: "\x1b[33m",
- blue: "\x1b[34m",
- cyan: "\x1b[36m",
- reset: "\x1b[0m",
- bold: "\x1b[1m",
-};
-
-function log(message, color = "reset") {
- const colorCode = colors[color] || colors.reset;
- console.log(`${colorCode}${message}${colors.reset}`);
-}
-
-function logStep(step, message) {
- log(
- `\n${colors.bold}[${step}]${colors.reset} ${colors.cyan}${message}${colors.reset}`,
- );
-}
-
-// Interactive input helper
-function ask(question, options = {}) {
- const rl = readline.createInterface({
- input: process.stdin,
- output: process.stdout,
- });
-
- return new Promise((resolve) => {
- const prompt = options.password
- ? question.replace(/:/g, " (input hidden):")
- : question;
-
- // Mute output for password fields so typed characters are not echoed
- if (options.password) {
- rl._writeToOutput = (str) => {
- // Only suppress characters after the prompt has been written
- if (str === prompt || str.includes(prompt)) {
- process.stdout.write(str);
- }
- };
- }
-
- rl.question(`${colors.yellow}${prompt}${colors.reset} `, (answer) => {
- if (options.password) {
- // Print a newline since the user's Enter was not echoed
- process.stdout.write("\n");
- }
- rl.close();
-
- // Handle empty input - use default if available
- const finalAnswer = answer.trim() || options.default || "";
-
- // Handle required fields
- if (options.required && !finalAnswer) {
- log("This field is required!", "red");
- return resolve(ask(question, options));
- }
-
- // Handle validation
- if (options.validate && !options.validate(finalAnswer)) {
- log(options.error || "Invalid input!", "red");
- return resolve(ask(question, options));
- }
-
- resolve(finalAnswer);
- });
- });
-}
-
-// Create file with content (won't overwrite existing)
-async function createFile(filePath, content, overwrite = false) {
- try {
- if (!overwrite) {
- try {
- await fs.access(filePath);
- log(
- `File ${path.basename(filePath)} already exists, skipping...`,
- "yellow",
- );
- return false;
- } catch {
- // File doesn't exist, continue
- }
- }
-
- await fs.writeFile(filePath, content, "utf8");
- log(`Created ${path.basename(filePath)}`, "green");
- return true;
- } catch (error) {
- log(`Error creating ${path.basename(filePath)}: ${error.message}`, "red");
- return false;
- }
-}
-
-// Generate secure random key
-function generateSecretKey(length = 32) {
- return crypto.randomBytes(length).toString("hex");
-}
-
-// Encrypt wallet private key using AES (same method as runtime)
-function encryptPrivateKey(privateKey, encryptionKey) {
- return CryptoJS.AES.encrypt(privateKey, encryptionKey).toString();
-}
-
-// Validate Ethereum address
-function isValidAddress(address) {
- return /^0x[a-fA-F0-9]{40}$/.test(address);
-}
-
-// Validate private key
-function isValidPrivateKey(key) {
- return /^0x[a-fA-F0-9]{64}$/.test(key) || /^[a-fA-F0-9]{64}$/.test(key);
-}
-
-// Derive wallet address from private key
-function getAddressFromPrivateKey(privateKey) {
- try {
- // Ensure private key has 0x prefix
- const formattedKey = privateKey.startsWith("0x")
- ? privateKey
- : `0x${privateKey}`;
- const wallet = new Wallet(formattedKey);
- return wallet.address;
- } catch (error) {
- throw new Error(`Invalid private key: ${error.message}`);
- }
-}
-
-// Bootstrap migration journal for databases created by a previous version of
-// setup.js (raw DDL, no __drizzle_migrations table). Without this, migrate()
-// would try to re-run CREATE TABLE statements on existing tables and fail.
-// This mirrors the logic in src/database/bootstrap.ts but uses raw mysql2 queries.
-async function bootstrapJournalForSetup(pool) {
- const fsSync = require("fs");
-
- const [journals] = await pool.execute(
- `SELECT COUNT(*) as cnt FROM information_schema.tables WHERE table_schema = DATABASE() AND table_name = '__drizzle_migrations'`,
- );
- if (Number(journals[0].cnt) > 0) return;
-
- const [tables] = await pool.execute(
- `SELECT COUNT(*) as cnt FROM information_schema.tables WHERE table_schema = DATABASE() AND table_name IN ('assets', 'wallets', 'publishing_attempts', 'batches')`,
- );
- const tableCount = Number(tables[0].cnt);
- if (tableCount === 0) return; // Fresh DB
-
- if (tableCount < 4) {
- throw new Error(
- "Database is in a partial state (some tables missing). Please choose 'Start fresh' (option 1).",
- );
- }
-
- log(" Bootstrapping migration journal for existing database...", "cyan");
-
- await pool.execute(`
- CREATE TABLE IF NOT EXISTS __drizzle_migrations (
- id SERIAL PRIMARY KEY,
- hash text NOT NULL,
- created_at bigint
- )
- `);
-
- const journalPath = path.join(
- __dirname,
- "src/database/migrations/meta/_journal.json",
- );
- const journal = JSON.parse(fsSync.readFileSync(journalPath, "utf-8"));
-
- // Seed 0000 and 0001 (setup.js schema = post-0001 state)
- for (const entry of journal.entries) {
- if (entry.idx > 1) break;
- const sqlFile = path.join(
- __dirname,
- `src/database/migrations/${entry.tag}.sql`,
- );
- const content = fsSync.readFileSync(sqlFile, "utf-8");
- const hash = crypto.createHash("sha256").update(content).digest("hex");
- await pool.execute(
- `INSERT INTO __drizzle_migrations (hash, created_at) VALUES (?, ?)`,
- [hash, entry.when],
- );
- }
-
- // Check if 0002 changes are already present
- const entry0002 = journal.entries.find((e) => e.idx === 2);
- if (entry0002) {
- const [hasErrorDetails] = await pool.execute(
- `SELECT COUNT(*) as cnt FROM information_schema.columns WHERE table_schema = DATABASE() AND table_name = 'publishing_attempts' AND column_name = 'error_details'`,
- );
- const [hasPrivateKey] = await pool.execute(
- `SELECT COUNT(*) as cnt FROM information_schema.columns WHERE table_schema = DATABASE() AND table_name = 'wallets' AND column_name = 'private_key'`,
- );
- if (
- Number(hasErrorDetails[0].cnt) > 0 &&
- Number(hasPrivateKey[0].cnt) > 0
- ) {
- const sqlFile = path.join(
- __dirname,
- `src/database/migrations/${entry0002.tag}.sql`,
- );
- const content = fsSync.readFileSync(sqlFile, "utf-8");
- const hash = crypto.createHash("sha256").update(content).digest("hex");
- await pool.execute(
- `INSERT INTO __drizzle_migrations (hash, created_at) VALUES (?, ?)`,
- [hash, entry0002.when],
- );
- }
- }
-
- log(" ā Migration journal bootstrapped", "green");
-}
-
-// Check if configuration already exists
-async function checkExistingConfig() {
- const existingEnv = ".env.publisher";
- const existingCompose = "docker-compose.knowledge-manager.yml";
-
- let hasConfig = false;
- let configDetails = {
- env: false,
- compose: false,
- database: null,
- redis: null,
- };
-
- try {
- await fs.access(existingEnv);
- configDetails.env = true;
- hasConfig = true;
-
- // Parse existing env
- const envContent = await fs.readFile(existingEnv, "utf8");
- const dbMatch = envContent.match(
- /DKGP_DATABASE_URL=mysql:\/\/([^:]+):?([^@]*)@([^:]+):(\d+)\/(\w+)/,
- );
- const redisMatch = envContent.match(
- /REDIS_URL=redis:\/\/(?:([^@]+)@)?([^:]+):(\d+)/,
- );
-
- if (dbMatch) {
- configDetails.database = {
- user: dbMatch[1],
- password: dbMatch[2] || "",
- host: dbMatch[3],
- port: dbMatch[4],
- name: dbMatch[5],
- };
- }
-
- if (redisMatch) {
- configDetails.redis = {
- password: redisMatch[1] || "",
- host: redisMatch[2],
- port: redisMatch[3],
- };
- }
- } catch {}
-
- try {
- await fs.access(existingCompose);
- configDetails.compose = true;
- hasConfig = true;
- } catch {}
-
- return { hasConfig, configDetails };
-}
-
-// Add wallets only mode
-async function addWalletsOnly(configDetails) {
- if (!configDetails.database) {
- log(
- "ā No database configuration found. Please run full setup first.",
- "red",
- );
- return;
- }
-
- // Read encryption key from existing .env file
- let encryptionKey;
- try {
- const envContent = await fs.readFile(".env.publisher", "utf8");
- const keyMatch = envContent.match(/ENCRYPTION_KEY=(.+)/);
- if (!keyMatch) {
- log("ā ENCRYPTION_KEY not found in .env.publisher", "red");
- return;
- }
- encryptionKey = keyMatch[1];
- log("ā Found encryption key", "green");
- } catch (error) {
- log("ā Could not read .env.publisher file", "red");
- return;
- }
-
- logStep("1/2", "Connect to Database");
-
- const { database } = configDetails;
- let connection;
-
- try {
- connection = await mysql.createConnection({
- host: database.host,
- port: parseInt(database.port),
- user: database.user,
- password: database.password,
- database: database.name,
- });
-
- log("ā Connected to existing database", "green");
-
- // Check existing wallets
- const [existingWallets] = await connection.execute(
- "SELECT address, blockchain FROM wallets WHERE is_active = TRUE",
- );
-
- if (existingWallets.length > 0) {
- log(`\nš Found ${existingWallets.length} existing wallet(s):`, "cyan");
- existingWallets.forEach((wallet, index) => {
- log(
- ` ${index + 1}. ${wallet.address} (${wallet.blockchain})`,
- "white",
- );
- });
- }
-
- logStep("2/2", "Add New Wallets");
-
- const wallets = [];
- let addMoreWallets = true;
- let walletCount = existingWallets.length + 1;
-
- while (addMoreWallets) {
- log(`${colors.bold}Wallet ${walletCount}:${colors.reset}`);
-
- const privateKey = await ask(`Private Key (0x... or without 0x):`, {
- required: true,
- validate: isValidPrivateKey,
- error: "Invalid private key format",
- });
-
- // Ensure private key has 0x prefix
- const formattedPrivateKey = privateKey.startsWith("0x")
- ? privateKey
- : `0x${privateKey}`;
-
- // Derive address from private key
- let address;
- try {
- address = getAddressFromPrivateKey(formattedPrivateKey);
- log(` ā³ Derived address: ${address}`, "cyan");
- } catch (error) {
- log(`ā ${error.message}`, "red");
- continue;
- }
-
- // Check if wallet already exists
- const [duplicate] = await connection.execute(
- "SELECT id FROM wallets WHERE address = ?",
- [address],
- );
- if (duplicate.length > 0) {
- log(`ā ļø Wallet ${address} already exists, skipping...`, "yellow");
- const addMore = await ask(`Add another wallet? (y/N):`, {
- default: "n",
- });
- addMoreWallets =
- addMore.toLowerCase() === "y" || addMore.toLowerCase() === "yes";
- continue;
- }
-
- const blockchain = await ask(
- `Blockchain (default: ${existingWallets[0]?.blockchain || "otp:20430"}):`,
- {
- default: existingWallets[0]?.blockchain || "otp:20430",
- },
- );
-
- wallets.push({
- address,
- privateKey: formattedPrivateKey,
- blockchain,
- });
-
- log(`ā Wallet ${walletCount} added`, "green");
-
- const addMore = await ask(`Add another wallet? (y/N):`, { default: "n" });
- addMoreWallets =
- addMore.toLowerCase() === "y" || addMore.toLowerCase() === "yes";
- walletCount++;
- }
-
- // Insert new wallets
- for (const wallet of wallets) {
- await connection.execute(
- `INSERT INTO wallets (address, private_key, blockchain) VALUES (?, ?, ?)`,
- [wallet.address, wallet.privateKey, wallet.blockchain],
- );
- }
-
- log(`\nā
Added ${wallets.length} new wallet(s) to database`, "green");
- log(`š Total wallets: ${existingWallets.length + wallets.length}`, "cyan");
- } catch (error) {
- log(`Database error: ${error.message}`, "red");
- } finally {
- if (connection) await connection.end();
- }
-}
-
-// Main setup function
-async function setup() {
- log(
- `${colors.bold}${colors.blue}āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā${colors.reset}`,
- );
- log(
- `${colors.bold}${colors.blue}ā DKG Publisher Plugin Setup ā${colors.reset}`,
- );
- log(
- `${colors.bold}${colors.blue}ā ā${colors.reset}`,
- );
- log(
- `${colors.bold}${colors.blue}ā This script will configure the DKG Publisher plugin ā${colors.reset}`,
- );
- log(
- `${colors.bold}${colors.blue}ā for publishing JSON-LD assets to the DKG blockchain. ā${colors.reset}`,
- );
- log(
- `${colors.bold}${colors.blue}āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā${colors.reset}\n`,
- );
-
- try {
- // Check for existing configuration
- const { hasConfig, configDetails } = await checkExistingConfig();
-
- let setupMode;
-
- if (hasConfig) {
- log("š Existing configuration detected:", "yellow");
- if (configDetails.env) log(" ⢠.env.publisher found", "cyan");
- if (configDetails.compose)
- log(" ⢠docker-compose.knowledge-manager.yml found", "cyan");
-
- setupMode = await ask(
- "\nChoose setup mode:\n1. Start fresh (ā ļø will delete existing tables and backup config files)\n2. Update existing configuration\n3. Add wallets only\nChoice (1-3):",
- {
- validate: (input) => ["1", "2", "3"].includes(input),
- error: "Please enter 1, 2, or 3",
- },
- );
-
- if (setupMode === "1") {
- // Backup existing files
- const timestamp = new Date().toISOString().replace(/[:.]/g, "-");
- try {
- await fs.rename(
- ".env.publisher",
- `.env.publisher.backup-${timestamp}`,
- );
- log("ā Backed up existing .env file", "green");
- } catch {}
- try {
- await fs.rename(
- "docker-compose.knowledge-manager.yml",
- `docker-compose.knowledge-manager.yml.backup-${timestamp}`,
- );
- log("ā Backed up existing docker-compose file", "green");
- } catch {}
- } else if (setupMode === "3") {
- // Wallets-only mode
- return await addWalletsOnly(configDetails);
- }
- }
-
- // Use existing configuration as defaults if available
- const existingDb = configDetails.database;
- const existingRedis = configDetails.redis;
-
- // Step 1: Database Configuration
- logStep("1/7", "Database Configuration");
-
- const dbHost = await ask(
- `MySQL Host (default: ${existingDb?.host || "localhost"}):`,
- { default: existingDb?.host || "localhost" },
- );
- const dbPort = await ask(
- `MySQL Port (default: ${existingDb?.port || "3306"}):`,
- { default: existingDb?.port || "3306" },
- );
- const dbUser = await ask(
- `MySQL Username (default: ${existingDb?.user || "root"}):`,
- { default: existingDb?.user || "root" },
- );
- const dbPassword = await ask("MySQL Password:", {
- password: true,
- default: existingDb?.password || "",
- });
- const dbName = await ask(
- `Database Name (default: ${existingDb?.name || "dkg_publisher_db"}):`,
- { default: existingDb?.name || "dkg_publisher_db" },
- );
-
- // Step 2: Redis Configuration
- logStep("2/7", "Redis Configuration");
-
- const redisHost = await ask(
- `Redis Host (default: ${existingRedis?.host || "localhost"}):`,
- { default: existingRedis?.host || "localhost" },
- );
- const redisPort = await ask(
- `Redis Port (default: ${existingRedis?.port || "6379"}):`,
- { default: existingRedis?.port || "6379" },
- );
- const redisPassword = await ask("Redis Password (leave empty if none):", {
- default: existingRedis?.password || "",
- });
-
- // Step 3: DKG Configuration
- logStep("3/7", "DKG Network Configuration");
-
- const dkgEndpoint = await ask(
- "DKG Engine URL (default: http://localhost:8900):",
- {
- default: "http://localhost:8900",
- },
- );
-
- const blockchainOptions = [
- "hardhat1:31337 (Local Development)",
- "gnosis:100 (Gnosis Mainnet)",
- "gnosis:10200 (Gnosis Testnet)",
- "base:8453 (Base Mainnet)",
- "base:84532 (Base Testnet)",
- "otp:20430 (NeuroWeb Testnet)",
- "otp:2043 (NeuroWeb Mainnet)",
- "custom (Enter manually)",
- ];
-
- log("\nAvailable blockchain networks:");
- blockchainOptions.forEach((option, index) => {
- log(`${index + 1}. ${option}`, "cyan");
- });
-
- const blockchainChoice = await ask("Choose blockchain (1-8):", {
- validate: (input) =>
- ["1", "2", "3", "4", "5", "6", "7", "8"].includes(input),
- error: "Please enter 1, 2, 3, 4, 5, 6, 7, or 8",
- });
-
- let blockchain;
- switch (blockchainChoice) {
- case "1":
- blockchain = "hardhat1:31337";
- break;
- case "2":
- blockchain = "gnosis:100";
- break;
- case "3":
- blockchain = "gnosis:10200";
- break;
- case "4":
- blockchain = "base:8453";
- break;
- case "5":
- blockchain = "base:84532";
- break;
- case "6":
- blockchain = "otp:20430";
- break;
- case "7":
- blockchain = "otp:2043";
- break;
- case "8":
- blockchain = await ask("Enter blockchain (format: name:chainId):", {
- required: true,
- validate: (input) => input.includes(":"),
- error: "Format must be name:chainId (e.g., gnosis:100)",
- });
- break;
- }
-
- if (!blockchain) {
- log("ā Blockchain selection failed", "red");
- throw new Error("Invalid blockchain selection");
- }
-
- // Step 4: Wallet Configuration
- logStep("4/7", "Wallet Pool Setup");
-
- log(
- "The DKG Publisher plugin requires at least one wallet for publishing.",
- );
- log(
- "For high throughput, configure multiple wallets (10-100+ recommended).\n",
- );
-
- const wallets = [];
- let addMoreWallets = true;
- let walletCount = 1;
-
- while (addMoreWallets) {
- log(`${colors.bold}Wallet ${walletCount}:${colors.reset}`);
-
- const privateKey = await ask(`Private Key (0x... or without 0x):`, {
- required: true,
- validate: isValidPrivateKey,
- error: "Invalid private key format",
- });
-
- // Ensure private key has 0x prefix
- const formattedPrivateKey = privateKey.startsWith("0x")
- ? privateKey
- : `0x${privateKey}`;
-
- // Derive address from private key
- let address;
- try {
- address = getAddressFromPrivateKey(formattedPrivateKey);
- log(` ā³ Derived address: ${address}`, "cyan");
- } catch (error) {
- log(`ā ${error.message}`, "red");
- continue;
- }
-
- wallets.push({
- address,
- privateKey: formattedPrivateKey,
- blockchain,
- });
-
- log(`ā Wallet ${walletCount} added`, "green");
-
- const addMore = await ask(`Add another wallet? (y/N):`, { default: "n" });
- addMoreWallets =
- addMore.toLowerCase() === "y" || addMore.toLowerCase() === "yes";
- walletCount++;
- }
-
- // Step 5: Storage Configuration
- logStep("5/7", "File Storage Configuration");
-
- const storageType = await ask(
- "Storage type (filesystem/s3) [default: filesystem]:",
- {
- default: "filesystem",
- validate: (input) => ["filesystem", "s3"].includes(input.toLowerCase()),
- error: 'Please enter "filesystem" or "s3"',
- },
- );
-
- let storageConfig = { type: storageType.toLowerCase() };
-
- if (storageConfig.type === "filesystem") {
- const storagePath = await ask("Storage directory (default: ./storage):", {
- default: "./storage",
- });
- storageConfig.path = storagePath;
- } else {
- const s3Bucket = await ask("S3 Bucket name:", { required: true });
- const s3Region = await ask("S3 Region (default: us-east-1):", {
- default: "us-east-1",
- });
- const s3AccessKey = await ask("AWS Access Key ID:", { required: true });
- const s3SecretKey = await ask("AWS Secret Access Key:", {
- password: true,
- required: true,
- });
-
- storageConfig = {
- type: "s3",
- bucket: s3Bucket,
- region: s3Region,
- accessKeyId: s3AccessKey,
- secretAccessKey: s3SecretKey,
- };
- }
-
- // Step 6: Worker Configuration
- logStep("6/7", "Worker Pool Configuration");
-
- log("Configure worker processes for optimal throughput:");
- log(
- `With ${wallets.length} wallet(s), recommended workers: ${Math.min(Math.ceil(wallets.length / 10), 10)}`,
- );
- log(
- "Note: Worker concurrency is now auto-calculated based on wallet count.",
- );
-
- const workerCount = await ask(
- `Number of worker processes (default: ${Math.min(Math.ceil(wallets.length / 10), 5)}):`,
- {
- default: Math.min(Math.ceil(wallets.length / 10), 5).toString(),
- validate: (input) => !isNaN(parseInt(input)) && parseInt(input) > 0,
- error: "Please enter a positive number",
- },
- );
-
- // Step 7: Create Configuration Files
- logStep("6/7", "Creating Configuration Files");
-
- // Generate security keys once for reuse
- const encryptionKey = generateSecretKey();
- const jwtSecret = generateSecretKey();
-
- // Environment file
- const envContent = `# DKG Publisher Plugin Configuration
-# Generated by setup script on ${new Date().toISOString()}
-
-# Database Configuration
-DKGP_DATABASE_URL=mysql://${dbUser}${dbPassword ? ":" + dbPassword : ""}@${dbHost}:${dbPort}/${dbName}
-
-# Redis Configuration
-REDIS_URL=redis://${redisPassword ? `${redisPassword}@` : ""}${redisHost}:${redisPort}
-
-# DKG Network Configuration
-DKG_ENDPOINT=${dkgEndpoint}
-DKG_BLOCKCHAIN=${blockchain}
-
-# File Storage Configuration
-STORAGE_TYPE=${storageConfig.type}
-${
- storageConfig.type === "filesystem"
- ? ``
- : `AWS_S3_BUCKET=${storageConfig.bucket}
-AWS_S3_REGION=${storageConfig.region}
-AWS_ACCESS_KEY_ID=${storageConfig.accessKeyId}
-AWS_SECRET_ACCESS_KEY=${storageConfig.secretAccessKey}`
-}
-
-# Worker Configuration
-WORKER_COUNT=${workerCount}
-# Note: WORKER_CONCURRENCY is auto-calculated from wallet count (no need to set manually)
-
-# Security
-ENCRYPTION_KEY=${encryptionKey}
-JWT_SECRET=${jwtSecret}
-
-# Monitoring (optional)
-# SENTRY_DSN=
-# DATADOG_API_KEY=
-`;
-
- const overwriteConfig = setupMode === "1" || setupMode === "2"; // Fresh or Update
- await createFile(".env.publisher", envContent, overwriteConfig);
-
- // Skip wallet configuration file - wallets will be inserted directly into database
-
- // Docker compose file
- const dockerComposeContent = `version: '3.8'
-services:
- mysql:
- image: mysql:8.0
- environment:
- MYSQL_ROOT_PASSWORD: ${dbPassword}
- MYSQL_DATABASE: ${dbName}
- MYSQL_USER: km_user
- MYSQL_PASSWORD: ${generateSecretKey(16)}
- ports:
- - "${dbPort}:3306"
- volumes:
- - mysql_data:/var/lib/mysql
- - ./migrations:/docker-entrypoint-initdb.d
- command: >
- --default-authentication-plugin=mysql_native_password
- --innodb-lock-wait-timeout=10
- --max-connections=200
-
- redis:
- image: redis:7-alpine
- ports:
- - "${redisPort}:6379"
- volumes:
- - redis_data:/data
- ${redisPassword ? `command: redis-server --requirepass ${redisPassword}` : ""}
-
- knowledge-manager:
- image: dkg-knowledge-manager:latest
- env_file:
- - .env.publisher
- depends_on:
- - mysql
- - redis
- scale: ${workerCount}
- volumes:
- ${storageConfig.type === "filesystem" ? `- ./storage:/app/storage` : ""}
-
-volumes:
- mysql_data:
- redis_data:
-`;
-
- await createFile(
- "docker-compose.knowledge-manager.yml",
- dockerComposeContent,
- overwriteConfig,
- );
-
- // Package.json scripts
- const packageJsonAddition = {
- scripts: {
- "km:setup": "node setup.js",
- "km:migrate": "npm run db:migrate",
- "km:worker": "npm run worker",
- "km:dashboard": "npm run dashboard",
- "km:status":
- "node -e \"console.log('Use the /api/knowledge/health endpoint for status')\"",
- "km:docker:up":
- "docker-compose -f docker-compose.knowledge-manager.yml up -d",
- "km:docker:down":
- "docker-compose -f docker-compose.knowledge-manager.yml down",
- },
- dependencies: {
- "@dkg/plugin-dkg-publisher": "^1.0.0",
- bullmq: "^4.15.0",
- ioredis: "^5.3.2",
- mysql2: "^3.6.5",
- "drizzle-orm": "^0.29.0",
- },
- };
-
- await createFile(
- "package-addition.json",
- JSON.stringify(packageJsonAddition, null, 2),
- );
-
- // Create storage directory if filesystem
- if (storageConfig.type === "filesystem") {
- try {
- await fs.mkdir(path.resolve(storageConfig.path), { recursive: true });
- log(`Created storage directory: ${storageConfig.path}`, "green");
- } catch (error) {
- log(
- `Warning: Could not create storage directory: ${error.message}`,
- "yellow",
- );
- }
- }
-
- // Database setup
- logStep("7/7", "Database Setup");
- log("Setting up database and creating tables...");
- let connection;
- try {
- // Connect to MySQL without database first
- log(`Connecting to MySQL at ${dbHost}:${dbPort}...`);
- connection = await mysql.createConnection({
- host: dbHost,
- port: parseInt(dbPort),
- user: dbUser,
- password: dbPassword,
- });
-
- log("ā Connected to MySQL server", "green");
-
- // Create database if it doesn't exist
- await connection.execute(
- `CREATE DATABASE IF NOT EXISTS \`${dbName}\` CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci`,
- );
- log(`ā Database '${dbName}' created/verified`, "green");
-
- // Switch to the database
- await connection.changeUser({ database: dbName });
- log(`ā Connected to database '${dbName}'`, "green");
-
- // Run Drizzle migrations to create/update tables
- log("Running database migrations...", "cyan");
-
- // Check if this is a fresh start (Mode 1) ā drop all tables first
- if (setupMode === "1" || !hasConfig) {
- log(" Dropping existing tables for fresh setup...", "white");
- await connection.execute("SET FOREIGN_KEY_CHECKS = 0");
- await connection.execute("DROP TABLE IF EXISTS __drizzle_migrations");
- await connection.execute("DROP TABLE IF EXISTS wallet_metrics");
- await connection.execute("DROP TABLE IF EXISTS publishing_attempts");
- await connection.execute("DROP TABLE IF EXISTS assets");
- await connection.execute("DROP TABLE IF EXISTS wallets");
- await connection.execute("DROP TABLE IF EXISTS batches");
- await connection.execute("DROP TABLE IF EXISTS metrics_hourly");
- await connection.execute("SET FOREIGN_KEY_CHECKS = 1");
- }
-
- // Close the single connection ā Drizzle needs a pool
- await connection.end();
- connection = null;
-
- // Run Drizzle migrations via a dedicated connection with FK checks disabled.
- // Migration 0001 changes column types that have FK references, and MySQL
- // validates FK compatibility on each ALTER TABLE.
- const { drizzle } = require("drizzle-orm/mysql2");
- const { migrate } = require("drizzle-orm/mysql2/migrator");
- const migrationConn = await mysql.createConnection({
- host: dbHost,
- port: parseInt(dbPort),
- user: dbUser,
- password: dbPassword,
- database: dbName,
- });
- try {
- // Bootstrap migration journal for existing databases without one
- // (e.g. created by a previous version of setup.js with raw DDL)
- await bootstrapJournalForSetup(migrationConn);
-
- await migrate(drizzle(migrationConn), {
- migrationsFolder: path.join(__dirname, "src/database/migrations"),
- });
- } finally {
- await migrationConn.end();
- }
- log("ā Database migrations completed", "green");
-
- // Reconnect with single connection for wallet insertion
- connection = await mysql.createConnection({
- host: dbHost,
- port: parseInt(dbPort),
- user: dbUser,
- password: dbPassword,
- database: dbName,
- });
-
- // Verify tables were created
- const [tables] = await connection.execute("SHOW TABLES");
- const tableNames = tables.map((row) => Object.values(row)[0]);
- log(
- `ā Created ${tableNames.length} tables: ${tableNames.join(", ")}`,
- "green",
- );
-
- // Insert wallet configurations
- if (wallets.length > 0) {
- log(`Inserting ${wallets.length} wallet(s)...`, "cyan");
- log(
- `Debug: Wallet addresses to insert: ${wallets.map((w) => w.address).join(", ")}`,
- "yellow",
- );
- let insertedCount = 0;
-
- for (let i = 0; i < wallets.length; i++) {
- const wallet = wallets[i];
- try {
- log(
- ` Processing wallet ${i + 1}/${wallets.length}: ${wallet.address}`,
- "cyan",
- );
-
- // Check if wallet already exists first
- const [existing] = await connection.execute(
- `SELECT id FROM wallets WHERE address = ?`,
- [wallet.address],
- );
-
- if (existing.length > 0) {
- log(
- ` - Wallet ${wallet.address} already exists (ID: ${existing[0].id}), skipped`,
- "yellow",
- );
- continue;
- }
-
- // Store private key as plain text (simplified approach)
- const privateKey = wallet.privateKey;
-
- const [result] = await connection.execute(
- `INSERT INTO wallets (address, private_key, blockchain) VALUES (?, ?, ?)`,
- [wallet.address, privateKey, wallet.blockchain],
- );
-
- insertedCount++;
- log(
- ` ā Wallet ${wallet.address} inserted (ID: ${result.insertId})`,
- "green",
- );
- } catch (walletError) {
- log(
- ` ā Failed to insert wallet ${wallet.address}: ${walletError.message}`,
- "red",
- );
- }
- }
-
- log(
- `ā ${insertedCount} wallet(s) successfully configured in database`,
- "green",
- );
-
- // Verify wallets were inserted
- const [walletCount] = await connection.execute(
- "SELECT COUNT(*) as count FROM wallets WHERE is_active = TRUE",
- );
- log(
- `ā Total active wallets in database: ${walletCount[0].count}`,
- "green",
- );
- } else {
- log(
- "ā ļø No wallets configured. You can add them later using setup script option 3.",
- "yellow",
- );
- }
-
- await connection.end();
- log("ā Database connection closed", "green");
- } catch (error) {
- log(`\nā Database setup failed: ${error.message}`, "red");
- log("Possible causes:", "yellow");
- log("⢠MySQL server not running", "white");
- log("⢠Incorrect credentials", "white");
- log("⢠Network connection issues", "white");
- log("⢠Permission problems", "white");
-
- if (connection) {
- try {
- await connection.end();
- } catch (closeError) {
- // Ignore connection close errors
- }
- }
-
- throw error; // Re-throw to stop setup process
- }
-
- // Final success message
- log(
- `\n${colors.bold}${colors.green}āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā${colors.reset}`,
- );
- log(
- `${colors.bold}${colors.green}ā Setup Complete! ā${colors.reset}`,
- );
- log(
- `${colors.bold}${colors.green}āāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāāā${colors.reset}\n`,
- );
-
- log("Configuration files created:", "green");
- log(" ⢠.env.publisher (environment variables)", "cyan");
- log(" ⢠docker-compose.knowledge-manager.yml (Docker setup)", "cyan");
- log(" ⢠package-addition.json (npm scripts to add)", "cyan");
-
- log("\nNext steps:", "yellow");
- log(
- "The DKG Publisher plugin is now ready! Here's how to use it:",
- "white",
- );
- log("");
- log("ā
Already completed:", "green");
- log(" ⢠Database created with tables", "white");
- log(" ⢠Configuration files generated", "white");
- log(" ⢠Wallets configured in database", "white");
- log("");
- log("š To start using:", "blue");
- log("1. Make sure MySQL and Redis are running locally", "white");
- log("2. Configure DKG Agent to load this plugin", "white");
- log("3. The service will auto-start with configured workers", "white");
- log("4. Test API endpoints or use MCP tools", "white");
- log("");
- log("š Optional:", "cyan");
- log("⢠Add more wallets: npm run setup (choose option 3)", "white");
- log(" Workers will auto-restart to match new wallet count", "white");
- log("⢠View dashboard at: /admin/queues (when agent is running)", "white");
- log("⢠Check health: GET /api/knowledge/health", "white");
-
- log("\nExample usage in DKG Agent plugin:", "yellow");
- log(
- `// The DKG Publisher plugin runs as a DKG plugin
-// and provides these API endpoints:
-
-// Register asset for publishing
-POST /api/knowledge/assets
-{
- "content": { "@context": "https://schema.org", "@type": "Certificate" },
- "metadata": { "source": "my-app", "sourceId": "cert-123" },
- "publishOptions": { "privacy": "private", "priority": 80 }
-}
-
-// Get asset status
-GET /api/knowledge/assets/{id}
-
-// Get metrics
-GET /api/knowledge/metrics/queue
-GET /api/knowledge/metrics/wallets
-GET /api/knowledge/health
-
-// MCP Tool
-knowledge-asset-publish`,
- "white",
- );
-
- log("\nā ļø Security Notes:", "red");
- log("⢠Keep your DATABASE_URL and ENCRYPTION_KEY secure", "yellow");
- log("⢠Use environment variables for production deployments", "yellow");
-
- log(`\n${colors.bold}Happy publishing! š${colors.reset}\n`);
- } catch (error) {
- log(`\nSetup failed: ${error.message}`, "red");
- console.error(error);
- process.exit(1);
- }
-}
-
-// Handle graceful shutdown
-process.on("SIGINT", () => {
- log("\nSetup cancelled by user.", "yellow");
- process.exit(0);
-});
-
-// Run setup if called directly
-if (require.main === module) {
- setup();
-}
-
-module.exports = { setup };
diff --git a/packages/plugin-dkg-publisher/src/blockchainConfig.ts b/packages/plugin-dkg-publisher/src/blockchainConfig.ts
new file mode 100644
index 00000000..7c19668b
--- /dev/null
+++ b/packages/plugin-dkg-publisher/src/blockchainConfig.ts
@@ -0,0 +1,16 @@
+export interface DkgBlockchainConfigBase {
+ name: string;
+ privateKey: string;
+ publicKey?: string;
+}
+
+export function buildDkgBlockchainConfig(
+ base: DkgBlockchainConfigBase,
+ customRpc?: string,
+) {
+ const rpc = customRpc?.trim();
+ return {
+ ...base,
+ ...(rpc ? { rpc } : {}),
+ };
+}
diff --git a/packages/plugin-dkg-publisher/src/config.ts b/packages/plugin-dkg-publisher/src/config.ts
new file mode 100644
index 00000000..aaaacb7b
--- /dev/null
+++ b/packages/plugin-dkg-publisher/src/config.ts
@@ -0,0 +1,144 @@
+import path from "path";
+import { config as dotenvConfig } from "dotenv";
+import type { KnowledgeAssetManagerConfig } from "./types";
+
+export interface PublisherRuntimeSettings {
+ dkgEndpoint: string;
+ dkgBlockchain: string;
+ workerCount: number;
+ pollFrequency: number;
+ storageType: "filesystem" | "s3";
+ storagePath: string;
+ storageBaseUrl: string;
+ redisUrl: string;
+}
+
+export interface PublisherConfigResolution {
+ config: KnowledgeAssetManagerConfig;
+ runtime: PublisherRuntimeSettings;
+ legacyEnvPath: string;
+ loadedLegacyEnv: boolean;
+}
+
+function parsePositiveInt(value: string | undefined, fallback: number) {
+ const parsed = Number.parseInt(value || "", 10);
+ return Number.isFinite(parsed) && parsed > 0 ? parsed : fallback;
+}
+
+function loadLegacyPublisherEnv(): {
+ legacyEnvPath: string;
+ loadedLegacyEnv: boolean;
+} {
+ const legacyEnvPath = path.resolve(__dirname, "..", ".env.publisher");
+ const hasPublisherConfig =
+ Boolean(process.env.DKGP_DATABASE_URL) &&
+ Boolean(
+ process.env.REDIS_URL ||
+ process.env.REDIS_HOST ||
+ process.env.REDIS_PORT ||
+ process.env.REDIS_PASSWORD,
+ );
+
+ if (!hasPublisherConfig) {
+ dotenvConfig({ path: legacyEnvPath });
+ }
+
+ return {
+ legacyEnvPath,
+ loadedLegacyEnv: !hasPublisherConfig && Boolean(process.env.DKGP_DATABASE_URL),
+ };
+}
+
+function getDefaultStorageBaseUrl(): string {
+ const baseUrl =
+ process.env.EXPO_PUBLIC_MCP_URL || `http://localhost:${process.env.PORT || "9200"}`;
+ return new URL("/storage", baseUrl).toString().replace(/\/$/, "");
+}
+
+function resolveRedisSettings() {
+ const redisUrl =
+ process.env.REDIS_URL ||
+ `redis://${process.env.REDIS_HOST || "localhost"}:${process.env.REDIS_PORT || "6379"}`;
+
+ const parsedRedisUrl = new URL(redisUrl);
+
+ return {
+ redisUrl,
+ host: parsedRedisUrl.hostname || "localhost",
+ port: Number(parsedRedisUrl.port || 6379),
+ password:
+ parsedRedisUrl.password || process.env.REDIS_PASSWORD || undefined,
+ };
+}
+
+export function resolvePublisherRuntimeConfig(): PublisherConfigResolution | null {
+ const { legacyEnvPath, loadedLegacyEnv } = loadLegacyPublisherEnv();
+ const databaseUrl = process.env.DKGP_DATABASE_URL;
+
+ if (!databaseUrl) {
+ return null;
+ }
+
+ const redis = resolveRedisSettings();
+ const storageType =
+ process.env.STORAGE_TYPE === "s3" ? "s3" : "filesystem";
+ const storagePath =
+ process.env.STORAGE_PATH || path.resolve(process.cwd(), "data/publisher");
+ const storageBaseUrl =
+ process.env.STORAGE_BASE_URL || getDefaultStorageBaseUrl();
+ const dkgEndpoint =
+ process.env.DKG_OTNODE_URL ||
+ process.env.DKG_ENDPOINT ||
+ "http://localhost:8900";
+ const dkgBlockchain = process.env.DKG_BLOCKCHAIN || "hardhat1:31337";
+ const workerCount = parsePositiveInt(process.env.WORKER_COUNT, 1);
+ const pollFrequency = parsePositiveInt(process.env.POLL_FREQUENCY, 2000);
+
+ return {
+ legacyEnvPath,
+ loadedLegacyEnv,
+ runtime: {
+ dkgEndpoint,
+ dkgBlockchain,
+ workerCount,
+ pollFrequency,
+ storageType,
+ storagePath,
+ storageBaseUrl,
+ redisUrl: redis.redisUrl,
+ },
+ config: {
+ database: {
+ connectionString: databaseUrl,
+ },
+ redis: {
+ host: redis.host,
+ port: redis.port,
+ password: redis.password,
+ },
+ wallets: [],
+ dkg: {
+ endpoint: dkgEndpoint,
+ blockchain: dkgBlockchain,
+ },
+ storage: {
+ type: storageType,
+ path: storagePath,
+ },
+ },
+ };
+}
+
+export function applyPublisherRuntimeDefaults(
+ resolution: PublisherConfigResolution,
+) {
+ process.env.DKGP_DATABASE_URL ||= resolution.config.database.connectionString;
+ process.env.REDIS_URL ||= resolution.runtime.redisUrl;
+ process.env.DKG_OTNODE_URL ||= resolution.runtime.dkgEndpoint;
+ process.env.DKG_BLOCKCHAIN ||= resolution.runtime.dkgBlockchain;
+ process.env.WORKER_COUNT ||= resolution.runtime.workerCount.toString();
+ process.env.POLL_FREQUENCY ||= resolution.runtime.pollFrequency.toString();
+ process.env.STORAGE_TYPE ||= resolution.runtime.storageType;
+ process.env.STORAGE_PATH ||= resolution.runtime.storagePath;
+ process.env.STORAGE_BASE_URL ||= resolution.runtime.storageBaseUrl;
+}
diff --git a/packages/plugin-dkg-publisher/src/database/bootstrap.ts b/packages/plugin-dkg-publisher/src/database/bootstrap.ts
index f6191d24..51a8483c 100644
--- a/packages/plugin-dkg-publisher/src/database/bootstrap.ts
+++ b/packages/plugin-dkg-publisher/src/database/bootstrap.ts
@@ -5,9 +5,9 @@ import fs from "fs";
import path from "path";
/**
- * Bootstrap migration journal for databases created by setup.js (raw DDL).
+ * Bootstrap migration journal for databases created by the legacy publisher setup script (raw DDL).
*
- * setup.js creates tables directly without Drizzle migration tracking.
+ * The legacy setup script creates tables directly without Drizzle migration tracking.
* Without this guard, runMigrations() would try to run 0000 (CREATE TABLE)
* on existing tables and fail.
*
@@ -23,7 +23,7 @@ export async function bootstrapMigrationJournal(db: Database): Promise {
return; // Already managed by Drizzle
}
- // Check if core tables exist (setup.js creates these)
+ // Check if core tables exist (legacy setup script creates these)
const coreTables = ["assets", "wallets", "publishing_attempts", "batches"];
const existingTables = await Promise.all(
coreTables.map((t) => tableExists(db, t)),
@@ -38,14 +38,14 @@ export async function bootstrapMigrationJournal(db: Database): Promise {
const missing = coreTables.filter((_, i) => !existingTables[i]);
throw new Error(
`Database is in a partial state: tables ${missing.join(", ")} are missing. ` +
- `This usually means setup.js crashed mid-creation. ` +
+ `This usually means the legacy setup script crashed mid-creation. ` +
`Please drop all tables and run setup again.`,
);
}
-  // Tables exist but no journal → setup.js-created database
+ // Tables exist but no journal - legacy setup script-created database
console.log(
- "š Detected setup.js-created database without migration journal. Bootstrapping...",
+ "Detected legacy setup script-created database without migration journal. Bootstrapping...",
);
// Create the __drizzle_migrations table (same schema Drizzle uses)
@@ -65,7 +65,7 @@ export async function bootstrapMigrationJournal(db: Database): Promise {
const journalPath = path.join(migrationsDir, "meta/_journal.json");
const journal = JSON.parse(fs.readFileSync(journalPath, "utf-8"));
- // Always seed 0000 and 0001 (setup.js schema = post-0001 state)
+ // Always seed 0000 and 0001 (legacy setup script schema = post-0001 state)
for (const entry of journal.entries) {
if (entry.idx > 1) break; // Only seed 0000 and 0001 unconditionally
@@ -114,7 +114,7 @@ async function tableExists(db: Database, tableName: string): Promise {
const result = await db.execute(
sql`SELECT COUNT(*) as cnt FROM information_schema.tables WHERE table_schema = DATABASE() AND table_name = ${tableName}`,
);
- const rows = result[0] as Array<{ cnt: number | bigint }>;
+ const rows = result[0] as unknown as Array<{ cnt: number | bigint }>;
return Number(rows[0]?.cnt) > 0;
}
@@ -126,6 +126,6 @@ async function columnExists(
const result = await db.execute(
sql`SELECT COUNT(*) as cnt FROM information_schema.columns WHERE table_schema = DATABASE() AND table_name = ${tableName} AND column_name = ${columnName}`,
);
- const rows = result[0] as Array<{ cnt: number | bigint }>;
+ const rows = result[0] as unknown as Array<{ cnt: number | bigint }>;
return Number(rows[0]?.cnt) > 0;
}
diff --git a/packages/plugin-dkg-publisher/src/database/schema.ts b/packages/plugin-dkg-publisher/src/database/schema.ts
index c04009ca..57661edc 100644
--- a/packages/plugin-dkg-publisher/src/database/schema.ts
+++ b/packages/plugin-dkg-publisher/src/database/schema.ts
@@ -34,7 +34,7 @@ export const assets = mysqlTable(
// Publishing configuration
priority: int("priority").default(50),
privacy: mysqlEnum("privacy", ["private", "public"]).default("private"),
- epochs: int("epochs").default(2),
+ epochs: int("epochs").default(12),
replications: int("replications").default(1),
maxAttempts: int("max_attempts").default(3),
diff --git a/packages/plugin-dkg-publisher/src/index.ts b/packages/plugin-dkg-publisher/src/index.ts
index 0f8a95d9..7c29e025 100644
--- a/packages/plugin-dkg-publisher/src/index.ts
+++ b/packages/plugin-dkg-publisher/src/index.ts
@@ -1,7 +1,6 @@
import { defineDkgPlugin } from "@dkg/plugins";
import { z } from "@dkg/plugin-swagger";
import type { KnowledgeAssetManagerConfig, AssetInput } from "./types";
-import { config as dotenvConfig } from "dotenv";
import path from "path";
import {
initializeServices,
@@ -10,11 +9,14 @@ import {
AssetService,
WalletService,
QueueService,
- DkgService,
} from "./services";
import { openAPIRoute } from "@dkg/plugin-swagger";
import express from "express";
import { registerMcpTools } from "./mcp/tools";
+import {
+ applyPublisherRuntimeDefaults,
+ resolvePublisherRuntimeConfig,
+} from "./config";
/**
* DKG Publisher Plugin
@@ -38,38 +40,16 @@ export default defineDkgPlugin((ctx, mcp, api) => {
console.log(
`š DKG Publisher Plugin executing at ${new Date().toISOString()} (${pluginInitTime})`,
);
- // Load configuration from package root .env file
- const envPath = path.resolve(__dirname, "..", ".env.publisher");
-
- console.log(`š§ Loading DKG Publisher config from: ${envPath}`);
- dotenvConfig({ path: envPath });
-
- console.log(`š DKGP_DATABASE_URL found: ${!!process.env.DKGP_DATABASE_URL}`);
+ const resolution = resolvePublisherRuntimeConfig();
+ console.log(`š DKGP_DATABASE_URL found: ${Boolean(resolution)}`);
- // Initialize services if configuration is provided via environment
- if (process.env.DKGP_DATABASE_URL) {
- const config: KnowledgeAssetManagerConfig = {
- database: {
- connectionString: process.env.DKGP_DATABASE_URL,
- },
- redis: {
- host: process.env.REDIS_HOST || "localhost",
- port: process.env.REDIS_PORT ? parseInt(process.env.REDIS_PORT) : 6379,
- password: process.env.REDIS_PASSWORD,
- },
- wallets: [], // Should be loaded from config or setup
- dkg: {
- endpoint: process.env.DKG_ENDPOINT,
- blockchain: process.env.DKG_BLOCKCHAIN,
- },
- encryptionKey: process.env.ENCRYPTION_KEY,
- };
+ if (resolution) {
+ const config: KnowledgeAssetManagerConfig = resolution.config;
+ applyPublisherRuntimeDefaults(resolution);
console.log(`š Initializing DKG Publisher services... (${Date.now()})`);
// Mount storage directory immediately (before services initialize)
- const storageType = process.env.STORAGE_TYPE || "filesystem";
- const storagePath =
- process.env.STORAGE_PATH || path.resolve(__dirname, "../storage");
+ const { storageType, storagePath } = resolution.runtime;
if (storageType === "filesystem") {
try {
@@ -97,6 +77,11 @@ export default defineDkgPlugin((ctx, mcp, api) => {
console.log(` - DKG Endpoint: ${config.dkg?.endpoint}`);
console.log(` - Blockchain: ${config.dkg?.blockchain}`);
console.log(`š Storage configured for: ${storageType}`);
+ if (resolution.loadedLegacyEnv) {
+ console.log(
+ ` - Legacy env imported from: ${resolution.legacyEnvPath}`,
+ );
+ }
})
.catch((error) => {
      console.error("❌ DKG Publisher Plugin initialization failed:", error);
@@ -105,7 +90,9 @@ export default defineDkgPlugin((ctx, mcp, api) => {
console.log(
      "⚠️ DKG Publisher Plugin not configured - DKGP_DATABASE_URL not found",
);
- console.log(` Looked for config in: ${envPath}`);
+ console.log(
+ ` Looked for config in: ${path.resolve(__dirname, "..", ".env.publisher")}`,
+ );
}
// Mount admin dashboard route immediately - handle service readiness internally
@@ -292,114 +279,6 @@ export default defineDkgPlugin((ctx, mcp, api) => {
}
});
- // Add SPARQL query endpoint
- api.post(
- "/api/dkg/query",
- openAPIRoute(
- {
- tag: "DKG Queries",
- summary: "Execute SPARQL Query",
- description: "Execute a SPARQL query on the DKG network",
- body: z.object({
- query: z.string().min(1, "Query cannot be empty"),
- queryType: z
- .enum(["SELECT", "CONSTRUCT", "ASK", "DESCRIBE"])
- .optional()
- .default("SELECT"),
- validate: z.boolean().optional().default(true),
- }),
- response: {
- schema: z.object({
- success: z.boolean(),
- data: z.any().optional(),
- error: z.string().optional(),
- validation: z
- .object({
- valid: z.boolean(),
- error: z.string().optional(),
- })
- .optional(),
- }),
- },
- finalizeRouteConfig: (config) => ({
- ...config,
- security: [],
- }),
- },
- async (req, res) => {
- if (!serviceContainer) {
- return res.status(503).json({
- success: false,
- error: "DKG service is starting up",
- });
- }
-
- try {
- const { query, queryType = "SELECT" } = req.body;
- const dkgService = serviceContainer.get("dkgService");
-
- // Execute SPARQL query
- const result = await dkgService.executeSparqlQuery(query, queryType);
-
- res.json(result);
- } catch (error: any) {
- res.status(500).json({
- success: false,
- error: error.message,
- });
- }
- },
- ),
- );
-
- // Add DKG asset get endpoint
- api.get(
- "/api/dkg/assets",
- openAPIRoute(
- {
- tag: "DKG Queries",
- summary: "Get DKG Asset",
- description: "Retrieve an asset from DKG by UAL",
- query: z.object({
- ual: z.string(),
- }),
- response: {
- schema: z.object({
- success: z.boolean(),
- data: z.any().optional(),
- error: z.string().optional(),
- }),
- },
- finalizeRouteConfig: (config) => ({
- ...config,
- security: [],
- }),
- },
- async (req, res) => {
- if (!serviceContainer) {
- return res.status(503).json({
- success: false,
- error: "DKG service is starting up",
- });
- }
-
- try {
- const { ual } = req.query;
- const dkgService = serviceContainer.get("dkgService");
-
- const result = await dkgService.getAsset(ual);
-
- res.json(result);
- } catch (error: any) {
- res.status(500).json({
- success: false,
- error: error.message,
- });
- }
- },
- ),
- );
-
// Register all MCP tools for publisher plugin
registerMcpTools(mcp, serviceContainer, ctx);
});
@@ -430,3 +309,8 @@ export type {
AssetStatus,
KnowledgeAssetManagerConfig,
} from "./types";
+export {
+ applyPublisherRuntimeDefaults,
+ resolvePublisherRuntimeConfig,
+} from "./config";
+export { provisionPublisherDatabase } from "./provision";
diff --git a/packages/plugin-dkg-publisher/src/mcp/tools.ts b/packages/plugin-dkg-publisher/src/mcp/tools.ts
index cfcf0060..001bc1bc 100644
--- a/packages/plugin-dkg-publisher/src/mcp/tools.ts
+++ b/packages/plugin-dkg-publisher/src/mcp/tools.ts
@@ -21,9 +21,10 @@ export function registerMcpTools(
{
title: "Publish Knowledge Asset",
description:
- "Register a JSON-LD asset for publishing to the DKG. " +
- "You can provide the content directly as a JSON object, or provide a blobId to load content from a previously uploaded file. " +
- "Use blobId for large files. Use the MCP query tools to check status and view recent published assets.",
+ "Register a JSON-LD asset for publishing to the DKG through the DKG Publisher plugin async queue. " +
+ "This tool queues publishing and returns a tracking record, not an immediate final UAL result. " +
+ "For direct synchronous publishing with immediate UAL output, use the Essentials `dkg-create` tool. " +
+ "You can provide content directly as a JSON object, or provide a blobId to load content from a previously uploaded file.",
inputSchema: {
content: z
.object({})
@@ -44,7 +45,15 @@ export function registerMcpTools(
sourceId: z.string().optional(),
})
.optional(),
- privacy: z.enum(["private", "public"]).optional(),
+ publishOptions: z
+ .object({
+ privacy: z.enum(["private", "public"]).optional(),
+ priority: z.number().min(1).max(100).optional(),
+ epochs: z.number().optional(),
+ maxAttempts: z.number().optional(),
+ })
+ .optional()
+ .describe("Optional async publishing controls."),
},
},
async (input: any, req: any) => {
@@ -91,12 +100,25 @@ export function registerMcpTools(
}
const assetService = serviceContainer.get("assetService");
+ const resolvedPrivacy = input.publishOptions?.privacy ?? "private";
+ const resolvedPriority = input.publishOptions?.priority;
+ const resolvedEpochs = input.publishOptions?.epochs;
+ const resolvedMaxAttempts = input.publishOptions?.maxAttempts;
const assetInput = {
content,
metadata: input.metadata,
publishOptions: {
- privacy: input.privacy || "private",
+ privacy: resolvedPrivacy,
+ ...(typeof resolvedPriority === "number"
+ ? { priority: resolvedPriority }
+ : {}),
+ ...(typeof resolvedEpochs === "number"
+ ? { epochs: resolvedEpochs }
+ : {}),
+ ...(typeof resolvedMaxAttempts === "number"
+ ? { maxAttempts: resolvedMaxAttempts }
+ : {}),
},
};
@@ -141,9 +163,15 @@ export function registerMcpTools(
"knowledge-asset-status-by-content-id",
{
title: "Get Knowledge Asset Information by Content ID",
- description: "Check, lookup, show, or query a knowledge asset by its JSON-LD @id (URN). Use this when the user provides a URN like 'urn:test:asset:...' or asks about a specific asset ID. Returns status, UAL, transaction hash, and publishing details.",
+ description:
+ "Check async publishing status for a specific JSON-LD @id (URN) that was submitted through the DKG Publisher plugin queue. " +
+ "Returns tracked Publisher status details (status, UAL if published, transaction hash, attempts, and errors).",
inputSchema: {
- contentId: z.string().describe("The @id from the JSON-LD content (e.g., 'urn:test:asset:manual-test-1')"),
+ contentId: z
+ .string()
+ .describe(
+ "The @id from the JSON-LD content (e.g., 'urn:test:asset:manual-test-1')",
+ ),
},
},
async (input: any, req: any) => {
@@ -189,11 +217,26 @@ export function registerMcpTools(
"knowledge-asset-list-recent",
{
title: "List Recent Knowledge Assets",
- description: "Show, list, or display recent knowledge assets. Use when user asks 'show me recent assets', 'what was published', 'last X assets', 'publishes 200-500', etc. Can filter by status (published, failed, publishing, queued). Supports pagination with offset for large queries.",
+ description:
+ "List recent async publishing records tracked by the DKG Publisher plugin. " +
+ "Use this for queue/history views (optionally filtered by status) with pagination support.",
inputSchema: {
- limit: z.number().min(1).default(20).optional().describe("Number of assets to return (default: 20)"),
- offset: z.number().min(0).default(0).optional().describe("Number of assets to skip (for pagination, default: 0)"),
- status: z.enum(["published", "failed", "publishing", "queued"]).optional().describe("Filter by status (optional)"),
+ limit: z
+ .number()
+ .min(1)
+ .default(20)
+ .optional()
+ .describe("Number of assets to return (default: 20)"),
+ offset: z
+ .number()
+ .min(0)
+ .default(0)
+ .optional()
+ .describe("Number of assets to skip (for pagination, default: 0)"),
+ status: z
+ .enum(["published", "failed", "publishing", "queued"])
+ .optional()
+ .describe("Filter by status (optional)"),
},
},
async (input: any, req: any) => {
@@ -282,11 +325,25 @@ export function registerMcpTools(
"knowledge-asset-query-by-status",
{
title: "Find Knowledge Assets by Status",
- description: "Find, show, list, or query knowledge assets by publishing status. Use when user asks 'show me all published', 'failed assets', 'what's publishing', 'publishes 100-200', etc. Supports statuses: published (successfully published), failed (publishing failed), publishing (currently being published), queued (waiting to publish). Supports pagination with offset for large queries.",
+ description:
+ "Query async publishing records in the DKG Publisher plugin by a required status. " +
+ "Use this for focused queue/operations views (published, failed, publishing, queued) with pagination.",
inputSchema: {
- status: z.enum(["published", "failed", "publishing", "queued"]).describe("The status to filter by"),
- limit: z.number().min(1).default(20).optional().describe("Maximum number of results (default: 20)"),
- offset: z.number().min(0).default(0).optional().describe("Number of assets to skip (for pagination, default: 0)"),
+ status: z
+ .enum(["published", "failed", "publishing", "queued"])
+ .describe("The status to filter by"),
+ limit: z
+ .number()
+ .min(1)
+ .default(20)
+ .optional()
+ .describe("Maximum number of results (default: 20)"),
+ offset: z
+ .number()
+ .min(0)
+ .default(0)
+ .optional()
+ .describe("Number of assets to skip (for pagination, default: 0)"),
},
},
async (input: any, req: any) => {
diff --git a/packages/plugin-dkg-publisher/src/privateKey.ts b/packages/plugin-dkg-publisher/src/privateKey.ts
new file mode 100644
index 00000000..bbf089a0
--- /dev/null
+++ b/packages/plugin-dkg-publisher/src/privateKey.ts
@@ -0,0 +1,27 @@
+const PRIVATE_KEY_BODY_PATTERN = /^[0-9a-fA-F]{64}$/;
+
+export function normalizePrivateKey(privateKey: string) {
+ const trimmed = privateKey.trim();
+ const body = trimmed.startsWith("0x") ? trimmed.slice(2) : trimmed;
+
+ if (!PRIVATE_KEY_BODY_PATTERN.test(body)) {
+ throw new Error(
+ "Private key must be 64 hexadecimal characters, with or without a 0x prefix",
+ );
+ }
+
+ return `0x${body}`;
+}
+
+export function stripPrivateKeyPrefix(privateKey: string) {
+ return normalizePrivateKey(privateKey).slice(2);
+}
+
+export function isValidPrivateKey(privateKey: string) {
+ try {
+ normalizePrivateKey(privateKey);
+ return true;
+ } catch {
+ return false;
+ }
+}
diff --git a/packages/plugin-dkg-publisher/src/provision.ts b/packages/plugin-dkg-publisher/src/provision.ts
new file mode 100644
index 00000000..0ecfff29
--- /dev/null
+++ b/packages/plugin-dkg-publisher/src/provision.ts
@@ -0,0 +1,98 @@
+import mysql from "mysql2/promise";
+import { Wallet } from "ethers";
+import { runMigrations } from "./database";
+import { normalizePrivateKey, stripPrivateKeyPrefix } from "./privateKey";
+
+export interface PublisherWalletSeed {
+ privateKey: string;
+ blockchain: string;
+}
+
+export interface PublisherProvisionResult {
+ databaseCreated: boolean;
+ walletsInserted: number;
+}
+
+const MYSQL_IDENTIFIER_PATTERN = /^[A-Za-z0-9_]+$/;
+
+function getDatabaseName(connectionString: string) {
+ const databaseUrl = new URL(connectionString);
+ const databaseName = databaseUrl.pathname.replace(/^\//, "");
+
+ if (!databaseName) {
+ throw new Error("DKGP_DATABASE_URL must include a database name");
+ }
+
+ if (!MYSQL_IDENTIFIER_PATTERN.test(databaseName)) {
+ throw new Error(
+ "DKGP_DATABASE_URL contains an invalid database name. Use letters, numbers, and underscores only.",
+ );
+ }
+
+ return { databaseUrl, databaseName };
+}
+
+function getAdminConnectionString(connectionString: string) {
+ const { databaseUrl } = getDatabaseName(connectionString);
+ databaseUrl.pathname = "/";
+ return databaseUrl.toString();
+}
+
+export async function provisionPublisherDatabase(
+ connectionString: string,
+ walletSeeds: PublisherWalletSeed[] = [],
+): Promise {
+ const { databaseName } = getDatabaseName(connectionString);
+ let databaseCreated = false;
+ let walletsInserted = 0;
+
+ const adminConnection = await mysql.createConnection(
+ getAdminConnectionString(connectionString),
+ );
+
+ try {
+ const [existingDatabases] = await adminConnection.execute(
+ "SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = ?",
+ [databaseName],
+ );
+ databaseCreated = (existingDatabases as unknown[]).length === 0;
+
+ await adminConnection.query(
+ `CREATE DATABASE IF NOT EXISTS \`${databaseName}\``,
+ );
+ } finally {
+ await adminConnection.end();
+ }
+
+ await runMigrations(connectionString);
+
+ if (!walletSeeds.length) {
+ return { databaseCreated, walletsInserted };
+ }
+
+ const databaseConnection = await mysql.createConnection(connectionString);
+
+ try {
+ for (const walletSeed of walletSeeds) {
+ const normalizedPrivateKey = normalizePrivateKey(walletSeed.privateKey);
+ const storedPrivateKey = stripPrivateKeyPrefix(walletSeed.privateKey);
+ const address = new Wallet(normalizedPrivateKey).address;
+ const [existingWallets] = await databaseConnection.execute(
+ "SELECT id FROM wallets WHERE address = ? LIMIT 1",
+ [address],
+ );
+
+ if ((existingWallets as unknown[]).length === 0) {
+ await databaseConnection.execute(
+ "INSERT INTO wallets (address, private_key, blockchain) VALUES (?, ?, ?)",
+ [address, storedPrivateKey, walletSeed.blockchain],
+ );
+ walletsInserted += 1;
+ }
+ }
+ } finally {
+ await databaseConnection.end();
+ }
+
+ return { databaseCreated, walletsInserted };
+}
diff --git a/packages/plugin-dkg-publisher/src/services/AssetService.ts b/packages/plugin-dkg-publisher/src/services/AssetService.ts
index acf0c2c3..d2d9d1e9 100644
--- a/packages/plugin-dkg-publisher/src/services/AssetService.ts
+++ b/packages/plugin-dkg-publisher/src/services/AssetService.ts
@@ -31,7 +31,7 @@ export class AssetService extends EventEmitter {
status: "queued",
queuedAt: sql`NOW()`,
privacy: input.publishOptions?.privacy || "private",
- epochs: input.publishOptions?.epochs || 2,
+ epochs: input.publishOptions?.epochs || 12,
maxAttempts: input.publishOptions?.maxAttempts || 3,
retryCount: 0,
});
@@ -367,7 +367,10 @@ export class AssetService extends EventEmitter {
workerId: process.pid.toString(),
walletAddress: wallet.address,
walletId: wallet.id,
- otnodeUrl: process.env.DKG_ENDPOINT,
+ otnodeUrl:
+ process.env.DKG_OTNODE_URL ||
+ process.env.DKG_ENDPOINT ||
+ "http://localhost:8900",
blockchain: wallet.blockchain,
status: "started",
startedAt: sql`NOW()`,
diff --git a/packages/plugin-dkg-publisher/src/services/DkgService.ts b/packages/plugin-dkg-publisher/src/services/DkgService.ts
index 0b61c8de..8aa7547a 100644
--- a/packages/plugin-dkg-publisher/src/services/DkgService.ts
+++ b/packages/plugin-dkg-publisher/src/services/DkgService.ts
@@ -1,5 +1,6 @@
import DKG from "dkg.js";
import { WalletService } from "./WalletService";
+import { buildDkgBlockchainConfig } from "../blockchainConfig";
export interface SparqlQueryResult {
success: boolean;
@@ -16,12 +17,17 @@ export interface DkgGetResult {
export class DkgService {
private dkgEndpoint: string;
private dkgBlockchain: string;
+ private dkgCustomRpc?: string;
private queryClient: any | null = null;
private walletService: WalletService;
constructor(walletService: WalletService) {
- this.dkgEndpoint = process.env.DKG_ENDPOINT || "http://localhost:8900";
+ this.dkgEndpoint =
+ process.env.DKG_OTNODE_URL ||
+ process.env.DKG_ENDPOINT ||
+ "http://localhost:8900";
this.dkgBlockchain = process.env.DKG_BLOCKCHAIN || "hardhat1:31337";
+ this.dkgCustomRpc = process.env.DKG_NODE_CUSTOM_RPC?.trim() || undefined;
this.walletService = walletService;
this.initializeQueryClient();
}
@@ -50,11 +56,11 @@ export class DkgService {
this.queryClient = new DKG({
endpoint: `${endpointUrl.protocol}//${endpointUrl.hostname}`,
port: endpointUrl.port,
- blockchain: {
+ blockchain: buildDkgBlockchainConfig({
name: this.dkgBlockchain,
publicKey: wallet.address,
privateKey: wallet.privateKey,
- },
+ }, this.dkgCustomRpc),
maxNumberOfRetries: 100,
frequency: 2,
contentType: "all",
@@ -182,11 +188,11 @@ export class DkgService {
const walletDkgClient = new DKG({
endpoint: `${endpointUrl.protocol}//${endpointUrl.hostname}`,
port: endpointUrl.port,
- blockchain: {
+ blockchain: buildDkgBlockchainConfig({
name: this.dkgBlockchain,
publicKey: wallet.address,
privateKey: privateKey,
- },
+ }, this.dkgCustomRpc),
maxNumberOfRetries: 600,
frequency: 2,
contentType: "all",
@@ -319,7 +325,14 @@ export class DkgService {
*/
async getNodeInfo(): Promise {
try {
- const dkgClient = this.createQueryDKGClient();
+ if (!this.queryClient) {
+ await this.initializeQueryClient();
+ if (!this.queryClient) {
+ throw new Error("Failed to initialize DKG query client");
+ }
+ }
+
+ const dkgClient = this.queryClient;
const nodeInfo = await dkgClient.node.info();
return {
diff --git a/packages/plugin-dkg-publisher/src/services/PublishingService.ts b/packages/plugin-dkg-publisher/src/services/PublishingService.ts
index 797c9178..87410d4d 100644
--- a/packages/plugin-dkg-publisher/src/services/PublishingService.ts
+++ b/packages/plugin-dkg-publisher/src/services/PublishingService.ts
@@ -17,11 +17,8 @@ export interface PublishResult {
export class PublishingService {
private dkgService: DkgService;
- constructor(
- private db: Database,
- dkgService?: DkgService,
- ) {
- this.dkgService = dkgService || new DkgService();
+ constructor(private db: Database, dkgService: DkgService) {
+ this.dkgService = dkgService;
}
/**
@@ -89,7 +86,7 @@ export class PublishingService {
// Try direct filesystem access
const storagePath =
process.env.STORAGE_PATH ||
- path.resolve(__dirname, "../../storage");
+ path.resolve(process.cwd(), "data/publisher");
const filePath = require("path").resolve(storagePath, filename);
console.log(`š Trying direct file access: ${filePath}`);
diff --git a/packages/plugin-dkg-publisher/src/services/QueueService.ts b/packages/plugin-dkg-publisher/src/services/QueueService.ts
index 0f366967..bd6c1fe2 100644
--- a/packages/plugin-dkg-publisher/src/services/QueueService.ts
+++ b/packages/plugin-dkg-publisher/src/services/QueueService.ts
@@ -2,7 +2,7 @@ import { EventEmitter } from "events";
import { Queue, Worker, QueueEvents } from "bullmq";
import IORedis from "ioredis";
import { createBullBoard } from "@bull-board/api";
-import { BullMQAdapter } from "@bull-board/api/bullMQAdapter";
+import { BullMQAdapter } from "@bull-board/api/bullMQAdapter.js";
import { ExpressAdapter } from "@bull-board/express";
import { PublishingService } from "./PublishingService";
import { WalletService } from "./WalletService";
diff --git a/packages/plugin-dkg-publisher/src/services/StorageService.ts b/packages/plugin-dkg-publisher/src/services/StorageService.ts
index 398483cf..9654a590 100644
--- a/packages/plugin-dkg-publisher/src/services/StorageService.ts
+++ b/packages/plugin-dkg-publisher/src/services/StorageService.ts
@@ -8,10 +8,13 @@ export class StorageService {
constructor() {
this.storagePath =
- process.env.STORAGE_PATH || path.resolve(__dirname, "../storage");
- const serverPort = process.env.PORT || "9200";
+ process.env.STORAGE_PATH || path.resolve(process.cwd(), "data/publisher");
+ const baseUrl =
+ process.env.EXPO_PUBLIC_MCP_URL ||
+ `http://localhost:${process.env.PORT || "9200"}`;
this.baseUrl =
- process.env.STORAGE_BASE_URL || `http://localhost:${serverPort}/storage`;
+ process.env.STORAGE_BASE_URL ||
+ new URL("/storage", baseUrl).toString().replace(/\/$/, "");
console.log(`š StorageService initialized:`);
console.log(` - storagePath: ${this.storagePath}`);
console.log(` - resolved path: ${path.resolve(this.storagePath)}`);
diff --git a/packages/plugin-dkg-publisher/tests/blockchain-config.spec.ts b/packages/plugin-dkg-publisher/tests/blockchain-config.spec.ts
new file mode 100644
index 00000000..1d7c10d0
--- /dev/null
+++ b/packages/plugin-dkg-publisher/tests/blockchain-config.spec.ts
@@ -0,0 +1,30 @@
+import { expect } from "chai";
+import { buildDkgBlockchainConfig } from "../dist/blockchainConfig.mjs";
+
+describe("buildDkgBlockchainConfig", () => {
+ it("omits rpc when custom rpc is missing or blank", () => {
+ const base = {
+ name: "otp:20430",
+ privateKey: "a".repeat(64),
+ publicKey: "0x123",
+ };
+
+ const withoutRpc = buildDkgBlockchainConfig(base);
+ const blankRpc = buildDkgBlockchainConfig(base, " ");
+
+ expect(withoutRpc).to.not.have.property("rpc");
+ expect(blankRpc).to.not.have.property("rpc");
+ });
+
+ it("includes trimmed rpc when provided", () => {
+ const base = {
+ name: "otp:20430",
+ privateKey: "a".repeat(64),
+ publicKey: "0x123",
+ };
+
+ const config = buildDkgBlockchainConfig(base, " https://rpc.example ");
+
+ expect(config).to.have.property("rpc", "https://rpc.example");
+ });
+});
diff --git a/packages/plugin-dkg-publisher/tests/dkg-publisher.spec.ts b/packages/plugin-dkg-publisher/tests/dkg-publisher.spec.ts
index ec20da41..b0ca7e4a 100644
--- a/packages/plugin-dkg-publisher/tests/dkg-publisher.spec.ts
+++ b/packages/plugin-dkg-publisher/tests/dkg-publisher.spec.ts
@@ -83,8 +83,18 @@ describe("@dkg/plugin-dkg-publisher checks", () => {
const publishTool = tools.find((tool) => tool.name === "knowledge-asset-publish");
expect(publishTool).to.not.equal(undefined);
expect(publishTool!.title).to.equal("Publish Knowledge Asset");
- expect(publishTool!.description).to.equal("Register a JSON-LD asset for publishing to the DKG");
+ expect(publishTool!.description).to.include(
+ "Register a JSON-LD asset for publishing to the DKG",
+ );
+ expect(publishTool!.description).to.include("async queue");
expect(publishTool!.inputSchema).to.not.equal(undefined);
+ expect((publishTool!.inputSchema as any).publishOptions).to.not.equal(
+ undefined,
+ );
+ expect((publishTool!.inputSchema as any).privacy).to.equal(undefined);
+ expect((publishTool!.inputSchema as any).priority).to.equal(undefined);
+ expect((publishTool!.inputSchema as any).epochs).to.equal(undefined);
+ expect((publishTool!.inputSchema as any).maxAttempts).to.equal(undefined);
});
it("should register API routes correctly", async () => {
@@ -159,6 +169,25 @@ describe("@dkg/plugin-dkg-publisher checks", () => {
});
describe("Data Structure Processing", () => {
+ it("should accept optional async publish controls", async () => {
+ const withPublishControls = createTestAsset({
+ publishOptions: {
+ privacy: "public",
+ priority: 25,
+ epochs: 4,
+ maxAttempts: 5,
+ },
+ });
+
+ const result = await mockMcpClient.callTool({
+ name: "knowledge-asset-publish",
+ arguments: withPublishControls,
+ });
+
+ expect(result.content).to.be.an("array");
+ expect(result.content).to.have.length.greaterThan(0);
+ });
+
it("should process different content types correctly", async () => {
const stringContent = createTestAsset({
content: {
@@ -362,4 +391,4 @@ describe("@dkg/plugin-dkg-publisher checks", () => {
}
});
});
-});
\ No newline at end of file
+});