diff --git a/bases/shared/instagoric-server/api/faucet-app/homeHandler.js b/bases/shared/instagoric-server/api/faucet-app/homeHandler.js
new file mode 100644
index 0000000..6433f72
--- /dev/null
+++ b/bases/shared/instagoric-server/api/faucet-app/homeHandler.js
@@ -0,0 +1,87 @@
+// @ts-check
+import { getDenoms } from '../../utils.js';
+import { AG0_MODE, COMMANDS } from '../../constants.js';
+
+export async function homeRoute(req, res) {
+ const denoms = await getDenoms();
+ let denomHtml = '';
+ denoms.forEach(denom => {
+ denomHtml += ``;
+ });
+ const denomsDropDownHtml = `
`;
+
+ const clientText = !AG0_MODE
+ ? `
+
+ `
+ : '';
+ res.send(
+ `Faucet
+
+
+
+ welcome to the faucet
+
+
+
+
+
+
+ `,
+ );
+}
diff --git a/bases/shared/instagoric-server/api/public-app/homeHandler.js b/bases/shared/instagoric-server/api/public-app/homeHandler.js
new file mode 100644
index 0000000..ef19146
--- /dev/null
+++ b/bases/shared/instagoric-server/api/public-app/homeHandler.js
@@ -0,0 +1,77 @@
+// @ts-check
+import {
+ NETDOMAIN,
+ NETNAME,
+ namespace,
+ revision,
+ DOCKERIMAGE,
+ DOCKERTAG,
+ podname,
+ FAKE,
+ chainId,
+} from '../../constants.js';
+import { getDockerImage } from '../../utils.js';
+import process from 'process';
+export async function homeRoute(req, res) {
+ let CLUSTER_NAME;
+ let dockerImage = await getDockerImage(namespace, podname, FAKE);
+ const domain = NETDOMAIN;
+ const netname = NETNAME;
+ const gcloudLoggingDatasource = 'P470A85C5170C7A1D';
+ const logsQuery = {
+ '62l': {
+ datasource: gcloudLoggingDatasource,
+ queries: [
+ {
+ queryText: `resource.labels.container_name=\"log-slog\" resource.labels.namespace_name=\"${namespace}\" resource.labels.cluster_name=\"${CLUSTER_NAME}\"`,
+ },
+ ],
+ },
+ };
+ const logsUrl = `https://monitor${domain}/explore?schemaVersion=1&panes=${encodeURI(
+ JSON.stringify(logsQuery),
+ )}&orgId=1`;
+ const dashboardUrl = `https://monitor${domain}/d/cdzujrg5sxvy8f/agoric-chain-metrics?var-cluster=${CLUSTER_NAME}&var-namespace=${namespace}&var-chain_id=${chainId}&orgId=1`;
+ res.send(`
+ Instagoric
+ ██╗███╗ ██╗███████╗████████╗ █████╗ ██████╗ ██████╗ ██████╗ ██╗ ██████╗
+ ██║████╗ ██║██╔════╝╚══██╔══╝██╔══██╗██╔════╝ ██╔═══██╗██╔══██╗██║██╔════╝
+ ██║██╔██╗ ██║███████╗ ██║ ███████║██║ ███╗██║ ██║██████╔╝██║██║
+ ██║██║╚██╗██║╚════██║ ██║ ██╔══██║██║ ██║██║ ██║██╔══██╗██║██║
+ ██║██║ ╚████║███████║ ██║ ██║ ██║╚██████╔╝╚██████╔╝██║ ██║██║╚██████╗
+ ╚═╝╚═╝ ╚═══╝╚══════╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═════╝
+
+ Chain: ${chainId}${
+ process.env.NETPURPOSE !== undefined
+ ? `\nPurpose: ${process.env.NETPURPOSE}`
+ : ''
+ }
+ Revision: ${revision}
+ Docker Image: ${DOCKERIMAGE || dockerImage.split(':')[0]}:${
+ DOCKERTAG || dockerImage.split(':')[1]
+ }
+ Revision Link: https://github.com/Agoric/agoric-sdk/tree/${revision}
+ Network Config: https://${netname}${domain}/network-config
+ Docker Compose: https://${netname}${domain}/docker-compose.yml
+ RPC: https://${netname}.rpc${domain}
+ gRPC: https://${netname}.grpc${domain}
+ API: https://${netname}.api${domain}
+ Explorer: https://${netname}.explorer${domain}
+ Faucet: https://${netname}.faucet${domain}
+ Logs: Click Here
+ Monitoring Dashboard: Click Here
+ VStorage: https://vstorage.agoric.net/?endpoint=https://${
+ netname === 'followmain' ? 'main-a' : netname
+ }.rpc.agoric.net
+
+ UIs:
+ Main-branch Wallet: https://main.wallet-app.pages.dev/wallet/
+ Main-branch Vaults: https://dapp-inter-test.pages.dev/?network=${netname}
+
+ ----
+ See more at https://agoric.com
+
+ `);
+}
diff --git a/bases/shared/instagoric-server/constants.js b/bases/shared/instagoric-server/constants.js
new file mode 100644
index 0000000..30512c6
--- /dev/null
+++ b/bases/shared/instagoric-server/constants.js
@@ -0,0 +1,74 @@
+// @ts-check
+import { fs } from 'zx';
+export const AG0_MODE = (process.env.AG0_MODE || 'false') === 'true';
+
+export const COMMANDS = {
+ 'SEND_BLD/IBC': 'send_bld_ibc',
+ SEND_AND_PROVISION_IST: 'send_ist_and_provision',
+ FUND_PROV_POOL: 'fund_provision_pool',
+ CUSTOM_DENOMS_LIST: 'custom_denoms_list',
+};
+
+export const BASE_AMOUNT = '25000000';
+
+export const CLIENT_AMOUNT =
+ process.env.CLIENT_AMOUNT || '25000000uist,25000000ibc/toyusdc';
+export const DELEGATE_AMOUNT =
+ process.env.DELEGATE_AMOUNT ||
+ '75000000ubld,25000000ibc/toyatom,25000000ibc/toyellie,25000000ibc/toyusdc,25000000ibc/toyollie';
+
+export const PROVISIONING_POOL_ADDR =
+ 'agoric1megzytg65cyrgzs6fvzxgrcqvwwl7ugpt62346';
+
+export const TRANSACTION_STATUS = {
+ FAILED: 1000,
+ NOT_FOUND: 1001,
+ SUCCESSFUL: 1002,
+};
+
+export const DOCKERTAG = process.env.DOCKERTAG; // Optional.
+export const DOCKERIMAGE = process.env.DOCKERIMAGE; // Optional.
+
+export const NETNAME = process.env.NETNAME || 'devnet';
+export const NETDOMAIN = process.env.NETDOMAIN || '.agoric.net';
+export const agBinary = AG0_MODE ? 'ag0' : 'agd';
+
+export const FAUCET_KEYNAME =
+ process.env.FAUCET_KEYNAME || process.env.WHALE_KEYNAME || 'self';
+
+export const podname = process.env.POD_NAME || 'validator-primary';
+export const INCLUDE_SEED = process.env.SEED_ENABLE || 'yes';
+export const NODE_ID =
+ process.env.NODE_ID || 'fb86a0993c694c981a28fa1ebd1fd692f345348b';
+export const RPC_PORT = 26657;
+export const agoricHome = process.env.AGORIC_HOME;
+export const chainId = process.env.CHAIN_ID;
+
+export const namespace =
+ process.env.NAMESPACE ||
+ fs.readFileSync('/var/run/secrets/kubernetes.io/serviceaccount/namespace', {
+ encoding: 'utf8',
+ flag: 'r',
+ });
+
+export const FAKE = process.env.FAKE || process.argv[2] === '--fake';
+
+let revision;
+if (FAKE) {
+ revision = 'fake_revision';
+} else {
+ revision =
+ process.env.AG0_MODE === 'true'
+ ? 'ag0'
+ : fs
+ .readFileSync(
+ '/usr/src/agoric-sdk/packages/solo/public/git-revision.txt',
+ {
+ encoding: 'utf8',
+ flag: 'r',
+ },
+ )
+ .trim();
+}
+
+export { revision };
diff --git a/bases/shared/instagoric-server/server.js b/bases/shared/instagoric-server/server.js
index bed7166..635fd04 100644
--- a/bases/shared/instagoric-server/server.js
+++ b/bases/shared/instagoric-server/server.js
@@ -3,54 +3,45 @@ import './lockdown.js';
import process from 'process';
import express from 'express';
-import https from 'https';
import tmp from 'tmp';
import { $, fetch, fs, nothrow, sleep } from 'zx';
+import {
+ getTransactionStatus,
+ sendFunds,
+ getDockerImage,
+ getServices,
+ getNetworkConfig,
+ dockerComposeYaml,
+ DataCache
+} from './utils.js';
+import {
+ AG0_MODE,
+ COMMANDS,
+ BASE_AMOUNT,
+ CLIENT_AMOUNT,
+ DELEGATE_AMOUNT,
+ PROVISIONING_POOL_ADDR,
+ TRANSACTION_STATUS,
+ DOCKERTAG,
+ DOCKERIMAGE,
+ NETNAME,
+ NETDOMAIN,
+ agBinary,
+ FAUCET_KEYNAME,
+ podname,
+ RPC_PORT,
+ agoricHome,
+ chainId,
+ namespace,
+ FAKE,
+} from './constants.js';
+import { homeRoute as faucetAppHomeRoute } from './api/faucet-app/homeHandler.js';
+import { homeRoute as publicAppHomeRoute } from './api/public-app/homeHandler.js';
import { makeSubscriptionKit } from '@agoric/notifier';
const { details: X } = globalThis.assert;
-const BASE_AMOUNT = "25000000";
-// Adding here to avoid ReferenceError for local server. Not needed for k8
-let CLUSTER_NAME;
-
-const CLIENT_AMOUNT =
- process.env.CLIENT_AMOUNT || '25000000uist,25000000ibc/toyusdc';
-const DELEGATE_AMOUNT =
- process.env.DELEGATE_AMOUNT ||
- '75000000ubld,25000000ibc/toyatom,25000000ibc/toyellie,25000000ibc/toyusdc,25000000ibc/toyollie';
-
-const COMMANDS = {
- "SEND_BLD/IBC": "send_bld_ibc",
- "SEND_AND_PROVISION_IST": "send_ist_and_provision",
- "FUND_PROV_POOL": "fund_provision_pool",
- "CUSTOM_DENOMS_LIST": "custom_denoms_list",
-};
-
-
-const PROVISIONING_POOL_ADDR = 'agoric1megzytg65cyrgzs6fvzxgrcqvwwl7ugpt62346';
-
-const DOCKERTAG = process.env.DOCKERTAG; // Optional.
-const DOCKERIMAGE = process.env.DOCKERIMAGE; // Optional.
-const FAUCET_KEYNAME =
- process.env.FAUCET_KEYNAME || process.env.WHALE_KEYNAME || 'self';
-const NETNAME = process.env.NETNAME || 'devnet';
-const NETDOMAIN = process.env.NETDOMAIN || '.agoric.net';
-const AG0_MODE = (process.env.AG0_MODE || 'false') === 'true';
-const agBinary = AG0_MODE ? 'ag0' : 'agd';
-const podname = process.env.POD_NAME || 'validator-primary';
-const INCLUDE_SEED = process.env.SEED_ENABLE || 'yes';
-const NODE_ID =
- process.env.NODE_ID || 'fb86a0993c694c981a28fa1ebd1fd692f345348b';
-const RPC_PORT = 26657;
-const TRANSACTION_STATUS = {
- FAILED: 1000,
- NOT_FOUND: 1001,
- SUCCESSFUL: 1002,
-};
-
-const FAKE = process.env.FAKE || process.argv[2] === '--fake';
if (FAKE) {
console.log('FAKE MODE');
const tmpDir = await new Promise((resolve, reject) => {
@@ -67,60 +58,13 @@ if (FAKE) {
process.env.AGORIC_HOME = tmpDir;
}
-const agoricHome = process.env.AGORIC_HOME;
assert(agoricHome, X`AGORIC_HOME not set`);
-
-const chainId = process.env.CHAIN_ID;
assert(chainId, X`CHAIN_ID not set`);
-let dockerImage;
-
-const namespace =
- process.env.NAMESPACE ||
- fs.readFileSync('/var/run/secrets/kubernetes.io/serviceaccount/namespace', {
- encoding: 'utf8',
- flag: 'r',
- });
-
-let revision;
-if (FAKE) {
- revision = 'fake_revision';
-} else {
- revision =
- process.env.AG0_MODE === 'true'
- ? 'ag0'
- : fs.readFileSync('/usr/src/agoric-sdk/packages/solo/public/git-revision.txt', {
- encoding: 'utf8',
- flag: 'r',
- }).trim();
-}
-
/**
* @param {string} relativeUrl
* @returns {Promise}
*/
-const makeKubernetesRequest = async relativeUrl => {
- const ca = await fs.readFile(
- '/var/run/secrets/kubernetes.io/serviceaccount/ca.crt',
- 'utf8',
- );
- const token = await fs.readFile(
- '/var/run/secrets/kubernetes.io/serviceaccount/token',
- 'utf8',
- );
- const url = new URL(
- relativeUrl,
- 'https://kubernetes.default.svc.cluster.local',
- );
- const response = await fetch(url.href, {
- headers: {
- Authorization: `Bearer ${token}`,
- Accept: 'application/json',
- },
- agent: new https.Agent({ ca }),
- });
- return response.json();
-};
const getMetricsRequest = async relativeUrl => {
const url = new URL('http://localhost:26661/metrics');
@@ -129,100 +73,6 @@ const getMetricsRequest = async relativeUrl => {
};
// eslint-disable-next-line no-unused-vars
-async function getNodeId(node) {
- const response = await fetch(
- `http://${node}.${namespace}.svc.cluster.local:26657/status`,
- );
- return response.json();
-}
-
-async function getServices() {
- if (FAKE) {
- return new Map([
- ['validator-primary-ext', '1.1.1.1'],
- ['seed-ext', '1.1.1.2'],
- ]);
- }
- const services = await makeKubernetesRequest(
- `/api/v1/namespaces/${namespace}/services/`,
- );
- const map1 = new Map();
- for (const item of services.items) {
- const ingress = item.status?.loadBalancer?.ingress;
- if (ingress?.length > 0) {
- map1.set(item.metadata.name, ingress[0].ip);
- }
- }
- return map1;
-}
-
-const getNetworkConfig = async () => {
- const svc = await getServices();
- const file = FAKE
- ? './resources/network_info.json'
- : '/config/network/network_info.json';
- const buf = await fs.readFile(file, 'utf8');
- const ap = JSON.parse(buf);
- ap.chainName = chainId;
- ap.gci = `https://${NETNAME}.rpc${NETDOMAIN}:443/genesis`;
- ap.peers[0] = ap.peers[0].replace(
- 'validator-primary.instagoric.svc.cluster.local',
- svc.get('validator-primary-ext') ||
- `${podname}.${namespace}.svc.cluster.local`,
- );
- ap.peers[0] = ap.peers[0].replace(
- 'fb86a0993c694c981a28fa1ebd1fd692f345348b',
- `${NODE_ID}`,
- );
- ap.rpcAddrs = [`https://${NETNAME}.rpc${NETDOMAIN}:443`];
- ap.apiAddrs = [`https://${NETNAME}.api${NETDOMAIN}:443`];
- if (INCLUDE_SEED === 'yes') {
- ap.seeds[0] = ap.seeds[0].replace(
- 'seed.instagoric.svc.cluster.local',
- svc.get('seed-ext') || `seed.${namespace}.svc.cluster.local`,
- );
- } else {
- ap.seeds = [];
- }
-
- return JSON.stringify(ap);
-};
-class DataCache {
- constructor(fetchFunction, minutesToLive = 10) {
- this.millisecondsToLive = minutesToLive * 60 * 1000;
- this.fetchFunction = fetchFunction;
- this.cache = null;
- this.getData = this.getData.bind(this);
- this.resetCache = this.resetCache.bind(this);
- this.isCacheExpired = this.isCacheExpired.bind(this);
- this.fetchDate = new Date(0);
- }
-
- isCacheExpired() {
- return (
- this.fetchDate.getTime() + this.millisecondsToLive < new Date().getTime()
- );
- }
-
- getData() {
- if (!this.cache || this.isCacheExpired()) {
- console.log('fetch');
- return this.fetchFunction().then(data => {
- this.cache = data;
- this.fetchDate = new Date();
- return data;
- });
- } else {
- console.log('cache hit');
-
- return Promise.resolve(this.cache);
- }
- }
-
- resetCache() {
- this.fetchDate = new Date(0);
- }
-}
const ipsCache = new DataCache(getServices, 0.1);
const networkConfig = new DataCache(getNetworkConfig, 0.5);
const metricsCache = new DataCache(getMetricsRequest, 0.1);
@@ -255,50 +105,7 @@ publicapp.use(logReq);
privateapp.use(logReq);
faucetapp.use(logReq);
-publicapp.get('/', (req, res) => {
- const domain = NETDOMAIN;
- const netname = NETNAME;
- const gcloudLoggingDatasource = 'P470A85C5170C7A1D'
- const logsQuery = { "62l": { "datasource": gcloudLoggingDatasource, "queries": [{ "queryText": `resource.labels.container_name=\"log-slog\" resource.labels.namespace_name=\"${namespace}\" resource.labels.cluster_name=\"${CLUSTER_NAME}\"`}] } }
- const logsUrl = `https://monitor${domain}/explore?schemaVersion=1&panes=${encodeURI(JSON.stringify(logsQuery))}&orgId=1`
- const dashboardUrl = `https://monitor${domain}/d/cdzujrg5sxvy8f/agoric-chain-metrics?var-cluster=${CLUSTER_NAME}&var-namespace=${namespace}&var-chain_id=${chainId}&orgId=1`
- res.send(`
-Instagoric
-██╗███╗ ██╗███████╗████████╗ █████╗ ██████╗ ██████╗ ██████╗ ██╗ ██████╗
-██║████╗ ██║██╔════╝╚══██╔══╝██╔══██╗██╔════╝ ██╔═══██╗██╔══██╗██║██╔════╝
-██║██╔██╗ ██║███████╗ ██║ ███████║██║ ███╗██║ ██║██████╔╝██║██║
-██║██║╚██╗██║╚════██║ ██║ ██╔══██║██║ ██║██║ ██║██╔══██╗██║██║
-██║██║ ╚████║███████║ ██║ ██║ ██║╚██████╔╝╚██████╔╝██║ ██║██║╚██████╗
-╚═╝╚═╝ ╚═══╝╚══════╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═════╝
-
-Chain: ${chainId}${process.env.NETPURPOSE !== undefined
- ? `\nPurpose: ${process.env.NETPURPOSE}`
- : ''
- }
-Revision: ${revision}
-Docker Image: ${DOCKERIMAGE || dockerImage.split(':')[0]}:${DOCKERTAG || dockerImage.split(':')[1]
- }
-Revision Link: https://github.com/Agoric/agoric-sdk/tree/${revision}
-Network Config: https://${netname}${domain}/network-config
-Docker Compose: https://${netname}${domain}/docker-compose.yml
-RPC: https://${netname}.rpc${domain}
-gRPC: https://${netname}.grpc${domain}
-API: https://${netname}.api${domain}
-Explorer: https://${netname}.explorer${domain}
-Faucet: https://${netname}.faucet${domain}
-Logs: Click Here
-Monitoring Dashboard: Click Here
-VStorage: https://vstorage.agoric.net/?endpoint=https://${netname === 'followmain' ? 'main-a' : netname}.rpc.agoric.net
-
-UIs:
-Main-branch Wallet: https://main.wallet-app.pages.dev/wallet/
-Main-branch Vaults: https://dapp-inter-test.pages.dev/?network=${netname}
-
-----
-See more at https://agoric.com
-
- `);
-});
+publicapp.get('/', publicAppHomeRoute);
publicapp.get('/network-config', async (req, res) => {
res.setHeader('Content-type', 'text/plain;charset=UTF-8');
@@ -313,29 +120,8 @@ publicapp.get('/metrics-config', async (req, res) => {
res.send(result);
});
-const dockerComposeYaml = (dockerimage, dockertag, netname, netdomain) => `\
-version: "2.2"
-services:
- ag-solo:
- image: ${dockerimage}:\${SDK_TAG:-${dockertag}}
- ports:
- - "\${HOST_PORT:-8000}:\${PORT:-8000}"
- volumes:
- - "ag-solo-state:/state"
- - "$HOME/.agoric:/root/.agoric"
- environment:
- - "AG_SOLO_BASEDIR=/state/\${SOLO_HOME:-${dockertag}}"
- entrypoint: ag-solo
- command:
- - setup
- - --webhost=0.0.0.0
- - --webport=\${PORT:-8000}
- - --netconfig=\${NETCONFIG_URL:-https://${netname}${netdomain}/network-config}
-volumes:
- ag-solo-state:
-`;
-
-publicapp.get('/docker-compose.yml', (req, res) => {
+publicapp.get('/docker-compose.yml', async (req, res) => {
+ let dockerImage = await getDockerImage(namespace, podname, FAKE);
res.setHeader(
'Content-disposition',
'attachment; filename=docker-compose.yml',
@@ -408,35 +194,7 @@ const addRequest = (address, request) => {
publication.updateState(address);
};
-/**
- * Returns the status of a transaction against hash `txHash`.
- * The status is one of the values from `TRANSACTION_STATUS`
- * @param {string} txHash
- * @returns {Promise}
- */
-const getTransactionStatus = async txHash => {
- let { exitCode, stderr, stdout } = await nothrow($`\
- ${agBinary} query tx ${txHash} \
- --chain-id=${chainId} \
- --home=${agoricHome} \
- --node=http://localhost:${RPC_PORT} \
- --output=json \
- --type=hash \
- `);
- exitCode = exitCode ?? 1;
- // This check is brittle as this can also happen in case
- // an invalid txhash was provided. So there is no reliable
- // distinction between the case of invalid txhash and a
- // transaction currently in the mempool. We could use search
- // endpoint but that seems overkill to cover a case where
- // the only the deliberate use of invalid hash can effect the user
- if (exitCode && stderr.includes(`tx (${txHash}) not found`))
- return TRANSACTION_STATUS.NOT_FOUND;
-
- const code = Number(JSON.parse(stdout).code);
- return code ? TRANSACTION_STATUS.FAILED : TRANSACTION_STATUS.SUCCESSFUL;
-};
/**
* @param {string} address
@@ -479,46 +237,13 @@ const provisionAddress = async (address, clientType) => {
);
};
-/**
- * Send funds to `address`.
- * It only waits for the transaction
- * checks and doesn't wait for the
- * transaction to actually be included
- * in a block. The returned transaction
- * hash can be used to get the current status
- * of the transaction later
- * @param {string} address
- * @param {string} amount
- * @returns {Promise<[number, string]>}
- */
-const sendFunds = async (address, amount) => {
- let { exitCode, stdout } = await nothrow($`\
- ${agBinary} tx bank send ${FAUCET_KEYNAME} ${address} ${amount} \
- --broadcast-mode=sync \
- --chain-id=${chainId} \
- --keyring-backend=test \
- --keyring-dir=${agoricHome} \
- --node=http://localhost:${RPC_PORT} \
- --output=json \
- --yes \
- `);
- exitCode = exitCode ?? 1;
- if (exitCode) return [exitCode, ''];
- return [exitCode, String(JSON.parse(stdout).txhash)];
-};
// Faucet worker.
const constructAmountToSend = (amount, denoms) => denoms.map(denom => `${amount}${denom}`).join(',');
-const getDenoms = async () => {
- // Not handling pagination as it is used for testing. Limit 100 shoud suffice
- const result = await $`${agBinary} query bank total --limit=100 -o json`;
- const output = JSON.parse(result.stdout.trim());
- return output.supply.map((element) => element.denom);
-}
const startFaucetWorker = async () => {
console.log('Starting Faucet worker!');
@@ -593,90 +318,7 @@ privateapp.listen(privateport, () => {
});
-faucetapp.get('/', async (req, res) => {
-
- const denoms = await getDenoms();
- let denomHtml = '';
- denoms.forEach((denom) => {
- denomHtml += ``;
- })
- const denomsDropDownHtml =``
-
- const clientText = !AG0_MODE
- ? `
-
-`
- : '';
- res.send(
- `Faucet
-
-
-
- welcome to the faucet
-
-
-
-
-
-
-`,
- );
-});
+faucetapp.get('/', faucetAppHomeRoute);
faucetapp.use(
express.urlencoded({
@@ -774,14 +416,6 @@ faucetapp.listen(faucetport, () => {
console.log(`faucetapp listening on port ${faucetport}`);
});
-if (FAKE) {
- dockerImage = 'asdf:unknown';
-} else {
- const statefulSet = await makeKubernetesRequest(
- `/apis/apps/v1/namespaces/${namespace}/statefulsets/${podname}`,
- );
- dockerImage = statefulSet.spec.template.spec.containers[0].image;
-}
publicapp.listen(publicport, () => {
console.log(`publicapp listening on port ${publicport}`);
});
diff --git a/bases/shared/instagoric-server/utils.js b/bases/shared/instagoric-server/utils.js
new file mode 100644
index 0000000..ca33a8d
--- /dev/null
+++ b/bases/shared/instagoric-server/utils.js
@@ -0,0 +1,240 @@
+import {
+ agBinary,
+ RPC_PORT,
+ agoricHome,
+ chainId,
+ TRANSACTION_STATUS,
+ FAUCET_KEYNAME,
+ FAKE,
+ NETNAME,
+ NETDOMAIN,
+ podname,
+ namespace,
+ NODE_ID,
+ INCLUDE_SEED,
+} from './constants.js';
+import { $, fetch, fs, nothrow } from 'zx';
+import https from 'https';
+export const getDenoms = async () => {
+ // Not handling pagination as it is used for testing. Limit 100 should suffice
+
+ const result = await $`${agBinary} query bank total --limit=100 -o json`;
+ const output = JSON.parse(result.stdout.trim());
+ return output.supply.map(element => element.denom);
+};
+
+/**
+ * Returns the status of a transaction against hash `txHash`.
+ * The status is one of the values from `TRANSACTION_STATUS`
+ * @param {string} txHash
+ * @returns {Promise}
+ */
+export const getTransactionStatus = async txHash => {
+ let { exitCode, stderr, stdout } = await nothrow($`\
+ ${agBinary} query tx ${txHash} \
+ --chain-id=${chainId} \
+ --home=${agoricHome} \
+ --node=http://localhost:${RPC_PORT} \
+ --output=json \
+ --type=hash \
+ `);
+ exitCode = exitCode ?? 1;
+
+ // This check is brittle as this can also happen in case
+ // an invalid txhash was provided. So there is no reliable
+ // distinction between the case of invalid txhash and a
+ // transaction currently in the mempool. We could use search
+ // endpoint but that seems overkill to cover a case where
+ // only the deliberate use of an invalid hash can affect the user
+ if (exitCode && stderr.includes(`tx (${txHash}) not found`))
+ return TRANSACTION_STATUS.NOT_FOUND;
+
+ const code = Number(JSON.parse(stdout).code);
+ return code ? TRANSACTION_STATUS.FAILED : TRANSACTION_STATUS.SUCCESSFUL;
+};
+
+/**
+ * Send funds to `address`.
+ * It only waits for the transaction
+ * checks and doesn't wait for the
+ * transaction to actually be included
+ * in a block. The returned transaction
+ * hash can be used to get the current status
+ * of the transaction later
+ * @param {string} address
+ * @param {string} amount
+ * @returns {Promise<[number, string]>}
+ */
+export const sendFunds = async (address, amount) => {
+ let { exitCode, stdout } = await nothrow($`\
+ ${agBinary} tx bank send ${FAUCET_KEYNAME} ${address} ${amount} \
+ --broadcast-mode=sync \
+ --chain-id=${chainId} \
+ --keyring-backend=test \
+ --keyring-dir=${agoricHome} \
+ --node=http://localhost:${RPC_PORT} \
+ --output=json \
+ --yes \
+ `);
+ exitCode = exitCode ?? 1;
+
+ if (exitCode) return [exitCode, ''];
+ return [exitCode, String(JSON.parse(stdout).txhash)];
+};
+
+export const makeKubernetesRequest = async relativeUrl => {
+ const ca = await fs.readFile(
+ '/var/run/secrets/kubernetes.io/serviceaccount/ca.crt',
+ 'utf8',
+ );
+ const token = await fs.readFile(
+ '/var/run/secrets/kubernetes.io/serviceaccount/token',
+ 'utf8',
+ );
+ const url = new URL(
+ relativeUrl,
+ 'https://kubernetes.default.svc.cluster.local',
+ );
+ const response = await fetch(url.href, {
+ headers: {
+ Authorization: `Bearer ${token}`,
+ Accept: 'application/json',
+ },
+ agent: new https.Agent({ ca }),
+ });
+ return response.json();
+};
+
+export async function getDockerImage(namespace, podname, FAKE) {
+ if (FAKE) {
+ return 'asdf:unknown';
+ } else {
+ const statefulSet = await makeKubernetesRequest(
+ `/apis/apps/v1/namespaces/${namespace}/statefulsets/${podname}`,
+ );
+ return statefulSet.spec.template.spec.containers[0].image;
+ }
+}
+
+export async function getServices() {
+ if (FAKE) {
+ return new Map([
+ ['validator-primary-ext', '1.1.1.1'],
+ ['seed-ext', '1.1.1.2'],
+ ]);
+ }
+ const services = await makeKubernetesRequest(
+ `/api/v1/namespaces/${namespace}/services/`,
+ );
+ const map1 = new Map();
+ for (const item of services.items) {
+ const ingress = item.status?.loadBalancer?.ingress;
+ if (ingress?.length > 0) {
+ map1.set(item.metadata.name, ingress[0].ip);
+ }
+ }
+ return map1;
+}
+
+export const getNetworkConfig = async () => {
+ const svc = await getServices();
+ const file = FAKE
+ ? './resources/network_info.json'
+ : '/config/network/network_info.json';
+ const buf = await fs.readFile(file, 'utf8');
+ const ap = JSON.parse(buf);
+ ap.chainName = chainId;
+ ap.gci = `https://${NETNAME}.rpc${NETDOMAIN}:443/genesis`;
+ ap.peers[0] = ap.peers[0].replace(
+ 'validator-primary.instagoric.svc.cluster.local',
+ svc.get('validator-primary-ext') ||
+ `${podname}.${namespace}.svc.cluster.local`,
+ );
+ ap.peers[0] = ap.peers[0].replace(
+ 'fb86a0993c694c981a28fa1ebd1fd692f345348b',
+ `${NODE_ID}`,
+ );
+ ap.rpcAddrs = [`https://${NETNAME}.rpc${NETDOMAIN}:443`];
+ ap.apiAddrs = [`https://${NETNAME}.api${NETDOMAIN}:443`];
+ if (INCLUDE_SEED === 'yes') {
+ ap.seeds[0] = ap.seeds[0].replace(
+ 'seed.instagoric.svc.cluster.local',
+ svc.get('seed-ext') || `seed.${namespace}.svc.cluster.local`,
+ );
+ } else {
+ ap.seeds = [];
+ }
+
+ return JSON.stringify(ap);
+};
+
+export const dockerComposeYaml = (
+ dockerimage,
+ dockertag,
+ netname,
+ netdomain,
+) => `\
+version: "2.2"
+services:
+ ag-solo:
+ image: ${dockerimage}:\${SDK_TAG:-${dockertag}}
+ ports:
+ - "\${HOST_PORT:-8000}:\${PORT:-8000}"
+ volumes:
+ - "ag-solo-state:/state"
+ - "$HOME/.agoric:/root/.agoric"
+ environment:
+ - "AG_SOLO_BASEDIR=/state/\${SOLO_HOME:-${dockertag}}"
+ entrypoint: ag-solo
+ command:
+ - setup
+ - --webhost=0.0.0.0
+ - --webport=\${PORT:-8000}
+ - --netconfig=\${NETCONFIG_URL:-https://${netname}${netdomain}/network-config}
+volumes:
+ ag-solo-state:
+`;
+
+export class DataCache {
+ constructor(fetchFunction, minutesToLive = 10) {
+ this.millisecondsToLive = minutesToLive * 60 * 1000;
+ this.fetchFunction = fetchFunction;
+ this.cache = null;
+ this.getData = this.getData.bind(this);
+ this.resetCache = this.resetCache.bind(this);
+ this.isCacheExpired = this.isCacheExpired.bind(this);
+ this.fetchDate = new Date(0);
+ }
+
+ isCacheExpired() {
+ return (
+ this.fetchDate.getTime() + this.millisecondsToLive < new Date().getTime()
+ );
+ }
+
+ getData() {
+ if (!this.cache || this.isCacheExpired()) {
+ console.log('fetch');
+ return this.fetchFunction().then(data => {
+ this.cache = data;
+ this.fetchDate = new Date();
+ return data;
+ });
+ } else {
+ console.log('cache hit');
+
+ return Promise.resolve(this.cache);
+ }
+ }
+
+ resetCache() {
+ this.fetchDate = new Date(0);
+ }
+}
+
+export async function getNodeId(node) {
+ const response = await fetch(
+ `http://${node}.${namespace}.svc.cluster.local:26657/status`,
+ );
+ return response.json();
+}