diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml index b95177e..ca7c5c5 100644 --- a/.github/workflows/docker-build.yml +++ b/.github/workflows/docker-build.yml @@ -37,6 +37,9 @@ jobs: - name: Install dependencies run: npm ci + - name: Lint + run: npm run lint + - name: Run tests with coverage run: npm run test:coverage diff --git a/.github/workflows/refresh-l2beat-fallback.yml b/.github/workflows/refresh-l2beat-fallback.yml new file mode 100644 index 0000000..95f1ef1 --- /dev/null +++ b/.github/workflows/refresh-l2beat-fallback.yml @@ -0,0 +1,78 @@ +name: Refresh L2BEAT fallback + +# Runs the live L2BEAT scaling-summary endpoint weekly and opens a PR if the +# normalized output differs from the checked-in data/l2beat-fallback.json. +# Keeps the static safety net from drifting when the live API is unreachable. +on: + schedule: + # 06:00 UTC every Monday + - cron: '0 6 * * 1' + workflow_dispatch: + +permissions: + contents: write + pull-requests: write + +jobs: + refresh: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - uses: actions/setup-node@v4 + with: + node-version: 20 + + - name: Install dependencies + run: npm ci + + - name: Fetch and normalize live L2BEAT data + id: fetch + continue-on-error: true + run: | + node --input-type=module -e ' + import { normalizeL2BeatResponse } from "./src/sources/l2beat.js"; + const res = await fetch(process.env.L2BEAT_API_URL || "https://l2beat.com/api/scaling-summary"); + if (!res.ok) { process.stderr.write(`HTTP ${res.status}\n`); process.exit(1); } + const json = await res.json(); + const projects = normalizeL2BeatResponse(json) + .filter(p => Number.isSafeInteger(p.chainId)) + .map(p => ({ + slug: p.slug, + chainId: p.chainId, + displayName: p.displayName, + stage: p.stage, + category: p.category, + stack: p.stack, + daLayer: p.daLayer, + hostChainId: p.hostChainId + })); + const payload = { + schemaVersion: 1, + fetchedAt: new Date().toISOString(), + note: 
"Auto-refreshed weekly by .github/workflows/refresh-l2beat-fallback.yml. Excludes chains whose chainId exceeds Number.MAX_SAFE_INTEGER (e.g. Starknet).", + projects + }; + await import("node:fs/promises").then(fs => + fs.writeFile("data/l2beat-fallback.json", JSON.stringify(payload, null, 2) + "\n", "utf8") + ); + ' + + - name: Skip when fetch failed + if: steps.fetch.outcome != 'success' + run: echo "L2BEAT live API unreachable; skipping refresh until next run." + + - name: Create pull request if file changed + if: steps.fetch.outcome == 'success' + uses: peter-evans/create-pull-request@v6 + with: + commit-message: 'chore: refresh L2BEAT fallback data' + title: 'chore: refresh L2BEAT fallback data' + body: | + Automated weekly refresh of `data/l2beat-fallback.json` from the + live L2BEAT scaling-summary endpoint. Review the diff for any + unexpected stage transitions or removed/added projects before + merging. + branch: chore/refresh-l2beat-fallback + delete-branch: true + add-paths: data/l2beat-fallback.json diff --git a/.gitignore b/.gitignore index cb65579..5286ab0 100644 --- a/.gitignore +++ b/.gitignore @@ -148,4 +148,7 @@ vite.config.js.timestamp-* vite.config.ts.timestamp-* # MCP configuration file -.mcp.json \ No newline at end of file +.mcp.json + +# Graphify generated knowledge graph +graphify-out/ \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 9077fd0..fd6b684 100644 --- a/Dockerfile +++ b/Dockerfile @@ -15,6 +15,9 @@ RUN npm ci --only=production # Copy application files COPY *.js ./ +COPY src/ ./src/ +COPY data/ ./data/ +COPY public/ ./public/ # Ensure app owns the working directory RUN chown -R app:app /app diff --git a/config.js b/config.js index 3b2124f..6d4c06b 100644 --- a/config.js +++ b/config.js @@ -46,6 +46,12 @@ export const SEARCH_RATE_LIMIT_MAX = parseIntEnv('SEARCH_RATE_LIMIT_MAX', 30); // RPC health check export const RPC_CHECK_TIMEOUT_MS = parseIntEnv('RPC_CHECK_TIMEOUT_MS', 8000); +/** + * @deprecated Unused 
since the unified rolling refresher (services/chainRefresher.js). + * The new loop processes one chain per tick; each chain's RPC endpoints are + * checked in parallel inside that chain's job. There is no global concurrency + * cap. Kept for backwards-compatible env parsing; safe to remove in v2. + */ export const RPC_CHECK_CONCURRENCY = parseIntEnv('RPC_CHECK_CONCURRENCY', 8); export const MAX_ENDPOINTS_PER_CHAIN = parseIntEnv('MAX_ENDPOINTS_PER_CHAIN', 5); @@ -69,6 +75,18 @@ export const DATA_SOURCE_SLIP44 = parseStringEnv( 'DATA_SOURCE_SLIP44', 'https://raw.githubusercontent.com/satoshilabs/slips/master/slip-0044.md' ); +export const DATA_SOURCE_L2BEAT_API = parseStringEnv( + 'DATA_SOURCE_L2BEAT_API', + 'https://l2beat.com/api/scaling-summary' +); +export const L2BEAT_FETCH_TIMEOUT_MS = parseIntEnv('L2BEAT_FETCH_TIMEOUT_MS', 10000); +/** + * @deprecated Cadence is now driven by the unified rolling refresher + * (CHAIN_REFRESHER_TICK_MS × queue length). Kept so /scaling/status can keep + * exposing the value as a hint to consumers, but no longer used for + * scheduling. Safe to remove in v2 once consumers migrate to /refresher. + */ +export const L2BEAT_REFRESH_INTERVAL_MS = parseIntEnv('L2BEAT_REFRESH_INTERVAL_MS', 300000); // Disk cache export const DATA_CACHE_ENABLED = parseBooleanEnv('DATA_CACHE_ENABLED', true); diff --git a/data/l2beat-fallback.json b/data/l2beat-fallback.json new file mode 100644 index 0000000..ef3e184 --- /dev/null +++ b/data/l2beat-fallback.json @@ -0,0 +1,34 @@ +{ + "schemaVersion": 1, + "fetchedAt": "2026-05-05T00:00:00.000Z", + "note": "Hand-curated last-known-good fallback for src/sources/l2beat.js. Used only when the live l2beat.com API is unreachable. Refresh manually when stage classifications change. Source of truth: https://l2beat.com. Excludes chains whose chainId exceeds Number.MAX_SAFE_INTEGER (e.g. 
Starknet's CAIP-2 numeric ID 0x534e5f4d41494e); the live API can still surface them once the indexer learns to handle BigInt chain IDs.", + "projects": [ + { "slug": "arbitrum", "chainId": 42161, "displayName": "Arbitrum One", "stage": "Stage 1", "category": "Optimistic Rollup", "stack": "Arbitrum Orbit", "daLayer": "Ethereum", "hostChainId": 1 }, + { "slug": "optimism", "chainId": 10, "displayName": "OP Mainnet", "stage": "Stage 1", "category": "Optimistic Rollup", "stack": "OP Stack", "daLayer": "Ethereum", "hostChainId": 1 }, + { "slug": "base", "chainId": 8453, "displayName": "Base", "stage": "Stage 1", "category": "Optimistic Rollup", "stack": "OP Stack", "daLayer": "Ethereum", "hostChainId": 1 }, + { "slug": "zksync-era", "chainId": 324, "displayName": "ZKsync Era", "stage": "Stage 0", "category": "ZK Rollup", "stack": "ZK Stack", "daLayer": "Ethereum", "hostChainId": 1 }, + { "slug": "linea", "chainId": 59144, "displayName": "Linea", "stage": "Stage 0", "category": "ZK Rollup", "stack": "Linea zkEVM", "daLayer": "Ethereum", "hostChainId": 1 }, + { "slug": "polygonzkevm", "chainId": 1101, "displayName": "Polygon zkEVM", "stage": "Stage 1", "category": "ZK Rollup", "stack": "Polygon CDK", "daLayer": "Ethereum", "hostChainId": 1 }, + { "slug": "scroll", "chainId": 534352, "displayName": "Scroll", "stage": "Stage 1", "category": "ZK Rollup", "stack": "Scroll", "daLayer": "Ethereum", "hostChainId": 1 }, + { "slug": "blast", "chainId": 81457, "displayName": "Blast", "stage": "Stage 0", "category": "Optimistic Rollup", "stack": "OP Stack", "daLayer": "Ethereum", "hostChainId": 1 }, + { "slug": "mantle", "chainId": 5000, "displayName": "Mantle", "stage": "Stage 0", "category": "Optimium", "stack": "OP Stack", "daLayer": "Mantle DA", "hostChainId": 1 }, + { "slug": "zora", "chainId": 7777777, "displayName": "Zora", "stage": "Stage 1", "category": "Optimistic Rollup", "stack": "OP Stack", "daLayer": "Ethereum", "hostChainId": 1 }, + { "slug": "mode", "chainId": 34443, 
"displayName": "Mode", "stage": "Stage 1", "category": "Optimistic Rollup", "stack": "OP Stack", "daLayer": "Ethereum", "hostChainId": 1 }, + { "slug": "manta-pacific", "chainId": 169, "displayName": "Manta Pacific", "stage": "Stage 0", "category": "Optimium", "stack": "OP Stack", "daLayer": "Celestia", "hostChainId": 1 }, + { "slug": "lisk", "chainId": 1135, "displayName": "Lisk", "stage": "Stage 1", "category": "Optimistic Rollup", "stack": "OP Stack", "daLayer": "Ethereum", "hostChainId": 1 }, + { "slug": "fraxtal", "chainId": 252, "displayName": "Fraxtal", "stage": "Stage 0", "category": "Optimistic Rollup", "stack": "OP Stack", "daLayer": "Ethereum", "hostChainId": 1 }, + { "slug": "bob", "chainId": 60808, "displayName": "BOB", "stage": "Stage 1", "category": "Optimistic Rollup", "stack": "OP Stack", "daLayer": "Ethereum", "hostChainId": 1 }, + { "slug": "world-chain", "chainId": 480, "displayName": "World Chain", "stage": "Stage 1", "category": "Optimistic Rollup", "stack": "OP Stack", "daLayer": "Ethereum", "hostChainId": 1 }, + { "slug": "xlayer", "chainId": 196, "displayName": "X Layer", "stage": "Stage 0", "category": "Validium", "stack": "Polygon CDK", "daLayer": "DAC", "hostChainId": 1 }, + { "slug": "taiko", "chainId": 167000, "displayName": "Taiko Alethia", "stage": "Stage 1", "category": "ZK Rollup", "stack": "Taiko", "daLayer": "Ethereum", "hostChainId": 1 }, + { "slug": "redstone", "chainId": 690, "displayName": "Redstone", "stage": "Stage 0", "category": "Optimistic Rollup", "stack": "OP Stack", "daLayer": "Ethereum", "hostChainId": 1 }, + { "slug": "ink", "chainId": 57073, "displayName": "Ink", "stage": "Stage 1", "category": "Optimistic Rollup", "stack": "OP Stack", "daLayer": "Ethereum", "hostChainId": 1 }, + { "slug": "soneium", "chainId": 1868, "displayName": "Soneium", "stage": "Stage 1", "category": "Optimistic Rollup", "stack": "OP Stack", "daLayer": "Ethereum", "hostChainId": 1 }, + { "slug": "unichain", "chainId": 130, "displayName": 
"Unichain", "stage": "Stage 1", "category": "Optimistic Rollup", "stack": "OP Stack", "daLayer": "Ethereum", "hostChainId": 1 }, + { "slug": "zircuit", "chainId": 48900, "displayName": "Zircuit", "stage": "Stage 0", "category": "ZK Rollup", "stack": "ZK Stack", "daLayer": "Ethereum", "hostChainId": 1 }, + { "slug": "metis", "chainId": 1088, "displayName": "Metis Andromeda", "stage": "Stage 0", "category": "Optimium", "stack": "OP Stack", "daLayer": "Metis DA", "hostChainId": 1 }, + { "slug": "boba", "chainId": 288, "displayName": "Boba Network", "stage": "Stage 0", "category": "Optimistic Rollup", "stack": "OP Stack", "daLayer": "Ethereum", "hostChainId": 1 }, + { "slug": "kroma", "chainId": 255, "displayName": "Kroma", "stage": "Stage 1", "category": "Optimistic Rollup", "stack": "OP Stack", "daLayer": "Ethereum", "hostChainId": 1 }, + { "slug": "morph", "chainId": 2818, "displayName": "Morph", "stage": "Stage 0", "category": "Optimistic Rollup", "stack": "OP Stack", "daLayer": "Ethereum", "hostChainId": 1 } + ] +} diff --git a/dataService.js b/dataService.js index 775feaf..3498b0f 100644 --- a/dataService.js +++ b/dataService.js @@ -1,1884 +1,38 @@ -import { - DATA_SOURCE_THE_GRAPH, DATA_SOURCE_CHAINLIST, - DATA_SOURCE_CHAINS, DATA_SOURCE_SLIP44, - RPC_CHECK_TIMEOUT_MS, RPC_CHECK_CONCURRENCY, - DATA_CACHE_ENABLED, DATA_CACHE_FILE -} from './config.js'; -import { mkdir, readFile, rename, rm, writeFile } from 'node:fs/promises'; -import { dirname, resolve } from 'node:path'; -import { proxyFetch } from './fetchUtil.js'; -import { jsonRpcCall } from './rpcUtil.js'; - -// Data source URLs (from config, overridable via env) -const DATA_SOURCES = { - theGraph: DATA_SOURCE_THE_GRAPH, - chainlist: DATA_SOURCE_CHAINLIST, - chains: DATA_SOURCE_CHAINS, - slip44: DATA_SOURCE_SLIP44 -}; - -// Cache for data -let cachedData = { - theGraph: null, - chainlist: null, - chains: null, - slip44: null, - indexed: null, - lastUpdated: null, - rpcHealth: {}, - lastRpcCheck: null -}; - 
-let rpcCheckInProgress = false; -let rpcCheckPending = false; -let dataRefreshPromise = null; -let startupInitializationPromise = null; -let startupInitialized = false; - -const SNAPSHOT_SCHEMA_VERSION = 1; -const DATA_CACHE_PATH = resolve(DATA_CACHE_FILE); - -function applyDataToCache(data) { - cachedData.theGraph = data.theGraph ?? null; - cachedData.chainlist = data.chainlist ?? null; - cachedData.chains = data.chains ?? null; - cachedData.slip44 = data.slip44 ?? {}; - cachedData.indexed = data.indexed ?? null; - cachedData.lastUpdated = data.lastUpdated ?? null; - cachedData.rpcHealth = data.rpcHealth ?? {}; - cachedData.lastRpcCheck = data.lastRpcCheck ?? null; -} - -function countLoadedSources(data) { - let loaded = 0; - - if (data.theGraph !== null) loaded++; - if (data.chainlist !== null) loaded++; - if (data.chains !== null) loaded++; - if (data.slip44Text !== null) loaded++; - - return loaded; -} - -function isValidIndexedData(indexed) { - if (!indexed || typeof indexed !== 'object') { - return false; - } - - return ( - Array.isArray(indexed.all) && - indexed.byChainId && - typeof indexed.byChainId === 'object' && - indexed.byName && - typeof indexed.byName === 'object' - ); -} - -function isValidSnapshot(snapshot) { - if (!snapshot || typeof snapshot !== 'object') { - return false; - } - - if (snapshot.schemaVersion !== SNAPSHOT_SCHEMA_VERSION) { - return false; - } - - if (typeof snapshot.writtenAt !== 'string') { - return false; - } - - const data = snapshot.data; - if (!data || typeof data !== 'object') { - return false; - } - - if (!isValidIndexedData(data.indexed)) { - return false; - } - - if (typeof data.lastUpdated !== 'string') { - return false; - } - - return true; -} - -function createSnapshotPayload(data) { - return { - schemaVersion: SNAPSHOT_SCHEMA_VERSION, - writtenAt: new Date().toISOString(), - data: { - theGraph: data.theGraph ?? null, - chainlist: data.chainlist ?? null, - chains: data.chains ?? null, - slip44: data.slip44 ?? 
{}, - indexed: data.indexed ?? { byChainId: {}, byName: {}, all: [] }, - lastUpdated: data.lastUpdated ?? new Date().toISOString(), - rpcHealth: data.rpcHealth ?? {}, - lastRpcCheck: data.lastRpcCheck ?? null - } - }; -} - -async function readSnapshotFromDisk() { - if (!DATA_CACHE_ENABLED) { - return null; - } - - try { - const raw = await readFile(DATA_CACHE_PATH, 'utf8'); - const parsed = JSON.parse(raw); - - if (!isValidSnapshot(parsed)) { - console.warn(`Ignoring invalid cache snapshot at ${DATA_CACHE_PATH}`); - return null; - } - - return parsed.data; - } catch (error) { - if (error?.code === 'ENOENT') { - return null; - } - - console.warn(`Failed to read cache snapshot at ${DATA_CACHE_PATH}: ${error.message}`); - return null; - } -} - -async function writeSnapshotToDiskAtomic(data) { - if (!DATA_CACHE_ENABLED) { - return; - } - - const snapshot = createSnapshotPayload(data); - const tempPath = `${DATA_CACHE_PATH}.tmp-${process.pid}-${Date.now()}`; - - try { - await mkdir(dirname(DATA_CACHE_PATH), { recursive: true }); - await writeFile(tempPath, JSON.stringify(snapshot), 'utf8'); - await rename(tempPath, DATA_CACHE_PATH); - } catch (error) { - try { - await rm(tempPath, { force: true }); - } catch { - // Best effort cleanup for temp file. - } - - console.warn(`Failed to persist cache snapshot at ${DATA_CACHE_PATH}: ${error.message}`); - } -} - -async function fetchAndBuildData() { - console.log('Loading data from all sources...'); - - const results = await Promise.allSettled([ - fetchData(DATA_SOURCES.theGraph), - fetchData(DATA_SOURCES.chainlist), - fetchData(DATA_SOURCES.chains), - fetchData(DATA_SOURCES.slip44, 'text') - ]); - - const theGraph = results[0].status === 'fulfilled' ? results[0].value : null; - const chainlist = results[1].status === 'fulfilled' ? results[1].value : null; - const chains = results[2].status === 'fulfilled' ? results[2].value : null; - const slip44Text = results[3].status === 'fulfilled' ? 
results[3].value : null; - - // Log any failed sources - const sourceNames = ['theGraph', 'chainlist', 'chains', 'slip44']; - results.forEach((result, i) => { - if (result.status === 'rejected') { - console.error(`Failed to load ${sourceNames[i]}: ${result.reason?.message || result.reason}`); - } - }); - - const slip44 = parseSLIP44(slip44Text); - const indexed = indexData(theGraph, chainlist, chains, slip44); - - return { - data: { - theGraph, - chainlist, - chains, - slip44, - indexed, - lastUpdated: new Date().toISOString(), - rpcHealth: {}, - lastRpcCheck: null - }, - loadedSourceCount: countLoadedSources({ theGraph, chainlist, chains, slip44Text }) - }; -} - -async function refreshDataWithGuard(options = {}) { - const { - requireAtLeastOneSource = false, - logSuccessMessage = true - } = options; - - if (dataRefreshPromise) { - return dataRefreshPromise; - } - - dataRefreshPromise = (async () => { - const { data, loadedSourceCount } = await fetchAndBuildData(); - - if (requireAtLeastOneSource && loadedSourceCount === 0) { - throw new Error('All data sources failed during data refresh'); - } - - applyDataToCache(data); - await writeSnapshotToDiskAtomic(cachedData); - - if (logSuccessMessage) { - console.log(`Data loaded successfully. Total chains: ${cachedData.indexed.all.length}`); - } - - return cachedData; - })(); - - try { - return await dataRefreshPromise; - } finally { - dataRefreshPromise = null; - } -} - -/** - * Fetch data from a URL with error handling - */ -export async function fetchData(url, format = 'json') { - try { - const response = await proxyFetch(url); - if (!response.ok) { - throw new Error(`HTTP error! 
status: ${response.status}`); - } - - if (format === 'json') { - return await response.json(); - } else if (format === 'text') { - return await response.text(); - } - } catch (error) { - console.error(`Error fetching data from ${url}:`, error.message); - return null; - } -} - -/** - * Parse SLIP-0044 markdown file to extract coin types - * Table structure: | Coin type | Path component | Symbol | Coin | - * Uses "Coin type" as the key (id) - */ -export function parseSLIP44(markdown) { - if (!markdown) return {}; - - const slip44Data = {}; - const lines = markdown.split('\n'); - let inTable = false; - - for (const line of lines) { - const trimmed = line.trim(); - if (!trimmed.startsWith('|') || !line.includes('|')) { - continue; - } - - // Detect table rows (format: | Coin type | Path component | Symbol | Coin |) - const cells = line.split('|').map(cell => cell.trim()).filter(Boolean); - - // Skip header and separator rows - if (cells[0] === 'Coin type' || cells[0].includes('-')) { - inTable = true; - continue; - } - - if (!inTable || cells.length < 4) { - continue; - } - - const coinTypeNum = Number.parseInt(cells[0], 10); - if (Number.isNaN(coinTypeNum)) { - continue; - } - - slip44Data[coinTypeNum] = { - coinType: coinTypeNum, - pathComponent: cells[1], - symbol: cells[2], - coin: cells[3] - }; - } - - return slip44Data; -} - -/** - * Build a mapping of network IDs to chain IDs from The Graph data - */ -function buildNetworkIdToChainIdMap(theGraph) { - const networkIdToChainId = {}; - - if (Array.isArray(theGraph?.networks)) { - theGraph.networks.forEach(network => { - // Extract chain ID from caip2Id (format: "eip155:1" or "beacon:11155111") - // Note: Only numeric chain IDs are supported; named beacon chains (e.g., "beacon:mainnet") - // won't be mapped but will still add tags to their target chains if relations exist - if (network.caip2Id) { - const match = network.caip2Id.match(/^(?:eip155|beacon):(\d+)$/); - if (match) { - const chainId = 
Number.parseInt(match[1], 10); - networkIdToChainId[network.id] = chainId; - } - } - }); - } - - return networkIdToChainId; -} - -/** - * Helper function to add Beacon tag to a target chain - */ -function addBeaconTagToTargetChain(indexed, targetChainId) { - if (targetChainId !== undefined && indexed.byChainId[targetChainId]) { - if (!indexed.byChainId[targetChainId].tags) { - indexed.byChainId[targetChainId].tags = []; - } - if (!indexed.byChainId[targetChainId].tags.includes('Beacon')) { - indexed.byChainId[targetChainId].tags.push('Beacon'); - } - } -} - -/** - * Helper function to get bridge URL from a bridge object or string - */ -function getBridgeUrl(bridge) { - if (typeof bridge === 'string') { - return bridge; - } - return bridge?.url ?? null; -} - /** - * Helper function to merge bridge URLs into a chain's bridges array - */ -function mergeBridges(chain, newBridges) { - if (!newBridges || !Array.isArray(newBridges)) { - return; - } - - if (!chain.bridges) { - chain.bridges = []; - } - - // Build a set of existing bridge URLs for comparison - const existingBridgeUrls = new Set( - chain.bridges.map(getBridgeUrl).filter(url => url !== null) - ); - - newBridges.forEach(bridge => { - const url = getBridgeUrl(bridge); - if (url && !existingBridgeUrls.has(url)) { - chain.bridges.push(bridge); - existingBridgeUrls.add(url); - } - }); -} - -/** - * Process L2 parent relation from chains.json - */ -function processL2ParentRelation(chain, indexed) { - if (chain.parent?.type !== 'L2' || !chain.parent?.chain) { - return; - } - - const match = chain.parent.chain.match(/^eip155-(\d+)$/); - if (!match) return; - - const chainId = chain.chainId; - const parentChainId = Number.parseInt(match[1], 10); - - if (!indexed.byChainId[chainId]) return; - - // Add L2 tag - if (!indexed.byChainId[chainId].tags.includes('L2')) { - indexed.byChainId[chainId].tags.push('L2'); - } - - // Add l2Of relation if it doesn't exist - const existingRelation = 
indexed.byChainId[chainId].relations.find( - r => r.kind === 'l2Of' && r.chainId === parentChainId - ); - - if (!existingRelation) { - indexed.byChainId[chainId].relations.push({ - kind: 'l2Of', - network: chain.parent.chain, - chainId: parentChainId, - source: 'chains' - }); - } - - // Extract bridge URLs - mergeBridges(indexed.byChainId[chainId], chain.parent.bridges); -} - -/** - * Process testnet parent relation from chains.json - * Chains with parent.type === "testnet" have a parent.chain like "eip155-1" pointing to their mainnet - */ -function processTestnetParentRelation(chain, indexed) { - if (chain.parent?.type !== 'testnet' || !chain.parent?.chain) { - return; - } - - const match = chain.parent.chain.match(/^eip155-(\d+)$/); - if (!match) return; - - const chainId = chain.chainId; - const mainnetChainId = Number.parseInt(match[1], 10); - - if (!indexed.byChainId[chainId]) return; - - // Add testnetOf relation if it doesn't exist - const existingRelation = indexed.byChainId[chainId].relations.find( - r => r.kind === 'testnetOf' && r.chainId === mainnetChainId - ); - - if (!existingRelation) { - indexed.byChainId[chainId].relations.push({ - kind: 'testnetOf', - network: chain.parent.chain, - chainId: mainnetChainId, - source: 'chains' - }); - } -} - -/** - * Merge RPC URLs from a source array into an existing chain's rpc array, - * deduplicating by URL string. - * @param {Object} existingChain - The chain object to merge into - * @param {Array} newRpcUrls - Array of RPC entries (string or {url: string}) - */ -function mergeRpcUrlsFromArray(existingChain, newRpcUrls) { - if (!newRpcUrls || !Array.isArray(newRpcUrls)) { - return; - } - - if (!existingChain.rpc) { - existingChain.rpc = []; - } - - const existingRpcUrls = new Set(); - existingChain.rpc.forEach(rpc => { - const url = typeof rpc === 'string' ? rpc : rpc.url; - if (url) existingRpcUrls.add(url); - }); - - newRpcUrls.forEach(rpc => { - const url = typeof rpc === 'string' ? 
rpc : rpc.url; - if (url && !existingRpcUrls.has(url)) { - existingChain.rpc.push(rpc); - existingRpcUrls.add(url); - } - }); -} - -/** - * Merge single chainlist entry into indexed data - */ -function mergeChainlistEntry(chainData, indexed) { - const chainId = chainData.chainId; - - if (indexed.byChainId[chainId]) { - mergeRpcUrlsFromArray(indexed.byChainId[chainId], chainData.rpc); - - if (!indexed.byChainId[chainId].sources.includes('chainlist')) { - indexed.byChainId[chainId].sources.push('chainlist'); - } - - if (chainData.status && !indexed.byChainId[chainId].status) { - indexed.byChainId[chainId].status = chainData.status; - } - } else { - indexed.byChainId[chainId] = { - chainId: Number(chainId), - name: chainData.name, - rpc: chainData.rpc || [], - sources: ['chainlist'], - tags: [], - relations: [], - status: chainData.status || 'active' - }; - } - - // Mark as testnet if applicable - if ((chainData.slip44 === 1 || chainData.isTestnet === true)) { - if (!indexed.byChainId[chainId].tags.includes('Testnet')) { - indexed.byChainId[chainId].tags.push('Testnet'); - } - } -} - -/** - * Extract chain ID from caip2Id format (e.g., "eip155:1") - */ -function extractChainIdFromCaip2Id(caip2Id) { - if (!caip2Id) return null; - const match = caip2Id.match(/^eip155:(\d+)$/); - return match ? 
Number.parseInt(match[1], 10) : null; -} - -/** - * Create new chain entry from The Graph network data - */ -function createTheGraphChainEntry(chainId, network) { - return { - chainId, - name: network.fullName || network.shortName || network.id || 'Unknown', - shortName: network.shortName, - nativeCurrency: { symbol: network.nativeToken }, - rpc: network.rpcUrls || [], - explorers: network.explorerUrls || [], - sources: ['theGraph'], - tags: [], - relations: [], - status: 'active' - }; -} - - -/** - * Process a single The Graph relation - */ -function processTheGraphRelation(relation, chainId, indexed, networkIdToChainId) { - const { kind, network: targetNetworkId } = relation; - const targetChainId = networkIdToChainId[targetNetworkId]; - - const relationData = { - kind, - network: targetNetworkId, - ...(targetChainId !== undefined && { chainId: targetChainId }), - source: 'theGraph' - }; - - indexed.byChainId[chainId].relations.push(relationData); - - // Add tags based on relation kind - if (kind === 'testnetOf' && !indexed.byChainId[chainId].tags.includes('Testnet')) { - indexed.byChainId[chainId].tags.push('Testnet'); - } else if (kind === 'l2Of' && !indexed.byChainId[chainId].tags.includes('L2')) { - indexed.byChainId[chainId].tags.push('L2'); - } else if (kind === 'beaconOf') { - addBeaconTagToTargetChain(indexed, targetChainId); - } -} - -/** - * Create or merge The Graph chain entry - */ -function createOrMergeTheGraphChain(chainId, network, indexed) { - if (indexed.byChainId[chainId]) { - if (!indexed.byChainId[chainId].sources.includes('theGraph')) { - indexed.byChainId[chainId].sources.push('theGraph'); - } - mergeRpcUrlsFromArray(indexed.byChainId[chainId], network.rpcUrls); - - // Ensure arrays exist - if (!indexed.byChainId[chainId].tags) indexed.byChainId[chainId].tags = []; - if (!indexed.byChainId[chainId].relations) indexed.byChainId[chainId].relations = []; - } else { - indexed.byChainId[chainId] = createTheGraphChainEntry(chainId, network); - } 
-} - -/** - * Add testnet tag if network is marked as testnet - */ -function addTestnetTagIfApplicable(chainId, network, indexed) { - if (network.networkType === 'testnet') { - if (!indexed.byChainId[chainId].tags.includes('Testnet')) { - indexed.byChainId[chainId].tags.push('Testnet'); - } - } -} - -/** - * Process all relations for a The Graph network - */ -function processTheGraphNetworkRelations(network, chainId, indexed, networkIdToChainId) { - if (network.relations && Array.isArray(network.relations)) { - network.relations.forEach(relation => { - processTheGraphRelation(relation, chainId, indexed, networkIdToChainId); - }); - } -} - -/** - * Add The Graph specific data to chain - */ -function addTheGraphSpecificData(chainId, network, indexed) { - indexed.byChainId[chainId].theGraph = { - id: network.id, - fullName: network.fullName, - shortName: network.shortName, - caip2Id: network.caip2Id, - aliases: network.aliases, - networkType: network.networkType, - services: network.services, - nativeToken: network.nativeToken - }; -} - -/** - * Add chain to name index - */ -function addChainToNameIndex(chainId, network, indexed) { - const nameLower = (network.fullName || network.shortName || '').toLowerCase(); - if (nameLower && !indexed.byName[nameLower]) { - indexed.byName[nameLower] = []; - } - if (nameLower && !indexed.byName[nameLower].includes(chainId)) { - indexed.byName[nameLower].push(chainId); - } -} - -/** - * Process beacon chain relations - */ -function processBeaconChainRelations(network, networkIdToChainId, indexed) { - if (network.relations && Array.isArray(network.relations)) { - network.relations.forEach(relation => { - if (relation.kind === 'beaconOf') { - const targetChainId = networkIdToChainId[relation.network]; - addBeaconTagToTargetChain(indexed, targetChainId); - } - }); - } -} - -/** - * Process The Graph network entry - */ -function processTheGraphNetwork(network, indexed, networkIdToChainId) { - const chainId = 
extractChainIdFromCaip2Id(network.caip2Id); - const isBeaconChain = network.caip2Id?.startsWith('beacon:'); - - if (chainId !== null) { - createOrMergeTheGraphChain(chainId, network, indexed); - addTestnetTagIfApplicable(chainId, network, indexed); - processTheGraphNetworkRelations(network, chainId, indexed, networkIdToChainId); - addTheGraphSpecificData(chainId, network, indexed); - addChainToNameIndex(chainId, network, indexed); - } else if (isBeaconChain) { - processBeaconChainRelations(network, networkIdToChainId, indexed); - } -} - -/** - * Index all data into a searchable structure - */ -export function indexData(theGraph, chainlist, chains, slip44) { - const indexed = { - byChainId: {}, - byName: {}, - all: [] - }; - - // Build network ID to chain ID mapping for resolving relations - const networkIdToChainId = buildNetworkIdToChainIdMap(theGraph); - - // Index chains data - if (Array.isArray(chains)) { - chains.forEach(chain => { - const chainId = chain.chainId; - if (chainId !== undefined) { - if (!indexed.byChainId[chainId]) { - indexed.byChainId[chainId] = { - chainId, - name: chain.name, - shortName: chain.shortName, - network: chain.network, - nativeCurrency: chain.nativeCurrency, - rpc: chain.rpc || [], - explorers: chain.explorers || [], - infoURL: chain.infoURL, - sources: ['chains'], - tags: [], - relations: [], - status: chain.status || 'active' // Default to 'active' if not present - }; - } - - // Check slip44 for testnet marking - if (chain.slip44 === 1) { - if (!indexed.byChainId[chainId].tags.includes('Testnet')) { - indexed.byChainId[chainId].tags.push('Testnet'); - } - } - - const nameLower = (chain.name || '').toLowerCase(); - if (!indexed.byName[nameLower]) { - indexed.byName[nameLower] = []; - } - indexed.byName[nameLower].push(chainId); - } - }); - - // Process L2 and testnet relations and bridge URLs from parent field in chains.json - chains.forEach(chain => { - if (chain.chainId !== undefined) { - processL2ParentRelation(chain, 
indexed); - processTestnetParentRelation(chain, indexed); - } - }); - } - - // Merge chainlist RPC data - // chainlist is an array of chain objects, each with chainId, name, rpc, etc. - if (chainlist && Array.isArray(chainlist)) { - chainlist.forEach(chainData => { - const chainId = chainData.chainId; - - // Skip if chainId is not valid - if (chainId === undefined || chainId === null || Number.isNaN(Number(chainId))) { - return; - } - - mergeChainlistEntry(chainData, indexed); - }); - - // Second pass: Extract bridge URLs from parent.bridges in chainlist - chainlist.forEach(chainData => { - const chainId = chainData.chainId; - - // Skip if chainId is not valid - if (chainId === undefined || chainId === null || Number.isNaN(Number(chainId))) { - return; - } - - // Extract bridge URLs from parent.bridges - if (indexed.byChainId[chainId] && chainData.parent?.bridges) { - mergeBridges(indexed.byChainId[chainId], chainData.parent.bridges); - } - }); - } - - // Merge The Graph registry data - // The Graph uses caip2Id format (e.g., "eip155:1" for Ethereum mainnet) - if (Array.isArray(theGraph?.networks)) { - theGraph.networks.forEach(network => { - processTheGraphNetwork(network, indexed, networkIdToChainId); - }); - } - - // Add SLIP-0044 data - if (slip44) { - Object.keys(indexed.byChainId).forEach(chainId => { - const chain = indexed.byChainId[chainId]; - if (chain.slip44 !== undefined && slip44[chain.slip44]) { - chain.slip44Info = slip44[chain.slip44]; - } - }); - } - - // Set default status to "active" for chains without status - Object.keys(indexed.byChainId).forEach(chainId => { - const chain = indexed.byChainId[chainId]; - if (!chain.status) { - chain.status = 'active'; - } - }); - - // Add reverse relations: mainnetOf and parentOf - Object.keys(indexed.byChainId).forEach(chainId => { - const chain = indexed.byChainId[chainId]; - - if (chain.relations && Array.isArray(chain.relations)) { - chain.relations.forEach(relation => { - // Add mainnetOf reverse relation 
for testnetOf - if (relation.kind === 'testnetOf' && relation.chainId !== undefined) { - const mainnetChain = indexed.byChainId[relation.chainId]; - if (mainnetChain) { - // Check if mainnetOf relation doesn't already exist - const existingMainnetOf = mainnetChain.relations.find( - r => r.kind === 'mainnetOf' && r.chainId === Number.parseInt(chainId, 10) - ); - - if (!existingMainnetOf) { - mainnetChain.relations.push({ - kind: 'mainnetOf', - network: chain.name || chain.shortName || chainId.toString(), - chainId: Number.parseInt(chainId, 10), - source: relation.source - }); - } - } - } - - // Add parentOf reverse relation for l2Of - if (relation.kind === 'l2Of' && relation.chainId !== undefined) { - const parentChain = indexed.byChainId[relation.chainId]; - if (parentChain) { - // Check if parentOf relation doesn't already exist - const existingParentOf = parentChain.relations.find( - r => r.kind === 'parentOf' && r.chainId === Number.parseInt(chainId, 10) - ); - - if (!existingParentOf) { - parentChain.relations.push({ - kind: 'parentOf', - network: chain.name || chain.shortName || chainId.toString(), - chainId: Number.parseInt(chainId, 10), - source: relation.source - }); - } - } - } - }); - } - }); - - // Build all chains array - indexed.all = Object.values(indexed.byChainId); - - return indexed; -} - -/** - * Load and cache all data sources - */ -export async function loadData() { - return refreshDataWithGuard({ requireAtLeastOneSource: true }); -} - -/** - * Initialize data on startup using a stale-first strategy: - * 1. Load valid snapshot from disk if available. - * 2. Trigger background refresh and keep serving stale data on failures. - * 3. Fallback to blocking load if no valid snapshot exists. 
- */ -export async function initializeDataOnStartup(options = {}) { - const { onBackgroundRefreshSuccess } = options; - - if (startupInitialized) { - return cachedData; - } - - if (startupInitializationPromise) { - return startupInitializationPromise; - } - - startupInitializationPromise = (async () => { - const snapshotData = await readSnapshotFromDisk(); - - if (snapshotData) { - applyDataToCache(snapshotData); - startupInitialized = true; - console.log(`Loaded cached snapshot from ${DATA_CACHE_PATH}. Total chains: ${cachedData.indexed.all.length}`); - - refreshDataWithGuard({ requireAtLeastOneSource: true }) - .then(() => { - console.log('Background refresh completed successfully.'); - if (typeof onBackgroundRefreshSuccess === 'function') { - onBackgroundRefreshSuccess(); - } - }) - .catch(error => { - console.error(`Background refresh failed; continuing with cached data: ${error.message || error}`); - }); - - return cachedData; - } - - console.log('No valid cache snapshot found. Loading data from remote sources...'); - const loadedData = await loadData(); - startupInitialized = true; - return loadedData; - })(); - - try { - return await startupInitializationPromise; - } finally { - startupInitializationPromise = null; - } -} - -/** - * Get cached data - */ -export function getCachedData() { - return cachedData; -} - -function flattenRpcHealthResults() { - return Object.entries(cachedData.rpcHealth || {}).flatMap(([chainId, results]) => { - const numericChainId = Number.parseInt(chainId, 10); - const chainName = cachedData.indexed?.byChainId?.[numericChainId]?.name ?? `Chain ${chainId}`; - - return (Array.isArray(results) ? results : []).map((result) => ({ - chainId: numericChainId, - chainName, - url: result.url, - status: result.ok ? 'working' : 'failed', - clientVersion: result.clientVersion ?? null, - blockNumber: result.blockHeight ?? null, - latencyMs: result.latencyMs ?? null, - error: result.error ?? 
null - })); - }); -} - -export function getRpcMonitoringResults() { - const results = flattenRpcHealthResults(); - const workingEndpoints = results.filter(result => result.status === 'working').length; - const failedEndpoints = results.length - workingEndpoints; - - return { - lastUpdated: cachedData.lastRpcCheck, - totalEndpoints: results.length, - testedEndpoints: results.length, - workingEndpoints, - failedEndpoints, - results - }; -} - -export function getRpcMonitoringStatus() { - return { - isMonitoring: rpcCheckInProgress, - lastUpdated: cachedData.lastRpcCheck - }; -} - -/** - * Search chains by various criteria - */ -export function searchChains(query) { - if (!cachedData.indexed) { - return []; - } - - const results = []; - const queryLower = query.toLowerCase(); - - // Search by chain ID (exact match) - const parsedChainId = Number.parseInt(query, 10); - if (!Number.isNaN(parsedChainId)) { - const chain = getChainById(parsedChainId); - if (chain) { - results.push(chain); - } - } - - // Search by name (partial match) - cachedData.indexed.all.forEach(chain => { - if (chain.name?.toLowerCase().includes(queryLower)) { - if (!results.some(r => r.chainId === chain.chainId)) { - results.push(getChainById(chain.chainId)); - } - } - if (chain.shortName?.toLowerCase().includes(queryLower)) { - if (!results.some(r => r.chainId === chain.chainId)) { - results.push(getChainById(chain.chainId)); - } - } - }); - - return results; -} - -/** - * Get chain by ID (returns full data including rpc, relations, theGraph) - */ -function getChainByIdRaw(chainId) { - if (!cachedData.indexed) { - return null; - } - - return cachedData.indexed.byChainId[chainId] || null; -} - -/** - * Transform chain to API format (without rpc, relations, and with flattened theGraph fields) - */ -function transformChain(chain) { - if (!chain) { - return null; - } - - // Create transformed chain object - const transformedChain = { - chainId: chain.chainId, - name: chain.name, - shortName: 
chain.shortName, - }; - - // Add theGraph fields if available - if (chain.theGraph) { - transformedChain['theGraph-id'] = chain.theGraph.id; - transformedChain.fullName = chain.theGraph.fullName; - transformedChain.caip2Id = chain.theGraph.caip2Id; - if (chain.theGraph.aliases) { - transformedChain.aliases = chain.theGraph.aliases; - } - } - - // Add other fields - if (chain.nativeCurrency) { - transformedChain.nativeCurrency = chain.nativeCurrency; - } - if (chain.explorers) { - transformedChain.explorers = chain.explorers; - } - if (chain.infoURL) { - transformedChain.infoURL = chain.infoURL; - } - if (chain.sources) { - transformedChain.sources = chain.sources; - } - if (chain.tags) { - transformedChain.tags = chain.tags; - } - if (chain.status) { - transformedChain.status = chain.status; - } - if (chain.bridges) { - transformedChain.bridges = chain.bridges; - } - - return transformedChain; -} - -/** - * Get chain by ID (transformed format without rpc, relations, and with flattened theGraph fields) - */ -export function getChainById(chainId) { - const chain = getChainByIdRaw(chainId); - return transformChain(chain); -} - -/** - * Get all chains (transformed format without rpc, relations, and with flattened theGraph fields) - */ -export function getAllChains() { - if (!cachedData.indexed) { - return []; - } - - // Transform all chains using the helper function - return cachedData.indexed.all.map(transformChain); -} - -/** - * Count chains by tag categories - * @param {Array} chains - Array of chain objects - * @returns {{ totalChains: number, totalMainnets: number, totalTestnets: number, totalL2s: number, totalBeacons: number }} - */ -export function countChainsByTag(chains) { - const totalChains = chains.length; - let totalTestnets = 0; - let totalL2s = 0; - let totalBeacons = 0; - let totalMainnets = 0; - - for (const chain of chains) { - const tags = chain.tags || []; - const isTestnet = tags.includes('Testnet'); - const isL2 = tags.includes('L2'); - const 
isBeacon = tags.includes('Beacon'); - - if (isTestnet) totalTestnets += 1; - if (isL2) totalL2s += 1; - if (isBeacon) totalBeacons += 1; - if (!isTestnet && !isL2 && !isBeacon) totalMainnets += 1; - } - - return { totalChains, totalMainnets, totalTestnets, totalL2s, totalBeacons }; -} - -/** - * Add value to a keyword set if it is a non-empty string - */ -function addKeywordValue(set, value) { - if (typeof value !== 'string') { - return; - } - - const normalized = value.trim(); - if (normalized.length > 0) { - set.add(normalized); - } -} - -/** - * Add tokenized words from a text value into a target set - */ -function addTokenKeywords(set, value) { - if (typeof value !== 'string') { - return; - } - - const tokens = value - .toLowerCase() - .split(/[^a-z0-9]+/i) - .filter(token => token.length >= 2); - - tokens.forEach(token => set.add(token)); -} - -const keywordSortCollator = new Intl.Collator('en', { - numeric: true, - sensitivity: 'base' -}); - -function sortKeywordSet(set) { - return Array.from(set).sort((a, b) => keywordSortCollator.compare(a, b)); -} - -/** - * Extract software client name from a client version string. - * Example: "Geth/v1.13.0" -> "Geth" - */ -function extractClientName(clientVersion) { - if (typeof clientVersion !== 'string') { - return null; - } - - const trimmed = clientVersion.trim(); - if (!trimmed) { - return null; - } - - const slashIndex = trimmed.indexOf('/'); - const candidate = slashIndex === -1 ? 
trimmed : trimmed.slice(0, slashIndex); - return candidate || null; -} - -/** - * Get extracted keywords from indexed chains and RPC health data - */ -export function getAllKeywords() { - if (!cachedData.indexed) { - return { - totalKeywords: 0, - keywords: { - blockchainNames: [], - networkNames: [], - softwareClients: [], - currencySymbols: [], - tags: [], - relationKinds: [], - sources: [], - statuses: [], - generic: [] - } - }; - } - - const blockchainNames = new Set(); - const networkNames = new Set(); - const softwareClients = new Set(); - const currencySymbols = new Set(); - const tags = new Set(); - const relationKinds = new Set(); - const sources = new Set(); - const statuses = new Set(); - const generic = new Set(); - - cachedData.indexed.all.forEach(chain => { - addKeywordValue(blockchainNames, chain.name); - addKeywordValue(networkNames, chain.network); - addKeywordValue(networkNames, chain.shortName); - addKeywordValue(networkNames, chain.theGraph?.id); - addKeywordValue(networkNames, chain.theGraph?.caip2Id); - addKeywordValue(currencySymbols, chain.nativeCurrency?.symbol); - addKeywordValue(statuses, chain.status); - - addTokenKeywords(generic, chain.name); - addTokenKeywords(generic, chain.network); - addTokenKeywords(generic, chain.shortName); - addTokenKeywords(generic, chain.theGraph?.fullName); - - if (Array.isArray(chain.sources)) { - chain.sources.forEach(source => addKeywordValue(sources, source)); - } - - if (Array.isArray(chain.tags)) { - chain.tags.forEach(tag => { - addKeywordValue(tags, tag); - addTokenKeywords(generic, tag); - }); - } - - if (Array.isArray(chain.relations)) { - chain.relations.forEach(relation => { - addKeywordValue(relationKinds, relation.kind); - addKeywordValue(networkNames, relation.network); - addTokenKeywords(generic, relation.network); - }); - } - }); - - Object.values(cachedData.rpcHealth || {}).forEach(results => { - if (!Array.isArray(results)) { - return; - } - - results.forEach(result => { - const clientName 
= extractClientName(result?.clientVersion); - if (clientName) { - addKeywordValue(softwareClients, clientName); - addTokenKeywords(generic, clientName); - } - addTokenKeywords(generic, result?.clientVersion); - }); - }); - - const keywords = { - blockchainNames: sortKeywordSet(blockchainNames), - networkNames: sortKeywordSet(networkNames), - softwareClients: sortKeywordSet(softwareClients), - currencySymbols: sortKeywordSet(currencySymbols), - tags: sortKeywordSet(tags), - relationKinds: sortKeywordSet(relationKinds), - sources: sortKeywordSet(sources), - statuses: sortKeywordSet(statuses), - generic: sortKeywordSet(generic) - }; - - const totalKeywords = Object.values(keywords).reduce( - (acc, keywordList) => acc + keywordList.length, - 0 - ); - - return { - totalKeywords, - keywords - }; -} - -/** - * Get all relations from all chains - * Returns relations with nested structure: { parentChainId: { childChainId: {...} } } - */ -export function getAllRelations() { - if (!cachedData.indexed) { - return {}; - } - - const allRelations = {}; - - // Allowed relation kinds (parentOf will be renamed to l1Of in the output) - const allowedKinds = new Set(['l2Of', 'parentOf', 'testnetOf', 'mainnetOf']); - - cachedData.indexed.all.forEach(chain => { - if (chain.relations?.length > 0) { - chain.relations.forEach(relation => { - // Only include allowed relation kinds and those with chainId - if (allowedKinds.has(relation.kind) && relation.chainId !== undefined) { - let parentChainId, childChainId, parentName, childName; - - // Rename parentOf to l1Of - let kind = relation.kind; - if (kind === 'parentOf') { - kind = 'l1Of'; - } - - // Determine parent and child based on relation type - if (kind === 'l1Of' || kind === 'mainnetOf') { - // For l1Of (parentOf) and mainnetOf: the chain having the relation is the parent - parentChainId = chain.chainId; - childChainId = relation.chainId; - parentName = chain.name; - const childChain = cachedData.indexed.byChainId[childChainId]; - 
childName = childChain ? childChain.name : relation.network; - } else { - // For l2Of and testnetOf: the chain having the relation is the child - childChainId = chain.chainId; - parentChainId = relation.chainId; - childName = chain.name; - const parentChain = cachedData.indexed.byChainId[parentChainId]; - parentName = parentChain ? parentChain.name : relation.network; - } - - // Use nested structure: parentChainId -> childChainId -> relation data - const parentKey = String(parentChainId); - const childKey = String(childChainId); - - // Initialize parent entry if it doesn't exist - if (!allRelations[parentKey]) { - allRelations[parentKey] = {}; - } - - // Store relation under child chainId within parent's object - allRelations[parentKey][childKey] = { - parentName, - kind, - childName, - chainId: childChainId, - source: relation.source - }; - } - }); - } - }); - - return allRelations; -} - -/** - * Get relations for a specific chain by ID - */ -export function getRelationsById(chainId) { - if (!cachedData.indexed) { - return null; - } - - const chain = cachedData.indexed.byChainId[chainId]; - - if (!chain) { - return null; - } - - return { - chainId: chain.chainId, - chainName: chain.name, - relations: chain.relations || [] - }; -} - -/** - * BFS graph traversal of chain relations starting from a given chain ID - * @param {number} startChainId - The chain ID to start traversal from - * @param {number} maxDepth - Maximum traversal depth (default: 2) - * @returns {Object|null} Traversal result with nodes and edges, or null if chain not found - */ -function collectRelationEdges(chain, chainId, depth, visited, edges, queue, seenEdges) { - const relations = chain.relations || []; - for (const rel of relations) { - if (rel.chainId === undefined) continue; - - // Deduplicate bidirectional edges (A→B and B→A with same kind) using O(1) Set lookup - const a = Math.min(chainId, rel.chainId); - const b = Math.max(chainId, rel.chainId); - const edgeKey = `${a}-${b}-${rel.kind}`; 
- if (!seenEdges.has(edgeKey)) { - seenEdges.add(edgeKey); - edges.push({ - from: chainId, - to: rel.chainId, - kind: rel.kind, - source: rel.source - }); - } - - if (!visited.has(rel.chainId)) { - queue.push({ chainId: rel.chainId, depth: depth + 1 }); - } - } -} - -export function traverseRelations(startChainId, maxDepth = 2) { - if (!cachedData.indexed) return null; - - const startChain = cachedData.indexed.byChainId[startChainId]; - if (!startChain) return null; - - const visited = new Set(); - const seenEdges = new Set(); - const queue = [{ chainId: startChainId, depth: 0 }]; - const nodes = []; - const edges = []; - - while (queue.length > 0) { - const { chainId, depth } = queue.shift(); - if (visited.has(chainId)) continue; - visited.add(chainId); - - const chain = cachedData.indexed.byChainId[chainId]; - if (!chain) continue; - - nodes.push({ - chainId: chain.chainId, - name: chain.name, - tags: chain.tags || [], - depth - }); - - if (depth < maxDepth) { - collectRelationEdges(chain, chainId, depth, visited, edges, queue, seenEdges); - } - } - - return { - startChainId, - startChainName: startChain.name, - maxDepth, - totalNodes: nodes.length, - totalEdges: edges.length, - nodes, - edges - }; -} - -/** - * Extract endpoints from a chain (helper function) - */ -function extractEndpoints(chain) { - if (!chain) { - return null; - } - - const endpoints = { - chainId: chain.chainId, - name: chain.name, - rpc: chain.rpc || [], - firehose: [], - substreams: [] - }; - - // Extract firehose and substreams from theGraph services - if (chain.theGraph?.services) { - if (chain.theGraph.services.firehose) { - endpoints.firehose = chain.theGraph.services.firehose; - } - if (chain.theGraph.services.substreams) { - endpoints.substreams = chain.theGraph.services.substreams; - } - } - - return endpoints; -} - -/** - * Get endpoints for a specific chain by ID - */ -export function getEndpointsById(chainId) { - const chain = getChainByIdRaw(chainId); - return 
extractEndpoints(chain); -} - -/** - * Get endpoints for all chains - */ -export function getAllEndpoints() { - if (!cachedData.indexed) { - return []; - } - - return cachedData.indexed.all.map(extractEndpoints); -} - -/** - * Normalize an RPC entry to a plain URL string - */ -function normalizeRpcUrl(rpcEntry) { - if (!rpcEntry) return null; - if (typeof rpcEntry === 'string') return rpcEntry; - if (typeof rpcEntry === 'object' && rpcEntry.url) return rpcEntry.url; - return null; -} - -/** - * Convert a block height (hex or number) to a numeric value - */ -function parseBlockHeight(value) { - if (typeof value === 'number') { - return Number.isFinite(value) ? value : null; - } - - if (typeof value === 'string') { - if (value.startsWith('0x')) { - const parsed = Number.parseInt(value, 16); - return Number.isNaN(parsed) ? null : parsed; - } - - const parsed = Number(value); - return Number.isNaN(parsed) ? null : parsed; - } - - return null; -} - -/** - * Check a single RPC endpoint for client version and latest block height - */ -async function checkRpcEndpoint(url) { - const result = { - url, - ok: false, - clientVersion: null, - blockHeight: null, - error: null - }; - - if (!url?.startsWith('http')) { - result.error = 'Unsupported RPC URL'; - return result; - } - - if (url.includes('${')) { - result.error = 'RPC URL requires API key substitution'; - return result; - } - - try { - const [clientVersion, blockNumber] = await Promise.all([ - jsonRpcCall(url, 'web3_clientVersion', { timeoutMs: RPC_CHECK_TIMEOUT_MS }), - jsonRpcCall(url, 'eth_blockNumber', { timeoutMs: RPC_CHECK_TIMEOUT_MS }) - ]); - - result.clientVersion = clientVersion || null; - result.blockHeight = parseBlockHeight(blockNumber); - result.ok = Boolean(result.clientVersion) && result.blockHeight !== null; - } catch (error) { - result.error = error.message; - } - - return result; -} - -/** - * Run RPC health checks across all endpoints - */ -export async function runRpcHealthCheck() { - if 
(!cachedData.indexed) { - console.warn('RPC health check skipped: data not loaded'); - return; - } - - const dataVersion = cachedData.lastUpdated; - const endpoints = getAllEndpoints(); - const tasks = []; - const results = {}; - - endpoints.forEach(({ chainId, rpc }) => { - const normalizedUrls = (rpc || []).map(normalizeRpcUrl).filter(Boolean); - const validUrls = Array.from(new Set(normalizedUrls)).filter(url => url.startsWith('http')); - - if (validUrls.length === 0) { - return; - } - - validUrls.forEach(url => tasks.push({ chainId, url })); - if (!results[chainId]) { - results[chainId] = []; - } - }); - - cachedData.rpcHealth = {}; - cachedData.lastRpcCheck = null; - - if (tasks.length === 0) { - console.warn('RPC health check skipped: no RPC endpoints found'); - return; - } - - let taskIndex = 0; - const worker = async () => { - while (taskIndex < tasks.length) { - const current = taskIndex++; - const task = tasks[current]; - const status = await checkRpcEndpoint(task.url); - - if (!results[task.chainId]) { - results[task.chainId] = []; - } - - results[task.chainId].push(status); - } - }; - - const workerCount = Math.min(RPC_CHECK_CONCURRENCY, tasks.length); - const workers = Array.from({ length: workerCount }, worker); - await Promise.all(workers); - - if (cachedData.lastUpdated !== dataVersion) { - console.warn('RPC health check skipped: data changed during run'); - return; - } - - cachedData.rpcHealth = results; - cachedData.lastRpcCheck = new Date().toISOString(); - console.log(`RPC health check completed: ${tasks.length} endpoints tested across ${Object.keys(results).length} chains`); -} - -/** - * Start the RPC health check in the background (no-op if already running) - */ -export function startRpcHealthCheck() { - if (rpcCheckInProgress) { - rpcCheckPending = true; - return; - } - - rpcCheckInProgress = true; - rpcCheckPending = false; - runRpcHealthCheck() - .catch(error => { - console.error('RPC health check failed:', error.message || error); - }) - 
.finally(() => { - rpcCheckInProgress = false; - - if (rpcCheckPending) { - startRpcHealthCheck(); - } - }); -} - -// Helper function to get chain from different sources -function getChainFromSource(chainId, source) { - if (source === 'theGraph') { - return cachedData.theGraph.networks?.find(n => { - if (n.caip2Id) { - const match = n.caip2Id.match(/^eip155:(\d+)$/); - return match && Number.parseInt(match[1], 10) === chainId; - } - return false; - }); - } else if (source === 'chainlist') { - return cachedData.chainlist?.find(c => c.chainId === chainId); - } else if (source === 'chains') { - return cachedData.chains?.find(c => c.chainId === chainId); - } - return null; -} - -// Rule 1: Check for relation conflicts -function validateRule1RelationConflicts(chain, errors) { - if (!chain.relations || chain.relations.length === 0) return; - - const graphRelations = chain.relations.filter(r => r.source === 'theGraph'); - - graphRelations.forEach(graphRel => { - if (graphRel.kind === 'testnetOf' && graphRel.chainId) { - if (!chain.tags.includes('Testnet')) { - errors.push({ - rule: 1, - chainId: chain.chainId, - chainName: chain.name, - type: 'relation_tag_conflict', - message: `Chain ${chain.chainId} (${chain.name}) has testnetOf relation but is not tagged as Testnet`, - graphRelation: graphRel - }); - } - - const chainlistChain = getChainFromSource(chain.chainId, 'chainlist'); - if (chainlistChain?.isTestnet === false) { - errors.push({ - rule: 1, - chainId: chain.chainId, - chainName: chain.name, - type: 'relation_source_conflict', - message: `Chain ${chain.chainId} (${chain.name}) has testnetOf relation in theGraph but isTestnet=false in chainlist`, - graphRelation: graphRel, - chainlistData: { isTestnet: chainlistChain.isTestnet } - }); - } - } - - if (graphRel.kind === 'l2Of' && graphRel.chainId) { - if (!chain.tags.includes('L2')) { - errors.push({ - rule: 1, - chainId: chain.chainId, - chainName: chain.name, - type: 'relation_tag_conflict', - message: `Chain 
${chain.chainId} (${chain.name}) has l2Of relation but is not tagged as L2`, - graphRelation: graphRel - }); - } - } - }); -} - -// Rule 2: Check slip44 testnet mismatch -function validateRule2Slip44Mismatch(chain, errors) { - const chainlistChain = getChainFromSource(chain.chainId, 'chainlist'); - const chainsChain = getChainFromSource(chain.chainId, 'chains'); - - if (chainlistChain?.slip44 === 1 && chainlistChain.isTestnet === false) { - errors.push({ - rule: 2, - chainId: chain.chainId, - chainName: chain.name, - type: 'slip44_testnet_mismatch', - message: `Chain ${chain.chainId} (${chain.name}) has slip44=1 (testnet indicator) but isTestnet=false in chainlist`, - slip44: chainlistChain.slip44, - isTestnet: chainlistChain.isTestnet - }); - } - - if (chainsChain?.slip44 === 1 && !chain.tags.includes('Testnet')) { - errors.push({ - rule: 2, - chainId: chain.chainId, - chainName: chain.name, - type: 'slip44_testnet_mismatch', - message: `Chain ${chain.chainId} (${chain.name}) has slip44=1 (testnet indicator) in chains.json but not tagged as Testnet`, - slip44: chainsChain.slip44, - tags: chain.tags - }); - } -} - -// Rule 3: Check name testnet mismatch -function validateRule3NameTestnetMismatch(chain, errors) { - const fullName = chain.theGraph?.fullName || chain.name || ''; - const nameLower = fullName.toLowerCase(); - - if ((nameLower.includes('testnet') || nameLower.includes('devnet')) && !chain.tags.includes('Testnet')) { - errors.push({ - rule: 3, - chainId: chain.chainId, - chainName: chain.name, - type: 'name_testnet_mismatch', - message: `Chain ${chain.chainId} (${chain.name}) has "Testnet" or "Devnet" in full name "${fullName}" but not tagged as Testnet`, - fullName: fullName, - tags: chain.tags - }); - } -} - -// Rule 4: Check sepolia/hoodie without L2 tag or relations -function validateRule4SepoliaHoodie(chain, errors) { - const fullName = chain.theGraph?.fullName || chain.name || ''; - const nameLower = fullName.toLowerCase(); - - if 
(nameLower.includes('sepolia') || nameLower.includes('hoodie')) { - const hasL2Tag = chain.tags.includes('L2'); - const hasRelations = chain.relations && chain.relations.length > 0; - - if (!hasL2Tag && !hasRelations) { - errors.push({ - rule: 4, - chainId: chain.chainId, - chainName: chain.name, - type: 'sepolia_hoodie_no_l2_or_relations', - message: `Chain ${chain.chainId} (${chain.name}) contains "sepolia" or "hoodie" but not tagged as L2 and has no relations`, - fullName: fullName, - tags: chain.tags, - relations: chain.relations - }); - } - } -} - -// Rule 5: Check status conflicts across sources -function validateRule5StatusConflicts(chain, errors) { - const chainlistChain = getChainFromSource(chain.chainId, 'chainlist'); - const chainsChain = getChainFromSource(chain.chainId, 'chains'); - - const statuses = []; - if (chainlistChain?.status) { - statuses.push({ source: 'chainlist', status: chainlistChain.status }); - } - if (chainsChain?.status) { - statuses.push({ source: 'chains', status: chainsChain.status }); - } - - const deprecatedInSources = statuses.filter(s => s.status === 'deprecated'); - const activeInSources = statuses.filter(s => s.status === 'active'); - - if (deprecatedInSources.length > 0 && activeInSources.length > 0) { - errors.push({ - rule: 5, - chainId: chain.chainId, - chainName: chain.name, - type: 'status_conflict', - message: `Chain ${chain.chainId} (${chain.name}) has conflicting status across sources`, - statuses: statuses - }); - } - - return statuses; -} - -// Rule 6: Check Goerli not deprecated -function validateRule6GoerliDeprecated(chain, statuses, errors) { - const fullName = chain.theGraph?.fullName || chain.name || ''; - const nameLower = fullName.toLowerCase(); - - if (nameLower.includes('goerli')) { - const chainlistChain = getChainFromSource(chain.chainId, 'chainlist'); - const chainsChain = getChainFromSource(chain.chainId, 'chains'); - - const isDeprecated = chain.status === 'deprecated' || - chainlistChain?.status === 
'deprecated' || - chainsChain?.status === 'deprecated'; - - if (!isDeprecated) { - errors.push({ - rule: 6, - chainId: chain.chainId, - chainName: chain.name, - type: 'goerli_not_deprecated', - message: `Chain ${chain.chainId} (${chain.name}) contains "Goerli" but is not marked as deprecated`, - fullName: fullName, - status: chain.status, - statusInSources: statuses - }); - } - } -} - -// Validate a single chain -function validateChain(chain, errors) { - validateRule1RelationConflicts(chain, errors); - validateRule2Slip44Mismatch(chain, errors); - validateRule3NameTestnetMismatch(chain, errors); - validateRule4SepoliaHoodie(chain, errors); - const statuses = validateRule5StatusConflicts(chain, errors); - validateRule6GoerliDeprecated(chain, statuses, errors); -} - -/** - * Validate chain data for potential human errors - * Returns an object with validation results categorized by error type - */ -export function validateChainData() { - if (!cachedData.indexed || !cachedData.theGraph || !cachedData.chainlist || !cachedData.chains) { - return { - error: 'Data not loaded. 
Please reload data sources first.', - errors: [] - }; - } - - const errors = []; - - // Iterate through all indexed chains - Object.values(cachedData.indexed.byChainId).forEach(chain => { - validateChain(chain, errors); - }); - - // Group errors by rule - const errorsByRule = { - rule1_relation_conflicts: errors.filter(e => e.rule === 1), - rule2_slip44_testnet_mismatch: errors.filter(e => e.rule === 2), - rule3_name_testnet_mismatch: errors.filter(e => e.rule === 3), - rule4_sepolia_hoodie_issues: errors.filter(e => e.rule === 4), - rule5_status_conflicts: errors.filter(e => e.rule === 5), - rule6_goerli_not_deprecated: errors.filter(e => e.rule === 6) - }; - - return { - totalErrors: errors.length, - errorsByRule: errorsByRule, - summary: { - rule1: errorsByRule.rule1_relation_conflicts.length, - rule2: errorsByRule.rule2_slip44_testnet_mismatch.length, - rule3: errorsByRule.rule3_name_testnet_mismatch.length, - rule4: errorsByRule.rule4_sepolia_hoodie_issues.length, - rule5: errorsByRule.rule5_status_conflicts.length, - rule6: errorsByRule.rule6_goerli_not_deprecated.length - }, - allErrors: errors - }; -} - - - + * Backwards-compatible facade. Implementation lives under src/. + * + * **New code should import directly from the per-domain modules under src/** + * (e.g. `src/store/queries.js`, `src/services/loader.js`). This file exists + * to keep existing imports — including external consumers, MCP tooling, and + * the integration test mocks — working while the codebase migrates. + * + * Do not add new exports here. When a new function is added to src/, callers + * should import it from its real location. 
+ */ + +export { fetchData } from './src/transport/fetch.js'; +export { parseSLIP44 } from './src/sources/slip44.js'; +export { indexData } from './src/store/indexer.js'; +export { getCachedData } from './src/store/cache.js'; +export { + searchChains, + getChainById, + getAllChains, + countChainsByTag, + getEndpointsById, + getAllEndpoints, + getRpcMonitoringResults +} from './src/store/queries.js'; +export { + runRpcHealthCheck, + startRpcHealthCheck, + getRpcMonitoringStatus +} from './src/services/rpcHealth.js'; +export { getAllKeywords } from './src/domain/keywords.js'; +export { + getAllRelations, + getRelationsById, + traverseRelations +} from './src/domain/relations.js'; +export { validateChainData } from './src/services/validation.js'; +export { loadData, initializeDataOnStartup } from './src/services/loader.js'; diff --git a/eslint.config.js b/eslint.config.js new file mode 100644 index 0000000..91df0e9 --- /dev/null +++ b/eslint.config.js @@ -0,0 +1,40 @@ +// ESLint flat config (eslint 10+). +// +// Minimal setup: the only real rule today is `no-restricted-imports` on +// `src/**/*.js`, which prevents new code from importing the legacy +// `dataService.js` facade. New code should depend on the per-domain +// modules under `src/` directly so the facade can eventually be deleted. +// +// To run: `npm run lint`. CI runs it via the test workflow. + +export default [ + { + ignores: [ + 'node_modules/**', + 'graphify-out/**', + '.cache/**', + 'coverage/**', + 'public/**' + ] + }, + + // Rule: nothing under src/ may import the legacy dataService.js facade. + // Routes should depend on per-domain modules under src/ directly; lower + // layers (store/domain/sources/services) likewise. The integration tests + // mock each src/ path individually via vi.hoisted() so this constraint + // doesn't break test setup. 
+ { + files: ['src/**/*.js'], + rules: { + 'no-restricted-imports': ['error', { + paths: [{ + name: '../../dataService.js', + message: 'Import from the peer module under src/ instead. dataService.js is a thin re-export facade for legacy callers only; new code should not depend on it.' + }, { + name: '../../../dataService.js', + message: 'Import from the peer module under src/ instead. dataService.js is a thin re-export facade for legacy callers only; new code should not depend on it.' + }] + }] + } + } +]; diff --git a/fetchUtil.js b/fetchUtil.js index 36b2a30..1d3ffde 100644 --- a/fetchUtil.js +++ b/fetchUtil.js @@ -1,5 +1,6 @@ import { HttpsProxyAgent } from 'https-proxy-agent'; import { PROXY_URL, PROXY_ENABLED } from './config.js'; +import { logger } from './src/util/logger.js'; /** * Proxy-aware fetch wrapper @@ -9,14 +10,12 @@ import { PROXY_URL, PROXY_ENABLED } from './config.js'; let proxyAgent = null; -// Initialize proxy agent if configured if (PROXY_ENABLED) { try { proxyAgent = new HttpsProxyAgent(PROXY_URL); - console.log(`Proxy enabled: ${PROXY_URL.replace(/:[^:@]*@/, ':****@')}`); // Hide password in logs + logger.info({ proxy: PROXY_URL.replace(/:[^:@]*@/, ':****@') }, 'Proxy enabled'); } catch (error) { - console.error(`Failed to initialize proxy agent: ${error.message}`); - console.error('Proxy will be disabled. 
Continuing without proxy support.'); + logger.error({ err: error.message }, 'Failed to initialize proxy agent; continuing without proxy'); } } diff --git a/index.js b/index.js index 11d7e50..5d89019 100644 --- a/index.js +++ b/index.js @@ -1,588 +1,10 @@ -import Fastify from 'fastify'; -import cors from '@fastify/cors'; -import rateLimit from '@fastify/rate-limit'; -import helmet from '@fastify/helmet'; -import fastifyStatic from '@fastify/static'; -import { readFile } from 'node:fs/promises'; -import { basename, resolve, dirname, join } from 'node:path'; import { fileURLToPath as toFilePath } from 'node:url'; -import pkg from './package.json' with { type: 'json' }; -import { loadData, initializeDataOnStartup, getCachedData, searchChains, getChainById, getAllChains, getAllRelations, getRelationsById, getEndpointsById, getAllEndpoints, getAllKeywords, validateChainData, traverseRelations, countChainsByTag, getRpcMonitoringResults, getRpcMonitoringStatus, startRpcHealthCheck } from './dataService.js'; -import { getClientsByChain, summarizeChainClients } from './clientsView.js'; -import { getPricesForChains, getPriceForChain, prefetchAllPrices } from './priceService.js'; -import { - PORT, HOST, BODY_LIMIT, MAX_PARAM_LENGTH, - RATE_LIMIT_MAX, RATE_LIMIT_WINDOW_MS, - RELOAD_RATE_LIMIT_MAX, SEARCH_RATE_LIMIT_MAX, - MAX_SEARCH_QUERY_LENGTH, CORS_ORIGIN, - DATA_SOURCE_THE_GRAPH, DATA_SOURCE_CHAINLIST, - DATA_SOURCE_CHAINS, DATA_SOURCE_SLIP44, - DATA_CACHE_ENABLED, DATA_CACHE_FILE -} from './config.js'; +import { buildApp } from './src/http/app.js'; +import { PORT, HOST } from './config.js'; -/** - * Build and configure the Fastify application - * @param {Object} options - Options for the Fastify instance - * @param {boolean} options.logger - Enable logging (default: true) - * @param {number} options.bodyLimit - Request body size limit - * @param {number} options.maxParamLength - Max parameter length - * @param {boolean} options.loadDataOnStartup - Load data on startup 
(default: true) - * @returns {Promise} Configured Fastify instance - */ -export async function buildApp(options = {}) { - const { - logger = true, - bodyLimit = BODY_LIMIT, - maxParamLength = MAX_PARAM_LENGTH, - loadDataOnStartup = true - } = options; +export { buildApp }; - const fastify = Fastify({ - logger, - bodyLimit, - maxParamLength - }); - - // Security: CORS - await fastify.register(cors, { - origin: CORS_ORIGIN === '*' ? true : CORS_ORIGIN.split(',').map(s => s.trim()), - credentials: false - }); - - // Security: Helmet (security headers) - await fastify.register(helmet, { - contentSecurityPolicy: { - directives: { - defaultSrc: ["'self'"], - scriptSrc: ["'self'"], - styleSrc: ["'self'"], - fontSrc: ["'self'"], - connectSrc: ["'self'"], - imgSrc: ["'self'", "data:"] - } - } - }); - - // Serve public/ directory for the 3D visualization UI - const __dir = dirname(toFilePath(import.meta.url)); - await fastify.register(fastifyStatic, { - root: join(__dir, 'public'), - prefix: '/ui/', - decorateReply: false - }); - - // Security: Rate limiting - await fastify.register(rateLimit, { - max: RATE_LIMIT_MAX, - timeWindow: RATE_LIMIT_WINDOW_MS - }); - - // Load data on startup - if (loadDataOnStartup) { - await initializeDataOnStartup({ - onBackgroundRefreshSuccess: () => { - startRpcHealthCheck(); - } - }); - startRpcHealthCheck(); - // Warm the price cache in the background so the first /chains request - // doesn't pay a CoinGecko round-trip. Failures are silent. - prefetchAllPrices().catch(err => { - console.warn(`Initial price prefetch failed: ${err.message}`); - }); - } - - /** - * Health check endpoint - */ - fastify.get('/health', async () => { - const cachedData = getCachedData(); - return { - status: 'ok', - dataLoaded: cachedData.indexed !== null, - lastUpdated: cachedData.lastUpdated, - totalChains: cachedData.indexed ? 
cachedData.indexed.all.length : 0 - }; - }); - - /** - * Get all chains - */ - fastify.get('/chains', async (request, reply) => { - const { tag } = request.query; - let chains = getAllChains(); - - // Filter by tag if provided (validate against known tags) - if (tag) { - const validTags = ['Testnet', 'L2', 'Beacon']; - if (!validTags.includes(tag)) { - return sendError(reply, 400, `Invalid tag. Allowed: ${validTags.join(', ')}`); - } - chains = chains.filter(chain => chain.tags?.includes(tag)); - } - - const chainIds = chains.map(c => c.chainId); - const priceMap = await getPricesForChains(chainIds); - const enrichedChains = chains.map(chain => ({ - ...chain, - price: priceMap.get(chain.chainId) ?? null - })); - - return { - count: enrichedChains.length, - chains: enrichedChains - }; - }); - - /** - * Get chain by ID - */ - fastify.get('/chains/:id', async (request, reply) => { - const chainId = parseIntParam(request.params.id); - if (chainId === null) { - return sendError(reply, 400, 'Invalid chain ID'); - } - - const chain = getChainById(chainId); - if (!chain) { - return sendError(reply, 404, 'Chain not found'); - } - - const price = await getPriceForChain(chainId); - return { ...chain, price }; - }); - - /** - * Search chains (tighter rate limit) - */ - fastify.get('/search', { - config: { - rateLimit: { - max: SEARCH_RATE_LIMIT_MAX, - timeWindow: RATE_LIMIT_WINDOW_MS - } - } - }, async (request, reply) => { - const { q } = request.query; - - if (!q) { - return sendError(reply, 400, 'Query parameter "q" is required'); - } - - if (q.length > MAX_SEARCH_QUERY_LENGTH) { - return sendError(reply, 400, `Query too long. 
Max length: ${MAX_SEARCH_QUERY_LENGTH}`); - } - - const results = searchChains(q); - - return { - query: q, - count: results.length, - results - }; - }); - - /** - * Get all chain relations - */ - fastify.get('/relations', async () => { - const relations = getAllRelations(); - - return relations; - }); - - /** - * Get relations for a specific chain by ID - */ - fastify.get('/relations/:id', async (request, reply) => { - const chainId = parseIntParam(request.params.id); - if (chainId === null) { - return sendError(reply, 400, 'Invalid chain ID'); - } - - const result = getRelationsById(chainId); - if (!result) { - return sendError(reply, 404, 'Chain not found'); - } - - return result; - }); - - /** - * BFS graph traversal of chain relations - */ - fastify.get('/relations/:id/graph', async (request, reply) => { - const chainId = parseIntParam(request.params.id); - if (chainId === null) { - return sendError(reply, 400, 'Invalid chain ID'); - } - - const depth = request.query.depth === undefined ? 2 : parseIntParam(request.query.depth); - if (depth === null || depth < 1 || depth > 5) { - return sendError(reply, 400, 'Invalid depth. 
Must be between 1 and 5'); - } - - const result = traverseRelations(chainId, depth); - if (!result) { - return sendError(reply, 404, 'Chain not found'); - } - - return result; - }); - - /** - * Get all endpoints - */ - fastify.get('/endpoints', async () => { - const endpoints = getAllEndpoints(); - - return { - count: endpoints.length, - endpoints - }; - }); - - /** - * Get endpoints for a specific chain by ID - */ - fastify.get('/endpoints/:id', async (request, reply) => { - const chainId = parseIntParam(request.params.id); - if (chainId === null) { - return sendError(reply, 400, 'Invalid chain ID'); - } - - const result = getEndpointsById(chainId); - if (!result) { - return sendError(reply, 404, 'Chain not found'); - } - - return result; - }); - - /** - * Get raw data sources - */ - fastify.get('/sources', async () => { - const cachedData = getCachedData(); - return { - lastUpdated: cachedData.lastUpdated, - sources: { - theGraph: cachedData.theGraph ? 'loaded' : 'not loaded', - chainlist: cachedData.chainlist ? 'loaded' : 'not loaded', - chains: cachedData.chains ? 'loaded' : 'not loaded', - slip44: cachedData.slip44 ? 
'loaded' : 'not loaded' - } - }; - }); - - /** - * Export cached snapshot file - */ - fastify.get('/export', async (_request, reply) => { - if (!DATA_CACHE_ENABLED) { - return sendError(reply, 503, 'Data cache export is disabled'); - } - - const filePath = resolve(DATA_CACHE_FILE); - - try { - const raw = await readFile(filePath, 'utf8'); - const exportData = JSON.parse(raw); - - reply.header('Content-Type', 'application/json; charset=utf-8'); - reply.header('Content-Disposition', `attachment; filename="${basename(filePath)}"`); - return exportData; - } catch (error) { - if (error?.code === 'ENOENT') { - return sendError(reply, 404, 'Export file not found'); - } - - if (error instanceof SyntaxError) { - return sendError(reply, 500, 'Export file is not valid JSON'); - } - - fastify.log.error(error, 'Failed to export cache file'); - return sendError(reply, 500, 'Failed to export cache file'); - } - }); - - /** - * Get SLIP-0044 coin types as JSON - */ - fastify.get('/slip44', async (_request, reply) => { - const cachedData = getCachedData(); - - if (!cachedData.slip44) { - return sendError(reply, 503, 'SLIP-0044 data not loaded'); - } - - return { - count: Object.keys(cachedData.slip44).length, - coinTypes: cachedData.slip44 - }; - }); - - /** - * Get specific SLIP-0044 coin type by ID - */ - fastify.get('/slip44/:coinType', async (request, reply) => { - const coinType = parseIntParam(request.params.coinType); - if (coinType === null) { - return sendError(reply, 400, 'Invalid coin type'); - } - - const cachedData = getCachedData(); - if (!cachedData.slip44?.[coinType]) { - return sendError(reply, 404, 'Coin type not found'); - } - - return cachedData.slip44[coinType]; - }); - - /** - * Reload data from sources (tighter rate limit) - */ - fastify.post('/reload', { - config: { - rateLimit: { - max: RELOAD_RATE_LIMIT_MAX, - timeWindow: RATE_LIMIT_WINDOW_MS - } - } - }, async (request, reply) => { - try { - await loadData(); - startRpcHealthCheck(); - const cachedData = 
getCachedData(); - return { - status: 'success', - lastUpdated: cachedData.lastUpdated, - totalChains: cachedData.indexed ? cachedData.indexed.all.length : 0 - }; - } catch (error) { - fastify.log.error(error, 'Failed to reload data'); - return sendError(reply, 500, 'Failed to reload data'); - } - }); - - /** - * Validate chain data for potential human errors - */ - fastify.get('/validate', async (_request, reply) => { - const validationResults = validateChainData(); - - if (validationResults.error) { - return sendError(reply, 503, validationResults.error); - } - - return validationResults; - }); - - /** - * Get extracted keywords from indexed chain and RPC monitor data - */ - fastify.get('/keywords', async () => { - const keywordResults = getAllKeywords(); - const cachedData = getCachedData(); - - return { - lastUpdated: cachedData.lastUpdated, - ...keywordResults - }; - }); - - /** - * Get RPC monitoring results - */ - fastify.get('/rpc-monitor', async () => { - const results = getRpcMonitoringResults(); - const status = getRpcMonitoringStatus(); - - return { - ...status, - ...results - }; - }); - - /** - * Get RPC monitoring results for a specific chain - */ - fastify.get('/rpc-monitor/:id', async (request, reply) => { - const chainId = parseIntParam(request.params.id); - if (chainId === null) { - return sendError(reply, 400, 'Invalid chain ID'); - } - - const results = getRpcMonitoringResults(); - const chainResults = results.results.filter(r => r.chainId === chainId); - - if (chainResults.length === 0) { - return sendError(reply, 404, 'No monitoring results found for this chain'); - } - - let workingCount = 0; - let failedCount = 0; - for (const r of chainResults) { - if (r.status === 'working') workingCount++; - else if (r.status === 'failed') failedCount++; - } - - return { - chainId, - chainName: chainResults[0].chainName, - totalEndpoints: chainResults.length, - workingEndpoints: workingCount, - failedEndpoints: failedCount, - lastUpdated: 
results.lastUpdated, - endpoints: chainResults, - clients: summarizeChainClients(chainResults)?.clients ?? [] - }; - }); - - /** - * Get aggregated client software across all chains - */ - fastify.get('/clients', async () => { - const results = getRpcMonitoringResults(); - const chains = getClientsByChain(); - return { - lastUpdated: results.lastUpdated, - count: chains.length, - chains - }; - }); - - /** - * Get client software for a specific chain - */ - fastify.get('/clients/:id', async (request, reply) => { - const chainId = parseIntParam(request.params.id); - if (chainId === null) { - return sendError(reply, 400, 'Invalid chain ID'); - } - - const summary = getClientsByChain(chainId); - if (!summary) { - return sendError(reply, 404, 'No client data found for this chain'); - } - - return summary; - }); - - /** - * Get aggregate stats - */ - fastify.get('/stats', async () => { - const chains = getAllChains(); - const monitorResults = getRpcMonitoringResults(); - - const { totalChains, totalMainnets, totalTestnets, totalL2s, totalBeacons } = countChainsByTag(chains); - - const rpcWorking = monitorResults.workingEndpoints; - const rpcFailed = monitorResults.failedEndpoints || 0; - const rpcTested = monitorResults.testedEndpoints; - const rpcHealthPercent = rpcTested > 0 ? 
Math.round((rpcWorking / rpcTested) * 10000) / 100 : null; - - return { - totalChains, - totalMainnets, - totalTestnets, - totalL2s, - totalBeacons, - rpc: { - totalEndpoints: monitorResults.totalEndpoints, - tested: rpcTested, - working: rpcWorking, - failed: rpcFailed, - healthPercent: rpcHealthPercent - }, - lastUpdated: monitorResults.lastUpdated - }; - }); - - /** - * Root endpoint with API information - */ - fastify.get('/', async (request, reply) => { - return { - name: 'Chains API', - version: pkg.version, - description: 'API query service for blockchain chain data from multiple sources', - endpoints: { - '/health': 'Health check and data status', - '/chains': 'Get all chains (optional ?tag=Testnet|L2|Beacon)', - '/chains/:id': 'Get chain by ID', - '/search?q={query}': 'Search chains by name or ID', - '/relations': 'Get all chain relations data', - '/relations/:id': 'Get relations for a specific chain by ID', - '/endpoints': 'Get all chain endpoints (RPC, firehose, substreams)', - '/endpoints/:id': 'Get endpoints for a specific chain by ID', - '/sources': 'Get data sources status', - '/export': 'Export cached snapshot file', - '/slip44': 'Get all SLIP-0044 coin types as JSON', - '/slip44/:coinType': 'Get specific SLIP-0044 coin type by ID', - '/reload': 'Reload data from sources (POST)', - '/validate': 'Validate chain data for potential human errors', - '/keywords': 'Get extracted keywords (blockchain names, network names, client names, etc.)', - '/rpc-monitor': 'Get RPC endpoint monitoring results', - '/rpc-monitor/:id': 'Get RPC monitoring results for a specific chain by ID', - '/clients': 'Get aggregated client software (name, version, GitHub repo) across all chains', - '/clients/:id': 'Get client software running on a specific chain by ID', - '/stats': 'Get aggregate stats (chain counts, RPC health percentage)', - '/relations/:id/graph?depth=N': 'BFS graph traversal of chain relations (default depth: 2)' - }, - dataSources: [ - DATA_SOURCE_THE_GRAPH, - 
DATA_SOURCE_CHAINLIST, - DATA_SOURCE_CHAINS, - DATA_SOURCE_SLIP44 - ] - }; - }); - - return fastify; -} - -// Helper functions for reducing duplication - -/** - * Parse and validate an integer parameter - * @param {string} param - Parameter value to parse - * @returns {number|null} Parsed integer or null if invalid - */ -function parseIntParam(param) { - if (typeof param === 'number') { - return Number.isInteger(param) ? param : null; - } - - if (typeof param !== 'string') { - return null; - } - - const normalized = param.trim(); - if (!/^-?\d+$/.test(normalized)) { - return null; - } - - const parsed = Number.parseInt(normalized, 10); - return Number.isNaN(parsed) ? null : parsed; -} - -/** - * Send a standardized error response - * @param {FastifyReply} reply - Fastify reply object - * @param {number} code - HTTP status code - * @param {string} message - Error message - */ -function sendError(reply, code, message) { - return reply.code(code).send({ error: message }); -} - -// Only run the server if this file is executed directly (CLI mode) -// This allows the file to be imported for testing without starting the server const __filename = toFilePath(import.meta.url); - -// Check if this file is being run directly const isMainModule = process.argv[1] === __filename; if (isMainModule) { diff --git a/mcp-server-http.js b/mcp-server-http.js index 3c07961..ab96603 100755 --- a/mcp-server-http.js +++ b/mcp-server-http.js @@ -12,6 +12,7 @@ import express from 'express'; import { createRequire } from 'node:module'; import { initializeDataOnStartup, getCachedData, startRpcHealthCheck } from './dataService.js'; import { getToolDefinitions, handleToolCall } from './mcp-tools.js'; +import { logger } from './src/util/logger.js'; const require = createRequire(import.meta.url); const { version } = require('./package.json'); @@ -73,7 +74,7 @@ const mcpPostHandler = async (req, res) => { const sessionId = req.headers['mcp-session-id']; if (sessionId) { - console.log(`Received MCP 
request for session: ${sessionId}`); + logger.info({ sessionId }, 'Received MCP request'); } try { @@ -87,7 +88,7 @@ const mcpPostHandler = async (req, res) => { transport = new StreamableHTTPServerTransport({ sessionIdGenerator: () => randomUUID(), onsessioninitialized: (sessionId) => { - console.log(`Session initialized with ID: ${sessionId}`); + logger.info({ sessionId }, 'MCP session initialized'); transports[sessionId] = transport; }, }); @@ -96,7 +97,7 @@ const mcpPostHandler = async (req, res) => { transport.onclose = () => { const sid = transport.sessionId; if (sid && transports[sid]) { - console.log(`Transport closed for session ${sid}`); + logger.info({ sessionId: sid }, 'MCP transport closed'); delete transports[sid]; } }; @@ -122,7 +123,7 @@ const mcpPostHandler = async (req, res) => { // Handle request with existing transport await transport.handleRequest(req, res, req.body); } catch (error) { - console.error('Error handling MCP request:', error); + logger.error({ err: error.message || error }, 'Error handling MCP request'); if (!res.headersSent) { res.status(500).json({ jsonrpc: '2.0', @@ -145,13 +146,13 @@ const mcpDeleteHandler = async (req, res) => { return; } - console.log(`Received session termination request for session ${sessionId}`); + logger.info({ sessionId }, 'Received MCP session termination request'); try { const transport = transports[sessionId]; await transport.handleRequest(req, res); } catch (error) { - console.error('Error handling session termination:', error); + logger.error({ err: error.message || error }, 'Error handling MCP session termination'); if (!res.headersSent) { res.status(500).send('Error processing session termination'); } @@ -192,33 +193,36 @@ app.get('/', (req, res) => { // Start server const server = app.listen(MCP_PORT, MCP_HOST, () => { - console.log(`Chains API MCP HTTP Server listening on http://${MCP_HOST}:${MCP_PORT}`); - console.log(`MCP endpoint: http://${MCP_HOST}:${MCP_PORT}/mcp`); - console.log(`Health 
check: http://${MCP_HOST}:${MCP_PORT}/health`); + logger.info( + { + url: `http://${MCP_HOST}:${MCP_PORT}`, + mcpEndpoint: `http://${MCP_HOST}:${MCP_PORT}/mcp`, + healthEndpoint: `http://${MCP_HOST}:${MCP_PORT}/health` + }, + 'Chains API MCP HTTP Server listening' + ); }); -// Handle server startup errors server.on('error', (error) => { - console.error('Failed to start MCP HTTP server:', error); + logger.error({ err: error.message || error }, 'Failed to start MCP HTTP server'); process.exit(1); }); // Handle graceful shutdown process.on('SIGINT', async () => { - console.log('Shutting down MCP HTTP server...'); + logger.info('Shutting down MCP HTTP server'); - // Close all active transports for (const sessionId in transports) { try { - console.log(`Closing transport for session ${sessionId}`); + logger.info({ sessionId }, 'Closing MCP transport'); await transports[sessionId].close(); delete transports[sessionId]; } catch (error) { - console.error(`Error closing transport for session ${sessionId}:`, error); + logger.error({ sessionId, err: error.message || error }, 'Error closing MCP transport'); } } - console.log('Server shutdown complete'); + logger.info('MCP server shutdown complete'); process.exit(0); }); diff --git a/mcp-tools.js b/mcp-tools.js index c709b5e..029ce74 100644 --- a/mcp-tools.js +++ b/mcp-tools.js @@ -14,6 +14,7 @@ import { getRpcMonitoringResults, getRpcMonitoringStatus, } from './dataService.js'; +import { getL2BeatRefreshStatus } from './src/services/l2beatRefresher.js'; import { getClientsByChain } from './clientsView.js'; import { getPricesForChains, getPriceForChain } from './priceService.js'; @@ -176,6 +177,36 @@ export function getToolDefinitions() { required: ['chainId'], }, }, + { + name: 'get_scaling_chains', + description: 'List chains classified by L2BEAT as scaling solutions (Optimistic Rollup, ZK Rollup, Validium, Optimium). 
Returns each chain\'s L2BEAT view (stage, category, stack, DA layer, host chain, TVS) plus a refresher freshness block indicating whether the data is live or from the static fallback snapshot.', + inputSchema: { + type: 'object', + properties: {}, + }, + }, + { + name: 'get_l2beat_by_id', + description: 'Get L2BEAT scaling data for a single chain by chain ID. Includes stage classification, category, stack, DA layer, host chain, TVS, activity, and per-chain freshness metadata.', + inputSchema: { + type: 'object', + properties: { + chainId: { + type: 'number', + description: 'The chain ID to fetch L2BEAT data for (e.g., 42161 for Arbitrum One)', + }, + }, + required: ['chainId'], + }, + }, + { + name: 'get_refresher_status', + description: 'Get the unified rolling chain refresher\'s current state: tick interval, in-flight status, queue depth, sweep cursor, plus per-job-type status for L2BEAT batches and RPC sweeps. Useful for diagnosing data freshness or stuck refreshes.', + inputSchema: { + type: 'object', + properties: {}, + }, + }, { name: 'get_clients', description: 'Get execution client software (name, version, GitHub repo, language) running on a chain, aggregated from live RPC endpoints. 
Omit chainId to get a summary across all chains.', @@ -487,6 +518,34 @@ function handleGetClients(args) { // --- Dispatch map --- +function handleGetScalingChains() { + const chains = getAllChains().filter((c) => c.l2Beat); + return textResponse({ + count: chains.length, + refresher: getL2BeatRefreshStatus(), + chains, + }); +} + +function handleGetL2BeatById(args) { + const { chainId } = args; + if (!isValidChainId(chainId)) { + return errorResponse('Invalid chainId', 'chainId must be a positive integer'); + } + const chain = getChainById(chainId); + if (!chain) { + return errorResponse('Not found', `No chain with chainId ${chainId}`); + } + if (!chain.l2Beat) { + return errorResponse('Not found', `Chain ${chainId} (${chain.name}) is not classified by L2BEAT`); + } + return textResponse(chain); +} + +function handleGetRefresherStatus() { + return textResponse(getL2BeatRefreshStatus()); +} + const toolHandlers = { get_chains: handleGetChains, get_chain_by_id: handleGetChainById, @@ -501,6 +560,9 @@ const toolHandlers = { traverse_relations: handleTraverseRelations, get_rpc_monitor: handleGetRpcMonitor, get_rpc_monitor_by_id: handleGetRpcMonitorById, + get_scaling_chains: handleGetScalingChains, + get_l2beat_by_id: handleGetL2BeatById, + get_refresher_status: handleGetRefresherStatus, get_clients: handleGetClients, }; diff --git a/package-lock.json b/package-lock.json index 7cf57de..17a8a00 100644 --- a/package-lock.json +++ b/package-lock.json @@ -14,9 +14,11 @@ "@fastify/rate-limit": "^10.3.0", "@fastify/static": "^9.1.1", "@modelcontextprotocol/sdk": "^1.26.0", + "ajv-errors": "^3.0.0", "express": "^5.2.1", "fastify": "^5.8.5", - "https-proxy-agent": "^7.0.6" + "https-proxy-agent": "^7.0.6", + "pino": "^10.3.0" }, "bin": { "chains-api-mcp": "mcp-server.js", @@ -25,6 +27,7 @@ "devDependencies": { "@fast-check/vitest": "^0.2.4", "@vitest/coverage-v8": "^4.0.18", + "eslint": "^10.3.0", "fast-check": "^4.5.3", "vitest": "^4.0.18" }, @@ -534,6 +537,113 @@ "node": 
">=18" } }, + "node_modules/@eslint-community/eslint-utils": { + "version": "4.9.1", + "resolved": "https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.1.tgz", + "integrity": "sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "eslint-visitor-keys": "^3.4.3" + }, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + }, + "peerDependencies": { + "eslint": "^6.0.0 || ^7.0.0 || >=8.0.0" + } + }, + "node_modules/@eslint-community/eslint-utils/node_modules/eslint-visitor-keys": { + "version": "3.4.3", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz", + "integrity": "sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/@eslint-community/regexpp": { + "version": "4.12.2", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.2.tgz", + "integrity": "sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew==", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.0.0 || ^14.0.0 || >=16.0.0" + } + }, + "node_modules/@eslint/config-array": { + "version": "0.23.5", + "resolved": "https://registry.npmjs.org/@eslint/config-array/-/config-array-0.23.5.tgz", + "integrity": "sha512-Y3kKLvC1dvTOT+oGlqNQ1XLqK6D1HU2YXPc52NmAlJZbMMWDzGYXMiPRJ8TYD39muD/OTjlZmNJ4ib7dvSrMBA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/object-schema": "^3.0.5", + "debug": "^4.3.1", + "minimatch": "^10.2.4" + }, + "engines": { + "node": "^20.19.0 || ^22.13.0 || >=24" + } + }, + "node_modules/@eslint/config-helpers": { + 
"version": "0.5.5", + "resolved": "https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.5.5.tgz", + "integrity": "sha512-eIJYKTCECbP/nsKaaruF6LW967mtbQbsw4JTtSVkUQc9MneSkbrgPJAbKl9nWr0ZeowV8BfsarBmPpBzGelA2w==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^1.2.1" + }, + "engines": { + "node": "^20.19.0 || ^22.13.0 || >=24" + } + }, + "node_modules/@eslint/core": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@eslint/core/-/core-1.2.1.tgz", + "integrity": "sha512-MwcE1P+AZ4C6DWlpin/OmOA54mmIZ/+xZuJiQd4SyB29oAJjN30UW9wkKNptW2ctp4cEsvhlLY/CsQ1uoHDloQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@types/json-schema": "^7.0.15" + }, + "engines": { + "node": "^20.19.0 || ^22.13.0 || >=24" + } + }, + "node_modules/@eslint/object-schema": { + "version": "3.0.5", + "resolved": "https://registry.npmjs.org/@eslint/object-schema/-/object-schema-3.0.5.tgz", + "integrity": "sha512-vqTaUEgxzm+YDSdElad6PiRoX4t8VGDjCtt05zn4nU810UIx/uNEV7/lZJ6KwFThKZOzOxzXy48da+No7HZaMw==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^20.19.0 || ^22.13.0 || >=24" + } + }, + "node_modules/@eslint/plugin-kit": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.7.1.tgz", + "integrity": "sha512-rZAP3aVgB9ds9KOeUSL+zZ21hPmo8dh6fnIFwRQj5EAZl9gzR7wxYbYXYysAM8CTqGmUGyp2S4kUdV17MnGuWQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@eslint/core": "^1.2.1", + "levn": "^0.4.1" + }, + "engines": { + "node": "^20.19.0 || ^22.13.0 || >=24" + } + }, "node_modules/@fast-check/vitest": { "version": "0.2.4", "resolved": "https://registry.npmjs.org/@fast-check/vitest/-/vitest-0.2.4.tgz", @@ -804,6 +914,72 @@ "hono": "^4" } }, + "node_modules/@humanfs/core": { + "version": "0.19.2", + "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.2.tgz", + "integrity": 
"sha512-UhXNm+CFMWcbChXywFwkmhqjs3PRCmcSa/hfBgLIb7oQ5HNb1wS0icWsGtSAUNgefHeI+eBrA8I1fxmbHsGdvA==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@humanfs/types": "^0.15.0" + }, + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/@humanfs/node": { + "version": "0.16.8", + "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.8.tgz", + "integrity": "sha512-gE1eQNZ3R++kTzFUpdGlpmy8kDZD/MLyHqDwqjkVQI0JMdI1D51sy1H958PNXYkM2rAac7e5/CnIKZrHtPh3BQ==", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@humanfs/core": "^0.19.2", + "@humanfs/types": "^0.15.0", + "@humanwhocodes/retry": "^0.4.0" + }, + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/@humanfs/types": { + "version": "0.15.0", + "resolved": "https://registry.npmjs.org/@humanfs/types/-/types-0.15.0.tgz", + "integrity": "sha512-ZZ1w0aoQkwuUuC7Yf+7sdeaNfqQiiLcSRbfI08oAxqLtpXQr9AIVX7Ay7HLDuiLYAaFPu8oBYNq/QIi9URHJ3Q==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18.0" + } + }, + "node_modules/@humanwhocodes/module-importer": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz", + "integrity": "sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=12.22" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, + "node_modules/@humanwhocodes/retry": { + "version": "0.4.3", + "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz", + "integrity": "sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=18.18" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/nzakas" + } + }, "node_modules/@jridgewell/resolve-uri": { "version": "3.1.2", 
"resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", @@ -1262,6 +1438,13 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/esrecurse": { + "version": "4.3.1", + "resolved": "https://registry.npmjs.org/@types/esrecurse/-/esrecurse-4.3.1.tgz", + "integrity": "sha512-xJBAbDifo5hpffDBuHl0Y8ywswbiAp/Wi7Y/GtAgSlZyIABppyurxVueOPE8LUQOxdlgi6Zqce7uoEpqNTeiUw==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/estree": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", @@ -1269,6 +1452,13 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz", + "integrity": "sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA==", + "dev": true, + "license": "MIT" + }, "node_modules/@vitest/coverage-v8": { "version": "4.0.18", "resolved": "https://registry.npmjs.org/@vitest/coverage-v8/-/coverage-v8-4.0.18.tgz", @@ -1430,6 +1620,29 @@ "node": ">= 0.6" } }, + "node_modules/acorn": { + "version": "8.16.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.16.0.tgz", + "integrity": "sha512-UVJyE9MttOsBQIDKw1skb9nAwQuR5wuGD3+82K6JgJlm/Y+KI92oNsMNGZCYdDsVtRHSak0pcV5Dno5+4jh9sw==", + "dev": true, + "license": "MIT", + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-jsx": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz", + "integrity": "sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "acorn": "^6.0.0 || ^7.0.0 || ^8.0.0" + } + }, "node_modules/agent-base": { "version": "7.1.4", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz", @@ -1455,6 +1668,15 @@ "url": 
"https://github.com/sponsors/epoberezkin" } }, + "node_modules/ajv-errors": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/ajv-errors/-/ajv-errors-3.0.0.tgz", + "integrity": "sha512-V3wD15YHfHz6y0KdhYFjyy9vWtEVALT9UrxfN3zqlI6dMioHnJrqOYfyPKol3oqrnCM9uwkcdCwkJ0WUcbLMTQ==", + "license": "MIT", + "peerDependencies": { + "ajv": "^8.0.1" + } + }, "node_modules/ajv-formats": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/ajv-formats/-/ajv-formats-3.0.1.tgz", @@ -1698,6 +1920,13 @@ } } }, + "node_modules/deep-is": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", + "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==", + "dev": true, + "license": "MIT" + }, "node_modules/depd": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/depd/-/depd-2.0.0.tgz", @@ -1830,6 +2059,185 @@ "integrity": "sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==", "license": "MIT" }, + "node_modules/escape-string-regexp": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz", + "integrity": "sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/eslint": { + "version": "10.3.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-10.3.0.tgz", + "integrity": "sha512-XbEXaRva5cF0ZQB8w6MluHA0kZZfV2DuCMJ3ozyEOHLwDpZX2Lmm/7Pp0xdJmI0GL1W05VH5VwIFHEm1Vcw2gw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@eslint-community/eslint-utils": "^4.8.0", + "@eslint-community/regexpp": "^4.12.2", + "@eslint/config-array": "^0.23.5", + "@eslint/config-helpers": "^0.5.5", + "@eslint/core": "^1.2.1", + "@eslint/plugin-kit": "^0.7.1", + 
"@humanfs/node": "^0.16.6", + "@humanwhocodes/module-importer": "^1.0.1", + "@humanwhocodes/retry": "^0.4.2", + "@types/estree": "^1.0.6", + "ajv": "^6.14.0", + "cross-spawn": "^7.0.6", + "debug": "^4.3.2", + "escape-string-regexp": "^4.0.0", + "eslint-scope": "^9.1.2", + "eslint-visitor-keys": "^5.0.1", + "espree": "^11.2.0", + "esquery": "^1.7.0", + "esutils": "^2.0.2", + "fast-deep-equal": "^3.1.3", + "file-entry-cache": "^8.0.0", + "find-up": "^5.0.0", + "glob-parent": "^6.0.2", + "ignore": "^5.2.0", + "imurmurhash": "^0.1.4", + "is-glob": "^4.0.0", + "json-stable-stringify-without-jsonify": "^1.0.1", + "minimatch": "^10.2.4", + "natural-compare": "^1.4.0", + "optionator": "^0.9.3" + }, + "bin": { + "eslint": "bin/eslint.js" + }, + "engines": { + "node": "^20.19.0 || ^22.13.0 || >=24" + }, + "funding": { + "url": "https://eslint.org/donate" + }, + "peerDependencies": { + "jiti": "*" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + } + } + }, + "node_modules/eslint-scope": { + "version": "9.1.2", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-9.1.2.tgz", + "integrity": "sha512-xS90H51cKw0jltxmvmHy2Iai1LIqrfbw57b79w/J7MfvDfkIkFZ+kj6zC3BjtUwh150HsSSdxXZcsuv72miDFQ==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "@types/esrecurse": "^4.3.1", + "@types/estree": "^1.0.8", + "esrecurse": "^4.3.0", + "estraverse": "^5.2.0" + }, + "engines": { + "node": "^20.19.0 || ^22.13.0 || >=24" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/eslint-visitor-keys": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-5.0.1.tgz", + "integrity": "sha512-tD40eHxA35h0PEIZNeIjkHoDR4YjjJp34biM0mDvplBe//mB+IHCqHDGV7pxF+7MklTvighcCPPZC7ynWyjdTA==", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": "^20.19.0 || ^22.13.0 || >=24" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + 
"node_modules/eslint/node_modules/ajv": { + "version": "6.15.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.15.0.tgz", + "integrity": "sha512-fgFx7Hfoq60ytK2c7DhnF8jIvzYgOMxfugjLOSMHjLIPgenqa7S7oaagATUq99mV6IYvN2tRmC0wnTYX6iPbMw==", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/eslint/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true, + "license": "MIT" + }, + "node_modules/espree": { + "version": "11.2.0", + "resolved": "https://registry.npmjs.org/espree/-/espree-11.2.0.tgz", + "integrity": "sha512-7p3DrVEIopW1B1avAGLuCSh1jubc01H2JHc8B4qqGblmg5gI9yumBgACjWo4JlIc04ufug4xJ3SQI8HkS/Rgzw==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "acorn": "^8.16.0", + "acorn-jsx": "^5.3.2", + "eslint-visitor-keys": "^5.0.1" + }, + "engines": { + "node": "^20.19.0 || ^22.13.0 || >=24" + }, + "funding": { + "url": "https://opencollective.com/eslint" + } + }, + "node_modules/esquery": { + "version": "1.7.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.7.0.tgz", + "integrity": "sha512-Ap6G0WQwcU/LHsvLwON1fAQX9Zp0A2Y6Y/cJBl9r/JbW90Zyg4/zbG6zzKa2OTALELarYHmKu0GhpM5EO+7T0g==", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "estraverse": "^5.1.0" + }, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz", + "integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==", + 
"dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "5.3.0", + "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz", + "integrity": "sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, "node_modules/estree-walker": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-3.0.3.tgz", @@ -1840,6 +2248,16 @@ "@types/estree": "^1.0.0" } }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/etag": { "version": "1.8.1", "resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz", @@ -1985,6 +2403,13 @@ "integrity": "sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==", "license": "MIT" }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==", + "dev": true, + "license": "MIT" + }, "node_modules/fast-json-stringify": { "version": "6.3.0", "resolved": "https://registry.npmjs.org/fast-json-stringify/-/fast-json-stringify-6.3.0.tgz", @@ -2009,6 +2434,13 @@ "rfdc": "^1.2.0" } }, + "node_modules/fast-levenshtein": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz", + "integrity": 
"sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw==", + "dev": true, + "license": "MIT" + }, "node_modules/fast-querystring": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/fast-querystring/-/fast-querystring-1.1.2.tgz", @@ -2110,6 +2542,19 @@ } } }, + "node_modules/file-entry-cache": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz", + "integrity": "sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "flat-cache": "^4.0.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, "node_modules/finalhandler": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-2.1.1.tgz", @@ -2145,6 +2590,44 @@ "node": ">=20" } }, + "node_modules/find-up": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz", + "integrity": "sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng==", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^6.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/flat-cache": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz", + "integrity": "sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw==", + "dev": true, + "license": "MIT", + "dependencies": { + "flatted": "^3.2.9", + "keyv": "^4.5.4" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/flatted": { + "version": "3.4.2", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-3.4.2.tgz", + "integrity": "sha512-PjDse7RzhcPkIJwy5t7KPWQSZ9cAbzQXcafsetQoD7sOJRQlGikNbx7yZp2OotDnJyrDcbyRq3Ttb18iYOqkxA==", + "dev": true, + "license": "ISC" + 
}, "node_modules/forwarded": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", @@ -2241,6 +2724,19 @@ "url": "https://github.com/sponsors/isaacs" } }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, "node_modules/gopd": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", @@ -2361,6 +2857,26 @@ "url": "https://opencollective.com/express" } }, + "node_modules/ignore": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz", + "integrity": "sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": "https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", @@ -2385,6 +2901,29 @@ "node": ">= 10" } }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", 
+ "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/is-promise": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-4.0.0.tgz", @@ -2452,6 +2991,13 @@ "dev": true, "license": "MIT" }, + "node_modules/json-buffer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz", + "integrity": "sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==", + "dev": true, + "license": "MIT" + }, "node_modules/json-schema-ref-resolver": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/json-schema-ref-resolver/-/json-schema-ref-resolver-3.0.0.tgz", @@ -2483,6 +3029,37 @@ "integrity": "sha512-fQhoXdcvc3V28x7C7BMs4P5+kNlgUURe2jmUT1T//oBRMDrqy1QPelJimwZGo7Hg9VPV3EQV5Bnq4hbFy2vetA==", "license": "BSD-2-Clause" }, + "node_modules/json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw==", + "dev": true, + "license": "MIT" + }, + "node_modules/keyv": { + "version": "4.5.4", + "resolved": "https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz", + "integrity": "sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==", + "dev": true, + "license": "MIT", + "dependencies": { + "json-buffer": "3.0.1" + } + }, + "node_modules/levn": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", + "integrity": "sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==", + "dev": true, + "license": "MIT", + 
"dependencies": { + "prelude-ls": "^1.2.1", + "type-check": "~0.4.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, "node_modules/light-my-request": { "version": "6.6.0", "resolved": "https://registry.npmjs.org/light-my-request/-/light-my-request-6.6.0.tgz", @@ -2520,6 +3097,22 @@ ], "license": "MIT" }, + "node_modules/locate-path": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz", + "integrity": "sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^5.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/lru-cache": { "version": "11.2.6", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-11.2.6.tgz", @@ -2683,6 +3276,13 @@ "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" } }, + "node_modules/natural-compare": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", + "integrity": "sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==", + "dev": true, + "license": "MIT" + }, "node_modules/negotiator": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/negotiator/-/negotiator-1.0.0.tgz", @@ -2754,6 +3354,56 @@ "wrappy": "1" } }, + "node_modules/optionator": { + "version": "0.9.4", + "resolved": "https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz", + "integrity": "sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==", + "dev": true, + "license": "MIT", + "dependencies": { + "deep-is": "^0.1.3", + "fast-levenshtein": "^2.0.6", + "levn": "^0.4.1", + "prelude-ls": "^1.2.1", + "type-check": "^0.4.0", + "word-wrap": "^1.2.5" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/p-limit": { + "version": "3.1.0", + "resolved": 
"https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz", + "integrity": "sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz", + "integrity": "sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^3.0.2" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/parseurl": { "version": "1.3.3", "resolved": "https://registry.npmjs.org/parseurl/-/parseurl-1.3.3.tgz", @@ -2763,6 +3413,16 @@ "node": ">= 0.8" } }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, "node_modules/path-key": { "version": "3.1.1", "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", @@ -2900,6 +3560,16 @@ "node": "^10 || ^12 || >=14" } }, + "node_modules/prelude-ls": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz", + "integrity": "sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8.0" + } + }, "node_modules/process-warning": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/process-warning/-/process-warning-5.0.0.tgz", @@ -2938,6 +3608,16 @@ "node": ">= 0.10" } }, + 
"node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, "node_modules/pure-rand": { "version": "7.0.1", "resolved": "https://registry.npmjs.org/pure-rand/-/pure-rand-7.0.1.tgz", @@ -3461,6 +4141,19 @@ "node": ">=0.6" } }, + "node_modules/type-check": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", + "integrity": "sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew==", + "dev": true, + "license": "MIT", + "dependencies": { + "prelude-ls": "^1.2.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, "node_modules/type-is": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/type-is/-/type-is-2.0.1.tgz", @@ -3484,6 +4177,16 @@ "node": ">= 0.8" } }, + "node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "punycode": "^2.1.0" + } + }, "node_modules/vary": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/vary/-/vary-1.1.2.tgz", @@ -3678,12 +4381,35 @@ "node": ">=8" } }, + "node_modules/word-wrap": { + "version": "1.2.5", + "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz", + "integrity": "sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": 
"sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==", "license": "ISC" }, + "node_modules/yocto-queue": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", + "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/zod": { "version": "4.3.6", "resolved": "https://registry.npmjs.org/zod/-/zod-4.3.6.tgz", diff --git a/package.json b/package.json index a132236..94089fc 100644 --- a/package.json +++ b/package.json @@ -15,7 +15,8 @@ "mcp:http": "node mcp-server-http.js", "test": "vitest run", "test:watch": "vitest", - "test:coverage": "vitest run --coverage" + "test:coverage": "vitest run --coverage", + "lint": "eslint src/" }, "repository": { "type": "git", @@ -37,17 +38,17 @@ "@fastify/rate-limit": "^10.3.0", "@fastify/static": "^9.1.1", "@modelcontextprotocol/sdk": "^1.26.0", + "ajv-errors": "^3.0.0", "express": "^5.2.1", "fastify": "^5.8.5", - "https-proxy-agent": "^7.0.6" + "https-proxy-agent": "^7.0.6", + "pino": "^10.3.0" }, "devDependencies": { "@fast-check/vitest": "^0.2.4", "@vitest/coverage-v8": "^4.0.18", + "eslint": "^10.3.0", "fast-check": "^4.5.3", "vitest": "^4.0.18" - }, - "engines": { - "node": ">=20" } } diff --git a/src/domain/keywords.js b/src/domain/keywords.js new file mode 100644 index 0000000..28e5081 --- /dev/null +++ b/src/domain/keywords.js @@ -0,0 +1,129 @@ +import { cachedData } from '../store/cache.js'; + +function addKeywordValue(set, value) { + if (typeof value !== 'string') return; + const normalized = value.trim(); + if (normalized.length > 0) set.add(normalized); +} + +function addTokenKeywords(set, value) { + if (typeof value !== 'string') return; + const tokens = value + .toLowerCase() + .split(/[^a-z0-9]+/i) + .filter(token 
=> token.length >= 2); + tokens.forEach(token => set.add(token)); +} + +const keywordSortCollator = new Intl.Collator('en', { + numeric: true, + sensitivity: 'base' +}); + +function sortKeywordSet(set) { + return Array.from(set).sort((a, b) => keywordSortCollator.compare(a, b)); +} + +function extractClientName(clientVersion) { + if (typeof clientVersion !== 'string') return null; + const trimmed = clientVersion.trim(); + if (!trimmed) return null; + const slashIndex = trimmed.indexOf('/'); + const candidate = slashIndex === -1 ? trimmed : trimmed.slice(0, slashIndex); + return candidate || null; +} + +const EMPTY_KEYWORDS = { + totalKeywords: 0, + keywords: { + blockchainNames: [], + networkNames: [], + softwareClients: [], + currencySymbols: [], + tags: [], + relationKinds: [], + sources: [], + statuses: [], + generic: [] + } +}; + +export function getAllKeywords() { + if (!cachedData.indexed) return structuredClone(EMPTY_KEYWORDS); + + const blockchainNames = new Set(); + const networkNames = new Set(); + const softwareClients = new Set(); + const currencySymbols = new Set(); + const tags = new Set(); + const relationKinds = new Set(); + const sources = new Set(); + const statuses = new Set(); + const generic = new Set(); + + cachedData.indexed.all.forEach(chain => { + addKeywordValue(blockchainNames, chain.name); + addKeywordValue(networkNames, chain.network); + addKeywordValue(networkNames, chain.shortName); + addKeywordValue(networkNames, chain.theGraph?.id); + addKeywordValue(networkNames, chain.theGraph?.caip2Id); + addKeywordValue(currencySymbols, chain.nativeCurrency?.symbol); + addKeywordValue(statuses, chain.status); + + addTokenKeywords(generic, chain.name); + addTokenKeywords(generic, chain.network); + addTokenKeywords(generic, chain.shortName); + addTokenKeywords(generic, chain.theGraph?.fullName); + + if (Array.isArray(chain.sources)) { + chain.sources.forEach(source => addKeywordValue(sources, source)); + } + + if (Array.isArray(chain.tags)) { + 
chain.tags.forEach(tag => { + addKeywordValue(tags, tag); + addTokenKeywords(generic, tag); + }); + } + + if (Array.isArray(chain.relations)) { + chain.relations.forEach(relation => { + addKeywordValue(relationKinds, relation.kind); + addKeywordValue(networkNames, relation.network); + addTokenKeywords(generic, relation.network); + }); + } + }); + + Object.values(cachedData.rpcHealth || {}).forEach(results => { + if (!Array.isArray(results)) return; + + results.forEach(result => { + const clientName = extractClientName(result?.clientVersion); + if (clientName) { + addKeywordValue(softwareClients, clientName); + addTokenKeywords(generic, clientName); + } + addTokenKeywords(generic, result?.clientVersion); + }); + }); + + const keywords = { + blockchainNames: sortKeywordSet(blockchainNames), + networkNames: sortKeywordSet(networkNames), + softwareClients: sortKeywordSet(softwareClients), + currencySymbols: sortKeywordSet(currencySymbols), + tags: sortKeywordSet(tags), + relationKinds: sortKeywordSet(relationKinds), + sources: sortKeywordSet(sources), + statuses: sortKeywordSet(statuses), + generic: sortKeywordSet(generic) + }; + + const totalKeywords = Object.values(keywords).reduce( + (acc, keywordList) => acc + keywordList.length, + 0 + ); + + return { totalKeywords, keywords }; +} diff --git a/src/domain/relations.js b/src/domain/relations.js new file mode 100644 index 0000000..ebc0793 --- /dev/null +++ b/src/domain/relations.js @@ -0,0 +1,130 @@ +import { cachedData } from '../store/cache.js'; + +const ALLOWED_KINDS = new Set(['l2Of', 'parentOf', 'testnetOf', 'mainnetOf']); + +export function getAllRelations() { + if (!cachedData.indexed) return {}; + + const allRelations = {}; + + cachedData.indexed.all.forEach(chain => { + if (!chain.relations?.length) return; + + chain.relations.forEach(relation => { + if (!ALLOWED_KINDS.has(relation.kind) || relation.chainId === undefined) return; + + let kind = relation.kind === 'parentOf' ? 
'l1Of' : relation.kind; + + let parentChainId, childChainId, parentName, childName; + if (kind === 'l1Of' || kind === 'mainnetOf') { + parentChainId = chain.chainId; + childChainId = relation.chainId; + parentName = chain.name; + const childChain = cachedData.indexed.byChainId[childChainId]; + childName = childChain ? childChain.name : relation.network; + } else { + childChainId = chain.chainId; + parentChainId = relation.chainId; + childName = chain.name; + const parentChain = cachedData.indexed.byChainId[parentChainId]; + parentName = parentChain ? parentChain.name : relation.network; + } + + const parentKey = String(parentChainId); + const childKey = String(childChainId); + + if (!allRelations[parentKey]) allRelations[parentKey] = {}; + + allRelations[parentKey][childKey] = { + parentName, + kind, + childName, + chainId: childChainId, + source: relation.source + }; + }); + }); + + return allRelations; +} + +export function getRelationsById(chainId) { + if (!cachedData.indexed) return null; + + const chain = cachedData.indexed.byChainId[chainId]; + if (!chain) return null; + + return { + chainId: chain.chainId, + chainName: chain.name, + relations: chain.relations || [] + }; +} + +function collectRelationEdges(chain, chainId, depth, visited, edges, queue, seenEdges) { + const relations = chain.relations || []; + for (const rel of relations) { + if (rel.chainId === undefined) continue; + + // Deduplicate bidirectional edges (A→B and B→A with same kind). 
+ const a = Math.min(chainId, rel.chainId); + const b = Math.max(chainId, rel.chainId); + const edgeKey = `${a}-${b}-${rel.kind}`; + if (!seenEdges.has(edgeKey)) { + seenEdges.add(edgeKey); + edges.push({ + from: chainId, + to: rel.chainId, + kind: rel.kind, + source: rel.source + }); + } + + if (!visited.has(rel.chainId)) { + queue.push({ chainId: rel.chainId, depth: depth + 1 }); + } + } +} + +export function traverseRelations(startChainId, maxDepth = 2) { + if (!cachedData.indexed) return null; + + const startChain = cachedData.indexed.byChainId[startChainId]; + if (!startChain) return null; + + const visited = new Set(); + const seenEdges = new Set(); + const queue = [{ chainId: startChainId, depth: 0 }]; + const nodes = []; + const edges = []; + + while (queue.length > 0) { + const { chainId, depth } = queue.shift(); + if (visited.has(chainId)) continue; + visited.add(chainId); + + const chain = cachedData.indexed.byChainId[chainId]; + if (!chain) continue; + + nodes.push({ + chainId: chain.chainId, + name: chain.name, + tags: chain.tags || [], + depth + }); + + if (depth < maxDepth) { + collectRelationEdges(chain, chainId, depth, visited, edges, queue, seenEdges); + } + } + + return { + startChainId, + startChainName: startChain.name, + maxDepth, + totalNodes: nodes.length, + totalEdges: edges.length, + nodes, + edges + }; +} diff --git a/src/http/app.js b/src/http/app.js new file mode 100644 index 0000000..12c2fb4 --- /dev/null +++ b/src/http/app.js @@ -0,0 +1,172 @@ +import { dirname, join } from 'node:path'; +import { fileURLToPath as toFilePath } from 'node:url'; +import Fastify from 'fastify'; +import cors from '@fastify/cors'; +import rateLimit from '@fastify/rate-limit'; +import helmet from '@fastify/helmet'; +import fastifyStatic from '@fastify/static'; +import ajvErrors from 'ajv-errors'; +import { initializeDataOnStartup } from '../services/loader.js'; +import { startRpcHealthCheck } from '../services/rpcHealth.js'; +import { startL2BeatRefresh } 
from '../services/l2beatRefresher.js'; +import { + BODY_LIMIT, + MAX_PARAM_LENGTH, + RATE_LIMIT_MAX, + RATE_LIMIT_WINDOW_MS, + CORS_ORIGIN +} from '../../config.js'; +import { chainsRoutes } from './routes/chains.js'; +import { relationsRoutes } from './routes/relations.js'; +import { endpointsRoutes } from './routes/endpoints.js'; +import { slip44Routes } from './routes/slip44.js'; +import { rpcMonitorRoutes } from './routes/rpcMonitor.js'; +import { clientsRoutes } from './routes/clients.js'; +import { scalingRoutes } from './routes/scaling.js'; +import { adminRoutes } from './routes/admin.js'; +import { metricsRoute } from './routes/metrics.js'; +import { refresherRoute } from './routes/refresher.js'; +import { rootRoute } from './routes/root.js'; +import { prefetchAllPrices } from '../../priceService.js'; +import { logger } from '../util/logger.js'; + +function resolveCorsOrigin(value) { + if (value === '*') return true; + return value.split(',').map(s => s.trim()); +} + +/** + * Map a JSON Schema validation failure into the project's `{ error: ... }` + * envelope. + * + * Preferred path: schemas declare per-keyword messages via `errorMessage` + * (ajv-errors). When that's present, ajv emits a synthetic error with + * `keyword: 'errorMessage'` and the schema-author's message in `.message`. + * For schemas that haven't been migrated yet, fall through to a generic + * "Invalid {dataVar}" string. Routes can override on a per-route basis. + */ +function formatSchemaValidationError(errors, dataVar) { + // Prefer the route-author's `errorMessage` when present. + const authored = errors.find(e => e.keyword === 'errorMessage' && typeof e.message === 'string'); + if (authored) { + const err = new Error(authored.message); + err.statusCode = 400; + return err; + } + // additionalProperties needs the offending name interpolated; route + // authors can't put `${...}` in their schema string, so handle here. 
+ const extra = errors.find(e => e.keyword === 'additionalProperties'); + if (extra) { + const where = dataVar === 'querystring' ? 'query parameter' : 'field'; + const err = new Error(`Unknown ${where}: "${extra.params.additionalProperty}"`); + err.statusCode = 400; + return err; + } + const first = errors[0]; + const err = new Error(first.message || `Invalid ${dataVar}`); + err.statusCode = 400; + return err; +} + +export async function buildApp(options = {}) { + const { + logger = true, + bodyLimit = BODY_LIMIT, + maxParamLength = MAX_PARAM_LENGTH, + loadDataOnStartup = true + } = options; + + const fastify = Fastify({ + logger, + bodyLimit, + maxParamLength, + schemaErrorFormatter: formatSchemaValidationError, + ajv: { + customOptions: { + removeAdditional: false, + useDefaults: true, + coerceTypes: 'array', + allErrors: true // required for ajv-errors to inspect all violations + }, + plugins: [ajvErrors] + } + }); + + fastify.setErrorHandler((error, _request, reply) => { + // 4xx: validation errors are safe to surface to clients. + if (error.validation || error.statusCode === 400) { + return reply.code(400).send({ error: error.message }); + } + // 5xx: log full detail server-side, return generic message to client. + // Prevents leaking internal stack/file paths and database queries. + const statusCode = error.statusCode || 500; + fastify.log.error(error); + if (statusCode >= 500) { + return reply.code(statusCode).send({ error: 'Internal Server Error' }); + } + return reply.code(statusCode).send({ error: error.message || 'Error' }); + }); + + await fastify.register(cors, { + origin: resolveCorsOrigin(CORS_ORIGIN), + credentials: false + }); + + await fastify.register(helmet, { + contentSecurityPolicy: { + directives: { + defaultSrc: ["'self'"], + scriptSrc: ["'self'"], + styleSrc: ["'self'"], + fontSrc: ["'self'"], + connectSrc: ["'self'"], + imgSrc: ["'self'", 'data:'] + } + } + }); + + // Serve public/ directory for the 3D visualization UI. 
+ // Resolve relative to the project root (two levels up from src/http/). + const __dir = dirname(toFilePath(import.meta.url)); + await fastify.register(fastifyStatic, { + root: join(__dir, '..', '..', 'public'), + prefix: '/ui/', + decorateReply: false + }); + + await fastify.register(rateLimit, { + max: RATE_LIMIT_MAX, + timeWindow: RATE_LIMIT_WINDOW_MS + }); + + if (loadDataOnStartup) { + await initializeDataOnStartup({ + onBackgroundRefreshSuccess: () => { + startRpcHealthCheck(); + startL2BeatRefresh(); + } + }); + startRpcHealthCheck(); + startL2BeatRefresh(); + // Warm the price cache in the background so the first /chains request + // doesn't pay a CoinGecko round-trip. Failures are silent — a cold + // cache falls back to per-request fetching with the same timeout. + prefetchAllPrices().catch(err => { + logger.warn({ err: err.message }, 'Initial price prefetch failed'); + }); + } + + await fastify.register(adminRoutes); + await fastify.register(chainsRoutes); + await fastify.register(relationsRoutes); + await fastify.register(endpointsRoutes); + await fastify.register(slip44Routes); + await fastify.register(rpcMonitorRoutes); + await fastify.register(clientsRoutes); + await fastify.register(scalingRoutes); + await fastify.register(metricsRoute); + await fastify.register(refresherRoute); + await fastify.register(rootRoute); + + return fastify; +} diff --git a/src/http/routes/admin.js b/src/http/routes/admin.js new file mode 100644 index 0000000..fbc7f61 --- /dev/null +++ b/src/http/routes/admin.js @@ -0,0 +1,203 @@ +import { readFile } from 'node:fs/promises'; +import { basename, resolve } from 'node:path'; +import { getCachedData } from '../../store/cache.js'; +import { + getAllChains, + getRpcMonitoringResults, + countChainsByTag +} from '../../store/queries.js'; +import { getAllKeywords } from '../../domain/keywords.js'; +import { loadData } from '../../services/loader.js'; +import { startRpcHealthCheck, getRpcMonitoringStatus } from 
'../../services/rpcHealth.js'; +import { validateChainData } from '../../services/validation.js'; +import { getL2BeatRefreshStatus } from '../../services/l2beatRefresher.js'; +import { + RELOAD_RATE_LIMIT_MAX, + RATE_LIMIT_WINDOW_MS, + DATA_CACHE_ENABLED, + DATA_CACHE_FILE +} from '../../../config.js'; +import { sendError } from '../util/sendError.js'; + +function ageSeconds(isoTimestamp) { + if (!isoTimestamp) return null; + const ms = Date.now() - new Date(isoTimestamp).getTime(); + if (!Number.isFinite(ms) || ms < 0) return null; + return Math.round(ms / 1000); +} + +function sourceFreshness(cache) { + const dataAge = ageSeconds(cache.lastUpdated); + const hasL2Beat = cache.l2beat != null + && Array.isArray(cache.l2beat.projects) + && cache.l2beat.projects.length > 0; + return { + theGraph: { loaded: cache.theGraph != null, ageSeconds: cache.theGraph != null ? dataAge : null }, + chainlist: { loaded: cache.chainlist != null, ageSeconds: cache.chainlist != null ? dataAge : null }, + chains: { loaded: cache.chains != null, ageSeconds: cache.chains != null ? dataAge : null }, + // slip44 distinguishes failure (null) from empty parse ({}), see loader.js. + slip44: { loaded: cache.slip44 != null, ageSeconds: cache.slip44 != null ? dataAge : null }, + l2beat: { + loaded: hasL2Beat, + ageSeconds: ageSeconds(cache.l2beat?.fetchedAt), + source: cache.l2beat?.source ?? 
null + } + }; +} + +function deriveOverallStatus(sources, refreshers) { + const coreSources = ['theGraph', 'chainlist', 'chains']; + const coreLoaded = coreSources.every(s => sources[s].loaded); + if (!coreLoaded) return 'down'; + + const supplementaryDegraded = !sources.slip44.loaded || !sources.l2beat.loaded; + const rpcStale = refreshers.rpc.lastRunAt && + ageSeconds(refreshers.rpc.lastRunAt) > 30 * 60; // > 30 min + const l2beatStale = refreshers.l2beat.lastRefreshAt && + refreshers.l2beat.intervalMs && + ageSeconds(refreshers.l2beat.lastRefreshAt) > (refreshers.l2beat.intervalMs / 1000) * 2; + + if (supplementaryDegraded || rpcStale || l2beatStale) return 'degraded'; + return 'ok'; +} + +export async function adminRoutes(fastify) { + fastify.get('/health', async () => { + const cachedData = getCachedData(); + const sources = sourceFreshness(cachedData); + const rpcStatus = getRpcMonitoringStatus(); + const l2beatStatus = getL2BeatRefreshStatus(); + const refreshers = { + rpc: { + isRunning: rpcStatus.isMonitoring, + lastRunAt: rpcStatus.lastUpdated + }, + l2beat: l2beatStatus + }; + + return { + status: deriveOverallStatus(sources, refreshers), + dataLoaded: cachedData.indexed !== null, + lastUpdated: cachedData.lastUpdated, + totalChains: cachedData.indexed ? cachedData.indexed.all.length : 0, + sources, + refreshers + }; + }); + + fastify.get('/sources', async () => { + const cachedData = getCachedData(); + return { + lastUpdated: cachedData.lastUpdated, + sources: { + theGraph: cachedData.theGraph ? 'loaded' : 'not loaded', + chainlist: cachedData.chainlist ? 'loaded' : 'not loaded', + chains: cachedData.chains ? 'loaded' : 'not loaded', + slip44: cachedData.slip44 != null ? 'loaded' : 'not loaded', + l2beat: cachedData.l2beat?.projects?.length > 0 ? 
'loaded' : 'not loaded' + } + }; + }); + + fastify.get('/export', { + config: { + rateLimit: { + max: RELOAD_RATE_LIMIT_MAX, + timeWindow: RATE_LIMIT_WINDOW_MS + } + } + }, async (_request, reply) => { + if (!DATA_CACHE_ENABLED) { + return sendError(reply, 503, 'Data cache export is disabled'); + } + + const filePath = resolve(DATA_CACHE_FILE); + + try { + const raw = await readFile(filePath, 'utf8'); + const exportData = JSON.parse(raw); + + reply.header('Content-Type', 'application/json; charset=utf-8'); + reply.header('Content-Disposition', `attachment; filename="${basename(filePath)}"`); + return exportData; + } catch (error) { + if (error?.code === 'ENOENT') { + return sendError(reply, 404, 'Export file not found'); + } + + if (error instanceof SyntaxError) { + return sendError(reply, 500, 'Export file is not valid JSON'); + } + + fastify.log.error(error, 'Failed to export cache file'); + return sendError(reply, 500, 'Failed to export cache file'); + } + }); + + fastify.post('/reload', { + config: { + rateLimit: { + max: RELOAD_RATE_LIMIT_MAX, + timeWindow: RATE_LIMIT_WINDOW_MS + } + } + }, async (_request, reply) => { + try { + await loadData(); + startRpcHealthCheck(); + const cachedData = getCachedData(); + return { + status: 'success', + lastUpdated: cachedData.lastUpdated, + totalChains: cachedData.indexed ? 
cachedData.indexed.all.length : 0 + }; + } catch (error) { + fastify.log.error(error, 'Failed to reload data'); + return sendError(reply, 500, 'Failed to reload data'); + } + }); + + fastify.get('/validate', async (_request, reply) => { + const validationResults = validateChainData(); + if (validationResults.error) { + return sendError(reply, 503, validationResults.error); + } + return validationResults; + }); + + fastify.get('/keywords', async () => { + const keywordResults = getAllKeywords(); + const cachedData = getCachedData(); + return { lastUpdated: cachedData.lastUpdated, ...keywordResults }; + }); + + fastify.get('/stats', async () => { + const chains = getAllChains(); + const monitorResults = getRpcMonitoringResults(); + + const { totalChains, totalMainnets, totalTestnets, totalL2s, totalBeacons } = countChainsByTag(chains); + + const rpcWorking = monitorResults.workingEndpoints; + const rpcFailed = monitorResults.failedEndpoints || 0; + const rpcTested = monitorResults.testedEndpoints; + const rpcHealthPercent = rpcTested > 0 + ? 
Math.round((rpcWorking / rpcTested) * 10000) / 100 + : null; + + return { + totalChains, + totalMainnets, + totalTestnets, + totalL2s, + totalBeacons, + rpc: { + totalEndpoints: monitorResults.totalEndpoints, + tested: rpcTested, + working: rpcWorking, + failed: rpcFailed, + healthPercent: rpcHealthPercent + }, + lastUpdated: monitorResults.lastUpdated + }; + }); +} diff --git a/src/http/routes/chains.js b/src/http/routes/chains.js new file mode 100644 index 0000000..523af56 --- /dev/null +++ b/src/http/routes/chains.js @@ -0,0 +1,91 @@ +import { searchChains, getChainById, getAllChains } from '../../store/queries.js'; +import { getPricesForChains, getPriceForChain } from '../../../priceService.js'; +import { MAX_SEARCH_QUERY_LENGTH, RATE_LIMIT_WINDOW_MS, SEARCH_RATE_LIMIT_MAX } from '../../../config.js'; +import { parseIntParam } from '../util/parseIntParam.js'; +import { sendError } from '../util/sendError.js'; + +const VALID_TAGS = ['Testnet', 'L2', 'Beacon', 'ZK', 'Validium', 'Optimium']; + +export async function chainsRoutes(fastify) { + fastify.get('/chains', { + schema: { + querystring: { + type: 'object', + properties: { + tag: { + type: 'string', + enum: VALID_TAGS, + errorMessage: { enum: `Invalid tag. Allowed: ${VALID_TAGS.join(', ')}` } + } + }, + additionalProperties: false + } + } + }, async (request) => { + const { tag } = request.query; + let chains = getAllChains(); + if (tag) { + chains = chains.filter(chain => chain.tags?.includes(tag)); + } + const chainIds = chains.map(c => c.chainId); + const priceMap = await getPricesForChains(chainIds); + const enriched = chains.map(chain => ({ + ...chain, + price: priceMap.get(chain.chainId) ?? 
null + })); + return { count: enriched.length, chains: enriched }; + }); + + fastify.get('/chains/:id', { + schema: { + params: { + type: 'object', + properties: { + id: { + type: 'string', + pattern: '^-?\\d+$', + errorMessage: 'Invalid chain ID' + } + }, + required: ['id'] + } + } + }, async (request, reply) => { + const chainId = parseIntParam(request.params.id); + const chain = getChainById(chainId); + if (!chain) return sendError(reply, 404, 'Chain not found'); + const price = await getPriceForChain(chainId); + return { ...chain, price }; + }); + + fastify.get('/search', { + config: { + rateLimit: { max: SEARCH_RATE_LIMIT_MAX, timeWindow: RATE_LIMIT_WINDOW_MS } + }, + schema: { + querystring: { + type: 'object', + properties: { + q: { + type: 'string', + minLength: 1, + maxLength: MAX_SEARCH_QUERY_LENGTH, + errorMessage: { + minLength: 'Query parameter "q" is required', + maxLength: `Query too long. Max length: ${MAX_SEARCH_QUERY_LENGTH}` + } + } + }, + required: ['q'], + additionalProperties: false, + errorMessage: { + required: { q: 'Query parameter "q" is required' } + } + } + } + }, async (request) => { + const { q } = request.query; + const results = searchChains(q); + return { query: q, count: results.length, results }; + }); +} diff --git a/src/http/routes/clients.js b/src/http/routes/clients.js new file mode 100644 index 0000000..29c02f7 --- /dev/null +++ b/src/http/routes/clients.js @@ -0,0 +1,39 @@ +import { getClientsByChain } from '../../../clientsView.js'; +import { getRpcMonitoringResults } from '../../store/queries.js'; +import { parseIntParam } from '../util/parseIntParam.js'; +import { sendError } from '../util/sendError.js'; + +export async function clientsRoutes(fastify) { + fastify.get('/clients', async () => { + const results = getRpcMonitoringResults(); + const chains = getClientsByChain(); + return { + lastUpdated: results.lastUpdated, + count: chains.length, + chains + }; + }); + + fastify.get('/clients/:id', { + schema: { + params: { + 
type: 'object', + properties: { + id: { + type: 'string', + pattern: '^-?\\d+$', + errorMessage: 'Invalid chain ID' + } + }, + required: ['id'] + } + } + }, async (request, reply) => { + const chainId = parseIntParam(request.params.id); + const summary = getClientsByChain(chainId); + if (!summary) { + return sendError(reply, 404, 'No monitoring data available yet for this chain'); + } + return summary; + }); +} diff --git a/src/http/routes/endpoints.js b/src/http/routes/endpoints.js new file mode 100644 index 0000000..ba25760 --- /dev/null +++ b/src/http/routes/endpoints.js @@ -0,0 +1,31 @@ +import { getAllEndpoints, getEndpointsById } from '../../store/queries.js'; +import { parseIntParam } from '../util/parseIntParam.js'; +import { sendError } from '../util/sendError.js'; + +const intIdParam = { + type: 'object', + properties: { + id: { + type: 'string', + pattern: '^-?\\d+$', + errorMessage: 'Invalid chain ID' + } + }, + required: ['id'] +}; + +export async function endpointsRoutes(fastify) { + fastify.get('/endpoints', async () => { + const endpoints = getAllEndpoints(); + return { count: endpoints.length, endpoints }; + }); + + fastify.get('/endpoints/:id', { + schema: { params: intIdParam } + }, async (request, reply) => { + const chainId = parseIntParam(request.params.id); + const result = getEndpointsById(chainId); + if (!result) return sendError(reply, 404, 'Chain not found'); + return result; + }); +} diff --git a/src/http/routes/metrics.js b/src/http/routes/metrics.js new file mode 100644 index 0000000..776c856 --- /dev/null +++ b/src/http/routes/metrics.js @@ -0,0 +1,54 @@ +import { getCachedData } from '../../store/cache.js'; +import { getRpcMonitoringStatus } from '../../services/rpcHealth.js'; +import { validateChainData } from '../../services/validation.js'; +import { getL2BeatRefreshStatus } from '../../services/l2beatRefresher.js'; +import { renderMetrics } from '../../util/metrics.js'; + +/** + * GET /metrics — Prometheus exposition format. 
Scrape this endpoint to + * monitor source freshness, refresh outcomes, RPC checks, and validation + * error counts. Mounted as text/plain so existing scrapers parse it + * without configuration. + * + * Validation is O(N chains × M rules) and would dominate /metrics latency + * if recomputed on every scrape. Cache the result for VALIDATION_CACHE_MS + * (default 30s) — only about two of Prometheus' default 15s scrape intervals and + * well under the chain refresh cadence, so freshness loss is negligible. + */ +const VALIDATION_CACHE_MS = 30_000; +let validationCache = { summary: null, computedAt: 0 }; + +function cachedValidationSummary() { + const now = Date.now(); + if (now - validationCache.computedAt < VALIDATION_CACHE_MS) { + return validationCache.summary; + } + try { + const report = validateChainData(); + validationCache = { + summary: report.error ? null : report.summary, + computedAt: now + }; + } catch { + validationCache = { summary: null, computedAt: now }; + } + return validationCache.summary; +} + +// Test-only helper. +export function _resetMetricsValidationCacheForTests() { + validationCache = { summary: null, computedAt: 0 }; +} + +export async function metricsRoute(fastify) { + fastify.get('/metrics', async (_request, reply) => { + const cache = getCachedData(); + const rpcStatus = getRpcMonitoringStatus(); + const l2beatStatus = getL2BeatRefreshStatus(); + const validationSummary = cachedValidationSummary(); + + const body = renderMetrics({ cache, rpcStatus, l2beatStatus, validationSummary }); + reply.header('Content-Type', 'text/plain; version=0.0.4'); + return body; + }); +} diff --git a/src/http/routes/refresher.js b/src/http/routes/refresher.js new file mode 100644 index 0000000..ab7dbf7 --- /dev/null +++ b/src/http/routes/refresher.js @@ -0,0 +1,10 @@ +import { getChainRefresherStatus } from '../../services/chainRefresher.js'; + +/** + * GET /refresher — current state of the unified rolling refresher. 
+ * Useful for ops dashboards: sweep cursor, queue depth, last tick, and + * per-job-type status (l2beat last refresh, RPC sweep completion). + */ +export async function refresherRoute(fastify) { + fastify.get('/refresher', async () => getChainRefresherStatus()); +} diff --git a/src/http/routes/relations.js b/src/http/routes/relations.js new file mode 100644 index 0000000..6326b10 --- /dev/null +++ b/src/http/routes/relations.js @@ -0,0 +1,57 @@ +import { getAllRelations, getRelationsById, traverseRelations } from '../../domain/relations.js'; +import { parseIntParam } from '../util/parseIntParam.js'; +import { sendError } from '../util/sendError.js'; + +const MIN_DEPTH = 1; +const MAX_DEPTH = 5; +const DEFAULT_DEPTH = 2; + +const intIdParam = { + type: 'object', + properties: { + id: { + type: 'string', + pattern: '^-?\\d+$', + errorMessage: 'Invalid chain ID' + } + }, + required: ['id'] +}; + +export async function relationsRoutes(fastify) { + fastify.get('/relations', async () => getAllRelations()); + + fastify.get('/relations/:id', { + schema: { params: intIdParam } + }, async (request, reply) => { + const chainId = parseIntParam(request.params.id); + const result = getRelationsById(chainId); + if (!result) return sendError(reply, 404, 'Chain not found'); + return result; + }); + + fastify.get('/relations/:id/graph', { + schema: { + params: intIdParam, + querystring: { + type: 'object', + properties: { + depth: { + type: 'integer', + minimum: MIN_DEPTH, + maximum: MAX_DEPTH, + default: DEFAULT_DEPTH, + errorMessage: `Invalid depth. Must be between ${MIN_DEPTH} and ${MAX_DEPTH}` + } + }, + additionalProperties: false + } + } + }, async (request, reply) => { + const chainId = parseIntParam(request.params.id); + const depth = request.query.depth ?? 
DEFAULT_DEPTH; + const result = traverseRelations(chainId, depth); + if (!result) return sendError(reply, 404, 'Chain not found'); + return result; + }); +} diff --git a/src/http/routes/root.js b/src/http/routes/root.js new file mode 100644 index 0000000..828848f --- /dev/null +++ b/src/http/routes/root.js @@ -0,0 +1,51 @@ +import pkg from '../../../package.json' with { type: 'json' }; +import { + DATA_SOURCE_THE_GRAPH, + DATA_SOURCE_CHAINLIST, + DATA_SOURCE_CHAINS, + DATA_SOURCE_SLIP44, + DATA_SOURCE_L2BEAT_API +} from '../../../config.js'; + +const ENDPOINTS = { + '/health': 'Health check and data status', + '/chains': 'Get all chains (optional ?tag=Testnet|L2|Beacon)', + '/chains/:id': 'Get chain by ID', + '/search?q={query}': 'Search chains by name or ID', + '/relations': 'Get all chain relations data', + '/relations/:id': 'Get relations for a specific chain by ID', + '/endpoints': 'Get all chain endpoints (RPC, firehose, substreams)', + '/endpoints/:id': 'Get endpoints for a specific chain by ID', + '/sources': 'Get data sources status', + '/export': 'Export cached snapshot file', + '/slip44': 'Get all SLIP-0044 coin types as JSON', + '/slip44/:coinType': 'Get specific SLIP-0044 coin type by ID', + '/reload': 'Reload data from sources (POST)', + '/validate': 'Validate chain data for potential human errors', + '/keywords': 'Get extracted keywords (blockchain names, network names, client names, etc.)', + '/rpc-monitor': 'Get RPC endpoint monitoring results', + '/rpc-monitor/:id': 'Get RPC monitoring results for a specific chain by ID', + '/stats': 'Get aggregate stats (chain counts, RPC health percentage)', + '/relations/:id/graph?depth=N': 'BFS graph traversal of chain relations (default depth: 2)', + '/scaling': 'Get all chains with L2BEAT scaling data (stage, category, DA layer, TVS)', + '/scaling/:id': 'Get L2BEAT scaling data for a specific chain by ID', + '/scaling/status': 'Get L2BEAT refresher status (last refresh, source, errors)', + '/metrics': 
'Prometheus exposition format (counters + gauges for source freshness, refreshes, validation)', + '/refresher': 'Unified rolling refresher status (queue depth, sweep cursor, per-job-type state)' +}; + +export async function rootRoute(fastify) { + fastify.get('/', async () => ({ + name: 'Chains API', + version: pkg.version, + description: 'API query service for blockchain chain data from multiple sources', + endpoints: ENDPOINTS, + dataSources: [ + DATA_SOURCE_THE_GRAPH, + DATA_SOURCE_CHAINLIST, + DATA_SOURCE_CHAINS, + DATA_SOURCE_SLIP44, + DATA_SOURCE_L2BEAT_API + ] + })); +} diff --git a/src/http/routes/rpcMonitor.js b/src/http/routes/rpcMonitor.js new file mode 100644 index 0000000..2132ed7 --- /dev/null +++ b/src/http/routes/rpcMonitor.js @@ -0,0 +1,52 @@ +import { getRpcMonitoringResults } from '../../store/queries.js'; +import { getRpcMonitoringStatus } from '../../services/rpcHealth.js'; +import { summarizeChainClients } from '../../../clientsView.js'; +import { parseIntParam } from '../util/parseIntParam.js'; +import { sendError } from '../util/sendError.js'; + +const intIdParam = { + type: 'object', + properties: { + id: { + type: 'string', + pattern: '^-?\\d+$', + errorMessage: 'Invalid chain ID' + } + }, + required: ['id'] +}; + +export async function rpcMonitorRoutes(fastify) { + fastify.get('/rpc-monitor', async () => { + const results = getRpcMonitoringResults(); + const status = getRpcMonitoringStatus(); + return { ...status, ...results }; + }); + + fastify.get('/rpc-monitor/:id', { + schema: { params: intIdParam } + }, async (request, reply) => { + const chainId = parseIntParam(request.params.id); + + const results = getRpcMonitoringResults(); + const chainResults = results.results.filter(r => r.chainId === chainId); + + if (chainResults.length === 0) { + return sendError(reply, 404, 'No monitoring results found for this chain'); + } + + const workingCount = chainResults.filter(r => r.status === 'working').length; + const failedCount = 
chainResults.filter(r => r.status === 'failed').length; + + return { + chainId, + chainName: chainResults[0].chainName, + totalEndpoints: chainResults.length, + workingEndpoints: workingCount, + failedEndpoints: failedCount, + lastUpdated: results.lastUpdated, + endpoints: chainResults, + clients: summarizeChainClients(chainResults)?.clients ?? [] + }; + }); +} diff --git a/src/http/routes/scaling.js b/src/http/routes/scaling.js new file mode 100644 index 0000000..ef3eafd --- /dev/null +++ b/src/http/routes/scaling.js @@ -0,0 +1,56 @@ +import { getAllChains, getChainById } from '../../store/queries.js'; +import { getL2BeatRefreshStatus } from '../../services/l2beatRefresher.js'; +import { parseIntParam } from '../util/parseIntParam.js'; +import { sendError } from '../util/sendError.js'; + +/** + * /scaling — projects with L2BEAT data (any chain that L2BEAT classifies). + * /scaling/:id — single chain's L2BEAT view. + * + * Returns empty / 404 when L2BEAT data hasn't loaded yet (live API gated and + * static fallback unavailable). When the live API succeeds the per-chain + * `l2Beat.dataFreshness` is `'live'`; when only the static snapshot is + * available it's `'fallback'`. Chains the merge couldn't reach have no + * `l2Beat` field at all (rather than a synthetic `'unavailable'` marker). + * + * Known gap: Starknet (CAIP-2 numeric ID 0x534e5f4d41494e = 23448594291968334) + * exceeds Number.MAX_SAFE_INTEGER and is omitted from data/l2beat-fallback.json. + * The live API can still surface Starknet — and the indexer will accept it as + * a key — but precision-sensitive lookups via `parseIntParam(:id)` will not + * round-trip its chainId. Switching the codebase to BigInt chainIds is the + * proper fix; until then, /scaling/:id is best-effort for that chain. 
+ */ +export async function scalingRoutes(fastify) { + fastify.get('/scaling', async () => { + const chains = getAllChains().filter(c => c.l2Beat); + return { + count: chains.length, + refresher: getL2BeatRefreshStatus(), + chains + }; + }); + + fastify.get('/scaling/status', async () => getL2BeatRefreshStatus()); + + fastify.get('/scaling/:id', { + schema: { + params: { + type: 'object', + properties: { + id: { + type: 'string', + pattern: '^-?\\d+$', + errorMessage: 'Invalid chain ID' + } + }, + required: ['id'] + } + } + }, async (request, reply) => { + const chainId = parseIntParam(request.params.id); + const chain = getChainById(chainId); + if (!chain) return sendError(reply, 404, 'Chain not found'); + if (!chain.l2Beat) return sendError(reply, 404, 'No L2BEAT data for this chain'); + return chain; + }); +} diff --git a/src/http/routes/slip44.js b/src/http/routes/slip44.js new file mode 100644 index 0000000..400aa3b --- /dev/null +++ b/src/http/routes/slip44.js @@ -0,0 +1,39 @@ +import { getCachedData } from '../../store/cache.js'; +import { parseIntParam } from '../util/parseIntParam.js'; +import { sendError } from '../util/sendError.js'; + +export async function slip44Routes(fastify) { + fastify.get('/slip44', async (_request, reply) => { + const cachedData = getCachedData(); + if (!cachedData.slip44) { + return sendError(reply, 503, 'SLIP-0044 data not loaded'); + } + return { + count: Object.keys(cachedData.slip44).length, + coinTypes: cachedData.slip44 + }; + }); + + fastify.get('/slip44/:coinType', { + schema: { + params: { + type: 'object', + properties: { + coinType: { + type: 'string', + pattern: '^-?\\d+$', + errorMessage: 'Invalid coin type' + } + }, + required: ['coinType'] + } + } + }, async (request, reply) => { + const coinType = parseIntParam(request.params.coinType); + const cachedData = getCachedData(); + if (!cachedData.slip44?.[coinType]) { + return sendError(reply, 404, 'Coin type not found'); + } + return cachedData.slip44[coinType]; + 
}); +} diff --git a/src/http/util/parseIntParam.js b/src/http/util/parseIntParam.js new file mode 100644 index 0000000..c6409b2 --- /dev/null +++ b/src/http/util/parseIntParam.js @@ -0,0 +1,17 @@ +export function parseIntParam(param) { + if (typeof param === 'number') { + return Number.isInteger(param) ? param : null; + } + + if (typeof param !== 'string') { + return null; + } + + const normalized = param.trim(); + if (!/^-?\d+$/.test(normalized)) { + return null; + } + + const parsed = Number.parseInt(normalized, 10); + return Number.isNaN(parsed) ? null : parsed; +} diff --git a/src/http/util/sendError.js b/src/http/util/sendError.js new file mode 100644 index 0000000..5615e18 --- /dev/null +++ b/src/http/util/sendError.js @@ -0,0 +1,3 @@ +export function sendError(reply, code, message) { + return reply.code(code).send({ error: message }); +} diff --git a/src/services/chainRefresher.js b/src/services/chainRefresher.js new file mode 100644 index 0000000..80064c2 --- /dev/null +++ b/src/services/chainRefresher.js @@ -0,0 +1,324 @@ +/** + * Unified rolling refresher. + * + * Replaces the two parallel scheduler patterns (services/rpcHealth.js + * setInterval + services/l2beatRefresher.js setInterval) with one queue + * and one tick. Every SWEEP_TICK_MS the loop pops a single job: + * + * queue = [ + * { type: 'l2beat_batch' }, // 1 job + * { type: 'chain_rpc', chainId: N }, { type: 'chain_rpc', chainId: M }, ... + * ] + * + * When the queue empties, a fresh sweep is enqueued from the current + * indexed chains. This spreads RPC fan-out evenly across the sweep + * window (~5 min for 300 chains at 1 tick/sec) instead of a thundering + * herd at start-of-loop. 
+ * + * The existing services/rpcHealth.js and services/l2beatRefresher.js + * modules become thin shims delegating to this module so the old API + * surface (startRpcHealthCheck, startL2BeatRefresh, getRpcMonitoringStatus, + * getL2BeatRefreshStatus, runRpcHealthCheck, runL2BeatRefresh) keeps + * working unchanged. + */ +import { jsonRpcCall } from '../../rpcUtil.js'; +import { + RPC_CHECK_TIMEOUT_MS, + L2BEAT_REFRESH_INTERVAL_MS, + MAX_ENDPOINTS_PER_CHAIN +} from '../../config.js'; +import { logger } from '../util/logger.js'; +import { incCounter } from '../util/metrics.js'; +import { cachedData } from '../store/cache.js'; +import { indexL2BeatSource } from '../store/indexer.js'; +import { fetchL2Beat } from '../sources/l2beat.js'; + +const SWEEP_TICK_MS = Number(process.env.CHAIN_REFRESHER_TICK_MS) || 1000; + +let queue = []; +let cursor = { + jobIndex: 0, + totalJobs: 0, + sweepNumber: 0, + sweepStartedAt: null, + // Snapshot of cachedData.lastUpdated at sweep start. Used to detect + // inter-job races (loadData() ran between job N and job N+1). The + // remaining jobs in the sweep are dropped on detection so a refresh + // doesn't write a frankensweep of mixed data versions. + sweepDataVersion: null +}; +let tickTimer = null; +let tickInFlight = false; +let lastTickAt = null; +let lastTickJobType = null; + +// Per-job-type status (read by the legacy getX status accessors). 
+let l2beatState = { + lastRefreshAt: null, + lastRefreshSource: null, + lastRefreshProjectCount: 0, + lastRefreshError: null +}; + +let rpcState = { + isMonitoring: false, + lastSweepCompletedAt: null, + endpointsCheckedThisSweep: 0 +}; + +// ───────────────────────── job processors ───────────────────────── + +function normalizeRpcUrl(rpcEntry) { + if (!rpcEntry) return null; + if (typeof rpcEntry === 'string') return rpcEntry; + if (typeof rpcEntry === 'object' && rpcEntry.url) return rpcEntry.url; + return null; +} + +function parseBlockHeight(value) { + if (typeof value === 'number') return Number.isFinite(value) ? value : null; + if (typeof value === 'string') { + if (value.startsWith('0x')) { + const parsed = Number.parseInt(value, 16); + return Number.isNaN(parsed) ? null : parsed; + } + const parsed = Number(value); + return Number.isNaN(parsed) ? null : parsed; + } + return null; +} + +async function checkRpcEndpoint(url) { + const result = { url, ok: false, clientVersion: null, blockHeight: null, error: null }; + + if (!url?.startsWith('http')) { + result.error = 'Unsupported RPC URL'; + return result; + } + if (url.includes('${')) { + result.error = 'RPC URL requires API key substitution'; + return result; + } + + try { + const [clientVersion, blockNumber] = await Promise.all([ + jsonRpcCall(url, 'web3_clientVersion', { timeoutMs: RPC_CHECK_TIMEOUT_MS }), + jsonRpcCall(url, 'eth_blockNumber', { timeoutMs: RPC_CHECK_TIMEOUT_MS }) + ]); + result.clientVersion = clientVersion || null; + result.blockHeight = parseBlockHeight(blockNumber); + result.ok = Boolean(result.clientVersion) && result.blockHeight !== null; + } catch (error) { + result.error = error.message; + } + return result; +} + +/** + * Check every RPC URL for a single chain and write results to cache. + * Per-chain `lastTested` timestamp lands on the indexed chain entry so + * /chains/:id surfaces freshness without a separate accessor. 
+ */ +export async function processChainRpc(chainId) { + if (!cachedData.indexed?.byChainId?.[chainId]) return; + const chain = cachedData.indexed.byChainId[chainId]; + + const dataVersion = cachedData.lastUpdated; + const normalized = (chain.rpc || []).map(normalizeRpcUrl).filter(Boolean); + // Dedupe, keep only HTTP(S), then cap per-chain fan-out so large chain + // entries don't create per-tick request bursts that ignore the configured + // MAX_ENDPOINTS_PER_CHAIN ceiling. + const urls = Array.from(new Set(normalized)) + .filter(u => u.startsWith('http')) + .slice(0, MAX_ENDPOINTS_PER_CHAIN); + if (urls.length === 0) return; + + rpcState.isMonitoring = true; + const results = await Promise.all(urls.map(checkRpcEndpoint)); + rpcState.isMonitoring = false; + rpcState.endpointsCheckedThisSweep += results.length; + + // Race guard: a concurrent loadData() may have replaced the cache. + if (cachedData.lastUpdated !== dataVersion) { + logger.warn({ chainId }, 'Chain RPC check skipped: data changed during run'); + return; + } + + if (!cachedData.rpcHealth) cachedData.rpcHealth = {}; + cachedData.rpcHealth[chainId] = results; + chain.lastTested = new Date().toISOString(); + incCounter('chains_api_rpc_check_total', { outcome: 'completed' }, results.length); +} + +/** + * Fetch L2BEAT data and re-merge into the index. Mirrors the previous + * runL2BeatRefresh contract but lives inside the unified scheduler. 
+ */ +export async function processL2BeatBatch() { + if (!cachedData.indexed) { + logger.warn('L2BEAT refresh skipped: data not loaded'); + return { skipped: 'no-data' }; + } + + const dataVersion = cachedData.lastUpdated; + let fresh; + try { + fresh = await fetchL2Beat(); + } catch (err) { + l2beatState.lastRefreshError = err.message; + logger.error({ err: err.message }, 'L2BEAT refresh failed'); + incCounter('chains_api_refresh_total', { refresher: 'l2beat', outcome: 'error' }); + return { skipped: 'fetch-error', error: err.message }; + } + + if (cachedData.lastUpdated !== dataVersion) { + logger.warn('L2BEAT refresh skipped: data changed during run'); + incCounter('chains_api_refresh_total', { refresher: 'l2beat', outcome: 'data-changed' }); + return { skipped: 'data-changed' }; + } + + cachedData.l2beat = fresh; + indexL2BeatSource(fresh, cachedData.indexed); + + l2beatState.lastRefreshAt = new Date().toISOString(); + l2beatState.lastRefreshSource = fresh.source; + l2beatState.lastRefreshProjectCount = fresh.projects.length; + l2beatState.lastRefreshError = null; + + logger.info( + { source: fresh.source, projects: fresh.projects.length }, + 'L2BEAT refresh completed' + ); + incCounter('chains_api_refresh_total', { refresher: 'l2beat', outcome: fresh.source }); + return { source: fresh.source, projectCount: fresh.projects.length }; +} + +// ───────────────────────── scheduler ───────────────────────── + +function buildSweepQueue() { + const chains = cachedData.indexed?.all || []; + const jobs = [{ type: 'l2beat_batch' }]; + for (const c of chains) { + jobs.push({ type: 'chain_rpc', chainId: c.chainId }); + } + return jobs; +} + +function onSweepStart() { + cursor = { + jobIndex: 0, + totalJobs: queue.length, + sweepNumber: cursor.sweepNumber + 1, + sweepStartedAt: new Date().toISOString(), + sweepDataVersion: cachedData.lastUpdated + }; + rpcState.endpointsCheckedThisSweep = 0; +} + +function onSweepEnd() { + rpcState.lastSweepCompletedAt = new 
Date().toISOString(); + cachedData.lastRpcCheck = rpcState.lastSweepCompletedAt; + logger.info( + { + sweepNumber: cursor.sweepNumber, + endpointsChecked: rpcState.endpointsCheckedThisSweep, + durationMs: Date.now() - new Date(cursor.sweepStartedAt).getTime() + }, + 'Chain refresher sweep completed' + ); +} + +export async function tickOnce() { + if (tickInFlight) return; + tickInFlight = true; + lastTickAt = new Date().toISOString(); + try { + if (queue.length === 0) { + queue = buildSweepQueue(); + onSweepStart(); + } + + // Inter-job race guard: if a concurrent loadData() bumped lastUpdated + // mid-sweep, the queue references chainIds from the old data version. + // Drop the rest of the sweep — the next tick will rebuild from scratch. + if ( + cursor.sweepDataVersion !== null && + cachedData.lastUpdated !== cursor.sweepDataVersion + ) { + logger.warn( + { sweepNumber: cursor.sweepNumber, droppedJobs: queue.length }, + 'Chain refresher sweep aborted: data version changed mid-sweep' + ); + queue = []; + return; + } + + const job = queue.shift(); + cursor.jobIndex++; + lastTickJobType = job?.type ?? null; + + if (job?.type === 'l2beat_batch') { + await processL2BeatBatch(); + } else if (job?.type === 'chain_rpc') { + await processChainRpc(job.chainId); + } + + if (queue.length === 0 && cursor.totalJobs > 0) { + onSweepEnd(); + } + } catch (err) { + logger.error({ err: err.message || err }, 'Chain refresher tick failed'); + } finally { + tickInFlight = false; + } +} + +export function startChainRefresher() { + if (tickTimer) return; + tickTimer = setInterval(() => { + tickOnce().catch(err => logger.error({ err: err.message || err }, 'Tick swallowed error')); + }, SWEEP_TICK_MS); + tickTimer.unref?.(); + // Kick off the first tick immediately so the first L2BEAT batch happens + // without waiting one SWEEP_TICK_MS. 
+ tickOnce().catch(err => logger.error({ err: err.message || err }, 'Initial tick swallowed error')); +} + +export function stopChainRefresher() { + if (tickTimer) { + clearInterval(tickTimer); + tickTimer = null; + } +} + +export function getChainRefresherStatus() { + return { + tickIntervalMs: SWEEP_TICK_MS, + isTickInFlight: tickInFlight, + lastTickAt, + lastTickJobType, + queueDepth: queue.length, + sweep: cursor, + l2beat: { + ...l2beatState, + intervalMs: L2BEAT_REFRESH_INTERVAL_MS + }, + rpc: { + isMonitoring: rpcState.isMonitoring, + lastSweepCompletedAt: rpcState.lastSweepCompletedAt, + endpointsCheckedThisSweep: rpcState.endpointsCheckedThisSweep + } + }; +} + +// Test-only helper. +export function _resetChainRefresherForTests() { + stopChainRefresher(); + queue = []; + cursor = { jobIndex: 0, totalJobs: 0, sweepNumber: 0, sweepStartedAt: null, sweepDataVersion: null }; + tickInFlight = false; + lastTickAt = null; + lastTickJobType = null; + l2beatState = { lastRefreshAt: null, lastRefreshSource: null, lastRefreshProjectCount: 0, lastRefreshError: null }; + rpcState = { isMonitoring: false, lastSweepCompletedAt: null, endpointsCheckedThisSweep: 0 }; +} diff --git a/src/services/l2beatRefresher.js b/src/services/l2beatRefresher.js new file mode 100644 index 0000000..4c65b6d --- /dev/null +++ b/src/services/l2beatRefresher.js @@ -0,0 +1,39 @@ +/** + * Backwards-compatible shim. Implementation lives in chainRefresher.js + * (the unified rolling refresher). This module preserves the old API: + * runL2BeatRefresh, startL2BeatRefresh, stopL2BeatRefresh, + * getL2BeatRefreshStatus. + * + * New code should import from chainRefresher.js directly. 
+ */ +import { L2BEAT_REFRESH_INTERVAL_MS } from '../../config.js'; +import { + startChainRefresher, + stopChainRefresher, + processL2BeatBatch, + getChainRefresherStatus +} from './chainRefresher.js'; + +export async function runL2BeatRefresh() { + return processL2BeatBatch(); +} + +export function startL2BeatRefresh() { + startChainRefresher(); +} + +export function stopL2BeatRefresh() { + stopChainRefresher(); +} + +export function getL2BeatRefreshStatus() { + const status = getChainRefresherStatus(); + return { + isRefreshing: status.isTickInFlight && status.lastTickJobType === 'l2beat_batch', + lastRefreshAt: status.l2beat.lastRefreshAt, + lastRefreshSource: status.l2beat.lastRefreshSource, + lastRefreshError: status.l2beat.lastRefreshError, + lastRefreshProjectCount: status.l2beat.lastRefreshProjectCount, + intervalMs: L2BEAT_REFRESH_INTERVAL_MS + }; +} diff --git a/src/services/loader.js b/src/services/loader.js new file mode 100644 index 0000000..c4e4e0f --- /dev/null +++ b/src/services/loader.js @@ -0,0 +1,168 @@ +import { + DATA_SOURCE_THE_GRAPH, + DATA_SOURCE_CHAINLIST, + DATA_SOURCE_CHAINS, + DATA_SOURCE_SLIP44 +} from '../../config.js'; +import { fetchData } from '../transport/fetch.js'; +import { parseSLIP44 } from '../sources/slip44.js'; +import { fetchL2Beat } from '../sources/l2beat.js'; +import { indexData } from '../store/indexer.js'; +import { cachedData, applyDataToCache } from '../store/cache.js'; +import { + readSnapshotFromDisk, + writeSnapshotToDiskAtomic, + DATA_CACHE_PATH +} from '../store/snapshot.js'; +import { logger } from '../util/logger.js'; + +const DATA_SOURCES = { + theGraph: DATA_SOURCE_THE_GRAPH, + chainlist: DATA_SOURCE_CHAINLIST, + chains: DATA_SOURCE_CHAINS, + slip44: DATA_SOURCE_SLIP44 +}; + +let dataRefreshPromise = null; +let startupInitializationPromise = null; +let startupInitialized = false; + +/** + * Count how many of the three chain registries (theGraph, chainlist, chains) + * loaded successfully. 
SLIP-0044 is excluded because it only contributes + * coin-type metadata, not chain entries — if every chain registry fails but + * SLIP-0044 succeeds, the API would otherwise come up with an empty index. + * L2BEAT is also excluded because it has its own static fallback. + */ +function countLoadedChainSources(data) { + let loaded = 0; + if (data.theGraph !== null) loaded++; + if (data.chainlist !== null) loaded++; + if (data.chains !== null) loaded++; + return loaded; +} + +async function fetchAndBuildData() { + logger.info('Loading data from all sources'); + + const results = await Promise.allSettled([ + fetchData(DATA_SOURCES.theGraph), + fetchData(DATA_SOURCES.chainlist), + fetchData(DATA_SOURCES.chains), + fetchData(DATA_SOURCES.slip44, 'text'), + fetchL2Beat() + ]); + + const theGraph = results[0].status === 'fulfilled' ? results[0].value : null; + const chainlist = results[1].status === 'fulfilled' ? results[1].value : null; + const chains = results[2].status === 'fulfilled' ? results[2].value : null; + const slip44Text = results[3].status === 'fulfilled' ? results[3].value : null; + const l2beat = results[4].status === 'fulfilled' ? results[4].value : null; + + const sourceNames = ['theGraph', 'chainlist', 'chains', 'slip44', 'l2beat']; + results.forEach((result, i) => { + if (result.status === 'rejected') { + logger.error({ source: sourceNames[i], err: result.reason?.message || result.reason }, 'Failed to load source'); + } + }); + + // Only parse SLIP-44 when fetch actually returned something; otherwise keep + // null so /sources can distinguish "fetch failed" from "fetched, empty". + const slip44 = slip44Text === null ? 
null : parseSLIP44(slip44Text); + const indexed = indexData(theGraph, chainlist, chains, slip44, l2beat); + + return { + data: { + theGraph, + chainlist, + chains, + slip44, + l2beat, + indexed, + lastUpdated: new Date().toISOString(), + rpcHealth: {}, + lastRpcCheck: null + }, + loadedSourceCount: countLoadedChainSources({ theGraph, chainlist, chains }) + }; +} + +async function refreshDataWithGuard(options = {}) { + const { requireAtLeastOneSource = false, logSuccessMessage = true } = options; + + if (dataRefreshPromise) return dataRefreshPromise; + + dataRefreshPromise = (async () => { + const { data, loadedSourceCount } = await fetchAndBuildData(); + + if (requireAtLeastOneSource && loadedSourceCount === 0) { + throw new Error('All chain registry sources failed during data refresh'); + } + + applyDataToCache(data); + await writeSnapshotToDiskAtomic(cachedData); + + if (logSuccessMessage) { + logger.info({ totalChains: cachedData.indexed.all.length }, 'Data loaded successfully'); + } + + return cachedData; + })(); + + try { + return await dataRefreshPromise; + } finally { + dataRefreshPromise = null; + } +} + +export async function loadData() { + return refreshDataWithGuard({ requireAtLeastOneSource: true }); +} + +/** + * Stale-first startup: + * 1. Load valid snapshot from disk if available. + * 2. Trigger background refresh; keep serving stale data on failure. + * 3. Fall back to a blocking load if no valid snapshot exists. 
+ */ +export async function initializeDataOnStartup(options = {}) { + const { onBackgroundRefreshSuccess } = options; + + if (startupInitialized) return cachedData; + if (startupInitializationPromise) return startupInitializationPromise; + + startupInitializationPromise = (async () => { + const snapshotData = await readSnapshotFromDisk(); + + if (snapshotData) { + applyDataToCache(snapshotData); + startupInitialized = true; + logger.info({ path: DATA_CACHE_PATH, totalChains: cachedData.indexed.all.length }, 'Loaded cached snapshot'); + + refreshDataWithGuard({ requireAtLeastOneSource: true }) + .then(() => { + logger.info('Background refresh completed successfully'); + if (typeof onBackgroundRefreshSuccess === 'function') { + onBackgroundRefreshSuccess(); + } + }) + .catch(error => { + logger.error({ err: error.message || error }, 'Background refresh failed; continuing with cached data'); + }); + + return cachedData; + } + + logger.info('No valid cache snapshot found. Loading data from remote sources'); + const loadedData = await loadData(); + startupInitialized = true; + return loadedData; + })(); + + try { + return await startupInitializationPromise; + } finally { + startupInitializationPromise = null; + } +} diff --git a/src/services/rpcHealth.js b/src/services/rpcHealth.js new file mode 100644 index 0000000..7213bc1 --- /dev/null +++ b/src/services/rpcHealth.js @@ -0,0 +1,83 @@ +/** + * Backwards-compatible shim. Implementation lives in chainRefresher.js + * (the unified rolling refresher). This module preserves the old API + * surface: startRpcHealthCheck, runRpcHealthCheck, getRpcMonitoringStatus. + * + * New code should import from chainRefresher.js directly. 
+ */ +import { logger } from '../util/logger.js'; +import { incCounter } from '../util/metrics.js'; +import { cachedData } from '../store/cache.js'; +import { + startChainRefresher, + processChainRpc, + getChainRefresherStatus +} from './chainRefresher.js'; + +export function getRpcMonitoringStatus() { + const status = getChainRefresherStatus(); + return { + isMonitoring: status.rpc.isMonitoring, + lastUpdated: cachedData.lastRpcCheck + }; +} + +/** + * Drain a full RPC sweep right now (used by /reload and tests). Differs + * from the rolling tick path: here we process every chain back-to-back + * instead of one chain per tick, so the caller gets blocking semantics. + */ +export async function runRpcHealthCheck() { + if (!cachedData.indexed) { + logger.warn('RPC health check skipped: data not loaded'); + return; + } + + const dataVersion = cachedData.lastUpdated; + const chains = cachedData.indexed.all || []; + + // Detect "no endpoints" to preserve the old log message + early return. + const totalEndpoints = chains.reduce((acc, c) => { + const urls = (c.rpc || []) + .map(r => (typeof r === 'string' ? r : r?.url)) + .filter(u => typeof u === 'string' && u.startsWith('http')); + return acc + new Set(urls).size; + }, 0); + + // Reset state at the start of an all-at-once sweep (legacy contract). + cachedData.rpcHealth = {}; + cachedData.lastRpcCheck = null; + + if (totalEndpoints === 0) { + logger.warn('RPC health check skipped: no RPC endpoints found'); + return; + } + + for (const chain of chains) { + // Abort mid-sweep if a concurrent loadData() replaces the cache, so we + // don't leak partial results from the old data version into the fresh + // cache before the final guard below runs. 
+ if (cachedData.lastUpdated !== dataVersion) { + logger.warn('RPC health check aborted: data changed mid-sweep'); + return; + } + await processChainRpc(chain.chainId); + } + + if (cachedData.lastUpdated !== dataVersion) { + logger.warn('RPC health check skipped: data changed during run'); + return; + } + + cachedData.lastRpcCheck = new Date().toISOString(); + const checkedChainCount = Object.keys(cachedData.rpcHealth).length; + logger.info( + { endpointsTested: totalEndpoints, chainsChecked: checkedChainCount }, + 'RPC health check completed' + ); + incCounter('chains_api_rpc_check_total', { outcome: 'completed' }); +} + +export function startRpcHealthCheck() { + startChainRefresher(); +} diff --git a/src/services/rpcHealthState.js b/src/services/rpcHealthState.js new file mode 100644 index 0000000..a12f93f --- /dev/null +++ b/src/services/rpcHealthState.js @@ -0,0 +1,20 @@ +// State for the RPC health checker, isolated so that store/queries.js can read +// the in-progress flag without importing the runner (which would create a cycle). 
+let rpcCheckInProgress = false; +let rpcCheckPending = false; + +export function getRpcCheckInProgress() { + return rpcCheckInProgress; +} + +export function setRpcCheckInProgress(value) { + rpcCheckInProgress = Boolean(value); +} + +export function getRpcCheckPending() { + return rpcCheckPending; +} + +export function setRpcCheckPending(value) { + rpcCheckPending = Boolean(value); +} diff --git a/src/services/validation.js b/src/services/validation.js new file mode 100644 index 0000000..c3a3f19 --- /dev/null +++ b/src/services/validation.js @@ -0,0 +1,554 @@ +import { cachedData } from '../store/cache.js'; + +function getChainFromSource(chainId, source) { + if (source === 'theGraph') { + return cachedData.theGraph.networks?.find(n => { + if (n.caip2Id) { + const match = n.caip2Id.match(/^eip155:(\d+)$/); + return match && Number.parseInt(match[1], 10) === chainId; + } + return false; + }); + } + if (source === 'chainlist') return cachedData.chainlist?.find(c => c.chainId === chainId); + if (source === 'chains') return cachedData.chains?.find(c => c.chainId === chainId); + return null; +} + +function validateRule1RelationConflicts(chain, errors) { + if (!chain.relations || chain.relations.length === 0) return; + + const graphRelations = chain.relations.filter(r => r.source === 'theGraph'); + + graphRelations.forEach(graphRel => { + if (graphRel.kind === 'testnetOf' && graphRel.chainId) { + if (!chain.tags.includes('Testnet')) { + errors.push({ + rule: 1, + chainId: chain.chainId, + chainName: chain.name, + type: 'relation_tag_conflict', + message: `Chain ${chain.chainId} (${chain.name}) has testnetOf relation but is not tagged as Testnet`, + graphRelation: graphRel + }); + } + + const chainlistChain = getChainFromSource(chain.chainId, 'chainlist'); + if (chainlistChain?.isTestnet === false) { + errors.push({ + rule: 1, + chainId: chain.chainId, + chainName: chain.name, + type: 'relation_source_conflict', + message: `Chain ${chain.chainId} (${chain.name}) has 
testnetOf relation in theGraph but isTestnet=false in chainlist`, + graphRelation: graphRel, + chainlistData: { isTestnet: chainlistChain.isTestnet } + }); + } + } + + if (graphRel.kind === 'l2Of' && graphRel.chainId) { + if (!chain.tags.includes('L2')) { + errors.push({ + rule: 1, + chainId: chain.chainId, + chainName: chain.name, + type: 'relation_tag_conflict', + message: `Chain ${chain.chainId} (${chain.name}) has l2Of relation but is not tagged as L2`, + graphRelation: graphRel + }); + } + } + }); +} + +function validateRule2Slip44Mismatch(chain, errors) { + const chainlistChain = getChainFromSource(chain.chainId, 'chainlist'); + const chainsChain = getChainFromSource(chain.chainId, 'chains'); + + if (chainlistChain?.slip44 === 1 && chainlistChain.isTestnet === false) { + errors.push({ + rule: 2, + chainId: chain.chainId, + chainName: chain.name, + type: 'slip44_testnet_mismatch', + message: `Chain ${chain.chainId} (${chain.name}) has slip44=1 (testnet indicator) but isTestnet=false in chainlist`, + slip44: chainlistChain.slip44, + isTestnet: chainlistChain.isTestnet + }); + } + + if (chainsChain?.slip44 === 1 && !chain.tags.includes('Testnet')) { + errors.push({ + rule: 2, + chainId: chain.chainId, + chainName: chain.name, + type: 'slip44_testnet_mismatch', + message: `Chain ${chain.chainId} (${chain.name}) has slip44=1 (testnet indicator) in chains.json but not tagged as Testnet`, + slip44: chainsChain.slip44, + tags: chain.tags + }); + } +} + +function validateRule3NameTestnetMismatch(chain, errors) { + const fullName = chain.theGraph?.fullName || chain.name || ''; + const nameLower = fullName.toLowerCase(); + + if ((nameLower.includes('testnet') || nameLower.includes('devnet')) && !chain.tags.includes('Testnet')) { + errors.push({ + rule: 3, + chainId: chain.chainId, + chainName: chain.name, + type: 'name_testnet_mismatch', + message: `Chain ${chain.chainId} (${chain.name}) has "Testnet" or "Devnet" in full name "${fullName}" but not tagged as Testnet`, + 
fullName, + tags: chain.tags + }); + } +} + +function validateRule4SepoliaHoodie(chain, errors) { + const fullName = chain.theGraph?.fullName || chain.name || ''; + const nameLower = fullName.toLowerCase(); + + if (nameLower.includes('sepolia') || nameLower.includes('hoodie')) { + const hasL2Tag = chain.tags.includes('L2'); + const hasRelations = chain.relations && chain.relations.length > 0; + + if (!hasL2Tag && !hasRelations) { + errors.push({ + rule: 4, + chainId: chain.chainId, + chainName: chain.name, + type: 'sepolia_hoodie_no_l2_or_relations', + message: `Chain ${chain.chainId} (${chain.name}) contains "sepolia" or "hoodie" but not tagged as L2 and has no relations`, + fullName, + tags: chain.tags, + relations: chain.relations + }); + } + } +} + +function validateRule5StatusConflicts(chain, errors) { + const chainlistChain = getChainFromSource(chain.chainId, 'chainlist'); + const chainsChain = getChainFromSource(chain.chainId, 'chains'); + + const statuses = []; + if (chainlistChain?.status) statuses.push({ source: 'chainlist', status: chainlistChain.status }); + if (chainsChain?.status) statuses.push({ source: 'chains', status: chainsChain.status }); + + const deprecatedInSources = statuses.filter(s => s.status === 'deprecated'); + const activeInSources = statuses.filter(s => s.status === 'active'); + + if (deprecatedInSources.length > 0 && activeInSources.length > 0) { + errors.push({ + rule: 5, + chainId: chain.chainId, + chainName: chain.name, + type: 'status_conflict', + message: `Chain ${chain.chainId} (${chain.name}) has conflicting status across sources`, + statuses + }); + } + + return statuses; +} + +function validateRule6GoerliDeprecated(chain, statuses, errors) { + const fullName = chain.theGraph?.fullName || chain.name || ''; + const nameLower = fullName.toLowerCase(); + + if (!nameLower.includes('goerli')) return; + + const chainlistChain = getChainFromSource(chain.chainId, 'chainlist'); + const chainsChain = getChainFromSource(chain.chainId, 
'chains'); + + const isDeprecated = chain.status === 'deprecated' || + chainlistChain?.status === 'deprecated' || + chainsChain?.status === 'deprecated'; + + if (!isDeprecated) { + errors.push({ + rule: 6, + chainId: chain.chainId, + chainName: chain.name, + type: 'goerli_not_deprecated', + message: `Chain ${chain.chainId} (${chain.name}) contains "Goerli" but is not marked as deprecated`, + fullName, + status: chain.status, + statusInSources: statuses + }); + } +} + +function validateRule7L2BeatMissingClassification(chain, errors) { + if (!chain.l2Beat) return; + + // L2BEAT classifies the chain as a scaling solution. If no other source has + // also marked it (via an l2Of/testnetOf relation from theGraph or chains), + // then L2BEAT is alone — the upstream chain registries may be stale. + const otherSourceConfirms = (chain.relations || []).some(r => + (r.kind === 'l2Of' || r.kind === 'testnetOf') && + (r.source === 'theGraph' || r.source === 'chains') + ); + + if (!otherSourceConfirms) { + errors.push({ + rule: 7, + chainId: chain.chainId, + chainName: chain.name, + type: 'l2beat_missing_classification', + message: `Chain ${chain.chainId} (${chain.name}) is classified by L2BEAT (stage: ${chain.l2Beat.stage || 'n/a'}, category: ${chain.l2Beat.category || 'n/a'}) but no l2Of/testnetOf relation from theGraph or chains confirms it`, + l2BeatStage: chain.l2Beat.stage, + l2BeatCategory: chain.l2Beat.category, + l2BeatSlug: chain.l2Beat.slug + }); + } +} + +function validateRule8L2BeatHostChainNoRelation(chain, errors) { + if (!chain.l2Beat?.hostChainId) return; + + const hostId = chain.l2Beat.hostChainId; + const matchingRelation = (chain.relations || []).find(r => + (r.kind === 'l2Of' || r.kind === 'testnetOf') && r.chainId === hostId + ); + + if (!matchingRelation) { + errors.push({ + rule: 8, + chainId: chain.chainId, + chainName: chain.name, + type: 'l2beat_hostchain_no_relation', + message: `Chain ${chain.chainId} (${chain.name}) has L2BEAT hostChainId=${hostId} 
but no l2Of/testnetOf relation pointing to it`, + l2BeatHostChainId: hostId, + existingRelationTargets: (chain.relations || []) + .filter(r => r.kind === 'l2Of' || r.kind === 'testnetOf') + .map(r => ({ kind: r.kind, chainId: r.chainId })) + }); + } +} + +function validateRule9L2BeatCategoryNameMismatch(chain, errors) { + if (!chain.l2Beat?.category) return; + + const fullName = (chain.l2Beat.displayName || chain.theGraph?.fullName || chain.name || '').toLowerCase(); + const category = chain.l2Beat.category.toLowerCase(); + + const nameLooksZk = fullName.includes('zk') || fullName.includes('zero-knowledge'); + const nameLooksOptimistic = fullName.includes('optimistic') || fullName.includes('optimism'); + + let mismatchReason = null; + if (category.includes('zk') && nameLooksOptimistic && !nameLooksZk) { + mismatchReason = `L2BEAT category "${chain.l2Beat.category}" but name suggests optimistic`; + } else if (category.includes('optimistic') && nameLooksZk && !nameLooksOptimistic) { + mismatchReason = `L2BEAT category "${chain.l2Beat.category}" but name suggests ZK`; + } + + if (mismatchReason) { + errors.push({ + rule: 9, + chainId: chain.chainId, + chainName: chain.name, + type: 'l2beat_category_name_mismatch', + message: `Chain ${chain.chainId} (${chain.name}): ${mismatchReason}`, + l2BeatCategory: chain.l2Beat.category, + fullName: chain.l2Beat.displayName || chain.theGraph?.fullName || chain.name + }); + } +} + +const L2BEAT_HIGH_TVS_THRESHOLD_USD = 1_000_000_000; + +function validateRule11L2BeatStageZeroHighTvs(chain, errors) { + if (!chain.l2Beat) return; + if (chain.l2Beat.stage !== 'Stage 0') return; + if (typeof chain.l2Beat.tvs !== 'number') return; + if (chain.l2Beat.tvs < L2BEAT_HIGH_TVS_THRESHOLD_USD) return; + + errors.push({ + rule: 11, + chainId: chain.chainId, + chainName: chain.name, + type: 'l2beat_stage_zero_high_tvs', + message: `Chain ${chain.chainId} (${chain.name}) has Stage 0 classification but TVS of $${(chain.l2Beat.tvs / 1e9).toFixed(2)}B 
— risk signal worth surfacing`, + l2BeatStage: chain.l2Beat.stage, + l2BeatTvs: chain.l2Beat.tvs + }); +} + +/** + * Rule 10 is global: iterates over L2BEAT's raw project list rather than + * per-chain, so it can flag projects whose chainId isn't in our registry. + */ +function validateRule10L2BeatUnknownChains(errors) { + const projects = cachedData.l2beat?.projects; + if (!Array.isArray(projects) || projects.length === 0) return; + + for (const project of projects) { + if (project.chainId === null || project.chainId === undefined) continue; + if (cachedData.indexed.byChainId[project.chainId]) continue; + + errors.push({ + rule: 10, + chainId: project.chainId, + chainName: project.displayName, + type: 'l2beat_unknown_chain', + message: `L2BEAT lists chainId ${project.chainId} (${project.displayName || project.slug}) but it's not in our chain registry`, + l2BeatSlug: project.slug, + l2BeatStage: project.stage, + l2BeatCategory: project.category + }); + } +} + +const RPC_BLOCK_HEIGHT_DRIFT_THRESHOLD = 100; + +function validateRule12RpcBlockHeightDrift(chain, errors) { + const results = cachedData.rpcHealth?.[chain.chainId]; + if (!Array.isArray(results) || results.length < 2) return; + + const heights = results + .filter(r => r.ok && typeof r.blockHeight === 'number') + .map(r => ({ url: r.url, blockHeight: r.blockHeight })); + + if (heights.length < 2) return; + + let min = heights[0]; + let max = heights[0]; + for (const h of heights) { + if (h.blockHeight < min.blockHeight) min = h; + if (h.blockHeight > max.blockHeight) max = h; + } + + const drift = max.blockHeight - min.blockHeight; + if (drift > RPC_BLOCK_HEIGHT_DRIFT_THRESHOLD) { + errors.push({ + rule: 12, + chainId: chain.chainId, + chainName: chain.name, + type: 'rpc_block_height_drift', + message: `Chain ${chain.chainId} (${chain.name}) has working RPC endpoints reporting block heights ${drift} blocks apart — likely a stuck or forked endpoint`, + drift, + threshold: RPC_BLOCK_HEIGHT_DRIFT_THRESHOLD, + 
laggingEndpoint: min, + leadingEndpoint: max + }); + } +} + +function normalizeChainName(name) { + return (name || '') + .toLowerCase() + .replace(/\bmainnet\b/g, '') + .replace(/[^a-z0-9]/g, '') + .trim(); +} + +/** + * Heuristic check — known false positives and false negatives: + * - "BNB" / "BNB Smart Chain" → suppressed (substring relationship) + * - "Polygon" / "Matic Network" → flagged correctly + * - "Optimism" / "OP Mainnet" → suppressed (substring after normalization) + * - Single-letter typos in long names may slip through (substring still matches) + * Treat results as advisory: investigate, don't fail builds on rule-13 hits. + * Levenshtein-based variant deferred until we see real production false-rates. + */ +function validateRule13NameDisagreement(chain, errors) { + if (!chain.theGraph?.fullName) return; + if (!Array.isArray(chain.sources) || !chain.sources.includes('chains')) return; + + const chainsName = chain.name; + const theGraphName = chain.theGraph.fullName; + + const a = normalizeChainName(chainsName); + const b = normalizeChainName(theGraphName); + + if (!a || !b || a === b) return; + if (a.includes(b) || b.includes(a)) return; + + errors.push({ + rule: 13, + chainId: chain.chainId, + chainName: chain.name, + type: 'name_disagreement', + severity: 'info', // advisory; the rule is a substring-based heuristic + message: `Chain ${chain.chainId}: chains.json name "${chainsName}" disagrees with theGraph fullName "${theGraphName}"`, + chainsName, + theGraphName + }); +} + +function validateRule14NativeCurrencyMismatch(chain, errors) { + const chainsSymbol = chain.nativeCurrency?.symbol; + const theGraphSymbol = chain.theGraph?.nativeToken; + + if (!chainsSymbol || !theGraphSymbol) return; + if (chainsSymbol.toUpperCase() === theGraphSymbol.toUpperCase()) return; + + errors.push({ + rule: 14, + chainId: chain.chainId, + chainName: chain.name, + type: 'native_currency_mismatch', + message: `Chain ${chain.chainId} (${chain.name}): native currency 
symbol mismatch — chains.json="${chainsSymbol}", theGraph="${theGraphSymbol}"`, + chainsSymbol, + theGraphSymbol + }); +} + +function validateRule15Slip44NativeSymbolMismatch(chain, errors) { + const slip44Symbol = chain.slip44Info?.symbol; + const nativeSymbol = chain.nativeCurrency?.symbol; + + if (!slip44Symbol || !nativeSymbol) return; + if (slip44Symbol.toUpperCase() === nativeSymbol.toUpperCase()) return; + + errors.push({ + rule: 15, + chainId: chain.chainId, + chainName: chain.name, + type: 'slip44_native_symbol_mismatch', + message: `Chain ${chain.chainId} (${chain.name}): SLIP-44 symbol "${slip44Symbol}" disagrees with native currency symbol "${nativeSymbol}"`, + slip44Symbol, + nativeSymbol, + slip44CoinType: chain.slip44Info?.coinType + }); +} + +function extractRpcUrls(rpcArray) { + if (!Array.isArray(rpcArray)) return new Set(); + return new Set( + rpcArray + .map(r => (typeof r === 'string' ? r : r?.url)) + .filter(url => typeof url === 'string' && url.length > 0) + ); +} + +function rawSourceRpcUrls(chainId, source) { + const raw = source === 'chains' ? 
cachedData.chains : cachedData.chainlist; + if (!Array.isArray(raw)) return new Set(); + const entry = raw.find(c => c?.chainId === chainId); + return extractRpcUrls(entry?.rpc); +} + +function isUrlHealthy(chainId, url) { + const results = cachedData.rpcHealth?.[chainId]; + if (!Array.isArray(results)) return false; + return results.some(r => r.url === url && r.ok); +} + +function validateRule16RpcUrlInOneSourceOnly(chain, errors) { + if (!Array.isArray(chain.sources)) return; + if (!chain.sources.includes('chainlist') || !chain.sources.includes('chains')) return; + + const chainlistUrls = rawSourceRpcUrls(chain.chainId, 'chainlist'); + const chainsUrls = rawSourceRpcUrls(chain.chainId, 'chains'); + if (chainlistUrls.size === 0 || chainsUrls.size === 0) return; + + const onlyInChainlistHealthy = []; + for (const url of chainlistUrls) { + if (!chainsUrls.has(url) && isUrlHealthy(chain.chainId, url)) { + onlyInChainlistHealthy.push(url); + } + } + const onlyInChainsHealthy = []; + for (const url of chainsUrls) { + if (!chainlistUrls.has(url) && isUrlHealthy(chain.chainId, url)) { + onlyInChainsHealthy.push(url); + } + } + + if (onlyInChainlistHealthy.length === 0 && onlyInChainsHealthy.length === 0) return; + + errors.push({ + rule: 16, + chainId: chain.chainId, + chainName: chain.name, + type: 'rpc_url_in_one_source_only', + message: `Chain ${chain.chainId} (${chain.name}) has healthy RPC URLs present in one source only — the other source may need updating`, + onlyInChainlistHealthy, + onlyInChainsHealthy + }); +} + +function validateChain(chain, errors) { + validateRule1RelationConflicts(chain, errors); + validateRule2Slip44Mismatch(chain, errors); + validateRule3NameTestnetMismatch(chain, errors); + validateRule4SepoliaHoodie(chain, errors); + const statuses = validateRule5StatusConflicts(chain, errors); + validateRule6GoerliDeprecated(chain, statuses, errors); + validateRule7L2BeatMissingClassification(chain, errors); + 
validateRule8L2BeatHostChainNoRelation(chain, errors); + validateRule9L2BeatCategoryNameMismatch(chain, errors); + validateRule11L2BeatStageZeroHighTvs(chain, errors); + validateRule12RpcBlockHeightDrift(chain, errors); + validateRule13NameDisagreement(chain, errors); + validateRule14NativeCurrencyMismatch(chain, errors); + validateRule15Slip44NativeSymbolMismatch(chain, errors); + validateRule16RpcUrlInOneSourceOnly(chain, errors); +} + +export function validateChainData() { + if (!cachedData.indexed || !cachedData.theGraph || !cachedData.chainlist || !cachedData.chains) { + return { + error: 'Data not loaded. Please reload data sources first.', + errors: [] + }; + } + + const errors = []; + + Object.values(cachedData.indexed.byChainId).forEach(chain => { + validateChain(chain, errors); + }); + + // Rule 10 is global (iterates L2BEAT projects, not chains). + validateRule10L2BeatUnknownChains(errors); + + const errorsByRule = { + rule1_relation_conflicts: errors.filter(e => e.rule === 1), + rule2_slip44_testnet_mismatch: errors.filter(e => e.rule === 2), + rule3_name_testnet_mismatch: errors.filter(e => e.rule === 3), + rule4_sepolia_hoodie_issues: errors.filter(e => e.rule === 4), + rule5_status_conflicts: errors.filter(e => e.rule === 5), + rule6_goerli_not_deprecated: errors.filter(e => e.rule === 6), + rule7_l2beat_missing_classification: errors.filter(e => e.rule === 7), + rule8_l2beat_hostchain_no_relation: errors.filter(e => e.rule === 8), + rule9_l2beat_category_name_mismatch: errors.filter(e => e.rule === 9), + rule10_l2beat_unknown_chains: errors.filter(e => e.rule === 10), + rule11_l2beat_stage_zero_high_tvs: errors.filter(e => e.rule === 11), + rule12_rpc_block_height_drift: errors.filter(e => e.rule === 12), + rule13_name_disagreement: errors.filter(e => e.rule === 13), + rule14_native_currency_mismatch: errors.filter(e => e.rule === 14), + rule15_slip44_native_symbol_mismatch: errors.filter(e => e.rule === 15), + rule16_rpc_url_in_one_source_only: 
errors.filter(e => e.rule === 16) + }; + + return { + totalErrors: errors.length, + errorsByRule, + summary: { + rule1: errorsByRule.rule1_relation_conflicts.length, + rule2: errorsByRule.rule2_slip44_testnet_mismatch.length, + rule3: errorsByRule.rule3_name_testnet_mismatch.length, + rule4: errorsByRule.rule4_sepolia_hoodie_issues.length, + rule5: errorsByRule.rule5_status_conflicts.length, + rule6: errorsByRule.rule6_goerli_not_deprecated.length, + rule7: errorsByRule.rule7_l2beat_missing_classification.length, + rule8: errorsByRule.rule8_l2beat_hostchain_no_relation.length, + rule9: errorsByRule.rule9_l2beat_category_name_mismatch.length, + rule10: errorsByRule.rule10_l2beat_unknown_chains.length, + rule11: errorsByRule.rule11_l2beat_stage_zero_high_tvs.length, + rule12: errorsByRule.rule12_rpc_block_height_drift.length, + rule13: errorsByRule.rule13_name_disagreement.length, + rule14: errorsByRule.rule14_native_currency_mismatch.length, + rule15: errorsByRule.rule15_slip44_native_symbol_mismatch.length, + rule16: errorsByRule.rule16_rpc_url_in_one_source_only.length + }, + allErrors: errors + }; +} diff --git a/src/sources/l2beat.js b/src/sources/l2beat.js new file mode 100644 index 0000000..22b2108 --- /dev/null +++ b/src/sources/l2beat.js @@ -0,0 +1,143 @@ +import { readFile } from 'node:fs/promises'; +import { dirname, join } from 'node:path'; +import { fileURLToPath } from 'node:url'; +import { DATA_SOURCE_L2BEAT_API, L2BEAT_FETCH_TIMEOUT_MS } from '../../config.js'; +import { proxyFetch } from '../../fetchUtil.js'; +import { logger } from '../util/logger.js'; + +const __dir = dirname(fileURLToPath(import.meta.url)); +const FALLBACK_PATH = join(__dir, '..', '..', 'data', 'l2beat-fallback.json'); + +/** + * Fetch L2BEAT scaling-summary data, with graceful fallback to a checked-in + * static snapshot when the live API is unreachable (403, timeout, network). 
+ * + * Returns: { source: 'live'|'fallback'|'unavailable', fetchedAt, projects: [] } + */ +export async function fetchL2Beat() { + const live = await fetchLive(); + if (live) return live; + return loadFallback(); +} + +async function fetchLive() { + const controller = new AbortController(); + const timer = setTimeout(() => controller.abort(), L2BEAT_FETCH_TIMEOUT_MS); + try { + const response = await proxyFetch(DATA_SOURCE_L2BEAT_API, { signal: controller.signal }); + if (!response.ok) { + logger.warn({ status: response.status }, 'L2BEAT live fetch failed; falling back to static snapshot'); + return null; + } + const json = await response.json(); + const projects = normalizeL2BeatResponse(json); + return { source: 'live', fetchedAt: new Date().toISOString(), projects }; + } catch (err) { + const reason = err.name === 'AbortError' ? `timeout after ${L2BEAT_FETCH_TIMEOUT_MS}ms` : err.message; + logger.warn({ reason }, 'L2BEAT live fetch failed; falling back to static snapshot'); + return null; + } finally { + clearTimeout(timer); + } +} + +async function loadFallback() { + try { + const raw = await readFile(FALLBACK_PATH, 'utf8'); + const data = JSON.parse(raw); + const projects = Array.isArray(data?.projects) ? data.projects : []; + return { source: 'fallback', fetchedAt: data?.fetchedAt ?? null, projects }; + } catch (err) { + logger.warn({ err: err.message }, 'L2BEAT fallback unavailable'); + return { source: 'unavailable', fetchedAt: null, projects: [] }; + } +} + +/** + * Normalize L2BEAT's scaling-summary payload to a stable internal shape. + * Defensive about field names because L2BEAT's site contract is undocumented. 
+ */ +export function normalizeL2BeatResponse(json) { + const projects = extractProjectsArray(json); + + return projects + .map(normalizeProject) + .filter(p => p.slug && p.chainId !== null && p.chainId !== undefined); +} + +function extractProjectsArray(json) { + if (Array.isArray(json?.projects)) return json.projects; + if (Array.isArray(json?.data?.projects)) return json.data.projects; + if (Array.isArray(json?.data)) return json.data; + if (Array.isArray(json)) return json; + return []; +} + +function normalizeProject(p) { + return { + slug: p.slug ?? p.id ?? p.display?.slug ?? null, + displayName: p.name ?? p.display?.name ?? p.displayName ?? null, + chainId: extractChainId(p), + category: p.category ?? p.type ?? null, + stage: extractStage(p), + stack: p.stack ?? p.providerName ?? p.display?.stack ?? null, + daLayer: extractDaLayer(p), + hostChainId: p.hostChain?.chainId ?? p.hostChainId ?? null, + purposes: Array.isArray(p.purposes) ? p.purposes : [], + tvs: extractTvs(p), + tvsBreakdown: p.tvs?.breakdown ?? p.tvsBreakdown ?? null, + activity: p.activity ?? null, + links: p.links ?? p.display?.links ?? null, + riskView: p.riskView ?? null, + milestones: Array.isArray(p.milestones) ? p.milestones : null + }; +} + +function extractChainId(p) { + const candidate = p.chainId + ?? p.chainConfig?.chainId + ?? p.chains?.[0]?.chainId + ?? p.eip155Id; + return coerceChainId(candidate); +} + +/** + * Coerce an L2BEAT-provided chainId to a finite integer. Handles numbers, + * decimal strings ("8453"), and CAIP-2 strings ("eip155:8453"). Returns + * null for anything we can't represent as a safe integer so downstream + * indexing stays consistent. + */ +function coerceChainId(value) { + if (value === null || value === undefined) return null; + if (typeof value === 'number') { + return Number.isInteger(value) && Number.isSafeInteger(value) ? 
value : null; + } + if (typeof value === 'string') { + const match = value.match(/^(?:eip155:)?(\d+)$/); + if (!match) return null; + const n = Number(match[1]); + return Number.isSafeInteger(n) ? n : null; + } + return null; +} + +function extractStage(p) { + if (typeof p.stage === 'string') return p.stage; + if (typeof p.stage?.stage === 'string') return p.stage.stage; + if (typeof p.stage?.value === 'string') return p.stage.value; + return null; +} + +function extractDaLayer(p) { + if (typeof p.daLayer === 'string') return p.daLayer; + if (typeof p.daLayer?.name === 'string') return p.daLayer.name; + if (typeof p.dataAvailability?.layer === 'string') return p.dataAvailability.layer; + return null; +} + +function extractTvs(p) { + if (typeof p.tvs === 'number') return p.tvs; + if (typeof p.tvs?.total === 'number') return p.tvs.total; + if (typeof p.tvs?.breakdown?.total === 'number') return p.tvs.breakdown.total; + return null; +} diff --git a/src/sources/slip44.js b/src/sources/slip44.js new file mode 100644 index 0000000..fbc42a1 --- /dev/null +++ b/src/sources/slip44.js @@ -0,0 +1,37 @@ +/** + * Parse SLIP-0044 markdown to extract coin types. 
+ * Table structure: | Coin type | Path component | Symbol | Coin | + */ +export function parseSLIP44(markdown) { + if (!markdown) return {}; + + const slip44Data = {}; + const lines = markdown.split('\n'); + let inTable = false; + + for (const line of lines) { + const trimmed = line.trim(); + if (!trimmed.startsWith('|') || !line.includes('|')) continue; + + const cells = line.split('|').map(cell => cell.trim()).filter(Boolean); + + if (cells[0] === 'Coin type' || cells[0].includes('-')) { + inTable = true; + continue; + } + + if (!inTable || cells.length < 4) continue; + + const coinTypeNum = Number.parseInt(cells[0], 10); + if (Number.isNaN(coinTypeNum)) continue; + + slip44Data[coinTypeNum] = { + coinType: coinTypeNum, + pathComponent: cells[1], + symbol: cells[2], + coin: cells[3] + }; + } + + return slip44Data; +} diff --git a/src/store/cache.js b/src/store/cache.js new file mode 100644 index 0000000..38371ae --- /dev/null +++ b/src/store/cache.js @@ -0,0 +1,30 @@ +export const cachedData = { + theGraph: null, + chainlist: null, + chains: null, + slip44: null, + l2beat: null, + indexed: null, + lastUpdated: null, + rpcHealth: {}, + lastRpcCheck: null +}; + +export function applyDataToCache(data) { + cachedData.theGraph = data.theGraph ?? null; + cachedData.chainlist = data.chainlist ?? null; + cachedData.chains = data.chains ?? null; + // Preserve null vs {} distinction so /sources can report whether SLIP-0044 + // actually loaded vs returned no rows. Defaults to {} only when caller + // didn't pass slip44 at all (e.g. test seeds). + cachedData.slip44 = data.slip44 === undefined ? {} : data.slip44; + cachedData.l2beat = data.l2beat ?? null; + cachedData.indexed = data.indexed ?? null; + cachedData.lastUpdated = data.lastUpdated ?? null; + cachedData.rpcHealth = data.rpcHealth ?? {}; + cachedData.lastRpcCheck = data.lastRpcCheck ?? 
null; +} + +export function getCachedData() { + return cachedData; +} diff --git a/src/store/indexer.js b/src/store/indexer.js new file mode 100644 index 0000000..41a7a8a --- /dev/null +++ b/src/store/indexer.js @@ -0,0 +1,526 @@ +/** + * Build a mapping of network IDs to chain IDs from The Graph data + */ +function buildNetworkIdToChainIdMap(theGraph) { + const networkIdToChainId = {}; + + if (Array.isArray(theGraph?.networks)) { + theGraph.networks.forEach(network => { + // Extract chain ID from caip2Id (format: "eip155:1" or "beacon:11155111") + // Note: only numeric chain IDs are mapped; named beacon chains + // (e.g. "beacon:mainnet") still add tags via relations. + if (network.caip2Id) { + const match = network.caip2Id.match(/^(?:eip155|beacon):(\d+)$/); + if (match) { + const chainId = Number.parseInt(match[1], 10); + networkIdToChainId[network.id] = chainId; + } + } + }); + } + + return networkIdToChainId; +} + +function addBeaconTagToTargetChain(indexed, targetChainId) { + if (targetChainId !== undefined && indexed.byChainId[targetChainId]) { + if (!indexed.byChainId[targetChainId].tags) { + indexed.byChainId[targetChainId].tags = []; + } + if (!indexed.byChainId[targetChainId].tags.includes('Beacon')) { + indexed.byChainId[targetChainId].tags.push('Beacon'); + } + } +} + +function getBridgeUrl(bridge) { + if (typeof bridge === 'string') return bridge; + return bridge?.url ?? 
null; +} + +function mergeBridges(chain, newBridges) { + if (!newBridges || !Array.isArray(newBridges)) return; + + if (!chain.bridges) chain.bridges = []; + + const existingBridgeUrls = new Set( + chain.bridges.map(getBridgeUrl).filter(url => url !== null) + ); + + newBridges.forEach(bridge => { + const url = getBridgeUrl(bridge); + if (url && !existingBridgeUrls.has(url)) { + chain.bridges.push(bridge); + existingBridgeUrls.add(url); + } + }); +} + +function processL2ParentRelation(chain, indexed) { + if (chain.parent?.type !== 'L2' || !chain.parent?.chain) return; + + const match = chain.parent.chain.match(/^eip155-(\d+)$/); + if (!match) return; + + const chainId = chain.chainId; + const parentChainId = Number.parseInt(match[1], 10); + + if (!indexed.byChainId[chainId]) return; + + if (!indexed.byChainId[chainId].tags.includes('L2')) { + indexed.byChainId[chainId].tags.push('L2'); + } + + const existingRelation = indexed.byChainId[chainId].relations.find( + r => r.kind === 'l2Of' && r.chainId === parentChainId + ); + + if (!existingRelation) { + indexed.byChainId[chainId].relations.push({ + kind: 'l2Of', + network: chain.parent.chain, + chainId: parentChainId, + source: 'chains' + }); + } + + mergeBridges(indexed.byChainId[chainId], chain.parent.bridges); +} + +function processTestnetParentRelation(chain, indexed) { + if (chain.parent?.type !== 'testnet' || !chain.parent?.chain) return; + + const match = chain.parent.chain.match(/^eip155-(\d+)$/); + if (!match) return; + + const chainId = chain.chainId; + const mainnetChainId = Number.parseInt(match[1], 10); + + if (!indexed.byChainId[chainId]) return; + + const existingRelation = indexed.byChainId[chainId].relations.find( + r => r.kind === 'testnetOf' && r.chainId === mainnetChainId + ); + + if (!existingRelation) { + indexed.byChainId[chainId].relations.push({ + kind: 'testnetOf', + network: chain.parent.chain, + chainId: mainnetChainId, + source: 'chains' + }); + } +} + +/** + * Merge RPC URLs from a source 
array into an existing chain's rpc array, + * deduplicating by URL string. + */ +function mergeRpcUrlsFromArray(existingChain, newRpcUrls) { + if (!newRpcUrls || !Array.isArray(newRpcUrls)) return; + + if (!existingChain.rpc) existingChain.rpc = []; + + const existingRpcUrls = new Set(); + existingChain.rpc.forEach(rpc => { + const url = typeof rpc === 'string' ? rpc : rpc.url; + if (url) existingRpcUrls.add(url); + }); + + newRpcUrls.forEach(rpc => { + const url = typeof rpc === 'string' ? rpc : rpc.url; + if (url && !existingRpcUrls.has(url)) { + existingChain.rpc.push(rpc); + existingRpcUrls.add(url); + } + }); +} + +function mergeChainlistEntry(chainData, indexed) { + const chainId = chainData.chainId; + + if (indexed.byChainId[chainId]) { + mergeRpcUrlsFromArray(indexed.byChainId[chainId], chainData.rpc); + + if (!indexed.byChainId[chainId].sources.includes('chainlist')) { + indexed.byChainId[chainId].sources.push('chainlist'); + } + + if (chainData.status && !indexed.byChainId[chainId].status) { + indexed.byChainId[chainId].status = chainData.status; + } + + if (chainData.slip44 !== undefined && indexed.byChainId[chainId].slip44 === undefined) { + indexed.byChainId[chainId].slip44 = chainData.slip44; + } + } else { + indexed.byChainId[chainId] = { + chainId: Number(chainId), + name: chainData.name, + rpc: chainData.rpc || [], + sources: ['chainlist'], + tags: [], + relations: [], + status: chainData.status || 'active', + ...(chainData.slip44 !== undefined && { slip44: chainData.slip44 }) + }; + } + + if (chainData.slip44 === 1 || chainData.isTestnet === true) { + if (!indexed.byChainId[chainId].tags.includes('Testnet')) { + indexed.byChainId[chainId].tags.push('Testnet'); + } + } +} + +function extractChainIdFromCaip2Id(caip2Id) { + if (!caip2Id) return null; + const match = caip2Id.match(/^eip155:(\d+)$/); + return match ? 
Number.parseInt(match[1], 10) : null; +} + +function createTheGraphChainEntry(chainId, network) { + return { + chainId, + name: network.fullName || network.shortName || network.id || 'Unknown', + shortName: network.shortName, + nativeCurrency: { symbol: network.nativeToken }, + rpc: network.rpcUrls || [], + explorers: network.explorerUrls || [], + sources: ['theGraph'], + tags: [], + relations: [], + status: 'active' + }; +} + +function processTheGraphRelation(relation, chainId, indexed, networkIdToChainId) { + const { kind, network: targetNetworkId } = relation; + const targetChainId = networkIdToChainId[targetNetworkId]; + + const relationData = { + kind, + network: targetNetworkId, + ...(targetChainId !== undefined && { chainId: targetChainId }), + source: 'theGraph' + }; + + indexed.byChainId[chainId].relations.push(relationData); + + if (kind === 'testnetOf' && !indexed.byChainId[chainId].tags.includes('Testnet')) { + indexed.byChainId[chainId].tags.push('Testnet'); + } else if (kind === 'l2Of' && !indexed.byChainId[chainId].tags.includes('L2')) { + indexed.byChainId[chainId].tags.push('L2'); + } else if (kind === 'beaconOf') { + addBeaconTagToTargetChain(indexed, targetChainId); + } +} + +function createOrMergeTheGraphChain(chainId, network, indexed) { + if (indexed.byChainId[chainId]) { + if (!indexed.byChainId[chainId].sources.includes('theGraph')) { + indexed.byChainId[chainId].sources.push('theGraph'); + } + mergeRpcUrlsFromArray(indexed.byChainId[chainId], network.rpcUrls); + + if (!indexed.byChainId[chainId].tags) indexed.byChainId[chainId].tags = []; + if (!indexed.byChainId[chainId].relations) indexed.byChainId[chainId].relations = []; + } else { + indexed.byChainId[chainId] = createTheGraphChainEntry(chainId, network); + } +} + +function addTestnetTagIfApplicable(chainId, network, indexed) { + if (network.networkType === 'testnet') { + if (!indexed.byChainId[chainId].tags.includes('Testnet')) { + indexed.byChainId[chainId].tags.push('Testnet'); + } + 
} +} + +function processTheGraphNetworkRelations(network, chainId, indexed, networkIdToChainId) { + if (network.relations && Array.isArray(network.relations)) { + network.relations.forEach(relation => { + processTheGraphRelation(relation, chainId, indexed, networkIdToChainId); + }); + } +} + +function addTheGraphSpecificData(chainId, network, indexed) { + indexed.byChainId[chainId].theGraph = { + id: network.id, + fullName: network.fullName, + shortName: network.shortName, + caip2Id: network.caip2Id, + aliases: network.aliases, + networkType: network.networkType, + services: network.services, + nativeToken: network.nativeToken + }; +} + +function addChainToNameIndex(chainId, network, indexed) { + const nameLower = (network.fullName || network.shortName || '').toLowerCase(); + if (nameLower && !indexed.byName[nameLower]) { + indexed.byName[nameLower] = []; + } + if (nameLower && !indexed.byName[nameLower].includes(chainId)) { + indexed.byName[nameLower].push(chainId); + } +} + +function processBeaconChainRelations(network, networkIdToChainId, indexed) { + if (network.relations && Array.isArray(network.relations)) { + network.relations.forEach(relation => { + if (relation.kind === 'beaconOf') { + const targetChainId = networkIdToChainId[relation.network]; + addBeaconTagToTargetChain(indexed, targetChainId); + } + }); + } +} + +function processTheGraphNetwork(network, indexed, networkIdToChainId) { + const chainId = extractChainIdFromCaip2Id(network.caip2Id); + const isBeaconChain = network.caip2Id?.startsWith('beacon:'); + + if (chainId !== null) { + createOrMergeTheGraphChain(chainId, network, indexed); + addTestnetTagIfApplicable(chainId, network, indexed); + processTheGraphNetworkRelations(network, chainId, indexed, networkIdToChainId); + addTheGraphSpecificData(chainId, network, indexed); + addChainToNameIndex(chainId, network, indexed); + } else if (isBeaconChain) { + processBeaconChainRelations(network, networkIdToChainId, indexed); + } +} + +function 
indexChainsSource(chains, indexed) { + if (!Array.isArray(chains)) return; + + chains.forEach(chain => { + const chainId = chain.chainId; + if (chainId === undefined) return; + + if (!indexed.byChainId[chainId]) { + indexed.byChainId[chainId] = { + chainId, + name: chain.name, + shortName: chain.shortName, + network: chain.network, + nativeCurrency: chain.nativeCurrency, + rpc: chain.rpc || [], + explorers: chain.explorers || [], + infoURL: chain.infoURL, + sources: ['chains'], + tags: [], + relations: [], + status: chain.status || 'active', + ...(chain.slip44 !== undefined && { slip44: chain.slip44 }) + }; + } else if (chain.slip44 !== undefined && indexed.byChainId[chainId].slip44 === undefined) { + indexed.byChainId[chainId].slip44 = chain.slip44; + } + + if (chain.slip44 === 1) { + if (!indexed.byChainId[chainId].tags.includes('Testnet')) { + indexed.byChainId[chainId].tags.push('Testnet'); + } + } + + const nameLower = (chain.name || '').toLowerCase(); + if (!indexed.byName[nameLower]) indexed.byName[nameLower] = []; + indexed.byName[nameLower].push(chainId); + }); + + chains.forEach(chain => { + if (chain.chainId !== undefined) { + processL2ParentRelation(chain, indexed); + processTestnetParentRelation(chain, indexed); + } + }); +} + +function indexChainlistSource(chainlist, indexed) { + if (!chainlist || !Array.isArray(chainlist)) return; + + chainlist.forEach(chainData => { + const chainId = chainData.chainId; + if (chainId === undefined || chainId === null || Number.isNaN(Number(chainId))) return; + mergeChainlistEntry(chainData, indexed); + }); + + chainlist.forEach(chainData => { + const chainId = chainData.chainId; + if (chainId === undefined || chainId === null || Number.isNaN(Number(chainId))) return; + if (indexed.byChainId[chainId] && chainData.parent?.bridges) { + mergeBridges(indexed.byChainId[chainId], chainData.parent.bridges); + } + }); +} + +function indexTheGraphSource(theGraph, indexed, networkIdToChainId) { + if 
(Array.isArray(theGraph?.networks)) { + theGraph.networks.forEach(network => { + processTheGraphNetwork(network, indexed, networkIdToChainId); + }); + } +} + +function attachSlip44Info(slip44, indexed) { + if (!slip44) return; + Object.keys(indexed.byChainId).forEach(chainId => { + const chain = indexed.byChainId[chainId]; + if (chain.slip44 !== undefined && slip44[chain.slip44]) { + chain.slip44Info = slip44[chain.slip44]; + } + }); +} + +function applyDefaultStatus(indexed) { + Object.keys(indexed.byChainId).forEach(chainId => { + const chain = indexed.byChainId[chainId]; + if (!chain.status) chain.status = 'active'; + }); +} + +function addReverseRelations(indexed) { + Object.keys(indexed.byChainId).forEach(chainId => { + const chain = indexed.byChainId[chainId]; + if (!chain.relations || !Array.isArray(chain.relations)) return; + + chain.relations.forEach(relation => { + if (relation.kind === 'testnetOf' && relation.chainId !== undefined) { + const mainnetChain = indexed.byChainId[relation.chainId]; + if (mainnetChain) { + const existing = mainnetChain.relations.find( + r => r.kind === 'mainnetOf' && r.chainId === Number.parseInt(chainId, 10) + ); + if (!existing) { + mainnetChain.relations.push({ + kind: 'mainnetOf', + network: chain.name || chain.shortName || chainId.toString(), + chainId: Number.parseInt(chainId, 10), + source: relation.source + }); + } + } + } + + if (relation.kind === 'l2Of' && relation.chainId !== undefined) { + const parentChain = indexed.byChainId[relation.chainId]; + if (parentChain) { + const existing = parentChain.relations.find( + r => r.kind === 'parentOf' && r.chainId === Number.parseInt(chainId, 10) + ); + if (!existing) { + parentChain.relations.push({ + kind: 'parentOf', + network: chain.name || chain.shortName || chainId.toString(), + chainId: Number.parseInt(chainId, 10), + source: relation.source + }); + } + } + } + }); + }); +} + +// Tags that this function attaches solely because L2BEAT classified the chain. 
+// Listed here so the stale-cleanup pass can drop them when a project disappears. +const L2BEAT_DERIVED_TAGS = new Set(['L2', 'ZK', 'Validium', 'Optimium']); + +export function indexL2BeatSource(l2beat, indexed) { + // l2beat itself missing (e.g. data load skipped entirely) → no-op. + if (!l2beat) return; + + // Normalize project chainIds to numbers up front so all downstream + // comparisons (Set membership + byChainId lookups) use one type. + const projects = Array.isArray(l2beat.projects) ? l2beat.projects : []; + const normalizedProjects = projects + .map(p => ({ ...p, chainId: Number(p.chainId) })) + .filter(p => Number.isSafeInteger(p.chainId)); + const freshChainIds = new Set(normalizedProjects.map(p => p.chainId)); + + // Stale cleanup: a chain that previously had l2Beat data but isn't in the + // fresh project list (project removed from L2BEAT, or empty refresh) loses + // its l2Beat field, the 'l2beat' source, and any L2BEAT-attributable tags. + // Tag cleanup is conservative — only tags that this function is the sole + // emitter of are removed. + for (const chain of Object.values(indexed.byChainId)) { + if (chain.l2Beat && !freshChainIds.has(chain.chainId)) { + delete chain.l2Beat; + if (Array.isArray(chain.sources)) { + chain.sources = chain.sources.filter(s => s !== 'l2beat'); + } + if (Array.isArray(chain.tags)) { + chain.tags = chain.tags.filter(t => !L2BEAT_DERIVED_TAGS.has(t)); + } + } + } + + for (const project of normalizedProjects) { + const chain = indexed.byChainId[project.chainId]; + if (!chain) continue; + + chain.l2Beat = { + slug: project.slug, + displayName: project.displayName, + stage: project.stage, + category: project.category, + stack: project.stack, + daLayer: project.daLayer, + hostChainId: project.hostChainId, + purposes: project.purposes ?? 
[], + tvs: project.tvs, + tvsBreakdown: project.tvsBreakdown, + activity: project.activity, + links: project.links, + riskView: project.riskView, + milestones: project.milestones, + dataFreshness: l2beat.source, + fetchedAt: l2beat.fetchedAt + }; + + if (!Array.isArray(chain.tags)) chain.tags = []; + if (!chain.tags.includes('L2')) chain.tags.push('L2'); + if (project.category === 'ZK Rollup' && !chain.tags.includes('ZK')) { + chain.tags.push('ZK'); + } + if (project.category === 'Validium' && !chain.tags.includes('Validium')) { + chain.tags.push('Validium'); + } + if (project.category === 'Optimium' && !chain.tags.includes('Optimium')) { + chain.tags.push('Optimium'); + } + + if (!Array.isArray(chain.sources)) chain.sources = []; + if (!chain.sources.includes('l2beat')) chain.sources.push('l2beat'); + } +} + +/** + * Index all data into a searchable structure. + */ +export function indexData(theGraph, chainlist, chains, slip44, l2beat) { + const indexed = { + byChainId: {}, + byName: {}, + all: [] + }; + + const networkIdToChainId = buildNetworkIdToChainIdMap(theGraph); + + indexChainsSource(chains, indexed); + indexChainlistSource(chainlist, indexed); + indexTheGraphSource(theGraph, indexed, networkIdToChainId); + attachSlip44Info(slip44, indexed); + applyDefaultStatus(indexed); + addReverseRelations(indexed); + indexL2BeatSource(l2beat, indexed); + + indexed.all = Object.values(indexed.byChainId); + + return indexed; +} diff --git a/src/store/queries.js b/src/store/queries.js new file mode 100644 index 0000000..9730bd4 --- /dev/null +++ b/src/store/queries.js @@ -0,0 +1,190 @@ +import { cachedData } from './cache.js'; + +function getChainByIdRaw(chainId) { + if (!cachedData.indexed) return null; + return cachedData.indexed.byChainId[chainId] || null; +} + +function transformChain(chain) { + if (!chain) return null; + + const transformedChain = { + chainId: chain.chainId, + name: chain.name, + shortName: chain.shortName + }; + + if (chain.theGraph) { + 
transformedChain['theGraph-id'] = chain.theGraph.id; + transformedChain.fullName = chain.theGraph.fullName; + transformedChain.caip2Id = chain.theGraph.caip2Id; + if (chain.theGraph.aliases) { + transformedChain.aliases = chain.theGraph.aliases; + } + } + + if (chain.nativeCurrency) transformedChain.nativeCurrency = chain.nativeCurrency; + if (chain.explorers) transformedChain.explorers = chain.explorers; + if (chain.infoURL) transformedChain.infoURL = chain.infoURL; + if (chain.sources) transformedChain.sources = chain.sources; + if (chain.tags) transformedChain.tags = chain.tags; + if (chain.status) transformedChain.status = chain.status; + if (chain.bridges) transformedChain.bridges = chain.bridges; + if (chain.l2Beat) transformedChain.l2Beat = chain.l2Beat; + + return transformedChain; +} + +export function getChainById(chainId) { + return transformChain(getChainByIdRaw(chainId)); +} + +// Memoize getAllChains() so /chains, /scaling, /stats, etc. can hit the same +// transformed array within one data version without re-running transformChain +// over every entry. Keyed by cachedData.lastUpdated — invalidated automatically +// on loadData(); also invalidated when the cache is hot-merged (e.g. +// indexL2BeatSource adds fields without bumping lastUpdated). +let allChainsCache = { lastUpdated: null, lastL2BeatFetchedAt: null, value: null }; + +function invalidateAllChainsCacheIfStale() { + const current = { + lastUpdated: cachedData.lastUpdated, + lastL2BeatFetchedAt: cachedData.l2beat?.fetchedAt ?? 
null + }; + if ( + allChainsCache.lastUpdated !== current.lastUpdated || + allChainsCache.lastL2BeatFetchedAt !== current.lastL2BeatFetchedAt + ) { + allChainsCache = { ...current, value: null }; + } +} + +export function getAllChains() { + if (!cachedData.indexed) return []; + invalidateAllChainsCacheIfStale(); + if (allChainsCache.value === null) { + allChainsCache.value = cachedData.indexed.all.map(transformChain); + } + return allChainsCache.value; +} + +// Test-only helper. +export function _resetGetAllChainsCacheForTests() { + allChainsCache = { lastUpdated: null, lastL2BeatFetchedAt: null, value: null }; +} + +export function searchChains(query) { + if (!cachedData.indexed) return []; + + const results = []; + const queryLower = query.toLowerCase(); + + const parsedChainId = Number.parseInt(query, 10); + if (!Number.isNaN(parsedChainId)) { + const chain = getChainById(parsedChainId); + if (chain) results.push(chain); + } + + cachedData.indexed.all.forEach(chain => { + if (chain.name?.toLowerCase().includes(queryLower)) { + if (!results.some(r => r.chainId === chain.chainId)) { + results.push(getChainById(chain.chainId)); + } + } + if (chain.shortName?.toLowerCase().includes(queryLower)) { + if (!results.some(r => r.chainId === chain.chainId)) { + results.push(getChainById(chain.chainId)); + } + } + }); + + return results; +} + +export function countChainsByTag(chains) { + const totalChains = chains.length; + let totalTestnets = 0; + let totalL2s = 0; + let totalBeacons = 0; + let totalMainnets = 0; + + for (const chain of chains) { + const tags = chain.tags || []; + const isTestnet = tags.includes('Testnet'); + const isL2 = tags.includes('L2'); + const isBeacon = tags.includes('Beacon'); + + if (isTestnet) totalTestnets += 1; + if (isL2) totalL2s += 1; + if (isBeacon) totalBeacons += 1; + if (!isTestnet && !isL2 && !isBeacon) totalMainnets += 1; + } + + return { totalChains, totalMainnets, totalTestnets, totalL2s, totalBeacons }; +} + +function 
extractEndpoints(chain) { + if (!chain) return null; + + const endpoints = { + chainId: chain.chainId, + name: chain.name, + rpc: chain.rpc || [], + firehose: [], + substreams: [] + }; + + if (chain.theGraph?.services) { + if (chain.theGraph.services.firehose) { + endpoints.firehose = chain.theGraph.services.firehose; + } + if (chain.theGraph.services.substreams) { + endpoints.substreams = chain.theGraph.services.substreams; + } + } + + return endpoints; +} + +export function getEndpointsById(chainId) { + return extractEndpoints(getChainByIdRaw(chainId)); +} + +export function getAllEndpoints() { + if (!cachedData.indexed) return []; + return cachedData.indexed.all.map(extractEndpoints); +} + +function flattenRpcHealthResults() { + return Object.entries(cachedData.rpcHealth || {}).flatMap(([chainId, results]) => { + const numericChainId = Number.parseInt(chainId, 10); + const chainName = cachedData.indexed?.byChainId?.[numericChainId]?.name ?? `Chain ${chainId}`; + + return (Array.isArray(results) ? results : []).map((result) => ({ + chainId: numericChainId, + chainName, + url: result.url, + status: result.ok ? 'working' : 'failed', + clientVersion: result.clientVersion ?? null, + blockNumber: result.blockHeight ?? null, + latencyMs: result.latencyMs ?? null, + error: result.error ?? 
null + })); + }); +} + +export function getRpcMonitoringResults() { + const results = flattenRpcHealthResults(); + const workingEndpoints = results.filter(result => result.status === 'working').length; + const failedEndpoints = results.length - workingEndpoints; + + return { + lastUpdated: cachedData.lastRpcCheck, + totalEndpoints: results.length, + testedEndpoints: results.length, + workingEndpoints, + failedEndpoints, + results + }; +} + diff --git a/src/store/snapshot.js b/src/store/snapshot.js new file mode 100644 index 0000000..d24c3ee --- /dev/null +++ b/src/store/snapshot.js @@ -0,0 +1,93 @@ +import { mkdir, readFile, rename, rm, writeFile } from 'node:fs/promises'; +import { dirname, resolve } from 'node:path'; +import { DATA_CACHE_ENABLED, DATA_CACHE_FILE } from '../../config.js'; +import { logger } from '../util/logger.js'; + +const SNAPSHOT_SCHEMA_VERSION = 1; +const DATA_CACHE_PATH = resolve(DATA_CACHE_FILE); + +export { DATA_CACHE_PATH }; + +function isValidIndexedData(indexed) { + if (!indexed || typeof indexed !== 'object') return false; + return ( + Array.isArray(indexed.all) && + indexed.byChainId && + typeof indexed.byChainId === 'object' && + indexed.byName && + typeof indexed.byName === 'object' + ); +} + +function isValidSnapshot(snapshot) { + if (!snapshot || typeof snapshot !== 'object') return false; + if (snapshot.schemaVersion !== SNAPSHOT_SCHEMA_VERSION) return false; + if (typeof snapshot.writtenAt !== 'string') return false; + + const data = snapshot.data; + if (!data || typeof data !== 'object') return false; + if (!isValidIndexedData(data.indexed)) return false; + if (typeof data.lastUpdated !== 'string') return false; + + return true; +} + +function createSnapshotPayload(data) { + return { + schemaVersion: SNAPSHOT_SCHEMA_VERSION, + writtenAt: new Date().toISOString(), + data: { + theGraph: data.theGraph ?? null, + chainlist: data.chainlist ?? null, + chains: data.chains ?? 
null, + // Preserve null (fetch failed) vs {} (fetched, empty) so the freshness + // signal survives a snapshot round-trip. + slip44: data.slip44 === undefined ? {} : data.slip44, + l2beat: data.l2beat ?? null, + indexed: data.indexed ?? { byChainId: {}, byName: {}, all: [] }, + lastUpdated: data.lastUpdated ?? new Date().toISOString(), + rpcHealth: data.rpcHealth ?? {}, + lastRpcCheck: data.lastRpcCheck ?? null + } + }; +} + +export async function readSnapshotFromDisk() { + if (!DATA_CACHE_ENABLED) return null; + + try { + const raw = await readFile(DATA_CACHE_PATH, 'utf8'); + const parsed = JSON.parse(raw); + + if (!isValidSnapshot(parsed)) { + logger.warn({ path: DATA_CACHE_PATH }, 'Ignoring invalid cache snapshot'); + return null; + } + + return parsed.data; + } catch (error) { + if (error?.code === 'ENOENT') return null; + logger.warn({ path: DATA_CACHE_PATH, err: error.message }, 'Failed to read cache snapshot'); + return null; + } +} + +export async function writeSnapshotToDiskAtomic(data) { + if (!DATA_CACHE_ENABLED) return; + + const snapshot = createSnapshotPayload(data); + const tempPath = `${DATA_CACHE_PATH}.tmp-${process.pid}-${Date.now()}`; + + try { + await mkdir(dirname(DATA_CACHE_PATH), { recursive: true }); + await writeFile(tempPath, JSON.stringify(snapshot), 'utf8'); + await rename(tempPath, DATA_CACHE_PATH); + } catch (error) { + try { + await rm(tempPath, { force: true }); + } catch { + // best-effort temp cleanup + } + logger.warn({ path: DATA_CACHE_PATH, err: error.message }, 'Failed to persist cache snapshot'); + } +} diff --git a/src/transport/fetch.js b/src/transport/fetch.js new file mode 100644 index 0000000..7076eec --- /dev/null +++ b/src/transport/fetch.js @@ -0,0 +1,37 @@ +import { proxyFetch } from '../../fetchUtil.js'; +import { logger } from '../util/logger.js'; +import { incCounter } from '../util/metrics.js'; + +const SUPPORTED_FORMATS = new Set(['json', 'text']); + +/** + * Fetch JSON or text from a URL using proxyFetch. 
+ * Returns null on error rather than throwing, so loaders can use + * Promise.allSettled-style handling with consistent shapes. + */ +export async function fetchData(url, format = 'json') { + // Validate before issuing any network I/O so unsupported callers fail + // deterministically without a wasted outbound request. + if (!SUPPORTED_FORMATS.has(format)) { + logger.error({ url, format }, 'Unsupported fetch format'); + incCounter('chains_api_source_fetch_total', { url, outcome: 'bad_format' }); + return null; + } + + try { + const response = await proxyFetch(url); + if (!response.ok) { + throw new Error(`HTTP error! status: ${response.status}`); + } + + // Parse the body BEFORE incrementing the success counter so a body-parse + // failure doesn't double-count as both success and error in the catch. + const body = format === 'json' ? await response.json() : await response.text(); + incCounter('chains_api_source_fetch_total', { url, outcome: 'success' }); + return body; + } catch (error) { + logger.error({ url, err: error.message }, 'Source fetch failed'); + incCounter('chains_api_source_fetch_total', { url, outcome: 'error' }); + return null; + } +} diff --git a/src/util/logger.js b/src/util/logger.js new file mode 100644 index 0000000..bfb197d --- /dev/null +++ b/src/util/logger.js @@ -0,0 +1,14 @@ +import { pino } from 'pino'; + +/** + * Shared pino logger for modules outside the Fastify request lifecycle + * (sources, services, store). Fastify has its own request-scoped logger; + * use this one in background jobs and module-level code so log output stays + * structured and consistent (plain JSON; no pretty transport is configured + * here — pipe through pino-pretty locally for readable dev output). + * + * Level is controlled via LOG_LEVEL env var (default: 'info'). 
+ */ +export const logger = pino({ + level: process.env.LOG_LEVEL || 'info', + base: { component: 'chains-api' } +}); diff --git a/src/util/metrics.js b/src/util/metrics.js new file mode 100644 index 0000000..3f2dfb4 --- /dev/null +++ b/src/util/metrics.js @@ -0,0 +1,120 @@ +/** + * Lightweight, dependency-free Prometheus-format metrics. + * + * Tracks counters (monotonic) and gauges (point-in-time). Counters are + * incremented from anywhere; gauges are computed on /metrics scrape from + * the cache so they always reflect current state without an updater loop. + * + * Exposes incCounter() for recording and renderMetrics() for emitting + * Prometheus text exposition format + * (https://prometheus.io/docs/instrumenting/exposition_formats/). + */ + +const counters = new Map(); + +/** + * Escape a string for use as a Prometheus label value. Per the exposition + * format spec, label values must escape `\` (as `\\`), `"` (as `\"`), and + * newlines (as `\n`). Order matters: backslash first, otherwise the literal + * `\` inserted by the quote-escape would itself get re-escaped. + */ +function escapeLabelValue(value) { + return String(value) + .replace(/\\/g, '\\\\') + .replace(/"/g, '\\"') + .replace(/\n/g, '\\n'); +} + +function counterKey(name, labels) { + const labelStr = Object.entries(labels || {}) + .sort(([a], [b]) => a.localeCompare(b)) + .map(([k, v]) => `${k}="${escapeLabelValue(v)}"`) + .join(','); + return labelStr ? `${name}{${labelStr}}` : name; +} + +export function incCounter(name, labels = {}, value = 1) { + const key = counterKey(name, labels); + counters.set(key, (counters.get(key) || 0) + value); +} + +function formatCounters(lines) { + // Group by metric name for proper HELP/TYPE headers. 
+ const byName = new Map(); + for (const [key, value] of counters.entries()) { + const name = key.split('{')[0]; + if (!byName.has(name)) byName.set(name, []); + byName.get(name).push([key, value]); + } + for (const [name, entries] of byName.entries()) { + lines.push(`# HELP ${name} ${METRIC_HELP[name] || ''}`); + lines.push(`# TYPE ${name} counter`); + for (const [key, value] of entries) { + lines.push(`${key} ${value}`); + } + } +} + +const METRIC_HELP = { + chains_api_source_fetch_total: 'Number of source fetch attempts by source and outcome', + chains_api_refresh_total: 'Number of background refresh runs by refresher and outcome', + chains_api_rpc_check_total: 'Number of RPC endpoint health checks by outcome' +}; + +/** + * Emit Prometheus exposition format. Gauges are computed on the fly from + * the live cache to avoid drift. + */ +export function renderMetrics({ cache, rpcStatus, l2beatStatus, validationSummary }) { + const lines = []; + + formatCounters(lines); + + // Gauges + lines.push('# HELP chains_api_chains_total Total chains in the index'); + lines.push('# TYPE chains_api_chains_total gauge'); + lines.push(`chains_api_chains_total ${cache?.indexed?.all?.length ?? 0}`); + + lines.push('# HELP chains_api_source_loaded Source loaded status (1=loaded, 0=not)'); + lines.push('# TYPE chains_api_source_loaded gauge'); + lines.push(`chains_api_source_loaded{source="theGraph"} ${cache?.theGraph != null ? 1 : 0}`); + lines.push(`chains_api_source_loaded{source="chainlist"} ${cache?.chainlist != null ? 1 : 0}`); + lines.push(`chains_api_source_loaded{source="chains"} ${cache?.chains != null ? 1 : 0}`); + lines.push(`chains_api_source_loaded{source="slip44"} ${cache?.slip44 != null ? 1 : 0}`); + lines.push(`chains_api_source_loaded{source="l2beat"} ${cache?.l2beat?.projects?.length > 0 ? 
1 : 0}`); + + if (cache?.lastUpdated) { + const age = Math.max(0, Math.round((Date.now() - new Date(cache.lastUpdated).getTime()) / 1000)); + lines.push('# HELP chains_api_data_age_seconds Age of indexed data in seconds'); + lines.push('# TYPE chains_api_data_age_seconds gauge'); + lines.push(`chains_api_data_age_seconds ${age}`); + } + + if (l2beatStatus?.lastRefreshAt) { + const age = Math.max(0, Math.round((Date.now() - new Date(l2beatStatus.lastRefreshAt).getTime()) / 1000)); + lines.push('# HELP chains_api_l2beat_refresh_age_seconds Seconds since the last L2BEAT refresh'); + lines.push('# TYPE chains_api_l2beat_refresh_age_seconds gauge'); + lines.push(`chains_api_l2beat_refresh_age_seconds ${age}`); + } + + if (rpcStatus?.lastUpdated) { + const age = Math.max(0, Math.round((Date.now() - new Date(rpcStatus.lastUpdated).getTime()) / 1000)); + lines.push('# HELP chains_api_rpc_check_age_seconds Seconds since the last RPC health sweep'); + lines.push('# TYPE chains_api_rpc_check_age_seconds gauge'); + lines.push(`chains_api_rpc_check_age_seconds ${age}`); + } + + if (validationSummary) { + lines.push('# HELP chains_api_validation_errors Total validation errors by rule number'); + lines.push('# TYPE chains_api_validation_errors gauge'); + for (const [ruleKey, count] of Object.entries(validationSummary)) { + lines.push(`chains_api_validation_errors{rule="${escapeLabelValue(ruleKey)}"} ${count}`); + } + } + + return lines.join('\n') + '\n'; +} + +// Test-only helper. +export function _resetMetricsForTests() { + counters.clear(); +} diff --git a/tests/integration/api.test.js b/tests/integration/api.test.js index 87af4cc..98783dd 100644 --- a/tests/integration/api.test.js +++ b/tests/integration/api.test.js @@ -7,7 +7,74 @@ vi.mock('node:fs/promises', () => ({ readFile: vi.fn() })); -// Mock priceService +// Shared mock fn instances. 
Hoisted so multiple vi.mock factories below can +// reference the same identities — the test body uses `dataService.X` while +// route handlers under src/http/ import directly from src/store/, src/domain/, +// src/services/. Hoisting gives us one set of fns wired into all paths. +const mocks = vi.hoisted(() => ({ + loadData: vi.fn(), + initializeDataOnStartup: vi.fn(), + getCachedData: vi.fn(), + searchChains: vi.fn(), + getChainById: vi.fn(), + getAllChains: vi.fn(), + getAllRelations: vi.fn(), + getRelationsById: vi.fn(), + traverseRelations: vi.fn(), + getEndpointsById: vi.fn(), + getAllEndpoints: vi.fn(), + validateChainData: vi.fn(), + getRpcMonitoringResults: vi.fn(), + getRpcMonitoringStatus: vi.fn(), + startRpcHealthCheck: vi.fn(), + runRpcHealthCheck: vi.fn(), + getAllKeywords: vi.fn(), + countChainsByTag: vi.fn() +})); + +// Mock each src/ module that HTTP route handlers import from. These are the +// real seams now; dataService.js is just a thin re-export facade. +vi.mock('../../src/store/cache.js', () => ({ + cachedData: { theGraph: null, chainlist: null, chains: null, slip44: null, l2beat: null, indexed: null, lastUpdated: null, rpcHealth: {}, lastRpcCheck: null }, + applyDataToCache: vi.fn(), + getCachedData: mocks.getCachedData +})); + +vi.mock('../../src/store/queries.js', () => ({ + searchChains: mocks.searchChains, + getChainById: mocks.getChainById, + getAllChains: mocks.getAllChains, + getEndpointsById: mocks.getEndpointsById, + getAllEndpoints: mocks.getAllEndpoints, + countChainsByTag: mocks.countChainsByTag, + getRpcMonitoringResults: mocks.getRpcMonitoringResults +})); + +vi.mock('../../src/domain/relations.js', () => ({ + getAllRelations: mocks.getAllRelations, + getRelationsById: mocks.getRelationsById, + traverseRelations: mocks.traverseRelations +})); + +vi.mock('../../src/domain/keywords.js', () => ({ + getAllKeywords: mocks.getAllKeywords +})); + +vi.mock('../../src/services/loader.js', () => ({ + loadData: mocks.loadData, + 
initializeDataOnStartup: mocks.initializeDataOnStartup +})); + +vi.mock('../../src/services/rpcHealth.js', () => ({ + startRpcHealthCheck: mocks.startRpcHealthCheck, + runRpcHealthCheck: mocks.runRpcHealthCheck, + getRpcMonitoringStatus: mocks.getRpcMonitoringStatus +})); + +vi.mock('../../src/services/validation.js', () => ({ + validateChainData: mocks.validateChainData +})); + vi.mock('../../priceService.js', () => ({ getPricesForChains: vi.fn(async (chainIds) => { const map = new Map(); @@ -22,250 +89,9 @@ vi.mock('../../priceService.js', () => ({ }), getCoinGeckoId: vi.fn(() => null), clearPriceCache: vi.fn(), - prefetchAllPrices: vi.fn(async () => {}), + prefetchAllPrices: vi.fn(async () => {}) })); -// Mock the modules before importing -vi.mock('../../dataService.js', async () => { - const actual = await vi.importActual('../../dataService.js'); - return { - ...actual, - loadData: vi.fn().mockResolvedValue({ - indexed: { - all: [], - byChainId: {} - }, - lastUpdated: new Date().toISOString() - }), - initializeDataOnStartup: vi.fn().mockResolvedValue({ - indexed: { - all: [], - byChainId: {} - }, - lastUpdated: new Date().toISOString() - }), - getCachedData: vi.fn(() => ({ - indexed: { - all: [ - { - chainId: 1, - name: 'Ethereum Mainnet', - tags: ['L1'], - sources: ['chains'] - }, - { - chainId: 137, - name: 'Polygon', - tags: ['L2'], - sources: ['chainlist'] - }, - { - chainId: 11155111, - name: 'Sepolia', - tags: ['Testnet'], - sources: ['chainlist'] - } - ], - byChainId: { - 1: { - chainId: 1, - name: 'Ethereum Mainnet', - tags: ['L1'], - sources: ['chains'], - relations: [] - }, - 137: { - chainId: 137, - name: 'Polygon', - tags: ['L2'], - sources: ['chainlist'], - relations: [{ kind: 'l2Of', chainId: 1 }] - }, - 11155111: { - chainId: 11155111, - name: 'Sepolia', - tags: ['Testnet'], - sources: ['chainlist'], - relations: [] - } - } - }, - theGraph: { status: 'loaded' }, - chainlist: { status: 'loaded' }, - chains: { status: 'loaded' }, - slip44: { - 60: 
{ symbol: 'ETH', name: 'Ether' }, - 966: { symbol: 'MATIC', name: 'Polygon' } - }, - lastUpdated: new Date().toISOString() - })), - searchChains: vi.fn((query) => { - const lowerQuery = query.toLowerCase(); - if (lowerQuery.includes('eth') || query === '1') { - return [{ - chainId: 1, - name: 'Ethereum Mainnet', - tags: ['L1'] - }]; - } - return []; - }), - getChainById: vi.fn((id) => { - if (id === 1) { - return { - chainId: 1, - name: 'Ethereum Mainnet', - tags: ['L1'], - sources: ['chains'] - }; - } - return null; - }), - getAllChains: vi.fn(() => [ - { - chainId: 1, - name: 'Ethereum Mainnet', - tags: ['L1'] - }, - { - chainId: 137, - name: 'Polygon', - tags: ['L2'] - }, - { - chainId: 11155111, - name: 'Sepolia', - tags: ['Testnet'] - } - ]), - getAllRelations: vi.fn(() => ({ - '1': { - '137': { - parentName: 'Ethereum Mainnet', - kind: 'l1Of', - childName: 'Polygon', - chainId: 137 - } - } - })), - getRelationsById: vi.fn((id) => { - if (id === 137) { - return { - chainId: 137, - chainName: 'Polygon', - relations: [{ kind: 'l2Of', chainId: 1 }] - }; - } - return null; - }), - getEndpointsById: vi.fn((id) => { - if (id === 1) { - return { - chainId: 1, - name: 'Ethereum Mainnet', - rpc: ['https://eth.llamarpc.com'], - firehose: [], - substreams: [] - }; - } - return null; - }), - getAllEndpoints: vi.fn(() => [ - { - chainId: 1, - name: 'Ethereum Mainnet', - rpc: ['https://eth.llamarpc.com'], - firehose: [], - substreams: [] - } - ]), - validateChainData: vi.fn(() => ({ - totalErrors: 2, - errorsByRule: { - rule1_relation_conflicts: [ - { - rule: 1, - chainId: 137, - chainName: 'Polygon', - message: 'Example validation error' - } - ], - rule2_slip44_testnet_mismatch: [], - rule3_name_testnet_mismatch: [ - { - rule: 3, - chainId: 11155111, - chainName: 'Sepolia', - message: 'Name contains testnet keyword' - } - ], - rule4_sepolia_hoodie_issues: [], - rule5_status_conflicts: [], - rule6_goerli_not_deprecated: [] - }, - summary: { - rule1: 1, - rule2: 0, - rule3: 
1, - rule4: 0, - rule5: 0, - rule6: 0 - }, - allErrors: [ - { - rule: 1, - chainId: 137, - chainName: 'Polygon', - message: 'Example validation error' - }, - { - rule: 3, - chainId: 11155111, - chainName: 'Sepolia', - message: 'Name contains testnet keyword' - } - ] - })), - getRpcMonitoringResults: vi.fn(() => ({ - lastUpdated: new Date().toISOString(), - totalEndpoints: 100, - testedEndpoints: 50, - workingEndpoints: 30, - failedEndpoints: 20, - results: [ - { - chainId: 1, - chainName: 'Ethereum Mainnet', - url: 'https://eth.llamarpc.com', - status: 'working', - blockNumber: 12345678, - latencyMs: 150, - error: null - } - ] - })), - getRpcMonitoringStatus: vi.fn(() => ({ - isMonitoring: false, - lastUpdated: new Date().toISOString() - })), - startRpcHealthCheck: vi.fn(), - getAllKeywords: vi.fn(() => ({ - totalKeywords: 13, - keywords: { - blockchainNames: ['Ethereum Mainnet', 'Polygon'], - networkNames: ['eth', 'matic'], - softwareClients: ['Geth'], - currencySymbols: ['ETH', 'MATIC'], - tags: ['L2', 'Testnet'], - relationKinds: ['l2Of'], - sources: ['chainlist', 'chains'], - statuses: ['active'], - generic: ['ethereum', 'geth'] - } - })) - }; -}); - vi.mock('../../clientsView.js', () => ({ getClientsByChain: vi.fn((chainId) => { const samples = { @@ -274,36 +100,22 @@ vi.mock('../../clientsView.js', () => ({ chainName: 'Ethereum Mainnet', totalNodes: 2, unknownNodes: 0, - clients: [ - { - name: 'geth', - repo: 'ethereum/go-ethereum', - language: 'Go', - website: 'https://geth.ethereum.org', - layer: 'execution', - known: true, - nodeCount: 2, - versions: [{ version: 'v1.14.5', nodeCount: 2 }] - } - ] + clients: [{ + name: 'geth', repo: 'ethereum/go-ethereum', language: 'Go', + website: 'https://geth.ethereum.org', layer: 'execution', known: true, + nodeCount: 2, versions: [{ version: 'v1.14.5', nodeCount: 2 }] + }] }, 137: { chainId: 137, chainName: 'Polygon', totalNodes: 1, unknownNodes: 0, - clients: [ - { - name: 'bor', - repo: 'maticnetwork/bor', - 
language: 'Go', - website: 'https://polygon.technology', - layer: 'execution', - known: true, - nodeCount: 1, - versions: [{ version: 'v1.3.0', nodeCount: 1 }] - } - ] + clients: [{ + name: 'bor', repo: 'maticnetwork/bor', language: 'Go', + website: 'https://polygon.technology', layer: 'execution', known: true, + nodeCount: 1, versions: [{ version: 'v1.3.0', nodeCount: 1 }] + }] } }; if (chainId === undefined) return Object.values(samples); @@ -311,28 +123,142 @@ vi.mock('../../clientsView.js', () => ({ }), summarizeChainClients: vi.fn((chainResults) => { if (!chainResults || chainResults.length === 0) return null; - const chainId = chainResults[0].chainId; return { - chainId, + chainId: chainResults[0].chainId, chainName: chainResults[0].chainName, totalNodes: chainResults.length, unknownNodes: 0, - clients: [ - { - name: 'geth', - repo: 'ethereum/go-ethereum', - language: 'Go', - website: 'https://geth.ethereum.org', - layer: 'execution', - known: true, - nodeCount: chainResults.length, - versions: [{ version: 'v1.14.5', nodeCount: chainResults.length }] - } - ] + clients: [{ + name: 'geth', repo: 'ethereum/go-ethereum', language: 'Go', + website: 'https://geth.ethereum.org', layer: 'execution', known: true, + nodeCount: chainResults.length, + versions: [{ version: 'v1.14.5', nodeCount: chainResults.length }] + }] }; }) })); +// Set default implementations for the hoisted mocks. Can't do this in +// vi.hoisted() because closures over the data would be re-created each +// suite; this gives us one stable set used everywhere. 
+function installMockDefaults() { + mocks.loadData.mockResolvedValue({ + indexed: { all: [], byChainId: {} }, + lastUpdated: new Date().toISOString() + }); + mocks.initializeDataOnStartup.mockResolvedValue({ + indexed: { all: [], byChainId: {} }, + lastUpdated: new Date().toISOString() + }); + mocks.getCachedData.mockImplementation(() => ({ + indexed: { + all: [ + { chainId: 1, name: 'Ethereum Mainnet', tags: ['L1'], sources: ['chains'] }, + { chainId: 137, name: 'Polygon', tags: ['L2'], sources: ['chainlist'] }, + { chainId: 11155111, name: 'Sepolia', tags: ['Testnet'], sources: ['chainlist'] } + ], + byChainId: { + 1: { chainId: 1, name: 'Ethereum Mainnet', tags: ['L1'], sources: ['chains'], relations: [] }, + 137: { chainId: 137, name: 'Polygon', tags: ['L2'], sources: ['chainlist'], relations: [{ kind: 'l2Of', chainId: 1 }] }, + 11155111: { chainId: 11155111, name: 'Sepolia', tags: ['Testnet'], sources: ['chainlist'], relations: [] } + } + }, + theGraph: { status: 'loaded' }, + chainlist: { status: 'loaded' }, + chains: { status: 'loaded' }, + slip44: { 60: { symbol: 'ETH', name: 'Ether' }, 966: { symbol: 'MATIC', name: 'Polygon' } }, + l2beat: { source: 'live', fetchedAt: new Date().toISOString(), projects: [{ slug: 'arbitrum', chainId: 42161, displayName: 'Arbitrum One' }] }, + lastUpdated: new Date().toISOString() + })); + mocks.searchChains.mockImplementation((query) => { + const lowerQuery = query.toLowerCase(); + if (lowerQuery.includes('eth') || query === '1') { + return [{ chainId: 1, name: 'Ethereum Mainnet', tags: ['L1'] }]; + } + return []; + }); + mocks.getChainById.mockImplementation((id) => { + if (id === 1) return { chainId: 1, name: 'Ethereum Mainnet', tags: ['L1'], sources: ['chains'] }; + return null; + }); + mocks.getAllChains.mockReturnValue([ + { chainId: 1, name: 'Ethereum Mainnet', tags: ['L1'] }, + { chainId: 137, name: 'Polygon', tags: ['L2'] }, + { chainId: 11155111, name: 'Sepolia', tags: ['Testnet'] } + ]); + 
mocks.getAllRelations.mockReturnValue({ + '1': { '137': { parentName: 'Ethereum Mainnet', kind: 'l1Of', childName: 'Polygon', chainId: 137 } } + }); + mocks.getRelationsById.mockImplementation((id) => { + if (id === 137) return { chainId: 137, chainName: 'Polygon', relations: [{ kind: 'l2Of', chainId: 1 }] }; + return null; + }); + mocks.traverseRelations.mockReturnValue(null); + mocks.getEndpointsById.mockImplementation((id) => { + if (id === 1) { + return { chainId: 1, name: 'Ethereum Mainnet', rpc: ['https://eth.llamarpc.com'], firehose: [], substreams: [] }; + } + return null; + }); + mocks.getAllEndpoints.mockReturnValue([ + { chainId: 1, name: 'Ethereum Mainnet', rpc: ['https://eth.llamarpc.com'], firehose: [], substreams: [] } + ]); + mocks.validateChainData.mockReturnValue({ + totalErrors: 2, + errorsByRule: { + rule1_relation_conflicts: [{ rule: 1, chainId: 137, chainName: 'Polygon', message: 'Example validation error' }], + rule2_slip44_testnet_mismatch: [], + rule3_name_testnet_mismatch: [{ rule: 3, chainId: 11155111, chainName: 'Sepolia', message: 'Name contains testnet keyword' }], + rule4_sepolia_hoodie_issues: [], + rule5_status_conflicts: [], + rule6_goerli_not_deprecated: [] + }, + summary: { rule1: 1, rule2: 0, rule3: 1, rule4: 0, rule5: 0, rule6: 0 }, + allErrors: [ + { rule: 1, chainId: 137, chainName: 'Polygon', message: 'Example validation error' }, + { rule: 3, chainId: 11155111, chainName: 'Sepolia', message: 'Name contains testnet keyword' } + ] + }); + mocks.getRpcMonitoringResults.mockReturnValue({ + lastUpdated: new Date().toISOString(), + totalEndpoints: 100, + testedEndpoints: 50, + workingEndpoints: 30, + failedEndpoints: 20, + results: [ + { chainId: 1, chainName: 'Ethereum Mainnet', url: 'https://eth.llamarpc.com', status: 'working', blockNumber: 12345678, latencyMs: 150, error: null } + ] + }); + mocks.getRpcMonitoringStatus.mockReturnValue({ isMonitoring: false, lastUpdated: new Date().toISOString() }); + 
mocks.getAllKeywords.mockReturnValue({ + totalKeywords: 13, + keywords: { + blockchainNames: ['Ethereum Mainnet', 'Polygon'], + networkNames: ['eth', 'matic'], + softwareClients: ['Geth'], + currencySymbols: ['ETH', 'MATIC'], + tags: ['L2', 'Testnet'], + relationKinds: ['l2Of'], + sources: ['chainlist', 'chains'], + statuses: ['active'], + generic: ['ethereum', 'geth'] + } + }); + mocks.countChainsByTag.mockReturnValue({ + totalChains: 3, + totalMainnets: 1, + totalTestnets: 1, + totalL2s: 1, + totalBeacons: 0 + }); +} + +installMockDefaults(); + +// Legacy test references: `dataService.X` still resolves to the same hoisted +// mock fn instance because dataService.js re-exports from the mocked src/ +// modules. No code change needed in the test bodies below. + describe('API Endpoints', () => { let app; @@ -400,6 +326,55 @@ describe('API Endpoints', () => { expect(data).toHaveProperty('lastUpdated'); expect(data).toHaveProperty('totalChains'); }); + + it('exposes per-source freshness and per-refresher status', async () => { + const response = await app.inject({ method: 'GET', url: '/health' }); + const data = JSON.parse(response.payload); + + expect(data.sources).toBeDefined(); + for (const source of ['theGraph', 'chainlist', 'chains', 'slip44', 'l2beat']) { + expect(data.sources[source]).toHaveProperty('loaded'); + expect(data.sources[source]).toHaveProperty('ageSeconds'); + } + + expect(data.refreshers).toBeDefined(); + expect(data.refreshers.rpc).toHaveProperty('isRunning'); + expect(data.refreshers.l2beat).toHaveProperty('lastRefreshAt'); + expect(data.refreshers.l2beat).toHaveProperty('intervalMs'); + }); + }); + + describe('GET /refresher', () => { + it('returns the unified refresher status block', async () => { + const response = await app.inject({ method: 'GET', url: '/refresher' }); + expect(response.statusCode).toBe(200); + const data = JSON.parse(response.payload); + + expect(data).toHaveProperty('tickIntervalMs'); + 
expect(data).toHaveProperty('isTickInFlight'); + expect(data).toHaveProperty('queueDepth'); + expect(data).toHaveProperty('sweep'); + expect(data.sweep).toHaveProperty('sweepNumber'); + expect(data).toHaveProperty('l2beat'); + expect(data).toHaveProperty('rpc'); + }); + }); + + describe('GET /metrics', () => { + it('returns Prometheus exposition format with text/plain content type', async () => { + const response = await app.inject({ method: 'GET', url: '/metrics' }); + expect(response.statusCode).toBe(200); + expect(response.headers['content-type']).toMatch(/text\/plain/); + expect(response.body).toContain('# HELP chains_api_chains_total'); + expect(response.body).toContain('# TYPE chains_api_chains_total gauge'); + }); + + it('includes a source-loaded gauge for each of the 5 sources', async () => { + const response = await app.inject({ method: 'GET', url: '/metrics' }); + for (const source of ['theGraph', 'chainlist', 'chains', 'slip44', 'l2beat']) { + expect(response.body).toContain(`chains_api_source_loaded{source="${source}"}`); + } + }); }); describe('GET /chains', () => { @@ -459,23 +434,16 @@ describe('API Endpoints', () => { expect(data.error).toContain('Invalid tag'); }); - it('should include price field on each chain', async () => { + it('should return 400 for unknown query parameters (schema additionalProperties)', async () => { const response = await app.inject({ method: 'GET', - url: '/chains' + url: '/chains?tags=L2' // typo: should be ?tag= }); - expect(response.statusCode).toBe(200); + expect(response.statusCode).toBe(400); const data = JSON.parse(response.payload); - expect(data.chains.length > 0).toBe(true); - data.chains.forEach(chain => { - expect(chain).toHaveProperty('price'); - }); - // Ethereum should have a real price, others should be null - const eth = data.chains.find(c => c.chainId === 1); - expect(eth.price).toEqual({ usd: 2000.5, updatedAt: '2026-05-01T00:00:00.000Z' }); - const polygon = data.chains.find(c => c.chainId === 137); - 
expect(polygon.price).toBeNull(); + expect(data.error).toContain('Unknown query parameter'); + expect(data.error).toContain('tags'); }); }); @@ -524,19 +492,6 @@ describe('API Endpoints', () => { const data = JSON.parse(response.payload); expect(data).toHaveProperty('error', 'Invalid chain ID'); }); - - it('should include price field when known chain', async () => { - const response = await app.inject({ - method: 'GET', - url: '/chains/1' - }); - - expect(response.statusCode).toBe(200); - const data = JSON.parse(response.payload); - expect(data).toHaveProperty('price'); - expect(data.price).toEqual({ usd: 2000.5, updatedAt: '2026-05-01T00:00:00.000Z' }); - }); - }); describe('GET /search', () => { @@ -972,65 +927,6 @@ describe('API Endpoints', () => { const data = JSON.parse(response.payload); expect(data).toHaveProperty('error', 'No monitoring results found for this chain'); }); - - it('should include clients summary in the response', async () => { - const response = await app.inject({ - method: 'GET', - url: '/rpc-monitor/1' - }); - - expect(response.statusCode).toBe(200); - const data = JSON.parse(response.payload); - expect(data).toHaveProperty('clients'); - expect(Array.isArray(data.clients)).toBe(true); - expect(data.clients[0]).toMatchObject({ name: 'geth', repo: 'ethereum/go-ethereum' }); - }); - }); - - describe('GET /clients', () => { - it('returns aggregated clients across all chains', async () => { - const response = await app.inject({ method: 'GET', url: '/clients' }); - - expect(response.statusCode).toBe(200); - const data = JSON.parse(response.payload); - expect(data).toHaveProperty('count', 2); - expect(data).toHaveProperty('chains'); - expect(Array.isArray(data.chains)).toBe(true); - expect(data.chains[0]).toHaveProperty('clients'); - }); - }); - - describe('GET /clients/:id', () => { - it('returns client summary for a known chain', async () => { - const response = await app.inject({ method: 'GET', url: '/clients/1' }); - - 
expect(response.statusCode).toBe(200); - const data = JSON.parse(response.payload); - expect(data).toMatchObject({ - chainId: 1, - chainName: 'Ethereum Mainnet', - totalNodes: 2 - }); - expect(data.clients[0]).toMatchObject({ - name: 'geth', - repo: 'ethereum/go-ethereum', - nodeCount: 2 - }); - }); - - it('returns 400 for invalid chain ID', async () => { - const response = await app.inject({ method: 'GET', url: '/clients/not-a-number' }); - - expect(response.statusCode).toBe(400); - expect(JSON.parse(response.payload)).toHaveProperty('error', 'Invalid chain ID'); - }); - - it('returns 404 when no client data exists for chain', async () => { - const response = await app.inject({ method: 'GET', url: '/clients/999999' }); - - expect(response.statusCode).toBe(404); - expect(JSON.parse(response.payload)).toHaveProperty('error', 'No client data found for this chain'); - }); }); describe('GET /validate', () => { diff --git a/tests/unit/dataService.test.js b/tests/unit/dataService.test.js index 0395522..ad95bfd 100644 --- a/tests/unit/dataService.test.js +++ b/tests/unit/dataService.test.js @@ -1,5 +1,6 @@ import { describe, it, expect, vi, beforeEach } from 'vitest'; import { resolve } from 'node:path'; +import { logger } from '../../src/util/logger.js'; // Mock config before importing dataService vi.mock('../../config.js', () => ({ @@ -11,6 +12,7 @@ vi.mock('../../config.js', () => ({ DATA_CACHE_FILE: '.cache/test-data-cache.json', RPC_CHECK_TIMEOUT_MS: 8000, RPC_CHECK_CONCURRENCY: 8, + MAX_ENDPOINTS_PER_CHAIN: 5, PROXY_URL: '', PROXY_ENABLED: false })); @@ -1226,20 +1228,14 @@ describe('loadData', () => { .mockRejectedValueOnce(new Error('Error 3')) .mockRejectedValueOnce(new Error('Error 4')); - const result = await loadData(); - - expect(result.theGraph).toBeNull(); - expect(result.chainlist).toBeNull(); - expect(result.chains).toBeNull(); - expect(result.slip44).toEqual({}); - expect(result.indexed.all).toHaveLength(0); + await 
expect(loadData()).rejects.toThrow('All chain registry sources failed during data refresh'); }); it('should reset rpcHealth and lastRpcCheck on load', async () => { global.fetch - .mockResolvedValueOnce({ ok: true, json: async () => null }) - .mockResolvedValueOnce({ ok: true, json: async () => null }) - .mockResolvedValueOnce({ ok: true, json: async () => null }) + .mockResolvedValueOnce({ ok: true, json: async () => ({ networks: [] }) }) + .mockResolvedValueOnce({ ok: true, json: async () => [] }) + .mockResolvedValueOnce({ ok: true, json: async () => [] }) .mockResolvedValueOnce({ ok: true, text: async () => '' }); const result = await loadData(); @@ -1250,9 +1246,9 @@ describe('loadData', () => { it('should set lastUpdated timestamp', async () => { global.fetch - .mockResolvedValueOnce({ ok: true, json: async () => null }) - .mockResolvedValueOnce({ ok: true, json: async () => null }) - .mockResolvedValueOnce({ ok: true, json: async () => null }) + .mockResolvedValueOnce({ ok: true, json: async () => ({ networks: [] }) }) + .mockResolvedValueOnce({ ok: true, json: async () => [] }) + .mockResolvedValueOnce({ ok: true, json: async () => [] }) .mockResolvedValueOnce({ ok: true, text: async () => '' }); const beforeTime = Date.now(); @@ -1271,9 +1267,9 @@ describe('loadData', () => { | 60 | 0x8000003c | ETH | Ethereum |`; global.fetch - .mockResolvedValueOnce({ ok: true, json: async () => null }) - .mockResolvedValueOnce({ ok: true, json: async () => null }) - .mockResolvedValueOnce({ ok: true, json: async () => null }) + .mockResolvedValueOnce({ ok: true, json: async () => ({ networks: [] }) }) + .mockResolvedValueOnce({ ok: true, json: async () => [] }) + .mockResolvedValueOnce({ ok: true, json: async () => [] }) .mockResolvedValueOnce({ ok: true, text: async () => mockSlip44 @@ -1292,16 +1288,15 @@ describe('runRpcHealthCheck', () => { }); it('should skip health check if data not loaded', async () => { - const consoleWarnSpy = vi.spyOn(console, 
'warn').mockImplementation(() => {}); - - // Reload module to get fresh state without data vi.resetModules(); + const { logger: freshLogger } = await import('../../src/util/logger.js'); + const warnSpy = vi.spyOn(freshLogger, 'warn').mockImplementation(() => {}); const { runRpcHealthCheck: freshRun } = await import('../../dataService.js'); await freshRun(); - expect(consoleWarnSpy).toHaveBeenCalledWith('RPC health check skipped: data not loaded'); - consoleWarnSpy.mockRestore(); + expect(warnSpy).toHaveBeenCalledWith('RPC health check skipped: data not loaded'); + warnSpy.mockRestore(); }); it('should skip health check if no RPC endpoints found', async () => { @@ -1320,11 +1315,11 @@ describe('runRpcHealthCheck', () => { await loadData(); - const consoleWarnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}); + const warnSpy = vi.spyOn(logger, 'warn').mockImplementation(() => {}); await runRpcHealthCheck(); - expect(consoleWarnSpy).toHaveBeenCalledWith('RPC health check skipped: no RPC endpoints found'); - consoleWarnSpy.mockRestore(); + expect(warnSpy).toHaveBeenCalledWith('RPC health check skipped: no RPC endpoints found'); + warnSpy.mockRestore(); }); it('should successfully check RPC endpoints with valid responses', async () => { @@ -1366,7 +1361,7 @@ describe('runRpcHealthCheck', () => { json: async () => ({ jsonrpc: '2.0', id: 1, result: '0xabcdef' }) }); - const consoleLogSpy = vi.spyOn(console, 'log').mockImplementation(() => {}); + const infoSpy = vi.spyOn(logger, 'info').mockImplementation(() => {}); await runRpcHealthCheck(); const cachedData = getCachedData(); @@ -1375,9 +1370,9 @@ describe('runRpcHealthCheck', () => { expect(cachedData.rpcHealth[1]).toHaveLength(2); expect(cachedData.lastRpcCheck).toBeDefined(); - // Verify console.log was called with completion message - expect(consoleLogSpy).toHaveBeenCalledWith(expect.stringContaining('RPC health check completed')); - consoleLogSpy.mockRestore(); + // pino signature is logger.info(obj, msg) 
— first arg is the structured context + expect(infoSpy).toHaveBeenCalledWith(expect.any(Object), expect.stringContaining('RPC health check completed')); + infoSpy.mockRestore(); }); it('should handle RPC endpoint with unsupported URL', async () => { @@ -1399,12 +1394,12 @@ describe('runRpcHealthCheck', () => { await loadData(); - const consoleWarnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}); + const warnSpy = vi.spyOn(logger, 'warn').mockImplementation(() => {}); await runRpcHealthCheck(); // Should skip because no valid HTTP endpoints - expect(consoleWarnSpy).toHaveBeenCalledWith('RPC health check skipped: no RPC endpoints found'); - consoleWarnSpy.mockRestore(); + expect(warnSpy).toHaveBeenCalledWith('RPC health check skipped: no RPC endpoints found'); + warnSpy.mockRestore(); }); it('should handle RPC endpoint requiring API key substitution', async () => { @@ -1549,11 +1544,11 @@ describe('runRpcHealthCheck', () => { .mockResolvedValueOnce({ ok: true, json: async () => mockChains }) .mockResolvedValueOnce({ ok: true, text: async () => '' }); - const consoleWarnSpy = vi.spyOn(console, 'warn').mockImplementation(() => {}); + const warnSpy = vi.spyOn(logger, 'warn').mockImplementation(() => {}); await runRpcHealthCheck(); - expect(consoleWarnSpy).toHaveBeenCalledWith('RPC health check skipped: data changed during run'); - consoleWarnSpy.mockRestore(); + expect(warnSpy).toHaveBeenCalledWith('RPC health check skipped: data changed during run'); + warnSpy.mockRestore(); }); it('should deduplicate RPC URLs', async () => { @@ -2258,7 +2253,7 @@ describe('initializeDataOnStartup with disk cache', () => { global.fetch.mockRejectedValue(new Error('network down')); - await expect(mod.loadData()).rejects.toThrow('All data sources failed during data refresh'); + await expect(mod.loadData()).rejects.toThrow('All chain registry sources failed during data refresh'); expect(mod.getCachedData().indexed.byChainId[25].name).toBe('Fresh Chain'); }); diff --git 
a/tests/unit/domain/relations.test.js b/tests/unit/domain/relations.test.js new file mode 100644 index 0000000..2f751b1 --- /dev/null +++ b/tests/unit/domain/relations.test.js @@ -0,0 +1,134 @@ +import { describe, it, expect, beforeEach } from 'vitest'; +import { applyDataToCache } from '../../../src/store/cache.js'; +import { + getAllRelations, + getRelationsById, + traverseRelations +} from '../../../src/domain/relations.js'; + +function setupIndexed() { + const ethereum = { + chainId: 1, + name: 'Ethereum', + tags: [], + relations: [ + { kind: 'parentOf', chainId: 10, network: 'optimism', source: 'chains' } + ] + }; + const optimism = { + chainId: 10, + name: 'Optimism', + tags: ['L2'], + relations: [] + }; + const sepolia = { + chainId: 11155111, + name: 'Sepolia', + tags: ['Testnet'], + relations: [ + { kind: 'testnetOf', chainId: 1, network: 'mainnet', source: 'theGraph' } + ] + }; + + applyDataToCache({ + indexed: { + byChainId: { 1: ethereum, 10: optimism, 11155111: sepolia }, + byName: {}, + all: [ethereum, optimism, sepolia] + } + }); +} + +describe('domain/relations', () => { + beforeEach(() => { + applyDataToCache({}); + }); + + describe('getAllRelations', () => { + it('returns {} when no data is loaded', () => { + expect(getAllRelations()).toEqual({}); + }); + + it('renames parentOf to l1Of in the output', () => { + setupIndexed(); + const all = getAllRelations(); + expect(all['1']['10'].kind).toBe('l1Of'); + expect(all['1']['10'].parentName).toBe('Ethereum'); + expect(all['1']['10'].childName).toBe('Optimism'); + }); + + it('groups relations by parent chainId', () => { + setupIndexed(); + const all = getAllRelations(); + expect(Object.keys(all)).toEqual(expect.arrayContaining(['1'])); + expect(all['1']['10']).toBeDefined(); + expect(all['1']['11155111']).toBeDefined(); + }); + }); + + describe('getRelationsById', () => { + it('returns null when no data is loaded', () => { + expect(getRelationsById(1)).toBeNull(); + }); + + it('returns null for unknown 
chains', () => { + setupIndexed(); + expect(getRelationsById(999)).toBeNull(); + }); + + it('returns the chain name and raw relations array', () => { + setupIndexed(); + const result = getRelationsById(11155111); + expect(result.chainId).toBe(11155111); + expect(result.chainName).toBe('Sepolia'); + expect(result.relations).toHaveLength(1); + expect(result.relations[0].kind).toBe('testnetOf'); + }); + }); + + describe('traverseRelations', () => { + it('returns null when no data or chain is missing', () => { + expect(traverseRelations(1)).toBeNull(); + setupIndexed(); + expect(traverseRelations(999)).toBeNull(); + }); + + it('returns BFS nodes and edges with depth annotations', () => { + setupIndexed(); + const result = traverseRelations(1, 2); + expect(result.startChainId).toBe(1); + expect(result.startChainName).toBe('Ethereum'); + expect(result.totalNodes).toBeGreaterThanOrEqual(2); + expect(result.totalEdges).toBeGreaterThanOrEqual(1); + const depths = result.nodes.map(n => n.depth); + expect(depths).toContain(0); + expect(depths).toContain(1); + }); + + it('deduplicates undirected edges (same {min,max,kind} key)', () => { + const ethereum = { + chainId: 1, + name: 'Ethereum', + tags: [], + relations: [{ kind: 'parentOf', chainId: 10, network: 'optimism', source: 'chains' }] + }; + const optimism = { + chainId: 10, + name: 'Optimism', + tags: ['L2'], + relations: [{ kind: 'parentOf', chainId: 1, network: 'eip155-1', source: 'chains' }] + }; + applyDataToCache({ + indexed: { + byChainId: { 1: ethereum, 10: optimism }, + byName: {}, + all: [ethereum, optimism] + } + }); + + const result = traverseRelations(1, 3); + const parentOfEdges = result.edges.filter(e => e.kind === 'parentOf'); + expect(parentOfEdges).toHaveLength(1); + }); + }); +}); diff --git a/tests/unit/http/admin.test.js b/tests/unit/http/admin.test.js new file mode 100644 index 0000000..2372de5 --- /dev/null +++ b/tests/unit/http/admin.test.js @@ -0,0 +1,198 @@ +import { describe, it, expect, 
beforeEach, vi } from 'vitest'; + +vi.mock('../../../src/store/cache.js', () => ({ + getCachedData: vi.fn() +})); + +vi.mock('../../../src/store/queries.js', () => ({ + getAllChains: vi.fn(() => []), + getRpcMonitoringResults: vi.fn(() => ({ + lastUpdated: null, + totalEndpoints: 0, + testedEndpoints: 0, + workingEndpoints: 0, + failedEndpoints: 0, + results: [] + })), + countChainsByTag: vi.fn(() => ({ totalChains: 0, totalMainnets: 0, totalTestnets: 0, totalL2s: 0, totalBeacons: 0 })) +})); + +vi.mock('../../../src/domain/keywords.js', () => ({ + getAllKeywords: vi.fn(() => ({ totalKeywords: 0, keywords: {} })) +})); + +vi.mock('../../../src/services/loader.js', () => ({ + loadData: vi.fn() +})); + +vi.mock('../../../src/services/rpcHealth.js', () => ({ + startRpcHealthCheck: vi.fn(), + getRpcMonitoringStatus: vi.fn(() => ({ isMonitoring: false, lastUpdated: null })) +})); + +vi.mock('../../../src/services/validation.js', () => ({ + validateChainData: vi.fn(() => ({ totalErrors: 0, errorsByRule: {}, summary: {}, allErrors: [] })) +})); + +vi.mock('../../../src/services/l2beatRefresher.js', () => ({ + getL2BeatRefreshStatus: vi.fn(() => ({ + isRefreshing: false, + lastRefreshAt: null, + lastRefreshSource: null, + lastRefreshError: null, + lastRefreshProjectCount: 0, + intervalMs: 300000 + })) +})); + +vi.mock('../../../config.js', () => ({ + RELOAD_RATE_LIMIT_MAX: 5, + RATE_LIMIT_WINDOW_MS: 60000, + DATA_CACHE_ENABLED: false, + DATA_CACHE_FILE: '.cache/test-data.json' +})); + +import Fastify from 'fastify'; +import { getCachedData } from '../../../src/store/cache.js'; +import { getRpcMonitoringStatus } from '../../../src/services/rpcHealth.js'; +import { getL2BeatRefreshStatus } from '../../../src/services/l2beatRefresher.js'; +import { adminRoutes } from '../../../src/http/routes/admin.js'; + +// Local alias to keep the test bodies readable. 
+const dataService = { getCachedData, getRpcMonitoringStatus }; + +async function buildApp() { + const app = Fastify({ logger: false }); + await app.register(adminRoutes); + return app; +} + +describe('GET /health (deepened)', () => { + let app; + + beforeEach(async () => { + vi.clearAllMocks(); + app = await buildApp(); + }); + + it('returns status=ok and per-source freshness when all core sources loaded', async () => { + const now = new Date().toISOString(); + dataService.getCachedData.mockReturnValue({ + theGraph: { networks: [] }, + chainlist: [], + chains: [], + slip44: { 60: {} }, + l2beat: { source: 'live', fetchedAt: now, projects: [{ slug: 'arbitrum', chainId: 42161 }] }, + indexed: { all: [{ chainId: 1 }] }, + lastUpdated: now + }); + dataService.getRpcMonitoringStatus.mockReturnValue({ isMonitoring: false, lastUpdated: now }); + getL2BeatRefreshStatus.mockReturnValue({ + isRefreshing: false, + lastRefreshAt: now, + lastRefreshSource: 'live', + lastRefreshError: null, + lastRefreshProjectCount: 1, + intervalMs: 300000 + }); + + const res = await app.inject({ method: 'GET', url: '/health' }); + const body = res.json(); + expect(body.status).toBe('ok'); + expect(body.sources.theGraph.loaded).toBe(true); + expect(body.sources.l2beat.loaded).toBe(true); + expect(body.sources.l2beat.source).toBe('live'); + expect(typeof body.sources.theGraph.ageSeconds).toBe('number'); + expect(body.refreshers.l2beat.lastRefreshAt).toBe(now); + }); + + it('returns status=down when a core source is missing', async () => { + dataService.getCachedData.mockReturnValue({ + theGraph: null, + chainlist: [], + chains: [], + slip44: {}, + l2beat: null, + indexed: null, + lastUpdated: null + }); + + const res = await app.inject({ method: 'GET', url: '/health' }); + expect(res.json().status).toBe('down'); + }); + + it('returns status=degraded when slip44 fetch failed (null) but core sources loaded', async () => { + const now = new Date().toISOString(); + 
dataService.getCachedData.mockReturnValue({ + theGraph: {}, + chainlist: [], + chains: [], + slip44: null, // fetch failed + l2beat: { source: 'live', fetchedAt: now, projects: [{ chainId: 1 }] }, + indexed: { all: [] }, + lastUpdated: now + }); + + const res = await app.inject({ method: 'GET', url: '/health' }); + const body = res.json(); + expect(body.status).toBe('degraded'); + expect(body.sources.slip44.loaded).toBe(false); + }); + + it('marks l2beat as not loaded when fallback returned no projects', async () => { + const now = new Date().toISOString(); + dataService.getCachedData.mockReturnValue({ + theGraph: {}, + chainlist: [], + chains: [], + slip44: { 60: {} }, + l2beat: { source: 'unavailable', fetchedAt: null, projects: [] }, + indexed: { all: [] }, + lastUpdated: now + }); + + const res = await app.inject({ method: 'GET', url: '/health' }); + const body = res.json(); + expect(body.sources.l2beat.loaded).toBe(false); + expect(body.sources.l2beat.source).toBe('unavailable'); + }); +}); + +describe('GET /sources (extended with l2beat + slip44 null awareness)', () => { + let app; + + beforeEach(async () => { + vi.clearAllMocks(); + app = await buildApp(); + }); + + it('reports slip44: not loaded when slip44 is null (fetch failed)', async () => { + dataService.getCachedData.mockReturnValue({ + theGraph: {}, + chainlist: [], + chains: [], + slip44: null, + l2beat: { projects: [] }, + indexed: { all: [] }, + lastUpdated: null + }); + + const res = await app.inject({ method: 'GET', url: '/sources' }); + expect(res.json().sources.slip44).toBe('not loaded'); + }); + + it('reports l2beat: loaded when projects array is non-empty', async () => { + dataService.getCachedData.mockReturnValue({ + theGraph: {}, + chainlist: [], + chains: [], + slip44: {}, + l2beat: { projects: [{ chainId: 1 }] }, + indexed: { all: [] }, + lastUpdated: null + }); + + const res = await app.inject({ method: 'GET', url: '/sources' }); + expect(res.json().sources.l2beat).toBe('loaded'); + }); 
+}); diff --git a/tests/unit/http/metrics.test.js b/tests/unit/http/metrics.test.js new file mode 100644 index 0000000..109d4d9 --- /dev/null +++ b/tests/unit/http/metrics.test.js @@ -0,0 +1,113 @@ +import { describe, it, expect, beforeEach, vi } from 'vitest'; + +vi.mock('../../../src/store/cache.js', () => ({ + getCachedData: vi.fn() +})); + +vi.mock('../../../src/services/rpcHealth.js', () => ({ + getRpcMonitoringStatus: vi.fn(() => ({ isMonitoring: false, lastUpdated: null })) +})); + +vi.mock('../../../src/services/validation.js', () => ({ + validateChainData: vi.fn(() => ({ + totalErrors: 0, + summary: { rule1: 0, rule12: 3, rule13: 1 }, + errorsByRule: {}, + allErrors: [] + })) +})); + +vi.mock('../../../src/services/l2beatRefresher.js', () => ({ + getL2BeatRefreshStatus: vi.fn(() => ({ + isRefreshing: false, + lastRefreshAt: null, + lastRefreshSource: null, + lastRefreshError: null, + lastRefreshProjectCount: 0, + intervalMs: 300000 + })) +})); + +import Fastify from 'fastify'; +import { getCachedData } from '../../../src/store/cache.js'; +import { metricsRoute } from '../../../src/http/routes/metrics.js'; +import { incCounter, _resetMetricsForTests } from '../../../src/util/metrics.js'; + +// Local alias to keep test body using `dataService.getCachedData.mockReturnValue(...)`. 
+const dataService = { getCachedData }; + +async function buildApp() { + const app = Fastify({ logger: false }); + await app.register(metricsRoute); + return app; +} + +describe('GET /metrics (Prometheus exposition)', () => { + let app; + + beforeEach(async () => { + vi.clearAllMocks(); + _resetMetricsForTests(); + app = await buildApp(); + }); + + it('returns text/plain content type', async () => { + dataService.getCachedData.mockReturnValue({ indexed: { all: [] } }); + const res = await app.inject({ method: 'GET', url: '/metrics' }); + expect(res.statusCode).toBe(200); + expect(res.headers['content-type']).toMatch(/text\/plain/); + }); + + it('renders chains_api_chains_total gauge', async () => { + dataService.getCachedData.mockReturnValue({ + indexed: { all: new Array(123).fill({}) } + }); + const res = await app.inject({ method: 'GET', url: '/metrics' }); + expect(res.body).toContain('chains_api_chains_total 123'); + }); + + it('renders source-loaded gauges for all 5 sources', async () => { + dataService.getCachedData.mockReturnValue({ + theGraph: {}, + chainlist: [], + chains: [], + slip44: {}, + l2beat: { projects: [{ chainId: 1 }] }, + indexed: { all: [] } + }); + const res = await app.inject({ method: 'GET', url: '/metrics' }); + expect(res.body).toMatch(/chains_api_source_loaded\{source="theGraph"\} 1/); + expect(res.body).toMatch(/chains_api_source_loaded\{source="l2beat"\} 1/); + }); + + it('renders 0 for sources that failed to load', async () => { + dataService.getCachedData.mockReturnValue({ + theGraph: null, + chainlist: null, + chains: null, + slip44: null, + l2beat: null, + indexed: { all: [] } + }); + const res = await app.inject({ method: 'GET', url: '/metrics' }); + expect(res.body).toMatch(/chains_api_source_loaded\{source="theGraph"\} 0/); + expect(res.body).toMatch(/chains_api_source_loaded\{source="l2beat"\} 0/); + }); + + it('renders incremented counters with labels', async () => { + dataService.getCachedData.mockReturnValue({ indexed: { 
all: [] } }); + incCounter('chains_api_refresh_total', { refresher: 'l2beat', outcome: 'live' }, 3); + incCounter('chains_api_refresh_total', { refresher: 'l2beat', outcome: 'fallback' }); + + const res = await app.inject({ method: 'GET', url: '/metrics' }); + expect(res.body).toMatch(/chains_api_refresh_total\{outcome="live",refresher="l2beat"\} 3/); + expect(res.body).toMatch(/chains_api_refresh_total\{outcome="fallback",refresher="l2beat"\} 1/); + }); + + it('renders validation error counts per rule from the summary', async () => { + dataService.getCachedData.mockReturnValue({ indexed: { all: [] } }); + const res = await app.inject({ method: 'GET', url: '/metrics' }); + expect(res.body).toMatch(/chains_api_validation_errors\{rule="rule12"\} 3/); + expect(res.body).toMatch(/chains_api_validation_errors\{rule="rule13"\} 1/); + }); +}); diff --git a/tests/unit/http/parseIntParam.test.js b/tests/unit/http/parseIntParam.test.js new file mode 100644 index 0000000..ced0ab0 --- /dev/null +++ b/tests/unit/http/parseIntParam.test.js @@ -0,0 +1,39 @@ +import { describe, it, expect } from 'vitest'; +import { parseIntParam } from '../../../src/http/util/parseIntParam.js'; + +describe('parseIntParam', () => { + it('returns the value unchanged for integer numbers', () => { + expect(parseIntParam(0)).toBe(0); + expect(parseIntParam(42)).toBe(42); + expect(parseIntParam(-5)).toBe(-5); + }); + + it('returns null for non-integer numbers', () => { + expect(parseIntParam(1.5)).toBeNull(); + expect(parseIntParam(Number.NaN)).toBeNull(); + expect(parseIntParam(Infinity)).toBeNull(); + }); + + it('parses well-formed integer strings', () => { + expect(parseIntParam('1')).toBe(1); + expect(parseIntParam(' 42 ')).toBe(42); + expect(parseIntParam('-7')).toBe(-7); + }); + + it('rejects strings that contain anything other than digits', () => { + expect(parseIntParam('1.5')).toBeNull(); + expect(parseIntParam('1e3')).toBeNull(); + expect(parseIntParam('0x10')).toBeNull(); + 
expect(parseIntParam('42abc')).toBeNull(); + expect(parseIntParam('')).toBeNull(); + expect(parseIntParam(' ')).toBeNull(); + }); + + it('returns null for non-string non-number inputs', () => { + expect(parseIntParam(null)).toBeNull(); + expect(parseIntParam(undefined)).toBeNull(); + expect(parseIntParam([])).toBeNull(); + expect(parseIntParam({})).toBeNull(); + expect(parseIntParam(true)).toBeNull(); + }); +}); diff --git a/tests/unit/http/sendError.test.js b/tests/unit/http/sendError.test.js new file mode 100644 index 0000000..96421c4 --- /dev/null +++ b/tests/unit/http/sendError.test.js @@ -0,0 +1,25 @@ +import { describe, it, expect, vi } from 'vitest'; +import { sendError } from '../../../src/http/util/sendError.js'; + +function createReply() { + const reply = {}; + reply.code = vi.fn().mockReturnValue(reply); + reply.send = vi.fn().mockReturnValue(reply); + return reply; +} + +describe('sendError', () => { + it('sets the status code and JSON error body', () => { + const reply = createReply(); + sendError(reply, 400, 'Invalid chain ID'); + + expect(reply.code).toHaveBeenCalledWith(400); + expect(reply.send).toHaveBeenCalledWith({ error: 'Invalid chain ID' }); + }); + + it('returns the reply so handlers can return it directly', () => { + const reply = createReply(); + const result = sendError(reply, 503, 'unavailable'); + expect(result).toBe(reply); + }); +}); diff --git a/tests/unit/index.test.js b/tests/unit/index.test.js index 7ecfcd4..8d262e1 100644 --- a/tests/unit/index.test.js +++ b/tests/unit/index.test.js @@ -16,52 +16,61 @@ vi.mock('../../config.js', () => ({ DATA_SOURCE_CHAINLIST: 'https://example.com/chainlist.json', DATA_SOURCE_CHAINS: 'https://example.com/chains.json', DATA_SOURCE_SLIP44: 'https://example.com/slip44.md', + DATA_SOURCE_L2BEAT_API: 'https://example.com/l2beat-summary', + L2BEAT_FETCH_TIMEOUT_MS: 1000, + L2BEAT_REFRESH_INTERVAL_MS: 60000, DATA_CACHE_ENABLED: false, DATA_CACHE_FILE: '.cache/test-data-cache.json', PROXY_URL: '', 
PROXY_ENABLED: false })); +// Stub the L2BEAT refresher so buildApp doesn't kick off a real network fetch. +vi.mock('../../src/services/l2beatRefresher.js', () => ({ + startL2BeatRefresh: vi.fn(), + stopL2BeatRefresh: vi.fn(), + runL2BeatRefresh: vi.fn(), + getL2BeatRefreshStatus: vi.fn(() => ({ + isRefreshing: false, + lastRefreshAt: null, + lastRefreshSource: null, + lastRefreshError: null, + lastRefreshProjectCount: 0, + intervalMs: 60000 + })) +})); + // Capture the onBackgroundRefreshSuccess callback let capturedCallback = null; -vi.mock('../../dataService.js', async () => { - const actual = await vi.importActual('../../dataService.js'); - return { - ...actual, - loadData: vi.fn().mockResolvedValue({}), - initializeDataOnStartup: vi.fn(async (options) => { - if (options?.onBackgroundRefreshSuccess) { - capturedCallback = options.onBackgroundRefreshSuccess; - } - return { indexed: { all: [], byChainId: {} }, lastUpdated: new Date().toISOString() }; - }), - getCachedData: vi.fn(() => ({ - indexed: { all: [], byChainId: {} }, - lastUpdated: new Date().toISOString(), - rpcHealth: {}, - lastRpcCheck: null - })), - searchChains: vi.fn(() => []), - getChainById: vi.fn(() => null), - getAllChains: vi.fn(() => []), - getAllRelations: vi.fn(() => ({})), - getRelationsById: vi.fn(() => null), - getEndpointsById: vi.fn(() => null), - getAllEndpoints: vi.fn(() => []), - getAllKeywords: vi.fn(() => ({})), - getRpcMonitoringResults: vi.fn(() => ({ - lastUpdated: null, - totalEndpoints: 0, - testedEndpoints: 0, - workingEndpoints: 0, - failedEndpoints: 0, - results: [] - })), - getRpcMonitoringStatus: vi.fn(() => ({ isMonitoring: false, lastUpdated: null })), - startRpcHealthCheck: vi.fn(), - validateChainData: vi.fn(() => []) - }; +// Shared mock fn instances used across the src/ module vi.mocks below. 
+const mocks = vi.hoisted(() => ({ + loadData: vi.fn(), + initializeDataOnStartup: vi.fn(), + startRpcHealthCheck: vi.fn(), + runRpcHealthCheck: vi.fn(), + getRpcMonitoringStatus: vi.fn(() => ({ isMonitoring: false, lastUpdated: null })) +})); + +vi.mock('../../src/services/loader.js', () => ({ + loadData: mocks.loadData, + initializeDataOnStartup: mocks.initializeDataOnStartup +})); + +vi.mock('../../src/services/rpcHealth.js', () => ({ + startRpcHealthCheck: mocks.startRpcHealthCheck, + runRpcHealthCheck: mocks.runRpcHealthCheck, + getRpcMonitoringStatus: mocks.getRpcMonitoringStatus +})); + +// Default implementations. initializeDataOnStartup captures the +// onBackgroundRefreshSuccess callback so we can invoke it from the test. +mocks.loadData.mockResolvedValue({}); +mocks.initializeDataOnStartup.mockImplementation(async (options) => { + if (options?.onBackgroundRefreshSuccess) { + capturedCallback = options.onBackgroundRefreshSuccess; + } + return { indexed: { all: [], byChainId: {} }, lastUpdated: new Date().toISOString() }; }); vi.mock('node:fs/promises', () => ({ @@ -99,7 +108,7 @@ describe('index.js - onBackgroundRefreshSuccess callback', () => { // Invoke it to exercise the callback capturedCallback(); - expect(dataService.startRpcHealthCheck).toHaveBeenCalled(); + expect(mocks.startRpcHealthCheck).toHaveBeenCalled(); await app.close(); }); diff --git a/tests/unit/mcp-tools.test.js b/tests/unit/mcp-tools.test.js index 0908b23..a59b1cc 100644 --- a/tests/unit/mcp-tools.test.js +++ b/tests/unit/mcp-tools.test.js @@ -36,6 +36,23 @@ vi.mock('../../dataService.js', () => ({ })), validateChainData: vi.fn(() => ({ totalErrors: 0, errorsByRule: {}, summary: {}, allErrors: [] })), traverseRelations: vi.fn(() => null), + countChainsByTag: vi.fn((chains) => { + let totalTestnets = 0; + let totalL2s = 0; + let totalBeacons = 0; + let totalMainnets = 0; + for (const chain of chains) { + const tags = chain.tags || []; + const isTestnet = tags.includes('Testnet'); + 
const isL2 = tags.includes('L2'); + const isBeacon = tags.includes('Beacon'); + if (isTestnet) totalTestnets += 1; + if (isL2) totalL2s += 1; + if (isBeacon) totalBeacons += 1; + if (!isTestnet && !isL2 && !isBeacon) totalMainnets += 1; + } + return { totalChains: chains.length, totalMainnets, totalTestnets, totalL2s, totalBeacons }; + }), getRpcMonitoringResults: vi.fn(() => ({ lastUpdated: '2024-01-01T00:00:00.000Z', totalEndpoints: 0, @@ -50,12 +67,22 @@ vi.mock('../../dataService.js', () => ({ })), })); +vi.mock('../../src/services/l2beatRefresher.js', () => ({ + getL2BeatRefreshStatus: vi.fn(() => ({ + isRefreshing: false, + lastRefreshAt: '2026-05-05T12:00:00.000Z', + lastRefreshSource: 'live', + lastRefreshError: null, + lastRefreshProjectCount: 28, + intervalMs: 300000, + })), +})); + vi.mock('../../clientsView.js', () => ({ getClientsByChain: vi.fn(() => null), summarizeChainClients: vi.fn(() => null), })); -// Mock priceService before importing vi.mock('../../priceService.js', () => ({ getPricesForChains: vi.fn(async (chainIds) => { const map = new Map(); @@ -124,10 +151,10 @@ describe('MCP Tools - Shared Module', () => { }); describe('getToolDefinitions', () => { - it('should return an array of 14 tools', () => { + it('should return an array of 17 tools', () => { const tools = getToolDefinitions(); expect(Array.isArray(tools)).toBe(true); - expect(tools.length).toBe(14); + expect(tools.length).toBe(17); }); it('should include all expected tool names', () => { @@ -843,5 +870,108 @@ describe('MCP Tools - Shared Module', () => { expect(data.message).toBe('Database error'); }); }); + + describe('handleToolCall - get_scaling_chains', () => { + it('returns chains with l2Beat data plus refresher status block', async () => { + vi.mocked(dataService.getAllChains).mockReturnValue([ + { chainId: 1, name: 'Ethereum', tags: [] }, + { + chainId: 42161, + name: 'Arbitrum One', + tags: ['L2'], + l2Beat: { slug: 'arbitrum', stage: 'Stage 1', category: 'Optimistic Rollup' 
}, + }, + { + chainId: 10, + name: 'OP Mainnet', + tags: ['L2'], + l2Beat: { slug: 'optimism', stage: 'Stage 1', category: 'Optimistic Rollup' }, + }, + ]); + + const result = await handleToolCall('get_scaling_chains', {}); + expect(result.isError).toBeUndefined(); + const data = JSON.parse(result.content[0].text); + expect(data.count).toBe(2); + expect(data.chains.map((c) => c.chainId)).toEqual([42161, 10]); + expect(data.refresher.lastRefreshSource).toBe('live'); + }); + + it('returns count=0 when no chains have l2Beat data', async () => { + vi.mocked(dataService.getAllChains).mockReturnValue([ + { chainId: 1, name: 'Ethereum', tags: [] }, + ]); + const result = await handleToolCall('get_scaling_chains', {}); + expect(result.isError).toBeUndefined(); + const data = JSON.parse(result.content[0].text); + expect(data.count).toBe(0); + expect(data.chains).toEqual([]); + }); + }); + + describe('handleToolCall - get_l2beat_by_id', () => { + it('returns the chain when L2BEAT data is present', async () => { + vi.mocked(dataService.getChainById).mockReturnValue({ + chainId: 42161, + name: 'Arbitrum One', + tags: ['L2'], + l2Beat: { slug: 'arbitrum', stage: 'Stage 1', category: 'Optimistic Rollup' }, + }); + const result = await handleToolCall('get_l2beat_by_id', { chainId: 42161 }); + expect(result.isError).toBeUndefined(); + const data = JSON.parse(result.content[0].text); + expect(data.chainId).toBe(42161); + expect(data.l2Beat.slug).toBe('arbitrum'); + }); + + it('returns an error when the chain has no L2BEAT data', async () => { + vi.mocked(dataService.getChainById).mockReturnValue({ + chainId: 1, + name: 'Ethereum', + tags: [], + }); + const result = await handleToolCall('get_l2beat_by_id', { chainId: 1 }); + expect(result.isError).toBe(true); + const data = JSON.parse(result.content[0].text); + expect(data.error).toBe('Not found'); + expect(data.message).toContain('not classified by L2BEAT'); + }); + + it('returns an error when the chain does not exist', async () => 
{ + vi.mocked(dataService.getChainById).mockReturnValue(null); + const result = await handleToolCall('get_l2beat_by_id', { chainId: 999999 }); + expect(result.isError).toBe(true); + const data = JSON.parse(result.content[0].text); + expect(data.error).toBe('Not found'); + expect(data.message).toContain('No chain with chainId'); + }); + + it('rejects invalid chainId', async () => { + const result = await handleToolCall('get_l2beat_by_id', { chainId: 'abc' }); + expect(result.isError).toBe(true); + const data = JSON.parse(result.content[0].text); + expect(data.error).toBe('Invalid chainId'); + }); + }); + + describe('handleToolCall - get_refresher_status', () => { + it('returns the unified refresher status block', async () => { + const result = await handleToolCall('get_refresher_status', {}); + expect(result.isError).toBeUndefined(); + const data = JSON.parse(result.content[0].text); + expect(data).toHaveProperty('lastRefreshAt'); + expect(data).toHaveProperty('lastRefreshSource', 'live'); + expect(data).toHaveProperty('intervalMs'); + }); + }); + + describe('getToolDefinitions includes new tools', () => { + it('exposes the three L2BEAT/scaling/refresher tools', () => { + const names = getToolDefinitions().map((t) => t.name); + expect(names).toContain('get_scaling_chains'); + expect(names).toContain('get_l2beat_by_id'); + expect(names).toContain('get_refresher_status'); + }); + }); }); diff --git a/tests/unit/services/chainRefresher.test.js b/tests/unit/services/chainRefresher.test.js new file mode 100644 index 0000000..822f658 --- /dev/null +++ b/tests/unit/services/chainRefresher.test.js @@ -0,0 +1,223 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; + +vi.mock('../../../src/sources/l2beat.js', () => ({ + fetchL2Beat: vi.fn() +})); + +vi.mock('../../../rpcUtil.js', () => ({ + jsonRpcCall: vi.fn() +})); + +vi.mock('../../../config.js', () => ({ + RPC_CHECK_TIMEOUT_MS: 5000, + RPC_CHECK_CONCURRENCY: 8, + MAX_ENDPOINTS_PER_CHAIN: 5, + 
L2BEAT_REFRESH_INTERVAL_MS: 60000, + DATA_SOURCE_L2BEAT_API: 'https://l2beat.test/api/scaling-summary', + L2BEAT_FETCH_TIMEOUT_MS: 1000, + PROXY_URL: '', + PROXY_ENABLED: false +})); + +import { fetchL2Beat } from '../../../src/sources/l2beat.js'; +import { jsonRpcCall } from '../../../rpcUtil.js'; +import { applyDataToCache, cachedData } from '../../../src/store/cache.js'; +import { + processChainRpc, + processL2BeatBatch, + tickOnce, + getChainRefresherStatus, + _resetChainRefresherForTests +} from '../../../src/services/chainRefresher.js'; + +function seedChain(chainId, rpc = []) { + const chain = { + chainId, + name: `Chain ${chainId}`, + tags: [], + relations: [], + sources: ['chainlist'], + rpc + }; + return chain; +} + +function seedCacheWith(chains) { + const byChainId = {}; + for (const c of chains) byChainId[c.chainId] = c; + applyDataToCache({ + indexed: { byChainId, byName: {}, all: chains }, + lastUpdated: '2026-05-05T00:00:00.000Z' + }); +} + +describe('chainRefresher', () => { + beforeEach(() => { + _resetChainRefresherForTests(); + applyDataToCache({}); + fetchL2Beat.mockReset(); + jsonRpcCall.mockReset(); + }); + + afterEach(() => { + _resetChainRefresherForTests(); + }); + + describe('processChainRpc', () => { + it('is a no-op when chain is not in the index', async () => { + seedCacheWith([seedChain(1)]); + await processChainRpc(999); + expect(cachedData.rpcHealth?.[999]).toBeUndefined(); + }); + + it('writes per-endpoint results and stamps chain.lastTested', async () => { + seedCacheWith([seedChain(1, ['https://rpc-a.example', 'https://rpc-b.example'])]); + jsonRpcCall + .mockResolvedValueOnce('Geth/v1.0') // rpc-a clientVersion + .mockResolvedValueOnce('0x10') // rpc-a blockNumber + .mockResolvedValueOnce('Erigon/v1.0') // rpc-b clientVersion + .mockResolvedValueOnce('0x12'); // rpc-b blockNumber + + await processChainRpc(1); + + expect(cachedData.rpcHealth[1]).toHaveLength(2); + expect(cachedData.rpcHealth[1][0].ok).toBe(true); + 
expect(cachedData.indexed.byChainId[1].lastTested).toMatch(/^\d{4}-\d{2}-\d{2}T/); + }); + + it('respects the data-version race guard', async () => { + seedCacheWith([seedChain(1, ['https://rpc-a.example'])]); + + // Have jsonRpcCall mutate cachedData.lastUpdated mid-flight, simulating + // a concurrent loadData() during the RPC sweep. + jsonRpcCall.mockImplementation(async () => { + cachedData.lastUpdated = '2026-05-05T01:00:00.000Z'; + return 'whatever'; + }); + + await processChainRpc(1); + + // The race guard should have skipped writing rpcHealth. + expect(cachedData.rpcHealth?.[1]).toBeUndefined(); + }); + + it('skips chains with no http endpoints', async () => { + seedCacheWith([seedChain(1, ['wss://only-websocket.example'])]); + await processChainRpc(1); + expect(cachedData.rpcHealth?.[1]).toBeUndefined(); + expect(jsonRpcCall).not.toHaveBeenCalled(); + }); + }); + + describe('processL2BeatBatch', () => { + it('skips when no data is loaded', async () => { + const result = await processL2BeatBatch(); + expect(result).toEqual({ skipped: 'no-data' }); + }); + + it('writes cachedData.l2beat and updates status on success', async () => { + seedCacheWith([seedChain(42161)]); + fetchL2Beat.mockResolvedValueOnce({ + source: 'live', + fetchedAt: '2026-05-05T12:00:00.000Z', + projects: [{ slug: 'arbitrum', chainId: 42161, displayName: 'Arbitrum One' }] + }); + + const result = await processL2BeatBatch(); + + expect(result.source).toBe('live'); + expect(result.projectCount).toBe(1); + expect(cachedData.l2beat?.source).toBe('live'); + expect(getChainRefresherStatus().l2beat.lastRefreshSource).toBe('live'); + }); + }); + + describe('tickOnce / queue scheduling', () => { + it('first tick processes l2beat_batch (head of fresh queue)', async () => { + seedCacheWith([seedChain(1, ['https://rpc-a.example'])]); + fetchL2Beat.mockResolvedValueOnce({ + source: 'live', fetchedAt: '2026-05-05T00:00:00.000Z', + projects: [{ slug: 'eth', chainId: 1, displayName: 'Ethereum' }] + }); + 
+ await tickOnce(); + + const status = getChainRefresherStatus(); + expect(status.lastTickJobType).toBe('l2beat_batch'); + expect(status.sweep.jobIndex).toBe(1); + expect(fetchL2Beat).toHaveBeenCalledTimes(1); + }); + + it('subsequent ticks process chain_rpc jobs in order', async () => { + seedCacheWith([ + seedChain(1, ['https://rpc-a.example']), + seedChain(2, ['https://rpc-b.example']) + ]); + fetchL2Beat.mockResolvedValueOnce({ + source: 'live', fetchedAt: '2026-05-05T00:00:00.000Z', projects: [] + }); + jsonRpcCall + .mockResolvedValueOnce('Geth/v1') + .mockResolvedValueOnce('0x10') + .mockResolvedValueOnce('Erigon/v1') + .mockResolvedValueOnce('0x12'); + + await tickOnce(); // l2beat_batch + await tickOnce(); // chain_rpc 1 + await tickOnce(); // chain_rpc 2 + + expect(cachedData.rpcHealth[1]).toHaveLength(1); + expect(cachedData.rpcHealth[2]).toHaveLength(1); + + const status = getChainRefresherStatus(); + expect(status.queueDepth).toBe(0); + expect(status.sweep.totalJobs).toBe(3); // 1 l2beat + 2 chains + }); + + it('rebuilds the queue once it drains, incrementing sweep number', async () => { + seedCacheWith([seedChain(1, [])]); // no RPCs to keep test deterministic + fetchL2Beat.mockResolvedValue({ + source: 'live', fetchedAt: '2026-05-05T00:00:00.000Z', projects: [] + }); + + await tickOnce(); // l2beat_batch (sweep #1) + await tickOnce(); // chain_rpc 1 (no-op, but increments cursor) + // queue empty -> next tick rebuilds + await tickOnce(); // l2beat_batch again (sweep #2) + + expect(getChainRefresherStatus().sweep.sweepNumber).toBe(2); + }); + + it('overlap guard: a tick in flight is skipped, not queued behind itself', async () => { + seedCacheWith([seedChain(1, [])]); + let release; + fetchL2Beat.mockImplementation(() => new Promise(r => { release = r; })); + + const first = tickOnce(); // sets tickInFlight = true + await tickOnce(); // immediately returns (no-op while in flight) + release({ source: 'live', fetchedAt: 'x', projects: [] }); + await 
first; + + // Only one fetchL2Beat call: the second tick saw tickInFlight and bailed. + expect(fetchL2Beat).toHaveBeenCalledTimes(1); + }); + }); + + describe('getChainRefresherStatus', () => { + it('exposes tick + sweep + per-job-type state', async () => { + seedCacheWith([seedChain(1, [])]); + fetchL2Beat.mockResolvedValue({ + source: 'fallback', fetchedAt: null, projects: [] + }); + + await tickOnce(); + const status = getChainRefresherStatus(); + + expect(status.tickIntervalMs).toBeGreaterThan(0); + expect(status.lastTickAt).toMatch(/^\d{4}-\d{2}-\d{2}T/); + expect(status.l2beat.lastRefreshSource).toBe('fallback'); + expect(status.rpc).toHaveProperty('isMonitoring'); + expect(status.sweep).toHaveProperty('sweepNumber'); + }); + }); +}); diff --git a/tests/unit/services/l2beatRefresher.test.js b/tests/unit/services/l2beatRefresher.test.js new file mode 100644 index 0000000..3184a85 --- /dev/null +++ b/tests/unit/services/l2beatRefresher.test.js @@ -0,0 +1,151 @@ +import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; + +vi.mock('../../../src/sources/l2beat.js', () => ({ + fetchL2Beat: vi.fn() +})); + +vi.mock('../../../config.js', () => ({ + L2BEAT_REFRESH_INTERVAL_MS: 60000, + DATA_SOURCE_L2BEAT_API: 'https://l2beat.test/api/scaling-summary', + L2BEAT_FETCH_TIMEOUT_MS: 1000, + // chainRefresher (which l2beatRefresher now delegates to) transitively + // imports rpcUtil.js + fetchUtil.js, which need these env constants. 
+ RPC_CHECK_TIMEOUT_MS: 5000, + RPC_CHECK_CONCURRENCY: 8, + PROXY_URL: '', + PROXY_ENABLED: false +})); + +import { fetchL2Beat } from '../../../src/sources/l2beat.js'; +import { applyDataToCache, cachedData } from '../../../src/store/cache.js'; +import { + runL2BeatRefresh, + startL2BeatRefresh, + stopL2BeatRefresh, + getL2BeatRefreshStatus +} from '../../../src/services/l2beatRefresher.js'; + +function seedIndexedCache() { + applyDataToCache({ + indexed: { + byChainId: { + 42161: { chainId: 42161, name: 'Arbitrum One', tags: [], sources: [], relations: [] }, + 10: { chainId: 10, name: 'OP Mainnet', tags: [], sources: [], relations: [] } + }, + byName: {}, + all: [] + }, + lastUpdated: '2026-05-05T00:00:00.000Z' + }); + cachedData.indexed.all = Object.values(cachedData.indexed.byChainId); +} + +describe('l2beatRefresher', () => { + beforeEach(() => { + fetchL2Beat.mockReset(); + applyDataToCache({}); + stopL2BeatRefresh(); + }); + + afterEach(() => { + stopL2BeatRefresh(); + }); + + describe('runL2BeatRefresh', () => { + it('skips when data is not loaded', async () => { + const result = await runL2BeatRefresh(); + expect(result).toEqual({ skipped: 'no-data' }); + expect(fetchL2Beat).not.toHaveBeenCalled(); + }); + + it('updates cache.l2beat and merges into indexed on success', async () => { + seedIndexedCache(); + fetchL2Beat.mockResolvedValueOnce({ + source: 'live', + fetchedAt: '2026-05-05T12:00:00.000Z', + projects: [ + { slug: 'arbitrum', chainId: 42161, displayName: 'Arbitrum One', stage: 'Stage 1', category: 'Optimistic Rollup' } + ] + }); + + const result = await runL2BeatRefresh(); + + expect(result.source).toBe('live'); + expect(result.projectCount).toBe(1); + expect(cachedData.l2beat?.source).toBe('live'); + expect(cachedData.indexed.byChainId[42161].l2Beat).toMatchObject({ + slug: 'arbitrum', + stage: 'Stage 1', + dataFreshness: 'live' + }); + }); + + it('skips writing when cache.lastUpdated changes mid-flight (race guard)', async () => { + 
seedIndexedCache(); + let resolveFetch; + fetchL2Beat.mockImplementation(() => new Promise(resolve => { resolveFetch = resolve; })); + + const refreshPromise = runL2BeatRefresh(); + + // Simulate a concurrent loadData() bumping lastUpdated. + cachedData.lastUpdated = '2026-05-05T01:00:00.000Z'; + resolveFetch({ + source: 'live', + fetchedAt: '2026-05-05T12:00:00.000Z', + projects: [{ slug: 'arbitrum', chainId: 42161, displayName: 'Arbitrum One' }] + }); + + const result = await refreshPromise; + expect(result).toEqual({ skipped: 'data-changed' }); + expect(cachedData.indexed.byChainId[42161].l2Beat).toBeUndefined(); + }); + + it('records lastRefreshError on fetch failure', async () => { + seedIndexedCache(); + fetchL2Beat.mockRejectedValueOnce(new Error('boom')); + + const result = await runL2BeatRefresh(); + expect(result.skipped).toBe('fetch-error'); + expect(getL2BeatRefreshStatus().lastRefreshError).toBe('boom'); + }); + }); + + describe('getL2BeatRefreshStatus', () => { + it('exposes intervalMs and refresh state', async () => { + seedIndexedCache(); + fetchL2Beat.mockResolvedValueOnce({ + source: 'fallback', + fetchedAt: null, + projects: [{ slug: 'arbitrum', chainId: 42161, displayName: 'Arbitrum One' }] + }); + + await runL2BeatRefresh(); + const status = getL2BeatRefreshStatus(); + expect(status.intervalMs).toBe(60000); + expect(status.lastRefreshSource).toBe('fallback'); + expect(status.lastRefreshProjectCount).toBe(1); + expect(status.lastRefreshAt).toMatch(/^\d{4}-\d{2}-\d{2}T/); + expect(status.isRefreshing).toBe(false); + }); + }); + + describe('startL2BeatRefresh idempotency', () => { + it('starting twice does not double-schedule', async () => { + seedIndexedCache(); + fetchL2Beat.mockResolvedValue({ + source: 'live', + fetchedAt: '2026-05-05T12:00:00.000Z', + projects: [] + }); + + startL2BeatRefresh(); + startL2BeatRefresh(); + // Allow the immediate kick-off to settle. 
+ await new Promise(r => setImmediate(r)); + await new Promise(r => setImmediate(r)); + + expect(fetchL2Beat.mock.calls.length).toBeLessThanOrEqual(2); + stopL2BeatRefresh(); + }); + }); +}); diff --git a/tests/unit/services/validation.test.js b/tests/unit/services/validation.test.js new file mode 100644 index 0000000..90038b5 --- /dev/null +++ b/tests/unit/services/validation.test.js @@ -0,0 +1,485 @@ +import { describe, it, expect, beforeEach } from 'vitest'; +import { applyDataToCache, cachedData } from '../../../src/store/cache.js'; +import { validateChainData } from '../../../src/services/validation.js'; + +/** + * validateChainData() short-circuits to an error when any of the 3 upstream + * sources are absent. To exercise the L2BEAT rules in isolation we have to + * seed all of theGraph + chainlist + chains, even if they don't matter for + * the specific rule under test. + */ +function seedCache({ + chains, + l2beatProjects = null, + rawChains = [], + rawChainlist = [], + rpcHealth = {} +}) { + const byChainId = {}; + for (const c of chains) byChainId[c.chainId] = c; + + applyDataToCache({ + theGraph: { networks: [] }, + chainlist: rawChainlist, + chains: rawChains, + slip44: {}, + l2beat: l2beatProjects + ? 
{ source: 'live', fetchedAt: '2026-05-05T00:00:00.000Z', projects: l2beatProjects } + : null, + indexed: { + byChainId, + byName: {}, + all: chains + }, + rpcHealth, + lastUpdated: '2026-05-05T00:00:00.000Z' + }); + cachedData.indexed.all = Object.values(cachedData.indexed.byChainId); +} + +function findErrorsForRule(report, ruleNumber) { + return report.allErrors.filter(e => e.rule === ruleNumber); +} + +describe('validation — L2BEAT cross-source rules', () => { + beforeEach(() => { + applyDataToCache({}); + }); + + describe('rule 7: l2beat_missing_classification', () => { + it('flags chains classified by L2BEAT but with no l2Of/testnetOf relation from other sources', () => { + seedCache({ + chains: [{ + chainId: 42161, + name: 'Arbitrum One', + tags: ['L2'], + relations: [], + l2Beat: { slug: 'arbitrum', stage: 'Stage 1', category: 'Optimistic Rollup' } + }] + }); + const report = validateChainData(); + const errs = findErrorsForRule(report, 7); + expect(errs).toHaveLength(1); + expect(errs[0].l2BeatSlug).toBe('arbitrum'); + }); + + it('does NOT flag chains with a corroborating l2Of relation from theGraph or chains', () => { + seedCache({ + chains: [{ + chainId: 42161, + name: 'Arbitrum One', + tags: ['L2'], + relations: [{ kind: 'l2Of', chainId: 1, source: 'theGraph' }], + l2Beat: { slug: 'arbitrum', stage: 'Stage 1', category: 'Optimistic Rollup' } + }] + }); + expect(findErrorsForRule(validateChainData(), 7)).toHaveLength(0); + }); + + it('does NOT flag chains without any L2BEAT data', () => { + seedCache({ + chains: [{ chainId: 1, name: 'Ethereum', tags: [], relations: [] }] + }); + expect(findErrorsForRule(validateChainData(), 7)).toHaveLength(0); + }); + }); + + describe('rule 8: l2beat_hostchain_no_relation', () => { + it('flags chains where L2BEAT hostChainId has no matching l2Of/testnetOf relation', () => { + seedCache({ + chains: [{ + chainId: 42161, + name: 'Arbitrum One', + tags: ['L2'], + relations: [{ kind: 'l2Of', chainId: 999, source: 'theGraph' 
}], + l2Beat: { slug: 'arbitrum', hostChainId: 1 } + }] + }); + const errs = findErrorsForRule(validateChainData(), 8); + expect(errs).toHaveLength(1); + expect(errs[0].l2BeatHostChainId).toBe(1); + }); + + it('does NOT flag chains where a relation points to hostChainId', () => { + seedCache({ + chains: [{ + chainId: 42161, + name: 'Arbitrum One', + tags: ['L2'], + relations: [{ kind: 'l2Of', chainId: 1, source: 'theGraph' }], + l2Beat: { slug: 'arbitrum', hostChainId: 1 } + }] + }); + expect(findErrorsForRule(validateChainData(), 8)).toHaveLength(0); + }); + }); + + describe('rule 9: l2beat_category_name_mismatch', () => { + it('flags ZK category with optimistic-sounding name', () => { + seedCache({ + chains: [{ + chainId: 999, + name: 'Optimistic Rollup Project', + tags: ['L2'], + relations: [{ kind: 'l2Of', chainId: 1, source: 'theGraph' }], + l2Beat: { + slug: 'something', + displayName: 'Optimistic Rollup Project', + category: 'ZK Rollup', + hostChainId: 1 + } + }] + }); + const errs = findErrorsForRule(validateChainData(), 9); + expect(errs).toHaveLength(1); + expect(errs[0].l2BeatCategory).toBe('ZK Rollup'); + }); + + it('does NOT flag matching category/name', () => { + seedCache({ + chains: [{ + chainId: 324, + name: 'ZKsync Era', + tags: ['L2'], + relations: [{ kind: 'l2Of', chainId: 1, source: 'theGraph' }], + l2Beat: { + slug: 'zksync-era', + displayName: 'ZKsync Era', + category: 'ZK Rollup', + hostChainId: 1 + } + }] + }); + expect(findErrorsForRule(validateChainData(), 9)).toHaveLength(0); + }); + }); + + describe('rule 10: l2beat_unknown_chains', () => { + it('flags L2BEAT projects whose chainId is not in our registry', () => { + seedCache({ + chains: [{ + chainId: 42161, + name: 'Arbitrum One', + tags: ['L2'], + relations: [{ kind: 'l2Of', chainId: 1, source: 'theGraph' }], + l2Beat: { slug: 'arbitrum', hostChainId: 1 } + }], + l2beatProjects: [ + { slug: 'arbitrum', chainId: 42161, displayName: 'Arbitrum One' }, + { slug: 'brand-new-l2', chainId: 
999888, displayName: 'Brand New L2', stage: 'Stage 0' } + ] + }); + const errs = findErrorsForRule(validateChainData(), 10); + expect(errs).toHaveLength(1); + expect(errs[0].chainId).toBe(999888); + expect(errs[0].l2BeatSlug).toBe('brand-new-l2'); + }); + + it('emits nothing when every L2BEAT project maps to a known chainId', () => { + seedCache({ + chains: [{ + chainId: 42161, + name: 'Arbitrum One', + tags: ['L2'], + relations: [{ kind: 'l2Of', chainId: 1, source: 'theGraph' }], + l2Beat: { slug: 'arbitrum', hostChainId: 1 } + }], + l2beatProjects: [{ slug: 'arbitrum', chainId: 42161, displayName: 'Arbitrum One' }] + }); + expect(findErrorsForRule(validateChainData(), 10)).toHaveLength(0); + }); + + it('emits nothing when l2beat cache is unavailable', () => { + seedCache({ + chains: [{ chainId: 1, name: 'Ethereum', tags: [], relations: [] }], + l2beatProjects: null + }); + expect(findErrorsForRule(validateChainData(), 10)).toHaveLength(0); + }); + }); + + describe('rule 11: l2beat_stage_zero_high_tvs', () => { + it('flags Stage 0 chains with TVS > $1B', () => { + seedCache({ + chains: [{ + chainId: 81457, + name: 'Blast', + tags: ['L2'], + relations: [{ kind: 'l2Of', chainId: 1, source: 'theGraph' }], + l2Beat: { slug: 'blast', stage: 'Stage 0', tvs: 2_500_000_000, hostChainId: 1 } + }] + }); + const errs = findErrorsForRule(validateChainData(), 11); + expect(errs).toHaveLength(1); + expect(errs[0].l2BeatTvs).toBe(2_500_000_000); + }); + + it('does NOT flag Stage 1+ chains regardless of TVS', () => { + seedCache({ + chains: [{ + chainId: 42161, + name: 'Arbitrum One', + tags: ['L2'], + relations: [{ kind: 'l2Of', chainId: 1, source: 'theGraph' }], + l2Beat: { slug: 'arbitrum', stage: 'Stage 1', tvs: 10_000_000_000, hostChainId: 1 } + }] + }); + expect(findErrorsForRule(validateChainData(), 11)).toHaveLength(0); + }); + + it('does NOT flag Stage 0 chains below the threshold', () => { + seedCache({ + chains: [{ + chainId: 999, + name: 'Small L2', + tags: ['L2'], + 
relations: [{ kind: 'l2Of', chainId: 1, source: 'theGraph' }], + l2Beat: { slug: 'small', stage: 'Stage 0', tvs: 100_000_000, hostChainId: 1 } + }] + }); + expect(findErrorsForRule(validateChainData(), 11)).toHaveLength(0); + }); + }); + + describe('rule 12: rpc_block_height_drift', () => { + it('flags when working RPCs disagree by more than 100 blocks', () => { + seedCache({ + chains: [{ chainId: 1, name: 'Ethereum', tags: [], relations: [] }], + rpcHealth: { + 1: [ + { url: 'https://rpc-a', ok: true, blockHeight: 1_000_000 }, + { url: 'https://rpc-b', ok: true, blockHeight: 1_000_500 }, + { url: 'https://rpc-c', ok: false, error: 'timeout' } + ] + } + }); + const errs = findErrorsForRule(validateChainData(), 12); + expect(errs).toHaveLength(1); + expect(errs[0].drift).toBe(500); + expect(errs[0].laggingEndpoint.url).toBe('https://rpc-a'); + expect(errs[0].leadingEndpoint.url).toBe('https://rpc-b'); + }); + + it('does NOT flag when RPCs agree within the threshold', () => { + seedCache({ + chains: [{ chainId: 1, name: 'Ethereum', tags: [], relations: [] }], + rpcHealth: { + 1: [ + { url: 'https://rpc-a', ok: true, blockHeight: 1_000_000 }, + { url: 'https://rpc-b', ok: true, blockHeight: 1_000_010 } + ] + } + }); + expect(findErrorsForRule(validateChainData(), 12)).toHaveLength(0); + }); + + it('does NOT flag chains with fewer than 2 working endpoints', () => { + seedCache({ + chains: [{ chainId: 1, name: 'Ethereum', tags: [], relations: [] }], + rpcHealth: { + 1: [{ url: 'https://rpc-a', ok: true, blockHeight: 1_000_000 }] + } + }); + expect(findErrorsForRule(validateChainData(), 12)).toHaveLength(0); + }); + }); + + describe('rule 13: name_disagreement', () => { + it('flags meaningfully different names from chains.json vs theGraph', () => { + seedCache({ + chains: [{ + chainId: 137, + name: 'Polygon', + tags: [], + relations: [], + sources: ['chains', 'theGraph'], + theGraph: { fullName: 'Matic Network' } + }] + }); + const errs = 
findErrorsForRule(validateChainData(), 13); + expect(errs).toHaveLength(1); + expect(errs[0].chainsName).toBe('Polygon'); + expect(errs[0].theGraphName).toBe('Matic Network'); + }); + + it('does NOT flag substring variations like "Arbitrum One" vs "Arbitrum"', () => { + seedCache({ + chains: [{ + chainId: 42161, + name: 'Arbitrum One', + tags: [], + relations: [], + sources: ['chains', 'theGraph'], + theGraph: { fullName: 'Arbitrum' } + }] + }); + expect(findErrorsForRule(validateChainData(), 13)).toHaveLength(0); + }); + + it('ignores "Mainnet" suffix differences', () => { + seedCache({ + chains: [{ + chainId: 1, + name: 'Ethereum', + tags: [], + relations: [], + sources: ['chains', 'theGraph'], + theGraph: { fullName: 'Ethereum Mainnet' } + }] + }); + expect(findErrorsForRule(validateChainData(), 13)).toHaveLength(0); + }); + }); + + describe('rule 14: native_currency_mismatch', () => { + it('flags when chains.json and theGraph disagree on native symbol', () => { + seedCache({ + chains: [{ + chainId: 1, + name: 'Ethereum', + tags: [], + relations: [], + nativeCurrency: { symbol: 'ETH' }, + theGraph: { nativeToken: 'ETC' } + }] + }); + const errs = findErrorsForRule(validateChainData(), 14); + expect(errs).toHaveLength(1); + expect(errs[0].chainsSymbol).toBe('ETH'); + expect(errs[0].theGraphSymbol).toBe('ETC'); + }); + + it('is case-insensitive', () => { + seedCache({ + chains: [{ + chainId: 1, + name: 'Ethereum', + tags: [], + relations: [], + nativeCurrency: { symbol: 'eth' }, + theGraph: { nativeToken: 'ETH' } + }] + }); + expect(findErrorsForRule(validateChainData(), 14)).toHaveLength(0); + }); + }); + + describe('rule 15: slip44_native_symbol_mismatch', () => { + it('flags when slip44 symbol disagrees with native currency symbol', () => { + seedCache({ + chains: [{ + chainId: 1, + name: 'Ethereum', + tags: [], + relations: [], + nativeCurrency: { symbol: 'ETH' }, + slip44Info: { coinType: 60, symbol: 'BTC' } + }] + }); + const errs = 
findErrorsForRule(validateChainData(), 15); + expect(errs).toHaveLength(1); + expect(errs[0].slip44CoinType).toBe(60); + }); + + it('does NOT flag matching symbols', () => { + seedCache({ + chains: [{ + chainId: 1, + name: 'Ethereum', + tags: [], + relations: [], + nativeCurrency: { symbol: 'ETH' }, + slip44Info: { coinType: 60, symbol: 'ETH' } + }] + }); + expect(findErrorsForRule(validateChainData(), 15)).toHaveLength(0); + }); + }); + + describe('rule 16: rpc_url_in_one_source_only', () => { + it('flags healthy RPC URLs that exist in chainlist but not chains.json', () => { + seedCache({ + chains: [{ + chainId: 1, + name: 'Ethereum', + tags: [], + relations: [], + sources: ['chains', 'chainlist'] + }], + rawChains: [{ chainId: 1, rpc: ['https://rpc-old.example'] }], + rawChainlist: [{ chainId: 1, rpc: ['https://rpc-old.example', 'https://rpc-new.example'] }], + rpcHealth: { + 1: [ + { url: 'https://rpc-old.example', ok: true, blockHeight: 1000 }, + { url: 'https://rpc-new.example', ok: true, blockHeight: 1000 } + ] + } + }); + const errs = findErrorsForRule(validateChainData(), 16); + expect(errs).toHaveLength(1); + expect(errs[0].onlyInChainlistHealthy).toContain('https://rpc-new.example'); + expect(errs[0].onlyInChainsHealthy).toEqual([]); + }); + + it('does NOT flag URLs that are unhealthy in both sources', () => { + seedCache({ + chains: [{ + chainId: 1, + name: 'Ethereum', + tags: [], + relations: [], + sources: ['chains', 'chainlist'] + }], + rawChains: [{ chainId: 1, rpc: ['https://rpc-a'] }], + rawChainlist: [{ chainId: 1, rpc: ['https://rpc-a', 'https://rpc-b-broken'] }], + rpcHealth: { + 1: [ + { url: 'https://rpc-a', ok: true, blockHeight: 1000 }, + { url: 'https://rpc-b-broken', ok: false, error: 'timeout' } + ] + } + }); + expect(findErrorsForRule(validateChainData(), 16)).toHaveLength(0); + }); + + it('does NOT flag when chain is only in one source', () => { + seedCache({ + chains: [{ + chainId: 1, + name: 'Ethereum', + tags: [], + relations: [], + 
sources: ['chains']
+        }],
+        rawChains: [{ chainId: 1, rpc: ['https://rpc-a'] }],
+        rawChainlist: [],
+        rpcHealth: { 1: [{ url: 'https://rpc-a', ok: true, blockHeight: 1000 }] }
+      });
+      expect(findErrorsForRule(validateChainData(), 16)).toHaveLength(0);
+    });
+  });
+
+  describe('summary aggregation', () => {
+    it('reports counts for all 10 rules in summary + errorsByRule', () => {
+      seedCache({
+        chains: [{
+          chainId: 42161,
+          name: 'Arbitrum One',
+          tags: ['L2'],
+          relations: [],
+          l2Beat: { slug: 'arbitrum', stage: 'Stage 1', category: 'Optimistic Rollup', hostChainId: 1 }
+        }]
+      });
+      const report = validateChainData();
+      for (const n of [7, 8, 9, 10, 11, 12, 13, 14, 15, 16]) {
+        expect(report.summary).toHaveProperty(`rule${n}`);
+      }
+      expect(report.errorsByRule).toHaveProperty('rule7_l2beat_missing_classification');
+      expect(report.errorsByRule).toHaveProperty('rule12_rpc_block_height_drift');
+      expect(report.errorsByRule).toHaveProperty('rule13_name_disagreement');
+      expect(report.errorsByRule).toHaveProperty('rule14_native_currency_mismatch');
+      expect(report.errorsByRule).toHaveProperty('rule15_slip44_native_symbol_mismatch');
+      expect(report.errorsByRule).toHaveProperty('rule16_rpc_url_in_one_source_only');
+    });
+  });
+});
diff --git a/tests/unit/sources/l2beat.test.js b/tests/unit/sources/l2beat.test.js
new file mode 100644
index 0000000..ed2f5d2
--- /dev/null
+++ b/tests/unit/sources/l2beat.test.js
@@ -0,0 +1,137 @@
+import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
+import { normalizeL2BeatResponse } from '../../../src/sources/l2beat.js';
+
+// Mock fetchUtil and config so fetchL2Beat can be exercised without network access.
+vi.mock('../../../fetchUtil.js', () => ({ + proxyFetch: vi.fn() +})); + +vi.mock('../../../config.js', () => ({ + DATA_SOURCE_L2BEAT_API: 'https://l2beat.test/api/scaling-summary', + L2BEAT_FETCH_TIMEOUT_MS: 1000 +})); + +describe('normalizeL2BeatResponse', () => { + it('returns [] for empty / unexpected payload shapes', () => { + expect(normalizeL2BeatResponse(null)).toEqual([]); + expect(normalizeL2BeatResponse({})).toEqual([]); + expect(normalizeL2BeatResponse({ projects: 'not-an-array' })).toEqual([]); + }); + + it('extracts projects from { projects: [...] } shape', () => { + const result = normalizeL2BeatResponse({ + projects: [ + { slug: 'arbitrum', chainId: 42161, name: 'Arbitrum One', stage: 'Stage 1' } + ] + }); + expect(result).toHaveLength(1); + expect(result[0]).toMatchObject({ + slug: 'arbitrum', + chainId: 42161, + displayName: 'Arbitrum One', + stage: 'Stage 1' + }); + }); + + it('extracts projects from { data: { projects: [...] } } shape', () => { + const result = normalizeL2BeatResponse({ + data: { projects: [{ slug: 'optimism', chainId: 10, name: 'OP Mainnet' }] } + }); + expect(result).toHaveLength(1); + expect(result[0].slug).toBe('optimism'); + }); + + it('extracts projects from a bare array shape', () => { + const result = normalizeL2BeatResponse([ + { slug: 'base', chainId: 8453, name: 'Base' } + ]); + expect(result).toHaveLength(1); + expect(result[0].slug).toBe('base'); + }); + + it('drops projects without slug or chainId', () => { + const result = normalizeL2BeatResponse({ + projects: [ + { slug: 'arbitrum', chainId: 42161, name: 'Arbitrum One' }, + { slug: 'no-chain-id', name: 'Something' }, + { chainId: 999, name: 'No Slug' } + ] + }); + expect(result).toHaveLength(1); + expect(result[0].slug).toBe('arbitrum'); + }); + + it('handles nested stage/daLayer/tvs shapes defensively', () => { + const result = normalizeL2BeatResponse({ + projects: [ + { + slug: 'arbitrum', + chainId: 42161, + name: 'Arbitrum One', + stage: { stage: 'Stage 1' }, 
+ daLayer: { name: 'Ethereum' }, + tvs: { total: 1234567, breakdown: { canonical: 1000000, external: 234567, native: 0 } } + } + ] + }); + expect(result[0].stage).toBe('Stage 1'); + expect(result[0].daLayer).toBe('Ethereum'); + expect(result[0].tvs).toBe(1234567); + expect(result[0].tvsBreakdown).toEqual({ canonical: 1000000, external: 234567, native: 0 }); + }); + + it('falls back to chainConfig.chainId when chainId is not at top level', () => { + const result = normalizeL2BeatResponse({ + projects: [ + { slug: 'arbitrum', chainConfig: { chainId: 42161 }, name: 'Arbitrum One' } + ] + }); + expect(result[0].chainId).toBe(42161); + }); +}); + +describe('fetchL2Beat (integration with mocked transport)', () => { + let proxyFetch; + let fetchL2Beat; + + beforeEach(async () => { + vi.resetModules(); + proxyFetch = (await import('../../../fetchUtil.js')).proxyFetch; + fetchL2Beat = (await import('../../../src/sources/l2beat.js')).fetchL2Beat; + proxyFetch.mockReset(); + }); + + afterEach(() => { + vi.restoreAllMocks(); + }); + + it('returns source: live when the API succeeds', async () => { + proxyFetch.mockResolvedValueOnce({ + ok: true, + status: 200, + json: async () => ({ + projects: [{ slug: 'arbitrum', chainId: 42161, name: 'Arbitrum One', stage: 'Stage 1' }] + }) + }); + + const result = await fetchL2Beat(); + expect(result.source).toBe('live'); + expect(result.fetchedAt).toMatch(/^\d{4}-\d{2}-\d{2}T/); + expect(result.projects[0].slug).toBe('arbitrum'); + }); + + it('falls back to static JSON when the live API returns 403', async () => { + proxyFetch.mockResolvedValueOnce({ ok: false, status: 403 }); + const result = await fetchL2Beat(); + expect(result.source).toBe('fallback'); + expect(result.projects.length).toBeGreaterThan(0); + expect(result.projects.find(p => p.slug === 'arbitrum')).toBeDefined(); + }); + + it('falls back to static JSON when the live API throws', async () => { + proxyFetch.mockRejectedValueOnce(new Error('ECONNRESET')); + const result = 
await fetchL2Beat(); + expect(result.source).toBe('fallback'); + expect(result.projects.length).toBeGreaterThan(0); + }); +}); diff --git a/tests/unit/sources/slip44.test.js b/tests/unit/sources/slip44.test.js new file mode 100644 index 0000000..2103cc1 --- /dev/null +++ b/tests/unit/sources/slip44.test.js @@ -0,0 +1,63 @@ +import { describe, it, expect } from 'vitest'; +import { parseSLIP44 } from '../../../src/sources/slip44.js'; + +describe('parseSLIP44 (direct import from src/sources/slip44.js)', () => { + it('returns an empty object for empty input', () => { + expect(parseSLIP44('')).toEqual({}); + expect(parseSLIP44(null)).toEqual({}); + expect(parseSLIP44(undefined)).toEqual({}); + }); + + it('parses a minimal SLIP-0044 markdown table', () => { + const md = [ + '| Coin type | Path component | Symbol | Coin |', + '|-----------|----------------|--------|------|', + '| 0 | 0x80000000 | BTC | Bitcoin |', + '| 60 | 0x8000003c | ETH | Ether |' + ].join('\n'); + + const result = parseSLIP44(md); + + expect(result[0]).toEqual({ + coinType: 0, + pathComponent: '0x80000000', + symbol: 'BTC', + coin: 'Bitcoin' + }); + expect(result[60]).toEqual({ + coinType: 60, + pathComponent: '0x8000003c', + symbol: 'ETH', + coin: 'Ether' + }); + }); + + it('skips rows that are not numeric coin types', () => { + const md = [ + '| Coin type | Path component | Symbol | Coin |', + '|-----------|----------------|--------|------|', + '| n/a | 0x80000000 | XX | Bad |', + '| 1 | 0x80000001 | TBTC | Bitcoin Testnet |' + ].join('\n'); + + const result = parseSLIP44(md); + expect(Object.keys(result)).toEqual(['1']); + expect(result[1].coin).toBe('Bitcoin Testnet'); + }); + + it('ignores lines outside of the table section', () => { + const md = [ + '# SLIP-0044', + 'Some intro paragraph.', + '', + '| Coin type | Path component | Symbol | Coin |', + '|-----------|----------------|--------|------|', + '| 60 | 0x8000003c | ETH | Ether |', + '', + 'Trailing text.' 
+ ].join('\n'); + + const result = parseSLIP44(md); + expect(Object.keys(result)).toEqual(['60']); + }); +}); diff --git a/tests/unit/store/cache.test.js b/tests/unit/store/cache.test.js new file mode 100644 index 0000000..8ffa575 --- /dev/null +++ b/tests/unit/store/cache.test.js @@ -0,0 +1,48 @@ +import { describe, it, expect, beforeEach } from 'vitest'; +import { cachedData, applyDataToCache, getCachedData } from '../../../src/store/cache.js'; + +describe('store/cache', () => { + beforeEach(() => { + applyDataToCache({}); + }); + + it('exposes the singleton via getCachedData() and the live binding', () => { + expect(getCachedData()).toBe(cachedData); + }); + + it('applyDataToCache replaces every tracked field', () => { + applyDataToCache({ + theGraph: { networks: [] }, + chainlist: [{ chainId: 1 }], + chains: [{ chainId: 1 }], + slip44: { 60: {} }, + indexed: { byChainId: {}, byName: {}, all: [] }, + lastUpdated: '2026-01-01T00:00:00.000Z', + rpcHealth: { 1: [] }, + lastRpcCheck: '2026-01-01T00:00:00.000Z' + }); + + expect(cachedData.theGraph).toEqual({ networks: [] }); + expect(cachedData.chainlist).toEqual([{ chainId: 1 }]); + expect(cachedData.chains).toEqual([{ chainId: 1 }]); + expect(cachedData.slip44).toEqual({ 60: {} }); + expect(cachedData.indexed).toEqual({ byChainId: {}, byName: {}, all: [] }); + expect(cachedData.lastUpdated).toBe('2026-01-01T00:00:00.000Z'); + expect(cachedData.rpcHealth).toEqual({ 1: [] }); + expect(cachedData.lastRpcCheck).toBe('2026-01-01T00:00:00.000Z'); + }); + + it('applyDataToCache resets fields to safe defaults when omitted', () => { + applyDataToCache({ theGraph: { networks: [] } }); + applyDataToCache({}); + + expect(cachedData.theGraph).toBeNull(); + expect(cachedData.chainlist).toBeNull(); + expect(cachedData.chains).toBeNull(); + expect(cachedData.slip44).toEqual({}); + expect(cachedData.indexed).toBeNull(); + expect(cachedData.lastUpdated).toBeNull(); + expect(cachedData.rpcHealth).toEqual({}); + 
expect(cachedData.lastRpcCheck).toBeNull(); + }); +}); diff --git a/tests/unit/store/indexer-l2beat.test.js b/tests/unit/store/indexer-l2beat.test.js new file mode 100644 index 0000000..b25cf36 --- /dev/null +++ b/tests/unit/store/indexer-l2beat.test.js @@ -0,0 +1,127 @@ +import { describe, it, expect } from 'vitest'; +import { indexData } from '../../../src/store/indexer.js'; + +describe('indexer — L2BEAT integration', () => { + function buildBaseChainsList() { + return [ + { chainId: 42161, name: 'Arbitrum One' }, + { chainId: 10, name: 'OP Mainnet' }, + { chainId: 1, name: 'Ethereum' } + ]; + } + + function buildL2Beat(projects) { + return { source: 'live', fetchedAt: '2026-05-05T12:00:00.000Z', projects }; + } + + it('merges L2BEAT fields onto matching chains by chainId', () => { + const indexed = indexData(null, null, buildBaseChainsList(), null, buildL2Beat([ + { + slug: 'arbitrum', + chainId: 42161, + displayName: 'Arbitrum One', + stage: 'Stage 1', + category: 'Optimistic Rollup', + stack: 'Arbitrum Orbit', + daLayer: 'Ethereum', + hostChainId: 1 + } + ])); + + expect(indexed.byChainId[42161].l2Beat).toMatchObject({ + slug: 'arbitrum', + stage: 'Stage 1', + category: 'Optimistic Rollup', + stack: 'Arbitrum Orbit', + daLayer: 'Ethereum', + hostChainId: 1, + dataFreshness: 'live', + fetchedAt: '2026-05-05T12:00:00.000Z' + }); + }); + + it('adds L2 tag when L2BEAT classifies a chain', () => { + const indexed = indexData(null, null, buildBaseChainsList(), null, buildL2Beat([ + { slug: 'arbitrum', chainId: 42161, displayName: 'Arbitrum One', category: 'Optimistic Rollup' } + ])); + expect(indexed.byChainId[42161].tags).toContain('L2'); + }); + + it('adds ZK tag for ZK Rollup category', () => { + const indexed = indexData(null, null, + [{ chainId: 324, name: 'ZKsync Era' }], + null, + buildL2Beat([{ slug: 'zksync-era', chainId: 324, displayName: 'ZKsync Era', category: 'ZK Rollup' }]) + ); + expect(indexed.byChainId[324].tags).toContain('L2'); + 
expect(indexed.byChainId[324].tags).toContain('ZK'); + }); + + it('adds Validium tag for Validium category', () => { + const indexed = indexData(null, null, + [{ chainId: 196, name: 'X Layer' }], + null, + buildL2Beat([{ slug: 'xlayer', chainId: 196, displayName: 'X Layer', category: 'Validium' }]) + ); + expect(indexed.byChainId[196].tags).toContain('Validium'); + }); + + it('adds l2beat to chain.sources', () => { + const indexed = indexData(null, null, buildBaseChainsList(), null, buildL2Beat([ + { slug: 'arbitrum', chainId: 42161, displayName: 'Arbitrum One' } + ])); + expect(indexed.byChainId[42161].sources).toContain('l2beat'); + }); + + it('skips L2BEAT projects whose chainId is not in the chain list', () => { + const indexed = indexData(null, null, buildBaseChainsList(), null, buildL2Beat([ + { slug: 'arbitrum', chainId: 42161, displayName: 'Arbitrum One' }, + { slug: 'unknown-chain', chainId: 999999, displayName: 'Unknown' } + ])); + expect(indexed.byChainId[42161].l2Beat).toBeDefined(); + expect(indexed.byChainId[999999]).toBeUndefined(); + }); + + it('is a no-op when l2beat data is null/empty', () => { + const indexed = indexData(null, null, buildBaseChainsList(), null, null); + expect(indexed.byChainId[42161].l2Beat).toBeUndefined(); + + const indexed2 = indexData(null, null, buildBaseChainsList(), null, { source: 'unavailable', projects: [] }); + expect(indexed2.byChainId[42161].l2Beat).toBeUndefined(); + }); + + it('clears stale chain.l2Beat when a project disappears from the fresh fetch', () => { + // First sweep: both projects present. 
+ const indexed = indexData(null, null, [ + { chainId: 42161, name: 'Arbitrum One' }, + { chainId: 10, name: 'OP Mainnet' } + ], null, buildL2Beat([ + { slug: 'arbitrum', chainId: 42161, displayName: 'Arbitrum One' }, + { slug: 'optimism', chainId: 10, displayName: 'OP Mainnet' } + ])); + expect(indexed.byChainId[42161].l2Beat).toBeDefined(); + expect(indexed.byChainId[10].l2Beat).toBeDefined(); + + // Second sweep on the SAME indexed object: optimism dropped from L2BEAT. + // Simulate the refresher's re-merge by calling indexL2BeatSource directly. + // (Imported lazily via dynamic import to keep test file self-contained.) + return import('../../../src/store/indexer.js').then(({ indexL2BeatSource }) => { + indexL2BeatSource(buildL2Beat([ + { slug: 'arbitrum', chainId: 42161, displayName: 'Arbitrum One' } + ]), indexed); + expect(indexed.byChainId[42161].l2Beat).toBeDefined(); + expect(indexed.byChainId[10].l2Beat).toBeUndefined(); + expect(indexed.byChainId[10].sources).not.toContain('l2beat'); + }); + }); + + it('preserves dataFreshness="fallback" when sourced from static JSON', () => { + const indexed = indexData(null, null, buildBaseChainsList(), null, { + source: 'fallback', + fetchedAt: null, + projects: [{ slug: 'arbitrum', chainId: 42161, displayName: 'Arbitrum One', stage: 'Stage 1' }] + }); + expect(indexed.byChainId[42161].l2Beat.dataFreshness).toBe('fallback'); + expect(indexed.byChainId[42161].l2Beat.fetchedAt).toBeNull(); + }); +}); diff --git a/tests/unit/store/indexer-slip44.test.js b/tests/unit/store/indexer-slip44.test.js new file mode 100644 index 0000000..8644416 --- /dev/null +++ b/tests/unit/store/indexer-slip44.test.js @@ -0,0 +1,41 @@ +import { describe, it, expect } from 'vitest'; +import { indexData } from '../../../src/store/indexer.js'; + +describe('indexer — slip44 field retention (regression)', () => { + it('keeps slip44 on chain entry created from chains.json', () => { + const indexed = indexData(null, null, [ + { chainId: 1, name: 
'Ethereum', slip44: 60 } + ], { 60: { coinType: 60, symbol: 'ETH', coin: 'Ethereum' } }); + + expect(indexed.byChainId[1].slip44).toBe(60); + expect(indexed.byChainId[1].slip44Info).toEqual({ + coinType: 60, + symbol: 'ETH', + coin: 'Ethereum' + }); + }); + + it('keeps slip44 on chain entry created from chainlist', () => { + const indexed = indexData( + null, + [{ chainId: 999, name: 'Test', slip44: 42 }], + null, + { 42: { coinType: 42, symbol: 'XYZ', coin: 'Test' } } + ); + + expect(indexed.byChainId[999].slip44).toBe(42); + expect(indexed.byChainId[999].slip44Info).toMatchObject({ symbol: 'XYZ' }); + }); + + it('keeps chains.slip44 even when the chain also appears in chainlist', () => { + const indexed = indexData( + null, + [{ chainId: 1, name: 'Ethereum' }], + [{ chainId: 1, name: 'Ethereum', slip44: 60 }], + { 60: { coinType: 60, symbol: 'ETH', coin: 'Ethereum' } } + ); + + expect(indexed.byChainId[1].slip44).toBe(60); + expect(indexed.byChainId[1].slip44Info).toBeDefined(); + }); +}); diff --git a/tests/unit/transport/fetch.test.js b/tests/unit/transport/fetch.test.js new file mode 100644 index 0000000..6dd2d04 --- /dev/null +++ b/tests/unit/transport/fetch.test.js @@ -0,0 +1,45 @@ +import { describe, it, expect } from 'vitest'; +import { fetchData } from '../../../src/transport/fetch.js'; + +// We don't mock fetchUtil for this test because fetchData should return null +// without any network call when given an unsupported format. Use a URL that +// won't actually resolve to keep the test offline-safe. + +describe('fetchData — unsupported format (regression)', () => { + it('returns null when format is neither "json" nor "text"', async () => { + // The fetch will fail (sandbox blocks network), but the catch block + // returns null anyway. We want to verify the contract holds for the + // success path too — so call with a format that bypasses both branches. 
+ // Easiest deterministic check: stub global fetch to return a response + // and confirm the unknown-format branch returns null. + const origFetch = globalThis.fetch; + globalThis.fetch = async () => ({ + ok: true, + status: 200, + json: async () => ({}), + text: async () => 'x' + }); + try { + const result = await fetchData('https://example.test/x', 'xml'); + expect(result).toBeNull(); + } finally { + globalThis.fetch = origFetch; + } + }); + + it('returns json for format="json"', async () => { + const origFetch = globalThis.fetch; + globalThis.fetch = async () => ({ + ok: true, + status: 200, + json: async () => ({ hello: 'world' }), + text: async () => 'fallback' + }); + try { + const result = await fetchData('https://example.test/x', 'json'); + expect(result).toEqual({ hello: 'world' }); + } finally { + globalThis.fetch = origFetch; + } + }); +});