#!/usr/bin/env bash
# Validate required config files and optional env vars before deployment/scripts
# Recommendation: docs/10-best-practices/IMPLEMENTATION_CHECKLIST.md (Configuration validation)
# Usage: ./scripts/validation/validate-config-files.sh [--dry-run]
#   --dry-run   Print what would be validated and exit 0 (no file checks).
#
# Behavior: individual check failures are tallied in $ERRORS (they do not abort
# the run under `set -e`); the script exits 1 at the end if any error occurred.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"

# --dry-run may appear anywhere in the argument list.
DRY_RUN=false
for a in "$@"; do [[ "$a" == "--dry-run" ]] && DRY_RUN=true && break; done

# Simple stdout loggers; message text is the only argument.
log_info() { echo "[INFO] $1"; }
log_ok() { echo "[OK] $1"; }
log_warn() { echo "[WARN] $1"; }
log_err() { echo "[ERROR] $1"; }

# Running error count; checked at the bottom of the script.
ERRORS=0

# Required config paths (adjust per project)
REQUIRED_FILES="${VALIDATE_REQUIRED_FILES:-}"
# Example: REQUIRED_FILES="/path/to/config.toml /path/to/.env"
# Optional env vars to warn if missing (default empty = no warnings; set VALIDATE_OPTIONAL_ENV for Proxmox API checks)
OPTIONAL_ENV="${VALIDATE_OPTIONAL_ENV:-}"

# check_file <path>
#   Logs [OK] and returns 0 if <path> is a regular file; otherwise logs
#   [ERROR], increments ERRORS, and returns 1 (callers mask the status with
#   `|| true` so set -e does not abort the run).
check_file() {
  local f="$1"
  if [[ -f "$f" ]]; then
    log_ok "Found: $f"
    return 0
  else
    log_err "Missing required file: $f"
    ERRORS=$((ERRORS + 1))
    return 1
  fi
}

# check_env <name>
#   Warn-only check: logs [WARN] and returns 1 if the named env var is unset
#   or empty (does NOT increment ERRORS). Uses ${!name} indirection.
check_env() {
  local name="$1"
  if [[ -z "${!name:-}" ]]; then
    log_warn "Optional env not set: $name"
    return 1
  else
    log_ok "Env set: $name"
    return 0
  fi
}

if $DRY_RUN; then
  echo "=== Validation (--dry-run: would check) ==="
  echo " REQUIRED_FILES: ${REQUIRED_FILES:-}"
  echo " OPTIONAL_ENV: ${OPTIONAL_ENV:-}"
  echo " config/xdc-zero: validate-xdc-zero-config.sh (when config/xdc-zero exists)"
  echo " GRU reference primacy: scripts/verify/check-gru-reference-primacy-integration.sh (doc links + peg-bands hook)"
  exit 0
fi

if [[ -n "$REQUIRED_FILES" ]]; then
  # Intentional word-splitting: REQUIRED_FILES is a space-separated list.
  for f in $REQUIRED_FILES; do
    # FIX: mask the non-zero return so set -e does not abort on the first
    # missing file; check_file already tallies the error in $ERRORS, and the
    # summary at the bottom reports the total.
    check_file "$f" || true
  done
else
  # Default: check common locations
  [[ -d "$PROJECT_ROOT/config" ]] && check_file "$PROJECT_ROOT/config/ip-addresses.conf" || true
  [[ -f "$PROJECT_ROOT/config/smart-contracts-master.json" ]] && check_file "$PROJECT_ROOT/config/smart-contracts-master.json" || true
  [[ -f "$PROJECT_ROOT/.env.example" ]] && log_ok ".env.example present (copy to .env and fill)" || true

  # Token mapping (Chain 138 ↔ Mainnet): optional but validate structure if present
  if [[ -f "$PROJECT_ROOT/config/token-mapping.json" ]]; then
    log_ok "Found: config/token-mapping.json"
    if command -v jq &>/dev/null; then
      if jq -e '.tokens | type == "array"' "$PROJECT_ROOT/config/token-mapping.json" &>/dev/null; then
        log_ok "token-mapping.json: valid JSON with .tokens array"
      else
        log_err "token-mapping.json: invalid or missing .tokens array"
        ERRORS=$((ERRORS + 1))
      fi
    fi
  elif [[ -f "$PROJECT_ROOT/config/token-mapping-multichain.json" ]] && command -v jq &>/dev/null && jq -e '.pairs | type == "array"' "$PROJECT_ROOT/config/token-mapping-multichain.json" &>/dev/null; then
    log_ok "Token mapping: using config/token-mapping-multichain.json (relay fallback)"
  else
    log_warn "Optional config/token-mapping.json not found (relay uses fallback mapping)"
  fi

  if [[ -f "$PROJECT_ROOT/config/token-mapping-multichain.json" ]]; then
    log_ok "Found: config/token-mapping-multichain.json"
    if command -v jq &>/dev/null; then
      if jq -e '.pairs | type == "array"' "$PROJECT_ROOT/config/token-mapping-multichain.json" &>/dev/null; then
        log_ok "token-mapping-multichain.json: valid JSON with .pairs array"
      else
        log_err "token-mapping-multichain.json: invalid or missing .pairs array"
        ERRORS=$((ERRORS + 1))
      fi
    fi
  fi

  # GRU transport overlay: jq shape check first, then deep cross-file wiring
  # checks via an embedded Node.js program (requires node).
  if [[ -f "$PROJECT_ROOT/config/gru-transport-active.json" ]]; then
    log_ok "Found: config/gru-transport-active.json"
    if command -v jq &>/dev/null; then
      if jq -e '
        (.system.name | type == "string") and
        (.system.shortName | type == "string") and
        (.enabledCanonicalTokens | type == "array") and
        (.enabledDestinationChains | type == "array") and
        (.approvedBridgePeers | type == "array") and
        (.transportPairs | type == "array") and
        (.publicPools | type == "array") and
        ((.gasAssetFamilies == null) or (.gasAssetFamilies | type == "array")) and
        ((.gasRedeemGroups == null) or (.gasRedeemGroups | type == "array")) and
        ((.gasProtocolExposure == null) or (.gasProtocolExposure | type == "array"))
      ' "$PROJECT_ROOT/config/gru-transport-active.json" &>/dev/null; then
        log_ok "gru-transport-active.json: top-level overlay structure is valid"
      else
        log_err "gru-transport-active.json: invalid top-level structure"
        ERRORS=$((ERRORS + 1))
      fi
    fi
    if command -v node &>/dev/null; then
      # Quoted heredoc ('NODE'): no shell expansion inside the JS program.
      # PROJECT_ROOT is passed via the environment; the program exits 1 if
      # any cross-file wiring errors are collected.
      if PROJECT_ROOT="$PROJECT_ROOT" node <<'NODE'
const fs = require('fs');
const path = require('path');

const projectRoot = process.env.PROJECT_ROOT;

// Read a JSON file relative to the project root (throws if missing/invalid).
function readJson(relativePath) {
  return JSON.parse(fs.readFileSync(path.join(projectRoot, relativePath), 'utf8'));
}

// Same, but returns null when the file does not exist (optional overlays).
function readJsonMaybe(relativePath) {
  const full = path.join(projectRoot, relativePath);
  if (!fs.existsSync(full)) return null;
  return JSON.parse(fs.readFileSync(full, 'utf8'));
}

function normalizeAddress(address) {
  return typeof address === 'string' ? address.trim().toLowerCase() : '';
}

// True for a well-formed, non-zero 0x-prefixed 20-byte hex address.
function isNonZeroAddress(address) {
  const normalized = normalizeAddress(address);
  return /^0x[a-f0-9]{40}$/.test(normalized) && normalized !== '0x0000000000000000000000000000000000000000';
}

// A "ref" is wired when it carries a non-empty literal address or an env name.
function refConfigured(ref) {
  return !!ref && typeof ref === 'object' && (
    (typeof ref.address === 'string' && ref.address.trim() !== '') ||
    (typeof ref.env === 'string' && ref.env.trim() !== '')
  );
}

const active = readJson('config/gru-transport-active.json');
const multichain = readJson('config/token-mapping-multichain.json');
const deploymentRel = 'cross-chain-pmm-lps/config/deployment-status.json';
const poolMatrixRel = 'cross-chain-pmm-lps/config/pool-matrix.json';
const deployment = readJsonMaybe(deploymentRel);
const poolMatrix = readJsonMaybe(poolMatrixRel);
const hasDeploymentOverlay = deployment !== null && poolMatrix !== null;
if (!hasDeploymentOverlay) {
  console.warn(
    '[WARN] Missing cross-chain-pmm-lps deployment overlay (one of deployment-status.json / pool-matrix.json). ' +
    'Skipping deployment/pool-matrix cross-checks. For full checks: git submodule update --init cross-chain-pmm-lps'
  );
}
const currencyManifest = readJson('config/gru-iso4217-currency-manifest.json');
const monetaryUnitManifest = readJson('config/gru-monetary-unit-manifest.json');

const errors = [];
const canonicalChainId = Number(active.system?.canonicalChainId ?? 138);
const enabledCanonicalTokens = Array.isArray(active.enabledCanonicalTokens) ? active.enabledCanonicalTokens : [];
const enabledCanonical = new Set(enabledCanonicalTokens.map((token) => String(token.symbol)));
const enabledChainsArray = Array.isArray(active.enabledDestinationChains) ? active.enabledDestinationChains : [];
const enabledChains = new Set(enabledChainsArray.map((chain) => Number(chain.chainId)));
const peersByKey = new Map((active.approvedBridgePeers || []).map((peer) => [String(peer.key), peer]));
const reserveVerifiers = active.reserveVerifiers && typeof active.reserveVerifiers === 'object' ? active.reserveVerifiers : {};
const transportPairsByKey = new Map((active.transportPairs || []).map((pair) => [String(pair.key), pair]));
const publicPoolsByKey = new Map((active.publicPools || []).map((pool) => [String(pool.key), pool]));
const manifestByCode = new Map((currencyManifest.currencies || []).map((currency) => [String(currency.code), currency]));
const monetaryUnitsByCode = new Map((monetaryUnitManifest.monetaryUnits || []).map((unit) => [String(unit.code), unit]));
const gasFamiliesByKey = new Map(((active.gasAssetFamilies || [])).map((family) => [String(family.familyKey), family]));
// Gas families are addressable by either their 138-side or mirrored symbol.
const gasFamiliesBySymbol = new Map(
  (active.gasAssetFamilies || []).flatMap((family) => [
    [String(family.canonicalSymbol138), family],
    [String(family.mirroredSymbol), family],
  ])
);
const gasProtocolExposureByKey = new Map((active.gasProtocolExposure || []).map((row) => [String(row.key), row]));

// Find the multichain mapping pair for a (from, to) chain-id combination.
function getMappingPair(fromChainId, toChainId) {
  return (multichain.pairs || []).find(
    (entry) => Number(entry.fromChainId) === Number(fromChainId) && Number(entry.toChainId) === Number(toChainId)
  );
}

function getMappingToken(fromChainId, toChainId, mappingKey) {
  const pair = getMappingPair(fromChainId, toChainId);
  if (!pair) return null;
  return (pair.tokens || []).find((token) => token.key === mappingKey) || null;
}

// First-hop public-pool key derived from pool-matrix hubStable for a chain,
// or null when the overlay / hubStable is unavailable.
function getExpectedPoolKey(chainId, mirroredSymbol) {
  if (!hasDeploymentOverlay || !poolMatrix?.chains) return null;
  const chain = poolMatrix.chains[String(chainId)];
  const hubStable = typeof chain?.hubStable === 'string' ? chain.hubStable.trim() : '';
  if (!hubStable) return null;
  return `${chainId}-${mirroredSymbol}-${hubStable}`;
}

// Every enabled destination chain must reference an approved bridge peer.
for (const chain of active.enabledDestinationChains || []) {
  if (!peersByKey.has(String(chain.peerKey || ''))) {
    errors.push(`enabledDestinationChains[${chain.chainId}] references missing peerKey ${chain.peerKey}`);
  }
}

// Enabled canonical tokens must be backed by their registry family's source
// of truth: gas family, monetary-unit manifest, or ISO-4217 manifest.
for (const token of enabledCanonicalTokens) {
  const registryFamily = String(token.registryFamily || '').trim();
  if (registryFamily === 'gas_native') {
    const gasFamily = gasFamiliesByKey.get(String(token.familyKey || '')) || gasFamiliesBySymbol.get(String(token.symbol || ''));
    if (!gasFamily) {
      errors.push(`enabledCanonicalTokens[${token.symbol}] references missing gas family ${token.familyKey}`);
      continue;
    }
    if (String(gasFamily.canonicalSymbol138 || '') !== String(token.symbol || '')) {
      errors.push(`enabledCanonicalTokens[${token.symbol}] must match gasAssetFamilies canonicalSymbol138`);
    }
    continue;
  }
  if (registryFamily === 'monetary_unit') {
    const monetaryUnit = monetaryUnitsByCode.get(String(token.currencyCode || ''));
    if (!monetaryUnit) {
      errors.push(`enabledCanonicalTokens[${token.symbol}] references missing monetary unit ${token.currencyCode} in gru-monetary-unit-manifest.json`);
      continue;
    }
    if (String(monetaryUnit.canonicalSymbol || '') !== String(token.symbol || '')) {
      errors.push(`enabledCanonicalTokens[${token.symbol}] must match gru-monetary-unit-manifest.json canonicalSymbol`);
    }
    continue;
  }
  const currency = manifestByCode.get(String(token.currencyCode || ''));
  if (!currency) {
    errors.push(`enabledCanonicalTokens[${token.symbol}] references missing currencyCode ${token.currencyCode} in gru-iso4217-currency-manifest.json`);
    continue;
  }
  if (currency.status?.deployed !== true) {
    errors.push(`enabledCanonicalTokens[${token.symbol}] requires manifest currency ${token.currencyCode} to be deployed`);
  }
  if (currency.status?.transportActive !== true) {
    errors.push(`enabledCanonicalTokens[${token.symbol}] requires manifest currency ${token.currencyCode} to mark transportActive=true`);
  }
}

// Transport pairs: symbols/chains enabled, peer + limits wired, mapping and
// (when the overlay is present) deployment-status agree on addresses.
for (const pair of active.transportPairs || []) {
  // Per-pair override of the canonical chain id (shadows the outer const).
  const canonicalChainId = Number(pair.canonicalChainId ?? active.system?.canonicalChainId ?? 138);
  const destinationChainId = Number(pair.destinationChainId);
  const canonicalSymbol = String(pair.canonicalSymbol || '');
  const mirroredSymbol = String(pair.mirroredSymbol || '');
  if (!enabledCanonical.has(canonicalSymbol)) {
    errors.push(`transportPairs[${pair.key}] uses canonicalSymbol ${canonicalSymbol} which is not enabled`);
  }
  if (!enabledChains.has(destinationChainId)) {
    errors.push(`transportPairs[${pair.key}] uses destinationChainId ${destinationChainId} which is not enabled`);
  }
  const peer = peersByKey.get(String(pair.peerKey || ''));
  if (!peer) {
    errors.push(`transportPairs[${pair.key}] is missing approved bridge peer ${pair.peerKey}`);
  } else {
    if (!refConfigured(peer.l1Bridge)) {
      errors.push(`approvedBridgePeers[${peer.key}] is missing l1Bridge wiring`);
    }
    if (!refConfigured(peer.l2Bridge)) {
      errors.push(`approvedBridgePeers[${peer.key}] is missing l2Bridge wiring`);
    }
  }
  const maxOutstanding = pair.maxOutstanding && typeof pair.maxOutstanding === 'object' ? pair.maxOutstanding : null;
  if (!maxOutstanding || (!maxOutstanding.amount && !maxOutstanding.env)) {
    errors.push(`transportPairs[${pair.key}] is missing maxOutstanding amount/env`);
  }
  const mappingToken = getMappingToken(canonicalChainId, destinationChainId, pair.mappingKey);
  if (!mappingToken) {
    errors.push(`transportPairs[${pair.key}] mappingKey ${pair.mappingKey} is missing from token-mapping-multichain.json`);
  } else {
    if (!isNonZeroAddress(mappingToken.addressFrom)) {
      errors.push(`transportPairs[${pair.key}] has invalid canonical addressFrom in token-mapping-multichain.json`);
    }
    if (!isNonZeroAddress(mappingToken.addressTo)) {
      errors.push(`transportPairs[${pair.key}] mapping exists but cW pair is not deployed (addressTo missing/zero)`);
    }
  }
  if (hasDeploymentOverlay) {
    const deploymentChain = deployment.chains?.[String(destinationChainId)];
    const deployedMirror = deploymentChain?.cwTokens?.[mirroredSymbol] || deploymentChain?.gasMirrors?.[mirroredSymbol];
    if (!deploymentChain || !isNonZeroAddress(deployedMirror)) {
      errors.push(`transportPairs[${pair.key}] mapping exists but deployment-status.json has no deployed ${mirroredSymbol} for chain ${destinationChainId}`);
    } else if (mappingToken && normalizeAddress(deployedMirror) !== normalizeAddress(mappingToken.addressTo)) {
      errors.push(`transportPairs[${pair.key}] deployment-status.json ${mirroredSymbol} does not match token-mapping-multichain.json addressTo`);
    }
  }
  if (pair.assetClass === 'gas_native') {
    const gasFamily = gasFamiliesByKey.get(String(pair.familyKey || '')) || gasFamiliesBySymbol.get(canonicalSymbol) || gasFamiliesBySymbol.get(mirroredSymbol);
    if (!gasFamily) {
      errors.push(`transportPairs[${pair.key}] references missing gas family ${pair.familyKey}`);
    } else {
      if (String(gasFamily.canonicalSymbol138 || '') !== canonicalSymbol) {
        errors.push(`transportPairs[${pair.key}] canonicalSymbol must match gas family canonicalSymbol138`);
      }
      if (String(gasFamily.mirroredSymbol || '') !== mirroredSymbol) {
        errors.push(`transportPairs[${pair.key}] mirroredSymbol must match gas family mirroredSymbol`);
      }
      if (String(gasFamily.backingMode || '') !== String(pair.backingMode || '')) {
        errors.push(`transportPairs[${pair.key}] backingMode must match gas family`);
      }
      if (!(gasFamily.originChains || []).map(Number).includes(destinationChainId)) {
        errors.push(`transportPairs[${pair.key}] destinationChainId ${destinationChainId} not allowed by gas family`);
      }
    }
    if (!pair.protocolExposureKey || !gasProtocolExposureByKey.has(String(pair.protocolExposureKey))) {
      errors.push(`transportPairs[${pair.key}] is missing gas protocol exposure wiring`);
    }
  }
  if ((pair.publicPoolKeys || []).length > 0) {
    for (const publicPoolKey of pair.publicPoolKeys) {
      if (!publicPoolsByKey.has(String(publicPoolKey))) {
        errors.push(`transportPairs[${pair.key}] references missing public pool key ${publicPoolKey}`);
      }
    }
  }
  if (pair.reserveVerifierKey) {
    const verifier = reserveVerifiers[pair.reserveVerifierKey];
    if (!verifier) {
      errors.push(`transportPairs[${pair.key}] requires missing reserve verifier ${pair.reserveVerifierKey}`);
    } else {
      if (!refConfigured(verifier.bridgeRef)) {
        errors.push(`reserveVerifiers.${pair.reserveVerifierKey} is missing bridgeRef wiring`);
      }
      if (!refConfigured(verifier.verifierRef)) {
        errors.push(`reserveVerifiers.${pair.reserveVerifierKey} is missing verifierRef wiring`);
      }
      if (verifier.requireVaultBacking && !refConfigured(verifier.vaultRef)) {
        errors.push(`reserveVerifiers.${pair.reserveVerifierKey} requires vault backing but vaultRef is unset`);
      }
      if (verifier.requireReserveSystemBalance && !refConfigured(verifier.reserveSystemRef)) {
        errors.push(`reserveVerifiers.${pair.reserveVerifierKey} requires reserve-system balance checks but reserveSystemRef is unset`);
      }
    }
  }
}

// Active public pools must carry a real address and (when the deployment
// overlay is present) appear in deployment-status.json, unless staged.
for (const pool of active.publicPools || []) {
  if (pool.active === true) {
    if (!isNonZeroAddress(pool.poolAddress)) {
      errors.push(`publicPools[${pool.key}] is active but has no poolAddress`);
      continue;
    }
    if (hasDeploymentOverlay) {
      const deploymentChain = deployment.chains?.[String(pool.chainId)];
      const deployedStable = Array.isArray(deploymentChain?.pmmPools) ? deploymentChain.pmmPools : [];
      const deployedVolatile = Array.isArray(deploymentChain?.pmmPoolsVolatile) ? deploymentChain.pmmPoolsVolatile : [];
      const deployedGas = Array.isArray(deploymentChain?.gasPmmPools) ? deploymentChain.gasPmmPools : [];
      const deploymentMatch = [...deployedStable, ...deployedVolatile].some(
        (entry) => normalizeAddress(entry?.poolAddress) === normalizeAddress(pool.poolAddress)
      );
      const gasDeploymentMatch = deployedGas.some(
        (entry) => normalizeAddress(entry?.poolAddress) === normalizeAddress(pool.poolAddress)
      );
      const stagedPlaceholder = String(pool.phase || '').toLowerCase().includes('staged');
      if (!deploymentMatch && !gasDeploymentMatch && !stagedPlaceholder) {
        errors.push(`publicPools[${pool.key}] is active but deployment-status.json does not contain its poolAddress`);
      }
    }
  }
}

// Gas families must be complete and agree with the multichain gasAssetRegistry.
for (const family of active.gasAssetFamilies || []) {
  if (!family.familyKey || !family.canonicalSymbol138 || !family.mirroredSymbol) {
    errors.push(`gasAssetFamilies entry is missing familyKey/canonicalSymbol138/mirroredSymbol`);
    continue;
  }
  if (!Array.isArray(family.originChains) || family.originChains.length === 0) {
    errors.push(`gasAssetFamilies[${family.familyKey}] must declare originChains`);
  }
  if (!multichain.gasAssetRegistry || !Array.isArray(multichain.gasAssetRegistry.families)) {
    errors.push(`multichain gasAssetRegistry is missing while gas families are enabled`);
    continue;
  }
  const multichainFamily = multichain.gasAssetRegistry.families.find(
    (entry) => String(entry.familyKey) === String(family.familyKey)
  );
  if (!multichainFamily) {
    errors.push(`gasAssetFamilies[${family.familyKey}] is missing from token-mapping-multichain.json gasAssetRegistry`);
    continue;
  }
  if (String(multichainFamily.canonicalSymbol138) !== String(family.canonicalSymbol138)) {
    errors.push(`gasAssetFamilies[${family.familyKey}] canonicalSymbol138 mismatch between transport-active and token-mapping-multichain`);
  }
  if (String(multichainFamily.mirroredSymbol) !== String(family.mirroredSymbol)) {
    errors.push(`gasAssetFamilies[${family.familyKey}] mirroredSymbol mismatch between transport-active and token-mapping-multichain`);
  }
}

// Family-fungible redeem groups must allow exactly the family's origin chains.
for (const group of active.gasRedeemGroups || []) {
  const family = gasFamiliesByKey.get(String(group.familyKey || ''));
  if (!family) {
    errors.push(`gasRedeemGroups[${group.key}] references missing family ${group.familyKey}`);
    continue;
  }
  const allowedChains = Array.isArray(group.allowedChains) ? group.allowedChains.map(Number) : [];
  if (String(group.redeemPolicy || '') === 'family_fungible_inventory_gated') {
    const familyOriginChains = (family.originChains || []).map(Number).sort((a, b) => a - b);
    const sortedAllowed = [...allowedChains].sort((a, b) => a - b);
    if (JSON.stringify(sortedAllowed) !== JSON.stringify(familyOriginChains)) {
      errors.push(`gasRedeemGroups[${group.key}] must match the full origin chain set for ${family.familyKey}`);
    }
  }
}

// Protocol exposure gating: 1inch may not be visible/live before both
// Uniswap V3 is live and the DODO PMM pool is active.
for (const exposure of active.gasProtocolExposure || []) {
  const family = gasFamiliesByKey.get(String(exposure.familyKey || ''));
  if (!family) {
    errors.push(`gasProtocolExposure[${exposure.key}] references missing family ${exposure.familyKey}`);
    continue;
  }
  const oneInch = exposure.oneInch || {};
  const uniswap = exposure.uniswapV3 || {};
  const dodo = exposure.dodoPmm || {};
  if ((oneInch.routingVisible === true || oneInch.live === true) && !(uniswap.live === true && dodo.active === true)) {
    errors.push(`gasProtocolExposure[${exposure.key}] cannot enable 1inch before DODO and Uniswap are live`);
  }
}

// Completeness sweep: any deployment-status chain that is fully compatible
// (bridge available, all non-gas tokens mapped + deployed + pool-keyed) must
// be enabled and carry the expected transport pairs / pool placeholders.
if (hasDeploymentOverlay) {
  for (const [chainIdKey, deploymentChain] of Object.entries(deployment.chains || {})) {
    const destinationChainId = Number(chainIdKey);
    if (destinationChainId === canonicalChainId) continue;
    if (deploymentChain?.bridgeAvailable !== true) continue;
    const mappingPair = getMappingPair(canonicalChainId, destinationChainId);
    if (!mappingPair) continue;
    let compatible = true;
    for (const token of enabledCanonicalTokens.filter((entry) => entry.registryFamily !== 'gas_native')) {
      const mappingKey = String(token.mappingKey || '');
      const mirroredSymbol = String(token.mirroredSymbol || '');
      const mappingToken = mappingKey ? (mappingPair.tokens || []).find((entry) => entry.key === mappingKey) : null;
      const deployedMirror = deploymentChain?.cwTokens?.[mirroredSymbol];
      const expectedPoolKey = getExpectedPoolKey(destinationChainId, mirroredSymbol);
      if (
        !mappingKey ||
        !mappingToken ||
        !isNonZeroAddress(mappingToken.addressTo) ||
        !isNonZeroAddress(deployedMirror) ||
        normalizeAddress(mappingToken.addressTo) !== normalizeAddress(deployedMirror) ||
        !expectedPoolKey
      ) {
        compatible = false;
        break;
      }
    }
    if (!compatible) continue;
    const enabledChain = enabledChainsArray.find((chain) => Number(chain.chainId) === destinationChainId);
    if (!enabledChain) {
      errors.push(`compatible destination chain ${destinationChainId} (${deploymentChain?.name || 'unknown'}) is missing from enabledDestinationChains`);
      continue;
    }
    for (const token of enabledCanonicalTokens.filter((entry) => entry.registryFamily !== 'gas_native')) {
      const expectedPairKey = `${canonicalChainId}-${destinationChainId}-${token.symbol}-${token.mirroredSymbol}`;
      const expectedPoolKey = getExpectedPoolKey(destinationChainId, String(token.mirroredSymbol || ''));
      const pair = transportPairsByKey.get(expectedPairKey);
      if (!pair) {
        errors.push(`compatible destination chain ${destinationChainId} is missing transport pair ${expectedPairKey}`);
        continue;
      }
      if (expectedPoolKey && !publicPoolsByKey.has(expectedPoolKey)) {
        errors.push(`compatible destination chain ${destinationChainId} is missing public pool placeholder ${expectedPoolKey}`);
      }
      if (expectedPoolKey && !(pair.publicPoolKeys || []).includes(expectedPoolKey)) {
        errors.push(`transportPairs[${pair.key}] must include the pool-matrix first-hop key ${expectedPoolKey}`);
      }
    }
  }
}

if (errors.length > 0) {
  console.error(errors.join('\n'));
  process.exit(1);
}
NODE
      then
        log_ok "gru-transport-active.json: overlay cross-checks passed"
      else
        log_err "gru-transport-active.json: overlay cross-checks failed"
        ERRORS=$((ERRORS + 1))
      fi
    else
      log_err "Node.js is required to validate gru-transport-active.json cross-file wiring"
      ERRORS=$((ERRORS + 1))
    fi
  else
    log_err "Missing config/gru-transport-active.json"
    ERRORS=$((ERRORS + 1))
  fi

  # Governance supervision profile: optional file, jq shape check when present.
  if [[ -f "$PROJECT_ROOT/config/gru-governance-supervision-profile.json" ]]; then
    log_ok "Found: config/gru-governance-supervision-profile.json"
    if command -v jq &>/dev/null; then
      if jq -e '
        (.profileId | type == "string") and
        (.requiredAssetMetadata | type == "array") and
        (.roles.tokenRoles | type == "array") and
        (.roles.registryRoles | type == "array") and
        (.roles.governanceRoles | type == "array") and
        (.storageNamespaces | type == "object") and
        (.proposalPolicy.defaultMinimumUpgradeNoticePeriodSeconds | type == "number") and
        (.proposalPolicy.assetScopeRequiredForUpgradeSensitiveChanges == true) and
        (.proposalPolicy.jurisdictionDerivedFromRegistryAssetState == true) and
        (.proposalPolicy.jurisdictionTransitionRequiresBothAuthorities == true) and
        (.proposalPolicy.proposalEntryPoint == "GovernanceController.proposeForAsset") and
        (.proposalPolicy.manualJurisdictionTaggingAllowed == false) and
        (.proposalPolicy.jurisdictionPolicyEntryPoints | type == "array") and
        (.emergencyPolicy.manualAdminPathsRemainAvailable == true) and
        ((.emergencyPolicy.authorizedRoles | index("EMERGENCY_ADMIN_ROLE")) != null)
      ' "$PROJECT_ROOT/config/gru-governance-supervision-profile.json" &>/dev/null; then
        log_ok "gru-governance-supervision-profile.json: top-level structure is valid"
      else
        log_err "gru-governance-supervision-profile.json: invalid top-level structure"
        ERRORS=$((ERRORS + 1))
      fi
    fi
  fi

  [[ -f "$PROJECT_ROOT/config/smart-contracts-master.json" ]] && log_ok "Found: config/smart-contracts-master.json" || true

  # Token lists (Uniswap format): validate structure if present
  for list in token-lists/lists/dbis-138.tokenlist.json token-lists/lists/cronos.tokenlist.json token-lists/lists/all-mainnet.tokenlist.json; do
    if [[ -f "$PROJECT_ROOT/$list" ]] && command -v jq &>/dev/null; then
      if jq -e '(.tokens | type == "array") and (.tokens | length > 0)' "$PROJECT_ROOT/$list" &>/dev/null; then
        log_ok "Token list valid: $list"
      else
        log_err "Token list invalid or empty: $list"
        ERRORS=$((ERRORS + 1))
      fi
    fi
  done

  # DUAL_CHAIN config (explorer deploy source)
  if [[ -f "$PROJECT_ROOT/explorer-monorepo/backend/api/rest/config/metamask/DUAL_CHAIN_TOKEN_LIST.tokenlist.json" ]] && command -v jq &>/dev/null; then
    if jq -e '(.tokens | type == "array") and (.tokens | length > 0)' "$PROJECT_ROOT/explorer-monorepo/backend/api/rest/config/metamask/DUAL_CHAIN_TOKEN_LIST.tokenlist.json" &>/dev/null; then
      log_ok "DUAL_CHAIN_TOKEN_LIST valid"
    else
      log_err "DUAL_CHAIN_TOKEN_LIST invalid or empty"
      ERRORS=$((ERRORS + 1))
    fi
  fi

  if [[ -f "$PROJECT_ROOT/explorer-monorepo/backend/api/rest/config/metamask/GRU_V2_PUBLIC_DEPLOYMENT_STATUS.json" ]] && command -v jq &>/dev/null; then
    if jq -e '(.summary | type == "object") and (.protocols.publicCwMesh | type == "array") and (.transport.wave1 | type == "array")' "$PROJECT_ROOT/explorer-monorepo/backend/api/rest/config/metamask/GRU_V2_PUBLIC_DEPLOYMENT_STATUS.json" &>/dev/null; then
      log_ok "GRU_V2_PUBLIC_DEPLOYMENT_STATUS valid"
    else
      log_err "GRU_V2_PUBLIC_DEPLOYMENT_STATUS invalid"
      ERRORS=$((ERRORS + 1))
    fi
  fi

  if [[ -f "$PROJECT_ROOT/config/gru-v2-public-protocol-rollout-plan.json" ]] && command -v jq &>/dev/null; then
    if jq -e '(.protocols | type == "array") and (.protocols | length > 0)' "$PROJECT_ROOT/config/gru-v2-public-protocol-rollout-plan.json" &>/dev/null; then
      log_ok "gru-v2-public-protocol-rollout-plan.json valid"
    else
      log_err "gru-v2-public-protocol-rollout-plan.json invalid"
      ERRORS=$((ERRORS + 1))
    fi
  fi

  if [[ -f "$PROJECT_ROOT/explorer-monorepo/backend/api/rest/config/metamask/GRU_V2_DEPLOYMENT_QUEUE.json" ]] && command -v jq &>/dev/null; then
    if jq -e '(.summary | type == "object") and (.assetQueue | type == "array") and (.chainQueue | type == "array") and (.protocolQueue | type == "array")' "$PROJECT_ROOT/explorer-monorepo/backend/api/rest/config/metamask/GRU_V2_DEPLOYMENT_QUEUE.json" &>/dev/null; then
      log_ok "GRU_V2_DEPLOYMENT_QUEUE valid"
    else
      log_err "GRU_V2_DEPLOYMENT_QUEUE invalid"
      ERRORS=$((ERRORS + 1))
    fi
  fi

  # Public-sector program manifest (served by phoenix-deploy-api GET /api/v1/public-sector/programs)
  if [[ -f "$PROJECT_ROOT/config/public-sector-program-manifest.json" ]]; then
    log_ok "Found: config/public-sector-program-manifest.json"
    if command -v jq &>/dev/null; then
      if jq -e '
        (.schemaVersion | type == "string") and
        (.programs | type == "array") and
        (.programs | length > 0) and
        ((.programs | map(.id) | unique | length) == (.programs | length))
      ' "$PROJECT_ROOT/config/public-sector-program-manifest.json" &>/dev/null; then
        log_ok "public-sector-program-manifest.json: schemaVersion, programs[], unique .id"
      else
        log_err "public-sector-program-manifest.json: invalid structure or duplicate program ids"
        ERRORS=$((ERRORS + 1))
      fi
    fi
  else
    log_err "Missing config/public-sector-program-manifest.json"
    ERRORS=$((ERRORS + 1))
  fi

  # Proxmox operational template (VMID/IP/FQDN mirror; see docs/03-deployment/PROXMOX_VE_OPERATIONAL_DEPLOYMENT_TEMPLATE.md)
  if [[ -f "$PROJECT_ROOT/config/proxmox-operational-template.json" ]]; then
    log_ok "Found: config/proxmox-operational-template.json"
    if command -v jq &>/dev/null; then
      if jq -e '
        (.schemaVersion | type == "string") and
        (.network.management_lan.gateway | type == "string") and
        (.proxmox_nodes | type == "array") and
        (.proxmox_nodes | length >= 1) and
        (.services | type == "array") and
        (.services | length >= 1)
      ' "$PROJECT_ROOT/config/proxmox-operational-template.json" &>/dev/null; then
        log_ok "proxmox-operational-template.json: schema, network, nodes, services"
      else
        log_err "proxmox-operational-template.json: invalid top-level structure"
        ERRORS=$((ERRORS + 1))
      fi
    fi
  else
    log_err "Missing config/proxmox-operational-template.json"
    ERRORS=$((ERRORS + 1))
  fi

  # ISO-4217 currency manifest: per-currency shape validated with jq all().
  if [[ -f "$PROJECT_ROOT/config/gru-iso4217-currency-manifest.json" ]]; then
    log_ok "Found: config/gru-iso4217-currency-manifest.json"
    if command -v jq &>/dev/null; then
      if jq -e '
        (.name | type == "string") and
        (.version | type == "string") and
        (.updated | type == "string") and
        (.canonicalChainId | type == "number") and
        (.currencies | type == "array") and
        ((.currencies | length) > 0) and
        ((.currencies | map(.code) | unique | length) == (.currencies | length)) and
        (
          all(.currencies[];
            (.code | type == "string") and
            ((.code | length) >= 3) and
            (.name | type == "string") and
            (.type == "fiat" or .type == "commodity") and
            ((.minorUnits == null) or (.minorUnits | type == "number")) and
            (.status.planned | type == "boolean") and
            (.status.deployed | type == "boolean") and
            (.status.transportActive | type == "boolean") and
            (.status.x402Ready | type == "boolean") and
            (.canonicalAssets | type == "object")
          )
        )
      ' "$PROJECT_ROOT/config/gru-iso4217-currency-manifest.json" &>/dev/null; then
        log_ok "gru-iso4217-currency-manifest.json: top-level manifest structure is valid"
      else
        log_err "gru-iso4217-currency-manifest.json: invalid top-level structure"
        ERRORS=$((ERRORS + 1))
      fi
    fi
  fi

  # Standards profile: shape check, then cross-check its canonicalChainId and
  # reference paths against the transport overlay and currency manifest
  # (chain ids injected via jq --argjson).
  if [[ -f "$PROJECT_ROOT/config/gru-standards-profile.json" ]]; then
    log_ok "Found: config/gru-standards-profile.json"
    if command -v jq &>/dev/null; then
      if jq -e '
        (.name | type == "string") and
        (.profileId | type == "string") and
        (.version | type == "string") and
        (.updated | type == "string") and
        (.canonicalChainId | type == "number") and
        (.scope | type == "object") and
        (.paymentProfiles | type == "array") and
        (.baseTokenStandards | type == "array") and
        (.transportAndWrapperStandards | type == "array") and
        (.governanceAndPolicyStandards | type == "array")
      ' "$PROJECT_ROOT/config/gru-standards-profile.json" &>/dev/null; then
        log_ok "gru-standards-profile.json: top-level standards profile structure is valid"
      else
        log_err "gru-standards-profile.json: invalid top-level structure"
        ERRORS=$((ERRORS + 1))
      fi
      if jq -e '
        (.canonicalChainId == $activeChain) and
        (.canonicalChainId == $manifestChain) and
        (.references.transportOverlay == "config/gru-transport-active.json") and
        (.references.currencyManifest == "config/gru-iso4217-currency-manifest.json") and
        (.references.governanceSupervisionProfile == "config/gru-governance-supervision-profile.json") and
        (.references.storageGovernanceDoc == "docs/04-configuration/GRU_STORAGE_GOVERNANCE_AND_SUPERVISION_STANDARD.md")
      ' \
        --argjson activeChain "$(jq -r '.system.canonicalChainId' "$PROJECT_ROOT/config/gru-transport-active.json")" \
        --argjson manifestChain "$(jq -r '.canonicalChainId' "$PROJECT_ROOT/config/gru-iso4217-currency-manifest.json")" \
        "$PROJECT_ROOT/config/gru-standards-profile.json" &>/dev/null; then
        log_ok "gru-standards-profile.json: canonical-chain and reference wiring matches active overlay + currency manifest + governance profile"
      else
        log_err "gru-standards-profile.json: canonical-chain or reference wiring does not match active overlay / currency manifest / governance profile"
        ERRORS=$((ERRORS + 1))
      fi
    fi
  else
    log_err "Missing config/gru-standards-profile.json"
    ERRORS=$((ERRORS + 1))
  fi
fi

# Warn-only checks for optional env vars (intentional word-splitting of list).
if [[ -n "$OPTIONAL_ENV" ]]; then
  for v in $OPTIONAL_ENV; do
    check_env "$v" || true
  done
fi

# DBIS institutional Digital Master Plan example JSON
if [[ -f "$PROJECT_ROOT/config/dbis-institutional/examples/trust.json" ]] && [[ -x "$SCRIPT_DIR/validate-dbis-institutional-json.sh" ]]; then
  if bash "$SCRIPT_DIR/validate-dbis-institutional-json.sh" &>/dev/null; then
    log_ok "DBIS institutional examples (JSON parse)"
  else
    log_err "DBIS institutional examples failed JSON parse"
    ERRORS=$((ERRORS + 1))
  fi
fi

if command -v check-jsonschema &>/dev/null && [[ -x "$SCRIPT_DIR/validate-dbis-institutional-schemas.sh" ]]; then
  if SCHEMA_STRICT=1 bash "$SCRIPT_DIR/validate-dbis-institutional-schemas.sh" &>/dev/null; then
    log_ok "DBIS institutional JSON Schemas (settlement-event, address-registry-entry)"
  else
    log_err "DBIS institutional JSON Schema validation failed (pip install check-jsonschema)"
    ERRORS=$((ERRORS + 1))
  fi
fi

if command -v node &>/dev/null && [[ -f "$PROJECT_ROOT/scripts/verify/validate-address-registry-xe-aliases.mjs" ]]; then
  if node "$PROJECT_ROOT/scripts/verify/validate-address-registry-xe-aliases.mjs" &>/dev/null; then
    log_ok "DBIS institutional web3_eth_iban (XE) example aliases"
  else
    log_err "DBIS institutional web3_eth_iban validation failed (validate-address-registry-xe-aliases.mjs)"
    ERRORS=$((ERRORS + 1))
  fi
fi

if command -v check-jsonschema &>/dev/null && [[ -x "$SCRIPT_DIR/validate-naming-convention-registry-examples.sh" ]]; then
  if SCHEMA_STRICT=1 bash "$SCRIPT_DIR/validate-naming-convention-registry-examples.sh" &>/dev/null; then
    log_ok "Naming conventions registry examples (UTRNF / DBIS token-registry-entry schema)"
  else
    log_err "Naming conventions registry example validation failed (see validate-naming-convention-registry-examples.sh)"
    ERRORS=$((ERRORS + 1))
  fi
fi

if [[ -f "$PROJECT_ROOT/config/smart-contracts-master.json" ]] && command -v jq &>/dev/null && [[ -x "$SCRIPT_DIR/validate-explorer-chain138-inventory.sh" ]]; then
  if bash "$SCRIPT_DIR/validate-explorer-chain138-inventory.sh" &>/dev/null; then
    log_ok "Explorer address-inventory Chain 138 vs smart-contracts-master.json"
  else
    log_err "Explorer address-inventory Chain 138 drift (see validate-explorer-chain138-inventory.sh)"
    ERRORS=$((ERRORS + 1))
  fi
fi

if [[ -d "$PROJECT_ROOT/config/xdc-zero" ]] && [[ -x "$SCRIPT_DIR/validate-xdc-zero-config.sh" ]]; then
  if bash "$SCRIPT_DIR/validate-xdc-zero-config.sh" &>/dev/null; then
    log_ok "config/xdc-zero/*.json (parse)"
  else
    log_err "config/xdc-zero JSON parse failed (validate-xdc-zero-config.sh)"
    ERRORS=$((ERRORS + 1))
  fi
fi

if [[ -d "$PROJECT_ROOT/config/xdc-zero" ]] && [[ -x "$SCRIPT_DIR/validate-xdc-zero-relayer-env.sh" ]]; then
  if bash "$SCRIPT_DIR/validate-xdc-zero-relayer-env.sh" &>/dev/null; then
    log_ok "config/xdc-zero relayer env/default examples"
  else
    log_err "config/xdc-zero relayer env/default validation failed (validate-xdc-zero-relayer-env.sh)"
    ERRORS=$((ERRORS + 1))
  fi
fi

if [[ -x "$SCRIPT_DIR/validate-economics-strategy-json.sh" ]]; then
  if bash "$SCRIPT_DIR/validate-economics-strategy-json.sh" &>/dev/null; then
    log_ok "economics-toolkit strategy JSON (smoke + template; optional check-jsonschema)"
  else
    log_err "economics-toolkit strategy JSON validation failed (see scripts/validation/validate-economics-strategy-json.sh; run pnpm run economics:build)"
    ERRORS=$((ERRORS + 1))
  fi
fi

# Syntax-check a fixed set of operational Python files when python3 exists.
if command -v python3 &>/dev/null; then
  for f in \
    "$PROJECT_ROOT/scripts/it-ops/compute_ipam_drift.py" \
    "$PROJECT_ROOT/scripts/it-ops/lib/collect_inventory_remote.py" \
    "$PROJECT_ROOT/scripts/it-ops/persist-it-snapshot-sqlite.py" \
    "$PROJECT_ROOT/services/sankofa-it-read-api/server.py"
  do
    if [[ -f "$f" ]]; then
      if python3 -m py_compile "$f" &>/dev/null; then
        log_ok "Python syntax: ${f#$PROJECT_ROOT/}"
      else
        log_err "Python syntax failed: $f"
        ERRORS=$((ERRORS + 1))
      fi
    fi
  done
fi

if [[ -f "$PROJECT_ROOT/scripts/verify/check-gru-reference-primacy-integration.sh" ]]; then
  if bash "$PROJECT_ROOT/scripts/verify/check-gru-reference-primacy-integration.sh"; then
    log_ok "GRU reference primacy (doc links + peg-bands gruPolicyIntegration when submodule present)"
  else
    log_err "GRU reference primacy integration check failed (see scripts/verify/check-gru-reference-primacy-integration.sh)"
    ERRORS=$((ERRORS + 1))
  fi
fi

# Final verdict: non-zero exit when any tallied check failed.
if [[ $ERRORS -gt 0 ]]; then
  log_err "Validation failed with $ERRORS error(s). Set VALIDATE_REQUIRED_FILES='path1 path2' to require specific files."
  exit 1
fi
log_ok "Validation passed."
exit 0