# NOTE(review): the following lines are paste residue from a file-browser UI
# ("Files", path, size, type). As written they precede the shebang, so
# `#!/usr/bin/env bash` below is never honored and a shell would try to run
# them as commands. They are commented out here; ideally delete them entirely
# so the shebang is the first line of the file.
# Files
# proxmox/scripts/validation/validate-config-files.sh
#
# 608 lines
# 27 KiB
# Bash
# Executable File
#!/usr/bin/env bash
# Validate required config files and optional env vars before deployment/scripts
# Recommendation: docs/10-best-practices/IMPLEMENTATION_CHECKLIST.md (Configuration validation)
# Usage: ./scripts/validation/validate-config-files.sh [--dry-run]
# --dry-run Print what would be validated and exit 0 (no file checks).
set -euo pipefail

# Resolve this script's directory and the repository root (two levels up).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"

# Scan the argument list for the single supported flag.
DRY_RUN=false
for arg in "$@"; do
  if [[ "$arg" == "--dry-run" ]]; then
    DRY_RUN=true
    break
  fi
done
# Logging helpers. printf is used instead of echo so messages beginning with
# '-' or containing backslashes are printed literally; warnings and errors go
# to stderr so stdout stays clean for callers that capture it.
log_info() { printf '[INFO] %s\n' "$1"; }
log_ok() { printf '[OK] %s\n' "$1"; }
log_warn() { printf '[WARN] %s\n' "$1" >&2; }
log_err() { printf '[ERROR] %s\n' "$1" >&2; }
# Global error counter; bumped by check_file and the validation stages below.
ERRORS=0
# Required config paths (adjust per project)
# Space-separated list of paths; paths containing spaces are not supported.
REQUIRED_FILES="${VALIDATE_REQUIRED_FILES:-}"
# Example: REQUIRED_FILES="/path/to/config.toml /path/to/.env"
# Optional env vars to warn if missing (default empty = no warnings; set VALIDATE_OPTIONAL_ENV for Proxmox API checks)
OPTIONAL_ENV="${VALIDATE_OPTIONAL_ENV:-}"
# check_file PATH
# Logs [OK] and returns 0 when PATH is a regular file; otherwise logs
# [ERROR], increments the global ERRORS counter, and returns 1.
check_file() {
  local candidate="$1"
  if [[ ! -f "$candidate" ]]; then
    log_err "Missing required file: $candidate"
    ERRORS=$((ERRORS + 1))
    return 1
  fi
  log_ok "Found: $candidate"
  return 0
}
# check_env NAME
# Indirectly expands the variable called NAME; logs [OK]/returns 0 when it is
# set and non-empty, otherwise logs a warning and returns 1 (never an error).
check_env() {
  local var_name="$1"
  local value="${!var_name:-}"
  if [[ -n "$value" ]]; then
    log_ok "Env set: $var_name"
    return 0
  fi
  log_warn "Optional env not set: $var_name"
  return 1
}
# Dry-run mode: describe what a real run would validate, then exit 0 without
# touching the filesystem.
if $DRY_RUN; then
  printf '%s\n' "=== Validation (--dry-run: would check) ==="
  printf '%s\n' " REQUIRED_FILES: ${REQUIRED_FILES:-<default: config/ip-addresses.conf, .env.example, token-mapping*.json, gru-transport-active.json, gru-iso4217-currency-manifest.json, gru-governance-supervision-profile.json>}"
  printf '%s\n' " OPTIONAL_ENV: ${OPTIONAL_ENV:-<empty; set VALIDATE_OPTIONAL_ENV for Proxmox API vars>}"
  printf '%s\n' " config/xdc-zero: validate-xdc-zero-config.sh (when config/xdc-zero exists)"
  exit 0
fi
# Mode dispatch: an explicit REQUIRED_FILES list replaces all of the default
# project checks in the else-branch below.
if [[ -n "$REQUIRED_FILES" ]]; then
# Intentionally unquoted: REQUIRED_FILES is a space-separated list of paths.
for f in $REQUIRED_FILES; do
check_file "$f"
done
else
# Default: check common locations
# '|| true' keeps set -e from aborting; check_file still bumps ERRORS on a miss.
[[ -d "$PROJECT_ROOT/config" ]] && check_file "$PROJECT_ROOT/config/ip-addresses.conf" || true
# NOTE(review): guarded by -f, so this can only re-confirm an existing file and
# never flags smart-contracts-master.json as missing; it is also logged again
# further down this branch (duplicate [OK] output).
[[ -f "$PROJECT_ROOT/config/smart-contracts-master.json" ]] && check_file "$PROJECT_ROOT/config/smart-contracts-master.json" || true
[[ -f "$PROJECT_ROOT/.env.example" ]] && log_ok ".env.example present (copy to .env and fill)" || true
# Token mapping (Chain 138 ↔ Mainnet): optional but validate structure if present
tm_file="$PROJECT_ROOT/config/token-mapping.json"
mc_file="$PROJECT_ROOT/config/token-mapping-multichain.json"
if [[ -f "$tm_file" ]]; then
  log_ok "Found: config/token-mapping.json"
  if command -v jq >/dev/null 2>&1; then
    # Structural check: .tokens must be a JSON array.
    if jq -e '.tokens | type == "array"' "$tm_file" >/dev/null 2>&1; then
      log_ok "token-mapping.json: valid JSON with .tokens array"
    else
      log_err "token-mapping.json: invalid or missing .tokens array"
      ERRORS=$((ERRORS + 1))
    fi
  fi
elif [[ -f "$mc_file" ]] && command -v jq >/dev/null 2>&1 \
  && jq -e '.pairs | type == "array"' "$mc_file" >/dev/null 2>&1; then
  # No single-chain mapping, but a structurally valid multichain mapping
  # serves as the relay fallback.
  log_ok "Token mapping: using config/token-mapping-multichain.json (relay fallback)"
else
  log_warn "Optional config/token-mapping.json not found (relay uses fallback mapping)"
fi
# Multichain token mapping: optional file, but if present .pairs must be an array.
multichain_file="$PROJECT_ROOT/config/token-mapping-multichain.json"
if [[ -f "$multichain_file" ]]; then
  log_ok "Found: config/token-mapping-multichain.json"
  if command -v jq >/dev/null 2>&1; then
    if jq -e '.pairs | type == "array"' "$multichain_file" >/dev/null 2>&1; then
      log_ok "token-mapping-multichain.json: valid JSON with .pairs array"
    else
      log_err "token-mapping-multichain.json: invalid or missing .pairs array"
      ERRORS=$((ERRORS + 1))
    fi
  fi
fi
if [[ -f "$PROJECT_ROOT/config/gru-transport-active.json" ]]; then
log_ok "Found: config/gru-transport-active.json"
if command -v jq &>/dev/null; then
if jq -e '
(.system.name | type == "string")
and (.system.shortName | type == "string")
and (.enabledCanonicalTokens | type == "array")
and (.enabledDestinationChains | type == "array")
and (.approvedBridgePeers | type == "array")
and (.transportPairs | type == "array")
and (.publicPools | type == "array")
' "$PROJECT_ROOT/config/gru-transport-active.json" &>/dev/null; then
log_ok "gru-transport-active.json: top-level overlay structure is valid"
else
log_err "gru-transport-active.json: invalid top-level structure"
ERRORS=$((ERRORS + 1))
fi
fi
if command -v node &>/dev/null; then
if PROJECT_ROOT="$PROJECT_ROOT" node <<'NODE'
const fs = require('fs');
const path = require('path');
const projectRoot = process.env.PROJECT_ROOT;
function readJson(relativePath) {
return JSON.parse(fs.readFileSync(path.join(projectRoot, relativePath), 'utf8'));
}
function normalizeAddress(address) {
return typeof address === 'string' ? address.trim().toLowerCase() : '';
}
function isNonZeroAddress(address) {
const normalized = normalizeAddress(address);
return /^0x[a-f0-9]{40}$/.test(normalized) && normalized !== '0x0000000000000000000000000000000000000000';
}
function refConfigured(ref) {
return !!ref && typeof ref === 'object' && (
(typeof ref.address === 'string' && ref.address.trim() !== '') ||
(typeof ref.env === 'string' && ref.env.trim() !== '')
);
}
const active = readJson('config/gru-transport-active.json');
const multichain = readJson('config/token-mapping-multichain.json');
const deployment = readJson('cross-chain-pmm-lps/config/deployment-status.json');
const poolMatrix = readJson('cross-chain-pmm-lps/config/pool-matrix.json');
const currencyManifest = readJson('config/gru-iso4217-currency-manifest.json');
const errors = [];
const canonicalChainId = Number(active.system?.canonicalChainId ?? 138);
const enabledCanonicalTokens = Array.isArray(active.enabledCanonicalTokens) ? active.enabledCanonicalTokens : [];
const enabledCanonical = new Set(enabledCanonicalTokens.map((token) => String(token.symbol)));
const enabledChainsArray = Array.isArray(active.enabledDestinationChains) ? active.enabledDestinationChains : [];
const enabledChains = new Set(enabledChainsArray.map((chain) => Number(chain.chainId)));
const peersByKey = new Map((active.approvedBridgePeers || []).map((peer) => [String(peer.key), peer]));
const reserveVerifiers = active.reserveVerifiers && typeof active.reserveVerifiers === 'object'
? active.reserveVerifiers
: {};
const transportPairsByKey = new Map((active.transportPairs || []).map((pair) => [String(pair.key), pair]));
const publicPoolsByKey = new Map((active.publicPools || []).map((pool) => [String(pool.key), pool]));
const manifestByCode = new Map((currencyManifest.currencies || []).map((currency) => [String(currency.code), currency]));
function getMappingPair(fromChainId, toChainId) {
return (multichain.pairs || []).find(
(entry) => Number(entry.fromChainId) === Number(fromChainId) && Number(entry.toChainId) === Number(toChainId)
);
}
function getMappingToken(fromChainId, toChainId, mappingKey) {
const pair = getMappingPair(fromChainId, toChainId);
if (!pair) return null;
return (pair.tokens || []).find((token) => token.key === mappingKey) || null;
}
function getExpectedPoolKey(chainId, mirroredSymbol) {
const chain = poolMatrix.chains?.[String(chainId)];
const hubStable = typeof chain?.hubStable === 'string' ? chain.hubStable.trim() : '';
if (!hubStable) return null;
return `${chainId}-${mirroredSymbol}-${hubStable}`;
}
for (const chain of active.enabledDestinationChains || []) {
if (!peersByKey.has(String(chain.peerKey || ''))) {
errors.push(`enabledDestinationChains[${chain.chainId}] references missing peerKey ${chain.peerKey}`);
}
}
for (const token of enabledCanonicalTokens) {
const currency = manifestByCode.get(String(token.currencyCode || ''));
if (!currency) {
errors.push(`enabledCanonicalTokens[${token.symbol}] references missing currencyCode ${token.currencyCode} in gru-iso4217-currency-manifest.json`);
continue;
}
if (currency.status?.deployed !== true) {
errors.push(`enabledCanonicalTokens[${token.symbol}] requires manifest currency ${token.currencyCode} to be deployed`);
}
if (currency.status?.transportActive !== true) {
errors.push(`enabledCanonicalTokens[${token.symbol}] requires manifest currency ${token.currencyCode} to mark transportActive=true`);
}
}
for (const pair of active.transportPairs || []) {
const canonicalChainId = Number(pair.canonicalChainId ?? active.system?.canonicalChainId ?? 138);
const destinationChainId = Number(pair.destinationChainId);
const canonicalSymbol = String(pair.canonicalSymbol || '');
const mirroredSymbol = String(pair.mirroredSymbol || '');
if (!enabledCanonical.has(canonicalSymbol)) {
errors.push(`transportPairs[${pair.key}] uses canonicalSymbol ${canonicalSymbol} which is not enabled`);
}
if (!enabledChains.has(destinationChainId)) {
errors.push(`transportPairs[${pair.key}] uses destinationChainId ${destinationChainId} which is not enabled`);
}
const peer = peersByKey.get(String(pair.peerKey || ''));
if (!peer) {
errors.push(`transportPairs[${pair.key}] is missing approved bridge peer ${pair.peerKey}`);
} else {
if (!refConfigured(peer.l1Bridge)) {
errors.push(`approvedBridgePeers[${peer.key}] is missing l1Bridge wiring`);
}
if (!refConfigured(peer.l2Bridge)) {
errors.push(`approvedBridgePeers[${peer.key}] is missing l2Bridge wiring`);
}
}
const maxOutstanding = pair.maxOutstanding && typeof pair.maxOutstanding === 'object' ? pair.maxOutstanding : null;
if (!maxOutstanding || (!maxOutstanding.amount && !maxOutstanding.env)) {
errors.push(`transportPairs[${pair.key}] is missing maxOutstanding amount/env`);
}
const mappingToken = getMappingToken(canonicalChainId, destinationChainId, pair.mappingKey);
if (!mappingToken) {
errors.push(`transportPairs[${pair.key}] mappingKey ${pair.mappingKey} is missing from token-mapping-multichain.json`);
} else {
if (!isNonZeroAddress(mappingToken.addressFrom)) {
errors.push(`transportPairs[${pair.key}] has invalid canonical addressFrom in token-mapping-multichain.json`);
}
if (!isNonZeroAddress(mappingToken.addressTo)) {
errors.push(`transportPairs[${pair.key}] mapping exists but cW pair is not deployed (addressTo missing/zero)`);
}
}
const deploymentChain = deployment.chains?.[String(destinationChainId)];
const deployedMirror = deploymentChain?.cwTokens?.[mirroredSymbol];
if (!deploymentChain || !isNonZeroAddress(deployedMirror)) {
errors.push(`transportPairs[${pair.key}] mapping exists but deployment-status.json has no deployed ${mirroredSymbol} for chain ${destinationChainId}`);
} else if (mappingToken && normalizeAddress(deployedMirror) !== normalizeAddress(mappingToken.addressTo)) {
errors.push(`transportPairs[${pair.key}] deployment-status.json ${mirroredSymbol} does not match token-mapping-multichain.json addressTo`);
}
if ((pair.publicPoolKeys || []).length > 0) {
for (const publicPoolKey of pair.publicPoolKeys) {
if (!publicPoolsByKey.has(String(publicPoolKey))) {
errors.push(`transportPairs[${pair.key}] references missing public pool key ${publicPoolKey}`);
}
}
}
if (pair.reserveVerifierKey) {
const verifier = reserveVerifiers[pair.reserveVerifierKey];
if (!verifier) {
errors.push(`transportPairs[${pair.key}] requires missing reserve verifier ${pair.reserveVerifierKey}`);
} else {
if (!refConfigured(verifier.bridgeRef)) {
errors.push(`reserveVerifiers.${pair.reserveVerifierKey} is missing bridgeRef wiring`);
}
if (!refConfigured(verifier.verifierRef)) {
errors.push(`reserveVerifiers.${pair.reserveVerifierKey} is missing verifierRef wiring`);
}
if (verifier.requireVaultBacking && !refConfigured(verifier.vaultRef)) {
errors.push(`reserveVerifiers.${pair.reserveVerifierKey} requires vault backing but vaultRef is unset`);
}
if (verifier.requireReserveSystemBalance && !refConfigured(verifier.reserveSystemRef)) {
errors.push(`reserveVerifiers.${pair.reserveVerifierKey} requires reserve-system balance checks but reserveSystemRef is unset`);
}
}
}
}
for (const pool of active.publicPools || []) {
if (pool.active === true) {
if (!isNonZeroAddress(pool.poolAddress)) {
errors.push(`publicPools[${pool.key}] is active but has no poolAddress`);
continue;
}
const deploymentChain = deployment.chains?.[String(pool.chainId)];
const deployedPools = Array.isArray(deploymentChain?.pmmPools) ? deploymentChain.pmmPools : [];
const deploymentMatch = deployedPools.some((entry) => normalizeAddress(entry?.poolAddress) === normalizeAddress(pool.poolAddress));
if (!deploymentMatch) {
errors.push(`publicPools[${pool.key}] is active but deployment-status.json does not contain its poolAddress`);
}
}
}
for (const [chainIdKey, deploymentChain] of Object.entries(deployment.chains || {})) {
const destinationChainId = Number(chainIdKey);
if (destinationChainId === canonicalChainId) continue;
if (deploymentChain?.bridgeAvailable !== true) continue;
const mappingPair = getMappingPair(canonicalChainId, destinationChainId);
if (!mappingPair) continue;
let compatible = true;
for (const token of enabledCanonicalTokens) {
const mappingKey = String(token.mappingKey || '');
const mirroredSymbol = String(token.mirroredSymbol || '');
const mappingToken = mappingKey ? (mappingPair.tokens || []).find((entry) => entry.key === mappingKey) : null;
const deployedMirror = deploymentChain?.cwTokens?.[mirroredSymbol];
const expectedPoolKey = getExpectedPoolKey(destinationChainId, mirroredSymbol);
if (
!mappingKey ||
!mappingToken ||
!isNonZeroAddress(mappingToken.addressTo) ||
!isNonZeroAddress(deployedMirror) ||
normalizeAddress(mappingToken.addressTo) !== normalizeAddress(deployedMirror) ||
!expectedPoolKey
) {
compatible = false;
break;
}
}
if (!compatible) continue;
const enabledChain = enabledChainsArray.find((chain) => Number(chain.chainId) === destinationChainId);
if (!enabledChain) {
errors.push(`compatible destination chain ${destinationChainId} (${deploymentChain?.name || 'unknown'}) is missing from enabledDestinationChains`);
continue;
}
for (const token of enabledCanonicalTokens) {
const expectedPairKey = `${canonicalChainId}-${destinationChainId}-${token.symbol}-${token.mirroredSymbol}`;
const expectedPoolKey = getExpectedPoolKey(destinationChainId, String(token.mirroredSymbol || ''));
const pair = transportPairsByKey.get(expectedPairKey);
if (!pair) {
errors.push(`compatible destination chain ${destinationChainId} is missing transport pair ${expectedPairKey}`);
continue;
}
if (expectedPoolKey && !publicPoolsByKey.has(expectedPoolKey)) {
errors.push(`compatible destination chain ${destinationChainId} is missing public pool placeholder ${expectedPoolKey}`);
}
if (expectedPoolKey && !(pair.publicPoolKeys || []).includes(expectedPoolKey)) {
errors.push(`transportPairs[${pair.key}] must include the pool-matrix first-hop key ${expectedPoolKey}`);
}
}
}
if (errors.length > 0) {
console.error(errors.join('\n'));
process.exit(1);
}
NODE
then
log_ok "gru-transport-active.json: overlay cross-checks passed"
else
log_err "gru-transport-active.json: overlay cross-checks failed"
ERRORS=$((ERRORS + 1))
fi
else
log_err "Node.js is required to validate gru-transport-active.json cross-file wiring"
ERRORS=$((ERRORS + 1))
fi
else
log_err "Missing config/gru-transport-active.json"
ERRORS=$((ERRORS + 1))
fi
# Governance supervision profile: optional file; when present (and jq is
# available) assert its structure and its fixed policy invariants.
gov_profile="$PROJECT_ROOT/config/gru-governance-supervision-profile.json"
if [[ -f "$gov_profile" ]]; then
  log_ok "Found: config/gru-governance-supervision-profile.json"
  if command -v jq >/dev/null 2>&1; then
    if jq -e '
(.profileId | type == "string")
and (.requiredAssetMetadata | type == "array")
and (.roles.tokenRoles | type == "array")
and (.roles.registryRoles | type == "array")
and (.roles.governanceRoles | type == "array")
and (.storageNamespaces | type == "object")
and (.proposalPolicy.defaultMinimumUpgradeNoticePeriodSeconds | type == "number")
and (.proposalPolicy.assetScopeRequiredForUpgradeSensitiveChanges == true)
and (.proposalPolicy.jurisdictionDerivedFromRegistryAssetState == true)
and (.proposalPolicy.jurisdictionTransitionRequiresBothAuthorities == true)
and (.proposalPolicy.proposalEntryPoint == "GovernanceController.proposeForAsset")
and (.proposalPolicy.manualJurisdictionTaggingAllowed == false)
and (.proposalPolicy.jurisdictionPolicyEntryPoints | type == "array")
and (.emergencyPolicy.manualAdminPathsRemainAvailable == true)
and ((.emergencyPolicy.authorizedRoles | index("EMERGENCY_ADMIN_ROLE")) != null)
' "$gov_profile" >/dev/null 2>&1; then
      log_ok "gru-governance-supervision-profile.json: top-level structure is valid"
    else
      log_err "gru-governance-supervision-profile.json: invalid top-level structure"
      ERRORS=$((ERRORS + 1))
    fi
  fi
fi
if [[ -f "$PROJECT_ROOT/config/smart-contracts-master.json" ]]; then
  log_ok "Found: config/smart-contracts-master.json"
fi
# Token lists (Uniswap format): validate structure if present
for list in token-lists/lists/dbis-138.tokenlist.json token-lists/lists/cronos.tokenlist.json token-lists/lists/all-mainnet.tokenlist.json; do
  list_path="$PROJECT_ROOT/$list"
  if [[ -f "$list_path" ]] && command -v jq >/dev/null 2>&1; then
    # A valid token list must carry a non-empty .tokens array.
    if jq -e '(.tokens | type == "array") and (.tokens | length > 0)' "$list_path" >/dev/null 2>&1; then
      log_ok "Token list valid: $list"
    else
      log_err "Token list invalid or empty: $list"
      ERRORS=$((ERRORS + 1))
    fi
  fi
done
# DUAL_CHAIN config (explorer deploy source)
dual_chain_list="$PROJECT_ROOT/explorer-monorepo/backend/api/rest/config/metamask/DUAL_CHAIN_TOKEN_LIST.tokenlist.json"
if [[ -f "$dual_chain_list" ]] && command -v jq >/dev/null 2>&1; then
  if jq -e '(.tokens | type == "array") and (.tokens | length > 0)' "$dual_chain_list" >/dev/null 2>&1; then
    log_ok "DUAL_CHAIN_TOKEN_LIST valid"
  else
    log_err "DUAL_CHAIN_TOKEN_LIST invalid or empty"
    ERRORS=$((ERRORS + 1))
  fi
fi
# Public-sector program manifest (served by phoenix-deploy-api GET /api/v1/public-sector/programs)
# Required file: missing counts as an error.
ps_manifest="$PROJECT_ROOT/config/public-sector-program-manifest.json"
if [[ ! -f "$ps_manifest" ]]; then
  log_err "Missing config/public-sector-program-manifest.json"
  ERRORS=$((ERRORS + 1))
else
  log_ok "Found: config/public-sector-program-manifest.json"
  if command -v jq >/dev/null 2>&1; then
    # schemaVersion string, non-empty programs[], and program ids must be unique.
    if jq -e '
(.schemaVersion | type == "string")
and (.programs | type == "array")
and (.programs | length > 0)
and ((.programs | map(.id) | unique | length) == (.programs | length))
' "$ps_manifest" >/dev/null 2>&1; then
      log_ok "public-sector-program-manifest.json: schemaVersion, programs[], unique .id"
    else
      log_err "public-sector-program-manifest.json: invalid structure or duplicate program ids"
      ERRORS=$((ERRORS + 1))
    fi
  fi
fi
# Proxmox operational template (VMID/IP/FQDN mirror; see docs/03-deployment/PROXMOX_VE_OPERATIONAL_DEPLOYMENT_TEMPLATE.md)
# Required file: missing counts as an error.
pve_template="$PROJECT_ROOT/config/proxmox-operational-template.json"
if [[ ! -f "$pve_template" ]]; then
  log_err "Missing config/proxmox-operational-template.json"
  ERRORS=$((ERRORS + 1))
else
  log_ok "Found: config/proxmox-operational-template.json"
  if command -v jq >/dev/null 2>&1; then
    if jq -e '
(.schemaVersion | type == "string")
and (.network.management_lan.gateway | type == "string")
and (.proxmox_nodes | type == "array")
and (.proxmox_nodes | length >= 1)
and (.services | type == "array")
and (.services | length >= 1)
' "$pve_template" >/dev/null 2>&1; then
      log_ok "proxmox-operational-template.json: schema, network, nodes, services"
    else
      log_err "proxmox-operational-template.json: invalid top-level structure"
      ERRORS=$((ERRORS + 1))
    fi
  fi
fi
# ISO-4217 currency manifest: optional file; when present, check the header
# fields, unique currency codes, and each currency entry's shape and flags.
currency_manifest="$PROJECT_ROOT/config/gru-iso4217-currency-manifest.json"
if [[ -f "$currency_manifest" ]]; then
  log_ok "Found: config/gru-iso4217-currency-manifest.json"
  if command -v jq >/dev/null 2>&1; then
    if jq -e '
(.name | type == "string")
and (.version | type == "string")
and (.updated | type == "string")
and (.canonicalChainId | type == "number")
and (.currencies | type == "array")
and ((.currencies | length) > 0)
and ((.currencies | map(.code) | unique | length) == (.currencies | length))
and (
all(.currencies[];
(.code | type == "string")
and ((.code | length) >= 3)
and (.name | type == "string")
and (.type == "fiat" or .type == "commodity")
and ((.minorUnits == null) or (.minorUnits | type == "number"))
and (.status.planned | type == "boolean")
and (.status.deployed | type == "boolean")
and (.status.transportActive | type == "boolean")
and (.status.x402Ready | type == "boolean")
and (.canonicalAssets | type == "object")
)
)
' "$currency_manifest" >/dev/null 2>&1; then
      log_ok "gru-iso4217-currency-manifest.json: top-level manifest structure is valid"
    else
      log_err "gru-iso4217-currency-manifest.json: invalid top-level structure"
      ERRORS=$((ERRORS + 1))
    fi
  fi
fi
# gru-standards-profile.json: structural check plus cross-file wiring check
# against the transport overlay and currency manifest. Unlike most optional
# configs above, a missing profile is an error here.
if [[ -f "$PROJECT_ROOT/config/gru-standards-profile.json" ]]; then
log_ok "Found: config/gru-standards-profile.json"
if command -v jq &>/dev/null; then
# Pass 1: top-level shape of the standards profile.
if jq -e '
(.name | type == "string")
and (.profileId | type == "string")
and (.version | type == "string")
and (.updated | type == "string")
and (.canonicalChainId | type == "number")
and (.scope | type == "object")
and (.paymentProfiles | type == "array")
and (.baseTokenStandards | type == "array")
and (.transportAndWrapperStandards | type == "array")
and (.governanceAndPolicyStandards | type == "array")
' "$PROJECT_ROOT/config/gru-standards-profile.json" &>/dev/null; then
log_ok "gru-standards-profile.json: top-level standards profile structure is valid"
else
log_err "gru-standards-profile.json: invalid top-level structure"
ERRORS=$((ERRORS + 1))
fi
# Pass 2: the profile's canonicalChainId must equal both the overlay's
# system.canonicalChainId and the manifest's canonicalChainId, and its
# references block must point at the expected sibling config/doc paths.
# NOTE(review): if either referenced file is missing, the inner $(jq -r ...)
# yields empty/"null", --argjson then fails, and the mismatch error below is
# reported instead of a clearer "missing file" message.
if jq -e '
(.canonicalChainId == $activeChain)
and (.canonicalChainId == $manifestChain)
and (.references.transportOverlay == "config/gru-transport-active.json")
and (.references.currencyManifest == "config/gru-iso4217-currency-manifest.json")
and (.references.governanceSupervisionProfile == "config/gru-governance-supervision-profile.json")
and (.references.storageGovernanceDoc == "docs/04-configuration/GRU_STORAGE_GOVERNANCE_AND_SUPERVISION_STANDARD.md")
' \
--argjson activeChain "$(jq -r '.system.canonicalChainId' "$PROJECT_ROOT/config/gru-transport-active.json")" \
--argjson manifestChain "$(jq -r '.canonicalChainId' "$PROJECT_ROOT/config/gru-iso4217-currency-manifest.json")" \
"$PROJECT_ROOT/config/gru-standards-profile.json" &>/dev/null; then
log_ok "gru-standards-profile.json: canonical-chain and reference wiring matches active overlay + currency manifest + governance profile"
else
log_err "gru-standards-profile.json: canonical-chain or reference wiring does not match active overlay / currency manifest / governance profile"
ERRORS=$((ERRORS + 1))
fi
fi
else
log_err "Missing config/gru-standards-profile.json"
ERRORS=$((ERRORS + 1))
fi
fi
# ^ closes the default-checks branch opened at the REQUIRED_FILES dispatch.
# Optional env vars: warn-only; a missing variable never fails the run.
if [[ -n "$OPTIONAL_ENV" ]]; then
  # Intentionally unquoted: OPTIONAL_ENV is a space-separated list of names.
  for env_name in $OPTIONAL_ENV; do
    check_env "$env_name" || true
  done
fi
# Auxiliary validators: each runs only when its prerequisites are present.
# run_check executes the wrapped command quietly, logs one line either way,
# and counts a single error on failure.
run_check() {
  local ok_msg="$1" err_msg="$2"
  shift 2
  if "$@" >/dev/null 2>&1; then
    log_ok "$ok_msg"
  else
    log_err "$err_msg"
    ERRORS=$((ERRORS + 1))
  fi
}
# DBIS institutional Digital Master Plan example JSON
if [[ -f "$PROJECT_ROOT/config/dbis-institutional/examples/trust.json" ]] && [[ -x "$SCRIPT_DIR/validate-dbis-institutional-json.sh" ]]; then
  run_check "DBIS institutional examples (JSON parse)" \
    "DBIS institutional examples failed JSON parse" \
    bash "$SCRIPT_DIR/validate-dbis-institutional-json.sh"
fi
if command -v check-jsonschema >/dev/null 2>&1 && [[ -x "$SCRIPT_DIR/validate-dbis-institutional-schemas.sh" ]]; then
  run_check "DBIS institutional JSON Schemas (settlement-event, address-registry-entry)" \
    "DBIS institutional JSON Schema validation failed (pip install check-jsonschema)" \
    env SCHEMA_STRICT=1 bash "$SCRIPT_DIR/validate-dbis-institutional-schemas.sh"
fi
if command -v node >/dev/null 2>&1 && [[ -f "$PROJECT_ROOT/scripts/verify/validate-address-registry-xe-aliases.mjs" ]]; then
  run_check "DBIS institutional web3_eth_iban (XE) example aliases" \
    "DBIS institutional web3_eth_iban validation failed (validate-address-registry-xe-aliases.mjs)" \
    node "$PROJECT_ROOT/scripts/verify/validate-address-registry-xe-aliases.mjs"
fi
if command -v check-jsonschema >/dev/null 2>&1 && [[ -x "$SCRIPT_DIR/validate-naming-convention-registry-examples.sh" ]]; then
  run_check "Naming conventions registry examples (UTRNF / DBIS token-registry-entry schema)" \
    "Naming conventions registry example validation failed (see validate-naming-convention-registry-examples.sh)" \
    env SCHEMA_STRICT=1 bash "$SCRIPT_DIR/validate-naming-convention-registry-examples.sh"
fi
if [[ -f "$PROJECT_ROOT/config/smart-contracts-master.json" ]] && command -v jq >/dev/null 2>&1 && [[ -x "$SCRIPT_DIR/validate-explorer-chain138-inventory.sh" ]]; then
  run_check "Explorer address-inventory Chain 138 vs smart-contracts-master.json" \
    "Explorer address-inventory Chain 138 drift (see validate-explorer-chain138-inventory.sh)" \
    bash "$SCRIPT_DIR/validate-explorer-chain138-inventory.sh"
fi
if [[ -d "$PROJECT_ROOT/config/xdc-zero" ]] && [[ -x "$SCRIPT_DIR/validate-xdc-zero-config.sh" ]]; then
  run_check "config/xdc-zero/*.json (parse)" \
    "config/xdc-zero JSON parse failed (validate-xdc-zero-config.sh)" \
    bash "$SCRIPT_DIR/validate-xdc-zero-config.sh"
fi
# Final verdict: any accumulated error fails the run with guidance; otherwise
# report success and exit cleanly.
if (( ERRORS > 0 )); then
  log_err "Validation failed with $ERRORS error(s). Set VALIDATE_REQUIRED_FILES='path1 path2' to require specific files."
  exit 1
fi
log_ok "Validation passed."
exit 0