chore: sync workspace — configs, docs, scripts, CI, pnpm, submodules
- Submodule pins: dbis_core, cross-chain-pmm-lps, mcp-proxmox (local, push may be pending), metamask-integration, smom-dbis-138 - Atomic swap + cross-chain-pmm-lps-publish, deploy-portal workflow, phoenix deploy-targets, routing/aggregator matrices - Docs, token-lists, forge proxy, phoenix API, runbooks, verify scripts Made-with: Cursor
This commit is contained in:
@@ -203,7 +203,7 @@ CT 2301 (besu-rpc-private-1) may fail to start with `lxc.hook.pre-start` due to
|
||||
|
||||
- **Daily/weekly checks:** `./scripts/maintenance/daily-weekly-checks.sh [daily|weekly|all]` — explorer sync (135), RPC health (136), config API (137). **Cron:** `./scripts/maintenance/schedule-daily-weekly-cron.sh [--install|--show]` (daily 08:00, weekly Sun 09:00). See [OPERATIONAL_RUNBOOKS.md](../docs/03-deployment/OPERATIONAL_RUNBOOKS.md) § Maintenance.
|
||||
- **Start firefly-ali-1 (6201):** `./scripts/maintenance/start-firefly-6201.sh [--dry-run] [--host HOST]` — start CT 6201 on r630-02 when needed (optional ongoing).
|
||||
- **Config validation (pre-deploy):** `./scripts/validation/validate-config-files.sh` — set `VALIDATE_REQUIRED_FILES` for required paths. **CI / all validation:** `./scripts/verify/run-all-validation.sh [--skip-genesis]` — dependencies + config + optional genesis (no LAN/SSH).
|
||||
- **Config validation (pre-deploy):** `./scripts/validation/validate-config-files.sh` — set `VALIDATE_REQUIRED_FILES` for required paths. **CI / all validation:** `./scripts/verify/run-all-validation.sh [--skip-genesis]` — dependencies, config files, **cW\* mesh matrix** (merge of `cross-chain-pmm-lps/config/deployment-status.json` and `reports/extraction/promod-uniswap-v2-live-pair-discovery-latest.json` when that file exists; no RPC), optional genesis (no LAN/SSH). **Matrix only:** `./scripts/verify/build-cw-mesh-deployment-matrix.sh` — stdout markdown; `--json-out reports/status/cw-mesh-deployment-matrix-latest.json` for machine-readable rows.
|
||||
|
||||
### 13. Phase 2, 3 & 4 Deployment Scripts
|
||||
|
||||
|
||||
@@ -1,21 +1,19 @@
|
||||
#!/usr/bin/env bash
|
||||
# Check deployer wallet balances on ChainID 138 (native ETH + ERC-20: WETH, WETH10, LINK, cUSDT, cUSDC).
|
||||
# Output half of each balance as the funding plan for the three PMM liquidity pools.
|
||||
# Check deployer wallet balances on ChainID 138 (native ETH + ERC-20: WETH, WETH10, LINK, cUSDT, cUSDC, cEURT).
|
||||
# Output half of each balance as the funding plan for the canonical PMM liquidity pools.
|
||||
#
|
||||
# Usage:
|
||||
# RPC_URL_138=https://rpc-core.d-bis.org ./scripts/deployment/check-deployer-balance-chain138-and-funding-plan.sh
|
||||
# ./scripts/deployment/check-deployer-balance-chain138-and-funding-plan.sh
|
||||
# RPC_URL_138=https://rpc-core.d-bis.org ./scripts/deployment/... # override
|
||||
# # Or from smom-dbis-138: source .env then run from repo root with RPC_URL_138 set
|
||||
#
|
||||
# Requires: cast (foundry), jq (optional). RPC_URL_138 must be set and reachable.
|
||||
# Requires: cast (foundry), jq (optional). Defaults to public Chain 138 HTTP RPC if RPC_URL_138 unset.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
DEPLOYER="${DEPLOYER_ADDRESS:-0x4A666F96fC8764181194447A7dFdb7d471b301C8}"
|
||||
RPC="${RPC_URL_138:-}"
|
||||
if [ -z "$RPC" ]; then
|
||||
echo "ERROR: Set RPC_URL_138 (e.g. https://rpc-core.d-bis.org or http://192.168.11.211:8545)"
|
||||
exit 1
|
||||
fi
|
||||
CHAIN138_PUBLIC_RPC_DEFAULT="https://rpc-http-pub.d-bis.org"
|
||||
RPC="${RPC_URL_138:-${CHAIN138_PUBLIC_RPC_URL:-$CHAIN138_PUBLIC_RPC_DEFAULT}}"
|
||||
|
||||
CHAIN_ID=138
|
||||
|
||||
@@ -25,27 +23,48 @@ WETH10="0xf4BB2e28688e89fCcE3c0580D37d36A7672E8A9f"
|
||||
LINK="0xb7721dD53A8c629d9f1Ba31a5819AFe250002b03"
|
||||
CUSDT="0x93E66202A11B1772E55407B32B44e5Cd8eda7f22"
|
||||
CUSDC="0xf22258f57794CC8E06237084b353Ab30fFfa640b"
|
||||
CEURT="0xdf4b71c61E5912712C1Bdd451416B9aC26949d72"
|
||||
USDT_OFFICIAL="0x004b63A7B5b0E06f6bB6adb4a5F9f590BF3182D1"
|
||||
|
||||
# PMM pool addresses (from LIQUIDITY_POOLS_MASTER_MAP / ADDRESS_MATRIX)
|
||||
POOL_CUSDTCUSDC="0x9fcB06Aa1FD5215DC0E91Fd098aeff4B62fEa5C8"
|
||||
POOL_CUSDTUSDT="0x6fc60DEDc92a2047062294488539992710b99D71"
|
||||
POOL_CUSDCUSDC="0x90bd9Bf18Daa26Af3e814ea224032d015db58Ea5"
|
||||
POOL_CUSDTCUSDC="0x9e89bAe009adf128782E19e8341996c596ac40dC"
|
||||
POOL_CUSDTUSDT="0x866Cb44b59303d8dc5f4F9E3E7A8e8b0bf238d66"
|
||||
POOL_CUSDCUSDC="0xc39B7D0F40838cbFb54649d327f49a6DAC964062"
|
||||
POOL_CUSDTWETH="0xaE38a008Ba4Dbf8D9F141D03e9dC8f7Dbe0ce17c"
|
||||
POOL_CUSDCWETH="0xAAE68830a55767722618E869882c6Ed064Cc1eb2"
|
||||
POOL_CEURTWETH="0x4a64c886cedF00db42ea5B946D6b304121ad9529"
|
||||
|
||||
get_balance() {
|
||||
local addr="$1"
|
||||
cast call "$addr" "balanceOf(address)(uint256)" "$DEPLOYER" --rpc-url "$RPC" 2>/dev/null || echo "0"
|
||||
cast call "$addr" "balanceOf(address)(uint256)" "$DEPLOYER" --rpc-url "$RPC" 2>/dev/null | awk '{print $1}' || echo "0"
|
||||
}
|
||||
|
||||
get_decimals() {
|
||||
local addr="$1"
|
||||
cast call "$addr" "decimals()(uint8)" --rpc-url "$RPC" 2>/dev/null | cast --to-dec 2>/dev/null || echo "18"
|
||||
half_of() {
|
||||
python3 - "$1" <<'PY'
|
||||
import sys
|
||||
print(int(sys.argv[1]) // 2)
|
||||
PY
|
||||
}
|
||||
|
||||
format_units() {
|
||||
python3 - "$1" "$2" <<'PY'
|
||||
from decimal import Decimal, getcontext
|
||||
import sys
|
||||
getcontext().prec = 80
|
||||
raw = Decimal(sys.argv[1])
|
||||
decimals = int(sys.argv[2])
|
||||
value = raw / (Decimal(10) ** decimals)
|
||||
if decimals == 18:
|
||||
print(f"{value:.6f}")
|
||||
else:
|
||||
print(f"{value:.2f}")
|
||||
PY
|
||||
}
|
||||
|
||||
# Native balance
|
||||
native_wei=$(cast balance "$DEPLOYER" --rpc-url "$RPC" 2>/dev/null || echo "0")
|
||||
native_eth=$(awk "BEGIN { printf \"%.6f\", $native_wei / 1e18 }" 2>/dev/null || echo "0")
|
||||
half_native_wei=$((native_wei / 2))
|
||||
native_wei=$(cast balance "$DEPLOYER" --rpc-url "$RPC" 2>/dev/null | awk '{print $1}' || echo "0")
|
||||
native_eth=$(format_units "$native_wei" 18)
|
||||
half_native_wei=$(half_of "$native_wei")
|
||||
|
||||
echo "============================================"
|
||||
echo "Deployer wallet — ChainID 138"
|
||||
@@ -57,27 +76,23 @@ echo "--- Current balances ---"
|
||||
echo " Native ETH: $native_eth ETH (raw: $native_wei wei)"
|
||||
echo ""
|
||||
|
||||
RAW_WETH=0; RAW_WETH10=0; RAW_LINK=0; RAW_CUSDT=0; RAW_CUSDC=0
|
||||
HALF_WETH=0; HALF_WETH10=0; HALF_LINK=0; HALF_CUSDT=0; HALF_CUSDC=0
|
||||
RAW_WETH=0; RAW_WETH10=0; RAW_LINK=0; RAW_CUSDT=0; RAW_CUSDC=0; RAW_CEURT=0
|
||||
HALF_WETH=0; HALF_WETH10=0; HALF_LINK=0; HALF_CUSDT=0; HALF_CUSDC=0; HALF_CEURT=0
|
||||
|
||||
for entry in "WETH:$WETH:18" "WETH10:$WETH10:18" "LINK:$LINK:18" "cUSDT:$CUSDT:6" "cUSDC:$CUSDC:6"; do
|
||||
for entry in "WETH:$WETH:18" "WETH10:$WETH10:18" "LINK:$LINK:18" "cUSDT:$CUSDT:6" "cUSDC:$CUSDC:6" "cEURT:$CEURT:6"; do
|
||||
sym="${entry%%:*}"; rest="${entry#*:}"; addr="${rest%%:*}"; dec="${rest#*:}"
|
||||
raw=$(get_balance "$addr")
|
||||
half=$((raw / 2))
|
||||
half=$(half_of "$raw")
|
||||
case "$sym" in
|
||||
WETH) RAW_WETH=$raw; HALF_WETH=$half ;;
|
||||
WETH10) RAW_WETH10=$raw; HALF_WETH10=$half ;;
|
||||
LINK) RAW_LINK=$raw; HALF_LINK=$half ;;
|
||||
cUSDT) RAW_CUSDT=$raw; HALF_CUSDT=$half ;;
|
||||
cUSDC) RAW_CUSDC=$raw; HALF_CUSDC=$half ;;
|
||||
cEURT) RAW_CEURT=$raw; HALF_CEURT=$half ;;
|
||||
esac
|
||||
if [ "$dec" = "18" ]; then
|
||||
disp=$(awk "BEGIN { printf \"%.6f\", $raw / 1e18 }" 2>/dev/null || echo "0")
|
||||
half_disp=$(awk "BEGIN { printf \"%.6f\", $half / 1e18 }" 2>/dev/null || echo "0")
|
||||
else
|
||||
disp=$(awk "BEGIN { printf \"%.2f\", $raw / 1e$dec }" 2>/dev/null || echo "0")
|
||||
half_disp=$(awk "BEGIN { printf \"%.2f\", $half / 1e$dec }" 2>/dev/null || echo "0")
|
||||
fi
|
||||
disp=$(format_units "$raw" "$dec")
|
||||
half_disp=$(format_units "$half" "$dec")
|
||||
echo " $sym: $disp (raw: $raw) → half for LP: $half_disp (raw: $half)"
|
||||
done
|
||||
|
||||
@@ -96,18 +111,36 @@ echo "Pool 3: cUSDC/USDC ($POOL_CUSDCUSDC)"
|
||||
echo " Base (cUSDC): $HALF_CUSDC (decimals 6)"
|
||||
echo " Quote (USDC): use same amount in USDC (official) — check deployer USDC balance separately if needed"
|
||||
echo ""
|
||||
echo "Pool 4: cUSDT/WETH ($POOL_CUSDTWETH)"
|
||||
echo " Base (cUSDT): $HALF_CUSDT (decimals 6)"
|
||||
echo " Quote (WETH): $HALF_WETH (decimals 18)"
|
||||
echo ""
|
||||
echo "Pool 5: cUSDC/WETH ($POOL_CUSDCWETH)"
|
||||
echo " Base (cUSDC): $HALF_CUSDC (decimals 6)"
|
||||
echo " Quote (WETH): $HALF_WETH (decimals 18)"
|
||||
echo ""
|
||||
echo "Pool 6: cEURT/WETH ($POOL_CEURTWETH)"
|
||||
echo " Base (cEURT): $HALF_CEURT (decimals 6)"
|
||||
echo " Quote (WETH): $HALF_WETH (decimals 18)"
|
||||
echo ""
|
||||
echo "--- Env vars for AddLiquidityPMMPoolsChain138 (half of cUSDT/cUSDC) ---"
|
||||
echo "# Add to smom-dbis-138/.env and run: forge script script/dex/AddLiquidityPMMPoolsChain138.s.sol:AddLiquidityPMMPoolsChain138 --rpc-url \$RPC_URL_138 --broadcast --private-key \$PRIVATE_KEY"
|
||||
echo "POOL_CUSDTCUSDC=$POOL_CUSDTCUSDC"
|
||||
echo "POOL_CUSDTUSDT=$POOL_CUSDTUSDT"
|
||||
echo "POOL_CUSDCUSDC=$POOL_CUSDCUSDC"
|
||||
echo "POOL_CUSDTWETH=$POOL_CUSDTWETH"
|
||||
echo "POOL_CUSDCWETH=$POOL_CUSDCWETH"
|
||||
echo "POOL_CEURTWETH=$POOL_CEURTWETH"
|
||||
echo "ADD_LIQUIDITY_BASE_AMOUNT=$HALF_CUSDT"
|
||||
echo "ADD_LIQUIDITY_QUOTE_AMOUNT=$HALF_CUSDC"
|
||||
echo "# For pool cUSDT/cUSDC only (base=cUSDT, quote=cUSDC). For cUSDT/USDT and cUSDC/USDC use per-pool vars:"
|
||||
echo "# ADD_LIQUIDITY_CUSDTUSDT_BASE=$HALF_CUSDT ADD_LIQUIDITY_CUSDTUSDT_QUOTE=<deployer USDT balance / 2>"
|
||||
echo "# ADD_LIQUIDITY_CUSDCUSDC_BASE=$HALF_CUSDC ADD_LIQUIDITY_CUSDCUSDC_QUOTE=<deployer USDC balance / 2>"
|
||||
echo "# ADD_LIQUIDITY_CUSDTWETH_BASE=$HALF_CUSDT ADD_LIQUIDITY_CUSDTWETH_QUOTE=$HALF_WETH"
|
||||
echo "# ADD_LIQUIDITY_CUSDCWETH_BASE=$HALF_CUSDC ADD_LIQUIDITY_CUSDCWETH_QUOTE=$HALF_WETH"
|
||||
echo "# ADD_LIQUIDITY_CEURTWETH_BASE=$HALF_CEURT ADD_LIQUIDITY_CEURTWETH_QUOTE=$HALF_WETH"
|
||||
echo ""
|
||||
echo "--- Reserve ---"
|
||||
echo " Keep half of native ETH for gas. Half for LP (if wrapping to WETH for a pool): $((half_native_wei / 2)) wei."
|
||||
echo " Keep half of native ETH for gas. Half for LP (if wrapping to WETH for a pool): $(half_of "$half_native_wei") wei."
|
||||
echo " WETH/LINK: half amounts above can be reserved for other use or future pools."
|
||||
echo "============================================"
|
||||
|
||||
516
scripts/deployment/check-deployer-lp-balances.py
Executable file
516
scripts/deployment/check-deployer-lp-balances.py
Executable file
@@ -0,0 +1,516 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Enumerate PMM pool addresses (deployment-status.json) and Uniswap V2 pair addresses
|
||||
(pair-discovery JSON), then report deployer balances with **LP token resolution**.
|
||||
|
||||
**Uniswap V2:** The pair contract *is* the LP ERC-20 (`lpToken` = pair).
|
||||
|
||||
**DODO PMM (DVM / IDODOPMMPool):** Official DODO Vending Machine pools inherit ERC-20;
|
||||
`balanceOf(pool)` is the LP share balance — the pool address **is** the LP token.
|
||||
|
||||
**DODO V1-style PMM:** Some pools expose ``_BASE_CAPITAL_TOKEN_`` / ``_QUOTE_CAPITAL_TOKEN_``;
|
||||
LP exposure may be split across two capital ERC-20s (balances reported separately).
|
||||
|
||||
When ``balanceOf(pool)`` fails (RPC flake, proxy, or non-DVM), this script optionally
|
||||
re-probes with DODO view calls and alternate public RPCs (see ``--resolve-dodo``).
|
||||
|
||||
Deployer: ``--deployer`` / ``DEPLOYER_ADDRESS`` / ``PRIVATE_KEY`` (see below).
|
||||
|
||||
Usage:
|
||||
python3 scripts/deployment/check-deployer-lp-balances.py --summary-only
|
||||
python3 scripts/deployment/check-deployer-lp-balances.py --resolve-dodo --json-out /tmp/lp.json
|
||||
|
||||
Requires: ``cast`` (Foundry). Environment: ``DEPLOYER_ADDRESS``, ``PRIVATE_KEY``, etc.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
DEFAULT_STATUS = ROOT / "cross-chain-pmm-lps" / "config" / "deployment-status.json"
|
||||
DEFAULT_DISCOVERY = ROOT / "reports" / "extraction" / "promod-uniswap-v2-live-pair-discovery-latest.json"
|
||||
DEFAULT_ENV = ROOT / "smom-dbis-138" / ".env"
|
||||
|
||||
ZERO = "0x0000000000000000000000000000000000000000"
|
||||
|
||||
DEFAULT_RPC: dict[str, str] = {
|
||||
"1": "https://eth.llamarpc.com",
|
||||
"10": "https://mainnet.optimism.io",
|
||||
"25": "https://evm.cronos.org",
|
||||
"56": "https://bsc-dataseed.binance.org",
|
||||
"100": "https://rpc.gnosischain.com",
|
||||
"137": "https://polygon-rpc.com",
|
||||
"8453": "https://mainnet.base.org",
|
||||
"42161": "https://arbitrum-one.publicnode.com",
|
||||
"42220": "https://forno.celo.org",
|
||||
"43114": "https://avalanche-c-chain.publicnode.com",
|
||||
"1111": "https://api.wemix.com",
|
||||
}
|
||||
|
||||
# Extra public RPCs (retry when primary fails — connection resets, rate limits).
|
||||
RPC_FALLBACKS: dict[str, list[str]] = {
|
||||
"1": [
|
||||
"https://ethereum.publicnode.com",
|
||||
"https://1rpc.io/eth",
|
||||
"https://rpc.ankr.com/eth",
|
||||
],
|
||||
"137": ["https://polygon-bor.publicnode.com", "https://1rpc.io/matic"],
|
||||
"42161": ["https://arbitrum.llamarpc.com"],
|
||||
"56": ["https://bsc.publicnode.com"],
|
||||
"8453": ["https://base.llamarpc.com"],
|
||||
"10": ["https://optimism.publicnode.com"],
|
||||
}
|
||||
|
||||
RPC_KEYS: dict[str, list[str]] = {
|
||||
"1": ["ETHEREUM_MAINNET_RPC", "ETH_MAINNET_RPC_URL"],
|
||||
"10": ["OPTIMISM_RPC_URL", "OPTIMISM_MAINNET_RPC"],
|
||||
"25": ["CRONOS_RPC_URL", "CRONOS_MAINNET_RPC"],
|
||||
"56": ["BSC_RPC_URL", "BSC_MAINNET_RPC"],
|
||||
"100": ["GNOSIS_RPC_URL", "GNOSIS_MAINNET_RPC", "GNOSIS_RPC"],
|
||||
"137": ["POLYGON_MAINNET_RPC", "POLYGON_RPC_URL"],
|
||||
"138": ["RPC_URL_138", "CHAIN_138_RPC_URL"],
|
||||
"8453": ["BASE_RPC_URL", "BASE_MAINNET_RPC"],
|
||||
"42161": ["ARBITRUM_RPC_URL", "ARBITRUM_MAINNET_RPC"],
|
||||
"42220": ["CELO_RPC_URL", "CELO_MAINNET_RPC", "CELO_RPC"],
|
||||
"43114": ["AVALANCHE_RPC_URL", "AVALANCHE_MAINNET_RPC"],
|
||||
"1111": ["WEMIX_RPC_URL", "WEMIX_RPC"],
|
||||
"651940": ["ALL_MAINNET_RPC", "CHAIN_651940_RPC_URL"],
|
||||
}
|
||||
|
||||
|
||||
def load_dotenv(path: Path) -> dict[str, str]:
    """Parse a KEY=VALUE dotenv file into a dict.

    Blank lines, ``#`` comments, and lines without ``=`` are skipped; values
    are stripped of surrounding single/double quotes. Missing file → {}.
    """
    result: dict[str, str] = {}
    if not path.is_file():
        return result
    for raw_line in path.read_text().splitlines():
        stripped = raw_line.strip()
        if not stripped or stripped.startswith("#") or "=" not in stripped:
            continue
        key, _, value = stripped.partition("=")
        result[key.strip()] = value.strip().strip('"').strip("'")
    return result
|
||||
|
||||
|
||||
def resolve(env: dict[str, str], key: str, default: str = "") -> str:
    """Resolve ``env[key]``, expanding one level of ``${NAME:-fallback}``.

    Mirrors shell semantics loosely: if the stored value looks like
    ``${NAME:-fallback}``, return ``env[NAME]`` when present, else the literal
    fallback. Note: unlike the shell, an *empty* ``env[NAME]`` is returned
    as-is (no fallback). Returns ``default`` when the key is missing/empty.

    Fix: require a closing ``}`` before expanding — the previous version
    treated malformed values like ``${B:-fb`` as expandable and silently
    dropped their last character.
    """
    v = env.get(key, "")
    if v.startswith("${") and v.endswith("}") and ":-" in v:
        inner = v[2:-1]  # strip "${" and "}"
        name, _, fallback = inner.partition(":-")
        return env.get(name, fallback)
    return v or default
|
||||
|
||||
|
||||
def rpc_for(env: dict[str, str], cid: str) -> str:
    """Pick an RPC URL for chain ``cid``.

    Order: dotenv keys from RPC_KEYS (ignoring unresolved ``$…`` placeholders),
    then the built-in DEFAULT_RPC entry, then — for chain 138 only — the
    RPC_URL_138 dotenv value with a localhost default. Empty string if nothing
    resolves.
    """
    for env_key in RPC_KEYS.get(cid, []):
        candidate = resolve(env, env_key, "")
        # Skip unresolved shell-style placeholders such as "$FOO".
        if candidate and not candidate.startswith("$"):
            return candidate
    builtin = DEFAULT_RPC.get(cid, "")
    if builtin:
        return builtin
    if cid == "138":
        return resolve(env, "RPC_URL_138", "http://127.0.0.1:8545")
    return ""
|
||||
|
||||
|
||||
def rpc_chain_list(env: dict[str, str], cid: str) -> list[str]:
    """Ordered, de-duplicated RPC URLs for a chain: primary first, then the
    public fallbacks from RPC_FALLBACKS."""
    urls: list[str] = []
    for candidate in [rpc_for(env, cid), *RPC_FALLBACKS.get(cid, [])]:
        # Lists here are tiny, so linear membership checks are fine.
        if candidate and candidate not in urls:
            urls.append(candidate)
    return urls
|
||||
|
||||
|
||||
def deployer_address(env: dict[str, str], override: str | None) -> str:
|
||||
if override:
|
||||
return override
|
||||
for k in ("DEPLOYER_ADDRESS", "DEPLOYER"):
|
||||
v = (os.environ.get(k) or "").strip()
|
||||
if v:
|
||||
return v
|
||||
pk = env.get("PRIVATE_KEY", "") or (os.environ.get("PRIVATE_KEY") or "").strip()
|
||||
if pk:
|
||||
r = subprocess.run(
|
||||
["cast", "wallet", "address", pk],
|
||||
capture_output=True,
|
||||
text=True,
|
||||
check=False,
|
||||
)
|
||||
if r.returncode == 0 and r.stdout.strip():
|
||||
return r.stdout.strip()
|
||||
return (env.get("DEPLOYER_ADDRESS") or "").strip()
|
||||
|
||||
|
||||
def parse_uint(s: str) -> int:
    """First whitespace-separated token of ``s`` as an int (cast may append
    scientific-notation hints after the raw value)."""
    first_token = s.strip().split()[0]
    return int(first_token)
|
||||
|
||||
|
||||
def parse_address_line(s: str) -> str | None:
|
||||
s = s.strip()
|
||||
if not s:
|
||||
return None
|
||||
m = re.search(r"(0x[a-fA-F0-9]{40})", s)
|
||||
return m.group(1) if m else None
|
||||
|
||||
|
||||
def cast_call(to: str, sig: str, rpc: str) -> tuple[str, str]:
    """Invoke ``cast call`` against ``rpc``.

    Returns ("ok", stdout) on success, else ("err", truncated diagnostic).
    """
    proc = subprocess.run(
        ["cast", "call", to, sig, "--rpc-url", rpc],
        capture_output=True,
        text=True,
    )
    if proc.returncode == 0:
        return "ok", proc.stdout.strip()
    diagnostic = (proc.stderr or proc.stdout or "").strip()
    return "err", diagnostic[:400]
|
||||
|
||||
|
||||
def erc20_balance(token: str, holder: str, rpc: str) -> tuple[str, int | str]:
    """ERC-20 ``balanceOf(holder)`` via ``cast call``.

    Returns ("ok", balance_int) or ("err", truncated diagnostic).
    """
    proc = subprocess.run(
        ["cast", "call", token, "balanceOf(address)(uint256)", holder, "--rpc-url", rpc],
        capture_output=True,
        text=True,
    )
    if proc.returncode != 0:
        return "err", (proc.stderr or proc.stdout or "").strip()[:400]
    try:
        return "ok", parse_uint(proc.stdout)
    except (ValueError, IndexError):
        # cast returned something that is not a decimal integer token.
        return "err", f"parse:{proc.stdout[:120]}"
|
||||
|
||||
|
||||
def erc20_balance_any_rpc(
    token: str, holder: str, rpcs: list[str]
) -> tuple[str, int | str, str]:
    """Try ``erc20_balance`` on each RPC in order until one succeeds.

    Returns (status, value_or_last_error, rpc_used). On total failure the
    reported RPC is the first in the list (or "" when the list is empty).
    """
    last_error = ""
    for candidate_rpc in rpcs:
        status, value = erc20_balance(token, holder, candidate_rpc)
        if status == "ok":
            return status, value, candidate_rpc
        last_error = str(value)
    fallback_rpc = rpcs[0] if rpcs else ""
    return "err", last_error, fallback_rpc
|
||||
|
||||
|
||||
def resolve_pmm_row(
    pool: str,
    deployer: str,
    rpcs: list[str],
    do_resolve_dodo: bool,
) -> dict:
    """
    Build a result dict with lp resolution fields.
    Tries: pool ERC20 balance (any RPC) -> DODO _BASE/_QUOTE -> capital tokens.
    """
    # Start pessimistic; fields are overwritten as each probe succeeds.
    rec: dict = {
        "contract": pool,
        "lpTokenAddress": pool,
        "lpResolution": "unknown",
        "dodoBaseToken": None,
        "dodoQuoteToken": None,
        "lpBalances": [],
        "balanceRaw": 0,
        "status": "pending",
        "error": None,
        "rpcUsed": rpcs[0] if rpcs else None,
    }

    # Probe 1: treat the pool itself as the LP ERC-20 (DVM-style pools).
    st, val, used = erc20_balance_any_rpc(pool, deployer, rpcs)
    rec["rpcUsed"] = used
    if st == "ok":
        rec["status"] = "ok"
        rec["lpResolution"] = "dvm_or_erc20_pool"
        rec["balanceRaw"] = int(val)
        rec["lpBalances"] = [
            {
                "role": "lp_erc20",
                "token": pool,
                "raw": int(val),
                "note": "balanceOf(pool): DVM LP shares are usually the pool contract itself",
            }
        ]
        return rec

    # balanceOf failed; record the error and stop here unless DODO probing
    # was requested (the caller passes not --no-resolve-dodo).
    rec["error"] = str(val)
    if not do_resolve_dodo:
        rec["status"] = "erc20_error"
        rec["lpResolution"] = "unresolved_pass_resolve_dodo"
        return rec

    # Probe 2: DODO interface — read _BASE_TOKEN_ / _QUOTE_TOKEN_ views,
    # trying each RPC until one answers.
    base_tok: str | None = None
    quote_tok: str | None = None
    for rpc in rpcs:
        stb, outb = cast_call(pool, "_BASE_TOKEN_()(address)", rpc)
        if stb == "ok":
            base_tok = parse_address_line(outb)
            rec["rpcUsed"] = rpc
            break
    if base_tok:
        rec["dodoBaseToken"] = base_tok
        for rpc in rpcs:
            stq, outq = cast_call(pool, "_QUOTE_TOKEN_()(address)", rpc)
            if stq == "ok":
                quote_tok = parse_address_line(outq)
                rec["dodoQuoteToken"] = quote_tok
                break

    # Retry pool balanceOf after confirming DVM interface (fresh RPC may fix flake)
    st2, val2, used2 = erc20_balance_any_rpc(pool, deployer, rpcs)
    if st2 == "ok":
        rec["status"] = "ok"
        rec["lpResolution"] = "dvm_erc20_pool_after_probe"
        rec["balanceRaw"] = int(val2)
        rec["rpcUsed"] = used2
        rec["error"] = None
        rec["lpBalances"] = [
            {
                "role": "lp_erc20",
                "token": pool,
                "raw": int(val2),
                "note": "balanceOf(pool) succeeded after _BASE_TOKEN_ probe + RPC retry",
            }
        ]
        return rec

    # Probe 3: DODO V1-style pools split LP exposure across two capital
    # ERC-20s; read each capital token address then its balanceOf(deployer).
    capital_balances: list[dict] = []
    for cap_sig, role in (
        ("_BASE_CAPITAL_TOKEN_()(address)", "base_capital"),
        ("_QUOTE_CAPITAL_TOKEN_()(address)", "quote_capital"),
    ):
        tok_a: str | None = None
        for rpc in rpcs:
            stc, outc = cast_call(pool, cap_sig, rpc)
            if stc == "ok":
                tok_a = parse_address_line(outc)
                if tok_a and tok_a != ZERO:
                    bst, bval, bused = erc20_balance_any_rpc(tok_a, deployer, rpcs)
                    if bst == "ok":
                        capital_balances.append(
                            {
                                "role": role,
                                "token": tok_a,
                                "raw": int(bval),
                                "note": f"DODO V1-style {cap_sig.split('(')[0]} balanceOf",
                            }
                        )
                # First RPC that answered the view call decides; stop probing.
                break

    if capital_balances:
        rec["status"] = "ok"
        rec["lpResolution"] = "v1_capital_tokens"
        rec["lpTokenAddress"] = pool
        rec["lpBalances"] = capital_balances
        # Aggregate across both capital legs (units may differ per token —
        # NOTE(review): raw sum only; not value-weighted).
        rec["balanceRaw"] = sum(x["raw"] for x in capital_balances)
        rec["error"] = None
        return rec

    # All probes failed; classify by whether the DODO interface responded.
    if base_tok:
        rec["lpResolution"] = "dvm_interface_no_balance"
        rec["status"] = "erc20_error"
        rec["error"] = (
            f"Pool responds as DVM (_BASE_TOKEN_={base_tok}) but balanceOf(pool) failed: {rec.get('error', '')[:200]}"
        )
    else:
        rec["status"] = "erc20_error"
        rec["lpResolution"] = "unresolved"
    return rec
|
||||
|
||||
|
||||
def collect_entries(status_path: Path, discovery_path: Path) -> list[tuple]:
    """Gather (chainId, network, venue, label, address) rows.

    Sources: PMM pools from the deployment-status JSON, plus Uniswap V2 pairs
    from the discovery JSON when that file exists. Rows are de-duplicated by
    (chainId, lowercased address), keeping the first occurrence.
    """
    status = json.loads(status_path.read_text())
    collected: list[tuple] = []
    for cid, chain in (status.get("chains") or {}).items():
        network = chain.get("name", cid)
        pools = (
            (chain.get("pmmPools") or [])
            + (chain.get("pmmPoolsVolatile") or [])
            + (chain.get("gasPmmPools") or [])
        )
        for pool in pools:
            address = pool.get("poolAddress") or ""
            if not address or address == ZERO:
                continue
            pair_label = f"{pool.get('base')}/{pool.get('quote')}"
            collected.append((cid, network, "PMM", pair_label, address))
    if discovery_path.is_file():
        discovery = json.loads(discovery_path.read_text())
        for entry in discovery.get("entries") or []:
            cid = str(entry.get("chain_id"))
            network = entry.get("network", cid)
            for pair in entry.get("pairsChecked") or []:
                address = pair.get("poolAddress") or ""
                if not address or address == ZERO:
                    continue
                pair_label = f"{pair.get('base')}/{pair.get('quote')}"
                collected.append((cid, network, "UniV2", pair_label, address))
    # De-duplicate while preserving discovery order.
    unique: list[tuple] = []
    seen_keys: set[tuple[str, str]] = set()
    for row in collected:
        key = (row[0], row[4].lower())
        if key not in seen_keys:
            seen_keys.add(key)
            unique.append(row)
    return unique
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point.

    Loads the dotenv, resolves the deployer, enumerates PMM + UniV2 pool rows,
    checks LP balances per chain (with optional DODO interface probing), prints
    a summary, and optionally writes full/error JSON reports. Returns a process
    exit code (0 on success, 1 when no deployer can be resolved).
    """
    ap = argparse.ArgumentParser(
        description="Deployer LP balances with DODO / Uni V2 LP token resolution."
    )
    ap.add_argument("--status", type=Path, default=DEFAULT_STATUS)
    ap.add_argument("--discovery", type=Path, default=DEFAULT_DISCOVERY)
    ap.add_argument("--env", type=Path, default=DEFAULT_ENV)
    ap.add_argument("--deployer", default=None)
    ap.add_argument("--summary-only", action="store_true")
    ap.add_argument("--only-nonzero", action="store_true")
    ap.add_argument(
        "--no-resolve-dodo",
        action="store_true",
        help="Skip DODO _BASE_TOKEN_ / capital-token probes and extra RPC fallbacks (faster; more erc20_error).",
    )
    ap.add_argument(
        "--chain-id",
        type=int,
        default=None,
        metavar="N",
        help="Only check this chain (e.g. 1 for Ethereum). Default: all chains.",
    )
    ap.add_argument(
        "--json-out",
        type=Path,
        default=None,
        help="Full report JSON.",
    )
    ap.add_argument(
        "--errors-json",
        type=Path,
        default=None,
        help="Rows that remain erc20_error or no_rpc.",
    )
    args = ap.parse_args()

    env = load_dotenv(args.env)
    dep = deployer_address(env, args.deployer)
    if not dep:
        print("No deployer: set PRIVATE_KEY or DEPLOYER_ADDRESS in .env or pass --deployer", file=sys.stderr)
        return 1

    rows = collect_entries(args.status, args.discovery)
    if args.chain_id is not None:
        # Row chain ids are strings; normalize the filter for comparison.
        want = str(args.chain_id)
        rows = [r for r in rows if r[0] == want]
    results: list[dict] = []
    nonzero: list[dict] = []
    errors: list[dict] = []

    # Deterministic order: numeric chain id, then venue, then pair label.
    for cid, name, venue, label, addr in sorted(rows, key=lambda x: (int(x[0]), x[2], x[3])):
        rpcs = rpc_chain_list(env, cid)
        base_rec: dict = {
            "chainId": cid,
            "network": name,
            "venue": venue,
            "pair": label,
            "contract": addr,
        }
        if not rpcs or not rpcs[0]:
            base_rec["status"] = "no_rpc"
            base_rec["lpResolution"] = "no_rpc"
            errors.append(base_rec)
            results.append(base_rec)
            continue

        if venue == "UniV2":
            # Uniswap V2: the pair contract itself is the LP ERC-20.
            st, val, used = erc20_balance_any_rpc(addr, dep, rpcs)
            r = {
                **base_rec,
                "lpTokenAddress": addr,
                "lpResolution": "uniswap_v2_pair",
                "rpcUsed": used,
                "lpBalances": [
                    {
                        "role": "pair_lp",
                        "token": addr,
                        "raw": int(val) if st == "ok" else 0,
                        "note": "Uniswap V2 pair contract is the LP ERC-20",
                    }
                ],
                "balanceRaw": int(val) if st == "ok" else 0,
                "status": "ok" if st == "ok" else "erc20_error",
                "error": None if st == "ok" else str(val),
                "dodoBaseToken": None,
                "dodoQuoteToken": None,
            }
            if st != "ok":
                r["status"] = "erc20_error"
                errors.append(r)
            elif r["balanceRaw"] > 0:
                nonzero.append(r)
            results.append(r)
            continue

        # PMM
        r = {**base_rec, **resolve_pmm_row(addr, dep, rpcs, not args.no_resolve_dodo)}
        if r.get("status") == "ok" and r.get("balanceRaw", 0) > 0:
            nonzero.append(r)
        if r.get("status") != "ok":
            errors.append(r)
        results.append(r)

    # Summary stats
    by_res: dict[str, int] = {}
    for r in results:
        lr = r.get("lpResolution") or "unknown"
        by_res[lr] = by_res.get(lr, 0) + 1

    print(f"Deployer: {dep}")
    print(f"Contracts checked: {len(rows)}")
    print(f"Non-zero LP exposure (sum of components): {len(nonzero)}")
    print(f"Errors / no RPC: {len(errors)}")
    print(f"Resolution breakdown: {by_res}")
    if args.no_resolve_dodo:
        print("(Re-run without --no-resolve-dodo to probe DODO interfaces + RPC fallbacks.)")

    if not args.summary_only:
        print("\n=== Non-zero LP / capital balances ===")
        for r in nonzero:
            lp = r.get("lpTokenAddress", r.get("contract"))
            print(
                f" chain {r['chainId']} {r['venue']} {r['pair']} | lpToken={lp} | "
                f"resolution={r.get('lpResolution')} | raw_total={r.get('balanceRaw')}"
            )
            for leg in r.get("lpBalances") or []:
                if leg.get("raw", 0) > 0:
                    print(f" - {leg.get('role')} {leg.get('token')}: {leg.get('raw')} ({leg.get('note', '')[:60]})")
        if not args.only_nonzero and errors:
            print("\nSample unresolved / errors:")
            # Cap the sample at 12 rows to keep terminal output readable.
            for r in errors[:12]:
                e = r.get("error", r.get("status", ""))
                print(
                    f" chain {r['chainId']} {r['venue']} {r['pair']} | "
                    f"{r.get('lpResolution', '')}: {str(e)[:120]}"
                )

    if args.json_out:
        payload = {
            "deployer": dep,
            "resolveDodo": not args.no_resolve_dodo,
            "summary": {
                "checked": len(rows),
                "nonzero": len(nonzero),
                "errors": len(errors),
                "byLpResolution": by_res,
            },
            "nonzero": nonzero,
            "all": results,
        }
        args.json_out.parent.mkdir(parents=True, exist_ok=True)
        args.json_out.write_text(json.dumps(payload, indent=2) + "\n")
        print(f"Wrote {args.json_out}")

    if args.errors_json:
        err_only = [r for r in results if r.get("status") in ("no_rpc", "erc20_error")]
        args.errors_json.parent.mkdir(parents=True, exist_ok=True)
        args.errors_json.write_text(json.dumps(err_only, indent=2) + "\n")
        print(f"Wrote {args.errors_json} ({len(err_only)} rows)")

    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Script entry point: propagate main()'s status code to the shell.
    sys.exit(main())
|
||||
@@ -1,31 +1,48 @@
|
||||
#!/usr/bin/env bash
|
||||
# Check deployer nonce and balance on Mainnet, Cronos, and Arbitrum.
|
||||
# Check deployer nonce and balance on Mainnet, Cronos, and Arbitrum (public RPCs by default).
|
||||
# Use to diagnose "nonce too high" / "invalid nonce" and "insufficient funds" before retrying cW* deploy.
|
||||
# Usage: ./scripts/deployment/check-deployer-nonce-and-balance.sh
|
||||
# Requires: smom-dbis-138/.env with PRIVATE_KEY, ETHEREUM_MAINNET_RPC, CRONOS_RPC_URL, ARBITRUM_MAINNET_RPC
|
||||
# Requires: PRIVATE_KEY (repo root .env, smom-dbis-138/.env, or ~/.secure-secrets/private-keys.env via load-project-env).
|
||||
# Optional: ETHEREUM_MAINNET_RPC, CRONOS_RPC_URL, ARBITRUM_MAINNET_RPC — if unset, uses public endpoints
|
||||
# (override per chain with ETHEREUM_MAINNET_PUBLIC_RPC / CRONOS_MAINNET_PUBLIC_RPC / ARBITRUM_MAINNET_PUBLIC_RPC).
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
|
||||
SMOM="${PROJECT_ROOT}/smom-dbis-138"
|
||||
[[ -f "$SMOM/.env" ]] || { echo "Missing $SMOM/.env" >&2; exit 1; }
|
||||
set -a
|
||||
source "$SMOM/.env"
|
||||
set +a
|
||||
# shellcheck disable=SC1091
|
||||
source "${PROJECT_ROOT}/scripts/lib/load-project-env.sh"
|
||||
|
||||
# Defaults when .env does not set provider URLs (read-only public RPCs).
|
||||
PUBLIC_ETHEREUM_RPC="${ETHEREUM_MAINNET_PUBLIC_RPC:-https://ethereum-rpc.publicnode.com}"
|
||||
PUBLIC_CRONOS_RPC="${CRONOS_MAINNET_PUBLIC_RPC:-https://evm.cronos.org}"
|
||||
PUBLIC_ARBITRUM_RPC="${ARBITRUM_MAINNET_PUBLIC_RPC:-https://arbitrum-one-rpc.publicnode.com}"
|
||||
|
||||
DEPLOYER=""
|
||||
if [[ -n "${PRIVATE_KEY:-}" ]]; then
|
||||
DEPLOYER=$(cast wallet address "$PRIVATE_KEY" 2>/dev/null || true)
|
||||
fi
|
||||
[[ -z "$DEPLOYER" ]] && { echo "Could not derive deployer address (set PRIVATE_KEY in $SMOM/.env)" >&2; exit 1; }
|
||||
[[ -z "$DEPLOYER" ]] && {
|
||||
echo "Could not derive deployer address. Set PRIVATE_KEY in ${PROJECT_ROOT}/.env, smom-dbis-138/.env, or ~/.secure-secrets/private-keys.env" >&2
|
||||
exit 1
|
||||
}
|
||||
echo "Deployer address: $DEPLOYER"
|
||||
echo ""
|
||||
|
||||
for label in "Mainnet (1)" "Cronos (25)" "Arbitrum (42161)"; do
|
||||
case "$label" in
|
||||
"Mainnet (1)") RPC="${ETHEREUM_MAINNET_RPC:-$ETH_MAINNET_RPC_URL}"; CHAIN=1 ;;
|
||||
"Cronos (25)") RPC="${CRONOS_RPC_URL:-$CRONOS_RPC}"; CHAIN=25 ;;
|
||||
"Arbitrum (42161)") RPC="${ARBITRUM_MAINNET_RPC:-$ARBITRUM_MAINNET_RPC_URL}"; CHAIN=42161 ;;
|
||||
"Mainnet (1)")
|
||||
RPC="${ETHEREUM_MAINNET_RPC:-${ETH_MAINNET_RPC_URL:-$PUBLIC_ETHEREUM_RPC}}"
|
||||
CHAIN=1
|
||||
;;
|
||||
"Cronos (25)")
|
||||
RPC="${CRONOS_RPC_URL:-${CRONOS_RPC:-$PUBLIC_CRONOS_RPC}}"
|
||||
CHAIN=25
|
||||
;;
|
||||
"Arbitrum (42161)")
|
||||
RPC="${ARBITRUM_MAINNET_RPC:-${ARBITRUM_MAINNET_RPC_URL:-$PUBLIC_ARBITRUM_RPC}}"
|
||||
CHAIN=42161
|
||||
;;
|
||||
esac
|
||||
[[ -z "$RPC" ]] && { echo "$label: no RPC set, skip"; continue; }
|
||||
NONCE=$(cast nonce "$DEPLOYER" --rpc-url "$RPC" 2>/dev/null || echo "?")
|
||||
|
||||
144
scripts/deployment/deploy-atomic-swap-dapp-5801.sh
Normal file
144
scripts/deployment/deploy-atomic-swap-dapp-5801.sh
Normal file
@@ -0,0 +1,144 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
|
||||
SUBMODULE_ROOT="$PROJECT_ROOT/atomic-swap-dapp"
|
||||
source "$PROJECT_ROOT/config/ip-addresses.conf" 2>/dev/null || true
|
||||
PROXMOX_HOST="${PROXMOX_HOST:-${PROXMOX_HOST_R630_02:-192.168.11.12}}"
|
||||
VMID="${VMID:-5801}"
|
||||
DEPLOY_ROOT="${DEPLOY_ROOT:-/var/www/atomic-swap}"
|
||||
TMP_ARCHIVE="/tmp/atomic-swap-dapp-5801.tgz"
|
||||
DIST_DIR="$SUBMODULE_ROOT/dist"
|
||||
SKIP_BUILD="${SKIP_BUILD:-0}"
|
||||
|
||||
# Remove the staged upload archive; registered below as an EXIT trap so the
# temp file never outlives the script.
cleanup() {
  rm -f -- "$TMP_ARCHIVE"
}
|
||||
|
||||
trap cleanup EXIT
|
||||
|
||||
if [ ! -d "$SUBMODULE_ROOT" ]; then
|
||||
echo "Missing submodule at $SUBMODULE_ROOT" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
cd "$SUBMODULE_ROOT"
|
||||
if [ "$SKIP_BUILD" != "1" ]; then
|
||||
npm run sync:ecosystem >/dev/null
|
||||
npm run validate:manifest >/dev/null
|
||||
npm run build >/dev/null
|
||||
fi
|
||||
|
||||
for required_path in \
|
||||
"$DIST_DIR/index.html" \
|
||||
"$DIST_DIR/data/ecosystem-manifest.json" \
|
||||
"$DIST_DIR/data/live-route-registry.json" \
|
||||
"$DIST_DIR/data/deployed-venue-inventory.json"; do
|
||||
if [ ! -f "$required_path" ]; then
|
||||
echo "Missing required build artifact: $required_path" >&2
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
|
||||
jq -e '.supportedNetworks[] | select(.chainId == 138) | .deployedVenuePoolCount >= 19 and .publicRoutingPoolCount >= 19' \
|
||||
"$DIST_DIR/data/ecosystem-manifest.json" >/dev/null
|
||||
jq -e '.liveSwapRoutes | length >= 19' "$DIST_DIR/data/live-route-registry.json" >/dev/null
|
||||
jq -e '.liveBridgeRoutes | length >= 12' "$DIST_DIR/data/live-route-registry.json" >/dev/null
|
||||
jq -e '.networks[] | select(.chainId == 138) | .venueCounts.deployedVenuePoolCount >= 19 and .summary.totalVenues >= 19' \
|
||||
"$DIST_DIR/data/deployed-venue-inventory.json" >/dev/null
|
||||
|
||||
rm -f "$TMP_ARCHIVE"
|
||||
tar -C "$SUBMODULE_ROOT" -czf "$TMP_ARCHIVE" dist
|
||||
|
||||
scp -q -o StrictHostKeyChecking=accept-new "$TMP_ARCHIVE" "root@$PROXMOX_HOST:/tmp/atomic-swap-dapp-5801.tgz"
|
||||
|
||||
ssh -o StrictHostKeyChecking=accept-new "root@$PROXMOX_HOST" "
|
||||
pct push $VMID /tmp/atomic-swap-dapp-5801.tgz /tmp/atomic-swap-dapp-5801.tgz
|
||||
pct exec $VMID -- bash -lc '
|
||||
set -euo pipefail
|
||||
mkdir -p \"$DEPLOY_ROOT\"
|
||||
find \"$DEPLOY_ROOT\" -mindepth 1 -maxdepth 1 -exec rm -rf {} +
|
||||
rm -rf /tmp/dist
|
||||
tar -xzf /tmp/atomic-swap-dapp-5801.tgz -C /tmp
|
||||
cp -R /tmp/dist/. \"$DEPLOY_ROOT/\"
|
||||
mkdir -p /var/cache/nginx/atomic-swap-api
|
||||
cat > /etc/nginx/conf.d/atomic-swap-api-cache.conf <<\"EOF\"
|
||||
proxy_cache_path /var/cache/nginx/atomic-swap-api
|
||||
levels=1:2
|
||||
keys_zone=atomic_swap_api_cache:10m
|
||||
max_size=256m
|
||||
inactive=30m
|
||||
use_temp_path=off;
|
||||
EOF
|
||||
cat > /etc/nginx/sites-available/atomic-swap <<\"EOF\"
|
||||
server {
|
||||
listen 80 default_server;
|
||||
listen [::]:80 default_server;
|
||||
server_name _;
|
||||
|
||||
root $DEPLOY_ROOT;
|
||||
index index.html;
|
||||
|
||||
location / {
|
||||
try_files \$uri \$uri/ /index.html;
|
||||
}
|
||||
|
||||
location = /index.html {
|
||||
add_header Cache-Control \"no-store, no-cache, must-revalidate\" always;
|
||||
}
|
||||
|
||||
location /data/ {
|
||||
add_header Cache-Control \"no-store, no-cache, must-revalidate\" always;
|
||||
}
|
||||
|
||||
location /assets/ {
|
||||
add_header Cache-Control \"public, max-age=31536000, immutable\" always;
|
||||
}
|
||||
|
||||
location /api/v1/ {
|
||||
proxy_pass https://explorer.d-bis.org/api/v1/;
|
||||
proxy_ssl_server_name on;
|
||||
proxy_set_header Host explorer.d-bis.org;
|
||||
proxy_set_header X-Forwarded-Proto \$scheme;
|
||||
proxy_set_header X-Forwarded-For \$proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Host \$host;
|
||||
proxy_http_version 1.1;
|
||||
proxy_buffering on;
|
||||
proxy_cache atomic_swap_api_cache;
|
||||
proxy_cache_methods GET HEAD;
|
||||
proxy_cache_key \"\$scheme\$proxy_host\$request_uri\";
|
||||
proxy_cache_lock on;
|
||||
proxy_cache_lock_timeout 10s;
|
||||
proxy_cache_lock_age 10s;
|
||||
proxy_cache_background_update on;
|
||||
proxy_cache_revalidate on;
|
||||
proxy_cache_valid 200 10s;
|
||||
proxy_cache_valid 404 1s;
|
||||
proxy_cache_valid any 0;
|
||||
proxy_cache_use_stale error timeout invalid_header updating http_429 http_500 http_502 http_503 http_504;
|
||||
add_header X-Atomic-Swap-Cache \$upstream_cache_status always;
|
||||
}
|
||||
}
|
||||
EOF
|
||||
ln -sfn /etc/nginx/sites-available/atomic-swap /etc/nginx/sites-enabled/atomic-swap
|
||||
rm -f /etc/nginx/sites-enabled/default
|
||||
rm -f /etc/nginx/sites-enabled/dapp
|
||||
nginx -t
|
||||
systemctl reload nginx
|
||||
curl -fsS http://127.0.0.1/index.html >/dev/null
|
||||
curl -fsS http://127.0.0.1/data/ecosystem-manifest.json >/dev/null
|
||||
curl -fsS http://127.0.0.1/data/live-route-registry.json >/dev/null
|
||||
curl -fsS http://127.0.0.1/data/deployed-venue-inventory.json >/dev/null
|
||||
rm -rf /tmp/dist /tmp/atomic-swap-dapp-5801.tgz
|
||||
'
|
||||
rm -f /tmp/atomic-swap-dapp-5801.tgz
|
||||
"
|
||||
|
||||
curl -fsS https://atomic-swap.defi-oracle.io/ >/dev/null
|
||||
curl -fsS https://atomic-swap.defi-oracle.io/data/ecosystem-manifest.json | jq -e '.supportedNetworks[] | select(.chainId == 138) | .deployedVenuePoolCount >= 19 and .publicRoutingPoolCount >= 19' >/dev/null
|
||||
curl -fsS https://atomic-swap.defi-oracle.io/data/live-route-registry.json | jq -e '.liveSwapRoutes | length >= 19' >/dev/null
|
||||
curl -fsS https://atomic-swap.defi-oracle.io/data/live-route-registry.json | jq -e '.liveBridgeRoutes | length >= 12' >/dev/null
|
||||
curl -fsS https://atomic-swap.defi-oracle.io/data/deployed-venue-inventory.json | jq -e '.networks[] | select(.chainId == 138) | .venueCounts.deployedVenuePoolCount >= 19 and .summary.totalVenues >= 19' >/dev/null
|
||||
|
||||
echo "Deployed atomic-swap-dapp to VMID $VMID via $PROXMOX_HOST"
|
||||
191
scripts/deployment/price-cw-token-mainnet.sh
Executable file
191
scripts/deployment/price-cw-token-mainnet.sh
Executable file
@@ -0,0 +1,191 @@
|
||||
#!/usr/bin/env bash
|
||||
# Price cW* wrapped stables on Ethereum Mainnet using:
|
||||
# (1) Accounting peg assumption ($1 per token for USD-stable wrappers — document in reporting)
|
||||
# (2) DODO PMM integration: getPoolPrice / getPoolPriceOrOracle (1e18 = $1 when oracle aligned)
|
||||
# (3) Implied ratio from pool vault reserves (USDC per cWUSDT if base/quote match manifest)
|
||||
# (4) Optional Chainlink ETH/USD (macro reference only; not cW* direct)
|
||||
#
|
||||
# Usage (from repo root):
|
||||
# source scripts/lib/load-project-env.sh
|
||||
# ./scripts/deployment/price-cw-token-mainnet.sh
|
||||
# ./scripts/deployment/price-cw-token-mainnet.sh --json
|
||||
# POOL_CWUSDT_USDC_MAINNET=0x... ./scripts/deployment/price-cw-token-mainnet.sh
|
||||
#
|
||||
# Requires: cast (Foundry), jq (for --json)
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
|
||||
# shellcheck disable=SC1091
|
||||
[[ -f "$PROJECT_ROOT/scripts/lib/load-project-env.sh" ]] && source "$PROJECT_ROOT/scripts/lib/load-project-env.sh"
|
||||
|
||||
JSON=false
|
||||
for a in "$@"; do [[ "$a" == "--json" ]] && JSON=true; done
|
||||
|
||||
RPC="${ETHEREUM_MAINNET_RPC:-https://ethereum-rpc.publicnode.com}"
|
||||
INT="${DODO_PMM_INTEGRATION_MAINNET:-0xa9F284eD010f4F7d7F8F201742b49b9f58e29b84}"
|
||||
CWUSDT="${CWUSDT_MAINNET:-0xaF5017d0163ecb99D9B5D94e3b4D7b09Af44D8AE}"
|
||||
# Mainnet canonical USDC (do not use Chain 138 OFFICIAL_USDC_ADDRESS for this script)
|
||||
USDC_MAINNET_CANON="${USDC_MAINNET:-0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48}"
|
||||
USDC_OFFICIAL="${USDC_MAINNET_CANON}"
|
||||
POOL="${POOL_CWUSDT_USDC_MAINNET:-0x27f3aE7EE71Be3d77bAf17d4435cF8B895DD25D2}"
|
||||
# Chainlink ETH/USD — verified on mainnet; optional macro reference
|
||||
CHAINLINK_ETH_USD="${CHAINLINK_ETH_USD_FEED:-0x5f4eC3Df9cbd43714FE2740f5E3616155c5b8419}"
|
||||
|
||||
# Print the first whitespace-delimited field of the first input line
# (empty output for empty input).
first_word() {
  awk 'NR == 1 { print $1; exit }'
}
|
||||
|
||||
# Normalize a cast numeric output to a bare decimal: drop any trailing
# "[...]" annotation (e.g. scientific-notation hint) and keep only the
# first field. Empty input yields empty output.
to_dec() {
  local raw="$1"
  if [[ -z "$raw" ]]; then
    echo ""
    return
  fi
  echo "$raw" | sed 's/\[.*//g' | awk '{print $1}'
}
|
||||
|
||||
# Query an ERC-20 token's decimals() over $RPC via cast; prints "" when the
# call fails (missing token, unreachable RPC).
get_decimals() {
  local token="$1"
  local raw
  raw=$(cast call "$token" "decimals()(uint8)" --rpc-url "$RPC" 2>/dev/null || echo "")
  to_dec "$raw"
}
|
||||
|
||||
# Print "method|rawPrice" for a pool: prefer getPoolPriceOrOracle on the
# integration contract, fall back to getPoolPrice. rawPrice is the
# integration's uint256 (docs: 1e18 scale = $1).
price_oracle_or_mid() {
  local pool="$1"
  local answer
  answer=$(cast call "$INT" "getPoolPriceOrOracle(address)(uint256)" "$pool" --rpc-url "$RPC" 2>/dev/null | first_word) || true
  if [[ -n "$answer" && "$answer" != *Error* ]]; then
    printf '%s|%s\n' "getPoolPriceOrOracle" "$answer"
    return
  fi
  answer=$(cast call "$INT" "getPoolPrice(address)(uint256)" "$pool" --rpc-url "$RPC" 2>/dev/null | first_word) || true
  printf '%s|%s\n' "getPoolPrice" "$answer"
}
|
||||
|
||||
parse_two_uints() {
|
||||
python3 -c "
|
||||
import re, sys
|
||||
s = sys.stdin.read()
|
||||
# cast may print scientific notation for large numbers
|
||||
parts = re.findall(r'-?[0-9]+(?:\.[0-9]+)?(?:[eE][+-]?[0-9]+)?', s)
|
||||
if len(parts) >= 2:
|
||||
print(int(float(parts[0])), int(float(parts[1])))
|
||||
"
|
||||
}
|
||||
|
||||
# Print "baseReserve quoteReserve" (raw uints) for a pool, read through the
# PMM integration contract's getPoolReserves view.
reserves() {
  local target_pool="$1"
  cast call "$INT" "getPoolReserves(address)(uint256,uint256)" "$target_pool" \
    --rpc-url "$RPC" 2>/dev/null | parse_two_uints
}
|
||||
|
||||
# Print latestRoundData() for a Chainlink feed, or "" when the feed address
# is empty or has no deployed bytecode on this RPC (wrong network / typo).
eth_usd_chainlink() {
  local feed="$1"
  if [[ -z "$feed" ]]; then
    echo ""
    return
  fi
  local bytecode
  bytecode=$(cast code "$feed" --rpc-url "$RPC" 2>/dev/null || true)
  if [[ -z "$bytecode" || "$bytecode" == "0x" ]]; then
    echo ""
    return
  fi
  cast call "$feed" "latestRoundData()(uint80,int256,uint256,uint256,uint80)" --rpc-url "$RPC" 2>/dev/null
}
|
||||
|
||||
db=$(get_decimals "$CWUSDT")
|
||||
dq=$(get_decimals "$USDC_OFFICIAL")
|
||||
read -r br_raw qr_raw < <(reserves "$POOL" | awk '{print $1, $2}')
|
||||
br_raw=$(to_dec "${br_raw:-0}")
|
||||
qr_raw=$(to_dec "${qr_raw:-0}")
|
||||
|
||||
_pom=$(price_oracle_or_mid "$POOL")
|
||||
pm_method="${_pom%%|*}"
|
||||
price_raw=$(to_dec "${_pom#*|}")
|
||||
|
||||
# price from integration is 1e18-scaled "USD" in docs
|
||||
price_human=""
|
||||
if [[ -n "$price_raw" && "$price_raw" =~ ^[0-9]+$ ]]; then
|
||||
price_human=$(python3 -c "print(int('$price_raw')/1e18)" 2>/dev/null || echo "")
|
||||
fi
|
||||
|
||||
implied_usdc_per_cwusdt=""
|
||||
if [[ -n "$br_raw" && -n "$qr_raw" && "$br_raw" != "0" && -n "$db" && -n "$dq" ]]; then
|
||||
implied_usdc_per_cwusdt=$(python3 -c "br=int('$br_raw'); qr=int('$qr_raw'); db=int('$db'); dq=int('$dq'); print((qr/10**dq)/(br/10**db))" 2>/dev/null || echo "")
|
||||
fi
|
||||
|
||||
cl_eth=$(eth_usd_chainlink "$CHAINLINK_ETH_USD")
|
||||
cl_eth_price=""
|
||||
if [[ -n "$cl_eth" ]]; then
|
||||
# latestRoundData(): cast prints one value per line; answer is the 2nd field (int256, 1e8 USD for ETH/USD)
|
||||
ans=$(echo "$cl_eth" | awk 'NR == 2 { print $1; exit }')
|
||||
[[ -n "$ans" ]] && cl_eth_price=$(python3 -c "a=int(float('$ans')); print(a/1e8)" 2>/dev/null || echo "")
|
||||
fi
|
||||
|
||||
if [[ "$JSON" == true ]]; then
|
||||
jq_n() { [[ -z "$1" ]] && echo null || echo "$1"; }
|
||||
jq -n \
|
||||
--arg network "ethereum-mainnet" \
|
||||
--arg token "cWUSDT" \
|
||||
--arg token_addr "$CWUSDT" \
|
||||
--arg pool "$POOL" \
|
||||
--arg integration "$INT" \
|
||||
--argjson accounting_usd_assumption 1 \
|
||||
--arg pool_price_method "${pm_method:-none}" \
|
||||
--argjson pool_price_raw "$(jq_n "$price_raw")" \
|
||||
--argjson pool_price_usd_scale "$(jq_n "$price_human")" \
|
||||
--argjson base_reserve_raw "$(jq_n "$br_raw")" \
|
||||
--argjson quote_reserve_raw "$(jq_n "$qr_raw")" \
|
||||
--arg implied_str "${implied_usdc_per_cwusdt:-}" \
|
||||
--argjson chainlink_eth_usd "$(jq_n "$cl_eth_price")" \
|
||||
'{
|
||||
network: $network,
|
||||
token: $token,
|
||||
tokenAddress: $token_addr,
|
||||
poolAddress: $pool,
|
||||
integrationAddress: $integration,
|
||||
accountingUsdAssumptionPerToken: $accounting_usd_assumption,
|
||||
poolMidOrOracle: {
|
||||
method: $pool_price_method,
|
||||
priceRaw1e18: $pool_price_raw,
|
||||
priceAsUsdIf1e18EqualsOneDollar: $pool_price_usd_scale
|
||||
},
|
||||
impliedFromReserves: {
|
||||
baseReserveRaw: $base_reserve_raw,
|
||||
quoteReserveRaw: $quote_reserve_raw,
|
||||
impliedUsdcPerCwusdtIfBaseIsCwusdtAndQuoteIsUsdc: (if $implied_str == "" then null else ($implied_str | tonumber) end)
|
||||
},
|
||||
chainlinkEthUsd: $chainlink_eth_usd
|
||||
}'
|
||||
exit 0
|
||||
fi
|
||||
|
||||
cat << EOF
|
||||
========================================
|
||||
cWUSDT (Mainnet) — USD pricing toolkit
|
||||
========================================
|
||||
RPC: ${RPC%%\?*}
|
||||
Integration: $INT
|
||||
Pool (cWUSDT/USDC): $POOL
|
||||
cWUSDT token: $CWUSDT
|
||||
USDC (mainnet): $USDC_OFFICIAL
|
||||
|
||||
(1) ACCOUNTING ASSUMPTION (treasury / reporting)
|
||||
Treat 1 cWUSDT ≈ 1 USD only if your policy says the wrapper tracks USDT 1:1.
|
||||
Use for: internal books, dashboards where Etherscan shows \$0.
|
||||
|
||||
(2) POOL MID / ORACLE (DODOPMMIntegration)
|
||||
Method: ${pm_method:-n/a}
|
||||
Raw price (uint256, doc scale 1e18 = \$1): ${price_raw:-n/a}
|
||||
As decimal (if 1e18 = \$1): ${price_human:-n/a}
|
||||
|
||||
(3) IMPLIED FROM VAULT RESERVES (USDC per cWUSDT)
|
||||
Base reserve (cWUSDT, ${db:-?} dp): ${br_raw:-n/a}
|
||||
Quote reserve (USDC, ${dq:-?} dp): ${qr_raw:-n/a}
|
||||
Implied USDC per 1 cWUSDT: ${implied_usdc_per_cwusdt:-n/a}
|
||||
(Thin or imbalanced pools skew this; use for sanity check, not sole mark.)
|
||||
|
||||
(4) CHAINLINK ETH/USD (macro only — not cWUSDT)
|
||||
Feed: ${CHAINLINK_ETH_USD}
|
||||
ETH/USD: ${cl_eth_price:-n/a}
|
||||
|
||||
Optional env overrides:
|
||||
ETHEREUM_MAINNET_RPC, DODO_PMM_INTEGRATION_MAINNET,
|
||||
CWUSDT_MAINNET, POOL_CWUSDT_USDC_MAINNET, USDC_MAINNET,
|
||||
CHAINLINK_ETH_USD_FEED
|
||||
|
||||
See: docs/03-deployment/CW_TOKEN_USD_PRICING_RUNBOOK.md
|
||||
========================================
|
||||
EOF
|
||||
@@ -1,13 +1,14 @@
|
||||
#!/usr/bin/env bash
|
||||
# Run remaining cW* steps: deploy (or dry-run), update token-mapping from .env, optional verify.
|
||||
# See docs/07-ccip/CW_DEPLOY_AND_WIRE_RUNBOOK.md and docs/00-meta/CW_BRIDGE_TASK_LIST.md.
|
||||
# Mainnet Etherscan (CompliantWrappedToken): smom-dbis-138/scripts/deployment/verify-mainnet-cw-etherscan.sh
|
||||
#
|
||||
# Usage:
|
||||
# ./scripts/deployment/run-cw-remaining-steps.sh [--dry-run] [--deploy] [--update-mapping] [--verify]
|
||||
# --dry-run Run deploy-cw in dry-run mode (print commands only).
|
||||
# --deploy Run deploy-cw on all chains (requires RPC/PRIVATE_KEY in smom-dbis-138/.env).
|
||||
# --update-mapping Update config/token-mapping-multichain.json from CWUSDT_*/CWUSDC_* in .env.
|
||||
# --verify For each chain with CWUSDT_* set, check MINTER_ROLE/BURNER_ROLE on cW* for CW_BRIDGE_*.
|
||||
# --update-mapping Update config/token-mapping-multichain.json from CW*_CHAIN keys in .env (12 cW symbols × mapped chains).
|
||||
# --verify For each chain with CW_BRIDGE_* set, check MINTER_ROLE/BURNER_ROLE on every deployed cW* (CWUSDT…CWXAUT) vs CW_BRIDGE_*.
|
||||
# With no options, runs --dry-run then --update-mapping (if any CWUSDT_* in .env).
|
||||
set -euo pipefail
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
@@ -103,18 +104,18 @@ if $DO_UPDATE_MAPPING; then
|
||||
fi
|
||||
|
||||
if $DO_VERIFY; then
|
||||
echo "=== Verify MINTER/BURNER roles on cW* for each chain ==="
|
||||
echo "=== Verify MINTER/BURNER roles on cW* for each chain (12 symbols) ==="
|
||||
MINTER_ROLE=$(cast keccak "MINTER_ROLE" 2>/dev/null || echo "0x")
|
||||
BURNER_ROLE=$(cast keccak "BURNER_ROLE" 2>/dev/null || echo "0x")
|
||||
# Env suffix -> env var for CompliantWrappedToken (must match update_mapping keyToEnv)
|
||||
CW_KEYS=(CWUSDT CWUSDC CWEURC CWEURT CWGBPC CWGBPT CWAUDC CWJPYC CWCHFC CWCADC CWXAUC CWXAUT)
|
||||
for name in MAINNET CRONOS BSC POLYGON GNOSIS AVALANCHE BASE ARBITRUM OPTIMISM; do
|
||||
cwusdt_var="CWUSDT_${name}"
|
||||
bridge_var="CW_BRIDGE_${name}"
|
||||
cwusdt="${!cwusdt_var:-}"
|
||||
bridge="${!bridge_var:-}"
|
||||
rpc_var="${name}_RPC_URL"
|
||||
[[ -z "$rpc_var" ]] && rpc_var="${name}_RPC"
|
||||
rpc="${!rpc_var:-}"
|
||||
if [[ -z "$cwusdt" || -z "$bridge" ]]; then continue; fi
|
||||
if [[ -z "$bridge" ]]; then continue; fi
|
||||
if [[ -z "$rpc" ]]; then
|
||||
case "$name" in
|
||||
MAINNET) rpc="${ETH_MAINNET_RPC_URL:-${ETHEREUM_MAINNET_RPC:-}}";;
|
||||
@@ -129,9 +130,17 @@ if $DO_VERIFY; then
|
||||
esac
|
||||
fi
|
||||
if [[ -z "$rpc" ]]; then echo " Skip $name: no RPC"; continue; fi
|
||||
m=$(cast call "$cwusdt" "hasRole(bytes32,address)(bool)" "$MINTER_ROLE" "$bridge" --rpc-url "$rpc" 2>/dev/null || echo "false")
|
||||
b=$(cast call "$cwusdt" "hasRole(bytes32,address)(bool)" "$BURNER_ROLE" "$bridge" --rpc-url "$rpc" 2>/dev/null || echo "false")
|
||||
echo " $name: MINTER=$m BURNER=$b (cWUSDT=$cwusdt bridge=$bridge)"
|
||||
any=false
|
||||
for key in "${CW_KEYS[@]}"; do
|
||||
var="${key}_${name}"
|
||||
addr="${!var:-}"
|
||||
[[ -z "$addr" ]] && continue
|
||||
any=true
|
||||
m=$(cast call "$addr" "hasRole(bytes32,address)(bool)" "$MINTER_ROLE" "$bridge" --rpc-url "$rpc" 2>/dev/null || echo "false")
|
||||
b=$(cast call "$addr" "hasRole(bytes32,address)(bool)" "$BURNER_ROLE" "$bridge" --rpc-url "$rpc" 2>/dev/null || echo "false")
|
||||
echo " $name $key: MINTER=$m BURNER=$b ($addr)"
|
||||
done
|
||||
if ! $any; then echo " $name: no CW*_* addresses in .env; skipped"; fi
|
||||
done
|
||||
fi
|
||||
|
||||
|
||||
@@ -1,24 +1,42 @@
|
||||
#!/usr/bin/env bash
|
||||
# Send a random amount between 5 and 9 ETH (inclusive) to each address in
|
||||
# config/pmm-soak-wallet-grid.json (Elemental Imperium 33×33×6 matrix).
|
||||
# Send native ETH to each address in config/pmm-soak-wallet-grid.json (Elemental
|
||||
# Imperium 33×33×6 matrix).
|
||||
#
|
||||
# Default mode (spread): take BALANCE_PCT of the signer's native balance (minus
|
||||
# optional reserve), split across the selected wallet slice with independent
|
||||
# uniform random multipliers in [1 - SPREAD_PCT/100, 1 + SPREAD_PCT/100] around
|
||||
# equal weight, then renormalize so the total sent equals that budget exactly.
|
||||
#
|
||||
# Legacy mode: random 5–9 ETH per wallet (--uniform-eth).
|
||||
#
|
||||
# Requires: cast (Foundry), jq, python3. Loads PRIVATE_KEY and RPC via load-project-env.sh.
|
||||
#
|
||||
# Usage:
|
||||
# ./scripts/deployment/send-eth-ei-matrix-wallets.sh [--dry-run] [--limit N] [--offset N]
|
||||
#
|
||||
# --dry-run Print planned sends only (no transactions).
|
||||
# --limit N Process at most N wallets (after offset). Default: all.
|
||||
# --offset N Skip the first N wallets (resume / partial run).
|
||||
# --dry-run Print planned sends only (no transactions).
|
||||
# --limit N Process at most N wallets (after offset). Default: all.
|
||||
# --offset N Skip the first N wallets (resume / partial run).
|
||||
# --balance-pct P Percent of (balance minus reserve) to distribute (default: 80).
|
||||
# --spread-pct S Per-wallet multiplier range ±S%% around fair share (default: 15).
|
||||
# --reserve-wei W Wei to leave untouchable before applying balance-pct (default: 0).
|
||||
# --uniform-eth Legacy: random 5–9 ETH per wallet (ignores balance-pct/spread).
|
||||
#
|
||||
# Gas (Chain 138 / Besu): defaults avoid stuck pending txs from near-zero EIP-1559 caps.
|
||||
# Override if needed:
|
||||
# EI_MATRIX_GAS_PRICE=100000000000
|
||||
# EI_MATRIX_PRIORITY_GAS_PRICE=20000000000
|
||||
#
|
||||
# Spread mode defaults (same as --balance-pct / --spread-pct / --reserve-wei):
|
||||
# EI_MATRIX_BALANCE_PCT=80 EI_MATRIX_SPREAD_PCT=15 EI_MATRIX_RESERVE_WEI=0
|
||||
# Balance query uses public RPC by default; override with EI_MATRIX_BALANCE_RPC. Tx RPC: RPC_URL_138 or public.
|
||||
#
|
||||
# Nonces: each send uses an explicit --nonce from eth_getTransactionCount(..., "pending")
|
||||
# and increments locally so --async does not race duplicate nonces.
|
||||
#
|
||||
# RPC: defaults to Chain 138 public HTTP (reachable without LAN). Override with RPC_URL_138
|
||||
# (e.g. core or Besu on 192.168.x.x). Public URL env: CHAIN138_PUBLIC_RPC_URL or RPC_URL_138_PUBLIC.
|
||||
#
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
@@ -28,11 +46,19 @@ cd "$PROJECT_ROOT"
|
||||
DRY_RUN=false
|
||||
LIMIT=""
|
||||
OFFSET="0"
|
||||
BALANCE_PCT="${EI_MATRIX_BALANCE_PCT:-80}"
|
||||
SPREAD_PCT="${EI_MATRIX_SPREAD_PCT:-15}"
|
||||
RESERVE_WEI="${EI_MATRIX_RESERVE_WEI:-0}"
|
||||
UNIFORM_ETH_LEGACY=false
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
--dry-run) DRY_RUN=true; shift ;;
|
||||
--limit) LIMIT="${2:?}"; shift 2 ;;
|
||||
--offset) OFFSET="${2:?}"; shift 2 ;;
|
||||
--balance-pct) BALANCE_PCT="${2:?}"; shift 2 ;;
|
||||
--spread-pct) SPREAD_PCT="${2:?}"; shift 2 ;;
|
||||
--reserve-wei) RESERVE_WEI="${2:?}"; shift 2 ;;
|
||||
--uniform-eth) UNIFORM_ETH_LEGACY=true; shift ;;
|
||||
*) echo "Unknown arg: $1" >&2; exit 1 ;;
|
||||
esac
|
||||
done
|
||||
@@ -40,6 +66,13 @@ done
|
||||
# shellcheck disable=SC1091
|
||||
source "$PROJECT_ROOT/scripts/lib/load-project-env.sh"
|
||||
|
||||
CHAIN138_PUBLIC_RPC_DEFAULT="https://rpc-http-pub.d-bis.org"
|
||||
RPC_PUBLIC="${CHAIN138_PUBLIC_RPC_URL:-${RPC_URL_138_PUBLIC:-$CHAIN138_PUBLIC_RPC_DEFAULT}}"
|
||||
# Balance read for spread budget: public RPC first (aligns with deployer checks on public endpoints).
|
||||
# After that, nonce + sends use RPC (same as RPC_URL_138 when set, else public).
|
||||
BALANCE_RPC="${EI_MATRIX_BALANCE_RPC:-$RPC_PUBLIC}"
|
||||
RPC="${RPC_URL_138:-$RPC_PUBLIC}"
|
||||
|
||||
LOCK_FILE="${PROJECT_ROOT}/reports/status/ei-matrix-eth-send.lock"
|
||||
mkdir -p "$(dirname "$LOCK_FILE")"
|
||||
exec 200>"$LOCK_FILE"
|
||||
@@ -47,8 +80,6 @@ if ! flock -n 200; then
|
||||
echo "Another send-eth-ei-matrix-wallets.sh is already running (lock: $LOCK_FILE)." >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
RPC="${RPC_URL_138:-http://192.168.11.211:8545}"
|
||||
GRID="$PROJECT_ROOT/config/pmm-soak-wallet-grid.json"
|
||||
DEPLOYER_CANONICAL="0x4A666F96fC8764181194447A7dFdb7d471b301C8"
|
||||
# Wei per gas — must exceed stuck-replacement threshold on busy pools (see script header).
|
||||
@@ -63,13 +94,21 @@ command -v jq &>/dev/null || { echo "jq required" >&2; exit 1; }
|
||||
|
||||
FROM_ADDR=$(cast wallet address --private-key "$PRIVATE_KEY")
|
||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||
echo "EI matrix ETH distribution (random 5–9 ETH per wallet)"
|
||||
if $UNIFORM_ETH_LEGACY; then
|
||||
echo "EI matrix ETH distribution (legacy: random 5–9 ETH per wallet)"
|
||||
else
|
||||
echo "EI matrix ETH distribution (${BALANCE_PCT}% of balance, ±${SPREAD_PCT}% jitter, normalized)"
|
||||
fi
|
||||
echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━"
|
||||
echo "RPC: $RPC"
|
||||
echo "Balance RPC (public): $BALANCE_RPC"
|
||||
echo "Tx / nonce RPC: $RPC"
|
||||
echo "Signer: $FROM_ADDR"
|
||||
echo "Grid: $GRID"
|
||||
echo "Dry-run: $DRY_RUN"
|
||||
echo "Offset: $OFFSET Limit: ${LIMIT:-all}"
|
||||
if ! $UNIFORM_ETH_LEGACY; then
|
||||
echo "Reserve: ${RESERVE_WEI} wei excluded before balance-pct"
|
||||
fi
|
||||
echo "Gas: maxFee=$EI_MATRIX_GAS_PRICE wei priorityFee=$EI_MATRIX_PRIORITY_GAS_PRICE wei"
|
||||
echo ""
|
||||
|
||||
@@ -87,10 +126,41 @@ pending_nonce() {
|
||||
cast to-dec "$hex"
|
||||
}
|
||||
|
||||
random_wei() {
|
||||
random_wei_uniform_legacy() {
|
||||
python3 -c "import random; from decimal import Decimal; print(int(Decimal(str(random.uniform(5.0, 9.0))) * 10**18))"
|
||||
}
|
||||
|
||||
# Args: count budget_wei spread_pct — prints one wei amount per line; the
# printed amounts always sum exactly to budget_wei. Each wallet's share is
# a uniform random weight in [1 - spread/100, 1 + spread/100] around equal
# weight, renormalized to the budget.
generate_spread_amounts_wei() {
  local n_wallets="$1" budget_wei="$2" spread_pct="$3"
  python3 - "$n_wallets" "$budget_wei" "$spread_pct" <<'PY'
import random
import sys

count = int(sys.argv[1])
budget = int(sys.argv[2])
spread = float(sys.argv[3])
if count <= 0:
    sys.exit("count must be positive")
if budget < 0:
    sys.exit("budget must be non-negative")
if not 0 <= spread <= 100:
    sys.exit("spread-pct must be in [0, 100]")
# Integer-only weights: float shares of ~10^27 wei round to 0, which left a
# huge remainder loop (and a hang) in the float version of this code.
SCALE = 10000
low = max(1, (100 * SCALE - int(spread * SCALE)) // 100)
high = (100 * SCALE + int(spread * SCALE)) // 100
weights = [random.randint(low, high) for _ in range(count)]
total_weight = sum(weights)
amounts = [(budget * weight) // total_weight for weight in weights]
# Hand out the integer-division remainder one wei at a time so the column
# sum equals the budget exactly.
for i in range(budget - sum(amounts)):
    amounts[i % count] += 1
for amount in amounts:
    print(amount)
PY
}
|
||||
|
||||
ERR_LOG="${PROJECT_ROOT}/reports/status/ei-matrix-eth-send-failures.log"
|
||||
mkdir -p "$(dirname "$ERR_LOG")"
|
||||
|
||||
@@ -102,55 +172,120 @@ else
|
||||
NONCE=0
|
||||
fi
|
||||
|
||||
# Args: addr wei idx — send `wei` of native ETH to `addr`, with at most one
# fee-bumped retry on "Replacement transaction underpriced". Mutates
# globals: NONCE (incremented on success, re-synced from the pending pool on
# retry/fail), sent, failed. Reads globals: DRY_RUN, RPC, PRIVATE_KEY,
# EI_MATRIX_GAS_PRICE, EI_MATRIX_PRIORITY_GAS_PRICE, ERR_LOG, PROJECT_ROOT.
matrix_try_send() {
  local addr="$1" wei="$2" idx="$3"
  local approx_eth send_out tx_hash fee_cap tip_cap try_no
  if [[ "$wei" == "0" ]]; then
    echo "[skip] idx=$idx $addr zero wei"
    return 0
  fi
  approx_eth=$(python3 -c "wei=int('$wei'); print(f'{wei / 1e18:.6f}')")
  if $DRY_RUN; then
    echo "[dry-run] idx=$idx $addr ${wei} wei (~${approx_eth} ETH)"
    return 0
  fi
  fee_cap="$EI_MATRIX_GAS_PRICE"
  tip_cap="$EI_MATRIX_PRIORITY_GAS_PRICE"
  for try_no in 1 2; do
    if send_out=$(cast send "$addr" --value "$wei" --rpc-url "$RPC" --private-key "$PRIVATE_KEY" \
      --nonce "$NONCE" \
      --async \
      --gas-price "$fee_cap" \
      --priority-gas-price "$tip_cap" \
      2>&1); then
      tx_hash=$(echo "$send_out" | tail -n1)
      echo "[ok] idx=$idx nonce=$NONCE $addr ${approx_eth} ETH tx=$tx_hash"
      sent=$((sent + 1))
      NONCE=$((NONCE + 1))
      # Persist progress so an interrupted run can resume with --offset.
      echo "$idx" > "${PROJECT_ROOT}/reports/status/ei-matrix-eth-send-last-idx.txt"
      break
    fi
    # First failure from an underpriced replacement: double both fee caps,
    # re-sync the nonce from the pending pool, and retry once.
    if [[ "$try_no" -eq 1 ]] && echo "$send_out" | grep -q "Replacement transaction underpriced"; then
      fee_cap=$((fee_cap * 2))
      tip_cap=$((tip_cap * 2))
      NONCE=$(pending_nonce) || true
      continue
    fi
    echo "[fail] idx=$idx nonce=$NONCE $addr $send_out" | tee -a "$ERR_LOG" >&2
    failed=$((failed + 1))
    NONCE=$(pending_nonce) || true
    break
  done
}
|
||||
|
||||
# Emit one wallet address per line from $GRID, honoring the global
# OFFSET/LIMIT slice. Slice inside jq itself: the old `jq | tail | head`
# pipeline breaks with SIGPIPE under `set -o pipefail` once head truncates
# the stream, and keeping both old and new command lines (merge residue)
# would emit every address twice.
stream_addresses() {
  if [[ -n "${LIMIT:-}" ]]; then
    jq -r --argjson o "$OFFSET" --argjson l "$LIMIT" '.wallets[$o:$o+$l][] | .address' "$GRID"
  else
    jq -r --argjson o "$OFFSET" '.wallets[$o:][] | .address' "$GRID"
  fi
}
|
||||
|
||||
# Args: balance_wei reserve_wei pct — prints the spend budget in wei:
# floor(max(0, balance - reserve) * pct / 100). Python keeps the big-int
# math exact where shell arithmetic would overflow.
compute_budget_wei() {
  python3 - "$1" "$2" "$3" <<'PY'
import sys

balance = int(sys.argv[1])
reserve = int(sys.argv[2])
percent = float(sys.argv[3])
spendable = max(0, balance - reserve)
print(int(spendable * percent / 100.0))
PY
}
|
||||
|
||||
ADDR_TMP=""
|
||||
AMOUNTS_TMP=""
|
||||
# Remove the address/amount temp files when they were created; registered
# as an EXIT trap, so it must tolerate the variables still being empty.
cleanup_tmp() {
  local tmp
  for tmp in "$ADDR_TMP" "$AMOUNTS_TMP"; do
    if [[ -n "$tmp" && -f "$tmp" ]]; then
      rm -f "$tmp"
    fi
  done
}
|
||||
trap cleanup_tmp EXIT
|
||||
|
||||
if $UNIFORM_ETH_LEGACY; then
|
||||
:
|
||||
else
|
||||
ADDR_TMP=$(mktemp)
|
||||
AMOUNTS_TMP=$(mktemp)
|
||||
stream_addresses > "$ADDR_TMP"
|
||||
WALLET_COUNT=$(wc -l < "$ADDR_TMP" | tr -d '[:space:]')
|
||||
if [[ -z "$WALLET_COUNT" || "$WALLET_COUNT" -eq 0 ]]; then
|
||||
echo "No wallets in range (offset=$OFFSET limit=${LIMIT:-all})." >&2
|
||||
exit 1
|
||||
fi
|
||||
BAL_WEI=$(cast balance "$FROM_ADDR" --rpc-url "$BALANCE_RPC" 2>/dev/null | awk '{print $1}' || echo "0")
|
||||
BUDGET_WEI=$(compute_budget_wei "$BAL_WEI" "$RESERVE_WEI" "$BALANCE_PCT")
|
||||
eth_bal=$(python3 -c "print(f'{int(\"$BAL_WEI\") / 1e18:.8f}')" 2>/dev/null || echo "?")
|
||||
eth_bud=$(python3 -c "print(f'{int(\"$BUDGET_WEI\") / 1e18:.8f}')" 2>/dev/null || echo "?")
|
||||
echo "Balance: $BAL_WEI wei (~$eth_bal ETH)"
|
||||
echo "Budget: $BUDGET_WEI wei (~$eth_bud ETH) (${BALANCE_PCT}% of max(0, balance − ${RESERVE_WEI} reserve))"
|
||||
echo "Wallets: $WALLET_COUNT spread: ±${SPREAD_PCT}% then normalized to budget"
|
||||
echo ""
|
||||
if [[ "$BUDGET_WEI" == "0" ]]; then
|
||||
echo "Budget is zero (balance below reserve or pct=0). Nothing to send." >&2
|
||||
exit 1
|
||||
fi
|
||||
generate_spread_amounts_wei "$WALLET_COUNT" "$BUDGET_WEI" "$SPREAD_PCT" > "$AMOUNTS_TMP"
|
||||
fi
|
||||
|
||||
sent=0
|
||||
failed=0
|
||||
idx=$OFFSET
|
||||
while read -r addr; do
|
||||
wei=$(random_wei)
|
||||
eth_approx=$(python3 -c "print(f'{$wei / 1e18:.6f}')")
|
||||
if $DRY_RUN; then
|
||||
echo "[dry-run] idx=$idx $addr ${wei} wei (~${eth_approx} ETH)"
|
||||
else
|
||||
GP="$EI_MATRIX_GAS_PRICE"
|
||||
PP="$EI_MATRIX_PRIORITY_GAS_PRICE"
|
||||
attempt=1
|
||||
while [[ "$attempt" -le 2 ]]; do
|
||||
if out=$(cast send "$addr" --value "$wei" --rpc-url "$RPC" --private-key "$PRIVATE_KEY" \
|
||||
--nonce "$NONCE" \
|
||||
--async \
|
||||
--gas-price "$GP" \
|
||||
--priority-gas-price "$PP" \
|
||||
2>&1); then
|
||||
tx=$(echo "$out" | tail -n1)
|
||||
echo "[ok] idx=$idx nonce=$NONCE $addr ${eth_approx} ETH tx=$tx"
|
||||
sent=$((sent + 1))
|
||||
NONCE=$((NONCE + 1))
|
||||
echo "$idx" > "${PROJECT_ROOT}/reports/status/ei-matrix-eth-send-last-idx.txt"
|
||||
break
|
||||
fi
|
||||
if echo "$out" | grep -q "Replacement transaction underpriced" && [[ "$attempt" -eq 1 ]]; then
|
||||
GP=$((GP * 2))
|
||||
PP=$((PP * 2))
|
||||
NONCE=$(pending_nonce) || true
|
||||
attempt=$((attempt + 1))
|
||||
continue
|
||||
fi
|
||||
echo "[fail] idx=$idx nonce=$NONCE $addr $out" | tee -a "$ERR_LOG" >&2
|
||||
failed=$((failed + 1))
|
||||
NONCE=$(pending_nonce) || true
|
||||
break
|
||||
done
|
||||
fi
|
||||
idx=$((idx + 1))
|
||||
done < <(stream_addresses)
|
||||
|
||||
if $UNIFORM_ETH_LEGACY; then
|
||||
while read -r addr; do
|
||||
wei=$(random_wei_uniform_legacy)
|
||||
matrix_try_send "$addr" "$wei" "$idx"
|
||||
idx=$((idx + 1))
|
||||
done < <(stream_addresses)
|
||||
else
|
||||
while IFS=$'\t' read -r addr wei; do
|
||||
matrix_try_send "$addr" "$wei" "$idx"
|
||||
idx=$((idx + 1))
|
||||
done < <(paste -d $'\t' "$ADDR_TMP" "$AMOUNTS_TMP")
|
||||
fi
|
||||
|
||||
echo ""
|
||||
if $DRY_RUN; then
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#!/usr/bin/env bash
|
||||
# Add Gitea webhook for Phoenix deploy to a repository.
|
||||
# Usage: GITEA_TOKEN=xxx PHOENIX_WEBHOOK_URL=https://host:4001/webhook/gitea bash add-gitea-webhook-phoenix.sh [owner/repo]
|
||||
# Example: GITEA_TOKEN=xxx PHOENIX_WEBHOOK_URL=http://192.168.11.59:4001/webhook/gitea bash add-gitea-webhook-phoenix.sh d-bis/proxmox
|
||||
# Example: GITEA_TOKEN=xxx PHOENIX_WEBHOOK_URL=http://192.168.11.59:4001/webhook/gitea PHOENIX_DEPLOY_SECRET=shared-secret bash add-gitea-webhook-phoenix.sh d-bis/proxmox
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
@@ -12,6 +12,7 @@ PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
|
||||
GITEA_URL="${GITEA_URL:-https://gitea.d-bis.org}"
|
||||
GITEA_TOKEN="${GITEA_TOKEN:-}"
|
||||
PHOENIX_WEBHOOK_URL="${PHOENIX_WEBHOOK_URL:-}"
|
||||
PHOENIX_DEPLOY_SECRET="${PHOENIX_DEPLOY_SECRET:-}"
|
||||
REPO="${1:-d-bis/proxmox}"
|
||||
|
||||
if [ -z "$GITEA_TOKEN" ]; then
|
||||
@@ -23,6 +24,10 @@ if [ -z "$PHOENIX_WEBHOOK_URL" ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "WARNING: Only add the webhook deploy path for repos that are not already deploying via Gitea Actions."
|
||||
echo " If both Actions and webhook deploys are enabled for the same repo, pushes can trigger duplicate deploys."
|
||||
echo ""
|
||||
|
||||
API="${GITEA_URL%/}/api/v1"
|
||||
OWNER="${REPO%%/*}"
|
||||
REPO_NAME="${REPO#*/}"
|
||||
@@ -37,11 +42,13 @@ fi
|
||||
# Create webhook
|
||||
BODY=$(jq -n \
|
||||
--arg url "$PHOENIX_WEBHOOK_URL" \
|
||||
--arg secret "$PHOENIX_DEPLOY_SECRET" \
|
||||
'{
|
||||
type: "gitea",
|
||||
config: {
|
||||
url: $url,
|
||||
content_type: "json"
|
||||
content_type: "json",
|
||||
secret: $secret
|
||||
},
|
||||
events: ["push", "create"],
|
||||
active: true
|
||||
|
||||
90
scripts/dev-vm/bootstrap-phoenix-cicd.sh
Executable file
90
scripts/dev-vm/bootstrap-phoenix-cicd.sh
Executable file
@@ -0,0 +1,90 @@
|
||||
#!/usr/bin/env bash
|
||||
# Bootstrap the Phoenix deploy CI/CD path for this repo.
|
||||
#
|
||||
# Steps:
|
||||
# 1. Run repo validation
|
||||
# 2. Deploy phoenix-deploy-api to dev VM
|
||||
# 3. Smoke-check /health and /api/deploy-targets
|
||||
# 4. Optionally add a Gitea webhook for a repo that does not already deploy via Actions
|
||||
#
|
||||
# Usage:
|
||||
# bash scripts/dev-vm/bootstrap-phoenix-cicd.sh
|
||||
# bash scripts/dev-vm/bootstrap-phoenix-cicd.sh --repo d-bis/proxmox
|
||||
# bash scripts/dev-vm/bootstrap-phoenix-cicd.sh --repo some/repo --add-webhook
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
|
||||
[[ -f "${PROJECT_ROOT}/.env" ]] && set -a && source "${PROJECT_ROOT}/.env" && set +a
|
||||
|
||||
REPO="${PHOENIX_CICD_REPO:-d-bis/proxmox}"
|
||||
ADD_WEBHOOK=0
|
||||
DRY_RUN=0
|
||||
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
--repo)
|
||||
REPO="${2:?--repo requires owner/repo}"
|
||||
shift 2
|
||||
;;
|
||||
--add-webhook)
|
||||
ADD_WEBHOOK=1
|
||||
shift
|
||||
;;
|
||||
--dry-run)
|
||||
DRY_RUN=1
|
||||
shift
|
||||
;;
|
||||
*)
|
||||
echo "Unknown argument: $1" >&2
|
||||
exit 1
|
||||
;;
|
||||
esac
|
||||
done
|
||||
|
||||
DEPLOY_HEALTH_URL="${PHOENIX_CICD_HEALTH_URL:-http://192.168.11.59:4001/health}"
|
||||
DEPLOY_TARGETS_URL="${PHOENIX_CICD_TARGETS_URL:-http://192.168.11.59:4001/api/deploy-targets}"
|
||||
|
||||
echo "=== Phoenix CI/CD bootstrap ==="
|
||||
echo "Repo: ${REPO}"
|
||||
echo "Health URL: ${DEPLOY_HEALTH_URL}"
|
||||
echo "Targets URL: ${DEPLOY_TARGETS_URL}"
|
||||
echo "Add webhook: ${ADD_WEBHOOK}"
|
||||
echo "Dry-run: ${DRY_RUN}"
|
||||
echo ""
|
||||
|
||||
if [[ "${DRY_RUN}" -eq 1 ]]; then
|
||||
echo "[DRY-RUN] bash scripts/verify/run-all-validation.sh --skip-genesis"
|
||||
echo "[DRY-RUN] bash scripts/deployment/deploy-phoenix-deploy-api-to-dev-vm.sh --apply --start-ct"
|
||||
echo "[DRY-RUN] curl -sS ${DEPLOY_HEALTH_URL}"
|
||||
echo "[DRY-RUN] curl -sS ${DEPLOY_TARGETS_URL}"
|
||||
if [[ "${ADD_WEBHOOK}" -eq 1 ]]; then
|
||||
echo "[DRY-RUN] bash scripts/dev-vm/add-gitea-webhook-phoenix.sh ${REPO}"
|
||||
fi
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "[1/4] Validation gate..."
|
||||
bash "${PROJECT_ROOT}/scripts/verify/run-all-validation.sh" --skip-genesis
|
||||
|
||||
echo ""
|
||||
echo "[2/4] Deploy phoenix-deploy-api..."
|
||||
bash "${PROJECT_ROOT}/scripts/deployment/deploy-phoenix-deploy-api-to-dev-vm.sh" --apply --start-ct
|
||||
|
||||
echo ""
|
||||
echo "[3/4] Smoke checks..."
|
||||
curl -sSf "${DEPLOY_HEALTH_URL}" | jq .
|
||||
curl -sSf "${DEPLOY_TARGETS_URL}" | jq .
|
||||
|
||||
if [[ "${ADD_WEBHOOK}" -eq 1 ]]; then
|
||||
echo ""
|
||||
echo "[4/4] Add Gitea webhook (only for repos not already deploying via Actions)..."
|
||||
bash "${PROJECT_ROOT}/scripts/dev-vm/add-gitea-webhook-phoenix.sh" "${REPO}"
|
||||
else
|
||||
echo ""
|
||||
echo "[4/4] Webhook step skipped. Re-run with --add-webhook only for repos that are not already deploying via Gitea Actions."
|
||||
fi
|
||||
|
||||
echo ""
|
||||
echo "Done."
|
||||
37
scripts/dev-vm/trigger-phoenix-deploy.sh
Normal file
37
scripts/dev-vm/trigger-phoenix-deploy.sh
Normal file
@@ -0,0 +1,37 @@
|
||||
#!/usr/bin/env bash
|
||||
# Trigger phoenix-deploy-api directly for smoke tests or manual promotion.
|
||||
#
|
||||
# Usage:
|
||||
# bash scripts/dev-vm/trigger-phoenix-deploy.sh
|
||||
# bash scripts/dev-vm/trigger-phoenix-deploy.sh d-bis/proxmox main default
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"
|
||||
[[ -f "${PROJECT_ROOT}/.env" ]] && set -a && source "${PROJECT_ROOT}/.env" && set +a
|
||||
|
||||
REPO="${1:-d-bis/proxmox}"
|
||||
BRANCH="${2:-main}"
|
||||
TARGET="${3:-default}"
|
||||
SHA="${4:-manual-$(date +%Y%m%d%H%M%S)}"
|
||||
|
||||
DEPLOY_URL="${PHOENIX_DEPLOY_URL:-http://192.168.11.59:4001/api/deploy}"
|
||||
DEPLOY_TOKEN="${PHOENIX_DEPLOY_TOKEN:-${PHOENIX_DEPLOY_SECRET:-}}"
|
||||
|
||||
if [[ -z "${DEPLOY_TOKEN}" ]]; then
|
||||
echo "ERROR: set PHOENIX_DEPLOY_TOKEN or PHOENIX_DEPLOY_SECRET in root .env" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
echo "Triggering deploy:"
|
||||
echo " URL: ${DEPLOY_URL}"
|
||||
echo " Repo: ${REPO}"
|
||||
echo " Branch: ${BRANCH}"
|
||||
echo " Target: ${TARGET}"
|
||||
echo " SHA: ${SHA}"
|
||||
|
||||
curl -sSf -X POST "${DEPLOY_URL}" \
|
||||
-H "Authorization: Bearer ${DEPLOY_TOKEN}" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d "{\"repo\":\"${REPO}\",\"branch\":\"${BRANCH}\",\"target\":\"${TARGET}\",\"sha\":\"${SHA}\"}" | jq .
|
||||
@@ -17,16 +17,16 @@ cd "$PROJECT_ROOT"
|
||||
|
||||
PROXMOX_USER="${PROXMOX_USER:-root}"
|
||||
R630_01="${PROXMOX_HOST_R630_01:-${PROXMOX_R630_01:-192.168.11.11}}"
|
||||
ML110="${PROXMOX_HOST_ML110:-${PROXMOX_ML110:-192.168.11.10}}"
|
||||
R630_03="${PROXMOX_HOST_R630_03:-${PROXMOX_R630_03:-192.168.11.13}}"
|
||||
|
||||
# VMID -> Proxmox host
|
||||
# 1000,1001,1002 on r630-01; 1003,1004 on ml110
|
||||
# 1000,1001,1002 on r630-01; 1003,1004 on r630-03
|
||||
VALIDATORS=(
|
||||
"1000:$R630_01"
|
||||
"1001:$R630_01"
|
||||
"1002:$R630_01"
|
||||
"1003:$ML110"
|
||||
"1004:$ML110"
|
||||
"1003:$R630_03"
|
||||
"1004:$R630_03"
|
||||
)
|
||||
|
||||
CONFIG_PATH="/etc/besu/config-validator.toml"
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
# Besu expects TOML for permissions-nodes-config-file (not permissioned-nodes.json).
|
||||
# Static-nodes = bootstrap peers; permissions-nodes = allowlist. Both are essential.
|
||||
#
|
||||
# Run from repo root. Requires SSH to r630-01 (192.168.11.11) and ml110 (192.168.11.10).
|
||||
# Run from repo root. Requires SSH to r630-01 (192.168.11.11) and r630-03 (192.168.11.13).
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
@@ -24,8 +24,8 @@ if [ ! -f "$SOURCE_STATIC" ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
R630_01="${PROXMOX_R630_01:-192.168.11.11}"
|
||||
ML110="${PROXMOX_ML110:-192.168.11.10}"
|
||||
R630_01="${PROXMOX_HOST_R630_01:-${PROXMOX_R630_01:-192.168.11.11}}"
|
||||
R630_03="${PROXMOX_HOST_R630_03:-${PROXMOX_R630_03:-192.168.11.13}}"
|
||||
USER="${PROXMOX_USER:-root}"
|
||||
PERM_PATH="/var/lib/besu/permissions"
|
||||
CONFIG_GLOB="/etc/besu/config-validator.toml"
|
||||
@@ -34,8 +34,8 @@ VALIDATORS=(
|
||||
"1000:$R630_01"
|
||||
"1001:$R630_01"
|
||||
"1002:$R630_01"
|
||||
"1003:$ML110"
|
||||
"1004:$ML110"
|
||||
"1003:$R630_03"
|
||||
"1004:$R630_03"
|
||||
)
|
||||
|
||||
RED='\033[0;31m'
|
||||
@@ -53,7 +53,7 @@ echo " Both are essential: static-nodes = bootstrap peers, permissions-nodes =
|
||||
echo ""
|
||||
|
||||
# Copy both files to each host once
|
||||
for host in "$R630_01" "$ML110"; do
|
||||
for host in "$R630_01" "$R630_03"; do
|
||||
log_info "Copying static-nodes.json and permissions-nodes.toml to $host"
|
||||
scp -o ConnectTimeout=5 -o StrictHostKeyChecking=no "$SOURCE_STATIC" "$SOURCE_TOML" "$USER@$host:/tmp/" 2>/dev/null || { log_err "scp to $host failed"; exit 1; }
|
||||
log_ok " Copied"
|
||||
@@ -108,7 +108,7 @@ for entry in "${VALIDATORS[@]}"; do
|
||||
done
|
||||
|
||||
# Cleanup host /tmp
|
||||
for host in "$R630_01" "$ML110"; do
|
||||
for host in "$R630_01" "$R630_03"; do
|
||||
ssh -o ConnectTimeout=5 -o StrictHostKeyChecking=no "$USER@$host" "rm -f /tmp/permissions-nodes.toml /tmp/static-nodes.json" 2>/dev/null || true
|
||||
done
|
||||
|
||||
|
||||
180
scripts/lib/cw_mesh_deployment_matrix.py
Normal file
180
scripts/lib/cw_mesh_deployment_matrix.py
Normal file
@@ -0,0 +1,180 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Merge deployment-status.json + Uni V2 pair-discovery report into a mesh matrix.
|
||||
|
||||
Read-only: does not call RPC. Use after running promod_uniswap_v2_live_pair_discovery.py
|
||||
or pointing at an existing reports/extraction/promod-uniswap-v2-live-pair-discovery-*.json.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
DEFAULT_STATUS = ROOT / "cross-chain-pmm-lps" / "config" / "deployment-status.json"
|
||||
DEFAULT_DISCOVERY = ROOT / "reports" / "extraction" / "promod-uniswap-v2-live-pair-discovery-latest.json"
|
||||
|
||||
|
||||
def load_json(path: Path) -> dict:
|
||||
return json.loads(path.read_text())
|
||||
|
||||
|
||||
def pmm_settlement_pools(pools: list) -> list[str]:
|
||||
out: list[str] = []
|
||||
for x in pools or []:
|
||||
b, q = x.get("base"), x.get("quote")
|
||||
if b in ("cWUSDC", "cWUSDT") and q in ("USDC", "USDT"):
|
||||
out.append(f"{b}/{q}")
|
||||
return sorted(set(out))
|
||||
|
||||
|
||||
def pmm_has_cw_mesh(pools: list) -> bool:
|
||||
for x in pools or []:
|
||||
if x.get("base") == "cWUSDT" and x.get("quote") == "cWUSDC":
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def discovery_mesh_entry(entries: list, chain_id: int) -> dict | None:
|
||||
for e in entries:
|
||||
if int(e.get("chain_id", -1)) == chain_id:
|
||||
return e
|
||||
return None
|
||||
|
||||
|
||||
def pair_mesh_state(entry: dict | None) -> tuple[bool | None, bool | None, str | None]:
|
||||
"""Returns live, healthy (None if unknown), pool address (None if n/a)."""
|
||||
if not entry:
|
||||
return None, None, None
|
||||
for p in entry.get("pairsChecked") or []:
|
||||
if p.get("base") == "cWUSDT" and p.get("quote") == "cWUSDC":
|
||||
live = p.get("live")
|
||||
h = p.get("health") or {}
|
||||
healthy = h.get("healthy")
|
||||
addr = p.get("poolAddress") or ""
|
||||
if addr in ("", "0x0000000000000000000000000000000000000000"):
|
||||
return bool(live), None, None
|
||||
return bool(live), bool(healthy) if healthy is not None else None, addr
|
||||
return False, None, None
|
||||
|
||||
|
||||
def build_rows(status_path: Path, discovery_path: Path) -> list[dict]:
|
||||
dep = load_json(status_path)
|
||||
disc_data = load_json(discovery_path)
|
||||
entries = disc_data.get("entries") or []
|
||||
rows: list[dict] = []
|
||||
|
||||
for cid_s, info in sorted((dep.get("chains") or {}).items(), key=lambda kv: int(kv[0])):
|
||||
cid = int(cid_s)
|
||||
cw = info.get("cwTokens") or {}
|
||||
pools = info.get("pmmPools") or []
|
||||
has_wusdt = "cWUSDT" in cw
|
||||
has_wusdc = "cWUSDC" in cw
|
||||
d_entry = discovery_mesh_entry(entries, cid)
|
||||
live, healthy, pool_addr = pair_mesh_state(d_entry)
|
||||
settle = pmm_settlement_pools(pools)
|
||||
rows.append(
|
||||
{
|
||||
"chainId": cid,
|
||||
"network": info.get("name", ""),
|
||||
"activationState": info.get("activationState", ""),
|
||||
"hasCWUSDT": has_wusdt,
|
||||
"hasCWUSDC": has_wusdc,
|
||||
"cwTokenCount": len(cw),
|
||||
"pmmCWUSDTvsCWUSDC": pmm_has_cw_mesh(pools),
|
||||
"pmmSettlementRails": settle,
|
||||
"uniswapV2PairDiscoveryPresent": d_entry is not None,
|
||||
"uniswapV2CWUSDTvsCWUSDCLive": live,
|
||||
"uniswapV2CWUSDTvsCWUSDCHealthy": healthy,
|
||||
"uniswapV2CWUSDTvsCWDCPool": pool_addr,
|
||||
}
|
||||
)
|
||||
return rows
|
||||
|
||||
|
||||
def print_markdown(rows: list[dict], generated_from: dict[str, str]) -> None:
|
||||
print("# cW* mesh deployment matrix\n")
|
||||
for k, v in generated_from.items():
|
||||
print(f"- **{k}:** `{v}`")
|
||||
print()
|
||||
print(
|
||||
"| Chain | Network | cWUSDT | cWUSDC | PMM cWUSDT↔cWUSDC | PMM settlement | "
|
||||
"UniV2 cWUSDT/cWUSDC live | healthy | Pool |"
|
||||
)
|
||||
print("|------:|:---|:---:|:---:|:---:|:---|:---:|:---:|:---|")
|
||||
def _fmt_bool(v: bool | None) -> str:
|
||||
if v is None:
|
||||
return "—"
|
||||
return str(v)
|
||||
|
||||
for r in rows:
|
||||
settle = ", ".join(r["pmmSettlementRails"]) if r["pmmSettlementRails"] else "—"
|
||||
if len(settle) > 48:
|
||||
settle = settle[:45] + "…"
|
||||
print(
|
||||
f"| {r['chainId']} | {r['network'][:26]} | "
|
||||
f"{'✓' if r['hasCWUSDT'] else '—'} | {'✓' if r['hasCWUSDC'] else '—'} | "
|
||||
f"{'✓' if r['pmmCWUSDTvsCWUSDC'] else '—'} | {settle} | "
|
||||
f"{_fmt_bool(r['uniswapV2CWUSDTvsCWUSDCLive'])} | {_fmt_bool(r['uniswapV2CWUSDTvsCWUSDCHealthy'])} | "
|
||||
f"`{r['uniswapV2CWUSDTvsCWDCPool'] or '—'}` |"
|
||||
)
|
||||
print()
|
||||
print("## Notes\n")
|
||||
print(
|
||||
"- **PMM settlement**: pools where base is `cWUSDC` or `cWUSDT` and quote is `USDC` or `USDT` "
|
||||
"in `deployment-status.json`."
|
||||
)
|
||||
print(
|
||||
"- **Uni V2** columns come from the pair-discovery report (reserves/health are snapshot, not live RPC)."
|
||||
)
|
||||
|
||||
|
||||
def main() -> None:
|
||||
ap = argparse.ArgumentParser(description=__doc__)
|
||||
ap.add_argument(
|
||||
"--deployment-status",
|
||||
type=Path,
|
||||
default=DEFAULT_STATUS,
|
||||
help=f"Default: {DEFAULT_STATUS}",
|
||||
)
|
||||
ap.add_argument(
|
||||
"--pair-discovery",
|
||||
type=Path,
|
||||
default=DEFAULT_DISCOVERY,
|
||||
help=f"Default: {DEFAULT_DISCOVERY}",
|
||||
)
|
||||
ap.add_argument(
|
||||
"--json-out",
|
||||
type=Path,
|
||||
default=None,
|
||||
help="Optional path to write machine-readable rows (e.g. reports/status/cw-mesh-deployment-matrix-latest.json)",
|
||||
)
|
||||
ap.add_argument(
|
||||
"--no-markdown",
|
||||
action="store_true",
|
||||
help="Do not print markdown (useful with --json-out only)",
|
||||
)
|
||||
args = ap.parse_args()
|
||||
|
||||
rows = build_rows(args.deployment_status, args.pair_discovery)
|
||||
payload = {
|
||||
"schemaVersion": "1.0.0",
|
||||
"description": "Per-chain merge of deployment-status cwTokens/pmmPools and Uni V2 pair-discovery snapshot.",
|
||||
"generatedFrom": {
|
||||
"deploymentStatus": str(args.deployment_status.resolve()),
|
||||
"pairDiscovery": str(args.pair_discovery.resolve()),
|
||||
},
|
||||
"rows": rows,
|
||||
}
|
||||
|
||||
if args.json_out:
|
||||
args.json_out.parent.mkdir(parents=True, exist_ok=True)
|
||||
args.json_out.write_text(json.dumps(payload, indent=2) + "\n")
|
||||
|
||||
if not args.no_markdown:
|
||||
print_markdown(rows, payload["generatedFrom"])
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -4,10 +4,13 @@ from __future__ import annotations
|
||||
from pathlib import Path
|
||||
import argparse
|
||||
from decimal import Decimal, getcontext
|
||||
from functools import lru_cache
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import time
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
|
||||
getcontext().prec = 42
|
||||
|
||||
@@ -20,6 +23,7 @@ ZERO_ADDRESS = "0x0000000000000000000000000000000000000000"
|
||||
HEALTHY_DEVIATION_BPS = Decimal("25")
|
||||
MIN_HEALTHY_RESERVE_UNITS = Decimal("1000")
|
||||
UINT_RE = re.compile(r"\b\d+\b")
|
||||
CAST_CALL_TIMEOUT_SECONDS = int(os.environ.get("PROMOD_CAST_TIMEOUT_SECONDS", "20"))
|
||||
|
||||
CHAIN_CONFIG = {
|
||||
"1": {"rpc_keys": ["ETHEREUM_MAINNET_RPC"], "hub": "USDC"},
|
||||
@@ -68,9 +72,9 @@ def resolve_env_value(key: str, env_values: dict[str, str], seen: set[str] | Non
|
||||
if seen is None:
|
||||
seen = set()
|
||||
if key in seen:
|
||||
return env_values.get(key, "")
|
||||
return os.environ.get(key, env_values.get(key, ""))
|
||||
seen.add(key)
|
||||
value = env_values.get(key, "")
|
||||
value = os.environ.get(key, env_values.get(key, ""))
|
||||
if value.startswith("${") and value.endswith("}"):
|
||||
inner = value[2:-1]
|
||||
target = inner.split(":-", 1)[0]
|
||||
@@ -80,18 +84,30 @@ def resolve_env_value(key: str, env_values: dict[str, str], seen: set[str] | Non
|
||||
|
||||
def cast_call(rpc_url: str, target: str, signature: str, *args: str) -> str:
|
||||
cmd = ["cast", "call", target, signature, *args, "--rpc-url", rpc_url]
|
||||
return subprocess.check_output(cmd, text=True).strip()
|
||||
return subprocess.check_output(cmd, text=True, timeout=CAST_CALL_TIMEOUT_SECONDS).strip()
|
||||
|
||||
|
||||
@lru_cache(maxsize=1024)
|
||||
def cast_call_cached(rpc_url: str, target: str, signature: str, *args: str) -> str:
|
||||
return cast_call(rpc_url, target, signature, *args)
|
||||
|
||||
|
||||
def parse_uint(value: str) -> int:
|
||||
matches = UINT_RE.findall(value)
|
||||
if not matches:
|
||||
stripped = value.strip()
|
||||
if not stripped:
|
||||
raise ValueError(f"could not parse integer from {value!r}")
|
||||
return int(matches[0])
|
||||
return int(stripped.split()[0])
|
||||
|
||||
|
||||
def parse_uints(value: str, count: int) -> list[int]:
|
||||
matches = [int(match) for match in UINT_RE.findall(value)]
|
||||
matches = []
|
||||
for raw_line in value.splitlines():
|
||||
line = raw_line.strip()
|
||||
if not line:
|
||||
continue
|
||||
matches.append(int(line.split()[0]))
|
||||
if len(matches) == count:
|
||||
break
|
||||
if len(matches) < count:
|
||||
raise ValueError(f"expected {count} integers, got {value!r}")
|
||||
return matches[:count]
|
||||
@@ -109,11 +125,13 @@ def normalize_units(raw: int, decimals: int) -> Decimal:
|
||||
|
||||
|
||||
def compute_pair_health(rpc_url: str, pair_address: str, base_address: str, quote_address: str) -> dict:
|
||||
token0 = parse_address(cast_call(rpc_url, pair_address, "token0()(address)"))
|
||||
token1 = parse_address(cast_call(rpc_url, pair_address, "token1()(address)"))
|
||||
reserve0_raw, reserve1_raw, _ = parse_uints(cast_call(rpc_url, pair_address, "getReserves()(uint112,uint112,uint32)"), 3)
|
||||
decimals0 = parse_uint(cast_call(rpc_url, token0, "decimals()(uint8)"))
|
||||
decimals1 = parse_uint(cast_call(rpc_url, token1, "decimals()(uint8)"))
|
||||
token0 = parse_address(cast_call_cached(rpc_url, pair_address, "token0()(address)"))
|
||||
token1 = parse_address(cast_call_cached(rpc_url, pair_address, "token1()(address)"))
|
||||
reserve0_raw, reserve1_raw, _ = parse_uints(
|
||||
cast_call_cached(rpc_url, pair_address, "getReserves()(uint112,uint112,uint32)"), 3
|
||||
)
|
||||
decimals0 = parse_uint(cast_call_cached(rpc_url, token0, "decimals()(uint8)"))
|
||||
decimals1 = parse_uint(cast_call_cached(rpc_url, token1, "decimals()(uint8)"))
|
||||
|
||||
if token0.lower() == base_address.lower() and token1.lower() == quote_address.lower():
|
||||
base_raw, quote_raw = reserve0_raw, reserve1_raw
|
||||
@@ -171,6 +189,68 @@ def append_discovered_pair(status: dict, chain_id: str, pair: dict):
|
||||
return True
|
||||
|
||||
|
||||
def build_chain_entry(chain_id: str, chain: dict, config: dict, env_values: dict[str, str]):
|
||||
factory = resolve_env_value(f"CHAIN_{chain_id}_UNISWAP_V2_FACTORY", env_values)
|
||||
router = resolve_env_value(f"CHAIN_{chain_id}_UNISWAP_V2_ROUTER", env_values)
|
||||
start_block = resolve_env_value(f"CHAIN_{chain_id}_UNISWAP_V2_START_BLOCK", env_values) or "0"
|
||||
rpc_url = ""
|
||||
for key in config["rpc_keys"]:
|
||||
value = resolve_env_value(key, env_values)
|
||||
if value:
|
||||
rpc_url = value
|
||||
break
|
||||
|
||||
env_ready = bool(factory and router and rpc_url)
|
||||
pairs = []
|
||||
discovered_rows = []
|
||||
if env_ready:
|
||||
for base, quote, token0, token1 in candidate_pairs(chain):
|
||||
try:
|
||||
pair_address = cast_call_cached(rpc_url, factory, "getPair(address,address)(address)", token0, token1)
|
||||
except Exception as exc:
|
||||
pair_address = f"ERROR:{exc}"
|
||||
live = pair_address.lower() != ZERO_ADDRESS and not pair_address.startswith("ERROR:")
|
||||
row = {
|
||||
"base": base,
|
||||
"quote": quote,
|
||||
"poolAddress": pair_address,
|
||||
"live": live,
|
||||
}
|
||||
if live:
|
||||
try:
|
||||
row["health"] = compute_pair_health(rpc_url, pair_address, token0, token1)
|
||||
except Exception as exc:
|
||||
row["health"] = {"healthy": False, "error": str(exc)}
|
||||
discovered_rows.append(
|
||||
{
|
||||
"chain_id": chain_id,
|
||||
"row": {
|
||||
"base": base,
|
||||
"quote": quote,
|
||||
"poolAddress": pair_address,
|
||||
"factoryAddress": factory,
|
||||
"routerAddress": router,
|
||||
"startBlock": int(start_block),
|
||||
"venue": "uniswap_v2_pair",
|
||||
"publicRoutingEnabled": False,
|
||||
},
|
||||
}
|
||||
)
|
||||
pairs.append(row)
|
||||
|
||||
entry = {
|
||||
"chain_id": int(chain_id),
|
||||
"network": chain.get("name"),
|
||||
"factoryAddress": factory or None,
|
||||
"routerAddress": router or None,
|
||||
"startBlock": int(start_block),
|
||||
"rpcConfigured": bool(rpc_url),
|
||||
"envReady": env_ready,
|
||||
"pairsChecked": pairs,
|
||||
}
|
||||
return entry, discovered_rows
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument("--write-discovered", action="store_true", help="Write discovered live pairs into deployment-status.json under uniswapV2Pools.")
|
||||
@@ -179,74 +259,22 @@ def main():
|
||||
status = load_json(DEPLOYMENT_STATUS)
|
||||
env_values = load_env(ENV_PATH)
|
||||
|
||||
entries = []
|
||||
entries_by_chain: dict[str, dict] = {}
|
||||
discovered_for_write = []
|
||||
jobs = []
|
||||
with ThreadPoolExecutor(max_workers=min(8, len(CHAIN_CONFIG))) as executor:
|
||||
for chain_id, config in CHAIN_CONFIG.items():
|
||||
chain = status["chains"].get(chain_id)
|
||||
if not chain:
|
||||
continue
|
||||
jobs.append((chain_id, executor.submit(build_chain_entry, chain_id, chain, config, env_values)))
|
||||
|
||||
for chain_id, config in CHAIN_CONFIG.items():
|
||||
chain = status["chains"].get(chain_id)
|
||||
if not chain:
|
||||
continue
|
||||
for chain_id, future in jobs:
|
||||
entry, discovered_rows = future.result()
|
||||
entries_by_chain[chain_id] = entry
|
||||
discovered_for_write.extend(discovered_rows)
|
||||
|
||||
factory = resolve_env_value(f"CHAIN_{chain_id}_UNISWAP_V2_FACTORY", env_values)
|
||||
router = resolve_env_value(f"CHAIN_{chain_id}_UNISWAP_V2_ROUTER", env_values)
|
||||
start_block = resolve_env_value(f"CHAIN_{chain_id}_UNISWAP_V2_START_BLOCK", env_values) or "0"
|
||||
rpc_url = ""
|
||||
for key in config["rpc_keys"]:
|
||||
value = resolve_env_value(key, env_values)
|
||||
if value:
|
||||
rpc_url = value
|
||||
break
|
||||
|
||||
env_ready = bool(factory and router and rpc_url)
|
||||
pairs = []
|
||||
if env_ready:
|
||||
for base, quote, token0, token1 in candidate_pairs(chain):
|
||||
try:
|
||||
pair_address = cast_call(rpc_url, factory, "getPair(address,address)(address)", token0, token1)
|
||||
except Exception as exc:
|
||||
pair_address = f"ERROR:{exc}"
|
||||
live = pair_address.lower() != ZERO_ADDRESS and not pair_address.startswith("ERROR:")
|
||||
row = {
|
||||
"base": base,
|
||||
"quote": quote,
|
||||
"poolAddress": pair_address,
|
||||
"live": live,
|
||||
}
|
||||
if live:
|
||||
try:
|
||||
row["health"] = compute_pair_health(rpc_url, pair_address, token0, token1)
|
||||
except Exception as exc:
|
||||
row["health"] = {"healthy": False, "error": str(exc)}
|
||||
pairs.append(row)
|
||||
if live:
|
||||
discovered_for_write.append(
|
||||
{
|
||||
"chain_id": chain_id,
|
||||
"row": {
|
||||
"base": base,
|
||||
"quote": quote,
|
||||
"poolAddress": pair_address,
|
||||
"factoryAddress": factory,
|
||||
"routerAddress": router,
|
||||
"startBlock": int(start_block),
|
||||
"venue": "uniswap_v2_pair",
|
||||
"publicRoutingEnabled": False,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
entries.append(
|
||||
{
|
||||
"chain_id": int(chain_id),
|
||||
"network": chain.get("name"),
|
||||
"factoryAddress": factory or None,
|
||||
"routerAddress": router or None,
|
||||
"startBlock": int(start_block),
|
||||
"rpcConfigured": bool(rpc_url),
|
||||
"envReady": env_ready,
|
||||
"pairsChecked": pairs,
|
||||
}
|
||||
)
|
||||
entries = [entries_by_chain[chain_id] for chain_id in CHAIN_CONFIG if chain_id in entries_by_chain]
|
||||
|
||||
writes = []
|
||||
if args.write_discovered:
|
||||
|
||||
@@ -12,6 +12,21 @@ import json
|
||||
from typing import Dict, List, Optional, Any
|
||||
from proxmoxer import ProxmoxAPI
|
||||
|
||||
|
||||
def normalize_proxmox_token_name(user: str, token_name: str) -> str:
|
||||
"""Accept bare token names or full token ids in PROXMOX_TOKEN_NAME."""
|
||||
if '!' not in token_name:
|
||||
return token_name
|
||||
|
||||
token_user, bare_token_name = token_name.split('!', 1)
|
||||
if token_user != user:
|
||||
raise ValueError(
|
||||
f"PROXMOX_TOKEN_NAME is for '{token_user}', but PROXMOX_USER is '{user}'"
|
||||
)
|
||||
if not bare_token_name:
|
||||
raise ValueError("PROXMOX_TOKEN_NAME is missing the token name after '!'" )
|
||||
return bare_token_name
|
||||
|
||||
def load_env_file(env_path: str = None) -> dict:
|
||||
"""Load environment variables from .env file."""
|
||||
if env_path is None:
|
||||
@@ -76,6 +91,13 @@ def get_proxmox_connection() -> ProxmoxAPI:
|
||||
print(" PROXMOX_TOKEN_NAME=your-token-name", file=sys.stderr)
|
||||
print(" PROXMOX_TOKEN_VALUE=your-token-value", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if token_value:
|
||||
try:
|
||||
token_name = normalize_proxmox_token_name(user, token_name)
|
||||
except ValueError as exc:
|
||||
print(f"Error: {exc}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
if token_value:
|
||||
|
||||
@@ -22,6 +22,21 @@ TUNNEL_URLS = {
|
||||
'192.168.11.12': 'r630-02.d-bis.org',
|
||||
}
|
||||
|
||||
|
||||
def normalize_proxmox_token_name(user: str, token_name: str) -> str:
|
||||
"""Accept bare token names or full token ids in PROXMOX_TOKEN_NAME."""
|
||||
if '!' not in token_name:
|
||||
return token_name
|
||||
|
||||
token_user, bare_token_name = token_name.split('!', 1)
|
||||
if token_user != user:
|
||||
raise ValueError(
|
||||
f"PROXMOX_TOKEN_NAME is for '{token_user}', but PROXMOX_USER is '{user}'"
|
||||
)
|
||||
if not bare_token_name:
|
||||
raise ValueError("PROXMOX_TOKEN_NAME is missing the token name after '!'")
|
||||
return bare_token_name
|
||||
|
||||
def load_env_file(env_path: str = None) -> dict:
|
||||
"""Load environment variables from .env file."""
|
||||
if env_path is None:
|
||||
@@ -97,6 +112,13 @@ def get_proxmox_connection() -> Optional[ProxmoxAPI]:
|
||||
if not token_value and not password:
|
||||
print("Error: PROXMOX_TOKEN_VALUE or PROXMOX_PASSWORD required", file=sys.stderr)
|
||||
return None
|
||||
|
||||
if token_value:
|
||||
try:
|
||||
token_name = normalize_proxmox_token_name(user, token_name)
|
||||
except ValueError as exc:
|
||||
print(f"❌ {exc}", file=sys.stderr)
|
||||
return None
|
||||
|
||||
try:
|
||||
if token_value:
|
||||
|
||||
59
scripts/maintenance/deploy-vzdump-prune-cron-to-proxmox-nodes.sh
Executable file
59
scripts/maintenance/deploy-vzdump-prune-cron-to-proxmox-nodes.sh
Executable file
@@ -0,0 +1,59 @@
|
||||
#!/usr/bin/env bash
|
||||
# Install prune-proxmox-vzdump-dump.sh on Proxmox hosts and schedule weekly cron (Sun 04:15).
|
||||
# Loads config/ip-addresses.conf. Requires SSH root key access.
|
||||
#
|
||||
# Backup retention alignment:
|
||||
# - pvesh get /cluster/backup — if you add Datacenter → Scheduled backups, set maxfiles
|
||||
# to the same (or lower) as VZDUMP_PRUNE_KEEP so GUI backups and this prune do not fight.
|
||||
# - Empty job list: retention for /var/lib/vz/dump is this weekly cron + ad-hoc vzdump.
|
||||
#
|
||||
# Usage:
|
||||
# ./scripts/maintenance/deploy-vzdump-prune-cron-to-proxmox-nodes.sh [--dry-run]
|
||||
#
|
||||
set -euo pipefail
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
|
||||
# shellcheck source=/dev/null
|
||||
source "${PROJECT_ROOT}/config/ip-addresses.conf" 2>/dev/null || true
|
||||
|
||||
PROXMOX_SSH_USER="${PROXMOX_SSH_USER:-root}"
|
||||
SSH_OPTS="-o StrictHostKeyChecking=no -o ConnectTimeout=10 -o BatchMode=yes"
|
||||
PRUNE_LOCAL="${SCRIPT_DIR}/prune-proxmox-vzdump-dump.sh"
|
||||
REMOTE_BIN="/usr/local/sbin/prune-proxmox-vzdump-dump.sh"
|
||||
KEEP="${VZDUMP_PRUNE_KEEP:-2}"
|
||||
|
||||
HOSTS=(
|
||||
"${PROXMOX_HOST_ML110:-192.168.11.10}"
|
||||
"${PROXMOX_HOST_R630_01:-192.168.11.11}"
|
||||
"${PROXMOX_HOST_R630_02:-192.168.11.12}"
|
||||
)
|
||||
|
||||
DRY_RUN=false
|
||||
[[ "${1:-}" == "--dry-run" ]] && DRY_RUN=true
|
||||
|
||||
if [[ ! -f "$PRUNE_LOCAL" ]]; then
|
||||
echo "Missing $PRUNE_LOCAL" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
CRON_FILE="/etc/cron.d/vzdump-prune-local"
|
||||
|
||||
for ip in "${HOSTS[@]}"; do
|
||||
echo "=== ${PROXMOX_SSH_USER}@${ip} ==="
|
||||
if $DRY_RUN; then
|
||||
echo "Would: scp $PRUNE_LOCAL -> ${ip}:${REMOTE_BIN}"
|
||||
echo "Would: install ${CRON_FILE}"
|
||||
continue
|
||||
fi
|
||||
scp $SSH_OPTS "$PRUNE_LOCAL" "${PROXMOX_SSH_USER}@${ip}:${REMOTE_BIN}"
|
||||
{
|
||||
echo 'SHELL=/bin/sh'
|
||||
echo 'PATH=/usr/sbin:/usr/bin:/sbin:/bin'
|
||||
echo "# vzdump retention — see repo scripts/maintenance/prune-proxmox-vzdump-dump.sh"
|
||||
echo "15 4 * * 0 root ${REMOTE_BIN} ${KEEP} >>/var/log/vzdump-prune.log 2>&1"
|
||||
} | ssh $SSH_OPTS "${PROXMOX_SSH_USER}@${ip}" "chmod 755 ${REMOTE_BIN} && cat > ${CRON_FILE} && chmod 644 ${CRON_FILE}"
|
||||
echo "Installed ${REMOTE_BIN} and ${CRON_FILE} (keep=${KEEP})."
|
||||
done
|
||||
|
||||
echo "Done. Logs on nodes: /var/log/vzdump-prune.log (after first run)."
|
||||
@@ -5,7 +5,7 @@
|
||||
# the rest stay at head so the restarted node syncs quickly and consensus can continue.
|
||||
#
|
||||
# Usage: ./scripts/maintenance/fix-block-production-staggered-restart.sh [--dry-run]
|
||||
# Requires: SSH to Proxmox hosts (192.168.11.10 ML110, 192.168.11.11 R630-01, 192.168.11.12 R630-02)
|
||||
# Requires: SSH to Proxmox hosts (192.168.11.11 R630-01, 192.168.11.13 R630-03)
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
@@ -28,8 +28,8 @@ log_warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
|
||||
# Order: restart one at a time; wait between so restarted node can sync from others
|
||||
# VMID : host
|
||||
VALIDATORS=(
|
||||
"1004:${PROXMOX_HOST_ML110:-192.168.11.10}"
|
||||
"1003:${PROXMOX_HOST_ML110:-192.168.11.10}"
|
||||
"1004:${PROXMOX_HOST_R630_03:-192.168.11.13}"
|
||||
"1003:${PROXMOX_HOST_R630_03:-192.168.11.13}"
|
||||
"1002:${PROXMOX_HOST_R630_01:-192.168.11.11}"
|
||||
"1001:${PROXMOX_HOST_R630_01:-192.168.11.11}"
|
||||
"1000:${PROXMOX_HOST_R630_01:-192.168.11.11}"
|
||||
|
||||
65
scripts/maintenance/proxmox-backup-all-running-ct.sh
Executable file
65
scripts/maintenance/proxmox-backup-all-running-ct.sh
Executable file
@@ -0,0 +1,65 @@
|
||||
#!/usr/bin/env bash
# Fleet backup: vzdump every *running* LXC container on this Proxmox node
# (intended to run on r630-01 from cron).
# - Compression defaults to zstd (.tar.zst); set VZDUMP_COMPRESS=gzip only if
#   legacy .tar.gz output is required.
# - Retention is handled elsewhere: keep /etc/cron.d/vzdump-prune-local
#   (prune-proxmox-vzdump-dump.sh keep=2) as the cap on /var/lib/vz/dump —
#   no rough mtime deletes here.
# Install (from repo, r630-01):
#   scp scripts/maintenance/proxmox-backup-all-running-ct.sh root@192.168.11.11:/usr/local/bin/proxmox-backup.sh
#   ssh root@192.168.11.11 'chmod 755 /usr/local/bin/proxmox-backup.sh'
# Cron (root on r630-01, single daily run with lock):
#   0 2 * * * /usr/bin/flock -n /var/lock/proxmox-backup.lock /usr/local/bin/proxmox-backup.sh
# Skip selected large VMIDs for one run only:
#   VZDUMP_SKIP_VMIDS="2101 2500" /usr/local/bin/proxmox-backup.sh
#
set -euo pipefail

BACKUP_STORAGE="${BACKUP_STORAGE:-local}"
LOG_DIR="${LOG_DIR:-/var/log/proxmox-backups}"
LOG_FILE="${LOG_DIR}/backup_$(date +%Y%m%d).log"
COMPRESS="${VZDUMP_COMPRESS:-zstd}"
# Space-separated VMIDs to exclude from this run (e.g. test CTs).
SKIP_VMIDS="${VZDUMP_SKIP_VMIDS:-}"

mkdir -p "$LOG_DIR"

# Timestamped line to stdout and the daily log file.
log() { echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*" | tee -a "$LOG_FILE"; }

# True (0) when $1 appears in SKIP_VMIDS.
is_skipped() {
  local candidate="$1" entry
  for entry in $SKIP_VMIDS; do
    [[ "$candidate" == "$entry" ]] && return 0
  done
  return 1
}

log "Starting backup job (compress=$COMPRESS storage=$BACKUP_STORAGE)..."

# Column 2 of `pct list` is the status; keep only running CT ids.
mapfile -t running_vmids < <(pct list 2>/dev/null | awk 'NR>1 && $2=="running" {print $1}')

if ((${#running_vmids[@]} == 0)); then
  log "No running containers."
  exit 0
fi

log "VMIDs: ${running_vmids[*]}"

# Run vzdump at low CPU/IO priority so backups do not starve the CTs.
if command -v ionice >/dev/null 2>&1; then
  vzdump_cmd=(nice ionice -c2 -n7 vzdump)
else
  vzdump_cmd=(nice vzdump)
fi

succeeded=0
failed=0
for vmid in "${running_vmids[@]}"; do
  if is_skipped "$vmid"; then
    log "SKIP $vmid (VZDUMP_SKIP_VMIDS)"
    continue
  fi
  log "vzdump $vmid..."
  if "${vzdump_cmd[@]}" "$vmid" --storage "$BACKUP_STORAGE" --compress "$COMPRESS" --mode snapshot --quiet; then
    log "OK $vmid"
    succeeded=$((succeeded + 1))
  else
    log "FAIL $vmid (non-zero exit)"
    failed=$((failed + 1))
  fi
done

log "Done. success=$succeeded fail=$failed. Retention: /usr/local/sbin/prune-proxmox-vzdump-dump.sh (weekly cron)."
exit 0
|
||||
33
scripts/maintenance/prune-orphan-vzdump-logs.sh
Executable file
33
scripts/maintenance/prune-orphan-vzdump-logs.sh
Executable file
@@ -0,0 +1,33 @@
|
||||
#!/usr/bin/env bash
# Remove old vzdump *.log files in /var/lib/vz/dump (partial/failed run leftovers).
# Usage:
#   ./scripts/maintenance/prune-orphan-vzdump-logs.sh 192.168.11.12
#   ./scripts/maintenance/prune-orphan-vzdump-logs.sh 192.168.11.12 --apply
# Env: MIN_AGE_DAYS=90 VZDUMP_DIR=/var/lib/vz/dump
#
set -euo pipefail
MIN_AGE_DAYS="${MIN_AGE_DAYS:-90}"
DUMP="${VZDUMP_DIR:-/var/lib/vz/dump}"
HOST="${1:-}"
APPLY=0
[[ "${2:-}" == "--apply" ]] && APPLY=1

if [[ -z "$HOST" ]]; then
  echo "Usage: $0 <pve_ip> [--apply]" >&2
  exit 1
fi

# Settings travel as env assignments on the remote command line. ssh joins its
# arguments with spaces and the REMOTE shell re-parses them, so each value is
# shell-quoted with printf %q — otherwise a VZDUMP_DIR containing spaces or
# metacharacters would be split/expanded on the node.
ssh -o ConnectTimeout=10 -o BatchMode=yes -o StrictHostKeyChecking=no "root@$HOST" \
  "MIN_AGE_DAYS=$(printf '%q' "$MIN_AGE_DAYS") DUMP=$(printf '%q' "$DUMP") APPLY=$(printf '%q' "$APPLY") bash" <<'NODERUN'
set -euo pipefail
c=$(find "$DUMP" -maxdepth 1 -name "vzdump-*.log" -mtime "+${MIN_AGE_DAYS}" 2>/dev/null | wc -l)
echo "[$(hostname)] $DUMP: $c log file(s) mtime +${MIN_AGE_DAYS}d"
if (( c == 0 )); then exit 0; fi
if [[ "$APPLY" == "1" ]]; then
  find "$DUMP" -maxdepth 1 -name "vzdump-*.log" -mtime "+${MIN_AGE_DAYS}" -print -delete
  echo "Deleted."
else
  find "$DUMP" -maxdepth 1 -name "vzdump-*.log" -mtime "+${MIN_AGE_DAYS}" -ls 2>/dev/null | head -20
  echo "Dry-run. Re-run with same host and --apply"
fi
NODERUN
|
||||
127
scripts/maintenance/prune-proxmox-vzdump-dump.sh
Executable file
127
scripts/maintenance/prune-proxmox-vzdump-dump.sh
Executable file
@@ -0,0 +1,127 @@
|
||||
#!/usr/bin/env bash
# Prune old Proxmox vzdump archives under /var/lib/vz/dump (the "local" dir storage).
# Keeps the N newest archive per VMID for each family:
#   - vzdump-lxc-<vmid>-*.(tar.gz|tar.zst|vma.zst|vma.gz)
#   - vzdump-qemu-<vmid>-*.(tar.gz|tar.zst|vma.zst|vma.gz)
#   - vzdump-<vmid>-<epoch>.(tar.gz|tar.zst)   (legacy naming without lxc/qemu)
# Matching .log / .notes sidecars are removed together with their archive.
#
# Run ON the Proxmox node as root, or via SSH:
#   ssh root@192.168.11.11 'bash -s' < scripts/maintenance/prune-proxmox-vzdump-dump.sh -- 2
#
# Args: [KEEP] (default 2). Env: VZDUMP_DIR=/var/lib/vz/dump
#
set -euo pipefail

# How many archives to keep per VMID; must be a positive integer.
KEEP="${1:-2}"
DUMP="${VZDUMP_DIR:-/var/lib/vz/dump}"

if ! [[ "$KEEP" =~ ^[0-9]+$ ]] || ((KEEP < 1)); then
  echo "Usage: $0 [KEEP>=1]" >&2
  exit 1
fi

# Every glob below is relative to the dump directory.
cd "$DUMP" || {
  echo "Cannot cd to $DUMP" >&2
  exit 1
}

# Unmatched globs expand to nothing instead of staying literal.
shopt -s nullglob
removed=0
|
||||
|
||||
# Sidecars for archive basename $1 (path without extension chain handled per type)
|
||||
# Delete the .log/.notes companions of an archive. $1 is the archive path with
# its extension already stripped; missing sidecars are ignored.
remove_sidecars() {
  local stem="$1"
  rm -f -- "${stem}.log" "${stem}.notes" "${stem}.notes.zst" 2>/dev/null || true
}
|
||||
|
||||
# $1 = prefix e.g. vzdump-lxc, $2 = vmid
|
||||
# Keep the $KEEP newest archives for one family/VMID in the cwd; remove the
# rest plus their sidecars. $1 = family prefix (vzdump-lxc | vzdump-qemu),
# $2 = VMID. Uses globals: KEEP (limit), removed (counter, incremented per
# deleted archive).
prune_family_globs() {
  local prefix="$1"
  local vmid="$2"
  local -a archives=()
  # Fix: declare sorted locally — previously it leaked into global scope.
  local -a sorted=()
  local f n i base
  for f in \
    "${prefix}-${vmid}-"*.tar.gz \
    "${prefix}-${vmid}-"*.tar.zst \
    "${prefix}-${vmid}-"*.vma.zst \
    "${prefix}-${vmid}-"*.vma.gz; do
    [[ -f "$f" ]] || continue
    archives+=("$f")
  done
  ((${#archives[@]} == 0)) && return 0
  # Newest first; vzdump archive names contain no whitespace, so ls -t is safe.
  mapfile -t sorted < <(ls -t "${archives[@]}" 2>/dev/null)
  n=${#sorted[@]}
  ((n > KEEP)) || return 0
  for ((i = KEEP; i < n; i++)); do
    f="${sorted[i]}"
    # Strip whichever archive extension applies to recover the sidecar stem.
    base="${f%.tar.gz}"
    base="${base%.tar.zst}"
    base="${base%.vma.zst}"
    base="${base%.vma.gz}"
    rm -f -- "$f"
    remove_sidecars "$base"
    ((removed += 1)) || true
  done
}
|
||||
|
||||
# Legacy: vzdump-<vmid>-<digits>.tar.*
|
||||
# Legacy naming without a family tag: vzdump-<vmid>-<digits>.tar.* — keep the
# $KEEP newest in the cwd, delete the rest plus sidecars. $1 = VMID.
# Uses globals: KEEP (limit), removed (counter, incremented per deletion).
prune_legacy_vmid() {
  local vmid="$1"
  local -a archives=()
  # Fix: declare sorted locally — previously it leaked into global scope.
  local -a sorted=()
  local f n i base
  for f in vzdump-"${vmid}"-*.tar.gz vzdump-"${vmid}"-*.tar.zst vzdump-"${vmid}"-*.vma.zst vzdump-"${vmid}"-*.vma.gz; do
    [[ -f "$f" ]] || continue
    # Family-tagged archives are handled by prune_family_globs.
    [[ "$f" == vzdump-lxc-* || "$f" == vzdump-qemu-* ]] && continue
    archives+=("$f")
  done
  ((${#archives[@]} == 0)) && return 0
  # Newest first; vzdump archive names contain no whitespace, so ls -t is safe.
  mapfile -t sorted < <(ls -t "${archives[@]}" 2>/dev/null)
  n=${#sorted[@]}
  ((n > KEEP)) || return 0
  for ((i = KEEP; i < n; i++)); do
    f="${sorted[i]}"
    base="${f%.tar.gz}"
    base="${base%.tar.zst}"
    base="${base%.vma.zst}"
    base="${base%.vma.gz}"
    rm -f -- "$f"
    remove_sidecars "$base"
    ((removed += 1)) || true
  done
}
|
||||
|
||||
# Discover which VMIDs have archives in each naming family, then prune each.
declare -A vmid_lxc=()
declare -A vmid_qemu=()
declare -A vmid_legacy=()

# One pass over the dump dir: classify every archive file by naming family.
for f in vzdump-*; do
  [[ -f "$f" ]] || continue
  case "$f" in
    vzdump-lxc-*)
      [[ "$f" =~ ^vzdump-lxc-([0-9]+)- ]] || continue
      vmid_lxc["${BASH_REMATCH[1]}"]=1
      ;;
    vzdump-qemu-*)
      [[ "$f" =~ ^vzdump-qemu-([0-9]+)- ]] || continue
      vmid_qemu["${BASH_REMATCH[1]}"]=1
      ;;
    *)
      # Legacy naming: vzdump-<vmid>-<epoch>.<archive-ext> only.
      [[ "$f" =~ \.(tar\.gz|tar\.zst|vma\.zst|vma\.gz)$ ]] || continue
      [[ "$f" =~ ^vzdump-([0-9]+)-[0-9_]+ ]] || continue
      vmid_legacy["${BASH_REMATCH[1]}"]=1
      ;;
  esac
done

for vmid in "${!vmid_lxc[@]}"; do
  prune_family_globs vzdump-lxc "$vmid"
done
for vmid in "${!vmid_qemu[@]}"; do
  prune_family_globs vzdump-qemu "$vmid"
done
for vmid in "${!vmid_legacy[@]}"; do
  prune_legacy_vmid "$vmid"
done

echo "prune-proxmox-vzdump-dump: removed ${removed} archive(s); keep=${KEEP} newest per VMID in ${DUMP}"
df -h "$DUMP" 2>/dev/null || df -h /var/lib/vz
|
||||
19
scripts/maintenance/verify-pve-cluster-health.sh
Executable file
19
scripts/maintenance/verify-pve-cluster-health.sh
Executable file
@@ -0,0 +1,19 @@
|
||||
#!/usr/bin/env bash
# Quick post-maintenance check: corosync quorum and node list from one Proxmox host.
# Usage: ./scripts/maintenance/verify-pve-cluster-health.sh [r630-01-ip]
# Requires SSH root@host (BatchMode + key).
#
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# Optional shared IP map; absence is fine — the defaults below still apply.
# shellcheck source=/dev/null
source "${SCRIPT_DIR}/../../config/ip-addresses.conf" 2>/dev/null || true

IP="${1:-${PROXMOX_HOST_R630_01:-192.168.11.11}}"
# Array instead of a word-split string (SC2086): each option survives quoting.
SSH_OPTS=(-o StrictHostKeyChecking=no -o ConnectTimeout=12 -o BatchMode=yes)

echo "=== pvecm status ($IP) ==="
ssh "${SSH_OPTS[@]}" "root@$IP" "pvecm status; echo '---'; pvesh get /nodes --output-format json-pretty 2>/dev/null | head -80" || {
  # Diagnostic goes to stderr so piped consumers only see the health output.
  echo "SSH or pvecm failed." >&2
  exit 1
}
echo "OK. Expect: Quorate: Yes, and expected nodes online."
|
||||
0
scripts/monitoring/collect-storage-growth-data.sh
Normal file → Executable file
0
scripts/monitoring/collect-storage-growth-data.sh
Normal file → Executable file
@@ -61,7 +61,7 @@ echo ""
|
||||
if [[ "$DRY_RUN" == true ]]; then
|
||||
echo "[DRY-RUN] Would run: run-wave0-from-lan.sh (NPMplus RPC fix + backup)"
|
||||
else
|
||||
bash "$SCRIPT_DIR/run-wave0-from-lan.sh" $([[ "$SKIP_BACKUP" == true ]] && echo --skip-backup) 2>/dev/null || true
|
||||
bash "$SCRIPT_DIR/run-wave0-from-lan.sh" $([[ "$SKIP_BACKUP" == true ]] && echo --skip-backup)
|
||||
fi
|
||||
echo ""
|
||||
|
||||
|
||||
@@ -21,7 +21,7 @@ if $DRY_RUN; then
|
||||
echo ""
|
||||
echo "1. Config validation: bash scripts/validation/validate-config-files.sh [--dry-run]"
|
||||
echo "2. On-chain check (138): SKIP_EXIT=1 bash scripts/verify/check-contracts-on-chain-138.sh || true"
|
||||
echo "3. All validation: bash scripts/verify/run-all-validation.sh --skip-genesis"
|
||||
echo "3. All validation: bash scripts/verify/run-all-validation.sh --skip-genesis (includes cW* mesh matrix when pair-discovery JSON exists)"
|
||||
echo "4. Reconcile .env: bash scripts/verify/reconcile-env-canonical.sh --print"
|
||||
echo ""
|
||||
echo "Run without --dry-run to execute. Exit 0 = success."
|
||||
@@ -41,7 +41,7 @@ echo "[Step 2/4] On-chain contract check (Chain 138)..."
|
||||
SKIP_EXIT=1 bash scripts/verify/check-contracts-on-chain-138.sh || true
|
||||
echo ""
|
||||
|
||||
# 3. Full validation (skip genesis to avoid RPC)
|
||||
# 3. Full validation (skip genesis to avoid RPC; includes cW* mesh matrix when pair-discovery JSON exists)
|
||||
echo "[Step 3/4] Run all validation (--skip-genesis)..."
|
||||
bash scripts/verify/run-all-validation.sh --skip-genesis
|
||||
echo ""
|
||||
|
||||
@@ -132,6 +132,23 @@ else
|
||||
log_err "Missing config/public-sector-program-manifest.json"
|
||||
ERRORS=$((ERRORS + 1))
|
||||
fi
|
||||
# Phoenix deploy targets (used by phoenix-deploy-api POST /api/deploy)
|
||||
if [[ -f "$PROJECT_ROOT/phoenix-deploy-api/deploy-targets.json" ]]; then
|
||||
log_ok "Found: phoenix-deploy-api/deploy-targets.json"
|
||||
if [[ -x "$PROJECT_ROOT/scripts/validation/validate-phoenix-deploy-targets.sh" ]]; then
|
||||
if "$PROJECT_ROOT/scripts/validation/validate-phoenix-deploy-targets.sh" "$PROJECT_ROOT/phoenix-deploy-api/deploy-targets.json" >/dev/null; then
|
||||
log_ok "phoenix-deploy-api/deploy-targets.json: repo/branch/target map valid"
|
||||
else
|
||||
log_err "phoenix-deploy-api/deploy-targets.json: invalid structure"
|
||||
ERRORS=$((ERRORS + 1))
|
||||
fi
|
||||
else
|
||||
log_warn "validate-phoenix-deploy-targets.sh not executable; skipping deploy target validation"
|
||||
fi
|
||||
else
|
||||
log_err "Missing phoenix-deploy-api/deploy-targets.json"
|
||||
ERRORS=$((ERRORS + 1))
|
||||
fi
|
||||
# Proxmox operational template (VMID/IP/FQDN mirror; see docs/03-deployment/PROXMOX_VE_OPERATIONAL_DEPLOYMENT_TEMPLATE.md)
|
||||
if [[ -f "$PROJECT_ROOT/config/proxmox-operational-template.json" ]]; then
|
||||
log_ok "Found: config/proxmox-operational-template.json"
|
||||
|
||||
48
scripts/validation/validate-phoenix-deploy-targets.sh
Executable file
48
scripts/validation/validate-phoenix-deploy-targets.sh
Executable file
@@ -0,0 +1,48 @@
|
||||
#!/usr/bin/env bash
# Validate phoenix-deploy-api/deploy-targets.json structure.

set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
# Optional $1 overrides the default targets file location.
TARGETS_FILE="${1:-$PROJECT_ROOT/phoenix-deploy-api/deploy-targets.json}"

[[ -f "$TARGETS_FILE" ]] || {
  echo "[ERROR] Missing deploy targets file: $TARGETS_FILE" >&2
  exit 1
}

command -v jq >/dev/null 2>&1 || {
  echo "[ERROR] jq is required to validate $TARGETS_FILE" >&2
  exit 1
}

# Schema: non-empty .targets[]; each target needs repo/cwd/command, gets
# branch/target defaults, string-only required_env, and an optional
# healthcheck with numeric tuning fields. (repo, branch, target) triples
# must be unique across the file.
jq -e '
  (.targets | type == "array")
  and (.targets | length > 0)
  and all(.targets[]; (
    (.repo | type == "string" and length > 0)
    and ((.branch // "main") | type == "string" and length > 0)
    and ((.target // "default") | type == "string" and length > 0)
    and (.cwd | type == "string" and length > 0)
    and (.command | type == "array" and length > 0)
    and (all(.command[]; type == "string" and length > 0))
    and ((.required_env // []) | type == "array")
    and (all((.required_env // [])[]?; type == "string" and length > 0))
    and (
      (.healthcheck // null) == null or (
        (.healthcheck.url | type == "string" and length > 0)
        and ((.healthcheck.expect_status // 200) | type == "number")
        and ((.healthcheck.attempts // 1) | type == "number")
        and ((.healthcheck.delay_ms // 0) | type == "number")
        and ((.healthcheck.timeout_ms // 10000) | type == "number")
      )
    )
  ))
  and (
    [.targets[] | [(.repo), (.branch // "main"), (.target // "default")] | join("|")]
    | length == (unique | length)
  )
' "$TARGETS_FILE" >/dev/null

echo "[OK] phoenix deploy targets valid: $TARGETS_FILE"
|
||||
@@ -4,7 +4,7 @@
|
||||
# Before each verify, uses `cast code` on RPC to skip EOAs / empty code (avoids Blockscout
|
||||
# "not a smart contract" noise for addresses like WETH10 when nothing is deployed there).
|
||||
# Set VERIFY_SKIP_BYTECODE_CHECK=1 to attempt forge verify even when code lookup fails or is empty.
|
||||
# Version: 2026-03-26
|
||||
# Version: 2026-04-22 (DODO PMM + TransactionMirror: explicit constructor args + deploy profile)
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
@@ -89,6 +89,8 @@ ADDR_CCIP_SENDER="${CCIP_SENDER:-${ADDR_CCIP_SENDER:-0x105F8A15b819948a891535057
|
||||
ADDR_CCIPWETH10="${CCIPWETH10_BRIDGE_CHAIN138:-${ADDR_CCIPWETH10_BRIDGE:-0xe0E93247376aa097dB308B92e6Ba36bA015535D0}}"
|
||||
ADDR_CCIPWETH9="${CCIPWETH9_BRIDGE_CHAIN138:-${ADDR_CCIPWETH9_BRIDGE:-0x971cD9D156f193df8051E48043C476e53ECd4693}}"
|
||||
ADDR_WETH10="${WETH10_CHAIN138:-0xf4BB2e28688e89fCcE3c0580D37d36A7672E8A9f}"
|
||||
# Used early for AccessControl admin resolution (mirror admin often matches DODO admin on Chain 138).
|
||||
ADDR_TX_MIRROR="${TRANSACTION_MIRROR_ADDRESS:-}"
|
||||
|
||||
should_verify WETH10 && verify_one "$ADDR_WETH10" "WETH10" "contracts/tokens/WETH10.sol:WETH10"
|
||||
should_verify Multicall && verify_one "$ADDR_MULTICALL" "Multicall" "contracts/utils/Multicall.sol:Multicall"
|
||||
@@ -108,5 +110,119 @@ if [[ -d "$ALLTRA" ]]; then
|
||||
should_verify WithdrawalEscrow && { echo "Verifying WithdrawalEscrow at $ADDR_ESCROW..."; (cd "$ALLTRA" && forge verify-contract "$ADDR_ESCROW" "contracts/settlement/WithdrawalEscrow.sol:WithdrawalEscrow" --chain-id 138 --verifier blockscout --verifier-url "$VERIFIER_URL" --rpc-url "$RPC" --flatten 2>&1) && echo " OK" || echo " (skip)"; }
|
||||
fi
|
||||
|
||||
# Optional DODO PMM + TransactionMirror (addresses from smom-dbis-138/.env).
|
||||
# Uses explicit constructor args read from chain (immutables + AccessControl admin via hasRole).
|
||||
# Bytecode match: FOUNDRY_PROFILE=deploy (optimizer 100, via_ir, cancun) — override with VERIFY_CHAIN138_FOUNDRY_PROFILE=default if your deployment used the default profile instead.
|
||||
DEFAULT_ADMIN_ROLE_ZERO="0x0000000000000000000000000000000000000000000000000000000000000000"
|
||||
|
||||
# Resolve DEFAULT_ADMIN_ROLE holder for AccessControl (tries env candidates, then TransactionMirror admin()).
|
||||
# Resolve the DEFAULT_ADMIN_ROLE holder for an AccessControl contract.
# $1 = contract address, $2 = RPC URL. Candidate order: env overrides
# (DODO_INTEGRATION_ADMIN, DODO_PMM_PROVIDER_ADMIN), TransactionMirror admin(),
# then GOVERNANCE_CONTROLLER / ADDR_MULTISIG. Echoes the first candidate for
# which hasRole(DEFAULT_ADMIN_ROLE, cand) is true; returns 1 when none match.
resolve_access_control_admin() {
  local contract="$1"
  local rpc="$2"
  local role="$DEFAULT_ADMIN_ROLE_ZERO"
  local cand adm hr
  local -a cands=()
  [[ -n "${DODO_INTEGRATION_ADMIN:-}" ]] && cands+=("${DODO_INTEGRATION_ADMIN}")
  [[ -n "${DODO_PMM_PROVIDER_ADMIN:-}" ]] && cands+=("${DODO_PMM_PROVIDER_ADMIN}")
  if [[ -n "${ADDR_TX_MIRROR:-}" ]]; then
    adm=$(cast call "${ADDR_TX_MIRROR}" "admin()(address)" --rpc-url "$rpc" 2>/dev/null | tr -d '\n\r \t') || true
    [[ -n "$adm" && "$adm" != "0x0000000000000000000000000000000000000000" ]] && cands+=("$adm")
  fi
  [[ -n "${GOVERNANCE_CONTROLLER:-}" ]] && cands+=("${GOVERNANCE_CONTROLLER}")
  [[ -n "${ADDR_MULTISIG:-}" ]] && cands+=("${ADDR_MULTISIG}")

  # Fix: expanding "${cands[@]}" on an empty array trips `set -u` on bash < 4.4.
  ((${#cands[@]} == 0)) && return 1

  for cand in "${cands[@]}"; do
    [[ -z "$cand" ]] && continue
    hr=$(cast call "$contract" "hasRole(bytes32,address)(bool)" "$role" "$cand" --rpc-url "$rpc" 2>/dev/null | tr -d '\n\r \t') || continue
    if [[ "$hr" == "true" ]]; then
      echo "$cand"
      return 0
    fi
  done
  return 1
}
|
||||
|
||||
# ABI-encode DODOPMMIntegration's 7-address constructor from on-chain state.
# $1 = integration address, $2 = RPC URL, $3 = resolved admin address.
# Prints the hex encoding; returns 1 when an immutable read fails.
dodo_integration_constructor_hex() {
  local int="$1" rpc="$2" admin="$3"
  local getter out
  local -a vals=()
  # Same getters, same order as the constructor: dvm, approve, USDT, USDC,
  # compliant USDT, compliant USDC.
  for getter in dodoVendingMachine dodoApprove officialUSDT officialUSDC compliantUSDT compliantUSDC; do
    out=$(cast call "$int" "${getter}()(address)" --rpc-url "$rpc" 2>/dev/null | tr -d '\n\r \t') || return 1
    vals+=("$out")
  done
  cast abi-encode 'constructor(address,address,address,address,address,address,address)' \
    "$admin" "${vals[@]}" 2>/dev/null | tr -d '\n\r \t'
}
|
||||
|
||||
# ABI-encode DODOPMMProvider's (integration, admin) constructor.
# $1 = provider address, $2 = RPC URL, $3 = resolved admin address.
dodo_provider_constructor_hex() {
  local provider_addr="$1" rpc_url="$2" admin_addr="$3"
  local integration
  integration=$(cast call "$provider_addr" "dodoIntegration()(address)" --rpc-url "$rpc_url" 2>/dev/null | tr -d '\n\r \t') || return 1
  cast abi-encode 'constructor(address,address)' "$integration" "$admin_addr" 2>/dev/null | tr -d '\n\r \t'
}
|
||||
|
||||
# ABI-encode TransactionMirror's single-address constructor (its admin()).
# $1 = mirror address, $2 = RPC URL.
transaction_mirror_constructor_hex() {
  local mirror_addr="$1" rpc_url="$2"
  local admin_addr
  admin_addr=$(cast call "$mirror_addr" "admin()(address)" --rpc-url "$rpc_url" 2>/dev/null | tr -d '\n\r \t') || return 1
  cast abi-encode 'constructor(address)' "$admin_addr" 2>/dev/null | tr -d '\n\r \t'
}
|
||||
|
||||
# Verify one contract on Blockscout with explicit constructor args.
# $1 = address, $2 = forge source path:Name, $3 = display name,
# $4 = abi-encoded constructor args (hex).
# Skips (still exit 0) when the address has no bytecode or the constructor
# encoding is empty; a forge mismatch is reported but not fatal.
verify_one_explicit() {
  local addr="$1" path="$2" name="$3" ctor_hex="$4"
  local profile="${VERIFY_CHAIN138_FOUNDRY_PROFILE:-deploy}"
  echo "Verifying $name at $addr (explicit ctor, FOUNDRY_PROFILE=$profile)..."
  if ! has_contract_bytecode "$addr"; then
    echo " skip: no contract bytecode at $addr"
    return 0
  fi
  if [[ -z "$ctor_hex" ]]; then
    echo " skip: could not build constructor args (cast/RPC failed)"
    return 0
  fi
  local -a forge_args=(
    verify-contract "$addr" "$path"
    --chain-id 138
    --constructor-args "$ctor_hex"
    --verifier blockscout
    --verifier-url "$VERIFIER_URL"
    --rpc-url "$RPC"
    --skip-is-verified-check
  )
  if FOUNDRY_PROFILE="$profile" forge "${forge_args[@]}" 2>&1; then
    echo " OK"
  else
    echo " (skip: mismatch or already verified — try VERIFY_CHAIN138_FOUNDRY_PROFILE=default or confirm deploy profile in foundry.toml)"
  fi
}
|
||||
|
||||
# Optional DODO PMM + TransactionMirror targets; addresses come from
# smom-dbis-138/.env (several historical env names are accepted per contract).
ADDR_DODO_INTEGRATION="${DODO_PMM_INTEGRATION_ADDRESS:-${DODO_PMM_INTEGRATION:-${CHAIN_138_DODO_PMM_INTEGRATION:-}}}"
ADDR_DODO_PROVIDER="${DODO_PMM_PROVIDER_ADDRESS:-${DODO_PMM_PROVIDER:-}}"

if [[ -n "$ADDR_DODO_INTEGRATION" ]] && should_verify DODOPMMIntegration; then
  # AccessControl admin must be probed first — it is a constructor argument.
  admin_i=$(resolve_access_control_admin "$ADDR_DODO_INTEGRATION" "$RPC" || true)
  if [[ -z "$admin_i" ]]; then
    echo "Verifying DODOPMMIntegration at $ADDR_DODO_INTEGRATION — skip: could not resolve AccessControl admin (set DODO_INTEGRATION_ADMIN or TRANSACTION_MIRROR_ADDRESS for hasRole probe)"
  else
    enc=$(dodo_integration_constructor_hex "$ADDR_DODO_INTEGRATION" "$RPC" "$admin_i" || true)
    verify_one_explicit "$ADDR_DODO_INTEGRATION" "contracts/dex/DODOPMMIntegration.sol:DODOPMMIntegration" "DODOPMMIntegration" "${enc:-}"
  fi
fi

if [[ -n "$ADDR_DODO_PROVIDER" ]] && should_verify DODOPMMProvider; then
  admin_p=$(resolve_access_control_admin "$ADDR_DODO_PROVIDER" "$RPC" || true)
  if [[ -z "$admin_p" ]]; then
    echo "Verifying DODOPMMProvider at $ADDR_DODO_PROVIDER — skip: could not resolve AccessControl admin (set DODO_PMM_PROVIDER_ADMIN or TRANSACTION_MIRROR_ADDRESS)"
  else
    enc=$(dodo_provider_constructor_hex "$ADDR_DODO_PROVIDER" "$RPC" "$admin_p" || true)
    verify_one_explicit "$ADDR_DODO_PROVIDER" "contracts/liquidity/providers/DODOPMMProvider.sol:DODOPMMProvider" "DODOPMMProvider" "${enc:-}"
  fi
fi

if [[ -n "$ADDR_TX_MIRROR" ]] && should_verify TransactionMirror; then
  # TransactionMirror's only constructor arg is its admin(), read on-chain.
  enc=$(transaction_mirror_constructor_hex "$ADDR_TX_MIRROR" "$RPC" || true)
  verify_one_explicit "$ADDR_TX_MIRROR" "contracts/mirror/TransactionMirror.sol:TransactionMirror" "TransactionMirror" "${enc:-}"
fi
|
||||
|
||||
# CompliantFiatToken: one deployment per currency with distinct constructor args — verify per token in the Blockscout UI or add scripted entries when addresses are enumerated in env.
|
||||
|
||||
echo ""
|
||||
echo "Done. Check http://${IP_BLOCKSCOUT} or https://explorer.d-bis.org for verification status."
|
||||
|
||||
8
scripts/verify/build-cw-mesh-deployment-matrix.sh
Executable file
8
scripts/verify/build-cw-mesh-deployment-matrix.sh
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/usr/bin/env bash
|
||||
# Read-only matrix: deployment-status.json + Uni V2 pair-discovery JSON -> stdout (+ optional JSON).
|
||||
set -euo pipefail
|
||||
|
||||
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
|
||||
cd "$ROOT"
|
||||
|
||||
python3 scripts/lib/cw_mesh_deployment_matrix.py "$@"
|
||||
452
scripts/verify/build-cw-public-price-table.py
Normal file
452
scripts/verify/build-cw-public-price-table.py
Normal file
@@ -0,0 +1,452 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import math
|
||||
import re
|
||||
import subprocess
|
||||
import time
|
||||
from collections import deque
|
||||
from dataclasses import dataclass
|
||||
from decimal import Decimal, InvalidOperation, getcontext
|
||||
from pathlib import Path
|
||||
|
||||
getcontext().prec = 50
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
DEPLOYMENT_STATUS = ROOT / "cross-chain-pmm-lps" / "config" / "deployment-status.json"
|
||||
UNISWAP_DISCOVERY = ROOT / "reports" / "extraction" / "promod-uniswap-v2-live-pair-discovery-latest.json"
|
||||
JSON_OUT = ROOT / "reports" / "status" / "cw-public-prices-latest.json"
|
||||
DOC_OUT = ROOT / "docs" / "03-deployment" / "CW_PUBLIC_NETWORK_PRICES.md"
|
||||
ROOT_ENV_PATH = ROOT / ".env"
|
||||
SMOM_ENV_PATH = ROOT / "smom-dbis-138" / ".env"
|
||||
ZERO_ADDRESS = "0x0000000000000000000000000000000000000000"
|
||||
UINT_RE = re.compile(r"\b\d+\b")
|
||||
|
||||
CHAIN_CONFIG = {
|
||||
"1": {"rpc_keys": ["ETHEREUM_MAINNET_RPC"]},
|
||||
"10": {"rpc_keys": ["OPTIMISM_RPC_URL", "OPTIMISM_MAINNET_RPC"]},
|
||||
"25": {"rpc_keys": ["CRONOS_RPC_URL", "CRONOS_MAINNET_RPC"]},
|
||||
"56": {"rpc_keys": ["BSC_RPC_URL", "BSC_MAINNET_RPC"]},
|
||||
"100": {"rpc_keys": ["GNOSIS_RPC_URL", "GNOSIS_MAINNET_RPC", "GNOSIS_RPC"]},
|
||||
"137": {"rpc_keys": ["POLYGON_MAINNET_RPC", "POLYGON_RPC_URL"]},
|
||||
"1111": {"rpc_keys": ["WEMIX_RPC_URL", "WEMIX_MAINNET_RPC"]},
|
||||
"8453": {"rpc_keys": ["BASE_RPC_URL", "BASE_MAINNET_RPC"]},
|
||||
"42161": {"rpc_keys": ["ARBITRUM_RPC_URL", "ARBITRUM_MAINNET_RPC"]},
|
||||
"42220": {"rpc_keys": ["CELO_RPC_URL", "CELO_MAINNET_RPC", "CELO_RPC"]},
|
||||
"43114": {"rpc_keys": ["AVALANCHE_RPC_URL", "AVALANCHE_MAINNET_RPC"]},
|
||||
}
|
||||
|
||||
STABLES = {"USDC": Decimal("1"), "USDT": Decimal("1")}
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Edge:
    """One directed price edge src -> dst in the conversion graph.

    Frozen so edges can be shared safely while the graph is assembled.
    """

    src: str             # source token symbol
    dst: str             # destination token symbol
    ratio: Decimal       # units of dst received per 1 unit of src
    venue: str           # e.g. "uniswap_v2"
    path_label: str      # human-readable pair label, e.g. "BASE/QUOTE"
    price_detail: str    # how the ratio was derived
    liquidity_note: str  # free-form liquidity/health context
|
||||
|
||||
def now() -> str:
    """Current UTC time as an ISO-8601 'YYYY-MM-DDTHH:MM:SSZ' string."""
    utc = time.gmtime()
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", utc)
|
||||
|
||||
|
||||
def load_json(path: Path) -> dict:
    """Parse the JSON document at *path* and return the decoded object."""
    with path.open() as handle:
        return json.load(handle)
|
||||
|
||||
|
||||
def write_json(path: Path, payload: dict) -> None:
    """Serialize *payload* as pretty JSON (+ trailing newline), mkdir -p first."""
    path.parent.mkdir(parents=True, exist_ok=True)
    rendered = json.dumps(payload, indent=2)
    path.write_text(rendered + "\n")
||||
|
||||
|
||||
def write_text(path: Path, text: str) -> None:
    """Write *text* to *path* with exactly one trailing newline, mkdir -p first."""
    path.parent.mkdir(parents=True, exist_ok=True)
    normalized = text.rstrip() + "\n"
    path.write_text(normalized)
|
||||
|
||||
|
||||
def load_env_file(path: Path) -> dict[str, str]:
    """Parse a dotenv-style file into a dict.

    Skips blank lines, '#' comments and lines without '='; strips one layer of
    surrounding single/double quotes from values. Returns {} when the file is
    missing.
    """
    parsed: dict[str, str] = {}
    if not path.exists():
        return parsed
    for raw in path.read_text().splitlines():
        stripped = raw.strip()
        if not stripped or stripped.startswith("#") or "=" not in stripped:
            continue
        key, _, value = stripped.partition("=")
        parsed[key.strip()] = value.strip().strip('"').strip("'")
    return parsed
|
||||
|
||||
|
||||
def merged_env_values() -> dict[str, str]:
    """Merge env sources; later sources win: repo .env < smom-dbis-138/.env < load-env.sh env."""
    merged: dict[str, str] = {}
    for source in (
        load_env_file(ROOT_ENV_PATH),
        load_env_file(SMOM_ENV_PATH),
        load_env_from_shell(),
    ):
        merged.update(source)
    return merged
|
||||
|
||||
|
||||
def load_env_from_shell() -> dict[str, str]:
    """Source smom-dbis-138/scripts/load-env.sh in a bash subshell and capture `env`.

    Returns {} when the loader is missing or exits non-zero. Best-effort:
    the line-based parse keeps only the first line of multi-line values.
    """
    loader = ROOT / "smom-dbis-138" / "scripts" / "load-env.sh"
    if not loader.exists():
        return {}
    result = subprocess.run(
        ["bash", "-lc", f"source {loader} >/dev/null 2>&1 && env"],
        text=True,
        capture_output=True,
        timeout=15,
        check=False,
        cwd=ROOT,
    )
    if result.returncode != 0:
        return {}
    env_map: dict[str, str] = {}
    for line in result.stdout.splitlines():
        if "=" not in line:
            continue
        key, _, value = line.partition("=")
        env_map[key.strip()] = value.strip()
    return env_map
|
||||
|
||||
|
||||
def resolve_env_value(key: str, env_values: dict[str, str], seen: set[str] | None = None) -> str:
|
||||
if seen is None:
|
||||
seen = set()
|
||||
if key in seen:
|
||||
return env_values.get(key, "")
|
||||
seen.add(key)
|
||||
value = env_values.get(key, "")
|
||||
if value.startswith("${") and value.endswith("}"):
|
||||
inner = value[2:-1]
|
||||
target = inner.split(":-", 1)[0]
|
||||
fallback = inner.split(":-", 1)[1] if ":-" in inner else ""
|
||||
resolved = resolve_env_value(target, env_values, seen)
|
||||
return resolved or fallback
|
||||
return value.rstrip("\r\n")
|
||||
|
||||
|
||||
def parse_uint(value: str) -> int:
    """Extract the first unsigned integer from cast output.

    Bracketed annotations like '[1.2e18]' are removed first; falls back to the
    first whitespace-delimited all-digit token per line.

    Raises:
        ValueError: when no integer can be found.
    """
    without_brackets = re.sub(r"\[[^\]]*\]", "", value)
    found = UINT_RE.findall(without_brackets)
    if found:
        return int(found[0])
    for line in value.splitlines():
        head = line.strip().split(" ", 1)[0]
        if head.isdigit():
            return int(head)
    raise ValueError(f"could not parse integer from {value!r}")
|
||||
|
||||
|
||||
def parse_uints(value: str, count: int) -> list[int]:
    """Extract the first *count* unsigned integers from cast output.

    Uses the same bracket-stripping as parse_uint; falls back to per-line
    leading digit tokens when the regex pass yields too few.

    Raises:
        ValueError: when fewer than *count* integers are found.
    """
    without_brackets = re.sub(r"\[[^\]]*\]", "", value)
    found = [int(token) for token in UINT_RE.findall(without_brackets)]
    if len(found) < count:
        found = []
        for line in value.splitlines():
            head = line.strip().split(" ", 1)[0]
            if head.isdigit():
                found.append(int(head))
        if len(found) < count:
            raise ValueError(f"expected {count} integers, got {value!r}")
    return found[:count]
|
||||
|
||||
|
||||
def cast_call(rpc_url: str, target: str, signature: str, *args: str) -> str:
    """Run `cast call` against *rpc_url* and return its stripped stdout.

    Raises:
        RuntimeError: on non-zero exit (message from stderr, else stdout).
    """
    argv = ["cast", "call", target, signature, *args, "--rpc-url", rpc_url]
    result = subprocess.run(argv, text=True, capture_output=True, timeout=3, check=False)
    if result.returncode != 0:
        detail = result.stderr.strip() or result.stdout.strip() or "cast call failed"
        raise RuntimeError(detail)
    return result.stdout.strip()
|
||||
|
||||
|
||||
def safe_decimal(value: str | int | float | Decimal | None) -> Decimal | None:
|
||||
if value is None:
|
||||
return None
|
||||
try:
|
||||
return Decimal(str(value))
|
||||
except (InvalidOperation, ValueError):
|
||||
return None
|
||||
|
||||
|
||||
def format_decimal(value: Decimal | None, places: int = 8) -> str:
|
||||
if value is None:
|
||||
return "not found"
|
||||
quant = Decimal(10) ** -places
|
||||
try:
|
||||
rounded = value.quantize(quant)
|
||||
except InvalidOperation:
|
||||
return str(value)
|
||||
return format(rounded, "f")
|
||||
|
||||
|
||||
def normalize_18(raw: int) -> Decimal:
    """Convert an 18-decimals fixed-point integer (wei-style) to a Decimal."""
    scale = Decimal(10) ** 18
    return Decimal(raw) / scale
|
||||
|
||||
|
||||
def rpc_for_chain(chain_id: str, env_values: dict[str, str]) -> str:
    """Pick an RPC URL for *chain_id* from env values; '' when none configured.

    Chain 1 prefers an Infura URL built from INFURA_PROJECT_ID; otherwise the
    first non-empty key from CHAIN_CONFIG[chain_id]['rpc_keys'] wins.
    """
    if chain_id == "1":
        project_id = resolve_env_value("INFURA_PROJECT_ID", env_values)
        if project_id:
            return f"https://mainnet.infura.io/v3/{project_id}"
    for rpc_key in CHAIN_CONFIG.get(chain_id, {}).get("rpc_keys", []):
        candidate = resolve_env_value(rpc_key, env_values)
        if candidate:
            return candidate
    return ""
|
||||
|
||||
|
||||
def build_uniswap_edges(entry: dict) -> list[Edge]:
    """Turn one chain's Uniswap V2 discovery entry into bidirectional price-graph edges.

    Only rows flagged live with a positive `priceQuotePerBase` contribute; each
    such pair yields a base->quote edge at the reserve ratio plus the inverse
    quote->base edge.
    """
    edges: list[Edge] = []
    for row in entry.get("pairsChecked") or []:
        if not row.get("live"):
            continue
        health = row.get("health") or {}
        price = safe_decimal(health.get("priceQuotePerBase"))
        # Zero/negative or unparseable prices cannot seed a usable edge.
        if price is None or price <= 0:
            continue
        base = row["base"]
        quote = row["quote"]
        pair = f"{base}/{quote}"
        addr = row.get("poolAddress") or ""
        reserves = f"base={health.get('baseReserveUnits', '?')}, quote={health.get('quoteReserveUnits', '?')}"
        # Human-readable provenance carried along the path for the report notes.
        liquidity_note = (
            f"Uniswap V2 pair {addr}; healthy={health.get('healthy')}; "
            f"depthOk={health.get('depthOk')}; parityOk={health.get('parityOk')}; {reserves}"
        )
        edges.append(
            Edge(
                src=base,
                dst=quote,
                ratio=price,
                venue="uniswap_v2",
                path_label=pair,
                price_detail=f"reserve ratio from {pair}",
                liquidity_note=liquidity_note,
            )
        )
        # Inverse edge so the graph can be traversed in either direction.
        edges.append(
            Edge(
                src=quote,
                dst=base,
                ratio=Decimal(1) / price,
                venue="uniswap_v2",
                path_label=pair,
                price_detail=f"inverse reserve ratio from {pair}",
                liquidity_note=liquidity_note,
            )
        )
    return edges
|
||||
|
||||
|
||||
def build_pmm_edges(chain: dict, rpc_url: str) -> tuple[list[Edge], list[dict]]:
    """Read live DODO PMM mid prices over RPC and build bidirectional edges.

    Returns (edges, snapshots): snapshots record one dict per pool with either
    the observed midPrice or the error string, so failed reads still show up
    in the report. With no RPC URL, both lists are empty.
    """
    edges: list[Edge] = []
    snapshots: list[dict] = []
    if not rpc_url:
        return edges, snapshots

    for row in chain.get("pmmPools") or []:
        pool = row.get("poolAddress") or ""
        base = row.get("base")
        quote = row.get("quote")
        # Skip placeholder (zero-address) or incompletely described pools.
        if not pool or pool.lower() == ZERO_ADDRESS or not base or not quote:
            continue
        try:
            # getMidPrice() returns an 18-decimal fixed-point quote-per-base price.
            mid_price = normalize_18(parse_uint(cast_call(rpc_url, pool, "getMidPrice()(uint256)")))
        except Exception as exc:
            # Record the failure instead of aborting the whole chain scan.
            snapshots.append(
                {
                    "base": base,
                    "quote": quote,
                    "poolAddress": pool,
                    "venue": row.get("venue", "dodo_pmm"),
                    "error": str(exc),
                }
            )
            continue

        if mid_price <= 0:
            continue

        pair = f"{base}/{quote}"
        liquidity_note = f"DODO PMM {pool}; midPrice={mid_price}"
        edges.append(
            Edge(
                src=base,
                dst=quote,
                ratio=mid_price,
                venue="dodo_pmm",
                path_label=pair,
                price_detail=f"PMM mid price from {pair}",
                liquidity_note=liquidity_note,
            )
        )
        # Inverse edge so pricing can flow from quote back to base.
        edges.append(
            Edge(
                src=quote,
                dst=base,
                ratio=Decimal(1) / mid_price,
                venue="dodo_pmm",
                path_label=pair,
                price_detail=f"inverse PMM mid price from {pair}",
                liquidity_note=liquidity_note,
            )
        )
        snapshots.append(
            {
                "base": base,
                "quote": quote,
                "poolAddress": pool,
                "venue": row.get("venue", "dodo_pmm"),
                "midPrice": str(mid_price),
            }
        )
    return edges, snapshots
|
||||
|
||||
|
||||
def best_prices_for_chain(chain: dict, edges: list[Edge]) -> dict[str, dict]:
    """Derive a USD price for every cW token on a chain via BFS from stable anchors.

    Anchors come from STABLES (tokens pinned to a known USD price); edges are
    quote-per-base ratios. A candidate path replaces the stored one when it
    uses fewer hops, or on equal hops when it includes a dodo_pmm venue
    (PMM-backed paths win ties). Tokens never reached render as "not found".
    """
    # Adjacency list keyed by edge source symbol.
    adjacency: dict[str, list[Edge]] = {}
    for edge in edges:
        adjacency.setdefault(edge.src, []).append(edge)

    best: dict[str, dict] = {}
    queue: deque[tuple[str, Decimal, list[str], list[str], list[str], int]] = deque()

    # Seed the frontier with the anchored stables at hop 0.
    for stable, price in STABLES.items():
        best[stable] = {
            "price": price,
            "steps": [],
            "venues": [],
            "notes": [f"{stable} anchored at 1 USD"],
            "hops": 0,
        }
        queue.append((stable, price, [], [], [f"{stable} anchored at 1 USD"], 0))

    while queue:
        token, usd_price, steps, venues, notes, hops = queue.popleft()
        for edge in adjacency.get(token, []):
            # edge.ratio is dst-per-src, so USD/dst = USD/src divided by ratio.
            next_price = usd_price / edge.ratio
            next_steps = steps + [edge.path_label]
            next_venues = venues + [edge.venue]
            next_notes = notes + [edge.liquidity_note]
            next_hops = hops + 1
            current = best.get(edge.dst)
            should_replace = current is None or next_hops < current["hops"]
            # Tie-break at equal hop count: prefer paths touching a dodo_pmm venue.
            if not should_replace and current is not None and next_hops == current["hops"]:
                current_venue_score = 0 if "dodo_pmm" in current["venues"] else 1
                next_venue_score = 0 if "dodo_pmm" in next_venues else 1
                should_replace = next_venue_score < current_venue_score
            if should_replace:
                best[edge.dst] = {
                    "price": next_price,
                    "steps": next_steps,
                    "venues": next_venues,
                    "notes": next_notes,
                    "hops": next_hops,
                }
                queue.append((edge.dst, next_price, next_steps, next_venues, next_notes, next_hops))

    # Project the BFS result onto the chain's cW token inventory.
    out: dict[str, dict] = {}
    for symbol in sorted((chain.get("cwTokens") or {}).keys()):
        resolution = best.get(symbol)
        if resolution is None:
            out[symbol] = {
                "priceUsd": None,
                "derivedFrom": "not found",
                "sourceType": "not_found",
                "notes": ["No live direct or bridged price path was found from USDC/USDT anchors."],
            }
            continue
        out[symbol] = {
            "priceUsd": str(resolution["price"]),
            "derivedFrom": " -> ".join(resolution["steps"]) if resolution["steps"] else "stable anchor",
            "sourceType": resolution["venues"][0] if resolution["venues"] else "stable_anchor",
            "notes": resolution["notes"],
        }
    return out
|
||||
|
||||
|
||||
def build_report() -> dict:
    """Assemble the per-chain cW price report payload.

    Merges the deployment inventory with the Uniswap discovery snapshot and
    live PMM reads, then prices every cW token per chain. Output carries the
    generation timestamp and input paths for traceability.
    """
    env_values = merged_env_values()
    deployment = load_json(DEPLOYMENT_STATUS)
    discovery = load_json(UNISWAP_DISCOVERY)
    # Discovery entries are keyed by stringified chain id for joining below.
    discovery_by_chain = {str(entry["chain_id"]): entry for entry in discovery.get("entries") or []}

    chains_out: list[dict] = []
    for chain_id, chain in sorted((deployment.get("chains") or {}).items(), key=lambda item: int(item[0])):
        # Chain 138 is excluded — presumably the private network; confirm with ops.
        if int(chain_id) == 138:
            continue
        rpc_url = rpc_for_chain(chain_id, env_values)
        uniswap_edges = build_uniswap_edges(discovery_by_chain.get(chain_id, {}))
        pmm_edges, pmm_snapshots = build_pmm_edges(chain, rpc_url)
        price_rows = best_prices_for_chain(chain, uniswap_edges + pmm_edges)
        chains_out.append(
            {
                "chainId": int(chain_id),
                "network": chain.get("name", ""),
                "activationState": chain.get("activationState", ""),
                "rpcConfigured": bool(rpc_url),
                "prices": price_rows,
                "pmmSnapshots": pmm_snapshots,
            }
        )

    return {
        "generatedAt": now(),
        "inputs": {
            "deploymentStatus": str(DEPLOYMENT_STATUS),
            "uniswapDiscovery": str(UNISWAP_DISCOVERY),
        },
        "chains": chains_out,
    }
|
||||
|
||||
|
||||
def render_markdown(payload: dict) -> str:
    """Render the price report payload as a markdown document.

    Produces a header with provenance bullets followed by one table where the
    chain cell is only printed on a chain's first row; each chain closes with a
    summary row carrying activation state and RPC availability.
    """
    lines = [
        "# cW Public Network Prices",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Deployment inventory: `{payload['inputs']['deploymentStatus']}`",
        f"- Uniswap discovery snapshot: `{payload['inputs']['uniswapDiscovery']}`",
        "- Price convention: USD per 1 token.",
        "- `not found` means the generator could not reach the token from a live USDC/USDT anchor using the current public-pair snapshot plus live PMM mid-price reads.",
        "",
        "| Chain | Token | Price (USD) | Derived From | Source | Notes |",
        "|---|---|---:|---|---|---|",
    ]

    for chain in payload["chains"]:
        first_row = True
        prices = chain["prices"]
        for symbol in sorted(prices.keys()):
            row = prices[symbol]
            # Only the first token row names the chain, to keep the table readable.
            chain_cell = f"`{chain['chainId']}` {chain['network']}" if first_row else ""
            first_row = False
            # Cap notes at two entries to keep rows compact.
            notes = "; ".join(row["notes"][:2])
            lines.append(
                f"| {chain_cell} | `{symbol}` | `{format_decimal(safe_decimal(row['priceUsd']))}` | "
                f"`{row['derivedFrom']}` | `{row['sourceType']}` | {notes} |"
            )
        if prices:
            lines.append(
                f"| | | | | | Activation state: `{chain['activationState'] or 'active'}`; RPC configured: `{chain['rpcConfigured']}` |"
            )
    return "\n".join(lines)
|
||||
|
||||
|
||||
def main() -> None:
    """Build the price report and persist the JSON and markdown artifacts."""
    payload = build_report()
    write_json(JSON_OUT, payload)
    write_text(DOC_OUT, render_markdown(payload))
    # Echo artifact paths so callers/CI can locate the outputs.
    print(JSON_OUT)
    print(DOC_OUT)
|
||||
|
||||
|
||||
# Script entry point: generate and write the cW public price report.
if __name__ == "__main__":
    main()
|
||||
763
scripts/verify/build-cw-public-repeg-plan.py
Normal file
763
scripts/verify/build-cw-public-repeg-plan.py
Normal file
@@ -0,0 +1,763 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import annotations
|
||||
|
||||
import json
import re
import shlex
import subprocess
import time
from decimal import Decimal, InvalidOperation, getcontext
from pathlib import Path
|
||||
|
||||
# High precision so multi-hop Decimal arithmetic does not lose significance.
getcontext().prec = 50

# Repository root: two levels up from scripts/verify/.
ROOT = Path(__file__).resolve().parents[2]
# Inputs: latest price report, deployment inventory, Uniswap discovery snapshot.
PRICE_REPORT = ROOT / "reports" / "status" / "cw-public-prices-latest.json"
DEPLOYMENT_STATUS = ROOT / "cross-chain-pmm-lps" / "config" / "deployment-status.json"
UNISWAP_DISCOVERY = ROOT / "reports" / "extraction" / "promod-uniswap-v2-live-pair-discovery-latest.json"
# Dotenv files merged for RPC/key resolution (root first, then smom overrides).
ROOT_ENV_PATH = ROOT / ".env"
SMOM_ENV_PATH = ROOT / "smom-dbis-138" / ".env"
# Outputs: machine-readable plan plus operator-facing markdown.
JSON_OUT = ROOT / "reports" / "status" / "cw-public-repeg-plan-latest.json"
DOC_OUT = ROOT / "docs" / "03-deployment" / "CW_PUBLIC_NETWORK_REPEG_PLAN.md"
# Patterns for scraping `cast` output: 20-byte hex addresses and bare unsigned ints.
ADDRESS_RE = re.compile(r"0x[a-fA-F0-9]{40}")
UINT_RE = re.compile(r"\b\d+\b")

# Target USD peg per cW token. Non-USD FX pegs (EUR~1.08, GBP~1.27, XAU~3200, ...)
# are static snapshots — presumably refreshed manually; verify before acting on them.
TARGETS = {
    "cWUSDC": Decimal("1"),
    "cWUSDT": Decimal("1"),
    "cWAUSDT": Decimal("1"),
    "cWUSDW": Decimal("1"),
    "cWEURC": Decimal("1.08"),
    "cWEURT": Decimal("1.08"),
    "cWGBPC": Decimal("1.27"),
    "cWGBPT": Decimal("1.27"),
    "cWAUDC": Decimal("0.66"),
    "cWJPYC": Decimal("0.0067"),
    "cWCHFC": Decimal("1.11"),
    "cWCADC": Decimal("0.74"),
    "cWXAUC": Decimal("3200"),
    "cWXAUT": Decimal("3200"),
}

# Deviation (percent) beyond which a pool/pair is flagged as off-peg.
DODO_THRESHOLD_PCT = Decimal("1")
UNISWAP_THRESHOLD_PCT = Decimal("1")

# Per-chain env var names holding the DODO PMM integration contract address.
DODO_ENV_KEYS = {
    1: ["DODO_PMM_INTEGRATION_MAINNET"],
    10: ["DODO_PMM_INTEGRATION_OPTIMISM"],
    25: ["DODO_PMM_INTEGRATION_CRONOS"],
    56: ["DODO_PMM_INTEGRATION_BSC"],
    100: ["DODO_PMM_INTEGRATION_GNOSIS"],
    137: ["DODO_PMM_INTEGRATION_POLYGON"],
    42220: ["DODO_PMM_INTEGRATION_CELO"],
    43114: ["DODO_PMM_INTEGRATION_AVALANCHE"],
    8453: ["DODO_PMM_INTEGRATION_BASE"],
    42161: ["DODO_PMM_INTEGRATION_ARBITRUM"],
}

# Per-chain env var names tried in order for an RPC endpoint URL.
RPC_ENV_KEYS = {
    1: ["ETHEREUM_MAINNET_RPC"],
    10: ["OPTIMISM_RPC_URL", "OPTIMISM_MAINNET_RPC"],
    25: ["CRONOS_RPC_URL", "CRONOS_MAINNET_RPC"],
    56: ["BSC_RPC_URL", "BSC_MAINNET_RPC"],
    100: ["GNOSIS_RPC_URL", "GNOSIS_MAINNET_RPC", "GNOSIS_RPC"],
    137: ["POLYGON_MAINNET_RPC", "POLYGON_RPC_URL"],
    42220: ["CELO_RPC_URL", "CELO_MAINNET_RPC", "CELO_RPC"],
    43114: ["AVALANCHE_RPC_URL", "AVALANCHE_MAINNET_RPC"],
    8453: ["BASE_RPC_URL", "BASE_MAINNET_RPC"],
    42161: ["ARBITRUM_RPC_URL", "ARBITRUM_MAINNET_RPC"],
}
|
||||
|
||||
|
||||
def now() -> str:
    """Current UTC time as an ISO-8601 timestamp with a 'Z' suffix."""
    utc_struct = time.gmtime()
    return time.strftime("%Y-%m-%dT%H:%M:%SZ", utc_struct)
|
||||
|
||||
|
||||
def load_json(path: Path) -> dict:
    """Read and parse a JSON file; propagates I/O and decode errors."""
    text = path.read_text()
    return json.loads(text)
|
||||
|
||||
|
||||
def load_env_file(path: Path) -> dict[str, str]:
    """Parse KEY=VALUE pairs from a dotenv-style file.

    A missing file yields {}. Blank lines, '#' comments, and lines without '='
    are skipped; keys/values are whitespace-trimmed and surrounding single or
    double quotes are stripped from values.
    """
    parsed: dict[str, str] = {}
    if not path.exists():
        return parsed
    for raw_line in path.read_text().splitlines():
        stripped = raw_line.strip()
        if not stripped or stripped.startswith("#") or "=" not in stripped:
            continue
        key, _, value = stripped.partition("=")
        parsed[key.strip()] = value.strip().strip('"').strip("'")
    return parsed
|
||||
|
||||
|
||||
def merged_env_values() -> dict[str, str]:
    """Merge env sources; later sources win: root .env < smom .env < shell loader."""
    merged: dict[str, str] = {}
    for source in (
        load_env_file(ROOT_ENV_PATH),
        load_env_file(SMOM_ENV_PATH),
        load_env_from_shell(),
    ):
        merged.update(source)
    return merged
|
||||
|
||||
|
||||
def load_env_from_shell() -> dict[str, str]:
    """Capture environment variables exported by the smom load-env.sh loader.

    Sources the loader in a login bash subshell and parses the resulting `env`
    output into a dict. Returns {} when the loader is missing or sourcing fails.

    Fix: the loader path is shell-quoted before interpolation into the `bash -lc`
    command string, so repository paths containing spaces or shell metacharacters
    no longer break the `source` invocation.
    """
    loader = ROOT / "smom-dbis-138" / "scripts" / "load-env.sh"
    if not loader.exists():
        return {}
    proc = subprocess.run(
        [
            "bash",
            "-lc",
            # shlex.quote guards against spaces/metacharacters in ROOT.
            f"source {shlex.quote(str(loader))} >/dev/null 2>&1 && env",
        ],
        text=True,
        capture_output=True,
        timeout=15,
        check=False,
        cwd=ROOT,
    )
    if proc.returncode != 0:
        return {}
    values: dict[str, str] = {}
    for raw_line in proc.stdout.splitlines():
        # `env` emits KEY=VALUE per line; skip anything else (e.g. multiline values).
        if "=" not in raw_line:
            continue
        key, value = raw_line.split("=", 1)
        values[key.strip()] = value.strip()
    return values
|
||||
|
||||
|
||||
def resolve_env_value(key: str, env_values: dict[str, str], seen: set[str] | None = None) -> str:
|
||||
if seen is None:
|
||||
seen = set()
|
||||
if key in seen:
|
||||
return env_values.get(key, "")
|
||||
seen.add(key)
|
||||
value = env_values.get(key, "")
|
||||
if value.startswith("${") and value.endswith("}"):
|
||||
inner = value[2:-1]
|
||||
target = inner.split(":-", 1)[0]
|
||||
fallback = inner.split(":-", 1)[1] if ":-" in inner else ""
|
||||
resolved = resolve_env_value(target, env_values, seen)
|
||||
return resolved or fallback
|
||||
return value.rstrip("\r\n")
|
||||
|
||||
|
||||
def cast_call(rpc_url: str, target: str, signature: str, *args: str) -> str:
    """Run `cast call` against `target` on the given RPC URL and return trimmed stdout.

    Raises RuntimeError with the best available diagnostic (stderr, then stdout,
    then a generic message) when cast exits non-zero. A subprocess.TimeoutExpired
    propagates if the call exceeds 8 seconds.
    """
    command = ["cast", "call", target, signature, *args, "--rpc-url", rpc_url]
    result = subprocess.run(command, text=True, capture_output=True, timeout=8, check=False)
    if result.returncode == 0:
        return result.stdout.strip()
    diagnostic = result.stderr.strip() or result.stdout.strip() or "cast call failed"
    raise RuntimeError(diagnostic)
|
||||
|
||||
|
||||
def parse_uint(value: str) -> int:
    """Extract the first unsigned integer from `cast` output.

    Bracketed annotations (e.g. the `[1.2e18]` scientific echo) are removed
    before matching. Falls back to scanning the first whitespace token on each
    raw line; raises ValueError when no integer is present.
    """
    cleaned = re.sub(r"\[[^\]]*\]", "", value)
    found = UINT_RE.findall(cleaned)
    if found:
        return int(found[0])
    for line in value.splitlines():
        head = line.strip().split(" ", 1)[0]
        if head.isdigit():
            return int(head)
    raise ValueError(f"could not parse integer from {value!r}")
|
||||
|
||||
|
||||
def parse_uints(value: str, count: int) -> list[int]:
    """Extract at least `count` unsigned integers from `cast` output.

    Bracketed annotations are stripped before the regex pass; if that yields
    too few values, the first token on each raw line is tried instead.
    Raises ValueError when fewer than `count` integers are found.
    """
    cleaned = re.sub(r"\[[^\]]*\]", "", value)
    found = [int(text) for text in UINT_RE.findall(cleaned)]
    if len(found) >= count:
        return found[:count]
    found = []
    for line in value.splitlines():
        head = line.strip().split(" ", 1)[0]
        if head.isdigit():
            found.append(int(head))
    if len(found) < count:
        raise ValueError(f"expected {count} integers from {value!r}")
    return found[:count]
|
||||
|
||||
|
||||
def normalize_units(raw: int, decimals: int) -> Decimal:
    """Convert a raw fixed-point integer to token units given its decimal count."""
    scale = Decimal(10) ** decimals
    return Decimal(raw) / scale
|
||||
|
||||
|
||||
def write_json(path: Path, payload: dict) -> None:
    """Write `payload` as 2-space-indented JSON with a trailing newline, creating parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    serialized = json.dumps(payload, indent=2)
    path.write_text(serialized + "\n")
|
||||
|
||||
|
||||
def write_text(path: Path, text: str) -> None:
    """Write `text` normalized to exactly one trailing newline, creating parents."""
    path.parent.mkdir(parents=True, exist_ok=True)
    normalized = text.rstrip()
    path.write_text(normalized + "\n")
|
||||
|
||||
|
||||
def safe_decimal(value: str | None) -> Decimal | None:
|
||||
if value is None:
|
||||
return None
|
||||
try:
|
||||
return Decimal(value)
|
||||
except (InvalidOperation, ValueError):
|
||||
return None
|
||||
|
||||
|
||||
def pct_deviation(current: Decimal, target: Decimal) -> Decimal:
    """Signed percentage deviation of `current` from `target` (positive = above peg)."""
    gap = current - target
    return (gap / target) * Decimal("100")
|
||||
|
||||
|
||||
def format_decimal(value: Decimal | None, places: int = 8) -> str:
|
||||
if value is None:
|
||||
return "not found"
|
||||
quant = Decimal(10) ** -places
|
||||
try:
|
||||
value = value.quantize(quant)
|
||||
except InvalidOperation:
|
||||
pass
|
||||
return format(value, "f")
|
||||
|
||||
|
||||
def format_pct(value: Decimal) -> str:
    """Format a percentage to 2 places with an explicit '+' for non-negative values."""
    prefix = "" if value < 0 else "+"
    return f"{prefix}{format_decimal(value, 2)}%"
|
||||
|
||||
|
||||
def first_address(texts: list[str]) -> str | None:
    """Return the first 0x-prefixed 40-hex-digit address found in any string, else None."""
    for candidate in texts:
        found = ADDRESS_RE.search(candidate)
        if found:
            return found.group(0)
    return None
|
||||
|
||||
|
||||
def command_block(lines: list[str]) -> str:
    """Join operator shell commands into one newline-delimited block."""
    joined = "\n".join(lines)
    return joined
|
||||
|
||||
|
||||
def derive_deployer(env_values: dict[str, str]) -> str:
    """Derive the deployer address from PRIVATE_KEY/KEEPER_PRIVATE_KEY via `cast wallet address`.

    Returns "" when no usable key is configured, the key is an unresolved
    ${...} placeholder, or the cast invocation fails.
    NOTE(review): the key is passed on argv and may be visible via `ps` while
    cast runs — consider piping via stdin if cast supports it.
    """
    key = resolve_env_value("PRIVATE_KEY", env_values) or resolve_env_value("KEEPER_PRIVATE_KEY", env_values)
    if not key or "${" in key:
        return ""
    proc = subprocess.run(
        ["cast", "wallet", "address", "--private-key", key],
        text=True,
        capture_output=True,
        timeout=8,
        check=False,
    )
    if proc.returncode != 0:
        return ""
    found = ADDRESS_RE.search(proc.stdout.strip())
    return found.group(0) if found else ""
|
||||
|
||||
|
||||
def build_pair_to_pool(chain: dict) -> dict[str, list[dict]]:
    """Index PMM and Uniswap V2 pool rows by their 'BASE/QUOTE' pair label.

    PMM rows are indexed first, preserving the original lookup preference.
    """
    indexed: dict[str, list[dict]] = {}
    all_rows = list(chain.get("pmmPools") or []) + list(chain.get("uniswapV2Pools") or [])
    for row in all_rows:
        label = f"{row['base']}/{row['quote']}"
        indexed.setdefault(label, []).append(row)
    return indexed
|
||||
|
||||
|
||||
def select_pool(pair_rows: list[dict], source_type: str) -> dict:
    """Pick the pool row best matching the venue hint; fall back to the first row.

    For "dodo_pmm", a row counts as PMM when its venue says so, its poolType is
    "stable_quote", or it carries a PMM "k" parameter. Empty input yields {}.
    """
    if not pair_rows:
        return {}
    if source_type == "dodo_pmm":
        for candidate in pair_rows:
            looks_pmm = (
                candidate.get("venue") == "dodo_pmm"
                or candidate.get("poolType") == "stable_quote"
                or "k" in candidate
            )
            if looks_pmm:
                return candidate
    elif source_type == "uniswap_v2":
        for candidate in pair_rows:
            if candidate.get("venue") == "uniswap_v2_pair":
                return candidate
    return pair_rows[0]
|
||||
|
||||
|
||||
def dodo_action(pair: str, current: Decimal, target: Decimal) -> str:
    """One-line PMM repeg instruction: below peg buys base, at/above peg sells base."""
    if current >= target:
        return f"Fund base side and sell base into `{pair}` until `getMidPrice()` returns target."
    return f"Fund quote side and buy base through `{pair}` until `getMidPrice()` returns target."
|
||||
|
||||
|
||||
def uniswap_action(pair: str, target: Decimal) -> str:
    """One-line Uniswap repeg instruction: drop bad LP and reseed at the target ratio."""
    return (
        f"Withdraw or ignore bad LP, then reseed `{pair}` through the chain router "
        f"at target ratio `{target}` with balanced reserves."
    )
|
||||
|
||||
|
||||
def integration_for_chain(chain_id: int, env_values: dict[str, str]) -> str:
    """Return the configured DODO PMM integration address for a chain, or ""."""
    for env_key in DODO_ENV_KEYS.get(chain_id, []):
        candidate = resolve_env_value(env_key, env_values)
        if candidate:
            return candidate
    return ""
|
||||
|
||||
|
||||
def rpc_for_chain(chain_id: int, env_values: dict[str, str]) -> str:
    """Resolve an RPC endpoint URL for `chain_id` from the merged environment.

    Ethereum mainnet (1) prefers a dedicated Infura URL when INFURA_PROJECT_ID
    is set; otherwise the chain's configured env keys are tried in order.
    Returns "" when nothing is configured.
    """
    if chain_id == 1:
        project_id = resolve_env_value("INFURA_PROJECT_ID", env_values)
        if project_id:
            return f"https://mainnet.infura.io/v3/{project_id}"
    for env_key in RPC_ENV_KEYS.get(chain_id, []):
        candidate = resolve_env_value(env_key, env_values)
        if candidate:
            return candidate
    return ""
|
||||
|
||||
|
||||
def rpc_var_for_chain(chain_id: int) -> str:
    """Name of the preferred RPC env var for a chain; generic "RPC_URL" when unknown."""
    keys = RPC_ENV_KEYS.get(chain_id, ["RPC_URL"])
    return keys[0]
|
||||
|
||||
|
||||
def dodo_commands(chain_id: int, pair: str, pool_address: str, current: Decimal, target: Decimal, chain_info: dict, env_values: dict[str, str]) -> dict:
    """Build a copy-pasteable `cast send` command block to push a PMM pool to peg.

    Below target: approve and swap quote in to buy base ("quote_in_buy_base").
    At/above target: approve and swap base in to sell base ("base_in_sell_base").
    Unresolvable addresses are emitted as REPLACE_* placeholders so the operator
    can fill them in; trade sizes are always placeholders.
    """
    base_symbol, quote_symbol = pair.split("/")
    integration = integration_for_chain(chain_id, env_values)
    rpc_var = rpc_var_for_chain(chain_id)
    # Token addresses: prefer the chain's cW token registry, then anchor tokens.
    base_address = (chain_info.get("cwTokens") or {}).get(base_symbol) or (chain_info.get("anchorAddresses") or {}).get(base_symbol) or "REPLACE_BASE_TOKEN"
    quote_address = (chain_info.get("cwTokens") or {}).get(quote_symbol) or (chain_info.get("anchorAddresses") or {}).get(quote_symbol) or "REPLACE_QUOTE_TOKEN"
    if current < target:
        return {
            "direction": "quote_in_buy_base",
            "commands": command_block(
                [
                    "source smom-dbis-138/scripts/load-env.sh >/dev/null",
                    f"export RPC_URL=\"${{{rpc_var}}}\"",
                    f"export INTEGRATION=\"{integration or 'REPLACE_DODO_INTEGRATION'}\"",
                    f"export POOL=\"{pool_address or 'REPLACE_POOL'}\"",
                    f"export BASE_TOKEN=\"{base_address}\"",
                    f"export QUOTE_TOKEN=\"{quote_address}\"",
                    "export QUOTE_IN_RAW=REPLACE_QUOTE_IN_RAW",
                    "export MIN_BASE_OUT_RAW=REPLACE_MIN_BASE_OUT_RAW",
                    'cast send "$QUOTE_TOKEN" \'approve(address,uint256)(bool)\' "$INTEGRATION" "$QUOTE_IN_RAW" \\',
                    ' --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
                    'cast send "$INTEGRATION" \'swapExactIn(address,address,uint256,uint256)\' "$POOL" "$QUOTE_TOKEN" "$QUOTE_IN_RAW" "$MIN_BASE_OUT_RAW" \\',
                    ' --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
                ]
            ),
        }
    return {
        "direction": "base_in_sell_base",
        "commands": command_block(
            [
                "source smom-dbis-138/scripts/load-env.sh >/dev/null",
                f"export RPC_URL=\"${{{rpc_var}}}\"",
                f"export INTEGRATION=\"{integration or 'REPLACE_DODO_INTEGRATION'}\"",
                f"export POOL=\"{pool_address or 'REPLACE_POOL'}\"",
                f"export BASE_TOKEN=\"{base_address}\"",
                f"export QUOTE_TOKEN=\"{quote_address}\"",
                "export BASE_IN_RAW=REPLACE_BASE_IN_RAW",
                "export MIN_QUOTE_OUT_RAW=REPLACE_MIN_QUOTE_OUT_RAW",
                'cast send "$BASE_TOKEN" \'approve(address,uint256)(bool)\' "$INTEGRATION" "$BASE_IN_RAW" \\',
                ' --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
                'cast send "$INTEGRATION" \'swapExactIn(address,address,uint256,uint256)\' "$POOL" "$BASE_TOKEN" "$BASE_IN_RAW" "$MIN_QUOTE_OUT_RAW" \\',
                ' --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
            ]
        ),
    }
|
||||
|
||||
|
||||
def uniswap_commands(chain_id: int, pair: str, pool_address: str, chain_info: dict) -> dict:
    """Build a copy-pasteable `cast send` command block to reseed a Uniswap V2 pair.

    Approves both tokens to the router and calls addLiquidity with identical
    desired/min amounts. Amounts are REPLACE_* placeholders; the router falls
    back to a CHAIN_<id>_UNISWAP_V2_ROUTER env reference when the inventory
    row carries no routerAddress.
    """
    token_a, token_b = pair.split("/")
    rpc_var = rpc_var_for_chain(chain_id)
    router = ""
    # Find the router recorded for this exact pair, if any.
    for row in chain_info.get("uniswapV2Pools") or []:
        if f"{row['base']}/{row['quote']}" == pair and row.get("routerAddress"):
            router = row["routerAddress"]
            break
    token_a_address = (chain_info.get("cwTokens") or {}).get(token_a) or (chain_info.get("anchorAddresses") or {}).get(token_a) or "REPLACE_TOKEN_A"
    token_b_address = (chain_info.get("cwTokens") or {}).get(token_b) or (chain_info.get("anchorAddresses") or {}).get(token_b) or "REPLACE_TOKEN_B"
    return {
        "commands": command_block(
            [
                "source smom-dbis-138/scripts/load-env.sh >/dev/null",
                f"export RPC_URL=\"${{{rpc_var}}}\"",
                f"export ROUTER=\"{router or f'${{CHAIN_{chain_id}_UNISWAP_V2_ROUTER}}'}\"",
                f"export PAIR=\"{pool_address}\"",
                f"export TOKEN_A=\"{token_a_address}\"",
                f"export TOKEN_B=\"{token_b_address}\"",
                "export AMOUNT_A_RAW=REPLACE_AMOUNT_A_RAW",
                "export AMOUNT_B_RAW=REPLACE_AMOUNT_B_RAW",
                'export DEADLINE="$(( $(date +%s) + 3600 ))"',
                'export SIGNER="$(cast wallet address --private-key "$PRIVATE_KEY")"',
                '# Optional: remove bad LP first if you control LP tokens for this pair.',
                'cast send "$TOKEN_A" \'approve(address,uint256)(bool)\' "$ROUTER" "$AMOUNT_A_RAW" \\',
                ' --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
                'cast send "$TOKEN_B" \'approve(address,uint256)(bool)\' "$ROUTER" "$AMOUNT_B_RAW" \\',
                ' --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
                'cast send "$ROUTER" \'addLiquidity(address,address,uint256,uint256,uint256,uint256,address,uint256)\' \\',
                ' "$TOKEN_A" "$TOKEN_B" "$AMOUNT_A_RAW" "$AMOUNT_B_RAW" "$AMOUNT_A_RAW" "$AMOUNT_B_RAW" "$SIGNER" "$DEADLINE" \\',
                ' --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
            ]
        ),
    }
|
||||
|
||||
|
||||
def dodo_sizing_and_balance(
    chain_id: int,
    pair: str,
    pool_address: str,
    current: Decimal,
    target: Decimal,
    chain_info: dict,
    env_values: dict[str, str],
    deployer: str,
) -> dict:
    """Estimate the trade size needed to repeg a PMM pool and check deployer funding.

    Reads token decimals and vault reserves over RPC, then applies a simple
    reserve-parity heuristic: top up the funding side until reserves match the
    target quote-per-base ratio. When `deployer` is set, its balance of the
    funding token is compared against the suggestion. On any RPC failure the
    returned dict carries an "error" key instead of sizing fields.
    """
    rpc_url = rpc_for_chain(chain_id, env_values)
    base_symbol, quote_symbol = pair.split("/")
    base_address = (chain_info.get("cwTokens") or {}).get(base_symbol) or (chain_info.get("anchorAddresses") or {}).get(base_symbol) or ""
    quote_address = (chain_info.get("cwTokens") or {}).get(quote_symbol) or (chain_info.get("anchorAddresses") or {}).get(quote_symbol) or ""
    result = {
        "heuristic": "suggested_raw = simple reserve-parity top-up on the funding side using target quote-per-base",
        "deployer": deployer or None,
        "rpcConfigured": bool(rpc_url),
    }
    if not rpc_url or not base_address or not quote_address or not pool_address:
        result["error"] = "missing rpc or token/pool address"
        return result
    try:
        base_decimals = parse_uint(cast_call(rpc_url, base_address, "decimals()(uint8)"))
        quote_decimals = parse_uint(cast_call(rpc_url, quote_address, "decimals()(uint8)"))
        base_reserve_raw, quote_reserve_raw = parse_uints(
            cast_call(rpc_url, pool_address, "getVaultReserve()(uint256,uint256)"), 2
        )
    except Exception as exc:
        result["error"] = str(exc)
        return result

    # Relative distance from peg; reported for operator context only.
    gap_fraction = abs(target - current) / target
    base_units = normalize_units(base_reserve_raw, base_decimals)
    quote_units = normalize_units(quote_reserve_raw, quote_decimals)
    if current < target:
        # Below peg: fund the quote side until quote = base * target.
        funding_symbol = quote_symbol
        funding_address = quote_address
        funding_decimals = quote_decimals
        target_quote_units = base_units * target
        top_up_units = max(target_quote_units - quote_units, Decimal(0))
        suggested_raw = int((top_up_units * (Decimal(10) ** quote_decimals)).to_integral_value())
    else:
        # At/above peg: fund the base side until base = quote / target.
        funding_symbol = base_symbol
        funding_address = base_address
        funding_decimals = base_decimals
        target_base_units = quote_units / target
        top_up_units = max(target_base_units - base_units, Decimal(0))
        suggested_raw = int((top_up_units * (Decimal(10) ** base_decimals)).to_integral_value())

    deployer_balance_raw = None
    if deployer:
        try:
            deployer_balance_raw = parse_uint(cast_call(rpc_url, funding_address, "balanceOf(address)(uint256)", deployer))
        except Exception as exc:
            # Balance lookup failures are reported but do not void the sizing.
            result["deployerBalanceError"] = str(exc)

    result.update(
        {
            "baseToken": {"symbol": base_symbol, "address": base_address, "decimals": base_decimals},
            "quoteToken": {"symbol": quote_symbol, "address": quote_address, "decimals": quote_decimals},
            "poolReserves": {
                "baseRaw": str(base_reserve_raw),
                "baseUnits": str(normalize_units(base_reserve_raw, base_decimals)),
                "quoteRaw": str(quote_reserve_raw),
                "quoteUnits": str(normalize_units(quote_reserve_raw, quote_decimals)),
            },
            "gapFraction": str(gap_fraction),
            "fundingSide": funding_symbol,
            "suggestedTradeRaw": str(suggested_raw),
            "suggestedTradeUnits": str(normalize_units(suggested_raw, funding_decimals)),
        }
    )
    if deployer_balance_raw is not None:
        shortfall = max(suggested_raw - deployer_balance_raw, 0)
        result["deployerFundingCheck"] = {
            "token": funding_symbol,
            "balanceRaw": str(deployer_balance_raw),
            "balanceUnits": str(normalize_units(deployer_balance_raw, funding_decimals)),
            "shortfallRaw": str(shortfall),
            "shortfallUnits": str(normalize_units(shortfall, funding_decimals)),
            "covered": shortfall == 0,
        }
    return result
|
||||
|
||||
|
||||
def live_uniswap_ratio(
    chain_id: int,
    pair_address: str,
    base_symbol: str,
    quote_symbol: str,
    chain_info: dict,
    env_values: dict[str, str],
) -> Decimal | None:
    """Read a live quote-per-base reserve ratio from a Uniswap V2 pair.

    Resolves both token addresses from the chain inventory, matches them to the
    pair's token0/token1 to orient the reserves, and normalizes by each token's
    decimals. Returns None when the RPC/addresses are missing, any read fails,
    the tokens do not match the pair, or the base reserve is zero.
    """
    rpc_url = rpc_for_chain(chain_id, env_values)
    if not rpc_url or not pair_address:
        return None
    # Lower-cased so address comparison against token0/token1 is case-insensitive.
    base_address = ((chain_info.get("cwTokens") or {}).get(base_symbol) or (chain_info.get("anchorAddresses") or {}).get(base_symbol) or "").lower()
    quote_address = ((chain_info.get("cwTokens") or {}).get(quote_symbol) or (chain_info.get("anchorAddresses") or {}).get(quote_symbol) or "").lower()
    if not base_address or not quote_address:
        return None
    try:
        token0 = cast_call(rpc_url, pair_address, "token0()(address)").strip().lower()
        token1 = cast_call(rpc_url, pair_address, "token1()(address)").strip().lower()
        # getReserves also returns the last-update timestamp; it is discarded.
        reserve0_raw, reserve1_raw, _ = parse_uints(cast_call(rpc_url, pair_address, "getReserves()((uint112,uint112,uint32))"), 3)
        decimals0 = parse_uint(cast_call(rpc_url, token0, "decimals()(uint8)"))
        decimals1 = parse_uint(cast_call(rpc_url, token1, "decimals()(uint8)"))
    except Exception:
        # Any RPC/parse failure means no live reading; caller falls back to snapshot.
        return None

    reserve_by_token = {
        token0: normalize_units(reserve0_raw, decimals0),
        token1: normalize_units(reserve1_raw, decimals1),
    }
    base_units = reserve_by_token.get(base_address)
    quote_units = reserve_by_token.get(quote_address)
    # Zero base reserve would divide by zero; unmatched tokens yield None.
    if base_units in (None, Decimal(0)) or quote_units is None:
        return None
    return quote_units / base_units
|
||||
|
||||
|
||||
def build_payload() -> dict:
    """Assemble the repeg-plan payload from the price report, inventory, and discovery.

    Produces three finding lists:
    - offPegDodoPools: directly-priced tokens deviating beyond DODO_THRESHOLD_PCT,
      with operator commands and sizing.
    - offPegUniswapPairs: whitelisted stable/stable pairs whose live (or snapshot)
      ratio deviates beyond UNISWAP_THRESHOLD_PCT.
    - unpricedTargets: tokens with a peg target but no live price path.
    """
    env_values = merged_env_values()
    deployer = derive_deployer(env_values)
    prices = load_json(PRICE_REPORT)
    deployment = load_json(DEPLOYMENT_STATUS)
    discovery = load_json(UNISWAP_DISCOVERY)

    dodo_findings: list[dict] = []
    uniswap_findings: list[dict] = []
    unpriced: list[dict] = []

    for chain in prices.get("chains") or []:
        chain_id = str(chain["chainId"])
        chain_info = deployment["chains"].get(chain_id, {})
        pair_map = build_pair_to_pool(chain_info)
        for symbol, row in sorted(chain.get("prices", {}).items()):
            target = TARGETS.get(symbol)
            current = safe_decimal(row.get("priceUsd"))
            if current is None:
                # Token has a peg target but no price path: flag for follow-up.
                if target is not None:
                    unpriced.append(
                        {
                            "chainId": chain["chainId"],
                            "network": chain["network"],
                            "token": symbol,
                            "targetUsd": str(target),
                            "reason": "no live price path found from current public snapshot",
                        }
                    )
                continue
            if target is None:
                continue
            deviation = pct_deviation(current, target)
            if abs(deviation) <= DODO_THRESHOLD_PCT:
                continue
            derived_from = row["derivedFrom"]
            if " -> " in derived_from:
                # Derived route; fix the direct broken pool(s) below in the Uniswap section.
                continue
            pool = select_pool(pair_map.get(derived_from, []), row["sourceType"])
            pool_address = pool.get("poolAddress") or first_address(row.get("notes", []))
            dodo_findings.append(
                {
                    "chainId": chain["chainId"],
                    "network": chain["network"],
                    "token": symbol,
                    "pair": derived_from,
                    "poolAddress": pool_address,
                    "venue": row["sourceType"],
                    "currentUsd": str(current),
                    "targetUsd": str(target),
                    "deviationPct": str(deviation),
                    "action": dodo_action(derived_from, current, target),
                    "operator": dodo_commands(chain["chainId"], derived_from, pool_address or "", current, target, chain_info, env_values),
                    "sizing": dodo_sizing_and_balance(chain["chainId"], derived_from, pool_address or "", current, target, chain_info, env_values, deployer),
                }
            )

    for entry in discovery.get("entries") or []:
        chain_info = deployment["chains"].get(str(entry["chain_id"]), {})
        for row in entry.get("pairsChecked") or []:
            if not row.get("live"):
                continue
            pair = f"{row['base']}/{row['quote']}"
            # Stable/stable pairs are expected at a 1:1 reserve ratio.
            target = Decimal("1")
            current = live_uniswap_ratio(entry["chain_id"], row["poolAddress"], row["base"], row["quote"], chain_info, env_values)
            if current is None:
                # Fall back to the discovery snapshot when the live read fails.
                current = safe_decimal((row.get("health") or {}).get("priceQuotePerBase"))
            if current is None:
                continue
            deviation = pct_deviation(current, target)
            if abs(deviation) <= UNISWAP_THRESHOLD_PCT:
                continue
            # Only the whitelisted stable/stable pairs are actionable here.
            if pair not in ("cWUSDT/cWUSDC", "cWAUSDT/cWUSDT", "cWAUSDT/cWUSDC"):
                continue
            uniswap_findings.append(
                {
                    "chainId": entry["chain_id"],
                    "network": entry["network"],
                    "pair": pair,
                    "poolAddress": row["poolAddress"],
                    "currentRatio": str(current),
                    "targetRatio": str(target),
                    "deviationPct": str(deviation),
                    "healthy": (row.get("health") or {}).get("healthy"),
                    "action": uniswap_action(pair, target),
                    "operator": uniswap_commands(entry["chain_id"], pair, row["poolAddress"], chain_info),
                }
            )

    # Stable ordering for reproducible diffs across runs.
    dodo_findings.sort(key=lambda row: (row["chainId"], row["pair"]))
    uniswap_findings.sort(key=lambda row: (row["chainId"], row["pair"]))
    unpriced.sort(key=lambda row: (row["chainId"], row["token"]))

    return {
        "generatedAt": now(),
        "inputs": {
            "priceReport": str(PRICE_REPORT),
            "deploymentStatus": str(DEPLOYMENT_STATUS),
            "uniswapDiscovery": str(UNISWAP_DISCOVERY),
        },
        "targetsUsd": {k: str(v) for k, v in TARGETS.items()},
        "thresholds": {
            "dodoDeviationPct": str(DODO_THRESHOLD_PCT),
            "uniswapDeviationPct": str(UNISWAP_THRESHOLD_PCT),
        },
        "deployer": deployer or None,
        "offPegDodoPools": dodo_findings,
        "offPegUniswapPairs": uniswap_findings,
        "unpricedTargets": unpriced,
    }
|
||||
|
||||
|
||||
def render_markdown(payload: dict) -> str:
    """Render the repeg-plan payload (from build_payload) as a markdown report.

    Sections: header metadata, off-peg DODO PMM pools table, broken Uniswap
    pairs table, unpriced targets table, a fixed execution-order checklist,
    and per-venue operator command blocks.
    """
    lines = [
        "# cW Public Network Repeg Plan",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Price report: `{payload['inputs']['priceReport']}`",
        f"- Deployment inventory: `{payload['inputs']['deploymentStatus']}`",
        f"- Uniswap discovery: `{payload['inputs']['uniswapDiscovery']}`",
        "- Scope: current off-peg or unreadable public-network `cW*` pools only.",
        "- PMM rule: if current price is below target, fund quote and buy base; if current price is above target, fund base and sell base.",
        "- Uniswap rule: remove bad LP or ignore stale LP, then reseed at the target reserve ratio.",
        "",
        "## Off-Peg DODO PMM Pools",
        "",
        "| Chain | Pair | Pool | Current | Target | Deviation | Repair Path |",
        "|---|---|---|---:|---:|---:|---|",
    ]

    # One table row per off-peg DODO pool; an em-dash placeholder row keeps the
    # table well-formed when there are no findings.
    if payload["offPegDodoPools"]:
        for row in payload["offPegDodoPools"]:
            lines.append(
                f"| `{row['chainId']}` {row['network']} | `{row['pair']}` | `{row['poolAddress'] or 'unknown'}` | "
                f"`{format_decimal(safe_decimal(row['currentUsd']))}` | `{format_decimal(safe_decimal(row['targetUsd']))}` | "
                f"`{format_pct(Decimal(row['deviationPct']))}` | {row['action']} |"
            )
    else:
        lines.append("| — | — | — | — | — | — | No off-peg DODO PMM pools found at the configured threshold. |")

    lines.extend(
        [
            "",
            "## Broken Uniswap Pairs",
            "",
            "| Chain | Pair | Pool | Current Ratio | Target | Deviation | Repair Path |",
            "|---|---|---|---:|---:|---:|---|",
        ]
    )

    if payload["offPegUniswapPairs"]:
        for row in payload["offPegUniswapPairs"]:
            lines.append(
                f"| `{row['chainId']}` {row['network']} | `{row['pair']}` | `{row['poolAddress']}` | "
                f"`{format_decimal(safe_decimal(row['currentRatio']))}` | `{format_decimal(safe_decimal(row['targetRatio']))}` | "
                f"`{format_pct(Decimal(row['deviationPct']))}` | {row['action']} |"
            )
    else:
        lines.append("| — | — | — | — | — | — | No broken Uniswap pairs found at the configured threshold. |")

    lines.extend(
        [
            "",
            "## Unpriced Targets",
            "",
            "| Chain | Token | Target | Status |",
            "|---|---|---:|---|",
        ]
    )

    if payload["unpricedTargets"]:
        for row in payload["unpricedTargets"]:
            lines.append(
                f"| `{row['chainId']}` {row['network']} | `{row['token']}` | "
                f"`{format_decimal(safe_decimal(row['targetUsd']))}` | {row['reason']} |"
            )
    else:
        lines.append("| — | — | — | No unpriced targets. |")

    lines.extend(
        [
            "",
            "## Execution Order",
            "",
            "1. Repeg direct PMM anchor pools first: `cWUSDC/USDC`, `cWUSDT/USDT`, then the fiat/metal quoted rails.",
            "2. Repeg `cWUSDT/cWUSDC` next on any chain where the wrapped support pair is broken.",
            "3. Repeg wrapped edge pairs such as `cWAUSDT/cWUSDT` and `cWAUSDT/cWUSDC` after the core wrapped support rail is healthy.",
            "4. Re-run the price table and this repeg plan after each venue is repaired.",
            "",
            f"- Deployer inspected: `{payload.get('deployer') or 'not found in env'}`",
            "",
            "## Operator Commands",
            "",
        ]
    )
    # Per-pool operator sections for DODO findings, including sizing hints and
    # the pre-built shell commands produced upstream in build_payload.
    for row in payload["offPegDodoPools"]:
        sizing = row.get("sizing") or {}
        funding = sizing.get("deployerFundingCheck") or {}
        reserve_note = ""
        pool_reserves = sizing.get("poolReserves") or {}
        if sizing.get("error"):
            reserve_note = f"- Sizing note: `{sizing['error']}`"
        elif pool_reserves.get("baseRaw") == "0" and pool_reserves.get("quoteRaw") == "0":
            reserve_note = "- Sizing note: `Pool reports zero vault reserves; heuristic trade size is informational only.`"
        lines.extend(
            [
                f"### `{row['chainId']}` {row['network']} `{row['pair']}`",
                "",
                f"- Suggested funding side: `{sizing.get('fundingSide', 'unknown')}`",
                f"- Suggested trade raw: `{sizing.get('suggestedTradeRaw', 'unknown')}`",
                f"- Suggested trade units: `{sizing.get('suggestedTradeUnits', 'unknown')}`",
                f"- Gap fraction: `{format_decimal(safe_decimal(sizing.get('gapFraction')), 6) if sizing.get('gapFraction') else 'unknown'}`",
                f"- Deployer balance on funding side: `{funding.get('balanceUnits', 'unknown')}` `{funding.get('token', '')}`",
                f"- Deployer shortfall: `{funding.get('shortfallUnits', 'unknown')}` `{funding.get('token', '')}`",
                f"- Covered: `{funding.get('covered', 'unknown')}`",
                # The extra snapshot bullet only appears for the zero-reserve case.
                *(["- Reserve snapshot: `baseRaw=0 quoteRaw=0`"] if reserve_note and "zero vault reserves" in reserve_note else []),
                *([reserve_note] if reserve_note else []),
                "",
                "```bash",
                row["operator"]["commands"],
                "```",
                "",
            ]
        )
    # Uniswap findings carry only the command block, no sizing hints.
    for row in payload["offPegUniswapPairs"]:
        lines.extend(
            [
                f"### `{row['chainId']}` {row['network']} `{row['pair']}`",
                "",
                "```bash",
                row["operator"]["commands"],
                "```",
                "",
            ]
        )
    return "\n".join(lines)
|
||||
|
||||
|
||||
def main() -> None:
    """Build the repeg-plan payload, persist JSON + markdown, echo the paths."""
    report = build_payload()
    write_json(JSON_OUT, report)
    write_text(DOC_OUT, render_markdown(report))
    for emitted in (JSON_OUT, DOC_OUT):
        print(emitted)


if __name__ == "__main__":
    main()
|
||||
229
scripts/verify/build-liquidity-pools-completion-report.py
Normal file
229
scripts/verify/build-liquidity-pools-completion-report.py
Normal file
@@ -0,0 +1,229 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Build a strict completion report for non-live liquidity venue inventory."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import csv
|
||||
import json
|
||||
from collections import Counter, defaultdict
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
MASTER_JSON = ROOT / "reports/status/liquidity-pools-master-map-latest.json"
|
||||
OUT_JSON = ROOT / "reports/status/liquidity-pools-completion-latest.json"
|
||||
OUT_MD = ROOT / "reports/status/liquidity-pools-completion-latest.md"
|
||||
OUT_CSV = ROOT / "reports/status/liquidity-pools-completion-remaining-latest.csv"
|
||||
|
||||
|
||||
CSV_FIELDS = [
|
||||
"chainId",
|
||||
"network",
|
||||
"venue",
|
||||
"pair",
|
||||
"poolAddress",
|
||||
"baseTokenAddress",
|
||||
"quoteTokenAddress",
|
||||
"status",
|
||||
"completionClass",
|
||||
]
|
||||
|
||||
|
||||
def md_table(headers: list[str], rows: list[list[str]]) -> str:
    """Render headers plus string rows as a GitHub-flavoured markdown table."""

    def render_row(cells: list[str]) -> str:
        return "| " + " | ".join(cells) + " |"

    rendered = [render_row(headers), render_row(["---"] * len(headers))]
    for row in rows:
        rendered.append(render_row(row))
    return "\n".join(rendered)
|
||||
|
||||
|
||||
def completion_class(status: str) -> str:
    """Collapse a raw inventory status into a coarse completion bucket.

    Any status beginning with "live" counts as complete; a handful of known
    statuses map to named buckets; everything else passes through unchanged.
    """
    if status.startswith("live"):
        return "complete"
    bucket_by_status = {
        "planned_gas_placeholder": "planned_gas_surface",
        "configured_no_live_balance": "deploy_or_verify_live_balance",
        "configured_no_code": "deploy_or_verify_live_balance",
        "configured_code_present_no_balance_read": "deploy_or_verify_live_balance",
        "planned_reference_placeholder": "planned_reference_surface",
        "supported_not_live": "supported_but_not_live",
    }
    return bucket_by_status.get(status, status)
|
||||
|
||||
|
||||
def build() -> dict:
    """Aggregate the master-map JSON into a completion report payload.

    Reads MASTER_JSON, keeps only rows whose status does NOT start with
    "live", and returns per-chain breakdowns plus flat CSV rows and totals.
    """
    data = json.loads(MASTER_JSON.read_text())
    generated_at = datetime.now(timezone.utc).replace(microsecond=0).isoformat()

    chain_rows = []
    remaining_csv_rows = []
    totals = Counter()  # pool status -> count, across all chains

    for chain in data["chains"]:
        # "live*" statuses are considered done; everything else is remaining work.
        remaining_pools = [p for p in chain["pools"] if not str(p["status"]).startswith("live")]
        remaining_refs = [r for r in chain.get("referenceVenues", []) if not str(r["status"]).startswith("live")]

        pool_status_counts = Counter(p["status"] for p in remaining_pools)
        ref_status_counts = Counter(r["status"] for r in remaining_refs)
        venue_counts = Counter(p["venue"] for p in remaining_pools)

        # Only pool statuses feed the global totals; reference venues stay per-chain.
        totals.update(pool_status_counts)

        chain_rows.append(
            {
                "chainId": chain["chainId"],
                "network": chain["network"],
                "remainingPoolCount": len(remaining_pools),
                "remainingReferenceVenueCount": len(remaining_refs),
                "poolStatusCounts": dict(pool_status_counts),
                "referenceStatusCounts": dict(ref_status_counts),
                "remainingVenueCounts": dict(venue_counts),
                "remainingPools": [
                    {
                        "venue": p["venue"],
                        "pair": f"{p['baseSymbol']}/{p['quoteSymbol']}",
                        "poolAddress": p["poolAddress"],
                        "baseTokenAddress": p["baseAddress"],
                        "quoteTokenAddress": p["quoteAddress"],
                        "status": p["status"],
                        "completionClass": completion_class(p["status"]),
                    }
                    for p in remaining_pools
                ],
                "remainingReferenceVenues": [
                    {
                        "protocol": r["protocol"],
                        "pair": f"{r['baseSymbol']}/{r['quoteSymbol']}",
                        "venueAddress": r["venueAddress"],
                        "status": r["status"],
                        "completionClass": completion_class(r["status"]),
                    }
                    for r in remaining_refs
                ],
            }
        )

        # Flat per-pool rows for the CSV artifact (same fields as CSV_FIELDS).
        for p in remaining_pools:
            remaining_csv_rows.append(
                {
                    "chainId": chain["chainId"],
                    "network": chain["network"],
                    "venue": p["venue"],
                    "pair": f"{p['baseSymbol']}/{p['quoteSymbol']}",
                    "poolAddress": p["poolAddress"],
                    "baseTokenAddress": p["baseAddress"],
                    "quoteTokenAddress": p["quoteAddress"],
                    "status": p["status"],
                    "completionClass": completion_class(p["status"]),
                }
            )

    return {
        "generatedAt": generated_at,
        "source": str(MASTER_JSON.relative_to(ROOT)),
        "summary": {
            "remainingPools": sum(r["remainingPoolCount"] for r in chain_rows),
            "remainingReferenceVenues": sum(r["remainingReferenceVenueCount"] for r in chain_rows),
            "poolStatusCounts": dict(totals),
        },
        "chains": chain_rows,
        "remainingCsvRows": remaining_csv_rows,
    }
|
||||
|
||||
|
||||
def write_csv(rows: list[dict]) -> None:
    """Write the remaining-work rows to OUT_CSV using the CSV_FIELDS column order."""
    with OUT_CSV.open("w", newline="") as handle:
        emitter = csv.DictWriter(handle, fieldnames=CSV_FIELDS)
        emitter.writeheader()
        for row in rows:
            emitter.writerow(row)
|
||||
|
||||
|
||||
def write_markdown(report: dict) -> None:
    """Render the completion report (from build) to OUT_MD as markdown.

    Layout: header + summary bullets, a by-chain summary table, then one
    section per chain that still has remaining pools or reference venues.
    """
    lines = [
        "# Liquidity Pools Completion Report",
        "",
        f"- Generated: `{report['generatedAt']}`",
        f"- Source: `{report['source']}`",
        "",
        "## Summary",
        "",
        f"- Remaining non-live pool rows: `{report['summary']['remainingPools']}`",
        f"- Remaining non-live reference venue rows: `{report['summary']['remainingReferenceVenues']}`",
        f"- Pool status counts: `{json.dumps(report['summary']['poolStatusCounts'], sort_keys=True)}`",
        "",
    ]

    summary_rows = []
    for chain in report["chains"]:
        summary_rows.append(
            [
                str(chain["chainId"]),
                chain["network"],
                str(chain["remainingPoolCount"]),
                str(chain["remainingReferenceVenueCount"]),
                json.dumps(chain["poolStatusCounts"], sort_keys=True),
            ]
        )
    lines += [
        "## By Chain",
        "",
        md_table(["ChainID", "Network", "Remaining Pools", "Remaining Reference Venues", "Pool Status Counts"], summary_rows),
        "",
    ]

    for chain in report["chains"]:
        # Fully-complete chains get no per-chain section.
        if chain["remainingPoolCount"] == 0 and chain["remainingReferenceVenueCount"] == 0:
            continue
        lines += [f"## {chain['network']} ({chain['chainId']})", ""]

        if chain["remainingPoolCount"]:
            pool_rows = [
                [
                    p["venue"],
                    p["pair"],
                    f"`{p['poolAddress']}`",
                    p["status"],
                    p["completionClass"],
                ]
                for p in chain["remainingPools"]
            ]
            lines += [
                "### Remaining Pools",
                "",
                md_table(["Venue", "Pair", "Pool", "Status", "Completion Class"], pool_rows),
                "",
            ]

        if chain["remainingReferenceVenueCount"]:
            ref_rows = [
                [
                    r["protocol"],
                    r["pair"],
                    # Reference venues may have no address; render an em dash then.
                    f"`{r['venueAddress']}`" if r["venueAddress"] else "—",
                    r["status"],
                    r["completionClass"],
                ]
                for r in chain["remainingReferenceVenues"]
            ]
            lines += [
                "### Remaining Reference Venues",
                "",
                md_table(["Protocol", "Pair", "Venue Address", "Status", "Completion Class"], ref_rows),
                "",
            ]

    OUT_MD.write_text("\n".join(lines) + "\n")
|
||||
|
||||
|
||||
def main() -> int:
    """Build the completion report and emit the JSON, markdown, and CSV artifacts."""
    report = build()
    OUT_JSON.write_text(json.dumps(report, indent=2) + "\n")
    write_markdown(report)
    write_csv(report["remainingCsvRows"])
    for artifact in (OUT_JSON, OUT_MD, OUT_CSV):
        print(f"Wrote {artifact.relative_to(ROOT)}")
    return 0


if __name__ == "__main__":
    raise SystemExit(main())
|
||||
594
scripts/verify/build-liquidity-pools-master-map.py
Normal file
594
scripts/verify/build-liquidity-pools-master-map.py
Normal file
@@ -0,0 +1,594 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Build a live liquidity-pool inventory from deployment-status + on-chain reserves.
|
||||
|
||||
Outputs:
|
||||
- reports/status/liquidity-pools-master-map-latest.json
|
||||
- docs/11-references/LIQUIDITY_POOLS_MASTER_MAP.md
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
DEPLOYMENT_STATUS = ROOT / "cross-chain-pmm-lps/config/deployment-status.json"
|
||||
CHAIN138_EXECUTION = ROOT / "smom-dbis-138/config/chain138-eth-pmm-pools-execution.json"
|
||||
CHAIN138_PMM = ROOT / "smom-dbis-138/config/chain138-pmm-pools.json"
|
||||
OUT_JSON = ROOT / "reports/status/liquidity-pools-master-map-latest.json"
|
||||
OUT_MD = ROOT / "docs/11-references/LIQUIDITY_POOLS_MASTER_MAP.md"
|
||||
UNIV2_DISCOVERY = ROOT / "reports/extraction/promod-uniswap-v2-live-pair-discovery-latest.json"
|
||||
GRU_GAP_REPORT = ROOT / "reports/extraction/promod-gru-v2-full-mesh-gap-report-latest.json"
|
||||
|
||||
POOL_LIST_KEYS = ("pmmPools", "pmmPoolsVolatile", "gasPmmPools", "uniswapV2Pools")
|
||||
CHAIN_ORDER = ["138", "651940", "1", "10", "25", "56", "100", "137", "8453", "42161", "42220", "43114", "1111"]
|
||||
|
||||
RPC_DEFAULTS = {
|
||||
"138": os.environ.get("CHAIN138_RPC_URL")
|
||||
or os.environ.get("RPC_URL_138")
|
||||
or os.environ.get("RPC_URL")
|
||||
or "https://rpc-core.d-bis.org",
|
||||
"651940": os.environ.get("CHAIN_651940_RPC")
|
||||
or os.environ.get("CHAIN_651940_RPC_URL")
|
||||
or os.environ.get("ALLTRA_MAINNET_RPC")
|
||||
or "https://mainnet-rpc.alltra.global",
|
||||
"1": os.environ.get("ETHEREUM_MAINNET_RPC") or "https://eth.llamarpc.com",
|
||||
"10": os.environ.get("OPTIMISM_MAINNET_RPC") or os.environ.get("OPTIMISM_RPC_URL") or "https://mainnet.optimism.io",
|
||||
"25": os.environ.get("CRONOS_RPC") or os.environ.get("CRONOS_MAINNET_RPC") or "https://evm.cronos.org",
|
||||
"56": os.environ.get("BSC_RPC_URL") or os.environ.get("BSC_MAINNET_RPC") or "https://bsc-dataseed.binance.org",
|
||||
"100": os.environ.get("GNOSIS_RPC") or os.environ.get("GNOSIS_MAINNET_RPC") or "https://rpc.gnosischain.com",
|
||||
"137": os.environ.get("POLYGON_MAINNET_RPC") or os.environ.get("POLYGON_RPC_URL") or "https://polygon-bor-rpc.publicnode.com",
|
||||
"8453": os.environ.get("BASE_MAINNET_RPC") or os.environ.get("BASE_RPC_URL") or "https://mainnet.base.org",
|
||||
"42161": os.environ.get("ARBITRUM_MAINNET_RPC") or os.environ.get("ARBITRUM_RPC_URL") or "https://arb1.arbitrum.io/rpc",
|
||||
"42220": os.environ.get("CELO_RPC") or os.environ.get("CELO_MAINNET_RPC") or "https://forno.celo.org",
|
||||
"43114": os.environ.get("AVALANCHE_RPC_URL") or os.environ.get("AVALANCHE_MAINNET_RPC") or "https://api.avax.network/ext/bc/C/rpc",
|
||||
"1111": os.environ.get("WEMIX_RPC") or os.environ.get("WEMIX_MAINNET_RPC") or "https://api.wemix.com",
|
||||
}
|
||||
|
||||
PLACEHOLDER_NOTES = {"placeholder_scaffold_not_live"}
|
||||
ZERO_ADDR = "0x0000000000000000000000000000000000000000"
|
||||
|
||||
|
||||
def load_json(path: Path) -> Any:
    """Read the file at *path* and return its parsed JSON content."""
    raw = path.read_text()
    return json.loads(raw)
|
||||
|
||||
|
||||
def _run_with_retries(cmd: list[str], failure_msg: str) -> str:
    """Run *cmd* up to 3 times with linear backoff; return stripped stdout.

    Each attempt discards stderr and is capped at a 5-second timeout. When
    every attempt fails, the last exception is re-raised (falling back to a
    RuntimeError with *failure_msg* defensively, if no exception was captured).
    """
    last_exc: Exception | None = None
    for attempt in range(3):
        try:
            return subprocess.check_output(cmd, text=True, stderr=subprocess.DEVNULL, timeout=5).strip()
        except Exception as exc:
            last_exc = exc
            if attempt < 2:
                # Linear backoff: 0.25s, then 0.5s.
                time.sleep(0.25 * (attempt + 1))
    raise last_exc or RuntimeError(failure_msg)


def cast_call(rpc: str, to: str, sig: str, *args: str) -> list[str]:
    """Invoke foundry's `cast call` against *rpc* and return non-empty output lines.

    Args:
        rpc: RPC endpoint URL.
        to: contract address to call.
        sig: function signature, e.g. "decimals()(uint8)".
        *args: ABI-encoded call arguments, as strings.

    Raises:
        The underlying subprocess exception after three failed attempts.
    """
    out = _run_with_retries(
        ["cast", "call", to, sig, *args, "--rpc-url", rpc],
        f"cast call failed for {to} {sig}",
    )
    return [line.strip() for line in out.splitlines() if line.strip()]


def cast_code(rpc: str, address: str) -> str:
    """Fetch deployed bytecode for *address* via foundry's `cast code`.

    Returns the stripped hex string ("0x" or "" indicates no code deployed).

    Raises:
        The underlying subprocess exception after three failed attempts.
    """
    return _run_with_retries(
        ["cast", "code", address, "--rpc-url", rpc],
        f"cast code failed for {address}",
    )
|
||||
|
||||
|
||||
def clean_int(value: str) -> int:
    """Parse the first whitespace-delimited token of *value* as an integer.

    Tokens prefixed with "0x" are decoded as hexadecimal, everything else as
    decimal — matches the output shapes produced by `cast`.
    """
    head = value.split()[0]
    radix = 16 if head.startswith("0x") else 10
    return int(head, radix)
|
||||
|
||||
|
||||
def is_placeholder_address(addr: str | None) -> bool:
|
||||
if not addr or not isinstance(addr, str) or not addr.startswith("0x"):
|
||||
return True
|
||||
body = addr[2:].lower()
|
||||
if body == "0" * 40:
|
||||
return True
|
||||
return "0" * 20 in body
|
||||
|
||||
|
||||
def human_amount(raw: int | None, decimals: int | None) -> str | None:
|
||||
if raw is None or decimals is None:
|
||||
return None
|
||||
negative = raw < 0
|
||||
raw = abs(raw)
|
||||
scale = 10 ** decimals
|
||||
whole = raw // scale
|
||||
frac = raw % scale
|
||||
frac_s = f"{frac:0{decimals}d}".rstrip("0")
|
||||
text = str(whole) if not frac_s else f"{whole}.{frac_s}"
|
||||
return f"-{text}" if negative else text
|
||||
|
||||
|
||||
def markdown_table(headers: list[str], rows: list[list[str]]) -> str:
    """Assemble a markdown table string from a header list and string rows."""

    def join_cells(cells: list[str]) -> str:
        return "| " + " | ".join(cells) + " |"

    rendered = [join_cells(headers), join_cells(["---"] * len(headers))]
    rendered.extend(join_cells(row) for row in rows)
    return "\n".join(rendered)
|
||||
|
||||
|
||||
@dataclass
class TokenInfo:
    """Lightweight token descriptor (not referenced in the visible code paths)."""

    # Token symbol, presumably an ERC-20 display symbol — TODO confirm usage.
    symbol: str
    # 0x token address, or None when unresolved.
    address: str | None
    # Token decimals, or None when decimals() could not be read.
    decimals: int | None
|
||||
|
||||
class PoolBuilder:
|
||||
def __init__(self) -> None:
|
||||
self.deployment_status = load_json(DEPLOYMENT_STATUS)
|
||||
self.chain138_execution = load_json(CHAIN138_EXECUTION)
|
||||
self.chain138_pmm = load_json(CHAIN138_PMM)
|
||||
self.univ2_discovery = load_json(UNIV2_DISCOVERY) if UNIV2_DISCOVERY.exists() else None
|
||||
self.gru_gap_report = load_json(GRU_GAP_REPORT) if GRU_GAP_REPORT.exists() else None
|
||||
self.decimals_cache: dict[tuple[str, str], int | None] = {}
|
||||
self.univ2_health_index = self.build_univ2_health_index()
|
||||
self.documented_live_dodo_pairs = self.build_documented_live_dodo_pairs()
|
||||
self.fallback_symbol_decimals = {
|
||||
"WETH": 18,
|
||||
"WETH9": 18,
|
||||
"WETH10": 18,
|
||||
"WETHL2": 18,
|
||||
"WWEMIX": 18,
|
||||
"WCRO": 18,
|
||||
"WALL": 18,
|
||||
"cWETH": 18,
|
||||
"cWETHL2": 18,
|
||||
}
|
||||
|
||||
def build_univ2_health_index(self) -> dict[tuple[int, str], dict[str, Any]]:
|
||||
index: dict[tuple[int, str], dict[str, Any]] = {}
|
||||
if not self.univ2_discovery:
|
||||
return index
|
||||
for entry in self.univ2_discovery.get("entries", []):
|
||||
chain_id = int(entry["chain_id"])
|
||||
for pair in entry.get("pairsChecked", []):
|
||||
addr = pair.get("poolAddress")
|
||||
if addr and addr != ZERO_ADDR:
|
||||
index[(chain_id, addr.lower())] = pair
|
||||
return index
|
||||
|
||||
def build_documented_live_dodo_pairs(self) -> dict[int, set[str]]:
|
||||
out: dict[int, set[str]] = {}
|
||||
if not self.gru_gap_report:
|
||||
return out
|
||||
for chain in self.gru_gap_report.get("chains", []):
|
||||
dodo = ((chain.get("venue_status") or {}).get("dodo_pmm") or {})
|
||||
out[int(chain["chain_id"])] = set(dodo.get("live_pairs", []))
|
||||
return out
|
||||
|
||||
def resolve_token_address(self, chain_id: str, chain_data: dict[str, Any], symbol: str) -> str | None:
|
||||
if chain_id == "138":
|
||||
if symbol in self.chain138_execution.get("tokens", {}):
|
||||
return self.chain138_execution["tokens"][symbol]
|
||||
if symbol in chain_data.get("anchorAddresses", {}):
|
||||
return chain_data["anchorAddresses"][symbol]
|
||||
if symbol in self.chain138_pmm.get("tokens", {}):
|
||||
return self.chain138_pmm["tokens"][symbol]
|
||||
for key in ("cwTokens", "anchorAddresses", "gasMirrors", "gasQuoteAddresses"):
|
||||
mapping = chain_data.get(key, {})
|
||||
if symbol in mapping:
|
||||
return mapping[symbol]
|
||||
return None
|
||||
|
||||
def token_decimals(self, rpc: str, address: str | None, chain_id: str, symbol: str | None = None) -> int | None:
|
||||
if not address:
|
||||
return None
|
||||
cache_key = (chain_id, address.lower())
|
||||
if cache_key in self.decimals_cache:
|
||||
return self.decimals_cache[cache_key]
|
||||
try:
|
||||
lines = cast_call(rpc, address, "decimals()(uint8)")
|
||||
dec = clean_int(lines[0])
|
||||
if dec == 0 and (symbol or "") in self.fallback_symbol_decimals:
|
||||
dec = self.fallback_symbol_decimals[symbol or ""]
|
||||
except Exception:
|
||||
dec = self.fallback_symbol_decimals.get(symbol or "")
|
||||
self.decimals_cache[cache_key] = dec
|
||||
return dec
|
||||
|
||||
def build_pool_rows(self) -> dict[str, Any]:
|
||||
chains = self.deployment_status["chains"]
|
||||
generated_at = datetime.now(timezone.utc).replace(microsecond=0).isoformat()
|
||||
report_chains: list[dict[str, Any]] = []
|
||||
|
||||
for chain_id in CHAIN_ORDER:
|
||||
chain_data = chains.get(chain_id)
|
||||
if not chain_data:
|
||||
continue
|
||||
rpc = RPC_DEFAULTS.get(chain_id)
|
||||
chain_rows: list[dict[str, Any]] = []
|
||||
for list_key in POOL_LIST_KEYS:
|
||||
for pool in chain_data.get(list_key, []):
|
||||
row = self.build_pool_row(chain_id, chain_data, list_key, pool, rpc)
|
||||
chain_rows.append(row)
|
||||
reference_venues = [self.build_reference_venue_row(chain_id, chain_data, venue) for venue in chain_data.get("gasReferenceVenues", [])]
|
||||
report_chains.append(
|
||||
{
|
||||
"chainId": int(chain_id),
|
||||
"network": chain_data["name"],
|
||||
"rpc": rpc,
|
||||
"poolCount": len(chain_rows),
|
||||
"pools": chain_rows,
|
||||
"referenceVenueCount": len(reference_venues),
|
||||
"referenceVenues": reference_venues,
|
||||
}
|
||||
)
|
||||
return {
|
||||
"generatedAt": generated_at,
|
||||
"sourceFiles": [
|
||||
str(DEPLOYMENT_STATUS.relative_to(ROOT)),
|
||||
str(CHAIN138_EXECUTION.relative_to(ROOT)),
|
||||
str(CHAIN138_PMM.relative_to(ROOT)),
|
||||
],
|
||||
"chains": report_chains,
|
||||
}
|
||||
|
||||
def build_pool_row(
|
||||
self,
|
||||
chain_id: str,
|
||||
chain_data: dict[str, Any],
|
||||
list_key: str,
|
||||
pool: dict[str, Any],
|
||||
rpc: str | None,
|
||||
) -> dict[str, Any]:
|
||||
base_symbol = pool.get("base") or pool.get("tokenIn")
|
||||
quote_symbol = pool.get("quote") or pool.get("tokenOut")
|
||||
base_address = self.resolve_token_address(chain_id, chain_data, base_symbol) if base_symbol else None
|
||||
quote_address = self.resolve_token_address(chain_id, chain_data, quote_symbol) if quote_symbol else None
|
||||
venue = pool.get("venue") or ("uniswap_v2_pair" if list_key == "uniswapV2Pools" else "dodo_pmm")
|
||||
notes = list(pool.get("notes", []))
|
||||
live_enabled = pool.get("publicRoutingEnabled")
|
||||
row: dict[str, Any] = {
|
||||
"chainId": int(chain_id),
|
||||
"network": chain_data["name"],
|
||||
"inventoryGroup": list_key,
|
||||
"venue": venue,
|
||||
"baseSymbol": base_symbol,
|
||||
"baseAddress": base_address,
|
||||
"quoteSymbol": quote_symbol,
|
||||
"quoteAddress": quote_address,
|
||||
"poolAddress": pool.get("poolAddress"),
|
||||
"feeBps": pool.get("feeBps"),
|
||||
"k": pool.get("k"),
|
||||
"role": pool.get("role"),
|
||||
"poolType": pool.get("poolType"),
|
||||
"publicRoutingEnabled": live_enabled,
|
||||
"notes": notes,
|
||||
"status": "configured",
|
||||
"balances": None,
|
||||
"queryError": None,
|
||||
"balanceSource": None,
|
||||
}
|
||||
|
||||
if list_key == "uniswapV2Pools":
|
||||
row["factoryAddress"] = pool.get("factoryAddress")
|
||||
row["routerAddress"] = pool.get("routerAddress")
|
||||
|
||||
pool_address = pool.get("poolAddress")
|
||||
if not rpc:
|
||||
row["status"] = "rpc_missing"
|
||||
return row
|
||||
if is_placeholder_address(pool_address) or any(note in PLACEHOLDER_NOTES for note in notes):
|
||||
row["status"] = "planned_gas_placeholder" if list_key == "gasPmmPools" else "placeholder_not_live"
|
||||
return row
|
||||
if not pool_address or not base_address or not quote_address:
|
||||
row["status"] = "address_resolution_missing"
|
||||
return row
|
||||
|
||||
try:
|
||||
if list_key == "uniswapV2Pools":
|
||||
balances = self.fetch_uniswap_v2_discovery_balances(int(chain_id), pool_address, base_symbol, quote_symbol, base_address, quote_address, rpc)
|
||||
if balances is not None:
|
||||
row["balances"] = balances
|
||||
row["status"] = "live"
|
||||
row["balanceSource"] = "uniswap_v2_live_pair_discovery"
|
||||
else:
|
||||
row["status"] = "configured_no_live_balance"
|
||||
elif chain_id == "138":
|
||||
row["balances"] = self.fetch_dodo_balances(rpc, chain_id, pool_address, base_symbol, quote_symbol, base_address, quote_address)
|
||||
row["status"] = "live"
|
||||
row["balanceSource"] = "rpc_getVaultReserve"
|
||||
else:
|
||||
code = cast_code(rpc, pool_address)
|
||||
if code in ("", "0x"):
|
||||
row["status"] = "configured_no_code"
|
||||
elif list_key == "pmmPools" and f"{base_symbol}/{quote_symbol}" in self.documented_live_dodo_pairs.get(int(chain_id), set()):
|
||||
row["status"] = "live_documented_no_balance"
|
||||
row["balanceSource"] = "promod_gru_v2_full_mesh_gap_report"
|
||||
else:
|
||||
try:
|
||||
row["balances"] = self.fetch_dodo_balances(rpc, chain_id, pool_address, base_symbol, quote_symbol, base_address, quote_address)
|
||||
row["status"] = "live"
|
||||
row["balanceSource"] = "rpc_getVaultReserve"
|
||||
except Exception:
|
||||
row["status"] = "configured_code_present_no_balance_read"
|
||||
except Exception as exc:
|
||||
if list_key == "pmmPools" and f"{base_symbol}/{quote_symbol}" in self.documented_live_dodo_pairs.get(int(chain_id), set()):
|
||||
row["status"] = "live_documented_no_balance"
|
||||
row["balanceSource"] = "promod_gru_v2_full_mesh_gap_report"
|
||||
row["queryError"] = str(exc)
|
||||
else:
|
||||
row["status"] = "query_failed"
|
||||
row["queryError"] = str(exc)
|
||||
return row
|
||||
|
||||
def build_reference_venue_row(self, chain_id: str, chain_data: dict[str, Any], venue: dict[str, Any]) -> dict[str, Any]:
|
||||
base_symbol = venue.get("base")
|
||||
quote_symbol = venue.get("quote")
|
||||
notes = list(venue.get("notes", []))
|
||||
if any(note in PLACEHOLDER_NOTES for note in notes):
|
||||
status = "planned_reference_placeholder"
|
||||
elif venue.get("live"):
|
||||
status = "live"
|
||||
elif venue.get("supported"):
|
||||
status = "supported_not_live"
|
||||
else:
|
||||
status = "unsupported"
|
||||
return {
|
||||
"chainId": int(chain_id),
|
||||
"network": chain_data["name"],
|
||||
"protocol": venue.get("protocol"),
|
||||
"familyKey": venue.get("familyKey"),
|
||||
"baseSymbol": base_symbol,
|
||||
"baseAddress": self.resolve_token_address(chain_id, chain_data, base_symbol) if base_symbol else None,
|
||||
"quoteSymbol": quote_symbol,
|
||||
"quoteAddress": self.resolve_token_address(chain_id, chain_data, quote_symbol) if quote_symbol else None,
|
||||
"venueAddress": venue.get("venueAddress"),
|
||||
"supported": venue.get("supported"),
|
||||
"live": venue.get("live"),
|
||||
"routingVisible": venue.get("routingVisible"),
|
||||
"reference": venue.get("reference"),
|
||||
"aggregatorOnly": venue.get("aggregatorOnly"),
|
||||
"indexRequired": venue.get("indexRequired"),
|
||||
"dependsOn": venue.get("dependsOn", []),
|
||||
"notes": notes,
|
||||
"status": status,
|
||||
}
|
||||
|
||||
def fetch_uniswap_v2_discovery_balances(
|
||||
self,
|
||||
chain_id: int,
|
||||
pool_address: str,
|
||||
base_symbol: str,
|
||||
quote_symbol: str,
|
||||
base_address: str,
|
||||
quote_address: str,
|
||||
rpc: str,
|
||||
) -> dict[str, Any] | None:
|
||||
pair = self.univ2_health_index.get((chain_id, pool_address.lower()))
|
||||
if not pair or not pair.get("live"):
|
||||
return None
|
||||
health = pair.get("health") or {}
|
||||
base_raw = int(health["baseReserveRaw"])
|
||||
quote_raw = int(health["quoteReserveRaw"])
|
||||
base_dec = self.token_decimals(rpc, base_address, str(chain_id), base_symbol)
|
||||
quote_dec = self.token_decimals(rpc, quote_address, str(chain_id), quote_symbol)
|
||||
return {
|
||||
"base": {
|
||||
"symbol": base_symbol,
|
||||
"address": base_address,
|
||||
"raw": str(base_raw),
|
||||
"decimals": base_dec,
|
||||
"formatted": health.get("baseReserveUnits") or human_amount(base_raw, base_dec),
|
||||
},
|
||||
"quote": {
|
||||
"symbol": quote_symbol,
|
||||
"address": quote_address,
|
||||
"raw": str(quote_raw),
|
||||
"decimals": quote_dec,
|
||||
"formatted": health.get("quoteReserveUnits") or human_amount(quote_raw, quote_dec),
|
||||
},
|
||||
"health": health,
|
||||
}
|
||||
|
||||
def fetch_dodo_balances(
|
||||
self,
|
||||
rpc: str,
|
||||
chain_id: str,
|
||||
pool_address: str,
|
||||
base_symbol: str,
|
||||
quote_symbol: str,
|
||||
base_address: str,
|
||||
quote_address: str,
|
||||
) -> dict[str, Any]:
|
||||
lines = cast_call(rpc, pool_address, "getVaultReserve()(uint256,uint256)")
|
||||
base_raw = clean_int(lines[0])
|
||||
quote_raw = clean_int(lines[1])
|
||||
base_dec = self.token_decimals(rpc, base_address, chain_id, base_symbol)
|
||||
quote_dec = self.token_decimals(rpc, quote_address, chain_id, quote_symbol)
|
||||
return {
|
||||
"base": {
|
||||
"symbol": base_symbol,
|
||||
"address": base_address,
|
||||
"raw": str(base_raw),
|
||||
"decimals": base_dec,
|
||||
"formatted": human_amount(base_raw, base_dec),
|
||||
},
|
||||
"quote": {
|
||||
"symbol": quote_symbol,
|
||||
"address": quote_address,
|
||||
"raw": str(quote_raw),
|
||||
"decimals": quote_dec,
|
||||
"formatted": human_amount(quote_raw, quote_dec),
|
||||
},
|
||||
}
|
||||
|
||||
def fetch_uniswap_v2_balances(
    self,
    rpc: str,
    chain_id: str,
    pool_address: str,
    base_symbol: str,
    quote_symbol: str,
    base_address: str,
    quote_address: str,
) -> dict[str, Any]:
    """Read a Uniswap V2 pair's reserves, ordered as base/quote.

    Raises:
        RuntimeError: when the pair's token0/token1 match neither
            (base, quote) nor (quote, base).
    """
    reserve_lines = cast_call(rpc, pool_address, "getReserves()(uint112,uint112,uint32)")
    reserve0 = clean_int(reserve_lines[0])
    reserve1 = clean_int(reserve_lines[1])
    token0 = cast_call(rpc, pool_address, "token0()(address)")[0].split()[0]
    token1 = cast_call(rpc, pool_address, "token1()(address)")[0].split()[0]

    # Map the on-chain token ordering onto the configured base/quote pair.
    ordering = (token0.lower(), token1.lower())
    if ordering == (base_address.lower(), quote_address.lower()):
        base_raw, quote_raw = reserve0, reserve1
    elif ordering == (quote_address.lower(), base_address.lower()):
        base_raw, quote_raw = reserve1, reserve0
    else:
        raise RuntimeError(f"pair token mismatch: token0={token0} token1={token1}")

    base_dec = self.token_decimals(rpc, base_address, chain_id, base_symbol)
    quote_dec = self.token_decimals(rpc, quote_address, chain_id, quote_symbol)

    def side(symbol: str, address: str, raw: int, decimals: int) -> dict[str, Any]:
        return {
            "symbol": symbol,
            "address": address,
            "raw": str(raw),
            "decimals": decimals,
            "formatted": human_amount(raw, decimals),
        }

    return {
        "base": side(base_symbol, base_address, base_raw, base_dec),
        "quote": side(quote_symbol, quote_address, quote_raw, quote_dec),
    }
|
||||
|
||||
|
||||
def render_markdown(report: dict[str, Any]) -> str:
    """Render the pool report dict as a markdown document.

    Sections emitted in order: header + overview, a per-network summary
    table, one pool table per chain (plus an optional reference-venue
    table), and a source-file footnote list.
    """
    generated_at = report["generatedAt"]
    lines = [
        "# Liquidity Pools Master Map — Chain 138, ALL Mainnet, and Public Networks",
        "",
        f"**Last Updated:** {generated_at}",
        "**Document Version:** 2.0",
        "**Status:** Generated from deployment-status plus live reserve reads where supported",
        "",
        "## Overview",
        "",
        "This document is generated from the machine-readable deployment graph in `cross-chain-pmm-lps/config/deployment-status.json`, plus live reserve reads from chain RPCs.",
        "",
        "Pool categories:",
        "- `pmmPools`: standard DODO PMM routing pools",
        "- `pmmPoolsVolatile`: volatile-route PMM pools",
        "- `gasPmmPools`: gas-family PMM pools",
        "- `uniswapV2Pools`: Uniswap V2-compatible pairs",
        "",
    ]

    # Per-network roll-up: counts bucketed by pool status ("live*" prefix,
    # exact placeholder/failed values).
    summary_rows: list[list[str]] = []
    for chain in report["chains"]:
        live = sum(1 for pool in chain["pools"] if str(pool["status"]).startswith("live"))
        planned_placeholder = sum(1 for pool in chain["pools"] if pool["status"] == "planned_gas_placeholder")
        failed = sum(1 for pool in chain["pools"] if pool["status"] == "query_failed")
        summary_rows.append(
            [
                str(chain["chainId"]),
                chain["network"],
                str(chain["poolCount"]),
                str(live),
                str(planned_placeholder),
                str(failed),
                str(chain.get("referenceVenueCount", 0)),
                chain["rpc"] or "n/a",
            ]
        )
    lines += ["## Network Summary", "", markdown_table(["ChainID", "Network", "Pools", "Live Read", "Planned Gas Placeholder", "Query Failed", "Reference Venues", "RPC"], summary_rows), ""]

    # One section per chain: pool table, then reference venues when present.
    for chain in report["chains"]:
        lines += [f"## {chain['network']} ({chain['chainId']})", ""]
        if not chain["pools"]:
            lines += ["No pools are listed in the canonical deployment graph for this network.", ""]
            continue
        rows: list[list[str]] = []
        for pool in chain["pools"]:
            # balances may be absent for pools whose reserves were not read.
            balances = pool.get("balances") or {}
            base_bal = balances.get("base", {}).get("formatted") if balances else None
            quote_bal = balances.get("quote", {}).get("formatted") if balances else None
            rows.append(
                [
                    pool["inventoryGroup"],
                    pool["venue"],
                    f"{pool['baseSymbol']} / {pool['quoteSymbol']}",
                    f"`{pool['poolAddress']}`",
                    f"`{pool['baseAddress']}`" if pool.get("baseAddress") else "—",
                    f"`{pool['quoteAddress']}`" if pool.get("quoteAddress") else "—",
                    base_bal or "—",
                    quote_bal or "—",
                    pool["status"],
                ]
            )
        lines += [
            markdown_table(
                ["Group", "Venue", "Pair", "Pool", "Base Token", "Quote Token", "Base Balance", "Quote Balance", "Status"],
                rows,
            ),
            "",
        ]
        ref_rows = chain.get("referenceVenues", [])
        if ref_rows:
            ref_table_rows = []
            for venue in ref_rows:
                ref_table_rows.append(
                    [
                        venue["protocol"],
                        f"{venue.get('baseSymbol')} / {venue.get('quoteSymbol')}",
                        f"`{venue['venueAddress']}`" if venue.get("venueAddress") else "—",
                        "yes" if venue.get("supported") else "no",
                        "yes" if venue.get("live") else "no",
                        "yes" if venue.get("routingVisible") else "no",
                        venue["status"],
                    ]
                )
            lines += [
                f"### {chain['network']} Reference Venues",
                "",
                markdown_table(["Protocol", "Pair", "Venue Address", "Supported", "Live", "Routing Visible", "Status"], ref_table_rows),
                "",
            ]

    # Footnote: canonical inputs plus the JSON artifact written alongside.
    lines += [
        "## Source Files",
        "",
        "- `cross-chain-pmm-lps/config/deployment-status.json`",
        "- `smom-dbis-138/config/chain138-eth-pmm-pools-execution.json`",
        "- `smom-dbis-138/config/chain138-pmm-pools.json`",
        f"- `reports/status/{OUT_JSON.name}`",
        "",
    ]
    return "\n".join(lines)
|
||||
|
||||
|
||||
def main() -> int:
    """Build the pool report and write the JSON + markdown artifacts."""
    report = PoolBuilder().build_pool_rows()
    for target in (OUT_JSON, OUT_MD):
        target.parent.mkdir(parents=True, exist_ok=True)
    OUT_JSON.write_text(json.dumps(report, indent=2) + "\n")
    OUT_MD.write_text(render_markdown(report) + "\n")
    for written in (OUT_JSON, OUT_MD):
        print(f"Wrote {written.relative_to(ROOT)}")
    return 0
|
||||
|
||||
|
||||
# Script entry point.
if __name__ == "__main__":
    sys.exit(main())
|
||||
317
scripts/verify/build-network-deployment-inventory.mjs
Normal file
317
scripts/verify/build-network-deployment-inventory.mjs
Normal file
@@ -0,0 +1,317 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
const repoRoot = path.resolve(__dirname, "..", "..");
|
||||
|
||||
const deploymentStatusPath = path.join(repoRoot, "cross-chain-pmm-lps/config/deployment-status.json");
|
||||
const manifestPath = path.join(repoRoot, "atomic-swap-dapp/config/ecosystem-manifest.json");
|
||||
const liveRouteRegistryPath = path.join(repoRoot, "atomic-swap-dapp/config/live-route-registry.json");
|
||||
const routingRegistryPath = path.join(repoRoot, "config/routing-registry.json");
|
||||
const allMainnetProtocolSurfacePath = path.join(repoRoot, "config/allmainnet-non-dodo-protocol-surface.json");
|
||||
const allMainnetTokenDocPath = path.join(repoRoot, "docs/11-references/ALL_MAINNET_TOKEN_ADDRESSES.md");
|
||||
const markdownOutputPath = path.join(repoRoot, "reports/status/network-deployment-inventory-latest.md");
|
||||
const jsonOutputPath = path.join(repoRoot, "reports/status/network-deployment-inventory-latest.json");
|
||||
|
||||
// Load and parse a UTF-8 JSON file from disk.
function readJson(filePath) {
  const text = fs.readFileSync(filePath, "utf8");
  return JSON.parse(text);
}
|
||||
|
||||
// Write a file, creating its parent directory tree first.
function writeText(filePath, content) {
  const parentDir = path.dirname(filePath);
  fs.mkdirSync(parentDir, { recursive: true });
  fs.writeFileSync(filePath, content);
}
|
||||
|
||||
// Map a strict boolean to a table glyph; anything else renders as "?".
function normalizeBool(value) {
  switch (value) {
    case true:
      return "✓";
    case false:
      return "—";
    default:
      return "?";
  }
}
|
||||
|
||||
// Extract the rows of the first pipe-table under "### <sectionTitle>".
// Returns [] when the heading or a usable table (header + separator +
// data) is missing. Rows are objects keyed by header cell; rows lacking
// real Symbol/Address values (header echoes, separators) are dropped.
function parseMarkdownTable(markdown, sectionTitle) {
  const allLines = markdown.split("\n");
  const headingIndex = allLines.findIndex((line) => line.trim() === `### ${sectionTitle}`);
  if (headingIndex === -1) {
    return [];
  }

  // Collect the section body until the next same-or-higher-level heading.
  const body = [];
  for (let i = headingIndex + 1; i < allLines.length; i += 1) {
    const trimmed = allLines[i].trim();
    if (trimmed.startsWith("### ") || trimmed.startsWith("## ")) {
      break;
    }
    body.push(trimmed);
  }

  const pipeRows = body.filter((line) => line.startsWith("|"));
  if (pipeRows.length < 3) {
    return []; // need header row, separator row, and at least one data row
  }

  const splitCells = (line) =>
    line
      .split("|")
      .slice(1, -1)
      .map((cell) => cell.trim());

  const headers = splitCells(pipeRows[0]);

  return pipeRows
    .slice(2)
    .map((line) => {
      const cells = splitCells(line);
      return Object.fromEntries(headers.map((header, index) => [header, cells[index] ?? ""]));
    })
    .filter(
      (row) =>
        row.Symbol &&
        row.Symbol !== "Symbol" &&
        row.Symbol !== "--------" &&
        row.Address &&
        row.Address !== "Address" &&
        row.Address !== "---------"
    );
}
|
||||
|
||||
// Flatten the three known token sections of the ALL-mainnet token doc
// into a single normalized token list (backticks stripped from addresses).
function parseAllMainnetTokens(markdown) {
  const sectionTitles = ["Stablecoins", "Wrapped Tokens", "DeFi Tokens"];
  const tokens = [];
  for (const category of sectionTitles) {
    for (const row of parseMarkdownTable(markdown, category)) {
      tokens.push({
        category,
        token: row.Token,
        symbol: row.Symbol,
        address: row.Address?.replace(/`/g, "") ?? "",
        decimals: Number(row.Decimals || 0),
        status: row.Status,
        notes: row.Notes
      });
    }
  }
  return tokens;
}
|
||||
|
||||
// Bridge routes touching `chainId`: prefer proven live routes from the
// live-route registry; fall back to configured routing-registry entries.
function buildSpecialCaseBridgeList(liveRouteRegistry, routingRegistry, chainId) {
  const target = Number(chainId);
  const touches = (from, to) => Number(from) === target || Number(to) === target;

  const liveRoutes = (liveRouteRegistry.liveBridgeRoutes ?? [])
    .filter((route) => touches(route.fromChainId, route.toChainId))
    .map((route) => ({
      routeId: route.routeId,
      fromChainId: route.fromChainId,
      toChainId: route.toChainId,
      bridgeType: route.bridgeType,
      asset: route.assetSymbol,
      bridgeAddress: route.bridgeAddress,
      status: "live"
    }));

  if (liveRoutes.length > 0) {
    return liveRoutes;
  }

  return (routingRegistry.routes ?? [])
    .filter((route) => touches(route.fromChain, route.toChain))
    .map((route) => ({
      // Synthesize a stable id since routing-registry rows have none.
      routeId: `${route.fromChain}-${route.toChain}-${route.asset}-${route.pathType}`.toLowerCase(),
      fromChainId: route.fromChain,
      toChainId: route.toChain,
      bridgeType: route.pathType,
      asset: route.asset,
      bridgeAddress: route.bridgeAddress,
      status: "configured"
    }));
}
|
||||
|
||||
// Number of cW* token entries registered for a chain.
function countTokens(chainStatus) {
  const tokens = chainStatus.cwTokens ?? {};
  return Object.keys(tokens).length;
}

// Number of anchor addresses registered for a chain.
function countAnchors(chainStatus) {
  const anchors = chainStatus.anchorAddresses ?? {};
  return Object.keys(anchors).length;
}

// Length of an array-valued pool list, or 0 when absent/not an array.
function countPools(chainStatus, key) {
  const pools = chainStatus[key];
  return Array.isArray(pools) ? pools.length : 0;
}
|
||||
|
||||
// Curated follow-up checklists for the special-case networks; unknown
// chains get an empty list.
function buildNextTasks(chainId) {
  const tasksByChain = {
    138: [
      "Keep Chain 138 PMM inventory, live swap routes, and public manifest synchronized after every token or pool deployment.",
      "Replace remaining placeholder bridge metadata with canonical deployed addresses and keep bridge notes current.",
      "Re-run route, inventory, and explorer verification audits after any bridge or pool change.",
      "Maintain source-chain bridge support for CCIP, GRU, and Alltra routes as the canonical launch surface."
    ],
    1111: [
      "Keep Wemix marked `planned_gas_scaffold` and `bridgeAvailable: false` until a successful bridge proof transfer is recorded.",
      "Replace placeholder gas PMM pools and gas reference venues with real deployed venues or remove them from the launch inventory.",
      "Complete canonical Wemix bridge readiness: funding, proof transfer verification, and any required inbound/outbound confirmation.",
      "Add real same-chain settlement inventory only after WEMIX-side execution is actually deployed and routable."
    ],
    651940: [
      "Add real same-chain Alltra swap inventory if users should settle into assets other than direct bridge receive.",
      "Publish deployed PMM or public DEX pool addresses into canonical inventory instead of anchor-only metadata.",
      "Expand the canonical Alltra token/routing surface only when those assets are actually part of supported settlement flows.",
      "Keep AlltraAdapter bridge metadata, supported assets, and fee assumptions synchronized with deployed bridge behavior."
    ]
  };
  return tasksByChain[Number(chainId)] ?? [];
}
|
||||
|
||||
// Build the network deployment inventory: read the canonical config and
// doc inputs, derive per-chain counts, then write both the markdown and
// JSON artifacts under reports/status/.
function main() {
  const deploymentStatus = readJson(deploymentStatusPath);
  const manifest = readJson(manifestPath);
  const liveRouteRegistry = readJson(liveRouteRegistryPath);
  const routingRegistry = readJson(routingRegistryPath);
  const allMainnetProtocolSurface = readJson(allMainnetProtocolSurfacePath);
  const allMainnetTokenDoc = fs.readFileSync(allMainnetTokenDocPath, "utf8");

  // Manifest rows are keyed by stringified chainId for merge lookups below.
  const supportedNetworks = manifest.supportedNetworks ?? [];
  const manifestByChain = new Map(supportedNetworks.map((network) => [String(network.chainId), network]));
  const allMainnetTokens = parseAllMainnetTokens(allMainnetTokenDoc);

  // One row per chain: deployment-status is authoritative; manifest fills
  // gaps (name, activation state, bridge flag, route coverage).
  const chains = Object.entries(deploymentStatus.chains ?? {})
    .map(([chainId, status]) => {
      const manifestRow = manifestByChain.get(chainId) ?? {};
      return {
        chainId: Number(chainId),
        chainKey: chainId,
        network: status.name ?? manifestRow.name ?? `Chain ${chainId}`,
        activationState: status.activationState ?? manifestRow.activationState ?? "live",
        bridgeAvailable:
          typeof status.bridgeAvailable === "boolean"
            ? status.bridgeAvailable
            : Boolean(manifestRow.bridgeAvailable),
        tokenCount: countTokens(status),
        anchorCount: countAnchors(status),
        pmmPoolCount: countPools(status, "pmmPools"),
        volatilePoolCount: countPools(status, "pmmPoolsVolatile"),
        gasPmmPoolCount: countPools(status, "gasPmmPools"),
        referenceVenueCount: countPools(status, "gasReferenceVenues"),
        uniswapV2PoolCount: countPools(status, "uniswapV2Pools"),
        bridgeRouteCount: (manifestRow.routeCoverage?.inboundBridgeRoutes ?? 0) + (manifestRow.routeCoverage?.outboundBridgeRoutes ?? 0),
        cwTokens: status.cwTokens ?? {},
        anchorAddresses: status.anchorAddresses ?? {},
        gasMirrors: status.gasMirrors ?? {},
        gasQuoteAddresses: status.gasQuoteAddresses ?? {},
        specialCaseBridges: buildSpecialCaseBridgeList(liveRouteRegistry, routingRegistry, chainId)
      };
    })
    .sort((left, right) => left.chainId - right.chainId);

  // Special-case chains get extra detail: documented tokens (Alltra only)
  // and a curated next-task checklist.
  const specialCases = chains
    .filter((chain) => [138, 1111, 651940].includes(chain.chainId))
    .map((chain) => ({
      ...chain,
      documentedTokens:
        chain.chainId === 651940
          ? (allMainnetProtocolSurface.documentedTokens ?? allMainnetTokens)
          : [],
      nextTasks: buildNextTasks(chain.chainId)
    }));

  // Markdown: a summary table over all chains, then a detail section per
  // special-case chain.
  const markdown = [
    "# Network Deployment Inventory",
    "",
    "| Chain | Network | Activation | Bridge Available | Tokens | Anchors | PMM | Volatile | Gas PMM | Ref Venues | UniV2 | Bridge Routes |",
    "|---:|---|---|:---:|---:|---:|---:|---:|---:|---:|---:|---:|",
    ...chains.map(
      (chain) =>
        `| \`${chain.chainId}\` | ${chain.network} | \`${chain.activationState}\` | ${normalizeBool(chain.bridgeAvailable)} | ${chain.tokenCount} | ${chain.anchorCount} | ${chain.pmmPoolCount} | ${chain.volatilePoolCount} | ${chain.gasPmmPoolCount} | ${chain.referenceVenueCount} | ${chain.uniswapV2PoolCount} | ${chain.bridgeRouteCount} |`
    ),
    "",
    "## Special-Case Networks",
    "",
    ...specialCases.flatMap((chain) => {
      const bridgeLines =
        chain.specialCaseBridges.length > 0
          ? chain.specialCaseBridges.map(
              (bridge) =>
                `  - \`${bridge.fromChainId} -> ${bridge.toChainId}\` \`${bridge.bridgeType}\` \`${bridge.asset}\` at \`${bridge.bridgeAddress}\``
            )
          : ["  - none recorded"];

      const documentedTokenLines =
        chain.documentedTokens.length > 0
          ? [
              "",
              "- Documented ecosystem tokens:",
              ...chain.documentedTokens.map(
                (token) =>
                  `  - \`${token.symbol}\` ${token.address} (${token.decimals} decimals; ${token.category}; ${token.status})`
              )
            ]
          : [];

      return [
        `### \`${chain.chainId}\` ${chain.network}`,
        "",
        `- Activation state: \`${chain.activationState}\``,
        `- Bridge available: \`${chain.bridgeAvailable}\``,
        `- cW tokens: ${Object.keys(chain.cwTokens).length ? Object.keys(chain.cwTokens).join(", ") : "none"}`,
        `- Anchors: ${Object.keys(chain.anchorAddresses).length ? Object.entries(chain.anchorAddresses).map(([symbol, address]) => `${symbol}=${address}`).join(", ") : "none"}`,
        `- PMM pools: \`${chain.pmmPoolCount}\``,
        `- Gas PMM pools: \`${chain.gasPmmPoolCount}\``,
        `- Gas/reference venues: \`${chain.referenceVenueCount}\``,
        `- UniV2 pools: \`${chain.uniswapV2PoolCount}\``,
        "- Bridge routes:",
        ...bridgeLines,
        ...documentedTokenLines,
        "",
        "- Next tasks:",
        ...chain.nextTasks.map((task) => `  - ${task}`),
        ""
      ];
    })
  ].join("\n");

  // Machine-readable mirror of the markdown, plus provenance metadata.
  const json = {
    name: "Network Deployment Inventory",
    generatedAt: new Date().toISOString(),
    sourceFiles: [
      path.relative(repoRoot, deploymentStatusPath),
      path.relative(repoRoot, manifestPath),
      path.relative(repoRoot, liveRouteRegistryPath),
      path.relative(repoRoot, routingRegistryPath),
      path.relative(repoRoot, allMainnetProtocolSurfacePath),
      path.relative(repoRoot, allMainnetTokenDocPath)
    ],
    networks: chains.map((chain) => ({
      chainId: chain.chainId,
      network: chain.network,
      activationState: chain.activationState,
      bridgeAvailable: chain.bridgeAvailable,
      tokenCount: chain.tokenCount,
      anchorCount: chain.anchorCount,
      pmmPoolCount: chain.pmmPoolCount,
      volatilePoolCount: chain.volatilePoolCount,
      gasPmmPoolCount: chain.gasPmmPoolCount,
      referenceVenueCount: chain.referenceVenueCount,
      uniswapV2PoolCount: chain.uniswapV2PoolCount,
      bridgeRouteCount: chain.bridgeRouteCount
    })),
    specialCases: specialCases.map((chain) => ({
      chainId: chain.chainId,
      network: chain.network,
      activationState: chain.activationState,
      bridgeAvailable: chain.bridgeAvailable,
      cwTokens: chain.cwTokens,
      anchors: chain.anchorAddresses,
      gasMirrors: chain.gasMirrors,
      gasQuoteAddresses: chain.gasQuoteAddresses,
      bridgeRoutes: chain.specialCaseBridges,
      documentedTokens: chain.documentedTokens,
      nextTasks: chain.nextTasks
    }))
  };

  writeText(markdownOutputPath, `${markdown}\n`);
  writeText(jsonOutputPath, `${JSON.stringify(json, null, 2)}\n`);
  console.log(`Wrote ${path.relative(repoRoot, markdownOutputPath)}`);
  console.log(`Wrote ${path.relative(repoRoot, jsonOutputPath)}`);
}
|
||||
|
||||
main();
|
||||
@@ -29,9 +29,9 @@ done
|
||||
|
||||
echo "All required dependencies present: ${REQUIRED[*]}"
if [ ${#OPTIONAL_MISSING[@]} -gt 0 ]; then
  # Optional tooling is advisory only — report it, but keep exit code 0.
  # NOTE(review): the original span contained both the pre- and post-change
  # message blocks (diff-render artifact), which would print the notice
  # twice; only the newer wording is kept here.
  echo "Note — optional tools not in PATH: ${OPTIONAL_MISSING[*]}"
  echo "  To install (Debian/Ubuntu): sudo apt install -y sshpass rsync dnsutils iproute2 screen tmux htop shellcheck parallel sqlite3"
  echo "  (dig: dnsutils; ss: iproute2; wscat/websocat: npm install -g wscat or cargo install websocat)"
fi
exit 0
|
||||
|
||||
56
scripts/verify/export-liquidity-pools-compact-csv.py
Normal file
56
scripts/verify/export-liquidity-pools-compact-csv.py
Normal file
@@ -0,0 +1,56 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Export the compact liquidity-pools CSV from the generated master-map JSON."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import csv
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
# Repository root: two directories above scripts/verify/.
ROOT = Path(__file__).resolve().parents[2]
INPUT_JSON = ROOT / "reports/status/liquidity-pools-master-map-latest.json"
OUTPUT_CSV = ROOT / "reports/status/liquidity-pools-master-map-compact-latest.csv"

# Column order for the compact CSV; one row is emitted per pool.
FIELDNAMES = [
    "chainId",
    "network",
    "pair",
    "poolAddress",
    "baseTokenAddress",
    "quoteTokenAddress",
    "baseBalance",
    "quoteBalance",
    "status",
]
|
||||
|
||||
|
||||
def main() -> int:
    """Flatten the master-map JSON into one CSV row per pool."""
    report = json.loads(INPUT_JSON.read_text())
    OUTPUT_CSV.parent.mkdir(parents=True, exist_ok=True)

    with OUTPUT_CSV.open("w", newline="") as handle:
        writer = csv.DictWriter(handle, fieldnames=FIELDNAMES)
        writer.writeheader()
        for chain in report.get("chains", []):
            for pool in chain.get("pools", []):
                # Balance data is optional; missing sides fall back to "".
                balances = pool.get("balances") or {}
                writer.writerow(
                    {
                        "chainId": pool.get("chainId", chain.get("chainId")),
                        "network": pool.get("network", chain.get("network")),
                        "pair": f"{pool.get('baseSymbol')}/{pool.get('quoteSymbol')}",
                        "poolAddress": pool.get("poolAddress", ""),
                        "baseTokenAddress": pool.get("baseAddress", ""),
                        "quoteTokenAddress": pool.get("quoteAddress", ""),
                        "baseBalance": (balances.get("base") or {}).get("formatted", ""),
                        "quoteBalance": (balances.get("quote") or {}).get("formatted", ""),
                        "status": pool.get("status", ""),
                    }
                )

    print(f"Wrote {OUTPUT_CSV.relative_to(ROOT)}")
    return 0
|
||||
|
||||
|
||||
# Script entry point.
if __name__ == "__main__":
    raise SystemExit(main())
|
||||
409
scripts/verify/plan-mainnet-cwusdc-usdc-repeg.py
Normal file
409
scripts/verify/plan-mainnet-cwusdc-usdc-repeg.py
Normal file
@@ -0,0 +1,409 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from datetime import datetime, timezone
|
||||
from decimal import Decimal, ROUND_CEILING, getcontext
|
||||
from pathlib import Path
|
||||
|
||||
# High precision so raw<->unit conversions of 6-decimal amounts stay exact.
getcontext().prec = 42

# Repository root: two directories above scripts/verify/.
ROOT = Path(__file__).resolve().parents[2]
LATEST_SNAPSHOT = ROOT / "reports" / "status" / "mainnet-cwusdc-usdc-preflight-latest.json"
POLICY_PATH = ROOT / "config" / "extraction" / "mainnet-cwusdc-usdc-support-policy.json"
ROOT_ENV_PATH = ROOT / ".env"
SMOM_ENV_PATH = ROOT / "smom-dbis-138" / ".env"
ZERO_ADDRESS = "0x0000000000000000000000000000000000000000"
# Fallback token addresses used when not supplied via the environment.
DEFAULT_CWUSDC = "0x2de5F116bFcE3d0f922d9C8351e0c5Fc24b9284a"
DEFAULT_USDC = "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48"
SIX_DECIMALS = Decimal(10) ** 6  # both tokens use 6 decimals
ADDRESS_RE = re.compile(r"0x[a-fA-F0-9]{40}")  # EVM address anywhere in text
UINT_RE = re.compile(r"\b\d+\b")  # unsigned integer token
||||
|
||||
|
||||
def load_json(path: Path) -> dict:
    """Parse a JSON document from disk."""
    with path.open() as handle:
        return json.load(handle)
|
||||
|
||||
|
||||
def load_env_file(path: Path) -> dict[str, str]:
    """Parse KEY=VALUE pairs from a dotenv-style file.

    Blank lines, '#' comments, and lines without '=' are skipped; keys and
    values are trimmed and surrounding single/double quotes are stripped
    from values. A missing file yields an empty mapping.
    """
    parsed: dict[str, str] = {}
    if not path.exists():
        return parsed
    for raw in path.read_text().splitlines():
        entry = raw.strip()
        if not entry or entry.startswith("#") or "=" not in entry:
            continue
        key, _, value = entry.partition("=")
        parsed[key.strip()] = value.strip().strip('"').strip("'")
    return parsed
|
||||
|
||||
|
||||
def merged_env_values() -> dict[str, str]:
    """Merge the root and smom-dbis-138 .env files; the latter wins on conflicts."""
    merged: dict[str, str] = {}
    for env_path in (ROOT_ENV_PATH, SMOM_ENV_PATH):
        merged.update(load_env_file(env_path))
    return merged
|
||||
|
||||
|
||||
def resolve_env_value(key: str, env_values: dict[str, str], seen: set[str] | None = None) -> str:
|
||||
if seen is None:
|
||||
seen = set()
|
||||
if key in seen:
|
||||
return env_values.get(key, "")
|
||||
seen.add(key)
|
||||
value = os.environ.get(key) or env_values.get(key, "")
|
||||
if value.startswith("${") and value.endswith("}"):
|
||||
inner = value[2:-1]
|
||||
target = inner.split(":-", 1)[0]
|
||||
fallback = inner.split(":-", 1)[1] if ":-" in inner else ""
|
||||
resolved = resolve_env_value(target, env_values, seen)
|
||||
return resolved or fallback
|
||||
return value.rstrip("\r\n")
|
||||
|
||||
|
||||
def normalize_units(raw: int, decimals: int = 6) -> Decimal:
    """Convert a raw integer token amount into decimal units."""
    scale = Decimal(10) ** decimals
    return Decimal(raw) / scale
|
||||
|
||||
|
||||
def units_to_raw(units: Decimal, decimals: int = 6) -> int:
    """Convert decimal units to a raw integer amount, rounding up."""
    scaled = units * (Decimal(10) ** decimals)
    return int(scaled.to_integral_value(rounding=ROUND_CEILING))
|
||||
|
||||
|
||||
def decimal_max(a: Decimal, b: Decimal) -> Decimal:
    """Return the larger of two Decimals (ties return `a`)."""
    return b if b > a else a
|
||||
|
||||
|
||||
def parse_uint(value: str) -> int:
    """Extract the first unsigned integer token from a string.

    Raises:
        ValueError: when the string contains no integer token.
    """
    match = UINT_RE.search(value)
    if match is None:
        raise ValueError(f"could not parse integer from {value!r}")
    return int(match.group(0))
|
||||
|
||||
|
||||
def parse_address(value: str) -> str:
    """Extract the first EVM address (0x + 40 hex chars) from a string.

    Raises:
        ValueError: when no address is present.
    """
    found = ADDRESS_RE.search(value)
    if found is None:
        raise ValueError(f"could not parse address from {value!r}")
    return found.group(0)
|
||||
|
||||
|
||||
def cast_call(rpc_url: str, target: str, signature: str, *args: str) -> str:
    """Invoke `cast call` against an RPC endpoint and return trimmed stdout."""
    command = ["cast", "call", target, signature, *args, "--rpc-url", rpc_url]
    return subprocess.check_output(command, text=True).strip()
|
||||
|
||||
|
||||
def query_balance(rpc_url: str, token: str, holder: str) -> int:
    """Read an ERC-20 balanceOf(holder) for `token` via cast."""
    output = cast_call(rpc_url, token, "balanceOf(address)(uint256)", holder)
    return parse_uint(output)
|
||||
|
||||
|
||||
def derive_holder_from_private_key(env_values: dict[str, str]) -> str:
    """Derive the holder address from PRIVATE_KEY / KEEPER_PRIVATE_KEY.

    Returns "" when no usable key is configured (missing, or still an
    unresolved ${...} placeholder).
    """
    private_key = (
        resolve_env_value("PRIVATE_KEY", env_values)
        or resolve_env_value("KEEPER_PRIVATE_KEY", env_values)
    )
    if not private_key or "${" in private_key:
        return ""
    output = subprocess.check_output(
        ["cast", "wallet", "address", "--private-key", private_key], text=True
    ).strip()
    return parse_address(output)
|
||||
|
||||
|
||||
def shell_quote(value: str) -> str:
    """Single-quote a string for POSIX shells, escaping embedded quotes."""
    escaped = value.replace("'", "'\"'\"'")
    return f"'{escaped}'"


def command_block(lines: list[str]) -> str:
    """Join command lines into a newline-separated block."""
    return "\n".join(lines)
|
||||
|
||||
|
||||
def funding_status(required_raw: int, available_raw: int, decimals: int = 6) -> dict:
    """Summarize a funding requirement vs. availability, including any shortfall.

    All raw amounts are serialized as strings; `covered` is True when the
    available amount meets or exceeds the requirement.
    """
    shortfall_raw = required_raw - available_raw
    if shortfall_raw < 0:
        shortfall_raw = 0
    return {
        "requiredRaw": str(required_raw),
        "requiredUnits": str(normalize_units(required_raw, decimals)),
        "availableRaw": str(available_raw),
        "availableUnits": str(normalize_units(available_raw, decimals)),
        "shortfallRaw": str(shortfall_raw),
        "shortfallUnits": str(normalize_units(shortfall_raw, decimals)),
        "covered": shortfall_raw == 0,
    }
|
||||
|
||||
|
||||
def build_plan(snapshot: dict, policy: dict, env_values: dict[str, str], holder_override: str) -> dict:
    """Build a read-only repeg plan from a preflight snapshot and policy file.

    Computes reserve-parity and policy-floor shortfalls, checks whether the
    operator wallet (derived from PRIVATE_KEY unless `holder_override` is
    given) can fund them, and emits copy-paste operator commands. Performs
    only read-only RPC balance queries; never sends transactions.

    Raises RuntimeError when ETHEREUM_MAINNET_RPC cannot be resolved.
    """
    rpc_url = resolve_env_value("ETHEREUM_MAINNET_RPC", env_values)
    if not rpc_url:
        raise RuntimeError("missing ETHEREUM_MAINNET_RPC")

    summary = snapshot["summary"]
    public_health = snapshot["health"]["publicPairHealth"]
    # NOTE(review): defended_health is never read below; the summary fields
    # are used instead. Candidate for removal.
    defended_health = snapshot["health"]["defendedVenueHealth"]
    treasury = snapshot.get("treasuryManager") or {}

    # Simple 1:1 reserve parity on the defended pool: top the smaller side
    # up to the larger one.
    base_reserve_raw = int(summary["defendedBaseReserveRaw"])
    quote_reserve_raw = int(summary["defendedQuoteReserveRaw"])
    target_reserve_raw = max(base_reserve_raw, quote_reserve_raw)
    add_quote_raw = max(base_reserve_raw - quote_reserve_raw, 0)
    add_base_raw = max(quote_reserve_raw - base_reserve_raw, 0)

    # Public-pair shortfalls against the policy reserve floors.
    min_base_units = Decimal(str(policy["thresholds"]["minBaseReserveUnits"]))
    min_quote_units = Decimal(str(policy["thresholds"]["minQuoteReserveUnits"]))
    public_base_units = Decimal(str(summary["publicPairBaseReserveUnits"]))
    public_quote_units = Decimal(str(summary["publicPairQuoteReserveUnits"]))
    public_base_shortfall_units = decimal_max(min_base_units - public_base_units, Decimal(0))
    public_quote_shortfall_units = decimal_max(min_quote_units - public_quote_units, Decimal(0))
    public_base_shortfall_raw = units_to_raw(public_base_shortfall_units)
    public_quote_shortfall_raw = units_to_raw(public_quote_shortfall_units)

    max_automated_raw = int(policy["managedCycle"]["maxAutomatedFlashQuoteAmountRaw"])
    manager_available_raw = int(treasury.get("availableQuoteRaw") or 0)

    # Resolve the acting wallet and all on-chain addresses involved.
    holder = holder_override or derive_holder_from_private_key(env_values)
    cwusdc = resolve_env_value("CWUSDC_MAINNET", env_values) or DEFAULT_CWUSDC
    usdc = resolve_env_value("USDC_MAINNET", env_values) or DEFAULT_USDC
    manager = snapshot["resolvedAddresses"].get("treasuryManager") or ""
    receiver = snapshot["resolvedAddresses"].get("receiver") or ""
    defended_pool = snapshot["health"]["defendedVenue"]["poolAddress"]
    public_pair = snapshot["health"]["publicPair"]["poolAddress"]
    integration = resolve_env_value("DODO_PMM_INTEGRATION_MAINNET", env_values)
    router = resolve_env_value("CHAIN_1_UNISWAP_V2_ROUTER", env_values)

    # Inventory-check the holder; failures become blockers, not crashes,
    # and leave the balances at 0 so every funding check reads "not covered".
    holder_state = None
    holder_usdc_raw = 0
    holder_cwusdc_raw = 0
    holder_blockers: list[str] = []
    if holder and holder.lower() != ZERO_ADDRESS:
        try:
            holder_cwusdc_raw = query_balance(rpc_url, cwusdc, holder)
            holder_usdc_raw = query_balance(rpc_url, usdc, holder)
            holder_state = {
                "address": holder,
                "cwusdcBalanceRaw": str(holder_cwusdc_raw),
                "cwusdcBalanceUnits": str(normalize_units(holder_cwusdc_raw)),
                "usdcBalanceRaw": str(holder_usdc_raw),
                "usdcBalanceUnits": str(normalize_units(holder_usdc_raw)),
            }
        except Exception as exc:
            holder_blockers.append(f"holder balance query failed: {exc}")

    manager_funding = funding_status(max_automated_raw, manager_available_raw)
    defended_quote_funding = funding_status(add_quote_raw, holder_usdc_raw)
    public_base_funding = funding_status(public_base_shortfall_raw, holder_cwusdc_raw)
    public_quote_funding = funding_status(public_quote_shortfall_raw, holder_usdc_raw)

    # Collect hard blockers. Funding blockers are only raised when the
    # holder's balances were actually queried (holder_state is truthy).
    blockers: list[str] = []
    warnings = snapshot.get("warnings") or []
    if add_base_raw > 0:
        blockers.append("defended pool needs base-side top-up logic; current plan only supports quote-side top-up for this rail")
    if add_quote_raw > 0 and holder_state and not defended_quote_funding["covered"]:
        blockers.append(
            "operator wallet does not hold enough USDC to restore defended pool reserve parity; external funding is required"
        )
    if public_base_shortfall_raw > 0 and holder_state and not public_base_funding["covered"]:
        blockers.append(
            "operator wallet does not hold enough cWUSDC to reseed the public pair to policy floor; external mint/bridge is required"
        )
    if public_quote_shortfall_raw > 0 and holder_state and not public_quote_funding["covered"]:
        blockers.append(
            "operator wallet does not hold enough USDC to reseed the public pair to policy floor"
        )
    if manager_funding["covered"] is False and holder_state and holder_usdc_raw < max_automated_raw:
        blockers.append("operator wallet cannot fully fund even one max-sized automated defense cycle from current USDC balance")
    if not integration:
        blockers.append("missing DODO_PMM_INTEGRATION_MAINNET")
    if not router:
        blockers.append("missing CHAIN_1_UNISWAP_V2_ROUTER")
    if any("defended quote query failed" in warning for warning in warnings):
        blockers.append("defended pool quote preview reverted; set MIN_BASE_OUT_RAW manually before any quote-in trade")

    # Copy-paste command blocks for the operator; never executed here.
    operator_commands = {
        "rerunPreflight": "bash scripts/verify/snapshot-mainnet-cwusdc-usdc-preflight.sh",
        "rerunPlan": "bash scripts/verify/plan-mainnet-cwusdc-usdc-repeg.sh",
    }

    if manager and manager.lower() != ZERO_ADDRESS:
        operator_commands["fundManagerUsdc"] = command_block(
            [
                "source smom-dbis-138/scripts/load-env.sh >/dev/null",
                f"export USDC={usdc}",
                f"export MANAGER={manager}",
                f"export AMOUNT_RAW={max_automated_raw}",
                'cast send "$USDC" \'transfer(address,uint256)(bool)\' "$MANAGER" "$AMOUNT_RAW" \\',
                ' --private-key "$PRIVATE_KEY" --rpc-url "$ETHEREUM_MAINNET_RPC"',
            ]
        )

    if integration and add_quote_raw > 0:
        operator_commands["tradeDefendedPoolQuoteIn"] = command_block(
            [
                "source smom-dbis-138/scripts/load-env.sh >/dev/null",
                f"export CWUSDC={cwusdc}",
                f"export USDC={usdc}",
                f"export INTEGRATION={integration}",
                f"export POOL={defended_pool}",
                f"export QUOTE_IN_RAW={add_quote_raw}",
                # Deliberate placeholder: slippage bound must come from a dry run.
                "export MIN_BASE_OUT_RAW=REPLACE_AFTER_DRY_RUN",
                'cast send "$USDC" \'approve(address,uint256)(bool)\' "$INTEGRATION" "$QUOTE_IN_RAW" \\',
                ' --private-key "$PRIVATE_KEY" --rpc-url "$ETHEREUM_MAINNET_RPC"',
                'cast send "$INTEGRATION" \'swapExactIn(address,address,uint256,uint256)\' "$POOL" "$USDC" "$QUOTE_IN_RAW" "$MIN_BASE_OUT_RAW" \\',
                ' --private-key "$PRIVATE_KEY" --rpc-url "$ETHEREUM_MAINNET_RPC"',
            ]
        )

    # NOTE(review): the reseed command is only emitted when BOTH sides are
    # short; a single-sided shortfall produces no command. Confirm intended.
    if router and public_base_shortfall_raw > 0 and public_quote_shortfall_raw > 0:
        operator_commands["reseedPublicPair"] = command_block(
            [
                "source smom-dbis-138/scripts/load-env.sh >/dev/null",
                f"export ROUTER={router}",
                f"export CWUSDC={cwusdc}",
                f"export USDC={usdc}",
                f"export BASE_AMOUNT_RAW={public_base_shortfall_raw}",
                f"export QUOTE_AMOUNT_RAW={public_quote_shortfall_raw}",
                'export DEADLINE="$(( $(date +%s) + 3600 ))"',
                'export SIGNER="$(cast wallet address --private-key "$PRIVATE_KEY")"',
                'cast send "$CWUSDC" \'approve(address,uint256)(bool)\' "$ROUTER" "$BASE_AMOUNT_RAW" \\',
                ' --private-key "$PRIVATE_KEY" --rpc-url "$ETHEREUM_MAINNET_RPC"',
                'cast send "$USDC" \'approve(address,uint256)(bool)\' "$ROUTER" "$QUOTE_AMOUNT_RAW" \\',
                ' --private-key "$PRIVATE_KEY" --rpc-url "$ETHEREUM_MAINNET_RPC"',
                'cast send "$ROUTER" \'addLiquidity(address,address,uint256,uint256,uint256,uint256,address,uint256)\' \\',
                ' "$CWUSDC" "$USDC" "$BASE_AMOUNT_RAW" "$QUOTE_AMOUNT_RAW" "$BASE_AMOUNT_RAW" "$QUOTE_AMOUNT_RAW" "$SIGNER" "$DEADLINE" \\',
                ' --private-key "$PRIVATE_KEY" --rpc-url "$ETHEREUM_MAINNET_RPC"',
            ]
        )

    recommended_actions = [
        {
            "step": "fund_manager_for_one_max_cycle",
            "quoteAmountRaw": str(max_automated_raw),
            "quoteAmountUnits": str(normalize_units(max_automated_raw)),
            "status": "ready" if manager_funding["covered"] else "needs_usdc",
        },
        {
            "step": "sell_usdc_into_defended_pool_toward_simple_1_to_1_reserve_parity",
            "baseAmountRaw": str(add_base_raw),
            "quoteAmountRaw": str(add_quote_raw),
            "quoteAmountUnits": str(normalize_units(add_quote_raw)),
            "status": "ready" if add_quote_raw == 0 or defended_quote_funding["covered"] else "needs_usdc",
        },
        {
            "step": "reseed_public_pair_to_policy_floor",
            "baseAmountRaw": str(public_base_shortfall_raw),
            "baseAmountUnits": str(normalize_units(public_base_shortfall_raw)),
            "quoteAmountRaw": str(public_quote_shortfall_raw),
            "quoteAmountUnits": str(normalize_units(public_quote_shortfall_raw)),
            # NOTE(review): when base shortfall is 0 but quote shortfall is
            # not, this still reports "ready" — confirm intended.
            "status": (
                "ready"
                if public_base_shortfall_raw == 0
                or (public_base_funding["covered"] and public_quote_funding["covered"])
                else "needs_inventory"
            ),
        },
    ]

    return {
        "generatedAt": datetime.now(timezone.utc).isoformat(),
        "mode": "read_only_repeg_plan",
        "snapshotPath": str(LATEST_SNAPSHOT),
        "policyPath": str(POLICY_PATH),
        "inferenceNotes": [
            "Defended-pool 1:1 sizing is inferred from equal 6-decimal matched-rail tokens and reserve-balance parity.",
            "DODO PMM mid-price can still differ from reserve ratio; rerun preflight after each funding action.",
            "Public-pair reseed target uses the current policy reserve floors, not a smaller cosmetic liquidity target.",
        ],
        "resolvedAddresses": {
            "holder": holder or None,
            "cwusdc": cwusdc,
            "usdc": usdc,
            "publicPair": public_pair,
            "defendedPool": defended_pool,
            "treasuryManager": manager or None,
            "receiver": receiver or None,
            "dodoIntegration": integration or None,
            "uniswapV2Router": router or None,
        },
        "defendedVenue": {
            "midPrice": summary["defendedMidPrice"],
            "deviationBps": summary["defendedDeviationBps"],
            "baseReserveRaw": str(base_reserve_raw),
            "baseReserveUnits": str(normalize_units(base_reserve_raw)),
            "quoteReserveRaw": str(quote_reserve_raw),
            "quoteReserveUnits": str(normalize_units(quote_reserve_raw)),
            "simpleReserveParity": {
                "targetReservePerSideRaw": str(target_reserve_raw),
                "targetReservePerSideUnits": str(normalize_units(target_reserve_raw)),
                "addBaseRaw": str(add_base_raw),
                "addBaseUnits": str(normalize_units(add_base_raw)),
                "addQuoteRaw": str(add_quote_raw),
                "addQuoteUnits": str(normalize_units(add_quote_raw)),
            },
        },
        "publicLane": {
            "pairAddress": public_pair,
            "priceQuotePerBase": public_health["priceQuotePerBase"],
            "deviationBps": summary["publicPairDeviationBps"],
            "baseReserveUnits": str(public_base_units),
            "quoteReserveUnits": str(public_quote_units),
            "policyFloorBaseUnits": str(min_base_units),
            "policyFloorQuoteUnits": str(min_quote_units),
            "policyFloorBaseShortfallRaw": str(public_base_shortfall_raw),
            "policyFloorBaseShortfallUnits": str(normalize_units(public_base_shortfall_raw)),
            "policyFloorQuoteShortfallRaw": str(public_quote_shortfall_raw),
            "policyFloorQuoteShortfallUnits": str(normalize_units(public_quote_shortfall_raw)),
        },
        "automation": {
            "managerAvailableQuoteRaw": str(manager_available_raw),
            "managerAvailableQuoteUnits": str(normalize_units(manager_available_raw)),
            "maxAutomatedFlashQuoteAmountRaw": str(max_automated_raw),
            "maxAutomatedFlashQuoteAmountUnits": str(normalize_units(max_automated_raw)),
            "managerFundingForOneMaxCycle": manager_funding,
        },
        "holderState": holder_state,
        "holderFundingChecks": {
            "defendedQuoteTopUp": defended_quote_funding,
            "publicPairBaseTopUp": public_base_funding,
            "publicPairQuoteTopUp": public_quote_funding,
        },
        "recommendedActions": recommended_actions,
        "operatorCommands": operator_commands,
        "warnings": warnings,
        "blockers": holder_blockers + blockers,
        "status": {
            "canFullyReachSimple1To1WithCurrentHolder": len(holder_blockers + blockers) == 0,
            "needsExternalFunding": (
                not defended_quote_funding["covered"]
                or not public_base_funding["covered"]
                or not public_quote_funding["covered"]
            ),
            "canFundManagerFromCurrentHolder": holder_usdc_raw >= max_automated_raw if holder_state else None,
        },
    }
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point: load snapshot + policy, build the plan, print/write JSON.

    Returns 0 on success; raises RuntimeError when the snapshot file is missing.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--snapshot", default=str(LATEST_SNAPSHOT), help="Path to a preflight snapshot JSON.")
    parser.add_argument("--holder", default="", help="Optional holder address to inventory-check instead of deriving from PRIVATE_KEY.")
    parser.add_argument("--out", help="Write the plan JSON to this file.")
    args = parser.parse_args()

    snapshot_path = Path(args.snapshot)
    if not snapshot_path.exists():
        raise RuntimeError(f"missing snapshot file: {snapshot_path}")

    snapshot = load_json(snapshot_path)
    policy = load_json(POLICY_PATH)
    env_values = merged_env_values()
    plan = build_plan(snapshot, policy, env_values, args.holder.strip())

    # Always print the plan; optionally persist it (with trailing newline).
    output = json.dumps(plan, indent=2)
    if args.out:
        out_path = Path(args.out)
        out_path.parent.mkdir(parents=True, exist_ok=True)
        out_path.write_text(output + "\n")
    print(output)
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Propagate main()'s status code to the shell.
    sys.exit(main())
|
||||
50
scripts/verify/plan-mainnet-cwusdc-usdc-repeg.sh
Normal file
50
scripts/verify/plan-mainnet-cwusdc-usdc-repeg.sh
Normal file
@@ -0,0 +1,50 @@
|
||||
#!/usr/bin/env bash
#
# Generate a read-only cwUSDC/USDC repeg plan and publish it under
# reports/status/ as a timestamped file plus a stable "latest" pointer,
# then print a short human-readable digest.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"

# shellcheck source=/home/intlc/projects/proxmox/scripts/lib/load-project-env.sh
source "${PROJECT_ROOT}/scripts/lib/load-project-env.sh"

PLAN_PY="${PROJECT_ROOT}/scripts/verify/plan-mainnet-cwusdc-usdc-repeg.py"
STAMP="$(date -u +%Y%m%dT%H%M%SZ)"
OUT_DIR="${PROJECT_ROOT}/reports/status"
OUT_FILE="${OUT_DIR}/mainnet-cwusdc-usdc-repeg-plan-${STAMP}.json"
LATEST_FILE="${OUT_DIR}/mainnet-cwusdc-usdc-repeg-plan-latest.json"

mkdir -p "${OUT_DIR}"

# Fix: generate into the timestamped file first and only update the latest
# pointer after the generator succeeds. The previous redirection
# (`python3 ... > "${LATEST_FILE}"`) truncated/clobbered the latest pointer
# even when plan generation failed partway through.
python3 "${PLAN_PY}" --out "${OUT_FILE}" >/dev/null
cp -- "${OUT_FILE}" "${LATEST_FILE}"

echo "Wrote repeg plan:"
echo " ${OUT_FILE}"
echo "Updated latest pointer:"
echo " ${LATEST_FILE}"
echo

# Human-readable digest of the freshly written plan.
python3 - <<'PY' "${LATEST_FILE}"
import json, sys
from pathlib import Path

data = json.loads(Path(sys.argv[1]).read_text())
defended = data["defendedVenue"]["simpleReserveParity"]
public_lane = data["publicLane"]
automation = data["automation"]["managerFundingForOneMaxCycle"]
holder = data.get("holderState") or {}

print("Summary:")
print(f" defendedMidPrice={data['defendedVenue']['midPrice']}")
print(f" defendedAddQuoteUnits={defended['addQuoteUnits']}")
print(f" publicPolicyFloorBaseShortfallUnits={public_lane['policyFloorBaseShortfallUnits']}")
print(f" publicPolicyFloorQuoteShortfallUnits={public_lane['policyFloorQuoteShortfallUnits']}")
print(f" managerFundingShortfallUnits={automation['shortfallUnits']}")
if holder:
    print(f" holder={holder['address']}")
    print(f" holderCwusdcUnits={holder['cwusdcBalanceUnits']}")
    print(f" holderUsdcUnits={holder['usdcBalanceUnits']}")
blockers = data.get("blockers") or []
if blockers:
    print("Blockers:")
    for blocker in blockers:
        print(f" - {blocker}")
PY
|
||||
277
scripts/verify/snapshot-mainnet-cwusdc-usdc-preflight.py
Executable file
277
scripts/verify/snapshot-mainnet-cwusdc-usdc-preflight.py
Executable file
@@ -0,0 +1,277 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
from datetime import datetime, timezone
|
||||
from decimal import Decimal, getcontext
|
||||
from pathlib import Path
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
# High Decimal precision so raw uint256 token amounts convert without rounding.
getcontext().prec = 42

# Repository root: two levels above scripts/verify/.
ROOT = Path(__file__).resolve().parents[2]
CHECKER_PATH = ROOT / "scripts" / "verify" / "check-mainnet-cwusdc-usdc-support-health.py"
POLICY_PATH = ROOT / "config" / "extraction" / "mainnet-cwusdc-usdc-support-policy.json"
ROOT_ENV_PATH = ROOT / ".env"
SMOM_ENV_PATH = ROOT / "smom-dbis-138" / ".env"
ZERO_ADDRESS = "0x0000000000000000000000000000000000000000"
# Loose patterns for scraping integers / EVM addresses out of `cast` output.
UINT_RE = re.compile(r"\b\d+\b")
ADDRESS_RE = re.compile(r"0x[a-fA-F0-9]{40}")
# Foundry broadcast artifacts used as address fallbacks when env vars are unset.
BROADCAST_RECEIVER = ROOT / "smom-dbis-138" / "broadcast" / "DeployAaveQuotePushFlashReceiver.s.sol" / "1" / "run-latest.json"
BROADCAST_MANAGER = ROOT / "smom-dbis-138" / "broadcast" / "DeployQuotePushTreasuryManager.s.sol" / "1" / "run-latest.json"
|
||||
|
||||
|
||||
def load_json(path: Path) -> dict:
    """Read and deserialize a JSON document from `path`."""
    raw = path.read_text()
    return json.loads(raw)
|
||||
|
||||
|
||||
def load_env_file(path: Path) -> dict[str, str]:
    """Parse a KEY=VALUE .env file into a dict.

    A missing file yields {}. Comment lines, blank lines, and lines without
    '=' are skipped; keys/values are whitespace-trimmed and surrounding
    double/single quotes are stripped from values.
    """
    parsed: dict[str, str] = {}
    if not path.exists():
        return parsed
    for raw_entry in path.read_text().splitlines():
        entry = raw_entry.strip()
        if not entry or entry.startswith("#") or "=" not in entry:
            continue
        key, _, value = entry.partition("=")
        parsed[key.strip()] = value.strip().strip('"').strip("'")
    return parsed
|
||||
|
||||
|
||||
def merged_env_values() -> dict[str, str]:
    """Merge the root and smom-dbis-138 .env files; the submodule file wins on conflicts."""
    merged = dict(load_env_file(ROOT_ENV_PATH))
    merged.update(load_env_file(SMOM_ENV_PATH))
    return merged
|
||||
|
||||
|
||||
def resolve_env_value(key: str, env_values: dict[str, str], seen: set[str] | None = None) -> str:
|
||||
if seen is None:
|
||||
seen = set()
|
||||
if key in seen:
|
||||
return env_values.get(key, "")
|
||||
seen.add(key)
|
||||
value = os.environ.get(key) or env_values.get(key, "")
|
||||
if value.startswith("${") and value.endswith("}"):
|
||||
inner = value[2:-1]
|
||||
target = inner.split(":-", 1)[0]
|
||||
fallback = inner.split(":-", 1)[1] if ":-" in inner else ""
|
||||
resolved = resolve_env_value(target, env_values, seen)
|
||||
return resolved or fallback
|
||||
return value.rstrip("\r\n")
|
||||
|
||||
|
||||
def cast_call(rpc_url: str, target: str, signature: str, *args: str) -> str:
    """Run a read-only `cast call` against `target`/`signature` and return stripped stdout."""
    command = ["cast", "call", target, signature]
    command.extend(args)
    command += ["--rpc-url", rpc_url]
    return subprocess.check_output(command, text=True).strip()
|
||||
|
||||
|
||||
def parse_uint(value: str) -> int:
    """Extract the first whole decimal integer token from `value` (e.g. cast output).

    Raises ValueError when no integer token is present.
    """
    found = re.findall(r"\b\d+\b", value)
    if not found:
        raise ValueError(f"could not parse integer from {value!r}")
    return int(found[0])
|
||||
|
||||
|
||||
def parse_uints(value: str, count: int) -> list[int]:
    """Extract at least `count` whole integers from `value` and return the first `count`.

    Raises ValueError when fewer than `count` integer tokens are found.
    """
    found = [int(token) for token in re.findall(r"\b\d+\b", value)]
    if len(found) < count:
        raise ValueError(f"expected at least {count} integers from {value!r}")
    return found[:count]
|
||||
|
||||
|
||||
def parse_address(value: str) -> str:
    """Extract the first EVM address (``0x`` + 40 hex chars) found in `value`.

    Raises ValueError when no address-shaped token is present.
    """
    found = re.search(r"0x[a-fA-F0-9]{40}", value)
    if found is None:
        raise ValueError(f"could not parse address from {value!r}")
    return found.group(0)
|
||||
|
||||
|
||||
def normalize_units(raw: int, decimals: int) -> Decimal:
    """Scale a raw integer token amount down by 10**decimals into human units."""
    scale = Decimal(10) ** decimals
    return Decimal(raw) / scale
|
||||
|
||||
|
||||
def run_health_checker() -> dict:
    """Run the support-health checker script and parse its JSON stdout."""
    raw_output = subprocess.check_output(["python3", str(CHECKER_PATH)], text=True)
    return json.loads(raw_output)
|
||||
|
||||
|
||||
def read_latest_create_address(path: Path, expected_contract_name: str) -> str:
    """Return the newest CREATE-transaction address for a contract in a
    Foundry broadcast file, or "" when the file or transaction is absent."""
    if not path.exists():
        return ""
    broadcast = json.loads(path.read_text())
    # Walk newest-first so the most recent deployment wins.
    for tx in reversed(broadcast.get("transactions", [])):
        is_create = tx.get("transactionType") == "CREATE"
        if is_create and tx.get("contractName") == expected_contract_name:
            return str(tx.get("contractAddress") or "").strip()
    return ""
|
||||
|
||||
|
||||
def query_token_meta(rpc_url: str, token: str) -> dict:
    """Fetch decimals and symbol for an ERC-20 and bundle them with its address."""
    token_decimals = parse_uint(cast_call(rpc_url, token, "decimals()(uint8)"))
    token_symbol = cast_call(rpc_url, token, "symbol()(string)")
    return {"address": token, "symbol": token_symbol, "decimals": token_decimals}
|
||||
|
||||
|
||||
def query_manager_state(rpc_url: str, manager: str) -> dict:
    """Read the treasury manager's wiring and quote-token accounting via `cast call`.

    All balance figures are returned as stringified raw integers, then
    mirrored as decimal-normalized ``*Units`` fields using the quote token's
    own decimals. Read-only; raises on any failed cast invocation.
    """
    quote_token = parse_address(cast_call(rpc_url, manager, "quoteToken()(address)"))
    receiver = parse_address(cast_call(rpc_url, manager, "receiver()(address)"))
    state = {
        "manager": manager,
        "quoteToken": query_token_meta(rpc_url, quote_token),
        "receiver": receiver,
        "receiverOwner": parse_address(cast_call(rpc_url, manager, "receiverOwner()(address)")),
        # Substring check tolerates cast's textual boolean formatting.
        "isReceiverOwnedByManager": "true" in cast_call(rpc_url, manager, "isReceiverOwnedByManager()(bool)").lower(),
        "quoteBalanceRaw": str(parse_uint(cast_call(rpc_url, manager, "quoteBalance()(uint256)"))),
        "availableQuoteRaw": str(parse_uint(cast_call(rpc_url, manager, "availableQuote()(uint256)"))),
        "receiverSweepableQuoteRaw": str(parse_uint(cast_call(rpc_url, manager, "receiverSweepableQuote()(uint256)"))),
        "receiverReserveRetainedRaw": str(parse_uint(cast_call(rpc_url, manager, "receiverReserveRetained()(uint256)"))),
        "managerReserveRetainedRaw": str(parse_uint(cast_call(rpc_url, manager, "managerReserveRetained()(uint256)"))),
        "gasRecipient": parse_address(cast_call(rpc_url, manager, "gasRecipient()(address)")),
        "recycleRecipient": parse_address(cast_call(rpc_url, manager, "recycleRecipient()(address)")),
    }
    # Derive human-readable unit figures from the quote token's decimals.
    decimals = state["quoteToken"]["decimals"]
    state["quoteBalanceUnits"] = str(normalize_units(int(state["quoteBalanceRaw"]), decimals))
    state["availableQuoteUnits"] = str(normalize_units(int(state["availableQuoteRaw"]), decimals))
    state["receiverSweepableQuoteUnits"] = str(normalize_units(int(state["receiverSweepableQuoteRaw"]), decimals))
    state["receiverReserveRetainedUnits"] = str(normalize_units(int(state["receiverReserveRetainedRaw"]), decimals))
    state["managerReserveRetainedUnits"] = str(normalize_units(int(state["managerReserveRetainedRaw"]), decimals))
    return state
|
||||
|
||||
|
||||
def query_receiver_state(rpc_url: str, receiver: str, quote_token: str) -> dict:
    """Read the receiver's owner and its quote-token balance (read-only)."""
    owner = parse_address(cast_call(rpc_url, receiver, "owner()(address)"))
    balance_raw = parse_uint(cast_call(rpc_url, quote_token, "balanceOf(address)(uint256)", receiver))
    return {
        "receiver": receiver,
        "owner": owner,
        "quoteBalanceRaw": str(balance_raw),
    }
|
||||
|
||||
|
||||
def query_defended_quotes(rpc_url: str, defended_pool: str, trader: str, policy: dict) -> list[dict]:
    """Preview ``querySellQuote`` on the defended pool for each policy deviation tier.

    Returns one row per tier with the quoted base output and mt fee.
    """
    previews: list[dict] = []
    for tier in policy["managedCycle"]["quoteAmountByDeviationBps"]:
        amount_raw = int(tier["flashQuoteAmountRaw"])
        response = cast_call(
            rpc_url,
            defended_pool,
            "querySellQuote(address,uint256)(uint256,uint256)",
            trader,
            str(amount_raw),
        )
        base_out_raw, mt_fee_raw = parse_uints(response, 2)
        previews.append(
            {
                "minDeviationBps": int(tier["minDeviationBps"]),
                "flashQuoteAmountRaw": amount_raw,
                "receiveBaseAmountRaw": str(base_out_raw),
                "mtFeeRaw": str(mt_fee_raw),
            }
        )
    return previews
|
||||
|
||||
|
||||
def build_summary(snapshot: dict) -> dict:
    """Flatten the key health / treasury figures of a snapshot into one dict.

    Missing treasury data degrades to None-valued manager fields.
    """
    health = snapshot["health"]
    public = health["publicPairHealth"]
    defended = health["defendedVenueHealth"]
    decision = health["decision"]
    manager = snapshot.get("treasuryManager") or {}
    return {
        "publicPairDeviationBps": public.get("deviationBps"),
        "publicPairBaseReserveUnits": public.get("baseReserveUnits"),
        "publicPairQuoteReserveUnits": public.get("quoteReserveUnits"),
        "defendedMidPrice": defended.get("midPrice"),
        "defendedDeviationBps": defended.get("deviationBps"),
        "defendedBaseReserveRaw": defended.get("baseReserveRaw"),
        "defendedQuoteReserveRaw": defended.get("quoteReserveRaw"),
        "managerAvailableQuoteUnits": manager.get("availableQuoteUnits"),
        "receiverSweepableQuoteUnits": manager.get("receiverSweepableQuoteUnits"),
        "decisionAction": decision["action"],
        "decisionSeverity": decision["severity"],
        "flashQuoteAmountRaw": decision["flashQuoteAmountRaw"],
    }
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry point: assemble a read-only preflight snapshot and print/write it.

    Runs the health checker, resolves the manager/receiver addresses from env
    (falling back to Foundry broadcast artifacts), queries their on-chain
    state, and records every degraded step as a warning instead of failing.
    Raises RuntimeError only when no mainnet RPC URL can be resolved.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--out", help="Write the JSON snapshot to this file.")
    args = parser.parse_args()

    env_values = merged_env_values()
    policy = load_json(POLICY_PATH)
    health = run_health_checker()
    # First resolvable key from the policy's candidate RPC env keys wins.
    rpc_url = ""
    for key in policy["network"].get("rpcEnvKeys", []):
        rpc_url = resolve_env_value(key, env_values)
        if rpc_url:
            break
    if not rpc_url:
        raise RuntimeError("missing mainnet RPC URL")

    # Env vars take priority; broadcast artifacts are the fallback source.
    manager_addr = resolve_env_value("QUOTE_PUSH_TREASURY_MANAGER_MAINNET", env_values)
    receiver_addr = resolve_env_value("AAVE_QUOTE_PUSH_RECEIVER_MAINNET", env_values)
    if not receiver_addr:
        receiver_addr = read_latest_create_address(BROADCAST_RECEIVER, "AaveQuotePushFlashReceiver")
    if not manager_addr:
        manager_addr = read_latest_create_address(BROADCAST_MANAGER, "QuotePushTreasuryManager")
    defended_pool = health["defendedVenue"]["poolAddress"]

    treasury_manager = None
    receiver_state = None
    defended_quotes = []
    warnings: list[str] = []

    # Manager state; on success the manager's own receiver overrides the
    # env/broadcast-resolved receiver address.
    if manager_addr and manager_addr.lower() != ZERO_ADDRESS:
        try:
            treasury_manager = query_manager_state(rpc_url, manager_addr)
            receiver_addr = treasury_manager["receiver"]
        except Exception as exc:
            warnings.append(f"treasury manager query failed: {exc}")
    else:
        warnings.append("treasury manager query skipped: QUOTE_PUSH_TREASURY_MANAGER_MAINNET not configured")

    # Receiver state; quote token (and decimals) come from the manager when
    # available, otherwise from the public pair (decimals default to 6).
    if receiver_addr and receiver_addr.lower() != ZERO_ADDRESS:
        quote_token = treasury_manager["quoteToken"]["address"] if treasury_manager else health["publicPair"]["quoteAddress"]
        try:
            receiver_state = query_receiver_state(rpc_url, receiver_addr, quote_token)
            quote_decimals = treasury_manager["quoteToken"]["decimals"] if treasury_manager else 6
            receiver_state["quoteBalanceUnits"] = str(normalize_units(int(receiver_state["quoteBalanceRaw"]), quote_decimals))
        except Exception as exc:
            warnings.append(f"receiver query failed: {exc}")
    else:
        warnings.append("receiver query skipped: AAVE_QUOTE_PUSH_RECEIVER_MAINNET not configured")

    # Quote previews use the receiver as the trader address.
    trader = receiver_addr if receiver_addr and receiver_addr.lower() != ZERO_ADDRESS else ""
    if trader:
        try:
            defended_quotes = query_defended_quotes(rpc_url, defended_pool, trader, policy)
        except Exception as exc:
            warnings.append(f"defended quote query failed: {exc}")
    else:
        warnings.append("defended quote query skipped: no receiver configured")

    snapshot = {
        "generatedAt": datetime.now(timezone.utc).isoformat(),
        "mode": "read_only_preflight",
        "policyPath": str(POLICY_PATH),
        "checkerPath": str(CHECKER_PATH),
        "resolvedAddresses": {
            "receiver": receiver_addr or None,
            "treasuryManager": manager_addr or None,
        },
        "health": health,
        "treasuryManager": treasury_manager,
        "receiver": receiver_state,
        "defendedLaneQuotes": defended_quotes,
        "warnings": warnings,
    }
    snapshot["summary"] = build_summary(snapshot)

    # Always print the snapshot; optionally persist it (with trailing newline).
    output = json.dumps(snapshot, indent=2)
    if args.out:
        out_path = Path(args.out)
        out_path.parent.mkdir(parents=True, exist_ok=True)
        out_path.write_text(output + "\n")
    print(output)
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Propagate main()'s status code to the shell.
    sys.exit(main())
|
||||
45
scripts/verify/snapshot-mainnet-cwusdc-usdc-preflight.sh
Executable file
45
scripts/verify/snapshot-mainnet-cwusdc-usdc-preflight.sh
Executable file
@@ -0,0 +1,45 @@
|
||||
#!/usr/bin/env bash
#
# Capture a read-only cwUSDC/USDC preflight snapshot and publish it under
# reports/status/ as a timestamped file plus a stable "latest" pointer,
# then print a short human-readable summary.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"

# shellcheck source=/home/intlc/projects/proxmox/scripts/lib/load-project-env.sh
source "${PROJECT_ROOT}/scripts/lib/load-project-env.sh"

SNAPSHOT_PY="${PROJECT_ROOT}/scripts/verify/snapshot-mainnet-cwusdc-usdc-preflight.py"
STAMP="$(date -u +%Y%m%dT%H%M%SZ)"
OUT_DIR="${PROJECT_ROOT}/reports/status"
OUT_FILE="${OUT_DIR}/mainnet-cwusdc-usdc-preflight-${STAMP}.json"
LATEST_FILE="${OUT_DIR}/mainnet-cwusdc-usdc-preflight-latest.json"

mkdir -p "${OUT_DIR}"

# Fix: generate into the timestamped file first and only update the latest
# pointer after the generator succeeds. The previous redirection
# (`python3 ... > "${LATEST_FILE}"`) truncated/clobbered the latest pointer
# even when the snapshot run failed partway through.
python3 "${SNAPSHOT_PY}" --out "${OUT_FILE}" >/dev/null
cp -- "${OUT_FILE}" "${LATEST_FILE}"

echo "Wrote snapshot:"
echo " ${OUT_FILE}"
echo "Updated latest pointer:"
echo " ${LATEST_FILE}"
echo

# Human-readable digest of the freshly written snapshot.
python3 - <<'PY' "${LATEST_FILE}"
import json, sys
from pathlib import Path

data = json.loads(Path(sys.argv[1]).read_text())
s = data["summary"]
print("Summary:")
print(f" publicPairDeviationBps={s.get('publicPairDeviationBps')}")
print(f" publicPairBaseReserveUnits={s.get('publicPairBaseReserveUnits')}")
print(f" publicPairQuoteReserveUnits={s.get('publicPairQuoteReserveUnits')}")
print(f" defendedMidPrice={s.get('defendedMidPrice')}")
print(f" managerAvailableQuoteUnits={s.get('managerAvailableQuoteUnits')}")
print(f" receiverSweepableQuoteUnits={s.get('receiverSweepableQuoteUnits')}")
print(f" decision={s.get('decisionSeverity')}/{s.get('decisionAction')}")
print(f" flashQuoteAmountRaw={s.get('flashQuoteAmountRaw')}")
warnings = data.get("warnings") or []
if warnings:
    print("Warnings:")
    for warning in warnings:
        print(f" - {warning}")
PY
|
||||
Reference in New Issue
Block a user