chore: sync workspace — configs, docs, scripts, CI, pnpm, submodules
- Submodule pins: dbis_core, cross-chain-pmm-lps, mcp-proxmox (local, push may be pending), metamask-integration, smom-dbis-138 - Atomic swap + cross-chain-pmm-lps-publish, deploy-portal workflow, phoenix deploy-targets, routing/aggregator matrices - Docs, token-lists, forge proxy, phoenix API, runbooks, verify scripts Made-with: Cursor
This commit is contained in:
8
scripts/verify/build-cw-mesh-deployment-matrix.sh
Executable file
8
scripts/verify/build-cw-mesh-deployment-matrix.sh
Executable file
@@ -0,0 +1,8 @@
|
||||
#!/usr/bin/env bash
# Read-only matrix: deployment-status.json + Uni V2 pair-discovery JSON -> stdout (+ optional JSON).
# Thin wrapper: resolves the repository root, then forwards all CLI args to the
# Python implementation so the script can be run from any working directory.
set -euo pipefail

# Repo root is two directory levels above this script (scripts/verify/..).
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)"
cd "$ROOT"

python3 scripts/lib/cw_mesh_deployment_matrix.py "$@"
|
||||
452
scripts/verify/build-cw-public-price-table.py
Normal file
452
scripts/verify/build-cw-public-price-table.py
Normal file
@@ -0,0 +1,452 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import math
|
||||
import re
|
||||
import subprocess
|
||||
import time
|
||||
from collections import deque
|
||||
from dataclasses import dataclass
|
||||
from decimal import Decimal, InvalidOperation, getcontext
|
||||
from pathlib import Path
|
||||
|
||||
getcontext().prec = 50
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
DEPLOYMENT_STATUS = ROOT / "cross-chain-pmm-lps" / "config" / "deployment-status.json"
|
||||
UNISWAP_DISCOVERY = ROOT / "reports" / "extraction" / "promod-uniswap-v2-live-pair-discovery-latest.json"
|
||||
JSON_OUT = ROOT / "reports" / "status" / "cw-public-prices-latest.json"
|
||||
DOC_OUT = ROOT / "docs" / "03-deployment" / "CW_PUBLIC_NETWORK_PRICES.md"
|
||||
ROOT_ENV_PATH = ROOT / ".env"
|
||||
SMOM_ENV_PATH = ROOT / "smom-dbis-138" / ".env"
|
||||
ZERO_ADDRESS = "0x0000000000000000000000000000000000000000"
|
||||
UINT_RE = re.compile(r"\b\d+\b")
|
||||
|
||||
CHAIN_CONFIG = {
|
||||
"1": {"rpc_keys": ["ETHEREUM_MAINNET_RPC"]},
|
||||
"10": {"rpc_keys": ["OPTIMISM_RPC_URL", "OPTIMISM_MAINNET_RPC"]},
|
||||
"25": {"rpc_keys": ["CRONOS_RPC_URL", "CRONOS_MAINNET_RPC"]},
|
||||
"56": {"rpc_keys": ["BSC_RPC_URL", "BSC_MAINNET_RPC"]},
|
||||
"100": {"rpc_keys": ["GNOSIS_RPC_URL", "GNOSIS_MAINNET_RPC", "GNOSIS_RPC"]},
|
||||
"137": {"rpc_keys": ["POLYGON_MAINNET_RPC", "POLYGON_RPC_URL"]},
|
||||
"1111": {"rpc_keys": ["WEMIX_RPC_URL", "WEMIX_MAINNET_RPC"]},
|
||||
"8453": {"rpc_keys": ["BASE_RPC_URL", "BASE_MAINNET_RPC"]},
|
||||
"42161": {"rpc_keys": ["ARBITRUM_RPC_URL", "ARBITRUM_MAINNET_RPC"]},
|
||||
"42220": {"rpc_keys": ["CELO_RPC_URL", "CELO_MAINNET_RPC", "CELO_RPC"]},
|
||||
"43114": {"rpc_keys": ["AVALANCHE_RPC_URL", "AVALANCHE_MAINNET_RPC"]},
|
||||
}
|
||||
|
||||
STABLES = {"USDC": Decimal("1"), "USDT": Decimal("1")}
|
||||
|
||||
|
||||
@dataclass(frozen=True)
class Edge:
    """One-directional conversion edge between two token symbols.

    `ratio` is the amount of `dst` obtained per 1 unit of `src` at the quoted
    venue (see build_uniswap_edges/build_pmm_edges, which append both a
    forward and an inverse edge per live pair). Frozen so edges can be shared
    safely while the price graph is traversed.
    """

    src: str  # source token symbol
    dst: str  # destination token symbol
    ratio: Decimal  # dst units per 1 src unit
    venue: str  # "uniswap_v2" or "dodo_pmm"
    path_label: str  # human-readable pair label, e.g. "BASE/QUOTE"
    price_detail: str  # provenance of the ratio (direct vs inverse)
    liquidity_note: str  # health/liquidity context carried into the report
|
||||
|
||||
|
||||
def now() -> str:
    """Current UTC time as an ISO-8601 `YYYY-MM-DDTHH:MM:SSZ` string."""
    utc = time.gmtime()
    return (
        f"{utc.tm_year:04d}-{utc.tm_mon:02d}-{utc.tm_mday:02d}"
        f"T{utc.tm_hour:02d}:{utc.tm_min:02d}:{utc.tm_sec:02d}Z"
    )
|
||||
|
||||
|
||||
def load_json(path: Path) -> dict:
    """Parse the JSON document at *path* (callers expect a dict payload)."""
    with path.open() as handle:
        return json.load(handle)
|
||||
|
||||
|
||||
def write_json(path: Path, payload: dict) -> None:
    """Serialize *payload* as 2-space-indented JSON with a trailing newline.

    Parent directories are created on demand.
    """
    serialized = json.dumps(payload, indent=2)
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(serialized + "\n")
|
||||
|
||||
|
||||
def write_text(path: Path, text: str) -> None:
    """Write *text* trimmed of trailing whitespace, ending in exactly one newline.

    Parent directories are created on demand.
    """
    path.parent.mkdir(parents=True, exist_ok=True)
    trimmed = text.rstrip()
    path.write_text(f"{trimmed}\n")
|
||||
|
||||
|
||||
def load_env_file(path: Path) -> dict[str, str]:
    """Parse a dotenv-style file into a dict; a missing file yields {}.

    Skips blank lines, `#` comments, and lines without `=`; trims surrounding
    single/double quote characters from values.
    """
    values: dict[str, str] = {}
    if not path.exists():
        return values
    for raw_line in path.read_text().splitlines():
        stripped = raw_line.strip()
        if not stripped or stripped.startswith("#") or "=" not in stripped:
            continue
        key, _, rhs = stripped.partition("=")
        values[key.strip()] = rhs.strip().strip('"').strip("'")
    return values
|
||||
|
||||
|
||||
def merged_env_values() -> dict[str, str]:
    """Layer env sources: root .env, then smom .env, then the sourced shell env.

    Later layers override earlier ones, so the live shell environment wins.
    """
    merged: dict[str, str] = {}
    for layer in (
        load_env_file(ROOT_ENV_PATH),
        load_env_file(SMOM_ENV_PATH),
        load_env_from_shell(),
    ):
        merged.update(layer)
    return merged
|
||||
|
||||
|
||||
def load_env_from_shell() -> dict[str, str]:
    """Source smom-dbis-138/scripts/load-env.sh in a subshell and capture its env.

    Best-effort by design: returns {} when the loader is missing or the
    subshell exits non-zero.
    NOTE(review): multi-line environment values are not reconstructed — each
    `env` output line is parsed independently and lines without `=` are skipped.
    """
    import shlex  # local import: only needed to quote the loader path safely

    loader = ROOT / "smom-dbis-138" / "scripts" / "load-env.sh"
    if not loader.exists():
        return {}
    proc = subprocess.run(
        # Quote the path so a workspace root containing spaces or shell
        # metacharacters cannot break (or be injected into) the command line.
        ["bash", "-lc", f"source {shlex.quote(str(loader))} >/dev/null 2>&1 && env"],
        text=True,
        capture_output=True,
        timeout=15,
        check=False,
        cwd=ROOT,
    )
    if proc.returncode != 0:
        return {}
    values: dict[str, str] = {}
    for raw_line in proc.stdout.splitlines():
        if "=" not in raw_line:
            continue
        key, value = raw_line.split("=", 1)
        values[key.strip()] = value.strip()
    return values
|
||||
|
||||
|
||||
def resolve_env_value(key: str, env_values: dict[str, str], seen: set[str] | None = None) -> str:
|
||||
if seen is None:
|
||||
seen = set()
|
||||
if key in seen:
|
||||
return env_values.get(key, "")
|
||||
seen.add(key)
|
||||
value = env_values.get(key, "")
|
||||
if value.startswith("${") and value.endswith("}"):
|
||||
inner = value[2:-1]
|
||||
target = inner.split(":-", 1)[0]
|
||||
fallback = inner.split(":-", 1)[1] if ":-" in inner else ""
|
||||
resolved = resolve_env_value(target, env_values, seen)
|
||||
return resolved or fallback
|
||||
return value.rstrip("\r\n")
|
||||
|
||||
|
||||
def parse_uint(value: str) -> int:
    """Extract the first unsigned integer from `cast` output text.

    Bracketed segments (such as cast's `[1e18]` hints) are removed before the
    regex scan; a per-line leading-token scan is the fallback.

    Raises ValueError when no integer can be found.
    """
    without_brackets = re.sub(r"\[[^\]]*\]", "", value)
    found = re.findall(r"\b\d+\b", without_brackets)
    if found:
        return int(found[0])
    for candidate_line in value.splitlines():
        leading = candidate_line.strip().split(" ", 1)[0]
        if leading.isdigit():
            return int(leading)
    raise ValueError(f"could not parse integer from {value!r}")
|
||||
|
||||
|
||||
def parse_uints(value: str, count: int) -> list[int]:
    """Extract at least *count* unsigned integers from `cast` output.

    Returns the first *count* matches; raises ValueError when too few are found.
    """
    without_brackets = re.sub(r"\[[^\]]*\]", "", value)
    found = [int(hit) for hit in re.findall(r"\b\d+\b", without_brackets)]
    if len(found) >= count:
        return found[:count]
    # Regex pass came up short: rescan raw lines for leading digit tokens.
    found = [
        int(token)
        for token in (line.strip().split(" ", 1)[0] for line in value.splitlines())
        if token.isdigit()
    ]
    if len(found) < count:
        raise ValueError(f"expected {count} integers, got {value!r}")
    return found[:count]
|
||||
|
||||
|
||||
def cast_call(rpc_url: str, target: str, signature: str, *args: str) -> str:
    """Run `cast call` against *target* and return trimmed stdout.

    Raises RuntimeError (with the most informative stream available) on a
    non-zero exit.
    """
    command = ["cast", "call", target, signature, *args, "--rpc-url", rpc_url]
    proc = subprocess.run(command, text=True, capture_output=True, timeout=3, check=False)
    if proc.returncode == 0:
        return proc.stdout.strip()
    detail = proc.stderr.strip() or proc.stdout.strip() or "cast call failed"
    raise RuntimeError(detail)
|
||||
|
||||
|
||||
def safe_decimal(value: str | int | float | Decimal | None) -> Decimal | None:
|
||||
if value is None:
|
||||
return None
|
||||
try:
|
||||
return Decimal(str(value))
|
||||
except (InvalidOperation, ValueError):
|
||||
return None
|
||||
|
||||
|
||||
def format_decimal(value: Decimal | None, places: int = 8) -> str:
|
||||
if value is None:
|
||||
return "not found"
|
||||
quant = Decimal(10) ** -places
|
||||
try:
|
||||
rounded = value.quantize(quant)
|
||||
except InvalidOperation:
|
||||
return str(value)
|
||||
return format(rounded, "f")
|
||||
|
||||
|
||||
def normalize_18(raw: int) -> Decimal:
    """Convert an 18-decimal fixed-point integer (wei-style) to a Decimal."""
    scale = Decimal(10) ** 18
    return Decimal(raw) / scale
|
||||
|
||||
|
||||
def rpc_for_chain(chain_id: str, env_values: dict[str, str]) -> str:
    """Resolve an RPC URL for *chain_id*; returns "" when none is configured.

    Ethereum mainnet prefers a constructed Infura URL when INFURA_PROJECT_ID
    is set; otherwise the chain's configured env keys are tried in order.
    """
    if chain_id == "1":
        project_id = resolve_env_value("INFURA_PROJECT_ID", env_values)
        if project_id:
            return f"https://mainnet.infura.io/v3/{project_id}"
    candidate_keys = CHAIN_CONFIG.get(chain_id, {}).get("rpc_keys", [])
    for candidate in candidate_keys:
        url = resolve_env_value(candidate, env_values)
        if url:
            return url
    return ""
|
||||
|
||||
|
||||
def build_uniswap_edges(entry: dict) -> list[Edge]:
    """Turn a chain's Uniswap V2 pair-discovery entry into bidirectional Edges.

    Only rows flagged `live` with a positive `priceQuotePerBase` contribute;
    each such pair yields a forward edge (base -> quote at the reserve ratio)
    and an inverse edge (quote -> base at 1/ratio).
    """
    edges: list[Edge] = []
    for row in entry.get("pairsChecked") or []:
        if not row.get("live"):
            continue
        health = row.get("health") or {}
        # priceQuotePerBase: quote units per 1 base unit from pool reserves.
        price = safe_decimal(health.get("priceQuotePerBase"))
        if price is None or price <= 0:
            continue
        base = row["base"]
        quote = row["quote"]
        pair = f"{base}/{quote}"
        addr = row.get("poolAddress") or ""
        reserves = f"base={health.get('baseReserveUnits', '?')}, quote={health.get('quoteReserveUnits', '?')}"
        # Shared liquidity/health context attached to both edge directions.
        liquidity_note = (
            f"Uniswap V2 pair {addr}; healthy={health.get('healthy')}; "
            f"depthOk={health.get('depthOk')}; parityOk={health.get('parityOk')}; {reserves}"
        )
        edges.append(
            Edge(
                src=base,
                dst=quote,
                ratio=price,
                venue="uniswap_v2",
                path_label=pair,
                price_detail=f"reserve ratio from {pair}",
                liquidity_note=liquidity_note,
            )
        )
        edges.append(
            Edge(
                src=quote,
                dst=base,
                ratio=Decimal(1) / price,
                venue="uniswap_v2",
                path_label=pair,
                price_detail=f"inverse reserve ratio from {pair}",
                liquidity_note=liquidity_note,
            )
        )
    return edges
|
||||
|
||||
|
||||
def build_pmm_edges(chain: dict, rpc_url: str) -> tuple[list[Edge], list[dict]]:
    """Read live mid-prices from the chain's DODO PMM pools via `cast call`.

    Returns (edges, snapshots): bidirectional Edges for each pool with a
    positive mid-price, plus a per-pool snapshot row (mid-price on success,
    error string on failure). With no RPC URL both lists stay empty.
    """
    edges: list[Edge] = []
    snapshots: list[dict] = []
    if not rpc_url:
        return edges, snapshots

    for row in chain.get("pmmPools") or []:
        pool = row.get("poolAddress") or ""
        base = row.get("base")
        quote = row.get("quote")
        # Skip placeholder / incomplete pool rows (zero address or missing symbols).
        if not pool or pool.lower() == ZERO_ADDRESS or not base or not quote:
            continue
        try:
            # getMidPrice() returns an 18-decimal fixed-point uint256.
            mid_price = normalize_18(parse_uint(cast_call(rpc_url, pool, "getMidPrice()(uint256)")))
        except Exception as exc:
            # Best-effort: record the failure in the snapshot and keep going.
            snapshots.append(
                {
                    "base": base,
                    "quote": quote,
                    "poolAddress": pool,
                    "venue": row.get("venue", "dodo_pmm"),
                    "error": str(exc),
                }
            )
            continue

        if mid_price <= 0:
            continue

        pair = f"{base}/{quote}"
        liquidity_note = f"DODO PMM {pool}; midPrice={mid_price}"
        edges.append(
            Edge(
                src=base,
                dst=quote,
                ratio=mid_price,
                venue="dodo_pmm",
                path_label=pair,
                price_detail=f"PMM mid price from {pair}",
                liquidity_note=liquidity_note,
            )
        )
        edges.append(
            Edge(
                src=quote,
                dst=base,
                ratio=Decimal(1) / mid_price,
                venue="dodo_pmm",
                path_label=pair,
                price_detail=f"inverse PMM mid price from {pair}",
                liquidity_note=liquidity_note,
            )
        )
        snapshots.append(
            {
                "base": base,
                "quote": quote,
                "poolAddress": pool,
                "venue": row.get("venue", "dodo_pmm"),
                "midPrice": str(mid_price),
            }
        )
    return edges, snapshots
|
||||
|
||||
|
||||
def best_prices_for_chain(chain: dict, edges: list[Edge]) -> dict[str, dict]:
    """Derive a USD price for each of the chain's cW tokens via BFS over *edges*.

    USDC/USDT are anchored at 1 USD; the traversal prefers the fewest hops and,
    on a hop tie, prefers paths that include a dodo_pmm venue. Returns one row
    per symbol in `cwTokens` (a "not found" row when no anchor path exists).
    """
    adjacency: dict[str, list[Edge]] = {}
    for edge in edges:
        adjacency.setdefault(edge.src, []).append(edge)

    best: dict[str, dict] = {}
    # Queue entries: (token, usd_price, path steps, venues, notes, hop count).
    queue: deque[tuple[str, Decimal, list[str], list[str], list[str], int]] = deque()

    for stable, price in STABLES.items():
        best[stable] = {
            "price": price,
            "steps": [],
            "venues": [],
            "notes": [f"{stable} anchored at 1 USD"],
            "hops": 0,
        }
        queue.append((stable, price, [], [], [f"{stable} anchored at 1 USD"], 0))

    while queue:
        token, usd_price, steps, venues, notes, hops = queue.popleft()
        for edge in adjacency.get(token, []):
            # USD per src divided by (dst per src) = USD per dst.
            next_price = usd_price / edge.ratio
            next_steps = steps + [edge.path_label]
            next_venues = venues + [edge.venue]
            next_notes = notes + [edge.liquidity_note]
            next_hops = hops + 1
            current = best.get(edge.dst)
            should_replace = current is None or next_hops < current["hops"]
            if not should_replace and current is not None and next_hops == current["hops"]:
                # Hop tie-break: a path that touches a dodo_pmm venue wins.
                current_venue_score = 0 if "dodo_pmm" in current["venues"] else 1
                next_venue_score = 0 if "dodo_pmm" in next_venues else 1
                should_replace = next_venue_score < current_venue_score
            if should_replace:
                best[edge.dst] = {
                    "price": next_price,
                    "steps": next_steps,
                    "venues": next_venues,
                    "notes": next_notes,
                    "hops": next_hops,
                }
                queue.append((edge.dst, next_price, next_steps, next_venues, next_notes, next_hops))

    out: dict[str, dict] = {}
    for symbol in sorted((chain.get("cwTokens") or {}).keys()):
        resolution = best.get(symbol)
        if resolution is None:
            out[symbol] = {
                "priceUsd": None,
                "derivedFrom": "not found",
                "sourceType": "not_found",
                "notes": ["No live direct or bridged price path was found from USDC/USDT anchors."],
            }
            continue
        out[symbol] = {
            "priceUsd": str(resolution["price"]),
            "derivedFrom": " -> ".join(resolution["steps"]) if resolution["steps"] else "stable anchor",
            "sourceType": resolution["venues"][0] if resolution["venues"] else "stable_anchor",
            "notes": resolution["notes"],
        }
    return out
|
||||
|
||||
|
||||
def build_report() -> dict:
    """Assemble the full price report across all deployed chains.

    Combines the static deployment inventory, the Uniswap discovery snapshot,
    and live PMM mid-price reads into one JSON-serializable payload.
    """
    env_values = merged_env_values()
    deployment = load_json(DEPLOYMENT_STATUS)
    discovery = load_json(UNISWAP_DISCOVERY)
    discovery_by_chain = {str(entry["chain_id"]): entry for entry in discovery.get("entries") or []}

    chains_out: list[dict] = []
    for chain_id, chain in sorted((deployment.get("chains") or {}).items(), key=lambda item: int(item[0])):
        # Chain 138 is excluded from the public table (presumably the private
        # smom-dbis-138 network — confirm against deployment-status.json).
        if int(chain_id) == 138:
            continue
        rpc_url = rpc_for_chain(chain_id, env_values)
        uniswap_edges = build_uniswap_edges(discovery_by_chain.get(chain_id, {}))
        pmm_edges, pmm_snapshots = build_pmm_edges(chain, rpc_url)
        price_rows = best_prices_for_chain(chain, uniswap_edges + pmm_edges)
        chains_out.append(
            {
                "chainId": int(chain_id),
                "network": chain.get("name", ""),
                "activationState": chain.get("activationState", ""),
                "rpcConfigured": bool(rpc_url),
                "prices": price_rows,
                "pmmSnapshots": pmm_snapshots,
            }
        )

    return {
        "generatedAt": now(),
        "inputs": {
            "deploymentStatus": str(DEPLOYMENT_STATUS),
            "uniswapDiscovery": str(UNISWAP_DISCOVERY),
        },
        "chains": chains_out,
    }
|
||||
|
||||
|
||||
def render_markdown(payload: dict) -> str:
    """Render the report payload as a Markdown price table.

    The chain cell is only printed on a chain's first row; each chain section
    ends with an activation-state/RPC summary row.
    """
    lines = [
        "# cW Public Network Prices",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Deployment inventory: `{payload['inputs']['deploymentStatus']}`",
        f"- Uniswap discovery snapshot: `{payload['inputs']['uniswapDiscovery']}`",
        "- Price convention: USD per 1 token.",
        "- `not found` means the generator could not reach the token from a live USDC/USDT anchor using the current public-pair snapshot plus live PMM mid-price reads.",
        "",
        "| Chain | Token | Price (USD) | Derived From | Source | Notes |",
        "|---|---|---:|---|---|---|",
    ]

    for chain in payload["chains"]:
        first_row = True
        prices = chain["prices"]
        for symbol in sorted(prices.keys()):
            row = prices[symbol]
            chain_cell = f"`{chain['chainId']}` {chain['network']}" if first_row else ""
            first_row = False
            # Cap notes at two entries to keep table cells readable.
            notes = "; ".join(row["notes"][:2])
            lines.append(
                f"| {chain_cell} | `{symbol}` | `{format_decimal(safe_decimal(row['priceUsd']))}` | "
                f"`{row['derivedFrom']}` | `{row['sourceType']}` | {notes} |"
            )
        if prices:
            lines.append(
                f"| | | | | | Activation state: `{chain['activationState'] or 'active'}`; RPC configured: `{chain['rpcConfigured']}` |"
            )
    return "\n".join(lines)
|
||||
|
||||
|
||||
def main() -> None:
    """Generate the price report, persist JSON + Markdown, and echo output paths."""
    report = build_report()
    write_json(JSON_OUT, report)
    markdown = render_markdown(report)
    write_text(DOC_OUT, markdown)
    for produced in (JSON_OUT, DOC_OUT):
        print(produced)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
763
scripts/verify/build-cw-public-repeg-plan.py
Normal file
763
scripts/verify/build-cw-public-repeg-plan.py
Normal file
@@ -0,0 +1,763 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import re
|
||||
import subprocess
|
||||
import time
|
||||
from decimal import Decimal, InvalidOperation, getcontext
|
||||
from pathlib import Path
|
||||
|
||||
getcontext().prec = 50
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
PRICE_REPORT = ROOT / "reports" / "status" / "cw-public-prices-latest.json"
|
||||
DEPLOYMENT_STATUS = ROOT / "cross-chain-pmm-lps" / "config" / "deployment-status.json"
|
||||
UNISWAP_DISCOVERY = ROOT / "reports" / "extraction" / "promod-uniswap-v2-live-pair-discovery-latest.json"
|
||||
ROOT_ENV_PATH = ROOT / ".env"
|
||||
SMOM_ENV_PATH = ROOT / "smom-dbis-138" / ".env"
|
||||
JSON_OUT = ROOT / "reports" / "status" / "cw-public-repeg-plan-latest.json"
|
||||
DOC_OUT = ROOT / "docs" / "03-deployment" / "CW_PUBLIC_NETWORK_REPEG_PLAN.md"
|
||||
ADDRESS_RE = re.compile(r"0x[a-fA-F0-9]{40}")
|
||||
UINT_RE = re.compile(r"\b\d+\b")
|
||||
|
||||
TARGETS = {
|
||||
"cWUSDC": Decimal("1"),
|
||||
"cWUSDT": Decimal("1"),
|
||||
"cWAUSDT": Decimal("1"),
|
||||
"cWUSDW": Decimal("1"),
|
||||
"cWEURC": Decimal("1.08"),
|
||||
"cWEURT": Decimal("1.08"),
|
||||
"cWGBPC": Decimal("1.27"),
|
||||
"cWGBPT": Decimal("1.27"),
|
||||
"cWAUDC": Decimal("0.66"),
|
||||
"cWJPYC": Decimal("0.0067"),
|
||||
"cWCHFC": Decimal("1.11"),
|
||||
"cWCADC": Decimal("0.74"),
|
||||
"cWXAUC": Decimal("3200"),
|
||||
"cWXAUT": Decimal("3200"),
|
||||
}
|
||||
|
||||
DODO_THRESHOLD_PCT = Decimal("1")
|
||||
UNISWAP_THRESHOLD_PCT = Decimal("1")
|
||||
|
||||
DODO_ENV_KEYS = {
|
||||
1: ["DODO_PMM_INTEGRATION_MAINNET"],
|
||||
10: ["DODO_PMM_INTEGRATION_OPTIMISM"],
|
||||
25: ["DODO_PMM_INTEGRATION_CRONOS"],
|
||||
56: ["DODO_PMM_INTEGRATION_BSC"],
|
||||
100: ["DODO_PMM_INTEGRATION_GNOSIS"],
|
||||
137: ["DODO_PMM_INTEGRATION_POLYGON"],
|
||||
42220: ["DODO_PMM_INTEGRATION_CELO"],
|
||||
43114: ["DODO_PMM_INTEGRATION_AVALANCHE"],
|
||||
8453: ["DODO_PMM_INTEGRATION_BASE"],
|
||||
42161: ["DODO_PMM_INTEGRATION_ARBITRUM"],
|
||||
}
|
||||
|
||||
RPC_ENV_KEYS = {
|
||||
1: ["ETHEREUM_MAINNET_RPC"],
|
||||
10: ["OPTIMISM_RPC_URL", "OPTIMISM_MAINNET_RPC"],
|
||||
25: ["CRONOS_RPC_URL", "CRONOS_MAINNET_RPC"],
|
||||
56: ["BSC_RPC_URL", "BSC_MAINNET_RPC"],
|
||||
100: ["GNOSIS_RPC_URL", "GNOSIS_MAINNET_RPC", "GNOSIS_RPC"],
|
||||
137: ["POLYGON_MAINNET_RPC", "POLYGON_RPC_URL"],
|
||||
42220: ["CELO_RPC_URL", "CELO_MAINNET_RPC", "CELO_RPC"],
|
||||
43114: ["AVALANCHE_RPC_URL", "AVALANCHE_MAINNET_RPC"],
|
||||
8453: ["BASE_RPC_URL", "BASE_MAINNET_RPC"],
|
||||
42161: ["ARBITRUM_RPC_URL", "ARBITRUM_MAINNET_RPC"],
|
||||
}
|
||||
|
||||
|
||||
def now() -> str:
    """Return the current UTC time formatted as YYYY-MM-DDTHH:MM:SSZ."""
    return f"{time.strftime('%Y-%m-%dT%H:%M:%S', time.gmtime())}Z"
|
||||
|
||||
|
||||
def load_json(path: Path) -> dict:
    """Read and deserialize the JSON file at *path*."""
    raw = path.read_text()
    return json.loads(raw)
|
||||
|
||||
|
||||
def load_env_file(path: Path) -> dict[str, str]:
    """Parse KEY=VALUE lines from a dotenv file; a missing file yields {}."""
    if not path.exists():
        return {}
    parsed: dict[str, str] = {}
    for raw in path.read_text().splitlines():
        entry = raw.strip()
        # Ignore blanks, comments, and lines that are not assignments.
        if not entry or entry.startswith("#") or "=" not in entry:
            continue
        name, rhs = entry.split("=", 1)
        parsed[name.strip()] = rhs.strip().strip('"').strip("'")
    return parsed
|
||||
|
||||
|
||||
def merged_env_values() -> dict[str, str]:
    """Combine .env files and the sourced shell environment; later layers win."""
    return {
        **load_env_file(ROOT_ENV_PATH),
        **load_env_file(SMOM_ENV_PATH),
        **load_env_from_shell(),
    }
|
||||
|
||||
|
||||
def load_env_from_shell() -> dict[str, str]:
    """Capture the environment exported by smom-dbis-138/scripts/load-env.sh.

    Best-effort: returns {} when the loader is absent or the subshell fails.
    NOTE(review): values containing newlines are not reconstructed from `env`
    output — each line is parsed independently.
    """
    import shlex  # local import: quote the loader path for the shell command

    loader = ROOT / "smom-dbis-138" / "scripts" / "load-env.sh"
    if not loader.exists():
        return {}
    proc = subprocess.run(
        [
            "bash",
            "-lc",
            # shlex.quote keeps a root path with spaces or metacharacters from
            # breaking (or injecting into) the sourced command line.
            f"source {shlex.quote(str(loader))} >/dev/null 2>&1 && env",
        ],
        text=True,
        capture_output=True,
        timeout=15,
        check=False,
        cwd=ROOT,
    )
    if proc.returncode != 0:
        return {}
    values: dict[str, str] = {}
    for raw_line in proc.stdout.splitlines():
        if "=" not in raw_line:
            continue
        key, value = raw_line.split("=", 1)
        values[key.strip()] = value.strip()
    return values
|
||||
|
||||
|
||||
def resolve_env_value(key: str, env_values: dict[str, str], seen: set[str] | None = None) -> str:
|
||||
if seen is None:
|
||||
seen = set()
|
||||
if key in seen:
|
||||
return env_values.get(key, "")
|
||||
seen.add(key)
|
||||
value = env_values.get(key, "")
|
||||
if value.startswith("${") and value.endswith("}"):
|
||||
inner = value[2:-1]
|
||||
target = inner.split(":-", 1)[0]
|
||||
fallback = inner.split(":-", 1)[1] if ":-" in inner else ""
|
||||
resolved = resolve_env_value(target, env_values, seen)
|
||||
return resolved or fallback
|
||||
return value.rstrip("\r\n")
|
||||
|
||||
|
||||
def cast_call(rpc_url: str, target: str, signature: str, *args: str) -> str:
    """Invoke `cast call` on *target*; returns trimmed stdout or raises RuntimeError."""
    proc = subprocess.run(
        ["cast", "call", target, signature, *args, "--rpc-url", rpc_url],
        text=True,
        capture_output=True,
        timeout=8,
        check=False,
    )
    if proc.returncode == 0:
        return proc.stdout.strip()
    message = proc.stderr.strip() or proc.stdout.strip() or "cast call failed"
    raise RuntimeError(message)
|
||||
|
||||
|
||||
def parse_uint(value: str) -> int:
    """Pull the first unsigned integer out of `cast` output text.

    Raises ValueError when no integer can be found.
    """
    scrubbed = re.sub(r"\[[^\]]*\]", "", value)  # drop bracketed hints such as `[1e18]`
    hits = re.findall(r"\b\d+\b", scrubbed)
    if hits:
        return int(hits[0])
    for raw in value.splitlines():
        head = raw.strip().split(" ", 1)[0]
        if head.isdigit():
            return int(head)
    raise ValueError(f"could not parse integer from {value!r}")
|
||||
|
||||
|
||||
def parse_uints(value: str, count: int) -> list[int]:
    """Return the first *count* unsigned integers found in `cast` output.

    Raises ValueError when fewer than *count* integers are present.
    """
    scrubbed = re.sub(r"\[[^\]]*\]", "", value)
    hits = [int(hit) for hit in re.findall(r"\b\d+\b", scrubbed)]
    if len(hits) < count:
        # Regex pass came up short: rescan raw lines for leading digit tokens.
        hits = []
        for raw in value.splitlines():
            head = raw.strip().split(" ", 1)[0]
            if head.isdigit():
                hits.append(int(head))
        if len(hits) < count:
            raise ValueError(f"expected {count} integers from {value!r}")
    return hits[:count]
|
||||
|
||||
|
||||
def normalize_units(raw: int, decimals: int) -> Decimal:
    """Scale a raw integer token amount down by 10**decimals."""
    scale = Decimal(10) ** decimals
    return Decimal(raw) / scale
|
||||
|
||||
|
||||
def write_json(path: Path, payload: dict) -> None:
    """Write *payload* as indented JSON, creating parent directories as needed."""
    path.parent.mkdir(parents=True, exist_ok=True)
    body = f"{json.dumps(payload, indent=2)}\n"
    path.write_text(body)
|
||||
|
||||
|
||||
def write_text(path: Path, text: str) -> None:
    """Persist *text*, trimmed of trailing whitespace, ending in a single newline."""
    target_dir = path.parent
    target_dir.mkdir(parents=True, exist_ok=True)
    trimmed = text.rstrip()
    path.write_text(f"{trimmed}\n")
|
||||
|
||||
|
||||
def safe_decimal(value: str | None) -> Decimal | None:
|
||||
if value is None:
|
||||
return None
|
||||
try:
|
||||
return Decimal(value)
|
||||
except (InvalidOperation, ValueError):
|
||||
return None
|
||||
|
||||
|
||||
def pct_deviation(current: Decimal, target: Decimal) -> Decimal:
    """Signed percent deviation of *current* from *target* (positive = above target)."""
    delta = current - target
    return delta / target * Decimal("100")
|
||||
|
||||
|
||||
def format_decimal(value: Decimal | None, places: int = 8) -> str:
|
||||
if value is None:
|
||||
return "not found"
|
||||
quant = Decimal(10) ** -places
|
||||
try:
|
||||
value = value.quantize(quant)
|
||||
except InvalidOperation:
|
||||
pass
|
||||
return format(value, "f")
|
||||
|
||||
|
||||
def format_pct(value: Decimal) -> str:
    """Render a percentage with explicit sign and two decimals, e.g. `+1.25%`."""
    prefix = "+" if value >= 0 else ""
    # Inline of format_decimal(value, 2): quantize to 2 places, fixed-point render.
    try:
        shown = value.quantize(Decimal("0.01"))
    except InvalidOperation:
        shown = value
    return f"{prefix}{format(shown, 'f')}%"
|
||||
|
||||
|
||||
def first_address(texts: list[str]) -> str | None:
    """Return the first 0x-prefixed 40-hex-digit address found across *texts*."""
    pattern = re.compile(r"0x[a-fA-F0-9]{40}")
    for blob in texts:
        hit = pattern.search(blob)
        if hit is not None:
            return hit.group(0)
    return None
|
||||
|
||||
|
||||
def command_block(lines: list[str]) -> str:
    """Join shell command lines into a single newline-separated block."""
    joined = "\n".join(lines)
    return joined
|
||||
|
||||
|
||||
def derive_deployer(env_values: dict[str, str]) -> str:
    """Derive the deployer address from a configured private key via `cast wallet`.

    Returns "" when no usable key is configured (missing, or still an
    unresolved `${...}` reference) or when `cast` fails. Best-effort only.
    """
    pk = resolve_env_value("PRIVATE_KEY", env_values) or resolve_env_value("KEEPER_PRIVATE_KEY", env_values)
    # An unresolved "${...}" placeholder is not a real key; bail out early.
    if not pk or "${" in pk:
        return ""
    proc = subprocess.run(
        ["cast", "wallet", "address", "--private-key", pk],
        text=True,
        capture_output=True,
        timeout=8,
        check=False,
    )
    if proc.returncode != 0:
        return ""
    # cast may print extra text; extract just the 0x... address token.
    match = ADDRESS_RE.search(proc.stdout.strip())
    return match.group(0) if match else ""
|
||||
|
||||
|
||||
def build_pair_to_pool(chain: dict) -> dict[str, list[dict]]:
    """Group a chain's PMM and Uniswap V2 pool rows by their "BASE/QUOTE" label.

    PMM rows are listed before Uniswap rows for the same pair, matching the
    order the sources are scanned.
    """
    grouped: dict[str, list[dict]] = {}
    combined = list(chain.get("pmmPools") or []) + list(chain.get("uniswapV2Pools") or [])
    for row in combined:
        label = f"{row['base']}/{row['quote']}"
        grouped.setdefault(label, []).append(row)
    return grouped
|
||||
|
||||
|
||||
def select_pool(pair_rows: list[dict], source_type: str) -> dict:
    """Pick the pool row matching *source_type*; fall back to the first row.

    Returns {} for an empty row list.
    """
    if not pair_rows:
        return {}
    if source_type == "dodo_pmm":
        dodo_match = next(
            (
                row
                for row in pair_rows
                if row.get("venue") == "dodo_pmm" or row.get("poolType") == "stable_quote" or "k" in row
            ),
            None,
        )
        if dodo_match is not None:
            return dodo_match
    if source_type == "uniswap_v2":
        uni_match = next((row for row in pair_rows if row.get("venue") == "uniswap_v2_pair"), None)
        if uni_match is not None:
            return uni_match
    return pair_rows[0]
|
||||
|
||||
|
||||
def dodo_action(pair: str, current: Decimal, target: Decimal) -> str:
    """One-line repeg instruction for a DODO PMM pool, based on deviation direction."""
    below_target = current < target
    if below_target:
        return f"Fund quote side and buy base through `{pair}` until `getMidPrice()` returns target."
    return f"Fund base side and sell base into `{pair}` until `getMidPrice()` returns target."
|
||||
|
||||
|
||||
def uniswap_action(pair: str, target: Decimal) -> str:
    """One-line repeg instruction for a Uniswap V2 pair: reseed at the target ratio."""
    head = f"Withdraw or ignore bad LP, then reseed `{pair}` through the chain router at target ratio `{target}` "
    return head + "with balanced reserves."
|
||||
|
||||
|
||||
def integration_for_chain(chain_id: int, env_values: dict[str, str]) -> str:
    """Resolve the DODO integration address env var for *chain_id* ("" when unset)."""
    for env_key in DODO_ENV_KEYS.get(chain_id, []):
        resolved = resolve_env_value(env_key, env_values)
        if resolved:
            return resolved
    return ""
|
||||
|
||||
|
||||
def rpc_for_chain(chain_id: int, env_values: dict[str, str]) -> str:
    """RPC URL for *chain_id*; mainnet prefers Infura when INFURA_PROJECT_ID is set."""
    if chain_id == 1:
        infura = resolve_env_value("INFURA_PROJECT_ID", env_values)
        if infura:
            return f"https://mainnet.infura.io/v3/{infura}"
    for env_key in RPC_ENV_KEYS.get(chain_id, []):
        url = resolve_env_value(env_key, env_values)
        if url:
            return url
    return ""
|
||||
|
||||
|
||||
def rpc_var_for_chain(chain_id: int) -> str:
    """Name of the preferred RPC env var for *chain_id* (generic fallback: RPC_URL)."""
    keys = RPC_ENV_KEYS.get(chain_id, ["RPC_URL"])
    return keys[0]
|
||||
|
||||
|
||||
def dodo_commands(chain_id: int, pair: str, pool_address: str, current: Decimal, target: Decimal, chain_info: dict, env_values: dict[str, str]) -> dict:
    """Build a copy-pasteable `cast` command block to repeg a DODO PMM pool.

    Direction follows the deviation: below target buys base with quote,
    above (or at) target sells base for quote. Unresolvable addresses are
    emitted as REPLACE_* placeholders for the operator to fill in; swap
    amounts are always left as placeholders.
    """
    base_symbol, quote_symbol = pair.split("/")
    integration = integration_for_chain(chain_id, env_values)
    rpc_var = rpc_var_for_chain(chain_id)
    # Token addresses: prefer the chain's cW token map, then anchor addresses.
    base_address = (chain_info.get("cwTokens") or {}).get(base_symbol) or (chain_info.get("anchorAddresses") or {}).get(base_symbol) or "REPLACE_BASE_TOKEN"
    quote_address = (chain_info.get("cwTokens") or {}).get(quote_symbol) or (chain_info.get("anchorAddresses") or {}).get(quote_symbol) or "REPLACE_QUOTE_TOKEN"
    if current < target:
        return {
            "direction": "quote_in_buy_base",
            "commands": command_block(
                [
                    "source smom-dbis-138/scripts/load-env.sh >/dev/null",
                    f"export RPC_URL=\"${{{rpc_var}}}\"",
                    f"export INTEGRATION=\"{integration or 'REPLACE_DODO_INTEGRATION'}\"",
                    f"export POOL=\"{pool_address or 'REPLACE_POOL'}\"",
                    f"export BASE_TOKEN=\"{base_address}\"",
                    f"export QUOTE_TOKEN=\"{quote_address}\"",
                    "export QUOTE_IN_RAW=REPLACE_QUOTE_IN_RAW",
                    "export MIN_BASE_OUT_RAW=REPLACE_MIN_BASE_OUT_RAW",
                    'cast send "$QUOTE_TOKEN" \'approve(address,uint256)(bool)\' "$INTEGRATION" "$QUOTE_IN_RAW" \\',
                    ' --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
                    'cast send "$INTEGRATION" \'swapExactIn(address,address,uint256,uint256)\' "$POOL" "$QUOTE_TOKEN" "$QUOTE_IN_RAW" "$MIN_BASE_OUT_RAW" \\',
                    ' --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
                ]
            ),
        }
    return {
        "direction": "base_in_sell_base",
        "commands": command_block(
            [
                "source smom-dbis-138/scripts/load-env.sh >/dev/null",
                f"export RPC_URL=\"${{{rpc_var}}}\"",
                f"export INTEGRATION=\"{integration or 'REPLACE_DODO_INTEGRATION'}\"",
                f"export POOL=\"{pool_address or 'REPLACE_POOL'}\"",
                f"export BASE_TOKEN=\"{base_address}\"",
                f"export QUOTE_TOKEN=\"{quote_address}\"",
                "export BASE_IN_RAW=REPLACE_BASE_IN_RAW",
                "export MIN_QUOTE_OUT_RAW=REPLACE_MIN_QUOTE_OUT_RAW",
                'cast send "$BASE_TOKEN" \'approve(address,uint256)(bool)\' "$INTEGRATION" "$BASE_IN_RAW" \\',
                ' --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
                'cast send "$INTEGRATION" \'swapExactIn(address,address,uint256,uint256)\' "$POOL" "$BASE_TOKEN" "$BASE_IN_RAW" "$MIN_QUOTE_OUT_RAW" \\',
                ' --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
            ]
        ),
    }
|
||||
|
||||
|
||||
def uniswap_commands(chain_id: int, pair: str, pool_address: str, chain_info: dict) -> dict:
    """Build a `cast` command block to reseed a Uniswap V2 pair at the target ratio.

    Resolves the router from the chain's uniswapV2Pools rows when available
    (otherwise emits a CHAIN_<id>_UNISWAP_V2_ROUTER env-var placeholder); token
    addresses fall back to REPLACE_* placeholders. Liquidity amounts are always
    left for the operator to fill in.
    """
    token_a, token_b = pair.split("/")
    rpc_var = rpc_var_for_chain(chain_id)
    router = ""
    # Find the router recorded for this exact pair, if any.
    for row in chain_info.get("uniswapV2Pools") or []:
        if f"{row['base']}/{row['quote']}" == pair and row.get("routerAddress"):
            router = row["routerAddress"]
            break
    token_a_address = (chain_info.get("cwTokens") or {}).get(token_a) or (chain_info.get("anchorAddresses") or {}).get(token_a) or "REPLACE_TOKEN_A"
    token_b_address = (chain_info.get("cwTokens") or {}).get(token_b) or (chain_info.get("anchorAddresses") or {}).get(token_b) or "REPLACE_TOKEN_B"
    return {
        "commands": command_block(
            [
                "source smom-dbis-138/scripts/load-env.sh >/dev/null",
                f"export RPC_URL=\"${{{rpc_var}}}\"",
                f"export ROUTER=\"{router or f'${{CHAIN_{chain_id}_UNISWAP_V2_ROUTER}}'}\"",
                f"export PAIR=\"{pool_address}\"",
                f"export TOKEN_A=\"{token_a_address}\"",
                f"export TOKEN_B=\"{token_b_address}\"",
                "export AMOUNT_A_RAW=REPLACE_AMOUNT_A_RAW",
                "export AMOUNT_B_RAW=REPLACE_AMOUNT_B_RAW",
                'export DEADLINE="$(( $(date +%s) + 3600 ))"',
                'export SIGNER="$(cast wallet address --private-key "$PRIVATE_KEY")"',
                '# Optional: remove bad LP first if you control LP tokens for this pair.',
                'cast send "$TOKEN_A" \'approve(address,uint256)(bool)\' "$ROUTER" "$AMOUNT_A_RAW" \\',
                ' --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
                'cast send "$TOKEN_B" \'approve(address,uint256)(bool)\' "$ROUTER" "$AMOUNT_B_RAW" \\',
                ' --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
                'cast send "$ROUTER" \'addLiquidity(address,address,uint256,uint256,uint256,uint256,address,uint256)\' \\',
                ' "$TOKEN_A" "$TOKEN_B" "$AMOUNT_A_RAW" "$AMOUNT_B_RAW" "$AMOUNT_A_RAW" "$AMOUNT_B_RAW" "$SIGNER" "$DEADLINE" \\',
                ' --private-key "$PRIVATE_KEY" --rpc-url "$RPC_URL"',
            ]
        ),
    }
|
||||
|
||||
|
||||
def dodo_sizing_and_balance(
    chain_id: int,
    pair: str,
    pool_address: str,
    current: Decimal,
    target: Decimal,
    chain_info: dict,
    env_values: dict[str, str],
    deployer: str,
) -> dict:
    """Size the repeg trade for a DODO PMM pool and check deployer funding.

    Reads token decimals and live vault reserves over RPC, computes the
    funding-side top-up that would restore *target* quote-per-base parity,
    and — when a deployer address is known — reports whether the deployer's
    funding-side balance covers the suggested trade.

    Returns a partial dict carrying an ``"error"`` key when the RPC URL or
    token/pool addresses are missing, or when any on-chain read fails.
    """
    rpc_url = rpc_for_chain(chain_id, env_values)
    base_symbol, quote_symbol = pair.split("/")
    # Token addresses: cW registry first, then anchor assets; empty string when unresolved.
    base_address = (chain_info.get("cwTokens") or {}).get(base_symbol) or (chain_info.get("anchorAddresses") or {}).get(base_symbol) or ""
    quote_address = (chain_info.get("cwTokens") or {}).get(quote_symbol) or (chain_info.get("anchorAddresses") or {}).get(quote_symbol) or ""
    result = {
        "heuristic": "suggested_raw = simple reserve-parity top-up on the funding side using target quote-per-base",
        "deployer": deployer or None,
        "rpcConfigured": bool(rpc_url),
    }
    if not rpc_url or not base_address or not quote_address or not pool_address:
        result["error"] = "missing rpc or token/pool address"
        return result
    try:
        base_decimals = parse_uint(cast_call(rpc_url, base_address, "decimals()(uint8)"))
        quote_decimals = parse_uint(cast_call(rpc_url, quote_address, "decimals()(uint8)"))
        base_reserve_raw, quote_reserve_raw = parse_uints(
            cast_call(rpc_url, pool_address, "getVaultReserve()(uint256,uint256)"), 2
        )
    except Exception as exc:
        result["error"] = str(exc)
        return result

    gap_fraction = abs(target - current) / target
    base_units = normalize_units(base_reserve_raw, base_decimals)
    quote_units = normalize_units(quote_reserve_raw, quote_decimals)
    if current < target:
        # Price below target: fund the quote side up to base_units * target.
        funding_symbol = quote_symbol
        funding_address = quote_address
        funding_decimals = quote_decimals
        target_quote_units = base_units * target
        top_up_units = max(target_quote_units - quote_units, Decimal(0))
        suggested_raw = int((top_up_units * (Decimal(10) ** quote_decimals)).to_integral_value())
    else:
        # Price above target: fund the base side up to quote_units / target.
        funding_symbol = base_symbol
        funding_address = base_address
        funding_decimals = base_decimals
        target_base_units = quote_units / target
        top_up_units = max(target_base_units - base_units, Decimal(0))
        suggested_raw = int((top_up_units * (Decimal(10) ** base_decimals)).to_integral_value())

    # Best-effort balance read: failures are recorded, not fatal.
    deployer_balance_raw = None
    if deployer:
        try:
            deployer_balance_raw = parse_uint(cast_call(rpc_url, funding_address, "balanceOf(address)(uint256)", deployer))
        except Exception as exc:
            result["deployerBalanceError"] = str(exc)

    result.update(
        {
            "baseToken": {"symbol": base_symbol, "address": base_address, "decimals": base_decimals},
            "quoteToken": {"symbol": quote_symbol, "address": quote_address, "decimals": quote_decimals},
            "poolReserves": {
                "baseRaw": str(base_reserve_raw),
                "baseUnits": str(normalize_units(base_reserve_raw, base_decimals)),
                "quoteRaw": str(quote_reserve_raw),
                "quoteUnits": str(normalize_units(quote_reserve_raw, quote_decimals)),
            },
            "gapFraction": str(gap_fraction),
            "fundingSide": funding_symbol,
            "suggestedTradeRaw": str(suggested_raw),
            "suggestedTradeUnits": str(normalize_units(suggested_raw, funding_decimals)),
        }
    )
    if deployer_balance_raw is not None:
        shortfall = max(suggested_raw - deployer_balance_raw, 0)
        result["deployerFundingCheck"] = {
            "token": funding_symbol,
            "balanceRaw": str(deployer_balance_raw),
            "balanceUnits": str(normalize_units(deployer_balance_raw, funding_decimals)),
            "shortfallRaw": str(shortfall),
            "shortfallUnits": str(normalize_units(shortfall, funding_decimals)),
            "covered": shortfall == 0,
        }
    return result
|
||||
|
||||
|
||||
def live_uniswap_ratio(
    chain_id: int,
    pair_address: str,
    base_symbol: str,
    quote_symbol: str,
    chain_info: dict,
    env_values: dict[str, str],
) -> Decimal | None:
    """Read the live quote-per-base reserve ratio from a Uniswap V2 pair.

    Returns None when the RPC URL or either token address is unavailable,
    when any on-chain read fails, when the pair does not contain both
    tokens, or when the base reserve is zero (ratio undefined).
    """
    rpc_url = rpc_for_chain(chain_id, env_values)
    if not rpc_url or not pair_address:
        return None
    base_address = ((chain_info.get("cwTokens") or {}).get(base_symbol) or (chain_info.get("anchorAddresses") or {}).get(base_symbol) or "").lower()
    quote_address = ((chain_info.get("cwTokens") or {}).get(quote_symbol) or (chain_info.get("anchorAddresses") or {}).get(quote_symbol) or "").lower()
    if not base_address or not quote_address:
        return None
    try:
        token0 = cast_call(rpc_url, pair_address, "token0()(address)").strip().lower()
        token1 = cast_call(rpc_url, pair_address, "token1()(address)").strip().lower()
        # getReserves() also returns blockTimestampLast; it is discarded.
        reserve0_raw, reserve1_raw, _ = parse_uints(cast_call(rpc_url, pair_address, "getReserves()((uint112,uint112,uint32))"), 3)
        decimals0 = parse_uint(cast_call(rpc_url, token0, "decimals()(uint8)"))
        decimals1 = parse_uint(cast_call(rpc_url, token1, "decimals()(uint8)"))
    except Exception:
        return None

    # Map reserves by token address so base/quote orientation is independent
    # of the pair's token0/token1 ordering.
    reserve_by_token = {
        token0: normalize_units(reserve0_raw, decimals0),
        token1: normalize_units(reserve1_raw, decimals1),
    }
    base_units = reserve_by_token.get(base_address)
    quote_units = reserve_by_token.get(quote_address)
    if base_units in (None, Decimal(0)) or quote_units is None:
        return None
    return quote_units / base_units
|
||||
|
||||
|
||||
def build_payload() -> dict:
    """Assemble the repeg payload from price, deployment, and discovery inputs.

    Collects three finding sets:
    - DODO PMM pools whose USD price deviates from its target beyond
      ``DODO_THRESHOLD_PCT`` (with operator commands and trade sizing);
    - Uniswap V2 wrapped-stable support pairs deviating from 1:1 beyond
      ``UNISWAP_THRESHOLD_PCT``;
    - tokens that have a target but no live price path.
    """
    env_values = merged_env_values()
    deployer = derive_deployer(env_values)
    prices = load_json(PRICE_REPORT)
    deployment = load_json(DEPLOYMENT_STATUS)
    discovery = load_json(UNISWAP_DISCOVERY)

    dodo_findings: list[dict] = []
    uniswap_findings: list[dict] = []
    unpriced: list[dict] = []

    # Pass 1: DODO PMM pools, driven by the public price snapshot.
    for chain in prices.get("chains") or []:
        chain_id = str(chain["chainId"])
        chain_info = deployment["chains"].get(chain_id, {})
        pair_map = build_pair_to_pool(chain_info)
        for symbol, row in sorted(chain.get("prices", {}).items()):
            target = TARGETS.get(symbol)
            current = safe_decimal(row.get("priceUsd"))
            if current is None:
                # No readable price; only report when the token is actually targeted.
                if target is not None:
                    unpriced.append(
                        {
                            "chainId": chain["chainId"],
                            "network": chain["network"],
                            "token": symbol,
                            "targetUsd": str(target),
                            "reason": "no live price path found from current public snapshot",
                        }
                    )
                continue
            if target is None:
                continue
            deviation = pct_deviation(current, target)
            if abs(deviation) <= DODO_THRESHOLD_PCT:
                continue
            derived_from = row["derivedFrom"]
            if " -> " in derived_from:
                # Derived route; fix the direct broken pool(s) below in the Uniswap section.
                continue
            pool = select_pool(pair_map.get(derived_from, []), row["sourceType"])
            pool_address = pool.get("poolAddress") or first_address(row.get("notes", []))
            dodo_findings.append(
                {
                    "chainId": chain["chainId"],
                    "network": chain["network"],
                    "token": symbol,
                    "pair": derived_from,
                    "poolAddress": pool_address,
                    "venue": row["sourceType"],
                    "currentUsd": str(current),
                    "targetUsd": str(target),
                    "deviationPct": str(deviation),
                    "action": dodo_action(derived_from, current, target),
                    "operator": dodo_commands(chain["chainId"], derived_from, pool_address or "", current, target, chain_info, env_values),
                    "sizing": dodo_sizing_and_balance(chain["chainId"], derived_from, pool_address or "", current, target, chain_info, env_values, deployer),
                }
            )

    # Pass 2: live Uniswap V2 pairs from the discovery report, target ratio 1:1.
    for entry in discovery.get("entries") or []:
        chain_info = deployment["chains"].get(str(entry["chain_id"]), {})
        for row in entry.get("pairsChecked") or []:
            if not row.get("live"):
                continue
            pair = f"{row['base']}/{row['quote']}"
            target = Decimal("1")
            current = live_uniswap_ratio(entry["chain_id"], row["poolAddress"], row["base"], row["quote"], chain_info, env_values)
            if current is None:
                # Fall back to the discovery snapshot's health ratio when the live read fails.
                current = safe_decimal((row.get("health") or {}).get("priceQuotePerBase"))
            if current is None:
                continue
            deviation = pct_deviation(current, target)
            if abs(deviation) <= UNISWAP_THRESHOLD_PCT:
                continue
            # Only the wrapped-stable support rails are in scope for this plan.
            if pair not in ("cWUSDT/cWUSDC", "cWAUSDT/cWUSDT", "cWAUSDT/cWUSDC"):
                continue
            uniswap_findings.append(
                {
                    "chainId": entry["chain_id"],
                    "network": entry["network"],
                    "pair": pair,
                    "poolAddress": row["poolAddress"],
                    "currentRatio": str(current),
                    "targetRatio": str(target),
                    "deviationPct": str(deviation),
                    "healthy": (row.get("health") or {}).get("healthy"),
                    "action": uniswap_action(pair, target),
                    "operator": uniswap_commands(entry["chain_id"], pair, row["poolAddress"], chain_info),
                }
            )

    # Stable ordering keeps the JSON/Markdown outputs diff-friendly.
    dodo_findings.sort(key=lambda row: (row["chainId"], row["pair"]))
    uniswap_findings.sort(key=lambda row: (row["chainId"], row["pair"]))
    unpriced.sort(key=lambda row: (row["chainId"], row["token"]))

    return {
        "generatedAt": now(),
        "inputs": {
            "priceReport": str(PRICE_REPORT),
            "deploymentStatus": str(DEPLOYMENT_STATUS),
            "uniswapDiscovery": str(UNISWAP_DISCOVERY),
        },
        "targetsUsd": {k: str(v) for k, v in TARGETS.items()},
        "thresholds": {
            "dodoDeviationPct": str(DODO_THRESHOLD_PCT),
            "uniswapDeviationPct": str(UNISWAP_THRESHOLD_PCT),
        },
        "deployer": deployer or None,
        "offPegDodoPools": dodo_findings,
        "offPegUniswapPairs": uniswap_findings,
        "unpricedTargets": unpriced,
    }
|
||||
|
||||
|
||||
def render_markdown(payload: dict) -> str:
    """Render the repeg payload as the CW_PUBLIC_NETWORK_PRICES Markdown doc.

    Sections, in order: header + rules, off-peg DODO table, broken Uniswap
    table, unpriced-target table, execution order, and per-finding operator
    command blocks.
    """
    lines = [
        "# cW Public Network Repeg Plan",
        "",
        f"- Generated: `{payload['generatedAt']}`",
        f"- Price report: `{payload['inputs']['priceReport']}`",
        f"- Deployment inventory: `{payload['inputs']['deploymentStatus']}`",
        f"- Uniswap discovery: `{payload['inputs']['uniswapDiscovery']}`",
        "- Scope: current off-peg or unreadable public-network `cW*` pools only.",
        "- PMM rule: if current price is below target, fund quote and buy base; if current price is above target, fund base and sell base.",
        "- Uniswap rule: remove bad LP or ignore stale LP, then reseed at the target reserve ratio.",
        "",
        "## Off-Peg DODO PMM Pools",
        "",
        "| Chain | Pair | Pool | Current | Target | Deviation | Repair Path |",
        "|---|---|---|---:|---:|---:|---|",
    ]

    if payload["offPegDodoPools"]:
        for row in payload["offPegDodoPools"]:
            lines.append(
                f"| `{row['chainId']}` {row['network']} | `{row['pair']}` | `{row['poolAddress'] or 'unknown'}` | "
                f"`{format_decimal(safe_decimal(row['currentUsd']))}` | `{format_decimal(safe_decimal(row['targetUsd']))}` | "
                f"`{format_pct(Decimal(row['deviationPct']))}` | {row['action']} |"
            )
    else:
        lines.append("| — | — | — | — | — | — | No off-peg DODO PMM pools found at the configured threshold. |")

    lines.extend(
        [
            "",
            "## Broken Uniswap Pairs",
            "",
            "| Chain | Pair | Pool | Current Ratio | Target | Deviation | Repair Path |",
            "|---|---|---|---:|---:|---:|---|",
        ]
    )

    if payload["offPegUniswapPairs"]:
        for row in payload["offPegUniswapPairs"]:
            lines.append(
                f"| `{row['chainId']}` {row['network']} | `{row['pair']}` | `{row['poolAddress']}` | "
                f"`{format_decimal(safe_decimal(row['currentRatio']))}` | `{format_decimal(safe_decimal(row['targetRatio']))}` | "
                f"`{format_pct(Decimal(row['deviationPct']))}` | {row['action']} |"
            )
    else:
        lines.append("| — | — | — | — | — | — | No broken Uniswap pairs found at the configured threshold. |")

    lines.extend(
        [
            "",
            "## Unpriced Targets",
            "",
            "| Chain | Token | Target | Status |",
            "|---|---|---:|---|",
        ]
    )

    if payload["unpricedTargets"]:
        for row in payload["unpricedTargets"]:
            lines.append(
                f"| `{row['chainId']}` {row['network']} | `{row['token']}` | "
                f"`{format_decimal(safe_decimal(row['targetUsd']))}` | {row['reason']} |"
            )
    else:
        lines.append("| — | — | — | No unpriced targets. |")

    lines.extend(
        [
            "",
            "## Execution Order",
            "",
            "1. Repeg direct PMM anchor pools first: `cWUSDC/USDC`, `cWUSDT/USDT`, then the fiat/metal quoted rails.",
            "2. Repeg `cWUSDT/cWUSDC` next on any chain where the wrapped support pair is broken.",
            "3. Repeg wrapped edge pairs such as `cWAUSDT/cWUSDT` and `cWAUSDT/cWUSDC` after the core wrapped support rail is healthy.",
            "4. Re-run the price table and this repeg plan after each venue is repaired.",
            "",
            f"- Deployer inspected: `{payload.get('deployer') or 'not found in env'}`",
            "",
            "## Operator Commands",
            "",
        ]
    )
    # One command section per DODO finding, with sizing/funding context above the block.
    for row in payload["offPegDodoPools"]:
        sizing = row.get("sizing") or {}
        funding = sizing.get("deployerFundingCheck") or {}
        reserve_note = ""
        pool_reserves = sizing.get("poolReserves") or {}
        if sizing.get("error"):
            reserve_note = f"- Sizing note: `{sizing['error']}`"
        elif pool_reserves.get("baseRaw") == "0" and pool_reserves.get("quoteRaw") == "0":
            reserve_note = "- Sizing note: `Pool reports zero vault reserves; heuristic trade size is informational only.`"
        lines.extend(
            [
                f"### `{row['chainId']}` {row['network']} `{row['pair']}`",
                "",
                f"- Suggested funding side: `{sizing.get('fundingSide', 'unknown')}`",
                f"- Suggested trade raw: `{sizing.get('suggestedTradeRaw', 'unknown')}`",
                f"- Suggested trade units: `{sizing.get('suggestedTradeUnits', 'unknown')}`",
                f"- Gap fraction: `{format_decimal(safe_decimal(sizing.get('gapFraction')), 6) if sizing.get('gapFraction') else 'unknown'}`",
                f"- Deployer balance on funding side: `{funding.get('balanceUnits', 'unknown')}` `{funding.get('token', '')}`",
                f"- Deployer shortfall: `{funding.get('shortfallUnits', 'unknown')}` `{funding.get('token', '')}`",
                f"- Covered: `{funding.get('covered', 'unknown')}`",
                # Only the zero-reserve note also gets a raw reserve snapshot line.
                *(["- Reserve snapshot: `baseRaw=0 quoteRaw=0`"] if reserve_note and "zero vault reserves" in reserve_note else []),
                *([reserve_note] if reserve_note else []),
                "",
                "```bash",
                row["operator"]["commands"],
                "```",
                "",
            ]
        )
    # Uniswap findings only carry the command block (no sizing heuristic).
    for row in payload["offPegUniswapPairs"]:
        lines.extend(
            [
                f"### `{row['chainId']}` {row['network']} `{row['pair']}`",
                "",
                "```bash",
                row["operator"]["commands"],
                "```",
                "",
            ]
        )
    return "\n".join(lines)
|
||||
|
||||
|
||||
def main() -> None:
    """Generate the repeg payload, persist the JSON and Markdown artifacts, and echo their paths."""
    report = build_payload()
    markdown = render_markdown(report)
    write_json(JSON_OUT, report)
    write_text(DOC_OUT, markdown)
    for emitted in (JSON_OUT, DOC_OUT):
        print(emitted)
|
||||
|
||||
|
||||
# Script entry point: build the repeg report and write both artifacts.
if __name__ == "__main__":
    main()
|
||||
229
scripts/verify/build-liquidity-pools-completion-report.py
Normal file
229
scripts/verify/build-liquidity-pools-completion-report.py
Normal file
@@ -0,0 +1,229 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Build a strict completion report for non-live liquidity venue inventory."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import csv
|
||||
import json
|
||||
from collections import Counter, defaultdict
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
# Repository root: two directories above scripts/verify/.
ROOT = Path(__file__).resolve().parents[2]
# Input inventory produced by build-liquidity-pools-master-map.py.
MASTER_JSON = ROOT / "reports/status/liquidity-pools-master-map-latest.json"
# Output artifacts: machine-readable report, human summary, remaining-work CSV.
OUT_JSON = ROOT / "reports/status/liquidity-pools-completion-latest.json"
OUT_MD = ROOT / "reports/status/liquidity-pools-completion-latest.md"
OUT_CSV = ROOT / "reports/status/liquidity-pools-completion-remaining-latest.csv"


# Column order for the remaining-work CSV export (must match write_csv rows).
CSV_FIELDS = [
    "chainId",
    "network",
    "venue",
    "pair",
    "poolAddress",
    "baseTokenAddress",
    "quoteTokenAddress",
    "status",
    "completionClass",
]
|
||||
|
||||
|
||||
def md_table(headers: list[str], rows: list[list[str]]) -> str:
    """Render *headers* and *rows* as a pipe-delimited Markdown table."""
    def render_row(cells: list[str]) -> str:
        return "| " + " | ".join(cells) + " |"

    rendered = [render_row(headers), render_row(["---"] * len(headers))]
    for row in rows:
        rendered.append(render_row(row))
    return "\n".join(rendered)
|
||||
|
||||
|
||||
def completion_class(status: str) -> str:
    """Map a raw pool/venue status string onto its completion bucket.

    Any ``live*`` status is complete; known non-live statuses map to named
    work buckets; unknown statuses pass through unchanged.
    """
    if status.startswith("live"):
        return "complete"
    bucket_by_status = {
        "planned_gas_placeholder": "planned_gas_surface",
        "configured_no_live_balance": "deploy_or_verify_live_balance",
        "configured_no_code": "deploy_or_verify_live_balance",
        "configured_code_present_no_balance_read": "deploy_or_verify_live_balance",
        "planned_reference_placeholder": "planned_reference_surface",
        "supported_not_live": "supported_but_not_live",
    }
    return bucket_by_status.get(status, status)
|
||||
|
||||
|
||||
def build() -> dict:
    """Build the completion report dict from the master liquidity-pool map.

    For each chain, filters out ``live*`` pools and reference venues, counts
    the remainder by status and venue, and collects flattened CSV rows for
    the remaining-work export.
    """
    data = json.loads(MASTER_JSON.read_text())
    generated_at = datetime.now(timezone.utc).replace(microsecond=0).isoformat()

    chain_rows = []
    remaining_csv_rows = []
    totals = Counter()  # pool status counts aggregated across all chains

    for chain in data["chains"]:
        # "Remaining" = anything whose status does not start with "live".
        remaining_pools = [p for p in chain["pools"] if not str(p["status"]).startswith("live")]
        remaining_refs = [r for r in chain.get("referenceVenues", []) if not str(r["status"]).startswith("live")]

        pool_status_counts = Counter(p["status"] for p in remaining_pools)
        ref_status_counts = Counter(r["status"] for r in remaining_refs)
        venue_counts = Counter(p["venue"] for p in remaining_pools)

        totals.update(pool_status_counts)

        chain_rows.append(
            {
                "chainId": chain["chainId"],
                "network": chain["network"],
                "remainingPoolCount": len(remaining_pools),
                "remainingReferenceVenueCount": len(remaining_refs),
                "poolStatusCounts": dict(pool_status_counts),
                "referenceStatusCounts": dict(ref_status_counts),
                "remainingVenueCounts": dict(venue_counts),
                "remainingPools": [
                    {
                        "venue": p["venue"],
                        "pair": f"{p['baseSymbol']}/{p['quoteSymbol']}",
                        "poolAddress": p["poolAddress"],
                        "baseTokenAddress": p["baseAddress"],
                        "quoteTokenAddress": p["quoteAddress"],
                        "status": p["status"],
                        "completionClass": completion_class(p["status"]),
                    }
                    for p in remaining_pools
                ],
                "remainingReferenceVenues": [
                    {
                        "protocol": r["protocol"],
                        "pair": f"{r['baseSymbol']}/{r['quoteSymbol']}",
                        "venueAddress": r["venueAddress"],
                        "status": r["status"],
                        "completionClass": completion_class(r["status"]),
                    }
                    for r in remaining_refs
                ],
            }
        )

        # Flattened per-pool rows for the CSV export (keys match CSV_FIELDS).
        for p in remaining_pools:
            remaining_csv_rows.append(
                {
                    "chainId": chain["chainId"],
                    "network": chain["network"],
                    "venue": p["venue"],
                    "pair": f"{p['baseSymbol']}/{p['quoteSymbol']}",
                    "poolAddress": p["poolAddress"],
                    "baseTokenAddress": p["baseAddress"],
                    "quoteTokenAddress": p["quoteAddress"],
                    "status": p["status"],
                    "completionClass": completion_class(p["status"]),
                }
            )

    return {
        "generatedAt": generated_at,
        "source": str(MASTER_JSON.relative_to(ROOT)),
        "summary": {
            "remainingPools": sum(r["remainingPoolCount"] for r in chain_rows),
            "remainingReferenceVenues": sum(r["remainingReferenceVenueCount"] for r in chain_rows),
            "poolStatusCounts": dict(totals),
        },
        "chains": chain_rows,
        "remainingCsvRows": remaining_csv_rows,
    }
|
||||
|
||||
|
||||
def write_csv(rows: list[dict]) -> None:
    """Write the remaining-work rows to OUT_CSV in the fixed CSV_FIELDS column order."""
    with OUT_CSV.open("w", newline="") as handle:
        writer = csv.DictWriter(handle, fieldnames=CSV_FIELDS)
        writer.writeheader()
        for row in rows:
            writer.writerow(row)
|
||||
|
||||
|
||||
def write_markdown(report: dict) -> None:
    """Render the completion report as Markdown and write it to OUT_MD.

    Emits a summary section, a per-chain overview table, and — for each
    chain with remaining work — detail tables for remaining pools and
    remaining reference venues.
    """
    lines = [
        "# Liquidity Pools Completion Report",
        "",
        f"- Generated: `{report['generatedAt']}`",
        f"- Source: `{report['source']}`",
        "",
        "## Summary",
        "",
        f"- Remaining non-live pool rows: `{report['summary']['remainingPools']}`",
        f"- Remaining non-live reference venue rows: `{report['summary']['remainingReferenceVenues']}`",
        f"- Pool status counts: `{json.dumps(report['summary']['poolStatusCounts'], sort_keys=True)}`",
        "",
    ]

    # Per-chain overview table.
    summary_rows = []
    for chain in report["chains"]:
        summary_rows.append(
            [
                str(chain["chainId"]),
                chain["network"],
                str(chain["remainingPoolCount"]),
                str(chain["remainingReferenceVenueCount"]),
                json.dumps(chain["poolStatusCounts"], sort_keys=True),
            ]
        )
    lines += [
        "## By Chain",
        "",
        md_table(["ChainID", "Network", "Remaining Pools", "Remaining Reference Venues", "Pool Status Counts"], summary_rows),
        "",
    ]

    # Detail sections: only chains that still have remaining work.
    for chain in report["chains"]:
        if chain["remainingPoolCount"] == 0 and chain["remainingReferenceVenueCount"] == 0:
            continue
        lines += [f"## {chain['network']} ({chain['chainId']})", ""]

        if chain["remainingPoolCount"]:
            pool_rows = [
                [
                    p["venue"],
                    p["pair"],
                    f"`{p['poolAddress']}`",
                    p["status"],
                    p["completionClass"],
                ]
                for p in chain["remainingPools"]
            ]
            lines += [
                "### Remaining Pools",
                "",
                md_table(["Venue", "Pair", "Pool", "Status", "Completion Class"], pool_rows),
                "",
            ]

        if chain["remainingReferenceVenueCount"]:
            ref_rows = [
                [
                    r["protocol"],
                    r["pair"],
                    # Reference venues may legitimately lack an address; show an em dash.
                    f"`{r['venueAddress']}`" if r["venueAddress"] else "—",
                    r["status"],
                    r["completionClass"],
                ]
                for r in chain["remainingReferenceVenues"]
            ]
            lines += [
                "### Remaining Reference Venues",
                "",
                md_table(["Protocol", "Pair", "Venue Address", "Status", "Completion Class"], ref_rows),
                "",
            ]

    OUT_MD.write_text("\n".join(lines) + "\n")
|
||||
|
||||
|
||||
def main() -> int:
    """Build the completion report, emit JSON/Markdown/CSV artifacts, and return 0."""
    report = build()
    OUT_JSON.write_text(json.dumps(report, indent=2) + "\n")
    write_markdown(report)
    write_csv(report["remainingCsvRows"])
    for artifact in (OUT_JSON, OUT_MD, OUT_CSV):
        print(f"Wrote {artifact.relative_to(ROOT)}")
    return 0
|
||||
|
||||
|
||||
# Script entry point: exit with main()'s status code.
if __name__ == "__main__":
    raise SystemExit(main())
|
||||
594
scripts/verify/build-liquidity-pools-master-map.py
Normal file
594
scripts/verify/build-liquidity-pools-master-map.py
Normal file
@@ -0,0 +1,594 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Build a live liquidity-pool inventory from deployment-status + on-chain reserves.
|
||||
|
||||
Outputs:
|
||||
- reports/status/liquidity-pools-master-map-latest.json
|
||||
- docs/11-references/LIQUIDITY_POOLS_MASTER_MAP.md
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
import time
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
DEPLOYMENT_STATUS = ROOT / "cross-chain-pmm-lps/config/deployment-status.json"
|
||||
CHAIN138_EXECUTION = ROOT / "smom-dbis-138/config/chain138-eth-pmm-pools-execution.json"
|
||||
CHAIN138_PMM = ROOT / "smom-dbis-138/config/chain138-pmm-pools.json"
|
||||
OUT_JSON = ROOT / "reports/status/liquidity-pools-master-map-latest.json"
|
||||
OUT_MD = ROOT / "docs/11-references/LIQUIDITY_POOLS_MASTER_MAP.md"
|
||||
UNIV2_DISCOVERY = ROOT / "reports/extraction/promod-uniswap-v2-live-pair-discovery-latest.json"
|
||||
GRU_GAP_REPORT = ROOT / "reports/extraction/promod-gru-v2-full-mesh-gap-report-latest.json"
|
||||
|
||||
POOL_LIST_KEYS = ("pmmPools", "pmmPoolsVolatile", "gasPmmPools", "uniswapV2Pools")
|
||||
CHAIN_ORDER = ["138", "651940", "1", "10", "25", "56", "100", "137", "8453", "42161", "42220", "43114", "1111"]
|
||||
|
||||
RPC_DEFAULTS = {
|
||||
"138": os.environ.get("CHAIN138_RPC_URL")
|
||||
or os.environ.get("RPC_URL_138")
|
||||
or os.environ.get("RPC_URL")
|
||||
or "https://rpc-core.d-bis.org",
|
||||
"651940": os.environ.get("CHAIN_651940_RPC")
|
||||
or os.environ.get("CHAIN_651940_RPC_URL")
|
||||
or os.environ.get("ALLTRA_MAINNET_RPC")
|
||||
or "https://mainnet-rpc.alltra.global",
|
||||
"1": os.environ.get("ETHEREUM_MAINNET_RPC") or "https://eth.llamarpc.com",
|
||||
"10": os.environ.get("OPTIMISM_MAINNET_RPC") or os.environ.get("OPTIMISM_RPC_URL") or "https://mainnet.optimism.io",
|
||||
"25": os.environ.get("CRONOS_RPC") or os.environ.get("CRONOS_MAINNET_RPC") or "https://evm.cronos.org",
|
||||
"56": os.environ.get("BSC_RPC_URL") or os.environ.get("BSC_MAINNET_RPC") or "https://bsc-dataseed.binance.org",
|
||||
"100": os.environ.get("GNOSIS_RPC") or os.environ.get("GNOSIS_MAINNET_RPC") or "https://rpc.gnosischain.com",
|
||||
"137": os.environ.get("POLYGON_MAINNET_RPC") or os.environ.get("POLYGON_RPC_URL") or "https://polygon-bor-rpc.publicnode.com",
|
||||
"8453": os.environ.get("BASE_MAINNET_RPC") or os.environ.get("BASE_RPC_URL") or "https://mainnet.base.org",
|
||||
"42161": os.environ.get("ARBITRUM_MAINNET_RPC") or os.environ.get("ARBITRUM_RPC_URL") or "https://arb1.arbitrum.io/rpc",
|
||||
"42220": os.environ.get("CELO_RPC") or os.environ.get("CELO_MAINNET_RPC") or "https://forno.celo.org",
|
||||
"43114": os.environ.get("AVALANCHE_RPC_URL") or os.environ.get("AVALANCHE_MAINNET_RPC") or "https://api.avax.network/ext/bc/C/rpc",
|
||||
"1111": os.environ.get("WEMIX_RPC") or os.environ.get("WEMIX_MAINNET_RPC") or "https://api.wemix.com",
|
||||
}
|
||||
|
||||
PLACEHOLDER_NOTES = {"placeholder_scaffold_not_live"}
|
||||
ZERO_ADDR = "0x0000000000000000000000000000000000000000"
|
||||
|
||||
|
||||
def load_json(path: Path) -> Any:
    """Read *path* and return its parsed JSON content."""
    with path.open() as handle:
        return json.load(handle)
|
||||
|
||||
|
||||
def cast_call(rpc: str, to: str, sig: str, *args: str) -> list[str]:
    """Run `cast call` against *rpc* and return the stripped, non-empty output lines.

    Retries up to three times with a short linear backoff (0.25s, 0.5s)
    before re-raising the last failure.
    """
    command = ["cast", "call", to, sig, *args, "--rpc-url", rpc]
    failure: Exception | None = None
    for attempt in range(3):
        try:
            raw = subprocess.check_output(command, text=True, stderr=subprocess.DEVNULL, timeout=5)
        except Exception as exc:
            failure = exc
            if attempt < 2:
                time.sleep(0.25 * (attempt + 1))
            continue
        return [line.strip() for line in raw.strip().splitlines() if line.strip()]
    raise failure or RuntimeError(f"cast call failed for {to} {sig}")
|
||||
|
||||
|
||||
def cast_code(rpc: str, address: str) -> str:
    """Return the deployed bytecode at *address* via `cast code`, with three retries.

    Uses the same linear-backoff retry policy as cast_call and re-raises the
    last failure when all attempts are exhausted.
    """
    command = ["cast", "code", address, "--rpc-url", rpc]
    failure: Exception | None = None
    for attempt in range(3):
        try:
            return subprocess.check_output(command, text=True, stderr=subprocess.DEVNULL, timeout=5).strip()
        except Exception as exc:
            failure = exc
            if attempt < 2:
                time.sleep(0.25 * (attempt + 1))
    raise failure or RuntimeError(f"cast code failed for {address}")
|
||||
|
||||
|
||||
def clean_int(value: str) -> int:
    """Parse the first whitespace-delimited token of *value* as an integer.

    Accepts both `0x`-prefixed hex (as emitted by `cast`) and plain decimal;
    trailing annotations such as `255 [2.55e2]` are ignored.
    """
    token = value.split(None, 1)[0]
    base = 16 if token.startswith("0x") else 10
    return int(token, base)
|
||||
|
||||
|
||||
def is_placeholder_address(addr: str | None) -> bool:
|
||||
if not addr or not isinstance(addr, str) or not addr.startswith("0x"):
|
||||
return True
|
||||
body = addr[2:].lower()
|
||||
if body == "0" * 40:
|
||||
return True
|
||||
return "0" * 20 in body
|
||||
|
||||
|
||||
def human_amount(raw: int | None, decimals: int | None) -> str | None:
|
||||
if raw is None or decimals is None:
|
||||
return None
|
||||
negative = raw < 0
|
||||
raw = abs(raw)
|
||||
scale = 10 ** decimals
|
||||
whole = raw // scale
|
||||
frac = raw % scale
|
||||
frac_s = f"{frac:0{decimals}d}".rstrip("0")
|
||||
text = str(whole) if not frac_s else f"{whole}.{frac_s}"
|
||||
return f"-{text}" if negative else text
|
||||
|
||||
|
||||
def markdown_table(headers: list[str], rows: list[list[str]]) -> str:
    """Render *headers* and *rows* as a pipe-delimited Markdown table."""
    def fmt(cells: list[str]) -> str:
        return "| " + " | ".join(cells) + " |"

    rendered = [fmt(headers), fmt(["---"] * len(headers))]
    rendered.extend(fmt(row) for row in rows)
    return "\n".join(rendered)
|
||||
|
||||
|
||||
@dataclass
class TokenInfo:
    """Resolved ERC-20 metadata for one side of a pool pair."""

    # Pool-pair symbol as it appears in the deployment inventory.
    symbol: str
    # Hex token address, or None when it could not be resolved.
    address: str | None
    # ERC-20 decimals() value, or None when the on-chain read failed.
    decimals: int | None
|
||||
|
||||
|
||||
class PoolBuilder:
|
||||
    def __init__(self) -> None:
        """Load all inventory inputs and precompute the lookup indexes."""
        self.deployment_status = load_json(DEPLOYMENT_STATUS)
        self.chain138_execution = load_json(CHAIN138_EXECUTION)
        self.chain138_pmm = load_json(CHAIN138_PMM)
        # Optional enrichment reports; missing files degrade gracefully to None.
        self.univ2_discovery = load_json(UNIV2_DISCOVERY) if UNIV2_DISCOVERY.exists() else None
        self.gru_gap_report = load_json(GRU_GAP_REPORT) if GRU_GAP_REPORT.exists() else None
        # (chain_id, lowercased token address) -> decimals; avoids repeated RPC reads.
        self.decimals_cache: dict[tuple[str, str], int | None] = {}
        self.univ2_health_index = self.build_univ2_health_index()
        self.documented_live_dodo_pairs = self.build_documented_live_dodo_pairs()
        # Known decimals used when on-chain decimals() is 0 or unreadable for
        # these wrapped-asset symbols.
        self.fallback_symbol_decimals = {
            "WETH": 18,
            "WETH9": 18,
            "WETH10": 18,
            "WETHL2": 18,
            "WWEMIX": 18,
            "WCRO": 18,
            "WALL": 18,
            "cWETH": 18,
            "cWETHL2": 18,
        }
|
||||
|
||||
def build_univ2_health_index(self) -> dict[tuple[int, str], dict[str, Any]]:
|
||||
index: dict[tuple[int, str], dict[str, Any]] = {}
|
||||
if not self.univ2_discovery:
|
||||
return index
|
||||
for entry in self.univ2_discovery.get("entries", []):
|
||||
chain_id = int(entry["chain_id"])
|
||||
for pair in entry.get("pairsChecked", []):
|
||||
addr = pair.get("poolAddress")
|
||||
if addr and addr != ZERO_ADDR:
|
||||
index[(chain_id, addr.lower())] = pair
|
||||
return index
|
||||
|
||||
def build_documented_live_dodo_pairs(self) -> dict[int, set[str]]:
|
||||
out: dict[int, set[str]] = {}
|
||||
if not self.gru_gap_report:
|
||||
return out
|
||||
for chain in self.gru_gap_report.get("chains", []):
|
||||
dodo = ((chain.get("venue_status") or {}).get("dodo_pmm") or {})
|
||||
out[int(chain["chain_id"])] = set(dodo.get("live_pairs", []))
|
||||
return out
|
||||
|
||||
def resolve_token_address(self, chain_id: str, chain_data: dict[str, Any], symbol: str) -> str | None:
|
||||
if chain_id == "138":
|
||||
if symbol in self.chain138_execution.get("tokens", {}):
|
||||
return self.chain138_execution["tokens"][symbol]
|
||||
if symbol in chain_data.get("anchorAddresses", {}):
|
||||
return chain_data["anchorAddresses"][symbol]
|
||||
if symbol in self.chain138_pmm.get("tokens", {}):
|
||||
return self.chain138_pmm["tokens"][symbol]
|
||||
for key in ("cwTokens", "anchorAddresses", "gasMirrors", "gasQuoteAddresses"):
|
||||
mapping = chain_data.get(key, {})
|
||||
if symbol in mapping:
|
||||
return mapping[symbol]
|
||||
return None
|
||||
|
||||
def token_decimals(self, rpc: str, address: str | None, chain_id: str, symbol: str | None = None) -> int | None:
|
||||
if not address:
|
||||
return None
|
||||
cache_key = (chain_id, address.lower())
|
||||
if cache_key in self.decimals_cache:
|
||||
return self.decimals_cache[cache_key]
|
||||
try:
|
||||
lines = cast_call(rpc, address, "decimals()(uint8)")
|
||||
dec = clean_int(lines[0])
|
||||
if dec == 0 and (symbol or "") in self.fallback_symbol_decimals:
|
||||
dec = self.fallback_symbol_decimals[symbol or ""]
|
||||
except Exception:
|
||||
dec = self.fallback_symbol_decimals.get(symbol or "")
|
||||
self.decimals_cache[cache_key] = dec
|
||||
return dec
|
||||
|
||||
def build_pool_rows(self) -> dict[str, Any]:
    """Assemble the full report: one entry per chain in CHAIN_ORDER.

    Each chain entry carries its pool rows (built per inventory group in
    POOL_LIST_KEYS order) and its gas reference-venue rows, plus counts and
    the default RPC used for live reads.
    """
    chains = self.deployment_status["chains"]
    stamped = datetime.now(timezone.utc).replace(microsecond=0).isoformat()
    per_chain: list[dict[str, Any]] = []

    for chain_id in CHAIN_ORDER:
        chain_data = chains.get(chain_id)
        if not chain_data:
            # Chain listed in the display order but absent from deployment-status.
            continue
        rpc = RPC_DEFAULTS.get(chain_id)
        pool_rows = [
            self.build_pool_row(chain_id, chain_data, list_key, pool, rpc)
            for list_key in POOL_LIST_KEYS
            for pool in chain_data.get(list_key, [])
        ]
        venue_rows = [
            self.build_reference_venue_row(chain_id, chain_data, venue)
            for venue in chain_data.get("gasReferenceVenues", [])
        ]
        per_chain.append(
            {
                "chainId": int(chain_id),
                "network": chain_data["name"],
                "rpc": rpc,
                "poolCount": len(pool_rows),
                "pools": pool_rows,
                "referenceVenueCount": len(venue_rows),
                "referenceVenues": venue_rows,
            }
        )

    return {
        "generatedAt": stamped,
        "sourceFiles": [
            str(DEPLOYMENT_STATUS.relative_to(ROOT)),
            str(CHAIN138_EXECUTION.relative_to(ROOT)),
            str(CHAIN138_PMM.relative_to(ROOT)),
        ],
        "chains": per_chain,
    }
|
||||
|
||||
def build_pool_row(
    self,
    chain_id: str,
    chain_data: dict[str, Any],
    list_key: str,
    pool: dict[str, Any],
    rpc: str | None,
) -> dict[str, Any]:
    """Build one report row for a configured pool, attaching live balances when readable.

    The row starts as status "configured" and is then resolved, in order, to:
    "rpc_missing", a placeholder status, "address_resolution_missing", or one
    of the live/documented/degraded read outcomes. Always returns the row;
    query failures are recorded in `queryError` rather than raised.
    """
    # Symbols may come from base/quote (PMM style) or tokenIn/tokenOut (route style).
    base_symbol = pool.get("base") or pool.get("tokenIn")
    quote_symbol = pool.get("quote") or pool.get("tokenOut")
    base_address = self.resolve_token_address(chain_id, chain_data, base_symbol) if base_symbol else None
    quote_address = self.resolve_token_address(chain_id, chain_data, quote_symbol) if quote_symbol else None
    # Default venue by inventory group when the pool record does not name one.
    venue = pool.get("venue") or ("uniswap_v2_pair" if list_key == "uniswapV2Pools" else "dodo_pmm")
    notes = list(pool.get("notes", []))
    live_enabled = pool.get("publicRoutingEnabled")
    row: dict[str, Any] = {
        "chainId": int(chain_id),
        "network": chain_data["name"],
        "inventoryGroup": list_key,
        "venue": venue,
        "baseSymbol": base_symbol,
        "baseAddress": base_address,
        "quoteSymbol": quote_symbol,
        "quoteAddress": quote_address,
        "poolAddress": pool.get("poolAddress"),
        "feeBps": pool.get("feeBps"),
        "k": pool.get("k"),
        "role": pool.get("role"),
        "poolType": pool.get("poolType"),
        "publicRoutingEnabled": live_enabled,
        "notes": notes,
        "status": "configured",
        "balances": None,
        "queryError": None,
        "balanceSource": None,
    }

    if list_key == "uniswapV2Pools":
        # Uniswap rows also surface the factory/router the pair belongs to.
        row["factoryAddress"] = pool.get("factoryAddress")
        row["routerAddress"] = pool.get("routerAddress")

    pool_address = pool.get("poolAddress")
    if not rpc:
        # No default RPC for this chain: nothing can be read live.
        row["status"] = "rpc_missing"
        return row
    if is_placeholder_address(pool_address) or any(note in PLACEHOLDER_NOTES for note in notes):
        # Placeholder pools are reported but never queried on-chain.
        row["status"] = "planned_gas_placeholder" if list_key == "gasPmmPools" else "placeholder_not_live"
        return row
    if not pool_address or not base_address or not quote_address:
        row["status"] = "address_resolution_missing"
        return row

    try:
        if list_key == "uniswapV2Pools":
            # Prefer balances already captured by the live pair-discovery report.
            balances = self.fetch_uniswap_v2_discovery_balances(int(chain_id), pool_address, base_symbol, quote_symbol, base_address, quote_address, rpc)
            if balances is not None:
                row["balances"] = balances
                row["status"] = "live"
                row["balanceSource"] = "uniswap_v2_live_pair_discovery"
            else:
                row["status"] = "configured_no_live_balance"
        elif chain_id == "138":
            # Chain 138 DODO pools are read directly via getVaultReserve.
            row["balances"] = self.fetch_dodo_balances(rpc, chain_id, pool_address, base_symbol, quote_symbol, base_address, quote_address)
            row["status"] = "live"
            row["balanceSource"] = "rpc_getVaultReserve"
        else:
            code = cast_code(rpc, pool_address)
            if code in ("", "0x"):
                # No contract deployed at the configured address.
                row["status"] = "configured_no_code"
            elif list_key == "pmmPools" and f"{base_symbol}/{quote_symbol}" in self.documented_live_dodo_pairs.get(int(chain_id), set()):
                # Documented live in the gap report even when reserves are unreadable here.
                row["status"] = "live_documented_no_balance"
                row["balanceSource"] = "promod_gru_v2_full_mesh_gap_report"
            else:
                try:
                    row["balances"] = self.fetch_dodo_balances(rpc, chain_id, pool_address, base_symbol, quote_symbol, base_address, quote_address)
                    row["status"] = "live"
                    row["balanceSource"] = "rpc_getVaultReserve"
                except Exception:
                    # Contract exists but does not answer getVaultReserve.
                    row["status"] = "configured_code_present_no_balance_read"
    except Exception as exc:
        if list_key == "pmmPools" and f"{base_symbol}/{quote_symbol}" in self.documented_live_dodo_pairs.get(int(chain_id), set()):
            # Keep the documented-live verdict but preserve the query error for debugging.
            row["status"] = "live_documented_no_balance"
            row["balanceSource"] = "promod_gru_v2_full_mesh_gap_report"
            row["queryError"] = str(exc)
        else:
            row["status"] = "query_failed"
            row["queryError"] = str(exc)
    return row
|
||||
|
||||
def build_reference_venue_row(self, chain_id: str, chain_data: dict[str, Any], venue: dict[str, Any]) -> dict[str, Any]:
    """Normalize one gas reference-venue record into a report row.

    Status precedence: placeholder notes win, then the `live` flag, then
    `supported`; everything else is "unsupported".
    """
    base = venue.get("base")
    quote = venue.get("quote")
    venue_notes = list(venue.get("notes", []))

    if any(note in PLACEHOLDER_NOTES for note in venue_notes):
        status = "planned_reference_placeholder"
    elif venue.get("live"):
        status = "live"
    elif venue.get("supported"):
        status = "supported_not_live"
    else:
        status = "unsupported"

    return {
        "chainId": int(chain_id),
        "network": chain_data["name"],
        "protocol": venue.get("protocol"),
        "familyKey": venue.get("familyKey"),
        "baseSymbol": base,
        "baseAddress": self.resolve_token_address(chain_id, chain_data, base) if base else None,
        "quoteSymbol": quote,
        "quoteAddress": self.resolve_token_address(chain_id, chain_data, quote) if quote else None,
        "venueAddress": venue.get("venueAddress"),
        "supported": venue.get("supported"),
        "live": venue.get("live"),
        "routingVisible": venue.get("routingVisible"),
        "reference": venue.get("reference"),
        "aggregatorOnly": venue.get("aggregatorOnly"),
        "indexRequired": venue.get("indexRequired"),
        "dependsOn": venue.get("dependsOn", []),
        "notes": venue_notes,
        "status": status,
    }
|
||||
|
||||
def fetch_uniswap_v2_discovery_balances(
|
||||
self,
|
||||
chain_id: int,
|
||||
pool_address: str,
|
||||
base_symbol: str,
|
||||
quote_symbol: str,
|
||||
base_address: str,
|
||||
quote_address: str,
|
||||
rpc: str,
|
||||
) -> dict[str, Any] | None:
|
||||
pair = self.univ2_health_index.get((chain_id, pool_address.lower()))
|
||||
if not pair or not pair.get("live"):
|
||||
return None
|
||||
health = pair.get("health") or {}
|
||||
base_raw = int(health["baseReserveRaw"])
|
||||
quote_raw = int(health["quoteReserveRaw"])
|
||||
base_dec = self.token_decimals(rpc, base_address, str(chain_id), base_symbol)
|
||||
quote_dec = self.token_decimals(rpc, quote_address, str(chain_id), quote_symbol)
|
||||
return {
|
||||
"base": {
|
||||
"symbol": base_symbol,
|
||||
"address": base_address,
|
||||
"raw": str(base_raw),
|
||||
"decimals": base_dec,
|
||||
"formatted": health.get("baseReserveUnits") or human_amount(base_raw, base_dec),
|
||||
},
|
||||
"quote": {
|
||||
"symbol": quote_symbol,
|
||||
"address": quote_address,
|
||||
"raw": str(quote_raw),
|
||||
"decimals": quote_dec,
|
||||
"formatted": health.get("quoteReserveUnits") or human_amount(quote_raw, quote_dec),
|
||||
},
|
||||
"health": health,
|
||||
}
|
||||
|
||||
def fetch_dodo_balances(
    self,
    rpc: str,
    chain_id: str,
    pool_address: str,
    base_symbol: str,
    quote_symbol: str,
    base_address: str,
    quote_address: str,
) -> dict[str, Any]:
    """Read DODO vault reserves over RPC and format both legs.

    Calls `getVaultReserve()` on the pool and renders each side with its
    resolved token decimals. Propagates any RPC/parse exception to the caller.
    """
    raw_lines = cast_call(rpc, pool_address, "getVaultReserve()(uint256,uint256)")
    reserve_pair = (clean_int(raw_lines[0]), clean_int(raw_lines[1]))

    payload: dict[str, Any] = {}
    for side, symbol, address, raw in (
        ("base", base_symbol, base_address, reserve_pair[0]),
        ("quote", quote_symbol, quote_address, reserve_pair[1]),
    ):
        decimals = self.token_decimals(rpc, address, chain_id, symbol)
        payload[side] = {
            "symbol": symbol,
            "address": address,
            "raw": str(raw),
            "decimals": decimals,
            "formatted": human_amount(raw, decimals),
        }
    return payload
|
||||
|
||||
def fetch_uniswap_v2_balances(
    self,
    rpc: str,
    chain_id: str,
    pool_address: str,
    base_symbol: str,
    quote_symbol: str,
    base_address: str,
    quote_address: str,
) -> dict[str, Any]:
    """Read Uniswap V2 reserves directly over RPC, oriented to base/quote.

    Fetches getReserves/token0/token1 from the pair and maps reserve0/reserve1
    onto the configured base and quote tokens. Raises RuntimeError when the
    pair's tokens do not match the configured addresses.
    """
    reserve_lines = cast_call(rpc, pool_address, "getReserves()(uint112,uint112,uint32)")
    first_reserve = clean_int(reserve_lines[0])
    second_reserve = clean_int(reserve_lines[1])
    token0 = cast_call(rpc, pool_address, "token0()(address)")[0].split()[0]
    token1 = cast_call(rpc, pool_address, "token1()(address)")[0].split()[0]

    ordering = (token0.lower(), token1.lower())
    if ordering == (base_address.lower(), quote_address.lower()):
        base_raw, quote_raw = first_reserve, second_reserve
    elif ordering == (quote_address.lower(), base_address.lower()):
        base_raw, quote_raw = second_reserve, first_reserve
    else:
        raise RuntimeError(f"pair token mismatch: token0={token0} token1={token1}")

    base_dec = self.token_decimals(rpc, base_address, chain_id, base_symbol)
    quote_dec = self.token_decimals(rpc, quote_address, chain_id, quote_symbol)
    return {
        "base": {
            "symbol": base_symbol,
            "address": base_address,
            "raw": str(base_raw),
            "decimals": base_dec,
            "formatted": human_amount(base_raw, base_dec),
        },
        "quote": {
            "symbol": quote_symbol,
            "address": quote_address,
            "raw": str(quote_raw),
            "decimals": quote_dec,
            "formatted": human_amount(quote_raw, quote_dec),
        },
    }
|
||||
|
||||
|
||||
def render_markdown(report: dict[str, Any]) -> str:
    """Render the pools report dict as the Markdown master-map document.

    Layout: fixed header/overview, a per-network summary table, then one
    section per chain (pool table plus optional reference-venue table), and a
    closing source-file list. Returns the document without a trailing newline.
    """
    generated_at = report["generatedAt"]
    lines = [
        "# Liquidity Pools Master Map — Chain 138, ALL Mainnet, and Public Networks",
        "",
        f"**Last Updated:** {generated_at}",
        "**Document Version:** 2.0",
        "**Status:** Generated from deployment-status plus live reserve reads where supported",
        "",
        "## Overview",
        "",
        "This document is generated from the machine-readable deployment graph in `cross-chain-pmm-lps/config/deployment-status.json`, plus live reserve reads from chain RPCs.",
        "",
        "Pool categories:",
        "- `pmmPools`: standard DODO PMM routing pools",
        "- `pmmPoolsVolatile`: volatile-route PMM pools",
        "- `gasPmmPools`: gas-family PMM pools",
        "- `uniswapV2Pools`: Uniswap V2-compatible pairs",
        "",
    ]

    # Per-network summary: bucket each chain's pools by read outcome.
    summary_rows: list[list[str]] = []
    for chain in report["chains"]:
        # Any status beginning with "live" (live, live_documented_...) counts as a live read.
        live = sum(1 for pool in chain["pools"] if str(pool["status"]).startswith("live"))
        planned_placeholder = sum(1 for pool in chain["pools"] if pool["status"] == "planned_gas_placeholder")
        failed = sum(1 for pool in chain["pools"] if pool["status"] == "query_failed")
        summary_rows.append(
            [
                str(chain["chainId"]),
                chain["network"],
                str(chain["poolCount"]),
                str(live),
                str(planned_placeholder),
                str(failed),
                str(chain.get("referenceVenueCount", 0)),
                chain["rpc"] or "n/a",
            ]
        )
    lines += ["## Network Summary", "", markdown_table(["ChainID", "Network", "Pools", "Live Read", "Planned Gas Placeholder", "Query Failed", "Reference Venues", "RPC"], summary_rows), ""]

    for chain in report["chains"]:
        lines += [f"## {chain['network']} ({chain['chainId']})", ""]
        if not chain["pools"]:
            # NOTE(review): chains with no pools also skip their reference-venue
            # table because of this continue — confirm that is intentional.
            lines += ["No pools are listed in the canonical deployment graph for this network.", ""]
            continue
        rows: list[list[str]] = []
        for pool in chain["pools"]:
            balances = pool.get("balances") or {}
            base_bal = balances.get("base", {}).get("formatted") if balances else None
            quote_bal = balances.get("quote", {}).get("formatted") if balances else None
            rows.append(
                [
                    pool["inventoryGroup"],
                    pool["venue"],
                    f"{pool['baseSymbol']} / {pool['quoteSymbol']}",
                    f"`{pool['poolAddress']}`",
                    f"`{pool['baseAddress']}`" if pool.get("baseAddress") else "—",
                    f"`{pool['quoteAddress']}`" if pool.get("quoteAddress") else "—",
                    base_bal or "—",
                    quote_bal or "—",
                    pool["status"],
                ]
            )
        lines += [
            markdown_table(
                ["Group", "Venue", "Pair", "Pool", "Base Token", "Quote Token", "Base Balance", "Quote Balance", "Status"],
                rows,
            ),
            "",
        ]
        # Optional reference-venue table for chains that declare any.
        ref_rows = chain.get("referenceVenues", [])
        if ref_rows:
            ref_table_rows = []
            for venue in ref_rows:
                ref_table_rows.append(
                    [
                        venue["protocol"],
                        f"{venue.get('baseSymbol')} / {venue.get('quoteSymbol')}",
                        f"`{venue['venueAddress']}`" if venue.get("venueAddress") else "—",
                        "yes" if venue.get("supported") else "no",
                        "yes" if venue.get("live") else "no",
                        "yes" if venue.get("routingVisible") else "no",
                        venue["status"],
                    ]
                )
            lines += [
                f"### {chain['network']} Reference Venues",
                "",
                markdown_table(["Protocol", "Pair", "Venue Address", "Supported", "Live", "Routing Visible", "Status"], ref_table_rows),
                "",
            ]

    lines += [
        "## Source Files",
        "",
        "- `cross-chain-pmm-lps/config/deployment-status.json`",
        "- `smom-dbis-138/config/chain138-eth-pmm-pools-execution.json`",
        "- `smom-dbis-138/config/chain138-pmm-pools.json`",
        f"- `reports/status/{OUT_JSON.name}`",
        "",
    ]
    return "\n".join(lines)
|
||||
|
||||
|
||||
def main() -> int:
    """Build the pools report, write the JSON and Markdown outputs, return 0."""
    report = PoolBuilder().build_pool_rows()
    for target in (OUT_JSON, OUT_MD):
        target.parent.mkdir(parents=True, exist_ok=True)
    OUT_JSON.write_text(json.dumps(report, indent=2) + "\n")
    OUT_MD.write_text(render_markdown(report) + "\n")
    for written in (OUT_JSON, OUT_MD):
        print(f"Wrote {written.relative_to(ROOT)}")
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Propagate main()'s exit status to the shell.
    raise SystemExit(main())
|
||||
317
scripts/verify/build-network-deployment-inventory.mjs
Normal file
317
scripts/verify/build-network-deployment-inventory.mjs
Normal file
@@ -0,0 +1,317 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import fs from "node:fs";
|
||||
import path from "node:path";
|
||||
import { fileURLToPath } from "node:url";
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url);
|
||||
const __dirname = path.dirname(__filename);
|
||||
const repoRoot = path.resolve(__dirname, "..", "..");
|
||||
|
||||
const deploymentStatusPath = path.join(repoRoot, "cross-chain-pmm-lps/config/deployment-status.json");
|
||||
const manifestPath = path.join(repoRoot, "atomic-swap-dapp/config/ecosystem-manifest.json");
|
||||
const liveRouteRegistryPath = path.join(repoRoot, "atomic-swap-dapp/config/live-route-registry.json");
|
||||
const routingRegistryPath = path.join(repoRoot, "config/routing-registry.json");
|
||||
const allMainnetProtocolSurfacePath = path.join(repoRoot, "config/allmainnet-non-dodo-protocol-surface.json");
|
||||
const allMainnetTokenDocPath = path.join(repoRoot, "docs/11-references/ALL_MAINNET_TOKEN_ADDRESSES.md");
|
||||
const markdownOutputPath = path.join(repoRoot, "reports/status/network-deployment-inventory-latest.md");
|
||||
const jsonOutputPath = path.join(repoRoot, "reports/status/network-deployment-inventory-latest.json");
|
||||
|
||||
function readJson(filePath) {
  // Load a UTF-8 JSON file from disk and return the parsed value.
  const raw = fs.readFileSync(filePath, "utf8");
  return JSON.parse(raw);
}
|
||||
|
||||
function writeText(filePath, content) {
  // Create the parent directory when needed, then write the file verbatim.
  const parentDir = path.dirname(filePath);
  fs.mkdirSync(parentDir, { recursive: true });
  fs.writeFileSync(filePath, content);
}
|
||||
|
||||
function normalizeBool(value) {
  // Render a tri-state flag for tables: true -> check, false -> em dash,
  // anything else (unknown/missing) -> "?".
  switch (value) {
    case true:
      return "✓";
    case false:
      return "—";
    default:
      return "?";
  }
}
|
||||
|
||||
function parseMarkdownTable(markdown, sectionTitle) {
  // Extract the pipe table under the exact "### <sectionTitle>" heading as an
  // array of { header: cell } objects. Data rows that merely repeat the header
  // or carry separator dashes are filtered out; missing sections yield [].
  const allLines = markdown.split("\n");
  const headingText = `### ${sectionTitle}`;
  const headingAt = allLines.findIndex((line) => line.trim() === headingText);
  if (headingAt === -1) {
    return [];
  }

  // Collect the section body up to the next heading of equal/higher level.
  const sectionLines = [];
  for (const candidate of allLines.slice(headingAt + 1)) {
    const trimmed = candidate.trim();
    if (trimmed.startsWith("### ") || trimmed.startsWith("## ")) {
      break;
    }
    sectionLines.push(trimmed);
  }

  const pipeLines = sectionLines.filter((line) => line.startsWith("|"));
  // Need header + separator + at least one data row.
  if (pipeLines.length < 3) {
    return [];
  }

  const splitCells = (line) =>
    line.split("|").slice(1, -1).map((cell) => cell.trim());

  const headers = splitCells(pipeLines[0]);
  const rows = pipeLines.slice(2).map((line) => {
    const cells = splitCells(line);
    return Object.fromEntries(headers.map((header, index) => [header, cells[index] ?? ""]));
  });

  return rows.filter(
    (row) =>
      row.Symbol &&
      row.Symbol !== "Symbol" &&
      row.Symbol !== "--------" &&
      row.Address &&
      row.Address !== "Address" &&
      row.Address !== "---------"
  );
}
|
||||
|
||||
function parseAllMainnetTokens(markdown) {
  // Flatten the three documented token tables into a single normalized list,
  // stripping backticks from addresses and defaulting decimals to 0.
  const tokenSections = ["Stablecoins", "Wrapped Tokens", "DeFi Tokens"];
  const tokens = [];
  for (const section of tokenSections) {
    for (const row of parseMarkdownTable(markdown, section)) {
      tokens.push({
        category: section,
        token: row.Token,
        symbol: row.Symbol,
        address: row.Address?.replace(/`/g, "") ?? "",
        decimals: Number(row.Decimals || 0),
        status: row.Status,
        notes: row.Notes
      });
    }
  }
  return tokens;
}
|
||||
|
||||
function buildSpecialCaseBridgeList(liveRouteRegistry, routingRegistry, chainId) {
  // Prefer verified live bridge routes touching this chain; only when none are
  // recorded does the configured routing registry act as a fallback.
  const wanted = Number(chainId);
  const touchesChain = (from, to) => Number(from) === wanted || Number(to) === wanted;

  const liveRoutes = (liveRouteRegistry.liveBridgeRoutes ?? [])
    .filter((route) => touchesChain(route.fromChainId, route.toChainId))
    .map((route) => ({
      routeId: route.routeId,
      fromChainId: route.fromChainId,
      toChainId: route.toChainId,
      bridgeType: route.bridgeType,
      asset: route.assetSymbol,
      bridgeAddress: route.bridgeAddress,
      status: "live"
    }));
  if (liveRoutes.length > 0) {
    return liveRoutes;
  }

  return (routingRegistry.routes ?? [])
    .filter((route) => touchesChain(route.fromChain, route.toChain))
    .map((route) => ({
      // Synthesize a stable id; the registry itself has none.
      routeId: `${route.fromChain}-${route.toChain}-${route.asset}-${route.pathType}`.toLowerCase(),
      fromChainId: route.fromChain,
      toChainId: route.toChain,
      bridgeType: route.pathType,
      asset: route.asset,
      bridgeAddress: route.bridgeAddress,
      status: "configured"
    }));
}
|
||||
|
||||
// Inventory counters over a single chain's deployment-status record.

function countTokens(chainStatus) {
  // Number of cW token symbols recorded for the chain.
  return Object.keys(chainStatus.cwTokens ?? {}).length;
}

function countAnchors(chainStatus) {
  // Number of anchor token addresses recorded for the chain.
  return Object.keys(chainStatus.anchorAddresses ?? {}).length;
}

function countPools(chainStatus, key) {
  // Length of the named pool list, or 0 when absent or not an array.
  const pools = chainStatus[key];
  return Array.isArray(pools) ? pools.length : 0;
}
|
||||
|
||||
function buildNextTasks(chainId) {
  // Curated follow-up checklists for the special-case networks; any other
  // chain id yields an empty list.
  const tasksByChain = {
    138: [
      "Keep Chain 138 PMM inventory, live swap routes, and public manifest synchronized after every token or pool deployment.",
      "Replace remaining placeholder bridge metadata with canonical deployed addresses and keep bridge notes current.",
      "Re-run route, inventory, and explorer verification audits after any bridge or pool change.",
      "Maintain source-chain bridge support for CCIP, GRU, and Alltra routes as the canonical launch surface."
    ],
    1111: [
      "Keep Wemix marked `planned_gas_scaffold` and `bridgeAvailable: false` until a successful bridge proof transfer is recorded.",
      "Replace placeholder gas PMM pools and gas reference venues with real deployed venues or remove them from the launch inventory.",
      "Complete canonical Wemix bridge readiness: funding, proof transfer verification, and any required inbound/outbound confirmation.",
      "Add real same-chain settlement inventory only after WEMIX-side execution is actually deployed and routable."
    ],
    651940: [
      "Add real same-chain Alltra swap inventory if users should settle into assets other than direct bridge receive.",
      "Publish deployed PMM or public DEX pool addresses into canonical inventory instead of anchor-only metadata.",
      "Expand the canonical Alltra token/routing surface only when those assets are actually part of supported settlement flows.",
      "Keep AlltraAdapter bridge metadata, supported assets, and fee assumptions synchronized with deployed bridge behavior."
    ]
  };
  return tasksByChain[Number(chainId)] ?? [];
}
|
||||
|
||||
function main() {
  // Build the network deployment inventory (Markdown + JSON) from the
  // canonical config/registry/doc sources. Read-only except for the two
  // report artifacts written under reports/status/.
  const deploymentStatus = readJson(deploymentStatusPath);
  const manifest = readJson(manifestPath);
  const liveRouteRegistry = readJson(liveRouteRegistryPath);
  const routingRegistry = readJson(routingRegistryPath);
  const allMainnetProtocolSurface = readJson(allMainnetProtocolSurfacePath);
  const allMainnetTokenDoc = fs.readFileSync(allMainnetTokenDocPath, "utf8");

  const supportedNetworks = manifest.supportedNetworks ?? [];
  // Manifest rows keyed by chain id (as a string) for merging with deployment status.
  const manifestByChain = new Map(supportedNetworks.map((network) => [String(network.chainId), network]));
  const allMainnetTokens = parseAllMainnetTokens(allMainnetTokenDoc);

  // One merged record per chain: deployment-status fields win; the manifest
  // fills gaps (name, activation state, bridge flag, route coverage counts).
  const chains = Object.entries(deploymentStatus.chains ?? {})
    .map(([chainId, status]) => {
      const manifestRow = manifestByChain.get(chainId) ?? {};
      return {
        chainId: Number(chainId),
        chainKey: chainId,
        network: status.name ?? manifestRow.name ?? `Chain ${chainId}`,
        activationState: status.activationState ?? manifestRow.activationState ?? "live",
        bridgeAvailable:
          typeof status.bridgeAvailable === "boolean"
            ? status.bridgeAvailable
            : Boolean(manifestRow.bridgeAvailable),
        tokenCount: countTokens(status),
        anchorCount: countAnchors(status),
        pmmPoolCount: countPools(status, "pmmPools"),
        volatilePoolCount: countPools(status, "pmmPoolsVolatile"),
        gasPmmPoolCount: countPools(status, "gasPmmPools"),
        referenceVenueCount: countPools(status, "gasReferenceVenues"),
        uniswapV2PoolCount: countPools(status, "uniswapV2Pools"),
        bridgeRouteCount: (manifestRow.routeCoverage?.inboundBridgeRoutes ?? 0) + (manifestRow.routeCoverage?.outboundBridgeRoutes ?? 0),
        cwTokens: status.cwTokens ?? {},
        anchorAddresses: status.anchorAddresses ?? {},
        gasMirrors: status.gasMirrors ?? {},
        gasQuoteAddresses: status.gasQuoteAddresses ?? {},
        specialCaseBridges: buildSpecialCaseBridgeList(liveRouteRegistry, routingRegistry, chainId)
      };
    })
    .sort((left, right) => left.chainId - right.chainId);

  // Chains that get an expanded per-network section with curated next tasks;
  // only Alltra (651940) carries the documented ecosystem token list.
  const specialCases = chains
    .filter((chain) => [138, 1111, 651940].includes(chain.chainId))
    .map((chain) => ({
      ...chain,
      documentedTokens:
        chain.chainId === 651940
          ? (allMainnetProtocolSurface.documentedTokens ?? allMainnetTokens)
          : [],
      nextTasks: buildNextTasks(chain.chainId)
    }));

  // Markdown report: one summary table, then a detail section per special case.
  const markdown = [
    "# Network Deployment Inventory",
    "",
    "| Chain | Network | Activation | Bridge Available | Tokens | Anchors | PMM | Volatile | Gas PMM | Ref Venues | UniV2 | Bridge Routes |",
    "|---:|---|---|:---:|---:|---:|---:|---:|---:|---:|---:|---:|",
    ...chains.map(
      (chain) =>
        `| \`${chain.chainId}\` | ${chain.network} | \`${chain.activationState}\` | ${normalizeBool(chain.bridgeAvailable)} | ${chain.tokenCount} | ${chain.anchorCount} | ${chain.pmmPoolCount} | ${chain.volatilePoolCount} | ${chain.gasPmmPoolCount} | ${chain.referenceVenueCount} | ${chain.uniswapV2PoolCount} | ${chain.bridgeRouteCount} |`
    ),
    "",
    "## Special-Case Networks",
    "",
    ...specialCases.flatMap((chain) => {
      const bridgeLines =
        chain.specialCaseBridges.length > 0
          ? chain.specialCaseBridges.map(
              (bridge) =>
                `  - \`${bridge.fromChainId} -> ${bridge.toChainId}\` \`${bridge.bridgeType}\` \`${bridge.asset}\` at \`${bridge.bridgeAddress}\``
            )
          : ["  - none recorded"];

      const documentedTokenLines =
        chain.documentedTokens.length > 0
          ? [
              "",
              "- Documented ecosystem tokens:",
              ...chain.documentedTokens.map(
                (token) =>
                  `  - \`${token.symbol}\` ${token.address} (${token.decimals} decimals; ${token.category}; ${token.status})`
              )
            ]
          : [];

      return [
        `### \`${chain.chainId}\` ${chain.network}`,
        "",
        `- Activation state: \`${chain.activationState}\``,
        `- Bridge available: \`${chain.bridgeAvailable}\``,
        `- cW tokens: ${Object.keys(chain.cwTokens).length ? Object.keys(chain.cwTokens).join(", ") : "none"}`,
        `- Anchors: ${Object.keys(chain.anchorAddresses).length ? Object.entries(chain.anchorAddresses).map(([symbol, address]) => `${symbol}=${address}`).join(", ") : "none"}`,
        `- PMM pools: \`${chain.pmmPoolCount}\``,
        `- Gas PMM pools: \`${chain.gasPmmPoolCount}\``,
        `- Gas/reference venues: \`${chain.referenceVenueCount}\``,
        `- UniV2 pools: \`${chain.uniswapV2PoolCount}\``,
        "- Bridge routes:",
        ...bridgeLines,
        ...documentedTokenLines,
        "",
        "- Next tasks:",
        ...chain.nextTasks.map((task) => `  - ${task}`),
        ""
      ];
    })
  ].join("\n");

  // JSON artifact mirrors the Markdown: flat per-network counters plus the
  // expanded special-case records and the list of inputs used.
  const json = {
    name: "Network Deployment Inventory",
    generatedAt: new Date().toISOString(),
    sourceFiles: [
      path.relative(repoRoot, deploymentStatusPath),
      path.relative(repoRoot, manifestPath),
      path.relative(repoRoot, liveRouteRegistryPath),
      path.relative(repoRoot, routingRegistryPath),
      path.relative(repoRoot, allMainnetProtocolSurfacePath),
      path.relative(repoRoot, allMainnetTokenDocPath)
    ],
    networks: chains.map((chain) => ({
      chainId: chain.chainId,
      network: chain.network,
      activationState: chain.activationState,
      bridgeAvailable: chain.bridgeAvailable,
      tokenCount: chain.tokenCount,
      anchorCount: chain.anchorCount,
      pmmPoolCount: chain.pmmPoolCount,
      volatilePoolCount: chain.volatilePoolCount,
      gasPmmPoolCount: chain.gasPmmPoolCount,
      referenceVenueCount: chain.referenceVenueCount,
      uniswapV2PoolCount: chain.uniswapV2PoolCount,
      bridgeRouteCount: chain.bridgeRouteCount
    })),
    specialCases: specialCases.map((chain) => ({
      chainId: chain.chainId,
      network: chain.network,
      activationState: chain.activationState,
      bridgeAvailable: chain.bridgeAvailable,
      cwTokens: chain.cwTokens,
      anchors: chain.anchorAddresses,
      gasMirrors: chain.gasMirrors,
      gasQuoteAddresses: chain.gasQuoteAddresses,
      bridgeRoutes: chain.specialCaseBridges,
      documentedTokens: chain.documentedTokens,
      nextTasks: chain.nextTasks
    }))
  };

  writeText(markdownOutputPath, `${markdown}\n`);
  writeText(jsonOutputPath, `${JSON.stringify(json, null, 2)}\n`);
  console.log(`Wrote ${path.relative(repoRoot, markdownOutputPath)}`);
  console.log(`Wrote ${path.relative(repoRoot, jsonOutputPath)}`);
}
|
||||
|
||||
// Script entry point: build and write both inventory artifacts.
main();
|
||||
@@ -29,9 +29,9 @@ done
|
||||
|
||||
echo "All required dependencies present: ${REQUIRED[*]}"
|
||||
if [ ${#OPTIONAL_MISSING[@]} -gt 0 ]; then
|
||||
echo "Optional (recommended for automation): ${OPTIONAL[*]}"
|
||||
echo "Missing optional: ${OPTIONAL_MISSING[*]}"
|
||||
echo "Install (Debian/Ubuntu): sudo apt install -y sshpass rsync dnsutils iproute2 screen tmux htop shellcheck parallel sqlite3"
|
||||
echo " (dig from dnsutils; ss from iproute2; wscat/websocat: npm install -g wscat or cargo install websocat)"
|
||||
# Not a failure — optional tools (exit code stays 0)
|
||||
echo "Note — optional tools not in PATH: ${OPTIONAL_MISSING[*]}"
|
||||
echo " To install (Debian/Ubuntu): sudo apt install -y sshpass rsync dnsutils iproute2 screen tmux htop shellcheck parallel sqlite3"
|
||||
echo " (dig: dnsutils; ss: iproute2; wscat/websocat: npm install -g wscat or cargo install websocat)"
|
||||
fi
|
||||
exit 0
|
||||
|
||||
56
scripts/verify/export-liquidity-pools-compact-csv.py
Normal file
56
scripts/verify/export-liquidity-pools-compact-csv.py
Normal file
@@ -0,0 +1,56 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Export the compact liquidity-pools CSV from the generated master-map JSON."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import csv
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
ROOT = Path(__file__).resolve().parents[2]
|
||||
INPUT_JSON = ROOT / "reports/status/liquidity-pools-master-map-latest.json"
|
||||
OUTPUT_CSV = ROOT / "reports/status/liquidity-pools-master-map-compact-latest.csv"
|
||||
|
||||
FIELDNAMES = [
|
||||
"chainId",
|
||||
"network",
|
||||
"pair",
|
||||
"poolAddress",
|
||||
"baseTokenAddress",
|
||||
"quoteTokenAddress",
|
||||
"baseBalance",
|
||||
"quoteBalance",
|
||||
"status",
|
||||
]
|
||||
|
||||
|
||||
def main() -> int:
    """Flatten the master-map JSON into the compact per-pool CSV and return 0."""
    report = json.loads(INPUT_JSON.read_text())
    OUTPUT_CSV.parent.mkdir(parents=True, exist_ok=True)

    with OUTPUT_CSV.open("w", newline="") as handle:
        writer = csv.DictWriter(handle, fieldnames=FIELDNAMES)
        writer.writeheader()
        for chain in report.get("chains", []):
            for pool in chain.get("pools", []):
                # Pool-level fields win; chain-level fields fill in the gaps.
                balances = pool.get("balances") or {}
                writer.writerow(
                    {
                        "chainId": pool.get("chainId", chain.get("chainId")),
                        "network": pool.get("network", chain.get("network")),
                        "pair": f"{pool.get('baseSymbol')}/{pool.get('quoteSymbol')}",
                        "poolAddress": pool.get("poolAddress", ""),
                        "baseTokenAddress": pool.get("baseAddress", ""),
                        "quoteTokenAddress": pool.get("quoteAddress", ""),
                        "baseBalance": (balances.get("base") or {}).get("formatted", ""),
                        "quoteBalance": (balances.get("quote") or {}).get("formatted", ""),
                        "status": pool.get("status", ""),
                    }
                )

    print(f"Wrote {OUTPUT_CSV.relative_to(ROOT)}")
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Propagate main()'s exit status to the shell.
    raise SystemExit(main())
|
||||
409
scripts/verify/plan-mainnet-cwusdc-usdc-repeg.py
Normal file
409
scripts/verify/plan-mainnet-cwusdc-usdc-repeg.py
Normal file
@@ -0,0 +1,409 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
from datetime import datetime, timezone
|
||||
from decimal import Decimal, ROUND_CEILING, getcontext
|
||||
from pathlib import Path
|
||||
|
||||
getcontext().prec = 42  # headroom for 6-decimal token arithmetic

ROOT = Path(__file__).resolve().parents[2]
# Latest preflight snapshot consumed by default.
LATEST_SNAPSHOT = ROOT / "reports" / "status" / "mainnet-cwusdc-usdc-preflight-latest.json"
POLICY_PATH = ROOT / "config" / "extraction" / "mainnet-cwusdc-usdc-support-policy.json"
ROOT_ENV_PATH = ROOT / ".env"
SMOM_ENV_PATH = ROOT / "smom-dbis-138" / ".env"
ZERO_ADDRESS = "0x0000000000000000000000000000000000000000"
# Fallback token addresses used when the corresponding env vars are unset.
DEFAULT_CWUSDC = "0x2de5F116bFcE3d0f922d9C8351e0c5Fc24b9284a"
DEFAULT_USDC = "0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48"
SIX_DECIMALS = Decimal(10) ** 6
ADDRESS_RE = re.compile(r"0x[a-fA-F0-9]{40}")
UINT_RE = re.compile(r"\b\d+\b")
|
||||
|
||||
|
||||
def load_json(path: Path) -> dict:
    """Parse the file at *path* as JSON and return the decoded object."""
    with path.open("r") as handle:
        return json.load(handle)
|
||||
|
||||
|
||||
def load_env_file(path: Path) -> dict[str, str]:
    """Parse a dotenv-style file into a dict; a missing file yields {}.

    Blank lines, comments, and lines without '=' are skipped; values are
    stripped of surrounding double/single quotes.
    """
    parsed: dict[str, str] = {}
    if not path.exists():
        return parsed
    for entry in path.read_text().splitlines():
        entry = entry.strip()
        if entry and not entry.startswith("#") and "=" in entry:
            name, _, rhs = entry.partition("=")
            parsed[name.strip()] = rhs.strip().strip('"').strip("'")
    return parsed
|
||||
|
||||
|
||||
def merged_env_values() -> dict[str, str]:
    """Combine root and smom .env values; smom entries win on key clashes."""
    combined: dict[str, str] = {}
    for env_path in (ROOT_ENV_PATH, SMOM_ENV_PATH):
        combined.update(load_env_file(env_path))
    return combined
|
||||
|
||||
|
||||
def resolve_env_value(key: str, env_values: dict[str, str], seen: set[str] | None = None) -> str:
    """Resolve *key* from os.environ first, then the merged .env values.

    Expands one level of ``${VAR}`` / ``${VAR:-fallback}`` indirection per
    recursion step; *seen* guards against self-referential cycles.
    """
    seen = set() if seen is None else seen
    if key in seen:
        # Cycle detected: return the literal stored value instead of recursing.
        return env_values.get(key, "")
    seen.add(key)
    value = os.environ.get(key) or env_values.get(key, "")
    if value.startswith("${") and value.endswith("}"):
        body = value[2:-1]
        target, _, fallback = body.partition(":-")
        resolved = resolve_env_value(target, env_values, seen)
        return resolved or fallback
    return value.rstrip("\r\n")
|
||||
|
||||
|
||||
def normalize_units(raw: int, decimals: int = 6) -> Decimal:
    """Convert a raw integer token amount into whole-token Decimal units."""
    scale = Decimal(10) ** decimals
    return Decimal(raw) / scale
|
||||
|
||||
|
||||
def units_to_raw(units: Decimal, decimals: int = 6) -> int:
    """Convert whole-token units to a raw integer amount, rounding up."""
    scaled = units * (Decimal(10) ** decimals)
    return int(scaled.to_integral_value(rounding=ROUND_CEILING))
|
||||
|
||||
|
||||
def decimal_max(a: Decimal, b: Decimal) -> Decimal:
    """Return the larger of two Decimals.

    The builtin ``max`` handles Decimal comparison directly; the previous
    hand-rolled conditional was redundant. Kept as a named helper for
    call-site readability.
    """
    return max(a, b)
|
||||
|
||||
|
||||
def parse_uint(value: str) -> int:
    """Return the first standalone unsigned integer found in *value*."""
    for token in UINT_RE.findall(value):
        return int(token)
    raise ValueError(f"could not parse integer from {value!r}")
|
||||
|
||||
|
||||
def parse_address(value: str) -> str:
    """Return the first 0x-prefixed 20-byte hex address found in *value*."""
    found = ADDRESS_RE.search(value)
    if found is None:
        raise ValueError(f"could not parse address from {value!r}")
    return found.group(0)
|
||||
|
||||
|
||||
def cast_call(rpc_url: str, target: str, signature: str, *args: str) -> str:
    """Run a read-only `cast call` against *target* and return trimmed stdout."""
    argv = ["cast", "call", target, signature]
    argv.extend(args)
    argv += ["--rpc-url", rpc_url]
    return subprocess.check_output(argv, text=True).strip()
|
||||
|
||||
|
||||
def query_balance(rpc_url: str, token: str, holder: str) -> int:
    """Read an ERC-20 balanceOf(*holder*) via `cast call`."""
    raw = cast_call(rpc_url, token, "balanceOf(address)(uint256)", holder)
    return parse_uint(raw)
|
||||
|
||||
|
||||
def derive_holder_from_private_key(env_values: dict[str, str]) -> str:
    """Derive the operator address from PRIVATE_KEY / KEEPER_PRIVATE_KEY, or ''."""
    key = resolve_env_value("PRIVATE_KEY", env_values)
    if not key:
        key = resolve_env_value("KEEPER_PRIVATE_KEY", env_values)
    if not key or "${" in key:
        # Unset, or still an unexpanded placeholder: nothing to derive.
        return ""
    derived = subprocess.check_output(
        ["cast", "wallet", "address", "--private-key", key], text=True
    ).strip()
    return parse_address(derived)
|
||||
|
||||
|
||||
def shell_quote(value: str) -> str:
    """Single-quote *value* for POSIX shells, escaping embedded single quotes."""
    escaped = value.replace("'", "'\"'\"'")
    return f"'{escaped}'"
|
||||
|
||||
|
||||
def command_block(lines: list[str]) -> str:
    """Render a list of shell command lines as one newline-joined block."""
    separator = "\n"
    return separator.join(lines)
|
||||
|
||||
|
||||
def funding_status(required_raw: int, available_raw: int, decimals: int = 6) -> dict:
    """Describe how much of *required_raw* the *available_raw* balance covers.

    Returns stringified raw and whole-token amounts plus a ``covered`` flag.
    """
    deficit_raw = required_raw - available_raw
    if deficit_raw < 0:
        deficit_raw = 0
    return {
        "requiredRaw": str(required_raw),
        "requiredUnits": str(normalize_units(required_raw, decimals)),
        "availableRaw": str(available_raw),
        "availableUnits": str(normalize_units(available_raw, decimals)),
        "shortfallRaw": str(deficit_raw),
        "shortfallUnits": str(normalize_units(deficit_raw, decimals)),
        "covered": deficit_raw == 0,
    }
|
||||
|
||||
|
||||
def build_plan(snapshot: dict, policy: dict, env_values: dict[str, str], holder_override: str) -> dict:
    """Assemble the read-only cWUSDC/USDC re-peg plan from a preflight snapshot.

    Combines snapshot reserve data, policy floors, live operator-wallet
    balances (queried via `cast`), and env-resolved addresses into a
    JSON-ready plan with funding checks, blockers, and copy-paste operator
    command blocks. Performs no writes on-chain.

    Args:
        snapshot: Parsed preflight snapshot JSON.
        policy: Parsed support-policy JSON (thresholds + managedCycle).
        env_values: Merged .env values (see merged_env_values()).
        holder_override: Optional holder address; otherwise derived from
            PRIVATE_KEY / KEEPER_PRIVATE_KEY.

    Raises:
        RuntimeError: if ETHEREUM_MAINNET_RPC cannot be resolved.
    """
    rpc_url = resolve_env_value("ETHEREUM_MAINNET_RPC", env_values)
    if not rpc_url:
        raise RuntimeError("missing ETHEREUM_MAINNET_RPC")

    summary = snapshot["summary"]
    public_health = snapshot["health"]["publicPairHealth"]
    # NOTE: the previous `defended_health` local was never used; defended-venue
    # figures below all come from `summary`. Removed as dead code.
    treasury = snapshot.get("treasuryManager") or {}

    # Defended-pool 1:1 parity sizing: top up the lighter side to match the heavier.
    base_reserve_raw = int(summary["defendedBaseReserveRaw"])
    quote_reserve_raw = int(summary["defendedQuoteReserveRaw"])
    target_reserve_raw = max(base_reserve_raw, quote_reserve_raw)
    add_quote_raw = max(base_reserve_raw - quote_reserve_raw, 0)
    add_base_raw = max(quote_reserve_raw - base_reserve_raw, 0)

    # Public-pair shortfalls against the policy reserve floors.
    min_base_units = Decimal(str(policy["thresholds"]["minBaseReserveUnits"]))
    min_quote_units = Decimal(str(policy["thresholds"]["minQuoteReserveUnits"]))
    public_base_units = Decimal(str(summary["publicPairBaseReserveUnits"]))
    public_quote_units = Decimal(str(summary["publicPairQuoteReserveUnits"]))
    public_base_shortfall_units = decimal_max(min_base_units - public_base_units, Decimal(0))
    public_quote_shortfall_units = decimal_max(min_quote_units - public_quote_units, Decimal(0))
    public_base_shortfall_raw = units_to_raw(public_base_shortfall_units)
    public_quote_shortfall_raw = units_to_raw(public_quote_shortfall_units)

    max_automated_raw = int(policy["managedCycle"]["maxAutomatedFlashQuoteAmountRaw"])
    manager_available_raw = int(treasury.get("availableQuoteRaw") or 0)

    # Resolve addresses: explicit override > derived wallet; env > defaults.
    holder = holder_override or derive_holder_from_private_key(env_values)
    cwusdc = resolve_env_value("CWUSDC_MAINNET", env_values) or DEFAULT_CWUSDC
    usdc = resolve_env_value("USDC_MAINNET", env_values) or DEFAULT_USDC
    manager = snapshot["resolvedAddresses"].get("treasuryManager") or ""
    receiver = snapshot["resolvedAddresses"].get("receiver") or ""
    defended_pool = snapshot["health"]["defendedVenue"]["poolAddress"]
    public_pair = snapshot["health"]["publicPair"]["poolAddress"]
    integration = resolve_env_value("DODO_PMM_INTEGRATION_MAINNET", env_values)
    router = resolve_env_value("CHAIN_1_UNISWAP_V2_ROUTER", env_values)

    # Live inventory check for the operator wallet (best-effort; failures
    # become blockers rather than aborting the plan).
    holder_state = None
    holder_usdc_raw = 0
    holder_cwusdc_raw = 0
    holder_blockers: list[str] = []
    if holder and holder.lower() != ZERO_ADDRESS:
        try:
            holder_cwusdc_raw = query_balance(rpc_url, cwusdc, holder)
            holder_usdc_raw = query_balance(rpc_url, usdc, holder)
            holder_state = {
                "address": holder,
                "cwusdcBalanceRaw": str(holder_cwusdc_raw),
                "cwusdcBalanceUnits": str(normalize_units(holder_cwusdc_raw)),
                "usdcBalanceRaw": str(holder_usdc_raw),
                "usdcBalanceUnits": str(normalize_units(holder_usdc_raw)),
            }
        except Exception as exc:
            holder_blockers.append(f"holder balance query failed: {exc}")

    manager_funding = funding_status(max_automated_raw, manager_available_raw)
    defended_quote_funding = funding_status(add_quote_raw, holder_usdc_raw)
    public_base_funding = funding_status(public_base_shortfall_raw, holder_cwusdc_raw)
    public_quote_funding = funding_status(public_quote_shortfall_raw, holder_usdc_raw)

    # Collect hard blockers; funding blockers only apply when we actually
    # managed to read the holder's balances (holder_state is set).
    blockers: list[str] = []
    warnings = snapshot.get("warnings") or []
    if add_base_raw > 0:
        blockers.append("defended pool needs base-side top-up logic; current plan only supports quote-side top-up for this rail")
    if add_quote_raw > 0 and holder_state and not defended_quote_funding["covered"]:
        blockers.append(
            "operator wallet does not hold enough USDC to restore defended pool reserve parity; external funding is required"
        )
    if public_base_shortfall_raw > 0 and holder_state and not public_base_funding["covered"]:
        blockers.append(
            "operator wallet does not hold enough cWUSDC to reseed the public pair to policy floor; external mint/bridge is required"
        )
    if public_quote_shortfall_raw > 0 and holder_state and not public_quote_funding["covered"]:
        blockers.append(
            "operator wallet does not hold enough USDC to reseed the public pair to policy floor"
        )
    if manager_funding["covered"] is False and holder_state and holder_usdc_raw < max_automated_raw:
        blockers.append("operator wallet cannot fully fund even one max-sized automated defense cycle from current USDC balance")
    if not integration:
        blockers.append("missing DODO_PMM_INTEGRATION_MAINNET")
    if not router:
        blockers.append("missing CHAIN_1_UNISWAP_V2_ROUTER")
    if any("defended quote query failed" in warning for warning in warnings):
        blockers.append("defended pool quote preview reverted; set MIN_BASE_OUT_RAW manually before any quote-in trade")

    # Copy-paste shell command blocks for the operator; only emitted when the
    # relevant addresses/amounts are actionable.
    operator_commands = {
        "rerunPreflight": "bash scripts/verify/snapshot-mainnet-cwusdc-usdc-preflight.sh",
        "rerunPlan": "bash scripts/verify/plan-mainnet-cwusdc-usdc-repeg.sh",
    }

    if manager and manager.lower() != ZERO_ADDRESS:
        operator_commands["fundManagerUsdc"] = command_block(
            [
                "source smom-dbis-138/scripts/load-env.sh >/dev/null",
                f"export USDC={usdc}",
                f"export MANAGER={manager}",
                f"export AMOUNT_RAW={max_automated_raw}",
                'cast send "$USDC" \'transfer(address,uint256)(bool)\' "$MANAGER" "$AMOUNT_RAW" \\',
                ' --private-key "$PRIVATE_KEY" --rpc-url "$ETHEREUM_MAINNET_RPC"',
            ]
        )

    if integration and add_quote_raw > 0:
        operator_commands["tradeDefendedPoolQuoteIn"] = command_block(
            [
                "source smom-dbis-138/scripts/load-env.sh >/dev/null",
                f"export CWUSDC={cwusdc}",
                f"export USDC={usdc}",
                f"export INTEGRATION={integration}",
                f"export POOL={defended_pool}",
                f"export QUOTE_IN_RAW={add_quote_raw}",
                "export MIN_BASE_OUT_RAW=REPLACE_AFTER_DRY_RUN",
                'cast send "$USDC" \'approve(address,uint256)(bool)\' "$INTEGRATION" "$QUOTE_IN_RAW" \\',
                ' --private-key "$PRIVATE_KEY" --rpc-url "$ETHEREUM_MAINNET_RPC"',
                'cast send "$INTEGRATION" \'swapExactIn(address,address,uint256,uint256)\' "$POOL" "$USDC" "$QUOTE_IN_RAW" "$MIN_BASE_OUT_RAW" \\',
                ' --private-key "$PRIVATE_KEY" --rpc-url "$ETHEREUM_MAINNET_RPC"',
            ]
        )

    if router and public_base_shortfall_raw > 0 and public_quote_shortfall_raw > 0:
        operator_commands["reseedPublicPair"] = command_block(
            [
                "source smom-dbis-138/scripts/load-env.sh >/dev/null",
                f"export ROUTER={router}",
                f"export CWUSDC={cwusdc}",
                f"export USDC={usdc}",
                f"export BASE_AMOUNT_RAW={public_base_shortfall_raw}",
                f"export QUOTE_AMOUNT_RAW={public_quote_shortfall_raw}",
                'export DEADLINE="$(( $(date +%s) + 3600 ))"',
                'export SIGNER="$(cast wallet address --private-key "$PRIVATE_KEY")"',
                'cast send "$CWUSDC" \'approve(address,uint256)(bool)\' "$ROUTER" "$BASE_AMOUNT_RAW" \\',
                ' --private-key "$PRIVATE_KEY" --rpc-url "$ETHEREUM_MAINNET_RPC"',
                'cast send "$USDC" \'approve(address,uint256)(bool)\' "$ROUTER" "$QUOTE_AMOUNT_RAW" \\',
                ' --private-key "$PRIVATE_KEY" --rpc-url "$ETHEREUM_MAINNET_RPC"',
                'cast send "$ROUTER" \'addLiquidity(address,address,uint256,uint256,uint256,uint256,address,uint256)\' \\',
                ' "$CWUSDC" "$USDC" "$BASE_AMOUNT_RAW" "$QUOTE_AMOUNT_RAW" "$BASE_AMOUNT_RAW" "$QUOTE_AMOUNT_RAW" "$SIGNER" "$DEADLINE" \\',
                ' --private-key "$PRIVATE_KEY" --rpc-url "$ETHEREUM_MAINNET_RPC"',
            ]
        )

    recommended_actions = [
        {
            "step": "fund_manager_for_one_max_cycle",
            "quoteAmountRaw": str(max_automated_raw),
            "quoteAmountUnits": str(normalize_units(max_automated_raw)),
            "status": "ready" if manager_funding["covered"] else "needs_usdc",
        },
        {
            "step": "sell_usdc_into_defended_pool_toward_simple_1_to_1_reserve_parity",
            "baseAmountRaw": str(add_base_raw),
            "quoteAmountRaw": str(add_quote_raw),
            "quoteAmountUnits": str(normalize_units(add_quote_raw)),
            "status": "ready" if add_quote_raw == 0 or defended_quote_funding["covered"] else "needs_usdc",
        },
        {
            "step": "reseed_public_pair_to_policy_floor",
            "baseAmountRaw": str(public_base_shortfall_raw),
            "baseAmountUnits": str(normalize_units(public_base_shortfall_raw)),
            "quoteAmountRaw": str(public_quote_shortfall_raw),
            "quoteAmountUnits": str(normalize_units(public_quote_shortfall_raw)),
            "status": (
                "ready"
                if public_base_shortfall_raw == 0
                or (public_base_funding["covered"] and public_quote_funding["covered"])
                else "needs_inventory"
            ),
        },
    ]

    return {
        "generatedAt": datetime.now(timezone.utc).isoformat(),
        "mode": "read_only_repeg_plan",
        "snapshotPath": str(LATEST_SNAPSHOT),
        "policyPath": str(POLICY_PATH),
        "inferenceNotes": [
            "Defended-pool 1:1 sizing is inferred from equal 6-decimal matched-rail tokens and reserve-balance parity.",
            "DODO PMM mid-price can still differ from reserve ratio; rerun preflight after each funding action.",
            "Public-pair reseed target uses the current policy reserve floors, not a smaller cosmetic liquidity target.",
        ],
        "resolvedAddresses": {
            "holder": holder or None,
            "cwusdc": cwusdc,
            "usdc": usdc,
            "publicPair": public_pair,
            "defendedPool": defended_pool,
            "treasuryManager": manager or None,
            "receiver": receiver or None,
            "dodoIntegration": integration or None,
            "uniswapV2Router": router or None,
        },
        "defendedVenue": {
            "midPrice": summary["defendedMidPrice"],
            "deviationBps": summary["defendedDeviationBps"],
            "baseReserveRaw": str(base_reserve_raw),
            "baseReserveUnits": str(normalize_units(base_reserve_raw)),
            "quoteReserveRaw": str(quote_reserve_raw),
            "quoteReserveUnits": str(normalize_units(quote_reserve_raw)),
            "simpleReserveParity": {
                "targetReservePerSideRaw": str(target_reserve_raw),
                "targetReservePerSideUnits": str(normalize_units(target_reserve_raw)),
                "addBaseRaw": str(add_base_raw),
                "addBaseUnits": str(normalize_units(add_base_raw)),
                "addQuoteRaw": str(add_quote_raw),
                "addQuoteUnits": str(normalize_units(add_quote_raw)),
            },
        },
        "publicLane": {
            "pairAddress": public_pair,
            "priceQuotePerBase": public_health["priceQuotePerBase"],
            "deviationBps": summary["publicPairDeviationBps"],
            "baseReserveUnits": str(public_base_units),
            "quoteReserveUnits": str(public_quote_units),
            "policyFloorBaseUnits": str(min_base_units),
            "policyFloorQuoteUnits": str(min_quote_units),
            "policyFloorBaseShortfallRaw": str(public_base_shortfall_raw),
            "policyFloorBaseShortfallUnits": str(normalize_units(public_base_shortfall_raw)),
            "policyFloorQuoteShortfallRaw": str(public_quote_shortfall_raw),
            "policyFloorQuoteShortfallUnits": str(normalize_units(public_quote_shortfall_raw)),
        },
        "automation": {
            "managerAvailableQuoteRaw": str(manager_available_raw),
            "managerAvailableQuoteUnits": str(normalize_units(manager_available_raw)),
            "maxAutomatedFlashQuoteAmountRaw": str(max_automated_raw),
            "maxAutomatedFlashQuoteAmountUnits": str(normalize_units(max_automated_raw)),
            "managerFundingForOneMaxCycle": manager_funding,
        },
        "holderState": holder_state,
        "holderFundingChecks": {
            "defendedQuoteTopUp": defended_quote_funding,
            "publicPairBaseTopUp": public_base_funding,
            "publicPairQuoteTopUp": public_quote_funding,
        },
        "recommendedActions": recommended_actions,
        "operatorCommands": operator_commands,
        "warnings": warnings,
        "blockers": holder_blockers + blockers,
        "status": {
            "canFullyReachSimple1To1WithCurrentHolder": len(holder_blockers + blockers) == 0,
            "needsExternalFunding": (
                not defended_quote_funding["covered"]
                or not public_base_funding["covered"]
                or not public_quote_funding["covered"]
            ),
            "canFundManagerFromCurrentHolder": holder_usdc_raw >= max_automated_raw if holder_state else None,
        },
    }
|
||||
|
||||
|
||||
def main() -> int:
    """CLI entry: build the repeg plan, optionally persist it, print JSON."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--snapshot", default=str(LATEST_SNAPSHOT), help="Path to a preflight snapshot JSON.")
    parser.add_argument("--holder", default="", help="Optional holder address to inventory-check instead of deriving from PRIVATE_KEY.")
    parser.add_argument("--out", help="Write the plan JSON to this file.")
    args = parser.parse_args()

    snapshot_file = Path(args.snapshot)
    if not snapshot_file.exists():
        raise RuntimeError(f"missing snapshot file: {snapshot_file}")

    plan = build_plan(
        load_json(snapshot_file),
        load_json(POLICY_PATH),
        merged_env_values(),
        args.holder.strip(),
    )

    rendered = json.dumps(plan, indent=2)
    if args.out:
        destination = Path(args.out)
        destination.parent.mkdir(parents=True, exist_ok=True)
        destination.write_text(rendered + "\n")
    print(rendered)
    return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry point; exit code propagates from main().
    sys.exit(main())
|
||||
50
scripts/verify/plan-mainnet-cwusdc-usdc-repeg.sh
Normal file
50
scripts/verify/plan-mainnet-cwusdc-usdc-repeg.sh
Normal file
@@ -0,0 +1,50 @@
|
||||
#!/usr/bin/env bash
# Build the mainnet cWUSDC/USDC repeg plan (read-only) and print a short summary.
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/../.." && pwd)"

# shellcheck source=/home/intlc/projects/proxmox/scripts/lib/load-project-env.sh
source "${PROJECT_ROOT}/scripts/lib/load-project-env.sh"

PLAN_PY="${PROJECT_ROOT}/scripts/verify/plan-mainnet-cwusdc-usdc-repeg.py"
# UTC timestamp keeps per-run artifacts sortable and collision-free.
STAMP="$(date -u +%Y%m%dT%H%M%SZ)"
OUT_DIR="${PROJECT_ROOT}/reports/status"
OUT_FILE="${OUT_DIR}/mainnet-cwusdc-usdc-repeg-plan-${STAMP}.json"
LATEST_FILE="${OUT_DIR}/mainnet-cwusdc-usdc-repeg-plan-latest.json"

mkdir -p "${OUT_DIR}"

# The planner writes the stamped file via --out and echoes the same JSON to
# stdout, which we capture as the stable "latest" pointer.
python3 "${PLAN_PY}" --out "${OUT_FILE}" > "${LATEST_FILE}"

echo "Wrote repeg plan:"
echo " ${OUT_FILE}"
echo "Updated latest pointer:"
echo " ${LATEST_FILE}"
echo

# Human-readable digest of the plan JSON (stdlib-only heredoc Python).
python3 - <<'PY' "${LATEST_FILE}"
import json, sys
from pathlib import Path

data = json.loads(Path(sys.argv[1]).read_text())
defended = data["defendedVenue"]["simpleReserveParity"]
public_lane = data["publicLane"]
automation = data["automation"]["managerFundingForOneMaxCycle"]
holder = data.get("holderState") or {}

print("Summary:")
print(f" defendedMidPrice={data['defendedVenue']['midPrice']}")
print(f" defendedAddQuoteUnits={defended['addQuoteUnits']}")
print(f" publicPolicyFloorBaseShortfallUnits={public_lane['policyFloorBaseShortfallUnits']}")
print(f" publicPolicyFloorQuoteShortfallUnits={public_lane['policyFloorQuoteShortfallUnits']}")
print(f" managerFundingShortfallUnits={automation['shortfallUnits']}")
if holder:
    print(f" holder={holder['address']}")
    print(f" holderCwusdcUnits={holder['cwusdcBalanceUnits']}")
    print(f" holderUsdcUnits={holder['usdcBalanceUnits']}")
blockers = data.get("blockers") or []
if blockers:
    print("Blockers:")
    for blocker in blockers:
        print(f" - {blocker}")
PY
|
||||
277
scripts/verify/snapshot-mainnet-cwusdc-usdc-preflight.py
Executable file
277
scripts/verify/snapshot-mainnet-cwusdc-usdc-preflight.py
Executable file
@@ -0,0 +1,277 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
from datetime import datetime, timezone
|
||||
from decimal import Decimal, getcontext
|
||||
from pathlib import Path
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
getcontext().prec = 42  # headroom for 6-decimal token arithmetic

ROOT = Path(__file__).resolve().parents[2]
# Support-health checker whose JSON stdout seeds this snapshot.
CHECKER_PATH = ROOT / "scripts" / "verify" / "check-mainnet-cwusdc-usdc-support-health.py"
POLICY_PATH = ROOT / "config" / "extraction" / "mainnet-cwusdc-usdc-support-policy.json"
ROOT_ENV_PATH = ROOT / ".env"
SMOM_ENV_PATH = ROOT / "smom-dbis-138" / ".env"
ZERO_ADDRESS = "0x0000000000000000000000000000000000000000"
UINT_RE = re.compile(r"\b\d+\b")
ADDRESS_RE = re.compile(r"0x[a-fA-F0-9]{40}")
# Foundry broadcast artifacts used as address fallbacks when env vars are unset.
BROADCAST_RECEIVER = ROOT / "smom-dbis-138" / "broadcast" / "DeployAaveQuotePushFlashReceiver.s.sol" / "1" / "run-latest.json"
BROADCAST_MANAGER = ROOT / "smom-dbis-138" / "broadcast" / "DeployQuotePushTreasuryManager.s.sol" / "1" / "run-latest.json"
|
||||
|
||||
|
||||
def load_json(path: Path) -> dict:
    """Read and decode the JSON document at *path*."""
    text = path.read_text()
    return json.loads(text)
|
||||
|
||||
|
||||
def load_env_file(path: Path) -> dict[str, str]:
    """Best-effort dotenv parser: returns {} when *path* is absent.

    Skips blank lines, comments, and lines without '='; strips surrounding
    quotes from values.
    """
    if not path.exists():
        return {}
    entries: dict[str, str] = {}
    for raw_line in path.read_text().splitlines():
        stripped = raw_line.strip()
        if not stripped or stripped.startswith("#"):
            continue
        if "=" not in stripped:
            continue
        name, rhs = stripped.split("=", 1)
        entries[name.strip()] = rhs.strip().strip('"').strip("'")
    return entries
|
||||
|
||||
|
||||
def merged_env_values() -> dict[str, str]:
    """Merge root-level and smom .env files (smom entries override root)."""
    merged: dict[str, str] = {}
    merged.update(load_env_file(ROOT_ENV_PATH))
    merged.update(load_env_file(SMOM_ENV_PATH))
    return merged
|
||||
|
||||
|
||||
def resolve_env_value(key: str, env_values: dict[str, str], seen: set[str] | None = None) -> str:
    """Look up *key* in os.environ then *env_values*, expanding one ${VAR[:-fb]} level."""
    if seen is None:
        seen = set()
    if key in seen:
        return env_values.get(key, "")  # break self-referential loops
    seen.add(key)
    raw = os.environ.get(key) or env_values.get(key, "")
    if raw.startswith("${") and raw.endswith("}"):
        placeholder = raw[2:-1]
        name, _, default = placeholder.partition(":-")
        expanded = resolve_env_value(name, env_values, seen)
        return expanded or default
    return raw.rstrip("\r\n")
|
||||
|
||||
|
||||
def cast_call(rpc_url: str, target: str, signature: str, *args: str) -> str:
    """Invoke a read-only `cast call` and return its trimmed stdout."""
    return subprocess.check_output(
        ["cast", "call", target, signature, *args, "--rpc-url", rpc_url],
        text=True,
    ).strip()
|
||||
|
||||
|
||||
def parse_uint(value: str) -> int:
    """Return the first standalone unsigned integer in *value*, else ValueError."""
    found = UINT_RE.findall(value)
    if found:
        return int(found[0])
    raise ValueError(f"could not parse integer from {value!r}")
|
||||
|
||||
|
||||
def parse_uints(value: str, count: int) -> list[int]:
    """Return the first *count* standalone unsigned integers in *value*.

    Raises ValueError when fewer than *count* integers are present.
    """
    numbers = [int(token) for token in UINT_RE.findall(value)]
    if len(numbers) < count:
        raise ValueError(f"expected at least {count} integers from {value!r}")
    return numbers[:count]
|
||||
|
||||
|
||||
def parse_address(value: str) -> str:
    """Return the first 0x-prefixed 40-hex-char address in *value*, else ValueError."""
    hit = ADDRESS_RE.search(value)
    if hit is None:
        raise ValueError(f"could not parse address from {value!r}")
    return hit.group(0)
|
||||
|
||||
|
||||
def normalize_units(raw: int, decimals: int) -> Decimal:
    """Scale a raw integer token amount down to whole-token Decimal units."""
    divisor = Decimal(10) ** decimals
    return Decimal(raw) / divisor
|
||||
|
||||
|
||||
def run_health_checker() -> dict:
    """Run the support-health checker script and parse its JSON stdout."""
    raw = subprocess.check_output(["python3", str(CHECKER_PATH)], text=True)
    return json.loads(raw)
|
||||
|
||||
|
||||
def read_latest_create_address(path: Path, expected_contract_name: str) -> str:
    """Return the newest CREATE address for *expected_contract_name* in a
    Foundry broadcast file, or '' when the file or contract is absent."""
    if not path.exists():
        return ""
    broadcast = load_json(path)
    # Walk newest-first so the most recent deployment wins.
    for entry in reversed(broadcast.get("transactions", [])):
        is_create = entry.get("transactionType") == "CREATE"
        if is_create and entry.get("contractName") == expected_contract_name:
            return str(entry.get("contractAddress") or "").strip()
    return ""
|
||||
|
||||
|
||||
def query_token_meta(rpc_url: str, token: str) -> dict:
    """Fetch an ERC-20's decimals and symbol via `cast call`."""
    decimals_value = parse_uint(cast_call(rpc_url, token, "decimals()(uint8)"))
    symbol_value = cast_call(rpc_url, token, "symbol()(string)")
    return {"address": token, "symbol": symbol_value, "decimals": decimals_value}
|
||||
|
||||
|
||||
def query_manager_state(rpc_url: str, manager: str) -> dict:
    """Read the treasury manager's on-chain state via a series of `cast call`s.

    Returns raw amounts as strings plus derived whole-token "...Units" fields
    scaled by the quote token's decimals.
    """
    quote_token = parse_address(cast_call(rpc_url, manager, "quoteToken()(address)"))
    receiver = parse_address(cast_call(rpc_url, manager, "receiver()(address)"))
    state = {
        "manager": manager,
        "quoteToken": query_token_meta(rpc_url, quote_token),
        "receiver": receiver,
        "receiverOwner": parse_address(cast_call(rpc_url, manager, "receiverOwner()(address)")),
        # `cast` prints booleans as text; a substring check tolerates extra output.
        "isReceiverOwnedByManager": "true" in cast_call(rpc_url, manager, "isReceiverOwnedByManager()(bool)").lower(),
        "quoteBalanceRaw": str(parse_uint(cast_call(rpc_url, manager, "quoteBalance()(uint256)"))),
        "availableQuoteRaw": str(parse_uint(cast_call(rpc_url, manager, "availableQuote()(uint256)"))),
        "receiverSweepableQuoteRaw": str(parse_uint(cast_call(rpc_url, manager, "receiverSweepableQuote()(uint256)"))),
        "receiverReserveRetainedRaw": str(parse_uint(cast_call(rpc_url, manager, "receiverReserveRetained()(uint256)"))),
        "managerReserveRetainedRaw": str(parse_uint(cast_call(rpc_url, manager, "managerReserveRetained()(uint256)"))),
        "gasRecipient": parse_address(cast_call(rpc_url, manager, "gasRecipient()(address)")),
        "recycleRecipient": parse_address(cast_call(rpc_url, manager, "recycleRecipient()(address)")),
    }
    # Derive human-readable unit fields from the quote token's decimals.
    decimals = state["quoteToken"]["decimals"]
    state["quoteBalanceUnits"] = str(normalize_units(int(state["quoteBalanceRaw"]), decimals))
    state["availableQuoteUnits"] = str(normalize_units(int(state["availableQuoteRaw"]), decimals))
    state["receiverSweepableQuoteUnits"] = str(normalize_units(int(state["receiverSweepableQuoteRaw"]), decimals))
    state["receiverReserveRetainedUnits"] = str(normalize_units(int(state["receiverReserveRetainedRaw"]), decimals))
    state["managerReserveRetainedUnits"] = str(normalize_units(int(state["managerReserveRetainedRaw"]), decimals))
    return state
|
||||
|
||||
|
||||
def query_receiver_state(rpc_url: str, receiver: str, quote_token: str) -> dict:
    """Read the flash receiver's owner and its quote-token balance."""
    owner = parse_address(cast_call(rpc_url, receiver, "owner()(address)"))
    balance_raw = parse_uint(cast_call(rpc_url, quote_token, "balanceOf(address)(uint256)", receiver))
    return {
        "receiver": receiver,
        "owner": owner,
        "quoteBalanceRaw": str(balance_raw),
    }
|
||||
|
||||
|
||||
def query_defended_quotes(rpc_url: str, defended_pool: str, trader: str, policy: dict) -> list[dict]:
    """Preview querySellQuote for each policy deviation tier on the defended pool."""
    previews: list[dict] = []
    for tier in policy["managedCycle"]["quoteAmountByDeviationBps"]:
        quote_raw = int(tier["flashQuoteAmountRaw"])
        response = cast_call(
            rpc_url,
            defended_pool,
            "querySellQuote(address,uint256)(uint256,uint256)",
            trader,
            str(quote_raw),
        )
        base_out, mt_fee = parse_uints(response, 2)
        previews.append(
            {
                "minDeviationBps": int(tier["minDeviationBps"]),
                "flashQuoteAmountRaw": quote_raw,
                "receiveBaseAmountRaw": str(base_out),
                "mtFeeRaw": str(mt_fee),
            }
        )
    return previews
|
||||
|
||||
|
||||
def build_summary(snapshot: dict) -> dict:
    """Condense a snapshot into the headline metrics operators scan first."""
    health = snapshot["health"]
    public_side = health["publicPairHealth"]
    defended_side = health["defendedVenueHealth"]
    manager = snapshot.get("treasuryManager") or {}
    decision = health["decision"]
    return {
        "publicPairDeviationBps": public_side.get("deviationBps"),
        "publicPairBaseReserveUnits": public_side.get("baseReserveUnits"),
        "publicPairQuoteReserveUnits": public_side.get("quoteReserveUnits"),
        "defendedMidPrice": defended_side.get("midPrice"),
        "defendedDeviationBps": defended_side.get("deviationBps"),
        "defendedBaseReserveRaw": defended_side.get("baseReserveRaw"),
        "defendedQuoteReserveRaw": defended_side.get("quoteReserveRaw"),
        "managerAvailableQuoteUnits": manager.get("availableQuoteUnits"),
        "receiverSweepableQuoteUnits": manager.get("receiverSweepableQuoteUnits"),
        "decisionAction": decision["action"],
        "decisionSeverity": decision["severity"],
        "flashQuoteAmountRaw": decision["flashQuoteAmountRaw"],
    }
|
||||
|
||||
|
||||
def main() -> int:
    """Read-only preflight: gather on-chain state and emit a JSON snapshot.

    Resolves the mainnet RPC URL and contract addresses from merged env
    values (falling back to the newest CREATE addresses in the broadcast
    artifacts), queries the treasury manager, flash receiver and defended
    pool, then prints the snapshot as indented JSON and optionally writes
    it to ``--out``. Recoverable query failures are collected into the
    snapshot's ``warnings`` list instead of aborting the preflight.
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("--out", help="Write the JSON snapshot to this file.")
    args = arg_parser.parse_args()

    env = merged_env_values()
    policy = load_json(POLICY_PATH)
    health = run_health_checker()

    # First policy-listed env key that resolves to a non-empty value wins.
    rpc_url = ""
    for env_key in policy["network"].get("rpcEnvKeys", []):
        rpc_url = resolve_env_value(env_key, env)
        if rpc_url:
            break
    if not rpc_url:
        raise RuntimeError("missing mainnet RPC URL")

    manager_addr = resolve_env_value("QUOTE_PUSH_TREASURY_MANAGER_MAINNET", env)
    receiver_addr = resolve_env_value("AAVE_QUOTE_PUSH_RECEIVER_MAINNET", env)
    # Env wins; otherwise fall back to the latest broadcast artifact.
    if not manager_addr:
        manager_addr = read_latest_create_address(BROADCAST_MANAGER, "QuotePushTreasuryManager")
    if not receiver_addr:
        receiver_addr = read_latest_create_address(BROADCAST_RECEIVER, "AaveQuotePushFlashReceiver")
    defended_pool = health["defendedVenue"]["poolAddress"]

    def configured(addr) -> bool:
        # A usable address is non-empty and not the zero address.
        return bool(addr) and addr.lower() != ZERO_ADDRESS

    treasury_manager = None
    receiver_state = None
    defended_quotes: list[dict] = []
    warnings: list[str] = []

    if configured(manager_addr):
        try:
            treasury_manager = query_manager_state(rpc_url, manager_addr)
            # The on-chain manager is authoritative for the receiver address.
            receiver_addr = treasury_manager["receiver"]
        except Exception as exc:
            warnings.append(f"treasury manager query failed: {exc}")
    else:
        warnings.append("treasury manager query skipped: QUOTE_PUSH_TREASURY_MANAGER_MAINNET not configured")

    if configured(receiver_addr):
        if treasury_manager:
            quote_token = treasury_manager["quoteToken"]["address"]
        else:
            quote_token = health["publicPair"]["quoteAddress"]
        try:
            receiver_state = query_receiver_state(rpc_url, receiver_addr, quote_token)
            # Without manager metadata, assume 6 decimals for the quote token
            # — TODO confirm this matches when the manager is unreachable.
            quote_decimals = treasury_manager["quoteToken"]["decimals"] if treasury_manager else 6
            receiver_state["quoteBalanceUnits"] = str(normalize_units(int(receiver_state["quoteBalanceRaw"]), quote_decimals))
        except Exception as exc:
            warnings.append(f"receiver query failed: {exc}")
    else:
        warnings.append("receiver query skipped: AAVE_QUOTE_PUSH_RECEIVER_MAINNET not configured")

    # The receiver doubles as the trader address for defended-lane quoting.
    trader = receiver_addr if configured(receiver_addr) else ""
    if trader:
        try:
            defended_quotes = query_defended_quotes(rpc_url, defended_pool, trader, policy)
        except Exception as exc:
            warnings.append(f"defended quote query failed: {exc}")
    else:
        warnings.append("defended quote query skipped: no receiver configured")

    snapshot = {
        "generatedAt": datetime.now(timezone.utc).isoformat(),
        "mode": "read_only_preflight",
        "policyPath": str(POLICY_PATH),
        "checkerPath": str(CHECKER_PATH),
        "resolvedAddresses": {
            "receiver": receiver_addr or None,
            "treasuryManager": manager_addr or None,
        },
        "health": health,
        "treasuryManager": treasury_manager,
        "receiver": receiver_state,
        "defendedLaneQuotes": defended_quotes,
        "warnings": warnings,
    }
    # The summary is derived from (and embedded into) the snapshot itself.
    snapshot["summary"] = build_summary(snapshot)

    rendered = json.dumps(snapshot, indent=2)
    if args.out:
        destination = Path(args.out)
        destination.parent.mkdir(parents=True, exist_ok=True)
        destination.write_text(rendered + "\n")
    print(rendered)
    return 0
|
||||
|
||||
if __name__ == "__main__":
    # Script entry point: propagate main()'s status code to the shell.
    raise SystemExit(main())
|
||||
45
scripts/verify/snapshot-mainnet-cwusdc-usdc-preflight.sh
Executable file
45
scripts/verify/snapshot-mainnet-cwusdc-usdc-preflight.sh
Executable file
@@ -0,0 +1,45 @@
|
||||
#!/usr/bin/env bash
# Run the mainnet CW-USDC/USDC read-only preflight snapshot and print its summary.
#
# Writes a timestamped snapshot JSON under reports/status/, refreshes the
# "-latest" pointer file, then pretty-prints the snapshot's summary section
# and any collected warnings.
set -euo pipefail

script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
project_root="$(cd "${script_dir}/../.." && pwd)"

# shellcheck source=/home/intlc/projects/proxmox/scripts/lib/load-project-env.sh
source "${project_root}/scripts/lib/load-project-env.sh"

snapshot_py="${project_root}/scripts/verify/snapshot-mainnet-cwusdc-usdc-preflight.py"
run_stamp="$(date -u +%Y%m%dT%H%M%SZ)"
status_dir="${project_root}/reports/status"
stamped_file="${status_dir}/mainnet-cwusdc-usdc-preflight-${run_stamp}.json"
latest_file="${status_dir}/mainnet-cwusdc-usdc-preflight-latest.json"

mkdir -p "${status_dir}"

# The snapshot script writes --out itself and echoes the same JSON on stdout,
# which we capture as the "latest" pointer file.
python3 "${snapshot_py}" --out "${stamped_file}" > "${latest_file}"

echo "Wrote snapshot:"
echo "  ${stamped_file}"
echo "Updated latest pointer:"
echo "  ${latest_file}"
echo

# Pretty-print the summary block of the freshly written snapshot.
python3 - <<'PY' "${latest_file}"
import json
import sys
from pathlib import Path

data = json.loads(Path(sys.argv[1]).read_text())
summary = data["summary"]
print("Summary:")
# Flat key=value lines first, then the two composite lines.
for field in (
    "publicPairDeviationBps",
    "publicPairBaseReserveUnits",
    "publicPairQuoteReserveUnits",
    "defendedMidPrice",
    "managerAvailableQuoteUnits",
    "receiverSweepableQuoteUnits",
):
    print(f"  {field}={summary.get(field)}")
print(f"  decision={summary.get('decisionSeverity')}/{summary.get('decisionAction')}")
print(f"  flashQuoteAmountRaw={summary.get('flashQuoteAmountRaw')}")
pending = data.get("warnings") or []
if pending:
    print("Warnings:")
    for warning in pending:
        print(f"  - {warning}")
PY
|
||||
Reference in New Issue
Block a user