chore: sync workspace — configs, docs, scripts, CI, pnpm, submodules
Some checks failed
Deploy to Phoenix / validate (push) Failing after 15s
Deploy to Phoenix / deploy (push) Has been skipped

- Submodule pins: dbis_core, cross-chain-pmm-lps, mcp-proxmox (local, push may be pending), metamask-integration, smom-dbis-138
- Atomic swap + cross-chain-pmm-lps-publish, deploy-portal workflow, phoenix deploy-targets, routing/aggregator matrices
- Docs, token-lists, forge proxy, phoenix API, runbooks, verify scripts

Made-with: Cursor
This commit is contained in:
defiQUG
2026-04-21 22:01:33 -07:00
parent e6bc7a6d7c
commit b8613905bd
231 changed files with 31657 additions and 2184 deletions

View File

@@ -0,0 +1,180 @@
#!/usr/bin/env python3
"""Merge deployment-status.json + Uni V2 pair-discovery report into a mesh matrix.
Read-only: does not call RPC. Use after running promod_uniswap_v2_live_pair_discovery.py
or pointing at an existing reports/extraction/promod-uniswap-v2-live-pair-discovery-*.json.
"""
from __future__ import annotations
import argparse
import json
from pathlib import Path
# Repo root — assumes this script lives two directories below it
# (e.g. scripts/<area>/<this-file>.py); TODO confirm layout.
ROOT = Path(__file__).resolve().parents[2]
# Default input: per-chain deployment state kept in the LPS submodule.
DEFAULT_STATUS = ROOT / "cross-chain-pmm-lps" / "config" / "deployment-status.json"
# Default input: latest Uni V2 pair-discovery snapshot report (no live RPC).
DEFAULT_DISCOVERY = ROOT / "reports" / "extraction" / "promod-uniswap-v2-live-pair-discovery-latest.json"
def load_json(path: Path) -> dict:
    """Read *path* and return its JSON-decoded content."""
    raw = path.read_text()
    return json.loads(raw)
def pmm_settlement_pools(pools: list) -> list[str]:
    """Return sorted, de-duplicated "base/quote" rails where base is a
    cW* stable (cWUSDC/cWUSDT) and quote a plain stable (USDC/USDT)."""
    rails = {
        f"{pool.get('base')}/{pool.get('quote')}"
        for pool in (pools or [])
        if pool.get("base") in ("cWUSDC", "cWUSDT")
        and pool.get("quote") in ("USDC", "USDT")
    }
    return sorted(rails)
def pmm_has_cw_mesh(pools: list) -> bool:
    """True if any pool pairs cWUSDT (base) against cWUSDC (quote)."""
    return any(
        pool.get("base") == "cWUSDT" and pool.get("quote") == "cWUSDC"
        for pool in (pools or [])
    )
def discovery_mesh_entry(entries: list, chain_id: int) -> dict | None:
    """Return the first discovery entry whose chain_id equals *chain_id*, else None.

    Entry chain ids may be strings; they are coerced with int() before comparing.
    """
    matches = (e for e in entries if int(e.get("chain_id", -1)) == chain_id)
    return next(matches, None)
def pair_mesh_state(entry: dict | None) -> tuple[bool | None, bool | None, str | None]:
    """Returns live, healthy (None if unknown), pool address (None if n/a)."""
    if not entry:
        # No discovery data for this chain at all.
        return None, None, None
    for pair in entry.get("pairsChecked") or []:
        if pair.get("base") != "cWUSDT" or pair.get("quote") != "cWUSDC":
            continue
        is_live = bool(pair.get("live"))
        address = pair.get("poolAddress") or ""
        if address in ("", "0x0000000000000000000000000000000000000000"):
            # Pair recorded but no real pool address — health is unknowable.
            return is_live, None, None
        healthy_flag = (pair.get("health") or {}).get("healthy")
        return is_live, None if healthy_flag is None else bool(healthy_flag), address
    # Discovery ran but never checked the cWUSDT/cWUSDC pair.
    return False, None, None
def build_rows(status_path: Path, discovery_path: Path) -> list[dict]:
    """Merge deployment-status chains with the pair-discovery snapshot.

    Returns one row dict per chain, sorted by numeric chain id.
    """
    status = load_json(status_path)
    discovery = load_json(discovery_path)
    disc_entries = discovery.get("entries") or []
    chains = status.get("chains") or {}

    rows: list[dict] = []
    for chain_key, chain_info in sorted(chains.items(), key=lambda kv: int(kv[0])):
        chain_id = int(chain_key)
        cw_tokens = chain_info.get("cwTokens") or {}
        pmm_pools = chain_info.get("pmmPools") or []
        entry = discovery_mesh_entry(disc_entries, chain_id)
        live, healthy, pool_addr = pair_mesh_state(entry)
        rows.append(
            {
                "chainId": chain_id,
                "network": chain_info.get("name", ""),
                "activationState": chain_info.get("activationState", ""),
                "hasCWUSDT": "cWUSDT" in cw_tokens,
                "hasCWUSDC": "cWUSDC" in cw_tokens,
                "cwTokenCount": len(cw_tokens),
                "pmmCWUSDTvsCWUSDC": pmm_has_cw_mesh(pmm_pools),
                "pmmSettlementRails": pmm_settlement_pools(pmm_pools),
                "uniswapV2PairDiscoveryPresent": entry is not None,
                "uniswapV2CWUSDTvsCWUSDCLive": live,
                "uniswapV2CWUSDTvsCWUSDCHealthy": healthy,
                # NOTE(review): "CWDC" looks like a typo for "CWUSDC", but the
                # key is consumed downstream as-is, so it is kept unchanged.
                "uniswapV2CWUSDTvsCWDCPool": pool_addr,
            }
        )
    return rows
def print_markdown(rows: list[dict], generated_from: dict[str, str]) -> None:
    """Print the cW* mesh matrix as a markdown report to stdout.

    rows: output of build_rows(); generated_from: label -> source-path map,
    echoed as a bullet list above the table.
    """
    print("# cW* mesh deployment matrix\n")
    for k, v in generated_from.items():
        print(f"- **{k}:** `{v}`")
    print()
    print(
        "| Chain | Network | cWUSDT | cWUSDC | PMM cWUSDT↔cWUSDC | PMM settlement | "
        "UniV2 cWUSDT/cWUSDC live | healthy | Pool |"
    )
    print("|------:|:---|:---:|:---:|:---:|:---|:---:|:---:|:---|")

    def _fmt_bool(v: bool | None) -> str:
        # Tri-state cell: None (unknown) renders as an empty cell.
        if v is None:
            return ""
        return str(v)

    for r in rows:
        settle = ", ".join(r["pmmSettlementRails"]) if r["pmmSettlementRails"] else ""
        if len(settle) > 48:
            # BUG FIX: the truncation suffix was an empty string ("" — a stripped
            # glyph); restore an ellipsis so truncated cells are visibly truncated.
            settle = settle[:45] + "…"
        print(
            # BUG FIX: the boolean cell markers read `'' if cond else ''` — both
            # branches empty, a dead conditional left by stripped Unicode.
            # Restore a check mark for True and a blank cell for False.
            f"| {r['chainId']} | {r['network'][:26]} | "
            f"{'✅' if r['hasCWUSDT'] else ''} | {'✅' if r['hasCWUSDC'] else ''} | "
            f"{'✅' if r['pmmCWUSDTvsCWUSDC'] else ''} | {settle} | "
            f"{_fmt_bool(r['uniswapV2CWUSDTvsCWUSDCLive'])} | {_fmt_bool(r['uniswapV2CWUSDTvsCWUSDCHealthy'])} | "
            f"`{r['uniswapV2CWUSDTvsCWDCPool'] or ''}` |"
        )
    print()
    print("## Notes\n")
    print(
        "- **PMM settlement**: pools where base is `cWUSDC` or `cWUSDT` and quote is `USDC` or `USDT` "
        "in `deployment-status.json`."
    )
    print(
        "- **Uni V2** columns come from the pair-discovery report (reserves/health are snapshot, not live RPC)."
    )
def main() -> None:
    """CLI entry point: merge the two inputs, then emit JSON and/or markdown."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        "--deployment-status",
        type=Path,
        default=DEFAULT_STATUS,
        help=f"Default: {DEFAULT_STATUS}",
    )
    parser.add_argument(
        "--pair-discovery",
        type=Path,
        default=DEFAULT_DISCOVERY,
        help=f"Default: {DEFAULT_DISCOVERY}",
    )
    parser.add_argument(
        "--json-out",
        type=Path,
        default=None,
        help="Optional path to write machine-readable rows (e.g. reports/status/cw-mesh-deployment-matrix-latest.json)",
    )
    parser.add_argument(
        "--no-markdown",
        action="store_true",
        help="Do not print markdown (useful with --json-out only)",
    )
    opts = parser.parse_args()

    matrix_rows = build_rows(opts.deployment_status, opts.pair_discovery)
    payload = {
        "schemaVersion": "1.0.0",
        "description": "Per-chain merge of deployment-status cwTokens/pmmPools and Uni V2 pair-discovery snapshot.",
        "generatedFrom": {
            "deploymentStatus": str(opts.deployment_status.resolve()),
            "pairDiscovery": str(opts.pair_discovery.resolve()),
        },
        "rows": matrix_rows,
    }
    if opts.json_out is not None:
        # Ensure the target directory exists before writing the report.
        opts.json_out.parent.mkdir(parents=True, exist_ok=True)
        opts.json_out.write_text(json.dumps(payload, indent=2) + "\n")
    if not opts.no_markdown:
        print_markdown(matrix_rows, payload["generatedFrom"])
# Script entry point — the module stays import-safe (no side effects on import).
if __name__ == "__main__":
    main()

View File

@@ -4,10 +4,13 @@ from __future__ import annotations
from pathlib import Path
import argparse
from decimal import Decimal, getcontext
from functools import lru_cache
import json
import os
import re
import subprocess
import time
from concurrent.futures import ThreadPoolExecutor, as_completed
getcontext().prec = 42
@@ -20,6 +23,7 @@ ZERO_ADDRESS = "0x0000000000000000000000000000000000000000"
HEALTHY_DEVIATION_BPS = Decimal("25")
MIN_HEALTHY_RESERVE_UNITS = Decimal("1000")
UINT_RE = re.compile(r"\b\d+\b")
CAST_CALL_TIMEOUT_SECONDS = int(os.environ.get("PROMOD_CAST_TIMEOUT_SECONDS", "20"))
CHAIN_CONFIG = {
"1": {"rpc_keys": ["ETHEREUM_MAINNET_RPC"], "hub": "USDC"},
@@ -68,9 +72,9 @@ def resolve_env_value(key: str, env_values: dict[str, str], seen: set[str] | Non
if seen is None:
seen = set()
if key in seen:
return env_values.get(key, "")
return os.environ.get(key, env_values.get(key, ""))
seen.add(key)
value = env_values.get(key, "")
value = os.environ.get(key, env_values.get(key, ""))
if value.startswith("${") and value.endswith("}"):
inner = value[2:-1]
target = inner.split(":-", 1)[0]
@@ -80,18 +84,30 @@ def resolve_env_value(key: str, env_values: dict[str, str], seen: set[str] | Non
def cast_call(rpc_url: str, target: str, signature: str, *args: str) -> str:
cmd = ["cast", "call", target, signature, *args, "--rpc-url", rpc_url]
return subprocess.check_output(cmd, text=True).strip()
return subprocess.check_output(cmd, text=True, timeout=CAST_CALL_TIMEOUT_SECONDS).strip()
@lru_cache(maxsize=1024)
def cast_call_cached(rpc_url: str, target: str, signature: str, *args: str) -> str:
return cast_call(rpc_url, target, signature, *args)
def parse_uint(value: str) -> int:
matches = UINT_RE.findall(value)
if not matches:
stripped = value.strip()
if not stripped:
raise ValueError(f"could not parse integer from {value!r}")
return int(matches[0])
return int(stripped.split()[0])
def parse_uints(value: str, count: int) -> list[int]:
matches = [int(match) for match in UINT_RE.findall(value)]
matches = []
for raw_line in value.splitlines():
line = raw_line.strip()
if not line:
continue
matches.append(int(line.split()[0]))
if len(matches) == count:
break
if len(matches) < count:
raise ValueError(f"expected {count} integers, got {value!r}")
return matches[:count]
@@ -109,11 +125,13 @@ def normalize_units(raw: int, decimals: int) -> Decimal:
def compute_pair_health(rpc_url: str, pair_address: str, base_address: str, quote_address: str) -> dict:
token0 = parse_address(cast_call(rpc_url, pair_address, "token0()(address)"))
token1 = parse_address(cast_call(rpc_url, pair_address, "token1()(address)"))
reserve0_raw, reserve1_raw, _ = parse_uints(cast_call(rpc_url, pair_address, "getReserves()(uint112,uint112,uint32)"), 3)
decimals0 = parse_uint(cast_call(rpc_url, token0, "decimals()(uint8)"))
decimals1 = parse_uint(cast_call(rpc_url, token1, "decimals()(uint8)"))
token0 = parse_address(cast_call_cached(rpc_url, pair_address, "token0()(address)"))
token1 = parse_address(cast_call_cached(rpc_url, pair_address, "token1()(address)"))
reserve0_raw, reserve1_raw, _ = parse_uints(
cast_call_cached(rpc_url, pair_address, "getReserves()(uint112,uint112,uint32)"), 3
)
decimals0 = parse_uint(cast_call_cached(rpc_url, token0, "decimals()(uint8)"))
decimals1 = parse_uint(cast_call_cached(rpc_url, token1, "decimals()(uint8)"))
if token0.lower() == base_address.lower() and token1.lower() == quote_address.lower():
base_raw, quote_raw = reserve0_raw, reserve1_raw
@@ -171,6 +189,68 @@ def append_discovered_pair(status: dict, chain_id: str, pair: dict):
return True
def build_chain_entry(chain_id: str, chain: dict, config: dict, env_values: dict[str, str]):
    """Probe one chain's Uni V2 factory for candidate cW* pairs.

    Returns (entry, discovered_rows): entry is the per-chain report dict,
    discovered_rows are live pairs formatted for deployment-status writing.

    NOTE(review): indentation reconstructed from syntax — this chunk is a
    diff view with leading whitespace stripped; nesting of the
    discovered_rows.append under `if live:` matches the pre-refactor code
    shown later in the diff — confirm against the real file.
    """
    # Per-chain env lookups: factory/router addresses and scan start block.
    factory = resolve_env_value(f"CHAIN_{chain_id}_UNISWAP_V2_FACTORY", env_values)
    router = resolve_env_value(f"CHAIN_{chain_id}_UNISWAP_V2_ROUTER", env_values)
    start_block = resolve_env_value(f"CHAIN_{chain_id}_UNISWAP_V2_START_BLOCK", env_values) or "0"
    # First configured RPC key that resolves to a value wins.
    rpc_url = ""
    for key in config["rpc_keys"]:
        value = resolve_env_value(key, env_values)
        if value:
            rpc_url = value
            break
    env_ready = bool(factory and router and rpc_url)
    pairs = []
    discovered_rows = []
    if env_ready:
        for base, quote, token0, token1 in candidate_pairs(chain):
            try:
                pair_address = cast_call_cached(rpc_url, factory, "getPair(address,address)(address)", token0, token1)
            except Exception as exc:
                # Keep the error inline so the report shows why the lookup failed.
                pair_address = f"ERROR:{exc}"
            # Live means: factory returned a real (non-zero) pair address.
            live = pair_address.lower() != ZERO_ADDRESS and not pair_address.startswith("ERROR:")
            row = {
                "base": base,
                "quote": quote,
                "poolAddress": pair_address,
                "live": live,
            }
            if live:
                try:
                    row["health"] = compute_pair_health(rpc_url, pair_address, token0, token1)
                except Exception as exc:
                    row["health"] = {"healthy": False, "error": str(exc)}
                discovered_rows.append(
                    {
                        "chain_id": chain_id,
                        "row": {
                            "base": base,
                            "quote": quote,
                            "poolAddress": pair_address,
                            "factoryAddress": factory,
                            "routerAddress": router,
                            "startBlock": int(start_block),
                            "venue": "uniswap_v2_pair",
                            "publicRoutingEnabled": False,
                        },
                    }
                )
            pairs.append(row)
    entry = {
        "chain_id": int(chain_id),
        "network": chain.get("name"),
        "factoryAddress": factory or None,
        "routerAddress": router or None,
        "startBlock": int(start_block),
        "rpcConfigured": bool(rpc_url),
        "envReady": env_ready,
        "pairsChecked": pairs,
    }
    return entry, discovered_rows
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--write-discovered", action="store_true", help="Write discovered live pairs into deployment-status.json under uniswapV2Pools.")
@@ -179,74 +259,22 @@ def main():
status = load_json(DEPLOYMENT_STATUS)
env_values = load_env(ENV_PATH)
entries = []
entries_by_chain: dict[str, dict] = {}
discovered_for_write = []
jobs = []
with ThreadPoolExecutor(max_workers=min(8, len(CHAIN_CONFIG))) as executor:
for chain_id, config in CHAIN_CONFIG.items():
chain = status["chains"].get(chain_id)
if not chain:
continue
jobs.append((chain_id, executor.submit(build_chain_entry, chain_id, chain, config, env_values)))
for chain_id, config in CHAIN_CONFIG.items():
chain = status["chains"].get(chain_id)
if not chain:
continue
for chain_id, future in jobs:
entry, discovered_rows = future.result()
entries_by_chain[chain_id] = entry
discovered_for_write.extend(discovered_rows)
factory = resolve_env_value(f"CHAIN_{chain_id}_UNISWAP_V2_FACTORY", env_values)
router = resolve_env_value(f"CHAIN_{chain_id}_UNISWAP_V2_ROUTER", env_values)
start_block = resolve_env_value(f"CHAIN_{chain_id}_UNISWAP_V2_START_BLOCK", env_values) or "0"
rpc_url = ""
for key in config["rpc_keys"]:
value = resolve_env_value(key, env_values)
if value:
rpc_url = value
break
env_ready = bool(factory and router and rpc_url)
pairs = []
if env_ready:
for base, quote, token0, token1 in candidate_pairs(chain):
try:
pair_address = cast_call(rpc_url, factory, "getPair(address,address)(address)", token0, token1)
except Exception as exc:
pair_address = f"ERROR:{exc}"
live = pair_address.lower() != ZERO_ADDRESS and not pair_address.startswith("ERROR:")
row = {
"base": base,
"quote": quote,
"poolAddress": pair_address,
"live": live,
}
if live:
try:
row["health"] = compute_pair_health(rpc_url, pair_address, token0, token1)
except Exception as exc:
row["health"] = {"healthy": False, "error": str(exc)}
pairs.append(row)
if live:
discovered_for_write.append(
{
"chain_id": chain_id,
"row": {
"base": base,
"quote": quote,
"poolAddress": pair_address,
"factoryAddress": factory,
"routerAddress": router,
"startBlock": int(start_block),
"venue": "uniswap_v2_pair",
"publicRoutingEnabled": False,
},
}
)
entries.append(
{
"chain_id": int(chain_id),
"network": chain.get("name"),
"factoryAddress": factory or None,
"routerAddress": router or None,
"startBlock": int(start_block),
"rpcConfigured": bool(rpc_url),
"envReady": env_ready,
"pairsChecked": pairs,
}
)
entries = [entries_by_chain[chain_id] for chain_id in CHAIN_CONFIG if chain_id in entries_by_chain]
writes = []
if args.write_discovered: