Files
proxmox/scripts/lib/promod_uniswap_v2_phase2_wave1_completion_status.py
defiQUG 4fab998e51
All checks were successful
Deploy to Phoenix / deploy (push) Successful in 9s
chore: sync workspace docs, configs, and submodules
2026-04-18 12:07:15 -07:00

215 lines
8.0 KiB
Python

#!/usr/bin/env python3
from __future__ import annotations

import json
import os
import shlex
import subprocess
from datetime import datetime, timezone
from pathlib import Path
# Three directory levels above this file; assumed to be the repository root
# (TODO confirm — depends on where this script is installed in the tree).
REPO_ROOT = Path(__file__).resolve().parents[2]
# Input artifact: the Phase 2 operator sequence listing pairs per chain.
PHASE2_SEQUENCE = REPO_ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase2-operator-sequence-latest.json"
# Output artifacts: machine-readable status JSON and human-readable Markdown.
OUT_JSON = REPO_ROOT / "reports" / "extraction" / "promod-uniswap-v2-phase2-wave1-completion-status-latest.json"
OUT_MD = REPO_ROOT / "docs" / "03-deployment" / "PROMOD_UNISWAP_V2_PHASE2_WAVE1_COMPLETION_STATUS.md"
# Pair labels that belong to Wave 1; all other Phase 2 pairs are skipped.
WAVE1_LABELS = {
"cWAUDC/cWUSDC",
"cWAUDC/cWUSDT",
"cWEURC/cWUSDC",
"cWEURC/cWUSDT",
"cWGBPC/cWUSDC",
"cWGBPC/cWUSDT",
}
# Per-chain {pair label: (token_a, token_b)} address overrides that take
# precedence over the addresses recorded in the operator sequence file.
TOKEN_OVERRIDES = {
137: {
"cWAUDC/cWUSDC": ("0xFb4B6Cc81211F7d886950158294A44C312abCA29", "0xd6969bC19b53f866C64f2148aE271B2Dae0C58E4"),
"cWAUDC/cWUSDT": ("0xFb4B6Cc81211F7d886950158294A44C312abCA29", "0x0cb0192C056aa425C557BdeAD8E56C7eEabf7acF"),
"cWEURC/cWUSDC": ("0x3CD9ee18db7ad13616FCC1c83bC6098e03968E66", "0xd6969bC19b53f866C64f2148aE271B2Dae0C58E4"),
"cWEURC/cWUSDT": ("0x3CD9ee18db7ad13616FCC1c83bC6098e03968E66", "0x0cb0192C056aa425C557BdeAD8E56C7eEabf7acF"),
"cWGBPC/cWUSDC": ("0x948690147D2e50ffe50C5d38C14125aD6a9FA036", "0xd6969bC19b53f866C64f2148aE271B2Dae0C58E4"),
"cWGBPC/cWUSDT": ("0x948690147D2e50ffe50C5d38C14125aD6a9FA036", "0x0cb0192C056aa425C557BdeAD8E56C7eEabf7acF"),
}
}
# Per-chain known pair contract addresses; when present, the factory's
# getPair lookup is skipped entirely.
PAIR_ADDRESS_OVERRIDES = {
137: {
"cWAUDC/cWUSDC": "0x6ffa939d75bd6affe019705f2c9240f97975ffa0",
"cWAUDC/cWUSDT": "0x526a3a38b77d199e8fd07f37597f9ca0fa5a87cd",
"cWEURC/cWUSDC": "0xd5907a692f7e8f650fc5feb8ebb3196fea2069a3",
"cWEURC/cWUSDT": "0x3292c0ed9eec0443635367717047876fe3cdb514",
"cWGBPC/cWUSDC": "0x52786e752be5fb1b18e86959f87b7a59e2c6de6d",
"cWGBPC/cWUSDT": "0x1b6e8484db0cd9c00d39e457c2d126c8983f5390",
}
}
# Candidate environment variable names holding an RPC URL, keyed by chain id
# and ordered by preference (first variable that is set wins).
RPC_KEYS = {
1: ["ETHEREUM_MAINNET_RPC"],
10: ["OPTIMISM_MAINNET_RPC"],
25: ["CRONOS_RPC_URL", "CRONOS_MAINNET_RPC"],
56: ["BSC_MAINNET_RPC", "BSC_RPC_URL"],
100: ["GNOSIS_MAINNET_RPC", "GNOSIS_RPC_URL"],
137: ["POLYGON_MAINNET_RPC", "POLYGON_RPC_URL"],
8453: ["BASE_MAINNET_RPC", "BASE_RPC_URL"],
42161: ["ARBITRUM_MAINNET_RPC", "ARBITRUM_RPC_URL"],
42220: ["CELO_MAINNET_RPC", "CELO_RPC_URL"],
43114: ["AVALANCHE_MAINNET_RPC", "AVALANCHE_RPC_URL"],
}
# Static explorer-verification status embedded verbatim in both report outputs.
VERIFICATION_STATUS = {
"status": "blocked",
"summary": "Deployment and liquidity rollout is complete, but explorer publication is not fully complete.",
"blockers": [
"Current local CompliantWrappedToken artifact does not exactly match deployed runtime bytecode.",
"forge verify-contract cannot use the historical deploy profile directly in this environment.",
"Some explorer backends require paid API access or manual submission paths.",
],
}
def load_env() -> dict[str, str]:
    """Source the repo's env loader in a bash subshell and return its environment.

    Runs ``scripts/load-env.sh`` inside the ``smom-dbis-138`` checkout, dumps
    the resulting environment to a temp file, and parses it into a dict.

    Returns:
        Mapping of environment variable names to values.

    Raises:
        subprocess.CalledProcessError: if sourcing the env script fails.
    """
    env_dump = Path("/tmp/promod_phase2_env_snapshot.txt")
    # Quote both interpolated paths so a checkout path containing spaces or
    # shell metacharacters does not break (or subvert) the bash command line.
    workdir = shlex.quote(str(REPO_ROOT / "smom-dbis-138"))
    dump_path = shlex.quote(str(env_dump))
    subprocess.run(
        [
            "bash",
            "-lc",
            f"cd {workdir} && source scripts/load-env.sh >/dev/null && env | sort > {dump_path}",
        ],
        check=True,
    )
    env: dict[str, str] = {}
    for line in env_dump.read_text().splitlines():
        # `env` output is KEY=VALUE per line; split on the first '=' only.
        # Best-effort parse — values spanning multiple lines are not handled.
        if "=" in line:
            key, value = line.split("=", 1)
            env[key] = value
    return env
def cast_call(rpc_url: str, to: str, signature: str, *args: str) -> str:
    """Run `cast call` against contract *to* via *rpc_url* and return trimmed stdout."""
    command = ["cast", "call", to, signature]
    command.extend(args)
    command.extend(["--rpc-url", rpc_url])
    # 30s ceiling guards against a hung RPC endpoint.
    output = subprocess.check_output(command, text=True, timeout=30)
    return output.strip()
def write_json(data: dict) -> None:
    """Write *data* as pretty-printed JSON to OUT_JSON, creating parent dirs."""
    OUT_JSON.parent.mkdir(parents=True, exist_ok=True)
    payload = json.dumps(data, indent=2)
    OUT_JSON.write_text(payload + "\n")
def render_markdown(data: dict) -> str:
    """Render the completion-status payload *data* as a Markdown document.

    Pure function (no I/O) so the report formatting is testable in isolation.

    Args:
        data: Report payload as assembled by main() — requires the keys
            generated_at, overall_status, completed_chain_ids, chains,
            and verification_status.

    Returns:
        The full Markdown document, terminated by a trailing newline.
    """
    lines: list[str] = []
    lines.append("# Promod Uniswap V2 Phase 2 Wave 1 Completion Status")
    lines.append("")
    lines.append(f"**Generated:** {data['generated_at']}")
    lines.append("")
    lines.append(f"**Overall Status:** `{data['overall_status']}`")
    lines.append("")
    lines.append(f"**Completed Chains:** `{', '.join(str(x) for x in data['completed_chain_ids'])}`")
    lines.append("")
    lines.append("## Reserve Verification")
    lines.append("")
    lines.append("| Chain | Network | Pair | Pair Address | Reserves | Status |")
    lines.append("|---|---|---|---|---|---|")
    # One table row per Wave 1 pair on every chain.
    for chain in data["chains"]:
        for pair in chain["wave1_pairs"]:
            lines.append(
                f"| `{chain['chain_id']}` | {chain['network']} | `{pair['pair']}` | `{pair['pair_address']}` | `{pair['reserves']}` | `{pair['status']}` |"
            )
    lines.append("")
    lines.append("## Explorer Publication")
    lines.append("")
    lines.append(f"**Status:** `{data['verification_status']['status']}`")
    lines.append("")
    lines.append(data["verification_status"]["summary"])
    lines.append("")
    for blocker in data["verification_status"]["blockers"]:
        lines.append(f"- {blocker}")
    lines.append("")
    return "\n".join(lines) + "\n"


def write_markdown(data: dict) -> None:
    """Write the Markdown completion report for *data* to OUT_MD.

    Creates the parent directory if needed; formatting is delegated to
    render_markdown().
    """
    OUT_MD.parent.mkdir(parents=True, exist_ok=True)
    OUT_MD.write_text(render_markdown(data))
def main() -> None:
    """Audit Wave 1 pair liquidity on-chain and emit JSON + Markdown reports.

    For each chain entry in the Phase 2 operator sequence: resolve an RPC
    URL from the loaded environment, find each Wave 1 pair's address
    (override first, then the factory's getPair), read its reserves via
    `cast call`, classify pair and chain status, then write OUT_JSON and
    OUT_MD and print the JSON path.

    Raises:
        RuntimeError: if no configured RPC env var is set for a chain.
        KeyError: if a chain's CHAIN_<id>_UNISWAP_V2_FACTORY env var is missing.
    """
    env = load_env()
    # NOTE(review): assumes the sequence file has an "entries" list whose
    # items carry chain_id, network, and phase_2_pairs — verify against the
    # generator of that artifact.
    sequence = json.loads(PHASE2_SEQUENCE.read_text())
    chains_out = []
    completed_chain_ids = []
    for entry in sequence["entries"]:
        chain_id = entry["chain_id"]
        # First RPC env var (in preference order) that is actually set wins.
        rpc_url = next((env.get(k) for k in RPC_KEYS[chain_id] if env.get(k)), None)
        if not rpc_url:
            raise RuntimeError(f"Missing RPC URL for chain {chain_id}")
        factory = env[f"CHAIN_{chain_id}_UNISWAP_V2_FACTORY"]
        wave1_pairs = []
        for pair in entry["phase_2_pairs"]:
            # Only Wave 1 pairs are in scope for this report.
            if pair["pair"] not in WAVE1_LABELS:
                continue
            token_a = pair["token_a_address"]
            token_b = pair["token_b_address"]
            # Hard-coded token overrides beat the sequence file's addresses.
            if chain_id in TOKEN_OVERRIDES and pair["pair"] in TOKEN_OVERRIDES[chain_id]:
                token_a, token_b = TOKEN_OVERRIDES[chain_id][pair["pair"]]
            # Prefer a known pair address; otherwise ask the factory on-chain.
            pair_address = PAIR_ADDRESS_OVERRIDES.get(chain_id, {}).get(pair["pair"])
            if not pair_address:
                pair_address = cast_call(
                    rpc_url,
                    factory,
                    "getPair(address,address)(address)",
                    token_a,
                    token_b,
                )
            reserves = "PAIR_NOT_FOUND"
            status = "missing"
            # Zero address from getPair means the pair was never created.
            if pair_address.lower() != "0x0000000000000000000000000000000000000000":
                reserves = cast_call(
                    rpc_url,
                    pair_address,
                    "getReserves()((uint112,uint112,uint32))",
                )
                # NOTE(review): brittle substring match against cast's
                # human-readable output; assumes seed liquidity is exactly
                # 1e9/1e9 units and a fixed cast formatting — confirm this
                # survives cast version upgrades.
                status = "complete" if "(1000000000 [1e9], 1000000000 [1e9]" in reserves else "unexpected_reserves"
            wave1_pairs.append(
                {
                    "pair": pair["pair"],
                    "pair_address": pair_address,
                    "reserves": reserves,
                    "status": status,
                }
            )
        # A chain is complete only if every Wave 1 pair on it is complete.
        # (Vacuously "complete" when a chain lists no Wave 1 pairs.)
        chain_status = "complete" if all(p["status"] == "complete" for p in wave1_pairs) else "incomplete"
        if chain_status == "complete":
            completed_chain_ids.append(chain_id)
        chains_out.append(
            {
                "chain_id": chain_id,
                "network": entry["network"],
                "status": chain_status,
                "wave1_pairs": wave1_pairs,
            }
        )
    data = {
        "generated_at": datetime.now(timezone.utc).isoformat(),
        "program_name": "promod-uniswap-v2-phase2-wave1-completion-status",
        "overall_status": "complete" if len(completed_chain_ids) == len(chains_out) else "incomplete",
        "completed_chain_ids": completed_chain_ids,
        "verification_status": VERIFICATION_STATUS,
        "chains": chains_out,
        "source_artifacts": {
            "phase2_operator_sequence": str(PHASE2_SEQUENCE.relative_to(REPO_ROOT)),
        },
    }
    write_json(data)
    write_markdown(data)
    # Print the JSON artifact path so calling scripts can locate the report.
    print(OUT_JSON)
if __name__ == "__main__":
    main()