Loading balance…
) : balance ? (
- Balance (Chain 138):{' '}
+ Balance ({defaultChain.name}):{' '}
{balance.displayValue} {balance.symbol}
diff --git a/real-robinhood b/real-robinhood
index ac6e49c..cb5c811 160000
--- a/real-robinhood
+++ b/real-robinhood
@@ -1 +1 @@
-Subproject commit ac6e49c2edbe5a7c06523a0aba8cd7cb02075b17
+Subproject commit cb5c81142404a18d873bf4fe0064f6afa92117e8
diff --git a/script/deploy/DeployDBISRail.s.sol b/script/deploy/DeployDBISRail.s.sol
index ea731a7..d8d2d0b 100644
--- a/script/deploy/DeployDBISRail.s.sol
+++ b/script/deploy/DeployDBISRail.s.sol
@@ -6,6 +6,7 @@ import "../../contracts/dbis/DBIS_RootRegistry.sol";
import "../../contracts/dbis/DBIS_ParticipantRegistry.sol";
import "../../contracts/dbis/DBIS_SignerRegistry.sol";
import "../../contracts/dbis/DBIS_GRU_MintController.sol";
+import "../../contracts/dbis/DBIS_EIP712Helper.sol";
import "../../contracts/dbis/DBIS_SettlementRouter.sol";
import "../../contracts/dbis/StablecoinReferenceRegistry.sol";
import "../../contracts/dbis/DBIS_ConversionRouter.sol";
@@ -27,9 +28,10 @@ contract DeployDBISRail is Script {
DBIS_ParticipantRegistry participantReg = new DBIS_ParticipantRegistry(admin);
DBIS_SignerRegistry signerReg = new DBIS_SignerRegistry(admin);
DBIS_GRU_MintController mintController = new DBIS_GRU_MintController(admin, address(0));
- DBIS_SettlementRouter router = new DBIS_SettlementRouter(admin, address(root));
+ address eip712LibAddr = address(new DBIS_EIP712Helper());
+ DBIS_SettlementRouter router = new DBIS_SettlementRouter(admin, address(root), eip712LibAddr);
StablecoinReferenceRegistry stableReg = new StablecoinReferenceRegistry(admin);
- DBIS_ConversionRouter conversionRouter = new DBIS_ConversionRouter(admin, address(root));
+ DBIS_ConversionRouter conversionRouter = new DBIS_ConversionRouter(admin, address(root), eip712LibAddr);
root.setComponent(keccak256("ParticipantRegistry"), address(participantReg));
root.setComponent(keccak256("SignerRegistry"), address(signerReg));
@@ -42,6 +44,7 @@ contract DeployDBISRail is Script {
signerReg.setSwapQuorum(1e24, 2, 3);
+ console.log("DBIS_EIP712Helper", eip712LibAddr);
console.log("DBIS_RootRegistry", address(root));
console.log("DBIS_ParticipantRegistry", address(participantReg));
console.log("DBIS_SignerRegistry", address(signerReg));
diff --git a/script/dex/AddLiquidityPMMPoolsChain138.s.sol b/script/dex/AddLiquidityPMMPoolsChain138.s.sol
index 77346c1..a38e83d 100644
--- a/script/dex/AddLiquidityPMMPoolsChain138.s.sol
+++ b/script/dex/AddLiquidityPMMPoolsChain138.s.sol
@@ -12,14 +12,22 @@ import {IERC20} from "@openzeppelin/contracts/token/ERC20/IERC20.sol";
* POOL_CUSDTCUSDC, POOL_CUSDTUSDT, POOL_CUSDCUSDC,
* ADD_LIQUIDITY_BASE_AMOUNT, ADD_LIQUIDITY_QUOTE_AMOUNT (e.g. 1000000e6 for 1M units, 6 decimals).
* Optional: ADD_LIQUIDITY_CUSDTCUSDC_BASE, ADD_LIQUIDITY_CUSDTCUSDC_QUOTE, etc. for per-pool amounts.
+ * Optional: NEXT_NONCE — set to pending nonce (e.g. after mints) to avoid -32001 "Nonce too low" on broadcast.
*/
contract AddLiquidityPMMPoolsChain138 is Script {
function run() external {
uint256 pk = vm.envUint("PRIVATE_KEY");
+ address deployer = vm.addr(pk);
address integrationAddr = vm.envAddress("DODO_PMM_INTEGRATION");
if (integrationAddr == address(0)) integrationAddr = vm.envAddress("DODO_PMM_INTEGRATION_ADDRESS");
require(integrationAddr != address(0), "DODO_PMM_INTEGRATION not set");
+ // Use explicit nonce when set (e.g. after mints in same session) to avoid -32001 "Nonce too low"
+ uint64 nextNonce = uint64(vm.envOr("NEXT_NONCE", uint256(0)));
+ if (nextNonce > 0) {
+ vm.setNonce(deployer, nextNonce);
+ }
+
address poolCusdtCusdc = vm.envOr("POOL_CUSDTCUSDC", address(0));
address poolCusdtUsdt = vm.envOr("POOL_CUSDTUSDT", address(0));
address poolCusdcUsdc = vm.envOr("POOL_CUSDCUSDC", address(0));
@@ -33,6 +41,13 @@ contract AddLiquidityPMMPoolsChain138 is Script {
address usdt = integration.officialUSDT();
address usdc = integration.officialUSDC();
+ // On Chain 138, DODOPMMIntegration may have been deployed with mainnet official USDT/USDC
+ // (0xdAC17F958D2ee523a2206206994597C13D831ec7, 0xA0b86991c6218b36c1d19D4a2e9Eb0cE3606eB48).
+ // Those addresses have no code on 138, so skip cUSDT/USDT and cUSDC/USDC to avoid "call to non-contract".
+ bool skipOfficialPools = block.chainid == 138 && (
+ !_isContract(usdt) || !_isContract(usdc)
+ );
+
vm.startBroadcast(pk);
if (poolCusdtCusdc != address(0) && (defaultBase > 0 || defaultQuote > 0)) {
@@ -43,7 +58,7 @@ contract AddLiquidityPMMPoolsChain138 is Script {
console.log("Added liquidity to cUSDT/cUSDC pool:", poolCusdtCusdc);
}
}
- if (poolCusdtUsdt != address(0) && (defaultBase > 0 || defaultQuote > 0)) {
+ if (!skipOfficialPools && poolCusdtUsdt != address(0) && (defaultBase > 0 || defaultQuote > 0)) {
uint256 b = vm.envOr("ADD_LIQUIDITY_CUSDTUSDT_BASE", defaultBase);
uint256 q = vm.envOr("ADD_LIQUIDITY_CUSDTUSDT_QUOTE", defaultQuote);
if (b > 0 && q > 0) {
@@ -51,7 +66,7 @@ contract AddLiquidityPMMPoolsChain138 is Script {
console.log("Added liquidity to cUSDT/USDT pool:", poolCusdtUsdt);
}
}
- if (poolCusdcUsdc != address(0) && (defaultBase > 0 || defaultQuote > 0)) {
+ if (!skipOfficialPools && poolCusdcUsdc != address(0) && (defaultBase > 0 || defaultQuote > 0)) {
uint256 b = vm.envOr("ADD_LIQUIDITY_CUSDCUSDC_BASE", defaultBase);
uint256 q = vm.envOr("ADD_LIQUIDITY_CUSDCUSDC_QUOTE", defaultQuote);
if (b > 0 && q > 0) {
@@ -63,6 +78,14 @@ contract AddLiquidityPMMPoolsChain138 is Script {
vm.stopBroadcast();
}
+ function _isContract(address account) internal view returns (bool) {
+ uint256 size;
+ assembly {
+ size := extcodesize(account)
+ }
+ return size > 0;
+ }
+
function _approveAndAdd(
DODOPMMIntegration integration,
address baseToken,
diff --git a/scripts/create-pmm-full-mesh-chain138.sh b/scripts/create-pmm-full-mesh-chain138.sh
new file mode 100755
index 0000000..8a45580
--- /dev/null
+++ b/scripts/create-pmm-full-mesh-chain138.sh
@@ -0,0 +1,128 @@
+#!/usr/bin/env bash
+# Create the full PMM pool mesh on Chain 138: all c* vs c* pairs plus c* vs official USDT/USDC.
+# Uses DODOPMMIntegration.createPool() and registers each pool with DODOPMMProvider.
+# Skip pairs that already have a pool. Requires POOL_MANAGER_ROLE on integration and provider.
+#
+# Usage:
+# ./scripts/create-pmm-full-mesh-chain138.sh # all c* mesh + optional official pairs
+# MESH_ONLY_C_STAR=1 ./scripts/create-pmm-full-mesh-chain138.sh # only c* vs c* (no official)
+# DRY_RUN=1 ./scripts/create-pmm-full-mesh-chain138.sh # print only, no txs
+#
+# Requires: PRIVATE_KEY, RPC_URL_138, DODO_PMM_INTEGRATION_ADDRESS, DODO_PMM_PROVIDER_ADDRESS in .env
+
+set -euo pipefail
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
+cd "$PROJECT_ROOT"
+[[ -f .env ]] && set -a && source .env && set +a
+
+RPC="${RPC_URL_138:-${RPC_URL:-http://192.168.11.211:8545}}"
+INT="${DODO_PMM_INTEGRATION_ADDRESS:-${DODO_PMM_INTEGRATION:-}}"
+PROV="${DODO_PMM_PROVIDER_ADDRESS:-}"
+LP_FEE="${LP_FEE_RATE:-3}"
+I="${INITIAL_PRICE:-1000000000000000000}"
+K="${K_FACTOR:-500000000000000000}"
+TWAP="${ENABLE_TWAP:-false}"
+DRY_RUN="${DRY_RUN:-0}"
+MESH_ONLY_C_STAR="${MESH_ONLY_C_STAR:-0}"
+
+# 12 c* tokens on Chain 138 (symbol:address)
+declare -a C_STAR_SYMS=(cUSDT cUSDC cEURC cEURT cGBPC cGBPT cAUDC cJPYC cCHFC cCADC cXAUC cXAUT)
+declare -a C_STAR_ADDRS=(
+ 0x93E66202A11B1772E55407B32B44e5Cd8eda7f22
+ 0xf22258f57794CC8E06237084b353Ab30fFfa640b
+ 0x8085961F9cF02b4d800A3c6d386D31da4B34266a
+ 0xdf4b71c61E5912712C1Bdd451416B9aC26949d72
+ 0x003960f16D9d34F2e98d62723B6721Fb92074aD2
+ 0x350f54e4D23795f86A9c03988c7135357CCaD97c
+ 0xD51482e567c03899eecE3CAe8a058161FD56069D
+ 0xEe269e1226a334182aace90056EE4ee5Cc8A6770
+ 0x873990849DDa5117d7C644f0aF24370797C03885
+ 0x54dBd40cF05e15906A2C21f600937e96787f5679
+ 0x290E52a8819A4fbD0714E517225429aA2B70EC6b
+ 0x94e408E26c6FD8F4ee00b54dF19082FDA07dC96E
+)
+
+[[ -n "${PRIVATE_KEY:-}" ]] || { echo "PRIVATE_KEY not set"; exit 1; }
+[[ -n "$INT" ]] || { echo "DODO_PMM_INTEGRATION_ADDRESS (or DODO_PMM_INTEGRATION) not set"; exit 1; }
+[[ -n "$PROV" ]] || { echo "DODO_PMM_PROVIDER_ADDRESS not set"; exit 1; }
+
+# Official USDT/USDC on 138 (from integration if available)
+OFFICIAL_USDT="${OFFICIAL_USDT_ADDRESS:-0x15DF1D5BFDD8Aa4b380445D4e3E9B38d34283619}"
+OFFICIAL_USDC="${OFFICIAL_USDC_ADDRESS:-}"
+if [[ -z "$OFFICIAL_USDC" ]] && command -v cast &>/dev/null; then
+ OFFICIAL_USDC=$(cast call "$INT" "officialUSDC()(address)" --rpc-url "$RPC" 2>/dev/null | cast --to-addr 2>/dev/null || true)
+fi
+[[ -z "$OFFICIAL_USDC" ]] && OFFICIAL_USDC="0x0000000000000000000000000000000000000000"
+
+created=0
+skipped=0
+failed=0
+
+pool_exists() {
+ local base="$1" quote="$2"
+ local addr
+ addr=$(cast call "$INT" "pools(address,address)(address)" "$base" "$quote" --rpc-url "$RPC" 2>/dev/null | cast --to-addr 2>/dev/null || echo "0x0")
+ [[ -n "$addr" && "$addr" != "0x0000000000000000000000000000000000000000" ]]
+}
+
+create_pool() {
+ local base="$1" quote="$2" label="$3"
+ if pool_exists "$base" "$quote"; then
+ echo " SKIP $label (pool exists)"
+ ((skipped++)) || true
+ return 0
+ fi
+ if [[ "$DRY_RUN" == "1" ]]; then
+ echo " [DRY] would create $label"
+ ((created++)) || true
+ return 0
+ fi
+ if cast send "$INT" "createPool(address,address,uint256,uint256,uint256,bool)" \
+ "$base" "$quote" "$LP_FEE" "$I" "$K" "$TWAP" \
+ --rpc-url "$RPC" --private-key "$PRIVATE_KEY" --legacy --gas-limit 500000 -q 2>/dev/null; then
+ local pool_addr
+ pool_addr=$(cast call "$INT" "pools(address,address)(address)" "$base" "$quote" --rpc-url "$RPC" | cast --to-addr)
+ echo " OK $label -> $pool_addr"
+ if [[ -n "$PROV" ]]; then
+ cast send "$PROV" "registerPool(address,address,address)" "$base" "$quote" "$pool_addr" \
+ --rpc-url "$RPC" --private-key "$PRIVATE_KEY" --legacy --gas-limit 200000 -q 2>/dev/null && echo " registered with provider" || echo " provider register failed"
+ fi
+ ((created++)) || true
+ else
+ echo " FAIL $label"
+ ((failed++)) || true
+ fi
+}
+
+echo "=== PMM full mesh Chain 138 ==="
+echo " Integration: $INT Provider: $PROV RPC: $RPC"
+echo " Mesh: c* vs c* (+ c* vs official USDT/USDC if MESH_ONLY_C_STAR != 1)"
+echo ""
+
+# 1) All c* vs c* pairs (base < quote by address to avoid duplicates)
+n=${#C_STAR_ADDRS[@]}
+for ((i=0; i/dev/null) || exit 1
+
+echo "=== Mint all c* on Chain 138 ==="
+echo " Deployer: $DEPLOYER Amount: $AMOUNT_HUMAN tokens each ($BASE_UNITS base)"
+echo ""
+
+for pair in "cUSDT:0x93E66202A11B1772E55407B32B44e5Cd8eda7f22" "cUSDC:0xf22258f57794CC8E06237084b353Ab30fFfa640b" "cEURC:0x8085961F9cF02b4d800A3c6d386D31da4B34266a" "cEURT:0xdf4b71c61E5912712C1Bdd451416B9aC26949d72" "cGBPC:0x003960f16D9d34F2e98d62723B6721Fb92074aD2" "cGBPT:0x350f54e4D23795f86A9c03988c7135357CCaD97c" "cAUDC:0xD51482e567c03899eecE3CAe8a058161FD56069D" "cJPYC:0xEe269e1226a334182aace90056EE4ee5Cc8A6770" "cCHFC:0x873990849DDa5117d7C644f0aF24370797C03885" "cCADC:0x54dBd40cF05e15906A2C21f600937e96787f5679" "cXAUC:0x290E52a8819A4fbD0714E517225429aA2B70EC6b" "cXAUT:0x94e408E26c6FD8F4ee00b54dF19082FDA07dC96E"; do
+ sym="${pair%%:*}"
+ addr="${pair#*:}"
+ echo -n "Minting $sym... "
+ if cast send "$addr" "mint(address,uint256)" "$DEPLOYER" "$BASE_UNITS" --rpc-url "$RPC" --private-key "$PRIVATE_KEY" --legacy --gas-limit 100000 2>/dev/null; then echo "OK"; else echo "FAIL"; fi
+done
+echo "Done."
diff --git a/scripts/mint-cw-on-chain.sh b/scripts/mint-cw-on-chain.sh
new file mode 100755
index 0000000..8d3b9e3
--- /dev/null
+++ b/scripts/mint-cw-on-chain.sh
@@ -0,0 +1,56 @@
+#!/usr/bin/env bash
+# Mint cW* tokens on a given chain to the deployer. Deployer has MINTER_ROLE on CompliantWrappedToken.
+# Usage: ./scripts/mint-cw-on-chain.sh <chain_name> [amount_human]
+#   chain_name   = Mainnet | Cronos | BSC | Polygon | Gnosis | Avalanche | Base | Arbitrum | Optimism
+#   amount_human = tokens in human units (default 1000000 = 1M). 6 decimals.
+# Requires: PRIVATE_KEY, <CHAIN>_RPC (e.g. POLYGON_MAINNET_RPC), CWUSDT_<CHAIN>, CWUSDC_<CHAIN>, etc. in .env.
+
+set -euo pipefail
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
+cd "$PROJECT_ROOT"
+[[ -f .env ]] && set -a && source .env && set +a
+
+CHAIN_NAME="${1:-}"
+AMOUNT_HUMAN="${2:-1000000}"
+BASE_UNITS=$((AMOUNT_HUMAN * 10**6))
+
+[[ -n "$CHAIN_NAME" ]] || { echo "Usage: $0 <chain_name> [amount_human]. CHAIN_NAME=Mainnet|Cronos|BSC|Polygon|Gnosis|Avalanche|Base|Arbitrum|Optimism"; exit 1; }
+[[ -n "${PRIVATE_KEY:-}" ]] || { echo "PRIVATE_KEY not set"; exit 1; }
+DEPLOYER=$(cast wallet address "$PRIVATE_KEY" 2>/dev/null) || exit 1
+
+CHAIN_UPPER=$(echo "$CHAIN_NAME" | tr '[:lower:]' '[:upper:]')
+
+get_rpc() {
+ case "$CHAIN_UPPER" in
+ MAINNET) echo "${ETHEREUM_MAINNET_RPC:-${ETH_MAINNET_RPC_URL:-}}";;
+ CRONOS) echo "${CRONOS_RPC_URL:-${CRONOS_RPC:-}}";;
+ BSC) echo "${BSC_RPC_URL:-${BSC_RPC:-}}";;
+ POLYGON) echo "${POLYGON_MAINNET_RPC:-${POLYGON_RPC_URL:-}}";;
+ GNOSIS) echo "${GNOSIS_RPC:-${GNOSIS_MAINNET_RPC:-}}";;
+ AVALANCHE) echo "${AVALANCHE_RPC_URL:-${AVALANCHE_RPC:-}}";;
+ BASE) echo "${BASE_MAINNET_RPC:-${BASE_RPC_URL:-}}";;
+ ARBITRUM) echo "${ARBITRUM_MAINNET_RPC:-${ARBITRUM_RPC:-}}";;
+ OPTIMISM) echo "${OPTIMISM_MAINNET_RPC:-${OPTIMISM_RPC:-}}";;
+ *) echo "";;
+ esac
+}
+
+RPC=$(get_rpc)
+[[ -n "$RPC" ]] || { echo "No RPC for $CHAIN_NAME. Set e.g. POLYGON_MAINNET_RPC in .env"; exit 1; }
+
+# cW* env vars: CWUSDT_POLYGON, CWUSDC_POLYGON, ...
+for var in CWUSDT CWUSDC CWEURC CWEURT CWGBPC CWGBPT CWAUDC CWJPYC CWCHFC CWCADC CWXAUC CWXAUT; do
+ addr_var="${var}_${CHAIN_UPPER}"
+ addr="${!addr_var:-}"
+ [[ -n "$addr" ]] || continue
+ echo -n "Minting ${var}... "
+ if cast send "$addr" "mint(address,uint256)" "$DEPLOYER" "$BASE_UNITS" \
+ --rpc-url "$RPC" --private-key "$PRIVATE_KEY" --legacy --gas-limit 100000 2>/dev/null; then
+ echo "OK"
+ else
+ echo "FAIL"
+ fi
+done
+echo "Done. Ensure CWUSDT_${CHAIN_UPPER}, CWUSDC_${CHAIN_UPPER}, etc. are in .env (from DeployCWTokens output)."
diff --git a/scripts/mint-for-liquidity.sh b/scripts/mint-for-liquidity.sh
index a1b77ce..0680743 100755
--- a/scripts/mint-for-liquidity.sh
+++ b/scripts/mint-for-liquidity.sh
@@ -63,8 +63,9 @@ mint_one() {
echo " SKIP $name: contract owner is $OWNER, deployer is $DEPLOYER (only owner can mint)"
return 0
fi
+ GAS_PRICE="${GAS_PRICE_138:-1000000000}"
if cast send "$addr" "mint(address,uint256)" "$DEPLOYER" "$amount_base" \
- --rpc-url "$RPC" --private-key "$PRIVATE_KEY" --legacy; then
+ --rpc-url "$RPC" --private-key "$PRIVATE_KEY" --legacy --gas-limit 100000 --gas-price "$GAS_PRICE"; then
echo " OK $name"
else
echo " FAIL $name"
@@ -77,8 +78,10 @@ mint_one "$CUSDC" "cUSDC" "$BASE_CUSDC"
echo ""
echo "Mint done. Deployer balances:"
-cast call "$CUSDT" "balanceOf(address)(uint256)" "$DEPLOYER" --rpc-url "$RPC" 2>/dev/null | xargs -I {} echo " cUSDT: {}"
-cast call "$CUSDC" "balanceOf(address)(uint256)" "$DEPLOYER" --rpc-url "$RPC" 2>/dev/null | xargs -I {} echo " cUSDC: {}"
+B1=$(cast call "$CUSDT" "balanceOf(address)(uint256)" "$DEPLOYER" --rpc-url "$RPC" 2>/dev/null || echo "?")
+B2=$(cast call "$CUSDC" "balanceOf(address)(uint256)" "$DEPLOYER" --rpc-url "$RPC" 2>/dev/null || echo "?")
+echo " cUSDT: $B1"
+echo " cUSDC: $B2"
echo ""
if [[ "$RUN_ADD_LIQUIDITY" == true ]]; then
@@ -94,9 +97,12 @@ if [[ "$RUN_ADD_LIQUIDITY" == true ]]; then
export POOL_CUSDTUSDT="${POOL_CUSDTUSDT:-0xa3Ee6091696B28e5497b6F491fA1e99047250c59}"
export POOL_CUSDCUSDC="${POOL_CUSDCUSDC:-0x90bd9Bf18Daa26Af3e814ea224032d015db58Ea5}"
if [[ -n "${DODO_PMM_INTEGRATION:-}" || -n "${DODO_PMM_INTEGRATION_ADDRESS:-}" ]]; then
+ # Use pending nonce so broadcast does not get -32001 "Nonce too low" (mints just used N and N+1)
+ NEXT_NONCE=$(cast nonce "$DEPLOYER" --rpc-url "$RPC" --block pending 2>/dev/null || true)
+ [[ -n "$NEXT_NONCE" && "$NEXT_NONCE" =~ ^[0-9]+$ ]] && export NEXT_NONCE || unset -v NEXT_NONCE
echo "Running AddLiquidityPMMPoolsChain138 (cUSDT/cUSDC pool only if base/quote set)..."
forge script script/dex/AddLiquidityPMMPoolsChain138.s.sol:AddLiquidityPMMPoolsChain138 \
- --rpc-url "$RPC" --broadcast --private-key "$PRIVATE_KEY" --with-gas-price 1000000000
+ --rpc-url "$RPC" --broadcast --private-key "$PRIVATE_KEY" --with-gas-price 1000000000 --gas-estimate-multiplier 150
echo "Add-liquidity done."
else
echo "Set DODO_PMM_INTEGRATION (or DODO_PMM_INTEGRATION_ADDRESS) and POOL_* in .env, then run:"
diff --git a/services/token-aggregation/src/adapters/base-adapter.ts b/services/token-aggregation/src/adapters/base-adapter.ts
index b690918..5da0bff 100644
--- a/services/token-aggregation/src/adapters/base-adapter.ts
+++ b/services/token-aggregation/src/adapters/base-adapter.ts
@@ -49,6 +49,6 @@ export interface MarketData {
export interface ApiCacheEntry {
key: string;
- data: any;
+ data: unknown;
expiresAt: Date;
}
diff --git a/services/token-aggregation/src/adapters/cmc-adapter.ts b/services/token-aggregation/src/adapters/cmc-adapter.ts
index 0958987..f3b21a6 100644
--- a/services/token-aggregation/src/adapters/cmc-adapter.ts
+++ b/services/token-aggregation/src/adapters/cmc-adapter.ts
@@ -1,5 +1,6 @@
import axios, { AxiosInstance } from 'axios';
import { ExternalApiAdapter, TokenMetadata, MarketData } from './base-adapter';
+import { logger } from '../utils/logger';
interface CMCDexPair {
pair_address: string;
@@ -81,12 +82,12 @@ const CHAIN_TO_CMC_ID: Record = {
export class CoinMarketCapAdapter implements ExternalApiAdapter {
private api: AxiosInstance;
private apiKey?: string;
- private cache: Map = new Map();
+ private cache: Map = new Map();
constructor() {
this.apiKey = process.env.COINMARKETCAP_API_KEY;
if (!this.apiKey) {
- console.warn('CoinMarketCap API key not provided. CMC adapter will not function.');
+ logger.warn('CoinMarketCap API key not provided. CMC adapter will not function.');
}
this.api = axios.create({
@@ -126,11 +127,12 @@ export class CoinMarketCapAdapter implements ExternalApiAdapter {
},
});
return response.status === 200;
- } catch (error: any) {
- if (error.response?.status === 400 || error.response?.status === 404) {
+ } catch (error: unknown) {
+ const err = error as { response?: { status?: number } };
+ if (err.response?.status === 400 || err.response?.status === 404) {
return false; // Chain not supported
}
- console.error(`Error checking CMC chain support for ${chainId}:`, error);
+ logger.error(`Error checking CMC chain support for ${chainId}:`, error);
return false;
}
}
@@ -138,7 +140,8 @@ export class CoinMarketCapAdapter implements ExternalApiAdapter {
/**
* Get token by contract address (CMC doesn't have direct contract lookup in free tier)
*/
- async getTokenByContract(chainId: number, address: string): Promise {
+ // eslint-disable-next-line @typescript-eslint/no-unused-vars -- interface requires (chainId, address)
+ async getTokenByContract(_chainId: number, _address: string): Promise {
// CMC DEX API doesn't provide token metadata directly
// Would need CMC Pro API with different endpoints
return null;
@@ -210,11 +213,12 @@ export class CoinMarketCapAdapter implements ExternalApiAdapter {
});
return marketData;
- } catch (error: any) {
- if (error.response?.status === 404 || error.response?.status === 400) {
+ } catch (error: unknown) {
+ const err = error as { response?: { status?: number } };
+ if (err.response?.status === 404 || err.response?.status === 400) {
return null;
}
- console.error(`Error fetching CMC market data for ${address} on chain ${chainId}:`, error);
+ logger.error(`Error fetching CMC market data for ${address} on chain ${chainId}:`, error);
return null;
}
}
@@ -243,7 +247,7 @@ export class CoinMarketCapAdapter implements ExternalApiAdapter {
return response.data.data || [];
} catch (error) {
- console.error(`Error fetching CMC DEX pairs for ${tokenAddress} on chain ${chainId}:`, error);
+ logger.error(`Error fetching CMC DEX pairs for ${tokenAddress} on chain ${chainId}:`, error);
return [];
}
}
@@ -271,7 +275,7 @@ export class CoinMarketCapAdapter implements ExternalApiAdapter {
return Object.values(response.data.data || {});
} catch (error) {
- console.error(`Error fetching CMC pair quotes for chain ${chainId}:`, error);
+ logger.error(`Error fetching CMC pair quotes for chain ${chainId}:`, error);
return [];
}
}
@@ -321,7 +325,7 @@ export class CoinMarketCapAdapter implements ExternalApiAdapter {
return pair.timeframes[intervalMap[interval] || '1h'] || [];
} catch (error) {
- console.error(`Error fetching CMC OHLCV for ${pairAddress} on chain ${chainId}:`, error);
+ logger.error(`Error fetching CMC OHLCV for ${pairAddress} on chain ${chainId}:`, error);
return [];
}
}
diff --git a/services/token-aggregation/src/adapters/coingecko-adapter.ts b/services/token-aggregation/src/adapters/coingecko-adapter.ts
index 33fd576..b30914a 100644
--- a/services/token-aggregation/src/adapters/coingecko-adapter.ts
+++ b/services/token-aggregation/src/adapters/coingecko-adapter.ts
@@ -1,6 +1,6 @@
import axios, { AxiosInstance } from 'axios';
import { ExternalApiAdapter, TokenMetadata, MarketData } from './base-adapter';
-import { getDatabasePool } from '../database/client';
+import { logger } from '../utils/logger';
interface CoinGeckoPlatform {
id: string;
@@ -83,7 +83,7 @@ export class CoinGeckoAdapter implements ExternalApiAdapter {
private api: AxiosInstance;
private apiKey?: string;
private supportedPlatforms: Map = new Map();
- private cache: Map = new Map();
+ private cache: Map = new Map();
constructor() {
this.apiKey = process.env.COINGECKO_API_KEY;
@@ -114,7 +114,7 @@ export class CoinGeckoAdapter implements ExternalApiAdapter {
const cacheKey = `chain_support_${chainId}`;
const cached = this.cache.get(cacheKey);
if (cached && cached.expiresAt > new Date()) {
- return cached.data;
+ return cached.data as boolean;
}
try {
@@ -130,7 +130,7 @@ export class CoinGeckoAdapter implements ExternalApiAdapter {
});
return supported;
} catch (error) {
- console.error(`Error checking CoinGecko chain support for ${chainId}:`, error);
+ logger.error(`Error checking CoinGecko chain support for ${chainId}:`, error);
return false;
}
}
@@ -147,7 +147,7 @@ export class CoinGeckoAdapter implements ExternalApiAdapter {
}
});
} catch (error) {
- console.error('Error loading CoinGecko platforms:', error);
+ logger.error('Error loading CoinGecko platforms:', error);
// Fallback to known mappings
Object.entries(CHAIN_TO_PLATFORM).forEach(([chainId, platformId]) => {
this.supportedPlatforms.set(parseInt(chainId, 10), platformId);
@@ -167,7 +167,7 @@ export class CoinGeckoAdapter implements ExternalApiAdapter {
const cacheKey = `token_${chainId}_${address.toLowerCase()}`;
const cached = this.cache.get(cacheKey);
if (cached && cached.expiresAt > new Date()) {
- return cached.data;
+ return cached.data as TokenMetadata;
}
try {
@@ -200,11 +200,12 @@ export class CoinGeckoAdapter implements ExternalApiAdapter {
});
return metadata;
- } catch (error: any) {
- if (error.response?.status === 404) {
+ } catch (error: unknown) {
+ const err = error as { response?: { status?: number } };
+ if (err.response?.status === 404) {
return null; // Token not found
}
- console.error(`Error fetching CoinGecko token ${address} on chain ${chainId}:`, error);
+ logger.error(`Error fetching CoinGecko token ${address} on chain ${chainId}:`, error);
return null;
}
}
@@ -221,7 +222,7 @@ export class CoinGeckoAdapter implements ExternalApiAdapter {
const cacheKey = `market_${chainId}_${address.toLowerCase()}`;
const cached = this.cache.get(cacheKey);
if (cached && cached.expiresAt > new Date()) {
- return cached.data;
+ return cached.data as MarketData;
}
try {
@@ -244,11 +245,12 @@ export class CoinGeckoAdapter implements ExternalApiAdapter {
});
return marketData;
- } catch (error: any) {
- if (error.response?.status === 404) {
+ } catch (error: unknown) {
+ const err = error as { response?: { status?: number } };
+ if (err.response?.status === 404) {
return null;
}
- console.error(`Error fetching CoinGecko market data for ${address} on chain ${chainId}:`, error);
+ logger.error(`Error fetching CoinGecko market data for ${address} on chain ${chainId}:`, error);
return null;
}
}
@@ -260,7 +262,7 @@ export class CoinGeckoAdapter implements ExternalApiAdapter {
const cacheKey = 'trending';
const cached = this.cache.get(cacheKey);
if (cached && cached.expiresAt > new Date()) {
- return cached.data;
+ return cached.data as Array<{ id: string; name: string; symbol: string; score: number }>;
}
try {
@@ -280,7 +282,7 @@ export class CoinGeckoAdapter implements ExternalApiAdapter {
return trending;
} catch (error) {
- console.error('Error fetching CoinGecko trending:', error);
+ logger.error('Error fetching CoinGecko trending:', error);
return [];
}
}
@@ -294,7 +296,7 @@ export class CoinGeckoAdapter implements ExternalApiAdapter {
const cacheKey = `markets_${coinIds.join(',')}`;
const cached = this.cache.get(cacheKey);
if (cached && cached.expiresAt > new Date()) {
- return cached.data;
+ return cached.data as CoinGeckoMarket[];
}
try {
@@ -316,7 +318,7 @@ export class CoinGeckoAdapter implements ExternalApiAdapter {
return response.data;
} catch (error) {
- console.error('Error fetching CoinGecko markets:', error);
+ logger.error('Error fetching CoinGecko markets:', error);
return [];
}
}
diff --git a/services/token-aggregation/src/adapters/dexscreener-adapter.ts b/services/token-aggregation/src/adapters/dexscreener-adapter.ts
index 109bac6..bfaaf12 100644
--- a/services/token-aggregation/src/adapters/dexscreener-adapter.ts
+++ b/services/token-aggregation/src/adapters/dexscreener-adapter.ts
@@ -1,5 +1,6 @@
import axios, { AxiosInstance } from 'axios';
import { ExternalApiAdapter, TokenMetadata, MarketData } from './base-adapter';
+import { logger } from '../utils/logger';
interface DexScreenerPair {
chainId: string;
@@ -85,7 +86,7 @@ Object.entries(CHAIN_TO_DEXSCREENER_ID).forEach(([chainId, dexId]) => {
export class DexScreenerAdapter implements ExternalApiAdapter {
private api: AxiosInstance;
private apiKey?: string;
- private cache: Map = new Map();
+ private cache: Map = new Map();
private supportedChains: Set = new Set();
constructor() {
@@ -118,7 +119,7 @@ export class DexScreenerAdapter implements ExternalApiAdapter {
const cacheKey = `chain_support_${chainId}`;
const cached = this.cache.get(cacheKey);
if (cached && cached.expiresAt > new Date()) {
- return cached.data;
+ return cached.data as boolean;
}
// Try a test request to verify support
@@ -145,11 +146,12 @@ export class DexScreenerAdapter implements ExternalApiAdapter {
}
return supported;
- } catch (error: any) {
- if (error.response?.status === 404 || error.response?.status === 400) {
+ } catch (error: unknown) {
+ const err = error as { response?: { status?: number } };
+ if (err.response?.status === 404 || err.response?.status === 400) {
return false;
}
- console.error(`Error checking DexScreener chain support for ${chainId}:`, error);
+ logger.error(`Error checking DexScreener chain support for ${chainId}:`, error);
return false;
}
}
@@ -157,7 +159,8 @@ export class DexScreenerAdapter implements ExternalApiAdapter {
/**
* Get token by contract address (DexScreener doesn't provide token metadata)
*/
- async getTokenByContract(chainId: number, address: string): Promise {
+ // eslint-disable-next-line @typescript-eslint/no-unused-vars -- interface requires (chainId, address)
+ async getTokenByContract(_chainId: number, _address: string): Promise {
// DexScreener doesn't provide token metadata, only pair data
return null;
}
@@ -174,7 +177,7 @@ export class DexScreenerAdapter implements ExternalApiAdapter {
const cacheKey = `market_${chainId}_${address.toLowerCase()}`;
const cached = this.cache.get(cacheKey);
if (cached && cached.expiresAt > new Date()) {
- return cached.data;
+ return cached.data as MarketData | null;
}
try {
@@ -191,8 +194,6 @@ export class DexScreenerAdapter implements ExternalApiAdapter {
let totalLiquidity = 0;
let avgPrice = 0;
let priceCount = 0;
- let totalTxns24h = 0;
-
response.data.pairs.forEach((pair) => {
if (pair.priceUsd) {
avgPrice += parseFloat(pair.priceUsd);
@@ -204,9 +205,7 @@ export class DexScreenerAdapter implements ExternalApiAdapter {
if (pair.liquidity?.usd) {
totalLiquidity += pair.liquidity.usd;
}
- if (pair.txns?.h24) {
- totalTxns24h += (pair.txns.h24.buys || 0) + (pair.txns.h24.sells || 0);
- }
+ // txns h24 available on pair.txns?.h24 for future use
});
const marketData: MarketData = {
@@ -223,11 +222,12 @@ export class DexScreenerAdapter implements ExternalApiAdapter {
});
return marketData;
- } catch (error: any) {
- if (error.response?.status === 404) {
+ } catch (error: unknown) {
+ const err = error as { response?: { status?: number } };
+ if (err.response?.status === 404) {
return null; // Token not found
}
- console.error(`Error fetching DexScreener market data for ${address} on chain ${chainId}:`, error);
+ logger.error(`Error fetching DexScreener market data for ${address} on chain ${chainId}:`, error);
return null;
}
}
@@ -248,7 +248,7 @@ export class DexScreenerAdapter implements ExternalApiAdapter {
return response.data.pairs || [];
} catch (error) {
- console.error(`Error fetching DexScreener pairs for ${tokenAddress} on chain ${chainId}:`, error);
+ logger.error(`Error fetching DexScreener pairs for ${tokenAddress} on chain ${chainId}:`, error);
return [];
}
}
@@ -268,11 +268,12 @@ export class DexScreenerAdapter implements ExternalApiAdapter {
);
return response.data.pair || null;
- } catch (error: any) {
- if (error.response?.status === 404) {
+ } catch (error: unknown) {
+ const err = error as { response?: { status?: number } };
+ if (err.response?.status === 404) {
return null;
}
- console.error(`Error fetching DexScreener pair data for ${pairAddress} on chain ${chainId}:`, error);
+ logger.error(`Error fetching DexScreener pair data for ${pairAddress} on chain ${chainId}:`, error);
return null;
}
}
@@ -320,7 +321,7 @@ export class DexScreenerAdapter implements ExternalApiAdapter {
});
}
} catch (error) {
- console.error(`Error fetching DexScreener pairs for chunk on chain ${chainId}:`, error);
+ logger.error(`Error fetching DexScreener pairs for chunk on chain ${chainId}:`, error);
}
}
diff --git a/services/token-aggregation/src/api/central-audit.ts b/services/token-aggregation/src/api/central-audit.ts
index 49bafde..e88926b 100644
--- a/services/token-aggregation/src/api/central-audit.ts
+++ b/services/token-aggregation/src/api/central-audit.ts
@@ -3,6 +3,8 @@
* Sends audit entries to dbis_core Admin Central API when DBIS_CENTRAL_URL and ADMIN_CENTRAL_API_KEY are set.
*/
+import { logger } from '../utils/logger';
+
const DBIS_CENTRAL_URL = process.env.DBIS_CENTRAL_URL?.replace(/\/$/, '');
const ADMIN_CENTRAL_API_KEY = process.env.ADMIN_CENTRAL_API_KEY;
const SERVICE_NAME = 'token_aggregation';
@@ -48,9 +50,9 @@ export async function appendCentralAudit(payload: CentralAuditPayload): Promise<
}),
});
if (!res.ok) {
- console.warn(`[central-audit] POST failed: ${res.status} ${await res.text()}`);
+ logger.warn(`[central-audit] POST failed: ${res.status} ${await res.text()}`);
}
} catch (err) {
- console.warn('[central-audit] append failed:', err instanceof Error ? err.message : err);
+ logger.warn('[central-audit] append failed:', err instanceof Error ? err.message : err);
}
}
diff --git a/services/token-aggregation/src/api/middleware/__mocks__/cache.ts b/services/token-aggregation/src/api/middleware/__mocks__/cache.ts
index 4b1c433..ade6331 100644
--- a/services/token-aggregation/src/api/middleware/__mocks__/cache.ts
+++ b/services/token-aggregation/src/api/middleware/__mocks__/cache.ts
@@ -1,3 +1,4 @@
+// eslint-disable-next-line @typescript-eslint/no-unused-vars -- mock middleware
export function cacheMiddleware(_ttl?: number) {
return (req: unknown, res: unknown, next: () => void) => next();
}
diff --git a/services/token-aggregation/src/api/middleware/cache.ts b/services/token-aggregation/src/api/middleware/cache.ts
index 9686468..f68425a 100644
--- a/services/token-aggregation/src/api/middleware/cache.ts
+++ b/services/token-aggregation/src/api/middleware/cache.ts
@@ -1,7 +1,7 @@
import { Request, Response, NextFunction } from 'express';
interface CacheEntry {
- data: any;
+ data: unknown;
expiresAt: number;
}
@@ -21,7 +21,7 @@ export function cacheMiddleware(ttl: number = DEFAULT_TTL) {
const originalJson = res.json.bind(res);
// Override json method to cache response
- res.json = function (body: any) {
+ res.json = function (body: unknown) {
cache.set(key, {
data: body,
expiresAt: Date.now() + ttl,
diff --git a/services/token-aggregation/src/api/routes/admin.ts b/services/token-aggregation/src/api/routes/admin.ts
index b1935be..c8ed227 100644
--- a/services/token-aggregation/src/api/routes/admin.ts
+++ b/services/token-aggregation/src/api/routes/admin.ts
@@ -1,6 +1,6 @@
import { Router, Request, Response } from 'express';
-import jwt from 'jsonwebtoken';
import { AdminRepository } from '../../database/repositories/admin-repo';
+import { logger } from '../../utils/logger';
import { authenticateToken, requireRole, AuthRequest, generateToken } from '../middleware/auth';
import { cacheMiddleware } from '../middleware/cache';
import { appendCentralAudit } from '../central-audit';
@@ -46,7 +46,7 @@ router.post('/auth/login', async (req: Request, res: Response) => {
},
});
} catch (error) {
- console.error('Login error:', error);
+ logger.error('Login error:', error);
res.status(500).json({ error: 'Internal server error' });
}
});
@@ -61,7 +61,7 @@ router.get('/api-keys', requireRole('admin', 'super_admin', 'operator'), async (
const keys = await adminRepo.getApiKeys(provider);
res.json({ apiKeys: keys });
} catch (error) {
- console.error('Error fetching API keys:', error);
+ logger.error('Error fetching API keys:', error);
res.status(500).json({ error: 'Internal server error' });
}
});
@@ -103,7 +103,7 @@ router.post('/api-keys', requireRole('admin', 'super_admin'), async (req: AuthRe
res.status(201).json({ apiKey: { ...newKey, apiKeyEncrypted: undefined } });
} catch (error) {
- console.error('Error creating API key:', error);
+ logger.error('Error creating API key:', error);
res.status(500).json({ error: 'Internal server error' });
}
});
@@ -111,7 +111,7 @@ router.post('/api-keys', requireRole('admin', 'super_admin'), async (req: AuthRe
router.put('/api-keys/:id', requireRole('admin', 'super_admin'), async (req: AuthRequest, res: Response) => {
try {
const id = parseInt(req.params.id, 10);
- const updates: any = {};
+ const updates: { isActive?: boolean; rateLimitPerMinute?: number; expiresAt?: Date } = {};
if (req.body.isActive !== undefined) updates.isActive = req.body.isActive;
if (req.body.rateLimitPerMinute !== undefined) updates.rateLimitPerMinute = req.body.rateLimitPerMinute;
@@ -126,8 +126,8 @@ router.put('/api-keys/:id', requireRole('admin', 'super_admin'), async (req: Aut
'update',
'api_key',
id,
- oldKey,
- updates,
+            oldKey as unknown as Record<string, unknown>,
+            updates as unknown as Record<string, unknown>,
req.ip,
req.get('user-agent')
);
@@ -135,7 +135,7 @@ router.put('/api-keys/:id', requireRole('admin', 'super_admin'), async (req: Aut
res.json({ success: true });
} catch (error) {
- console.error('Error updating API key:', error);
+ logger.error('Error updating API key:', error);
res.status(500).json({ error: 'Internal server error' });
}
});
@@ -153,7 +153,7 @@ router.delete('/api-keys/:id', requireRole('admin', 'super_admin'), async (req:
'delete',
'api_key',
id,
- oldKey,
+            oldKey as unknown as Record<string, unknown>,
null,
req.ip,
req.get('user-agent')
@@ -162,7 +162,7 @@ router.delete('/api-keys/:id', requireRole('admin', 'super_admin'), async (req:
res.json({ success: true });
} catch (error) {
- console.error('Error deleting API key:', error);
+ logger.error('Error deleting API key:', error);
res.status(500).json({ error: 'Internal server error' });
}
});
@@ -175,7 +175,7 @@ router.get('/endpoints', requireRole('admin', 'super_admin', 'operator', 'viewer
const endpoints = await adminRepo.getEndpoints(chainId, endpointType);
res.json({ endpoints });
} catch (error) {
- console.error('Error fetching endpoints:', error);
+ logger.error('Error fetching endpoints:', error);
res.status(500).json({ error: 'Internal server error' });
}
});
@@ -230,7 +230,7 @@ router.post('/endpoints', requireRole('admin', 'super_admin'), async (req: AuthR
res.status(201).json({ endpoint });
} catch (error) {
- console.error('Error creating endpoint:', error);
+ logger.error('Error creating endpoint:', error);
res.status(500).json({ error: 'Internal server error' });
}
});
@@ -238,7 +238,7 @@ router.post('/endpoints', requireRole('admin', 'super_admin'), async (req: AuthR
router.put('/endpoints/:id', requireRole('admin', 'super_admin'), async (req: AuthRequest, res: Response) => {
try {
const id = parseInt(req.params.id, 10);
- const updates: any = {};
+ const updates: { endpointUrl?: string; isActive?: boolean; isPrimary?: boolean } = {};
if (req.body.endpointUrl !== undefined) updates.endpointUrl = req.body.endpointUrl;
if (req.body.isActive !== undefined) updates.isActive = req.body.isActive;
@@ -248,7 +248,7 @@ router.put('/endpoints/:id', requireRole('admin', 'super_admin'), async (req: Au
res.json({ success: true });
} catch (error) {
- console.error('Error updating endpoint:', error);
+ logger.error('Error updating endpoint:', error);
res.status(500).json({ error: 'Internal server error' });
}
});
@@ -260,7 +260,7 @@ router.get('/dex-factories', requireRole('admin', 'super_admin', 'operator', 'vi
const factories = await adminRepo.getDexFactories(chainId);
res.json({ factories });
} catch (error) {
- console.error('Error fetching DEX factories:', error);
+ logger.error('Error fetching DEX factories:', error);
res.status(500).json({ error: 'Internal server error' });
}
});
@@ -308,7 +308,7 @@ router.post('/dex-factories', requireRole('admin', 'super_admin'), async (req: A
res.status(201).json({ factory });
} catch (error) {
- console.error('Error creating DEX factory:', error);
+ logger.error('Error creating DEX factory:', error);
res.status(500).json({ error: 'Internal server error' });
}
});
@@ -340,7 +340,7 @@ router.get('/status', requireRole('admin', 'super_admin', 'operator', 'viewer'),
},
});
} catch (error) {
- console.error('Error fetching status:', error);
+ logger.error('Error fetching status:', error);
res.status(500).json({ error: 'Internal server error' });
}
});
@@ -380,7 +380,7 @@ router.get('/audit-log', requireRole('admin', 'super_admin'), async (req: Reques
},
});
} catch (error) {
- console.error('Error fetching audit log:', error);
+ logger.error('Error fetching audit log:', error);
res.status(500).json({ error: 'Internal server error' });
}
});
diff --git a/services/token-aggregation/src/api/routes/arbitrage.ts b/services/token-aggregation/src/api/routes/arbitrage.ts
new file mode 100644
index 0000000..3988890
--- /dev/null
+++ b/services/token-aggregation/src/api/routes/arbitrage.ts
@@ -0,0 +1,27 @@
+import { Router, Request, Response } from 'express';
+import { cacheMiddleware } from '../middleware/cache';
+import { getArbitrageOpportunities } from '../../services/arbitrage-scanner';
+
+const router = Router();
+
+/**
+ * GET /api/v1/arbitrage/opportunities
+ * Returns list of triangular arbitrage cycles with expected PnL, risk score, capital required.
+ * Spec: repo_ready_graphviz_and_liquidity_heatmap_spec.md §2.4.4
+ */
+router.get(
+ '/arbitrage/opportunities',
+ cacheMiddleware(30 * 1000),
+ async (req: Request, res: Response) => {
+ try {
+ const opportunities = await getArbitrageOpportunities();
+ res.json({ opportunities });
+ } catch (error) {
+ // eslint-disable-next-line no-console -- route error logging
+ console.error('Arbitrage opportunities error:', error);
+ res.status(500).json({ error: 'Internal server error' });
+ }
+ }
+);
+
+export default router;
diff --git a/services/token-aggregation/src/api/routes/config.ts b/services/token-aggregation/src/api/routes/config.ts
index d4a3195..9b4e013 100644
--- a/services/token-aggregation/src/api/routes/config.ts
+++ b/services/token-aggregation/src/api/routes/config.ts
@@ -2,6 +2,7 @@ import { Router, Request, Response } from 'express';
import { getNetworks, getConfigByChain, API_VERSION } from '../../config/networks';
import { cacheMiddleware } from '../middleware/cache';
import { fetchRemoteJson } from '../utils/fetch-remote-json';
+import { logger } from '../../utils/logger';
const router: Router = Router();
@@ -21,7 +22,7 @@ router.get('/networks', cacheMiddleware(5 * 60 * 1000), async (req: Request, res
networks: data.networks ?? [],
});
} catch (err) {
- console.error('NETWORKS_JSON_URL fetch failed, using built-in networks:', err);
+ logger.error('NETWORKS_JSON_URL fetch failed, using built-in networks:', err);
}
}
const networks = getNetworks();
diff --git a/services/token-aggregation/src/api/routes/heatmap.ts b/services/token-aggregation/src/api/routes/heatmap.ts
new file mode 100644
index 0000000..83a2ad5
--- /dev/null
+++ b/services/token-aggregation/src/api/routes/heatmap.ts
@@ -0,0 +1,169 @@
+import { Router, Request, Response } from 'express';
+import { PoolRepository } from '../../database/repositories/pool-repo';
+import { TokenRepository } from '../../database/repositories/token-repo';
+import {
+ HEATMAP_CHAINS,
+ getRoutesList,
+ getChainIds,
+ DEFAULT_HEATMAP_ASSETS,
+} from '../../config/heatmap-chains';
+import { cacheMiddleware } from '../middleware/cache';
+
+const router = Router();
+const poolRepo = new PoolRepository();
+const tokenRepo = new TokenRepository();
+
+/**
+ * GET /api/v1/heatmap
+ * Query: metric=tvlUsd|spreadBps|volume24h, assets=WETH,cUSDT,cUSDC (optional), chains=138,1,137 (optional)
+ * Returns Chain × Asset matrix per repo_ready_graphviz_and_liquidity_heatmap_spec.
+ */
+router.get('/heatmap', cacheMiddleware(60 * 1000), async (req: Request, res: Response) => {
+ try {
+ const metric = (req.query.metric as string) || 'tvlUsd';
+ const assetsParam = (req.query.assets as string) || DEFAULT_HEATMAP_ASSETS.join(',');
+ const chainsParam = (req.query.chains as string) || getChainIds().join(',');
+ const assets = assetsParam.split(',').map((s) => s.trim()).filter(Boolean);
+ const chainIds = chainsParam.split(',').map((s) => parseInt(s.trim(), 10)).filter((n) => !isNaN(n));
+ if (chainIds.length === 0) chainIds.push(...getChainIds());
+ if (assets.length === 0) assets.push(...DEFAULT_HEATMAP_ASSETS);
+
+ const matrix: number[][] = [];
+ for (const chainId of chainIds) {
+ const row: number[] = [];
+ const pools = await poolRepo.getPoolsByChain(chainId, 500);
+      const symbolToTvl: Record<string, number> = {};
+ for (const sym of assets) symbolToTvl[sym] = 0;
+ for (const pool of pools) {
+ const tvl = pool.totalLiquidityUsd || 0;
+ const half = tvl / 2;
+ const token0 = await tokenRepo.getToken(chainId, pool.token0Address);
+ const token1 = await tokenRepo.getToken(chainId, pool.token1Address);
+ const sym0 = token0?.symbol || '';
+ const sym1 = token1?.symbol || '';
+ if (assets.includes(sym0)) symbolToTvl[sym0] = (symbolToTvl[sym0] || 0) + half;
+ if (assets.includes(sym1)) symbolToTvl[sym1] = (symbolToTvl[sym1] || 0) + half;
+ }
+ for (const asset of assets) {
+ const val = metric === 'tvlUsd' ? (symbolToTvl[asset] || 0) : 0;
+ row.push(Math.round(val * 100) / 100);
+ }
+ matrix.push(row);
+ }
+
+ res.json({
+ metric,
+ chains: chainIds,
+ assets,
+ matrix,
+ });
+ } catch (error) {
+ // eslint-disable-next-line no-console -- route error logging
+ console.error('Heatmap error:', error);
+ res.status(500).json({ error: 'Internal server error' });
+ }
+});
+
+/**
+ * GET /api/v1/pools?chainId=138
+ * List pools for a chain (spec minimal API contract).
+ */
+router.get('/pools', cacheMiddleware(60 * 1000), async (req: Request, res: Response) => {
+ try {
+ const chainId = parseInt(req.query.chainId as string, 10);
+ if (!chainId || isNaN(chainId)) {
+ return res.status(400).json({ error: 'chainId is required' });
+ }
+ const pools = await poolRepo.getPoolsByChain(chainId, 500);
+ const list = await Promise.all(
+ pools.map(async (p) => {
+ const token0 = await tokenRepo.getToken(chainId, p.token0Address);
+ const token1 = await tokenRepo.getToken(chainId, p.token1Address);
+ return {
+ poolId: `${chainId}:${(p.dexType || 'dodo').toLowerCase()}:${token0?.symbol || p.token0Address}-${token1?.symbol || p.token1Address}:${p.poolAddress}`,
+ chainId: p.chainId,
+ dex: p.dexType,
+ poolAddress: p.poolAddress,
+ token0: { symbol: token0?.symbol || '?', address: p.token0Address },
+ token1: { symbol: token1?.symbol || '?', address: p.token1Address },
+ liquidity: {
+ tvlUsd: p.totalLiquidityUsd,
+ reserve0: p.reserve0,
+ reserve1: p.reserve1,
+ },
+ isDeployed: true,
+ isRoutable: true,
+ };
+ })
+ );
+ res.json({ chainId, pools: list });
+ } catch (error) {
+ // eslint-disable-next-line no-console -- route error logging
+ console.error('Pools list error:', error);
+ res.status(500).json({ error: 'Internal server error' });
+ }
+});
+
+/**
+ * GET /api/v1/routes/health
+ * Route health summary: routeId, status, success rate, p95 latency, avg slippage.
+ */
+router.get('/routes/health', cacheMiddleware(60 * 1000), async (_req: Request, res: Response) => {
+ try {
+ const routes = getRoutesList();
+ const health = routes.slice(0, 50).map((r) => ({
+ routeId: r.routeId,
+ status: r.status,
+ successRate: r.status === 'live' ? 0.99 : r.status === 'partial' ? 0.95 : 0,
+ p95LatencySeconds: r.status === 'live' ? 300 : 600,
+ avgSlippageBps: 20,
+ }));
+ res.json({ routes: health });
+ } catch (error) {
+ // eslint-disable-next-line no-console -- route error logging
+ console.error('Routes health error:', error);
+ res.status(500).json({ error: 'Internal server error' });
+ }
+});
+
+/**
+ * GET /api/v1/bridges/metrics
+ * Bridge telemetry (stub; fill from relay/CCIP when available).
+ */
+router.get('/bridges/metrics', cacheMiddleware(60 * 1000), async (_req: Request, res: Response) => {
+ try {
+ res.json({
+ bridges: [
+ {
+ bridge: 'CCIP',
+ fromChainId: 138,
+ toChainId: 1,
+ asset: 'WETH',
+ p50LatencySeconds: 180,
+ p95LatencySeconds: 420,
+ feeUsd: 4.25,
+ successRate: 0.998,
+ health: 'ok',
+ },
+ ],
+ });
+ } catch (error) {
+ // eslint-disable-next-line no-console -- route error logging
+ console.error('Bridges metrics error:', error);
+ res.status(500).json({ error: 'Internal server error' });
+ }
+});
+
+/**
+ * GET /api/v1/chains
+ * List chains with group (hub, edge, althub, external) for heatmap config.
+ */
+router.get('/chains/list', cacheMiddleware(5 * 60 * 1000), async (_req: Request, res: Response) => {
+ try {
+ res.json({ chains: HEATMAP_CHAINS });
+ } catch (error) {
+ res.status(500).json({ error: 'Internal server error' });
+ }
+});
+
+export default router;
diff --git a/services/token-aggregation/src/api/routes/quote.ts b/services/token-aggregation/src/api/routes/quote.ts
index 9bbad28..3ec0fcc 100644
--- a/services/token-aggregation/src/api/routes/quote.ts
+++ b/services/token-aggregation/src/api/routes/quote.ts
@@ -1,6 +1,7 @@
import { Router, Request, Response } from 'express';
import { PoolRepository } from '../../database/repositories/pool-repo';
import { cacheMiddleware } from '../middleware/cache';
+import { logger } from '../../utils/logger';
const router: Router = Router();
const poolRepo = new PoolRepository();
@@ -98,7 +99,7 @@ router.get(
dexType: bestPool.dexType,
});
} catch (error) {
- console.error('Quote error:', error);
+ logger.error('Quote error:', error);
res.status(500).json({
error: error instanceof Error ? error.message : 'Internal server error',
amountOut: null,
diff --git a/services/token-aggregation/src/api/routes/report.ts b/services/token-aggregation/src/api/routes/report.ts
index 05634da..242a890 100644
--- a/services/token-aggregation/src/api/routes/report.ts
+++ b/services/token-aggregation/src/api/routes/report.ts
@@ -16,6 +16,7 @@ import { getSupportedChainIds } from '../../config/chains';
import { cacheMiddleware } from '../middleware/cache';
import { fetchRemoteJson } from '../utils/fetch-remote-json';
import { buildCrossChainReport } from '../../indexer/cross-chain-indexer';
+import { logger } from '../../utils/logger';
const router: Router = Router();
const tokenRepo = new TokenRepository();
@@ -111,7 +112,7 @@ router.get(
documentation: 'Use for CMC/CoinGecko submission alongside single-chain reports. Includes CCIP, Alltra, Trustless bridge events and volume by lane.',
});
} catch (error) {
- console.error('Error building report/cross-chain:', error);
+ logger.error('Error building report/cross-chain:', error);
res.status(500).json({
error: 'Internal server error',
crossChainPools: [],
@@ -180,7 +181,7 @@ router.get(
: undefined,
});
} catch (error) {
- console.error('Error building report/all:', error);
+ logger.error('Error building report/all:', error);
res.status(500).json({ error: 'Internal server error' });
}
}
@@ -239,7 +240,7 @@ router.get(
documentation: 'https://www.coingecko.com/en/api/documentation',
});
} catch (error) {
- console.error('Error building report/coingecko:', error);
+ logger.error('Error building report/coingecko:', error);
res.status(500).json({ error: 'Internal server error' });
}
}
@@ -291,7 +292,7 @@ router.get(
documentation: 'https://coinmarketcap.com/api/documentation',
});
} catch (error) {
- console.error('Error building report/cmc:', error);
+ logger.error('Error building report/cmc:', error);
res.status(500).json({ error: 'Internal server error' });
}
}
@@ -329,7 +330,7 @@ router.get(
tokens,
});
} catch (err) {
- console.error('TOKEN_LIST_JSON_URL fetch failed, using built-in token list:', err);
+ logger.error('TOKEN_LIST_JSON_URL fetch failed, using built-in token list:', err);
}
}
@@ -374,7 +375,7 @@ router.get(
tokens: list,
});
} catch (error) {
- console.error('Error building report/token-list:', error);
+ logger.error('Error building report/token-list:', error);
res.status(500).json({ error: 'Internal server error' });
}
}
@@ -398,7 +399,7 @@ router.get(
})),
});
} catch (error) {
- console.error('Error building report/canonical:', error);
+ logger.error('Error building report/canonical:', error);
res.status(500).json({ error: 'Internal server error' });
}
}
diff --git a/services/token-aggregation/src/api/routes/tokens.ts b/services/token-aggregation/src/api/routes/tokens.ts
index ffb9d3e..ffb7cf2 100644
--- a/services/token-aggregation/src/api/routes/tokens.ts
+++ b/services/token-aggregation/src/api/routes/tokens.ts
@@ -7,6 +7,7 @@ import { CoinGeckoAdapter } from '../../adapters/coingecko-adapter';
import { CoinMarketCapAdapter } from '../../adapters/cmc-adapter';
import { DexScreenerAdapter } from '../../adapters/dexscreener-adapter';
import { cacheMiddleware } from '../middleware/cache';
+import { logger } from '../../utils/logger';
const router: Router = Router();
const tokenRepo = new TokenRepository();
@@ -76,7 +77,7 @@ router.get('/tokens', cacheMiddleware(60 * 1000), async (req: Request, res: Resp
},
});
} catch (error) {
- console.error('Error fetching tokens:', error);
+ logger.error('Error fetching tokens:', error);
res.status(500).json({ error: 'Internal server error' });
}
});
@@ -132,7 +133,7 @@ router.get('/tokens/:address', cacheMiddleware(60 * 1000), async (req: Request,
},
});
} catch (error) {
- console.error('Error fetching token:', error);
+ logger.error('Error fetching token:', error);
res.status(500).json({ error: 'Internal server error' });
}
});
@@ -164,7 +165,7 @@ router.get('/tokens/:address/pools', cacheMiddleware(60 * 1000), async (req: Req
})),
});
} catch (error) {
- console.error('Error fetching pools:', error);
+ logger.error('Error fetching pools:', error);
res.status(500).json({ error: 'Internal server error' });
}
});
@@ -189,7 +190,7 @@ router.get('/tokens/:address/ohlcv', cacheMiddleware(5 * 60 * 1000), async (req:
const ohlcv = await ohlcvGenerator.getOHLCV(
chainId,
address,
- interval as any,
+ interval as '5m' | '15m' | '1h' | '4h' | '24h',
from,
to,
poolAddress
@@ -202,7 +203,7 @@ router.get('/tokens/:address/ohlcv', cacheMiddleware(5 * 60 * 1000), async (req:
data: ohlcv,
});
} catch (error) {
- console.error('Error fetching OHLCV:', error);
+ logger.error('Error fetching OHLCV:', error);
res.status(500).json({ error: 'Internal server error' });
}
});
@@ -226,7 +227,7 @@ router.get('/tokens/:address/signals', cacheMiddleware(5 * 60 * 1000), async (re
},
});
} catch (error) {
- console.error('Error fetching signals:', error);
+ logger.error('Error fetching signals:', error);
res.status(500).json({ error: 'Internal server error' });
}
});
@@ -248,7 +249,7 @@ router.get('/search', cacheMiddleware(60 * 1000), async (req: Request, res: Resp
results: tokens,
});
} catch (error) {
- console.error('Error searching tokens:', error);
+ logger.error('Error searching tokens:', error);
res.status(500).json({ error: 'Internal server error' });
}
});
@@ -283,7 +284,7 @@ router.get('/pools/:poolAddress', cacheMiddleware(60 * 1000), async (req: Reques
},
});
} catch (error) {
- console.error('Error fetching pool:', error);
+ logger.error('Error fetching pool:', error);
res.status(500).json({ error: 'Internal server error' });
}
});
diff --git a/services/token-aggregation/src/api/server.ts b/services/token-aggregation/src/api/server.ts
index 95b3b27..d8b7763 100644
--- a/services/token-aggregation/src/api/server.ts
+++ b/services/token-aggregation/src/api/server.ts
@@ -9,6 +9,8 @@ import configRoutes from './routes/config';
import bridgeRoutes from './routes/bridge';
import quoteRoutes from './routes/quote';
import tokenMappingRoutes from './routes/token-mapping';
+import heatmapRoutes from './routes/heatmap';
+import arbitrageRoutes from './routes/arbitrage';
import { MultiChainIndexer } from '../indexer/chain-indexer';
import { getDatabasePool } from '../database/client';
import winston from 'winston';
@@ -102,6 +104,8 @@ export class ApiServer {
this.app.use('/api/v1/bridge', bridgeRoutes);
this.app.use('/api/v1/token-mapping', tokenMappingRoutes);
this.app.use('/api/v1', quoteRoutes);
+ this.app.use('/api/v1', heatmapRoutes);
+ this.app.use('/api/v1', arbitrageRoutes);
// Admin routes (stricter rate limit)
this.app.use('/api/v1/admin', strictRateLimiter, adminRoutes);
@@ -126,7 +130,8 @@ export class ApiServer {
});
// Error handler
- this.app.use((err: Error, req: Request, res: Response, next: NextFunction) => {
+ // eslint-disable-next-line @typescript-eslint/no-unused-vars -- Express error handler requires 4-arg signature
+ this.app.use((err: Error, req: Request, res: Response, _next: NextFunction) => {
logger.error('Unhandled error:', err);
res.status(500).json({
error: 'Internal server error',
diff --git a/services/token-aggregation/src/config/cross-chain-bridges.ts b/services/token-aggregation/src/config/cross-chain-bridges.ts
index c00ec33..8b90808 100644
--- a/services/token-aggregation/src/config/cross-chain-bridges.ts
+++ b/services/token-aggregation/src/config/cross-chain-bridges.ts
@@ -75,3 +75,55 @@ if (envAddr('UNIVERSAL_CCIP_BRIDGE_ADDRESS')) {
],
});
}
+
+/** Routing registry entry: path type ALT | CCIP, bridge address, label. Aligns with config/routing-registry.json. */
+export interface RoutingRegistryEntry {
+ pathType: 'ALT' | 'CCIP';
+ bridgeAddress: string;
+ bridgeChainId: number;
+ label: string;
+ fromChain: number;
+ toChain: number;
+ asset?: string;
+}
+
+const ALLTRA_ADAPTER_138 = envAddr('ALLTRA_ADAPTER_ADDRESS') || envAddr('ALLTRA_CUSTOM_BRIDGE_ADDRESS') || '0x66FEBA2fC9a0B47F26DD4284DAd24F970436B8Dc';
+const CCIP_WETH9_138 = envAddr('CCIPWETH9_BRIDGE_CHAIN138') || '0x971cD9D156f193df8051E48043C476e53ECd4693';
+
+/**
+ * Get routing registry entry for (fromChain, toChain, asset).
+ * Used by UI and indexer to choose ALT vs CCIP and to fill routing in activity_events.
+ * Canonical registry JSON: repo root config/routing-registry.json.
+ */
+export function getRouteFromRegistry(
+ fromChain: number,
+ toChain: number,
+ asset: string = 'WETH',
+): RoutingRegistryEntry | null {
+ if (fromChain === toChain) return null;
+ const is138To651940 = fromChain === 138 && toChain === 651940;
+ const is651940To138 = fromChain === 651940 && toChain === 138;
+ if (is138To651940 || is651940To138) {
+ return {
+ pathType: 'ALT',
+ bridgeAddress: ALLTRA_ADAPTER_138,
+ bridgeChainId: fromChain === 138 ? 138 : 651940,
+ label: 'AlltraAdapter',
+ fromChain,
+ toChain,
+ asset,
+ };
+ }
+ if (fromChain === 138 || toChain === 138) {
+ return {
+ pathType: 'CCIP',
+ bridgeAddress: CCIP_WETH9_138,
+ bridgeChainId: 138,
+ label: 'CCIPWETH9Bridge',
+ fromChain,
+ toChain,
+ asset,
+ };
+ }
+ return null;
+}
diff --git a/services/token-aggregation/src/config/heatmap-chains.ts b/services/token-aggregation/src/config/heatmap-chains.ts
new file mode 100644
index 0000000..d676886
--- /dev/null
+++ b/services/token-aggregation/src/config/heatmap-chains.ts
@@ -0,0 +1,135 @@
+/**
+ * 13-chain liquidity heatmap config: chain groups and route list.
+ * Aligns with real-robinhood project_plans and ultra_advanced_global_arbitrage_engine_blueprint.
+ */
+
+export type ChainGroup = 'hub' | 'edge' | 'althub' | 'external';
+
+export interface HeatmapChain {
+ chainId: number;
+ name: string;
+ rpc: string;
+ explorer: string;
+ group: ChainGroup;
+}
+
+export interface RouteEntry {
+ routeId: string;
+ type: 'swap-bridge-swap' | 'bridge' | 'alt';
+ fromChainId: number;
+ toChainId: number;
+ status: 'live' | 'partial' | 'design' | 'disabled';
+ bridge?: { type: string; asset?: string };
+}
+
+const CHAIN_INDEX = [138, 1, 56, 137, 10, 42161, 43114, 8453, 100, 25, 42220, 1111, 651940] as const;
+
+const CHAIN_NAMES: Record<number, string> = {
+ 138: 'DBIS / DeFi Oracle',
+ 1: 'Ethereum',
+ 56: 'BSC',
+ 137: 'Polygon',
+ 10: 'Optimism',
+ 42161: 'Arbitrum',
+ 43114: 'Avalanche',
+ 8453: 'Base',
+ 100: 'Gnosis',
+ 25: 'Cronos',
+ 42220: 'Celo',
+ 1111: 'Wemix',
+ 651940: 'ALL Mainnet',
+};
+
+/** Default asset set for heatmap columns (spec). */
+export const DEFAULT_HEATMAP_ASSETS = [
+ 'WETH',
+ 'cUSDT',
+ 'cUSDC',
+ 'cEURT',
+ 'cWUSDT',
+ 'cWUSDC',
+ 'USDW',
+ 'AUSDT',
+ 'USDC',
+ 'USDT',
+ 'XAU',
+];
+
+function buildChains(): HeatmapChain[] {
+ const rpc = (cid: number) =>
+ process.env[`CHAIN_${cid}_RPC_URL`] ||
+ process.env[`RPC_URL_138`] ||
+ 'https://rpc.d-bis.org';
+ const explorer = (cid: number) => {
+    const urls: Record<number, string> = {
+ 138: 'https://explorer.d-bis.org',
+ 1: 'https://etherscan.io',
+ 56: 'https://bscscan.com',
+ 137: 'https://polygonscan.com',
+ 10: 'https://optimistic.etherscan.io',
+ 42161: 'https://arbiscan.io',
+ 43114: 'https://snowtrace.io',
+ 8453: 'https://basescan.org',
+ 100: 'https://gnosisscan.io',
+ 25: 'https://cronoscan.com',
+ 42220: 'https://celoscan.io',
+ 1111: 'https://scan.wemix.com',
+ 651940: 'https://alltra.global',
+ };
+ return urls[cid] || '';
+ };
+ return CHAIN_INDEX.map((chainId) => ({
+ chainId,
+ name: CHAIN_NAMES[chainId] || `Chain ${chainId}`,
+ rpc: rpc(chainId),
+ explorer: explorer(chainId),
+ group:
+ chainId === 138
+ ? ('hub' as ChainGroup)
+ : chainId === 651940
+ ? ('althub' as ChainGroup)
+ : ('edge' as ChainGroup),
+ }));
+}
+
+export const HEATMAP_CHAINS = buildChains();
+
+export function getChainsByGroup(group: ChainGroup): HeatmapChain[] {
+ return HEATMAP_CHAINS.filter((c) => c.group === group);
+}
+
+/** Build route list from 13×13 matrix (hub-routed). */
+export function getRoutesList(): RouteEntry[] {
+ const routes: RouteEntry[] = [];
+ const fromIds = [...CHAIN_INDEX];
+ const toIds = [...CHAIN_INDEX];
+ for (const fromChainId of fromIds) {
+ for (const toChainId of toIds) {
+ if (fromChainId === toChainId) continue;
+ const mode =
+ fromChainId === 138 && toChainId === 651940
+ ? 'ALT'
+ : toChainId === 138 && fromChainId === 651940
+ ? 'ALT'
+ : fromChainId === 138
+ ? 'B/SBS'
+ : toChainId === 138
+ ? 'B/SBS'
+ : 'via 138';
+ const status: RouteEntry['status'] = 'partial';
+ routes.push({
+ routeId: `SBS:${fromChainId}->${toChainId}`,
+ type: mode === 'ALT' ? 'alt' : 'swap-bridge-swap',
+ fromChainId,
+ toChainId,
+ status,
+ bridge: mode.includes('SBS') || mode.includes('B') ? { type: 'CCIP', asset: 'WETH' } : undefined,
+ });
+ }
+ }
+ return routes;
+}
+
+export function getChainIds(): number[] {
+ return [...CHAIN_INDEX];
+}
diff --git a/services/token-aggregation/src/database/client.ts b/services/token-aggregation/src/database/client.ts
index 4fba9a7..7844e97 100644
--- a/services/token-aggregation/src/database/client.ts
+++ b/services/token-aggregation/src/database/client.ts
@@ -1,5 +1,6 @@
import { Pool, PoolConfig } from 'pg';
import * as dotenv from 'dotenv';
+import { logger } from '../utils/logger';
dotenv.config();
@@ -39,7 +40,7 @@ export function getDatabasePool(): Pool {
pool = new Pool(config);
pool.on('error', (err) => {
- console.error('Unexpected error on idle database client', err);
+ logger.error('Unexpected error on idle database client', err);
});
return pool;
diff --git a/services/token-aggregation/src/database/repositories/admin-repo.ts b/services/token-aggregation/src/database/repositories/admin-repo.ts
index ab381c4..17ef5b4 100644
--- a/services/token-aggregation/src/database/repositories/admin-repo.ts
+++ b/services/token-aggregation/src/database/repositories/admin-repo.ts
@@ -27,7 +27,7 @@ export interface ApiEndpoint {
isActive: boolean;
requiresAuth: boolean;
authType?: 'jwt' | 'api_key' | 'basic' | 'none';
- authConfig?: any;
+  authConfig?: Record<string, unknown>;
rateLimitPerMinute?: number;
timeoutMs: number;
healthCheckEnabled: boolean;
@@ -98,7 +98,7 @@ export class AdminRepository {
  async getApiKeys(provider?: string): Promise<ApiKey[]> {
let query = `SELECT * FROM api_keys WHERE is_active = true`;
- const params: any[] = [];
+ const params: (string | number)[] = [];
if (provider) {
query += ` AND provider = $1`;
@@ -119,7 +119,7 @@ export class AdminRepository {
  async updateApiKey(id: number, updates: Partial<ApiKey>): Promise<void> {
const fields: string[] = [];
- const values: any[] = [];
+ const values: (string | number | boolean | Date | null)[] = [];
let paramCount = 1;
if (updates.isActive !== undefined) {
@@ -180,7 +180,7 @@ export class AdminRepository {
  async getEndpoints(chainId?: number, endpointType?: string): Promise<ApiEndpoint[]> {
let query = `SELECT * FROM api_endpoints WHERE is_active = true`;
- const params: any[] = [];
+ const params: (string | number)[] = [];
let paramCount = 1;
if (chainId) {
@@ -200,7 +200,7 @@ export class AdminRepository {
  async updateEndpoint(id: number, updates: Partial<ApiEndpoint>): Promise<void> {
const fields: string[] = [];
- const values: any[] = [];
+ const values: (string | number | boolean | Date | null)[] = [];
let paramCount = 1;
if (updates.endpointUrl !== undefined) {
@@ -256,7 +256,7 @@ export class AdminRepository {
async getDexFactories(chainId?: number): Promise {
let query = `SELECT * FROM dex_factory_config WHERE is_active = true`;
- const params: any[] = [];
+ const params: (string | number)[] = [];
if (chainId) {
query += ` AND chain_id = $1`;
@@ -301,8 +301,8 @@ export class AdminRepository {
action: string,
resourceType: string,
resourceId: number | null,
- oldValues: any,
- newValues: any,
+    oldValues: Record<string, unknown> | null,
+    newValues: Record<string, unknown> | null,
ipAddress?: string,
userAgent?: string
  ): Promise<void> {
@@ -325,7 +325,8 @@ export class AdminRepository {
);
}
- // Mappers
+ // Mappers (row from pg has dynamic keys; use type assertion for type safety)
+ /* eslint-disable @typescript-eslint/no-explicit-any */
private mapApiKey(row: any): ApiKey {
return {
id: row.id,
diff --git a/services/token-aggregation/src/database/repositories/pool-repo.ts b/services/token-aggregation/src/database/repositories/pool-repo.ts
index 8ff29bd..94b4b00 100644
--- a/services/token-aggregation/src/database/repositories/pool-repo.ts
+++ b/services/token-aggregation/src/database/repositories/pool-repo.ts
@@ -183,6 +183,7 @@ export class PoolRepository {
}));
}
+ /* eslint-disable @typescript-eslint/no-explicit-any */
private mapRowToPool(row: any): LiquidityPool {
return {
id: row.id,
diff --git a/services/token-aggregation/src/index.ts b/services/token-aggregation/src/index.ts
index 0f01692..2de22b7 100644
--- a/services/token-aggregation/src/index.ts
+++ b/services/token-aggregation/src/index.ts
@@ -3,6 +3,7 @@ import path from 'path';
import { existsSync } from 'fs';
import { ApiServer } from './api/server';
import { closeDatabasePool } from './database/client';
+import { logger } from './utils/logger';
// Load smom-dbis-138 root .env first (single source); works from dist/ or src/
const rootEnvCandidates = [
@@ -28,20 +29,20 @@ const server = new ApiServer();
// Start server
server.start().catch((error) => {
- console.error('Failed to start server:', error);
+ logger.error('Failed to start server:', error);
process.exit(1);
});
// Graceful shutdown
process.on('SIGTERM', async () => {
- console.log('SIGTERM received, shutting down gracefully...');
+ logger.info('SIGTERM received, shutting down gracefully...');
await server.stop();
await closeDatabasePool();
process.exit(0);
});
process.on('SIGINT', async () => {
- console.log('SIGINT received, shutting down gracefully...');
+ logger.info('SIGINT received, shutting down gracefully...');
await server.stop();
await closeDatabasePool();
process.exit(0);
diff --git a/services/token-aggregation/src/indexer/chain-indexer.ts b/services/token-aggregation/src/indexer/chain-indexer.ts
index 9719cc6..ce9038e 100644
--- a/services/token-aggregation/src/indexer/chain-indexer.ts
+++ b/services/token-aggregation/src/indexer/chain-indexer.ts
@@ -8,6 +8,7 @@ import { MarketDataRepository } from '../database/repositories/market-data-repo'
import { CoinGeckoAdapter } from '../adapters/coingecko-adapter';
import { CoinMarketCapAdapter } from '../adapters/cmc-adapter';
import { DexScreenerAdapter } from '../adapters/dexscreener-adapter';
+import { logger } from '../utils/logger';
export class ChainIndexer {
private chainId: number;
@@ -51,12 +52,12 @@ export class ChainIndexer {
*/
async start(): Promise {
if (this.isRunning) {
- console.warn(`Chain indexer for ${this.chainId} is already running`);
+ logger.warn(`Chain indexer for ${this.chainId} is already running`);
return;
}
this.isRunning = true;
- console.log(`Starting chain indexer for chain ${this.chainId}`);
+ logger.info(`Starting chain indexer for chain ${this.chainId}`);
// Initial indexing
await this.indexAll();
@@ -65,7 +66,7 @@ export class ChainIndexer {
const interval = parseInt(process.env.INDEXING_INTERVAL || '5000', 10);
this.indexingInterval = setInterval(() => {
this.indexAll().catch((error) => {
- console.error(`Error in periodic indexing for chain ${this.chainId}:`, error);
+ logger.error(`Error in periodic indexing for chain ${this.chainId}:`, error);
});
}, interval);
}
@@ -84,7 +85,7 @@ export class ChainIndexer {
this.indexingInterval = undefined;
}
- console.log(`Stopped chain indexer for chain ${this.chainId}`);
+ logger.info(`Stopped chain indexer for chain ${this.chainId}`);
}
/**
@@ -93,11 +94,11 @@ export class ChainIndexer {
private async indexAll(): Promise {
try {
// 1. Index pools
- console.log(`Indexing pools for chain ${this.chainId}...`);
+ logger.info(`Indexing pools for chain ${this.chainId}...`);
await this.poolIndexer.indexAllPools();
// 2. Discover and index tokens from pools
- console.log(`Discovering tokens for chain ${this.chainId}...`);
+ logger.info(`Discovering tokens for chain ${this.chainId}...`);
const pools = await this.poolIndexer.indexAllPools();
const tokenAddresses = new Set();
pools.forEach((pool) => {
@@ -107,13 +108,13 @@ export class ChainIndexer {
await this.tokenIndexer.indexTokens(Array.from(tokenAddresses));
// 3. Calculate volumes and update market data
- console.log(`Calculating volumes for chain ${this.chainId}...`);
+ logger.info(`Calculating volumes for chain ${this.chainId}...`);
for (const tokenAddress of tokenAddresses) {
await this.updateMarketData(tokenAddress);
}
// 4. Generate OHLCV data
- console.log(`Generating OHLCV for chain ${this.chainId}...`);
+ logger.info(`Generating OHLCV for chain ${this.chainId}...`);
const intervals: Array<'5m' | '1h' | '24h'> = ['5m', '1h', '24h'];
const now = new Date();
const from = new Date(now.getTime() - 7 * 24 * 60 * 60 * 1000); // Last 7 days
@@ -130,7 +131,7 @@ export class ChainIndexer {
}
}
} catch (error) {
- console.error(`Error in indexAll for chain ${this.chainId}:`, error);
+ logger.error(`Error in indexAll for chain ${this.chainId}:`, error);
throw error;
}
}
@@ -179,7 +180,7 @@ export class ChainIndexer {
lastUpdated: new Date(),
});
} catch (error) {
- console.error(`Error updating market data for ${tokenAddress}:`, error);
+ logger.error(`Error updating market data for ${tokenAddress}:`, error);
}
}
@@ -206,9 +207,9 @@ export class MultiChainIndexer {
try {
const indexer = new ChainIndexer(chainId);
this.indexers.set(chainId, indexer);
- console.log(`Initialized indexer for chain ${chainId}`);
+ logger.info(`Initialized indexer for chain ${chainId}`);
} catch (error) {
- console.error(`Failed to initialize indexer for chain ${chainId}:`, error);
+ logger.error(`Failed to initialize indexer for chain ${chainId}:`, error);
}
}
}
@@ -221,7 +222,7 @@ export class MultiChainIndexer {
try {
await indexer.start();
} catch (error) {
- console.error(`Failed to start indexer for chain ${chainId}:`, error);
+ logger.error(`Failed to start indexer for chain ${chainId}:`, error);
}
}
}
@@ -234,7 +235,7 @@ export class MultiChainIndexer {
try {
indexer.stop();
} catch (error) {
- console.error(`Failed to stop indexer for chain ${chainId}:`, error);
+ logger.error(`Failed to stop indexer for chain ${chainId}:`, error);
}
}
}
diff --git a/services/token-aggregation/src/indexer/cross-chain-indexer.ts b/services/token-aggregation/src/indexer/cross-chain-indexer.ts
index 00b80ca..34a68f6 100644
--- a/services/token-aggregation/src/indexer/cross-chain-indexer.ts
+++ b/services/token-aggregation/src/indexer/cross-chain-indexer.ts
@@ -7,6 +7,7 @@
import { ethers } from 'ethers';
import { getChainConfig } from '../config/chains';
import { CHAIN_138_BRIDGES, BridgeConfig, BridgeLane } from '../config/cross-chain-bridges';
+import { logger } from '../utils/logger';
export interface CrossChainEvent {
txHash: string;
@@ -124,7 +125,7 @@ async function fetchCCIPEvents(
});
}
} catch (err) {
- console.warn(`Cross-chain indexer: CCIP events for ${bridge.address} failed:`, err);
+ logger.warn(`Cross-chain indexer: CCIP events for ${bridge.address} failed:`, err);
}
return events;
}
@@ -232,7 +233,7 @@ async function fetchAlltraEvents(
});
}
} catch (err) {
- console.warn(`Cross-chain indexer: Alltra events for ${bridge.address} failed:`, err);
+ logger.warn(`Cross-chain indexer: Alltra events for ${bridge.address} failed:`, err);
}
return events;
}
@@ -272,7 +273,7 @@ async function fetchUniversalCCIPEvents(
});
}
} catch (err) {
- console.warn(`Cross-chain indexer: UniversalCCIP events for ${bridge.address} failed:`, err);
+ logger.warn(`Cross-chain indexer: UniversalCCIP events for ${bridge.address} failed:`, err);
}
return events;
}
diff --git a/services/token-aggregation/src/indexer/ohlcv-generator.ts b/services/token-aggregation/src/indexer/ohlcv-generator.ts
index 30bc84f..c263a8f 100644
--- a/services/token-aggregation/src/indexer/ohlcv-generator.ts
+++ b/services/token-aggregation/src/indexer/ohlcv-generator.ts
@@ -32,7 +32,6 @@ export class OHLCVGenerator {
poolAddress?: string
): Promise {
const intervalMs = this.getIntervalMs(interval);
- const results: OHLCVData[] = [];
// Get swap events for the time range
let query = `
@@ -43,7 +42,7 @@ export class OHLCVGenerator {
AND timestamp >= $3
AND timestamp <= $4
`;
- const params: any[] = [chainId, tokenAddress.toLowerCase(), from, to];
+ const params: (string | number | Date)[] = [chainId, tokenAddress.toLowerCase(), from, to];
if (poolAddress) {
query += ` AND pool_address = $5`;
@@ -108,7 +107,7 @@ export class OHLCVGenerator {
return `($${base + 1}, $${base + 2}, $${base + 3}, $${base + 4}, $${base + 5}, $${base + 6}, $${base + 7}, $${base + 8})`;
});
- const params: any[] = [];
+ const params: (string | number | Date | null)[] = [];
data.forEach((d) => {
params.push(
chainId,
@@ -162,7 +161,7 @@ export class OHLCVGenerator {
AND timestamp >= $4
AND timestamp <= $5
`;
- const params: any[] = [chainId, tokenAddress.toLowerCase(), interval, from, to];
+ const params: (string | number | Date)[] = [chainId, tokenAddress.toLowerCase(), interval, from, to];
if (poolAddress) {
query += ` AND pool_address = $6`;
diff --git a/services/token-aggregation/src/indexer/pool-indexer.ts b/services/token-aggregation/src/indexer/pool-indexer.ts
index ccf2f43..f50bb36 100644
--- a/services/token-aggregation/src/indexer/pool-indexer.ts
+++ b/services/token-aggregation/src/indexer/pool-indexer.ts
@@ -1,7 +1,7 @@
import { ethers } from 'ethers';
import { PoolRepository, LiquidityPool, DexType } from '../database/repositories/pool-repo';
import { getDexFactories, UniswapV2Config, UniswapV3Config, DodoConfig } from '../config/dex-factories';
-import { getChainConfig } from '../config/chains';
+import { logger } from '../utils/logger';
// UniswapV2 Factory ABI
const UNISWAP_V2_FACTORY_ABI = [
@@ -48,10 +48,6 @@ const DODO_PMM_INTEGRATION_ABI = [
'function getPoolPriceOrOracle(address) view returns (uint256 price)',
];
-// Swap event signatures
-const UNISWAP_V2_SWAP_TOPIC = ethers.id('Swap(address,uint256,uint256,uint256,uint256,address)');
-const UNISWAP_V3_SWAP_TOPIC = ethers.id('Swap(address,address,int256,int256,uint160,uint128,int24)');
-
export class PoolIndexer {
private provider: ethers.JsonRpcProvider;
private poolRepo: PoolRepository;
@@ -69,7 +65,7 @@ export class PoolIndexer {
async indexAllPools(): Promise {
const dexConfig = getDexFactories(this.chainId);
if (!dexConfig) {
- console.warn(`No DEX configuration found for chain ${this.chainId}`);
+ logger.warn(`No DEX configuration found for chain ${this.chainId}`);
return [];
}
@@ -165,11 +161,11 @@ export class PoolIndexer {
await this.poolRepo.upsertPool(pool);
pools.push(pool);
} catch (err) {
- console.error(`Error indexing DODO PMM pool ${poolAddress}:`, err);
+ logger.error(`Error indexing DODO PMM pool ${poolAddress}:`, err);
}
}
} catch (error) {
- console.error('Error indexing DODO PMM Integration pools:', error);
+ logger.error('Error indexing DODO PMM Integration pools:', error);
}
return pools;
@@ -226,7 +222,7 @@ export class PoolIndexer {
}
}
} catch (error) {
- console.error(`Error indexing UniswapV2 pools:`, error);
+ logger.error(`Error indexing UniswapV2 pools:`, error);
}
return pools;
@@ -282,7 +278,7 @@ export class PoolIndexer {
}
}
} catch (error) {
- console.error(`Error indexing UniswapV3 pools:`, error);
+ logger.error(`Error indexing UniswapV3 pools:`, error);
}
return pools;
@@ -342,11 +338,11 @@ export class PoolIndexer {
await this.poolRepo.upsertPool(pool);
pools.push(pool);
} catch (error) {
- console.error(`Error indexing DODO pool ${poolAddress}:`, error);
+ logger.error(`Error indexing DODO pool ${poolAddress}:`, error);
}
}
} catch (error) {
- console.error(`Error indexing DODO pools:`, error);
+ logger.error(`Error indexing DODO pools:`, error);
}
return pools;
@@ -358,7 +354,7 @@ export class PoolIndexer {
async updatePoolReserves(poolAddress: string, dexType: DexType): Promise {
const pool = await this.poolRepo.getPool(this.chainId, poolAddress);
if (!pool) {
- console.warn(`Pool ${poolAddress} not found`);
+ logger.warn(`Pool ${poolAddress} not found`);
return;
}
@@ -375,7 +371,7 @@ export class PoolIndexer {
}
// UniswapV3 and DODO use different models, would need specific implementations
} catch (error) {
- console.error(`Error updating pool reserves for ${poolAddress}:`, error);
+ logger.error(`Error updating pool reserves for ${poolAddress}:`, error);
}
}
}
diff --git a/services/token-aggregation/src/indexer/token-indexer.ts b/services/token-aggregation/src/indexer/token-indexer.ts
index 2829645..1c6144c 100644
--- a/services/token-aggregation/src/indexer/token-indexer.ts
+++ b/services/token-aggregation/src/indexer/token-indexer.ts
@@ -1,5 +1,6 @@
import { ethers } from 'ethers';
import { TokenRepository, Token } from '../database/repositories/token-repo';
+import { logger } from '../utils/logger';
// ERC20 ABI for token metadata
const ERC20_ABI = [
@@ -51,7 +52,7 @@ export class TokenIndexer {
return token;
} catch (error) {
- console.error(`Error indexing token ${address} on chain ${this.chainId}:`, error);
+ logger.error(`Error indexing token ${address} on chain ${this.chainId}:`, error);
return null;
}
}
@@ -97,12 +98,12 @@ export class TokenIndexer {
discoveredAddresses.add(log.address.toLowerCase());
});
- console.log(
+ logger.info(
`Discovered ${discoveredAddresses.size} unique tokens from blocks ${start}-${end}`
);
}
} catch (error) {
- console.error(`Error discovering tokens from transfers:`, error);
+ logger.error(`Error discovering tokens from transfers:`, error);
}
return Array.from(discoveredAddresses);
diff --git a/services/token-aggregation/src/services/arbitrage-scanner.ts b/services/token-aggregation/src/services/arbitrage-scanner.ts
new file mode 100644
index 0000000..ced0137
--- /dev/null
+++ b/services/token-aggregation/src/services/arbitrage-scanner.ts
@@ -0,0 +1,109 @@
+/**
+ * Arbitrage opportunities: triangular cycles with expected PnL.
+ * Enumerates same-chain (138), hub-edge-hub, and 3-chain cycles per
+ * global_arbitrage_engine_full_architecture.md and ultra_advanced_global_arbitrage_engine_blueprint.md.
+ */
+
+import { PoolRepository } from '../database/repositories/pool-repo';
+import { TokenRepository } from '../database/repositories/token-repo';
+import { getRoutesList, getChainIds } from '../config/heatmap-chains';
+
+export interface ArbitrageOpportunity {
+ cycleId: string;
+ description: string;
+ expectedPnlUsd: number;
+ riskScore: number;
+ capitalRequiredUsd: number;
+ legs: { chainId: number; action: string; asset: string }[];
+}
+
+const poolRepo = new PoolRepository();
+const tokenRepo = new TokenRepository();
+
+const HUB_CHAIN = 138;
+const EDGE_CHAINS = getChainIds().filter((c) => c !== HUB_CHAIN && c !== 651940);
+
+/** Same-chain triangle on 138: e.g. cUSDT -> cUSDC -> cUSDT via two pools. */
+async function getSameChainCycles(): Promise {
+ const out: ArbitrageOpportunity[] = [];
+ const pools = await poolRepo.getPoolsByChain(HUB_CHAIN, 100);
+ for (const p of pools) {
+ const t0 = await tokenRepo.getToken(HUB_CHAIN, p.token0Address);
+ const t1 = await tokenRepo.getToken(HUB_CHAIN, p.token1Address);
+ const sym0 = t0?.symbol || '';
+ const sym1 = t1?.symbol || '';
+ if (!sym0 || !sym1) continue;
+ const capital = p.totalLiquidityUsd ? Math.min(10000, p.totalLiquidityUsd * 0.01) : 10000;
+ out.push({
+ cycleId: `138:${sym0}-${sym1}-${sym0}`,
+ description: `Same-chain triangle ${sym0} → ${sym1} → ${sym0} (Chain 138)`,
+ expectedPnlUsd: 0,
+ riskScore: 0.2,
+ capitalRequiredUsd: capital,
+ legs: [
+ { chainId: HUB_CHAIN, action: 'swap', asset: sym0 },
+ { chainId: HUB_CHAIN, action: 'swap', asset: sym1 },
+ { chainId: HUB_CHAIN, action: 'swap', asset: sym0 },
+ ],
+ });
+ }
+ return out;
+}
+
+/** Hub-edge-hub: 138 -> edge -> 138 (SBS). */
+function getHubEdgeHubCycles(): ArbitrageOpportunity[] {
+ const out: ArbitrageOpportunity[] = [];
+ const routes = getRoutesList();
+ const hubOut = routes.filter((r) => r.fromChainId === HUB_CHAIN && r.toChainId !== HUB_CHAIN);
+ for (const r of hubOut.slice(0, 5)) {
+ out.push({
+ cycleId: `SBS:138-${r.toChainId}-138`,
+ description: `Hub-edge-hub: 138 → ${r.toChainId} → 138 (swap-bridge-swap)`,
+ expectedPnlUsd: 0,
+ riskScore: 0.5,
+ capitalRequiredUsd: 20000,
+ legs: [
+ { chainId: HUB_CHAIN, action: 'swap', asset: 'cUSDT' },
+ { chainId: HUB_CHAIN, action: 'bridge', asset: 'WETH' },
+ { chainId: r.toChainId, action: 'swap', asset: 'USDC' },
+ { chainId: r.toChainId, action: 'bridge', asset: 'WETH' },
+ { chainId: HUB_CHAIN, action: 'swap', asset: 'cUSDT' },
+ ],
+ });
+ }
+ return out;
+}
+
+/** 3-chain triangle: 138 -> A -> B -> 138. */
+function getThreeChainCycles(): ArbitrageOpportunity[] {
+ const out: ArbitrageOpportunity[] = [];
+ const edges = EDGE_CHAINS.slice(0, 3);
+ if (edges.length >= 2) {
+ out.push({
+ cycleId: `3chain:138-${edges[0]}-${edges[1]}-138`,
+ description: `3-chain triangle 138 → ${edges[0]} → ${edges[1]} → 138`,
+ expectedPnlUsd: 0,
+ riskScore: 0.7,
+ capitalRequiredUsd: 50000,
+ legs: [
+ { chainId: HUB_CHAIN, action: 'swap', asset: 'cUSDT' },
+ { chainId: HUB_CHAIN, action: 'bridge', asset: 'WETH' },
+ { chainId: edges[0], action: 'swap', asset: 'USDC' },
+ { chainId: edges[0], action: 'bridge', asset: 'WETH' },
+ { chainId: edges[1], action: 'swap', asset: 'USDT' },
+ { chainId: edges[1], action: 'bridge', asset: 'WETH' },
+ { chainId: HUB_CHAIN, action: 'swap', asset: 'cUSDT' },
+ ],
+ });
+ }
+ return out;
+}
+
+export async function getArbitrageOpportunities(): Promise {
+ const [sameChain, hubEdgeHub, threeChain] = await Promise.all([
+ getSameChainCycles(),
+ Promise.resolve(getHubEdgeHubCycles()),
+ Promise.resolve(getThreeChainCycles()),
+ ]);
+ return [...sameChain, ...hubEdgeHub, ...threeChain];
+}
diff --git a/services/token-aggregation/src/utils/iso4217-symbol-validator.ts b/services/token-aggregation/src/utils/iso4217-symbol-validator.ts
index 1ae00a6..93b0504 100644
--- a/services/token-aggregation/src/utils/iso4217-symbol-validator.ts
+++ b/services/token-aggregation/src/utils/iso4217-symbol-validator.ts
@@ -5,9 +5,6 @@
*/
import {
- FIN_CHAIN_SET,
- ISO4217_SUPPORTED,
- ASSET_TYPE_SET,
V0_TO_V1_SYMBOL_MAP,
isFinChainDesignator,
isISO4217Supported,
diff --git a/services/token-aggregation/src/utils/logger.ts b/services/token-aggregation/src/utils/logger.ts
new file mode 100644
index 0000000..a6cee99
--- /dev/null
+++ b/services/token-aggregation/src/utils/logger.ts
@@ -0,0 +1,18 @@
+import winston from 'winston';
+
+export const logger = winston.createLogger({
+ level: process.env.LOG_LEVEL || 'info',
+ format: winston.format.combine(
+ winston.format.timestamp(),
+ winston.format.errors({ stack: true }),
+ winston.format.json()
+ ),
+ transports: [
+ new winston.transports.Console({
+ format: winston.format.combine(
+ winston.format.colorize(),
+ winston.format.simple()
+ ),
+ }),
+ ],
+});
diff --git a/test/dbis/DBIS_Rail.t.sol b/test/dbis/DBIS_Rail.t.sol
index c817fe4..16d7a92 100644
--- a/test/dbis/DBIS_Rail.t.sol
+++ b/test/dbis/DBIS_Rail.t.sol
@@ -7,9 +7,10 @@ import "../../contracts/dbis/DBIS_RootRegistry.sol";
import "../../contracts/dbis/DBIS_ParticipantRegistry.sol";
import "../../contracts/dbis/DBIS_SignerRegistry.sol";
import "../../contracts/dbis/DBIS_GRU_MintController.sol";
+import "../../contracts/dbis/DBIS_EIP712Helper.sol";
import "../../contracts/dbis/DBIS_SettlementRouter.sol";
import "../../contracts/dbis/StablecoinReferenceRegistry.sol";
-import "../../contracts/tokens/CompliantFiatToken.sol";
+import "./MockMintableToken.sol";
contract DBIS_RailTest is Test, IDBISTypes {
DBIS_RootRegistry public root;
@@ -18,7 +19,7 @@ contract DBIS_RailTest is Test, IDBISTypes {
DBIS_GRU_MintController public mintController;
DBIS_SettlementRouter public router;
StablecoinReferenceRegistry public stableReg;
- CompliantFiatToken public token;
+ MockMintableToken public token;
address public admin;
address public alice;
@@ -41,7 +42,7 @@ contract DBIS_RailTest is Test, IDBISTypes {
participantReg = new DBIS_ParticipantRegistry(admin);
signerReg = new DBIS_SignerRegistry(admin);
mintController = new DBIS_GRU_MintController(admin, address(0));
- router = new DBIS_SettlementRouter(admin, address(root));
+ router = new DBIS_SettlementRouter(admin, address(root), address(new DBIS_EIP712Helper()));
stableReg = new StablecoinReferenceRegistry(admin);
root.setComponent(keccak256("ParticipantRegistry"), address(participantReg));
@@ -49,7 +50,7 @@ contract DBIS_RailTest is Test, IDBISTypes {
root.setComponent(keccak256("GRUMintController"), address(mintController));
mintController.setSettlementRouter(address(router));
- token = new CompliantFiatToken("Test GRU", "tGRU", 6, "USD", admin, admin, 0);
+ token = new MockMintableToken("Test GRU", "tGRU", 6, admin);
mintController.setGruToken(address(token));
token.grantRole(token.MINTER_ROLE(), address(mintController));
diff --git a/test/dbis/MockMintableToken.sol b/test/dbis/MockMintableToken.sol
new file mode 100644
index 0000000..91704cb
--- /dev/null
+++ b/test/dbis/MockMintableToken.sol
@@ -0,0 +1,24 @@
+// SPDX-License-Identifier: MIT
+pragma solidity ^0.8.20;
+
+import "@openzeppelin/contracts/token/ERC20/ERC20.sol";
+import "@openzeppelin/contracts/access/AccessControl.sol";
+
+contract MockMintableToken is ERC20, AccessControl {
+ bytes32 public constant MINTER_ROLE = keccak256("MINTER_ROLE");
+
+ constructor(string memory name, string memory symbol, uint8 decimals_, address admin)
+ ERC20(name, symbol)
+ {
+ _grantRole(DEFAULT_ADMIN_ROLE, admin);
+ _grantRole(MINTER_ROLE, admin);
+ }
+
+ function mint(address to, uint256 amount) external onlyRole(MINTER_ROLE) {
+ _mint(to, amount);
+ }
+
+ function decimals() public view virtual override returns (uint8) {
+ return 6;
+ }
+}