Complete markdown files cleanup and organization
- Organized 252 files across project - Root directory: 187 → 2 files (98.9% reduction) - Moved configuration guides to docs/04-configuration/ - Moved troubleshooting guides to docs/09-troubleshooting/ - Moved quick start guides to docs/01-getting-started/ - Moved reports to reports/ directory - Archived temporary files - Generated comprehensive reports and documentation - Created maintenance scripts and guides All files organized according to established standards.
This commit is contained in:
129
token-lists/scripts/checksum-addresses.js
Executable file
129
token-lists/scripts/checksum-addresses.js
Executable file
@@ -0,0 +1,129 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Address Checksum Validator and Fixer
|
||||
* Validates and optionally fixes EIP-55 checksummed addresses in token lists
|
||||
*/
|
||||
|
||||
import { readFileSync, writeFileSync } from 'fs';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { dirname, resolve } from 'path';
|
||||
import { ethers } from 'ethers';
|
||||
|
||||
// Emulate CommonJS __filename/__dirname in this ES module.
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
|
||||
|
||||
/**
 * Report whether `address` is a valid address in its canonical EIP-55
 * checksummed form. True only when ethers recognizes the address AND its
 * canonical checksummed rendering equals the input byte-for-byte.
 */
function isChecksummed(address) {
  try {
    if (!ethers.isAddress(address)) {
      return false;
    }
    return ethers.getAddress(address) === address;
  } catch {
    return false;
  }
}
|
||||
|
||||
/**
 * Return the EIP-55 checksummed form of `address`, or null when the input
 * cannot be interpreted as an address at all.
 */
function checksumAddress(address) {
  try {
    // Lowercase first so mixed-case inputs with a WRONG checksum are still
    // accepted; ethers.getAddress then applies the canonical checksum.
    const normalized = address.toLowerCase();
    return ethers.getAddress(normalized);
  } catch {
    return null;
  }
}
|
||||
|
||||
/**
 * Validate (and optionally fix) EIP-55 checksums for every token address
 * in a token list JSON file.
 *
 * @param {string} filePath - Path to the token list JSON file.
 * @param {boolean} [dryRun=true] - When true, only report issues; when
 *   false, rewrite the file in place with checksummed addresses.
 * @returns {number} Exit code: 1 if any address is structurally invalid,
 *   otherwise 0. Exits the process with 1 on read/parse failure.
 */
function validateAndFixAddresses(filePath, dryRun = true) {
  console.log(`\n🔍 ${dryRun ? 'Validating' : 'Fixing'} addresses in: ${filePath}\n`);

  // Read token list file
  let tokenList;
  try {
    const fileContent = readFileSync(filePath, 'utf-8');
    tokenList = JSON.parse(fileContent);
  } catch (error) {
    console.error('❌ Error reading or parsing token list file:');
    console.error(` ${error.message}`);
    process.exit(1);
  }

  const issues = []; // every address that is non-checksummed or invalid
  const fixed = [];  // addresses actually rewritten (only when !dryRun)

  // Check all addresses
  if (tokenList.tokens && Array.isArray(tokenList.tokens)) {
    tokenList.tokens.forEach((token, index) => {
      if (token.address) {
        if (!isChecksummed(token.address)) {
          const checksummed = checksumAddress(token.address);
          if (checksummed) {
            issues.push({
              index,
              token: token.symbol || token.name,
              original: token.address,
              checksummed,
              type: 'non-checksummed'
            });

            if (!dryRun) {
              // BUG FIX: capture the original address BEFORE overwriting
              // token.address — the previous code pushed token.address
              // after the assignment, so `original` and `fixed` recorded
              // the same (already fixed) value.
              const originalAddress = token.address;
              token.address = checksummed;
              fixed.push({
                index,
                token: token.symbol || token.name,
                original: originalAddress,
                fixed: checksummed
              });
            }
          } else {
            // Not even a parseable address — cannot be auto-fixed.
            issues.push({
              index,
              token: token.symbol || token.name,
              original: token.address,
              checksummed: null,
              type: 'invalid'
            });
          }
        }
      }
    });
  }

  // Report results
  if (issues.length === 0) {
    console.log('✅ All addresses are properly checksummed!\n');
    return 0;
  }

  console.log(`Found ${issues.length} address issue(s):\n`);
  issues.forEach(issue => {
    if (issue.type === 'invalid') {
      console.error(`❌ Token[${issue.index}] (${issue.token}): Invalid address format: ${issue.original}`);
    } else {
      console.log(`⚠️ Token[${issue.index}] (${issue.token}):`);
      console.log(` Original: ${issue.original}`);
      console.log(` Checksummed: ${issue.checksummed}`);
    }
  });

  if (!dryRun && fixed.length > 0) {
    console.log(`\n✏️ Fixed ${fixed.length} address(es)\n`);

    // Write back to file (pretty-printed, trailing newline preserved).
    writeFileSync(filePath, JSON.stringify(tokenList, null, 2) + '\n', 'utf-8');
    console.log(`✅ Updated file: ${filePath}\n`);
  } else if (dryRun && issues.some(i => i.type !== 'invalid')) {
    console.log(`\n💡 Run with --fix to automatically fix checksummed addresses\n`);
  }

  // Only structurally invalid addresses are treated as fatal.
  return issues.some(i => i.type === 'invalid') ? 1 : 0;
}
|
||||
|
||||
// Main
|
||||
// Main — CLI entry point.
// Usage: node checksum-addresses.js [path/to/token-list.json] [--fix]
const args = process.argv.slice(2);
const positional = args.find(arg => !arg.startsWith('--'));
const filePath = positional || resolve(__dirname, '../lists/dbis-138.tokenlist.json');
const dryRun = !args.includes('--fix');

// NOTE(review): filePath always receives the fallback default above, so
// this usage branch is unreachable in practice; retained for parity.
if (!filePath) {
  console.error('Usage: node checksum-addresses.js [path/to/token-list.json] [--fix]');
  process.exit(1);
}

const exitCode = validateAndFixAddresses(filePath, dryRun);
process.exit(exitCode);
|
||||
|
||||
202
token-lists/scripts/release.sh
Executable file
202
token-lists/scripts/release.sh
Executable file
@@ -0,0 +1,202 @@
|
||||
#!/usr/bin/env bash
|
||||
# Release automation script for token lists
|
||||
# Handles version bumping, validation, and release preparation
|
||||
|
||||
set -euo pipefail

# Paths are derived from this script's own location so it can be invoked
# from any working directory.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
TOKEN_LISTS_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
LISTS_DIR="$TOKEN_LISTS_DIR/lists"
TOKEN_LIST_FILE="$LISTS_DIR/dbis-138.tokenlist.json"
CHANGELOG_FILE="$TOKEN_LISTS_DIR/docs/CHANGELOG.md"

# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'

# Leveled loggers: colored tag + message to stdout.
log_info() { echo -e "${BLUE}[INFO]${NC} $1"; }
log_success() { echo -e "${GREEN}[✓]${NC} $1"; }
log_warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
log_error() { echo -e "${RED}[ERROR]${NC} $1"; }

# Check if jq is available (needed for all JSON manipulation below)
if ! command -v jq &> /dev/null; then
  log_error "jq is required but not installed"
  exit 1
fi

# Check if node is available (needed to run the validation scripts)
if ! command -v node &> /dev/null; then
  log_error "node is required but not installed"
  exit 1
fi
|
||||
|
||||
# Get current version
|
||||
# Print the current semantic version of the token list ("major.minor.patch")
# on stdout, read from $TOKEN_LIST_FILE.
get_current_version() {
  local major minor patch
  major=$(jq -r '.version.major' "$TOKEN_LIST_FILE")
  minor=$(jq -r '.version.minor' "$TOKEN_LIST_FILE")
  patch=$(jq -r '.version.patch' "$TOKEN_LIST_FILE")
  echo "${major}.${minor}.${patch}"
}
|
||||
|
||||
# Bump version
|
||||
# Bump the semantic version stored in the token list JSON.
# $1: bump type — "major", "minor", or "patch" (default: patch).
# Rewrites $TOKEN_LIST_FILE in place and prints the new
# "major.minor.patch" string on stdout.
bump_version() {
  local bump_type="${1:-patch}" # major, minor, patch
  local current_major current_minor current_patch

  current_major=$(jq -r '.version.major' "$TOKEN_LIST_FILE")
  current_minor=$(jq -r '.version.minor' "$TOKEN_LIST_FILE")
  current_patch=$(jq -r '.version.patch' "$TOKEN_LIST_FILE")

  # BUG FIX: use the $((...)) assignment form instead of ((var++)).
  # Under `set -e`, ((current_patch++)) returns exit status 1 whenever the
  # pre-increment value is 0, which silently aborted the whole script on
  # any 0→1 bump (e.g. patch 0 → 1).
  case "$bump_type" in
    major)
      current_major=$((current_major + 1))
      current_minor=0
      current_patch=0
      ;;
    minor)
      current_minor=$((current_minor + 1))
      current_patch=0
      ;;
    patch)
      current_patch=$((current_patch + 1))
      ;;
    *)
      log_error "Invalid bump type: $bump_type (must be major, minor, or patch)"
      exit 1
      ;;
  esac

  # Update version in JSON via a temp file so a failed jq never truncates
  # the token list. Declare and assign separately so mktemp's exit status
  # is not masked by `local`.
  local tmp_file
  tmp_file=$(mktemp)
  jq --arg major "$current_major" --arg minor "$current_minor" --arg patch "$current_patch" \
    '.version.major = ($major | tonumber) | .version.minor = ($minor | tonumber) | .version.patch = ($patch | tonumber)' \
    "$TOKEN_LIST_FILE" > "$tmp_file"
  mv "$tmp_file" "$TOKEN_LIST_FILE"

  echo "$current_major.$current_minor.$current_patch"
}
|
||||
|
||||
# Update timestamp
|
||||
# Stamp the token list with the current UTC time (ISO-8601, milliseconds
# zeroed) and rewrite $TOKEN_LIST_FILE in place via a temp file.
update_timestamp() {
  local timestamp tmp_file
  timestamp="$(date -u +"%Y-%m-%dT%H:%M:%S.000Z")"
  tmp_file="$(mktemp)"
  jq --arg ts "$timestamp" '.timestamp = $ts' "$TOKEN_LIST_FILE" > "$tmp_file"
  mv "$tmp_file" "$TOKEN_LIST_FILE"
  log_success "Updated timestamp to: $timestamp"
}
|
||||
|
||||
# Run all validations
|
||||
# Run schema, checksum, and logo validations against the token list.
# Schema and checksum failures are fatal (returns 1); logo problems only
# produce a warning.
run_validations() {
  log_info "Running validations..."

  # JSON schema and basic validation
  log_info "Validating token list schema..."
  node "$SCRIPT_DIR/validate-token-list.js" "$TOKEN_LIST_FILE" || {
    log_error "Schema validation failed"
    return 1
  }

  # Checksum validation
  log_info "Validating address checksums..."
  node "$SCRIPT_DIR/checksum-addresses.js" "$TOKEN_LIST_FILE" || {
    log_error "Checksum validation failed"
    return 1
  }

  # Logo validation (non-blocking)
  log_info "Validating logos..."
  node "$SCRIPT_DIR/validate-logos.js" "$TOKEN_LIST_FILE" || log_warn "Logo validation had issues (continuing)"

  log_success "All critical validations passed"
  return 0
}
|
||||
|
||||
# Generate release notes from CHANGELOG
|
||||
# Look for a "## [version]" section in CHANGELOG.md matching $1 and report
# whether release notes exist. Never fatal — warnings only.
generate_release_notes() {
  local version="$1"

  if [[ ! -f "$CHANGELOG_FILE" ]]; then
    log_warn "CHANGELOG.md not found, skipping release notes generation"
    return
  fi

  # Extract section for this version
  log_info "Extracting release notes for version $version from CHANGELOG.md"
  # This is a simple implementation - you might want to use a more sophisticated parser
  if ! grep -q "## \[$version\]" "$CHANGELOG_FILE"; then
    log_warn "No release notes found for version $version in CHANGELOG.md"
  else
    log_success "Release notes found in CHANGELOG.md"
  fi
}
|
||||
|
||||
# Main release function
|
||||
# Main release flow: check preconditions, bump the version, refresh the
# timestamp, run validations (unless skipped), and print follow-up steps.
# $1: bump type (major|minor|patch; default patch)
# $2: any non-empty value skips validations (callers pass --skip-validation)
main() {
  local bump_type="${1:-patch}"
  local skip_validation="${2:-}"

  log_info "========================================="
  log_info "Token List Release Preparation"
  log_info "========================================="
  log_info ""

  # Check if file exists
  if [[ ! -f "$TOKEN_LIST_FILE" ]]; then
    log_error "Token list file not found: $TOKEN_LIST_FILE"
    exit 1
  fi

  local current_version=$(get_current_version)
  log_info "Current version: $current_version"

  # Bump version
  log_info "Bumping $bump_type version..."
  local new_version=$(bump_version "$bump_type")
  log_success "New version: $new_version"

  # Update timestamp
  update_timestamp

  # Run validations (unless skipped).
  # NOTE(review): the version bump and timestamp are already written to
  # disk at this point — a failed validation leaves the bumped file in
  # place for the caller to fix or revert.
  if [[ -z "$skip_validation" ]]; then
    if ! run_validations; then
      log_error "Validation failed. Release aborted."
      log_info "You can fix the issues and run again, or use --skip-validation to bypass"
      exit 1
    fi
  else
    log_warn "Skipping validations (--skip-validation flag)"
  fi

  # Generate release notes
  generate_release_notes "$new_version"

  log_info ""
  log_success "Release preparation complete!"
  log_info ""
  log_info "Next steps:"
  log_info "1. Review changes: git diff $TOKEN_LIST_FILE"
  log_info "2. Commit: git add $TOKEN_LIST_FILE"
  log_info "3. Create tag: git tag -a v$new_version -m \"Release v$new_version\""
  log_info "4. Push: git push && git push --tags"
  log_info ""
  log_info "Or run the signing script: $SCRIPT_DIR/sign-list.sh"
}
|
||||
|
||||
# Parse arguments
|
||||
# Argument handling: show usage on --help/-h, otherwise run the release
# flow with the requested bump type and optional validation skip.
case "${1:-}" in
  --help|-h)
    echo "Usage: $0 [bump-type] [--skip-validation]"
    echo ""
    echo "Bump types: major, minor, patch (default: patch)"
    echo ""
    echo "Examples:"
    echo " $0 patch # Bump patch version"
    echo " $0 minor # Bump minor version"
    echo " $0 major # Bump major version"
    echo " $0 patch --skip-validation # Skip validation checks"
    exit 0
    ;;
esac

main "${1:-patch}" "${2:-}"
|
||||
|
||||
180
token-lists/scripts/sign-list.sh
Executable file
180
token-lists/scripts/sign-list.sh
Executable file
@@ -0,0 +1,180 @@
|
||||
#!/usr/bin/env bash
|
||||
# minisign signing script for token lists
|
||||
# Signs token list files for integrity verification
|
||||
|
||||
set -euo pipefail

# Paths are derived from this script's own location so it can be invoked
# from any working directory.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
TOKEN_LISTS_DIR="$(cd "$SCRIPT_DIR/.." && pwd)"
LISTS_DIR="$TOKEN_LISTS_DIR/lists"
TOKEN_LIST_FILE="$LISTS_DIR/dbis-138.tokenlist.json"
PUBLIC_KEY_FILE="$TOKEN_LISTS_DIR/minisign.pub"
SIGNATURE_FILE="${TOKEN_LIST_FILE}.sig"

# Colors
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'

# Leveled loggers: colored tag + message to stdout.
log_info() { echo -e "${BLUE}[INFO]${NC} $1"; }
log_success() { echo -e "${GREEN}[✓]${NC} $1"; }
log_warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
log_error() { echo -e "${RED}[ERROR]${NC} $1"; }

# Check if minisign is available — required by every subcommand below.
if ! command -v minisign &> /dev/null; then
  log_error "minisign is required but not installed"
  log_info "Installation:"
  log_info " macOS: brew install minisign"
  log_info " Ubuntu/Debian: apt-get install minisign"
  log_info " From source: https://github.com/jedisct1/minisign"
  exit 1
fi
|
||||
|
||||
# Generate keypair (only if keys don't exist)
|
||||
# Generate a minisign keypair unless a private key already exists.
# The private key path comes from $MINISIGN_PRIVATE_KEY_FILE (default:
# $TOKEN_LISTS_DIR/minisign.key); the public key goes to $PUBLIC_KEY_FILE.
generate_keypair() {
  local private_key_file="${MINISIGN_PRIVATE_KEY_FILE:-$TOKEN_LISTS_DIR/minisign.key}"

  # Refuse to overwrite an existing private key.
  if [[ -f "$private_key_file" ]]; then
    log_warn "Private key already exists: $private_key_file"
    log_info "Skipping key generation"
    return 0
  fi

  log_info "Generating minisign keypair..."
  log_info "Private key will be saved to: $private_key_file"
  log_info "Public key will be saved to: $PUBLIC_KEY_FILE"
  log_warn "Keep the private key secure and never commit it to the repository!"

  # Generate keypair (minisign will prompt for password)
  if ! minisign -G -s "$private_key_file" -p "$PUBLIC_KEY_FILE"; then
    log_error "Failed to generate keypair"
    exit 1
  fi

  log_success "Keypair generated successfully"
  log_info ""
  log_info "Next steps:"
  log_info "1. Store the private key securely (e.g., password manager, secure vault)"
  log_info "2. Add private key to GitHub Secrets as MINISIGN_PRIVATE_KEY"
  log_info "3. Commit the public key: git add $PUBLIC_KEY_FILE"
  log_info "4. Set MINISIGN_PRIVATE_KEY_FILE environment variable if using custom path"
}
|
||||
|
||||
# Sign token list
|
||||
# Sign $TOKEN_LIST_FILE with minisign, writing the signature to
# $SIGNATURE_FILE. Private key resolution order:
#   1. MINISIGN_PRIVATE_KEY       — raw key content (intended for CI/CD)
#   2. MINISIGN_PRIVATE_KEY_FILE  — key file path (default: minisign.key)
sign_list() {
  local private_key_file="${MINISIGN_PRIVATE_KEY_FILE:-$TOKEN_LISTS_DIR/minisign.key}"
  local private_key_content="${MINISIGN_PRIVATE_KEY:-}"

  if [[ ! -f "$TOKEN_LIST_FILE" ]]; then
    log_error "Token list file not found: $TOKEN_LIST_FILE"
    exit 1
  fi

  log_info "Signing token list: $TOKEN_LIST_FILE"

  # Check if private key exists or is provided via environment
  if [[ -n "$private_key_content" ]]; then
    # Use private key from environment variable.
    # NOTE(review): this pipes the secret key through echo into
    # /dev/stdin; confirm the installed minisign version accepts a secret
    # key on stdin and that no interactive password prompt is required in
    # the CI environment.
    log_info "Using private key from MINISIGN_PRIVATE_KEY environment variable"
    echo "$private_key_content" | minisign -S -s /dev/stdin -m "$TOKEN_LIST_FILE" -x "$SIGNATURE_FILE" || {
      log_error "Failed to sign token list"
      exit 1
    }
  elif [[ -f "$private_key_file" ]]; then
    # Use private key file
    minisign -S -s "$private_key_file" -m "$TOKEN_LIST_FILE" -x "$SIGNATURE_FILE" || {
      log_error "Failed to sign token list"
      exit 1
    }
  else
    log_error "Private key not found"
    log_info "Provide private key via:"
    log_info " 1. File: Set MINISIGN_PRIVATE_KEY_FILE environment variable"
    log_info " 2. Environment: Set MINISIGN_PRIVATE_KEY environment variable"
    log_info " 3. Generate new: Run '$0 --generate-key'"
    exit 1
  fi

  log_success "Token list signed successfully"
  log_info "Signature file: $SIGNATURE_FILE"

  # Display signature info
  if [[ -f "$SIGNATURE_FILE" ]]; then
    log_info ""
    log_info "Signature preview:"
    # First two lines of the signature, capped at 100 bytes by the second
    # head (-c applies to the already-truncated two-line stream).
    head -n 2 "$SIGNATURE_FILE" | head -c 100
    echo "..."
    log_info ""
  fi
}
|
||||
|
||||
# Verify signature
|
||||
# Verify $SIGNATURE_FILE against $TOKEN_LIST_FILE using the committed
# public key. Returns 0 on success, 1 on verification failure; exits 1
# when a required file is missing.
verify_signature() {
  # Bail out early when required artifacts are absent.
  [[ -f "$TOKEN_LIST_FILE" ]] || { log_error "Token list file not found: $TOKEN_LIST_FILE"; exit 1; }
  [[ -f "$SIGNATURE_FILE" ]] || { log_error "Signature file not found: $SIGNATURE_FILE"; exit 1; }

  if [[ ! -f "$PUBLIC_KEY_FILE" ]]; then
    log_error "Public key file not found: $PUBLIC_KEY_FILE"
    log_info "Public key should be at: $PUBLIC_KEY_FILE"
    exit 1
  fi

  log_info "Verifying signature..."

  if ! minisign -V -p "$PUBLIC_KEY_FILE" -m "$TOKEN_LIST_FILE" -x "$SIGNATURE_FILE"; then
    log_error "Signature verification failed!"
    return 1
  fi

  log_success "Signature verification passed!"
  return 0
}
|
||||
|
||||
# Main
|
||||
# Dispatch on the first CLI argument. Defaults to "sign".
main() {
  local command="${1:-sign}"

  case "$command" in
    --generate-key|-g)
      generate_keypair
      ;;
    --sign|-s|sign)
      sign_list
      ;;
    --verify|-v|verify)
      verify_signature
      ;;
    *)
      echo "Usage: $0 [command]"
      echo ""
      echo "Commands:"
      echo " sign, -s Sign the token list (default)"
      echo " verify, -v Verify the signature"
      echo " --generate-key, -g Generate a new keypair"
      echo ""
      echo "Environment variables:"
      echo " MINISIGN_PRIVATE_KEY_FILE Path to private key file"
      echo " MINISIGN_PRIVATE_KEY Private key content (for CI/CD)"
      exit 1
      ;;
  esac
}

main "${1:-sign}"
|
||||
|
||||
176
token-lists/scripts/validate-chainlists.js
Executable file
176
token-lists/scripts/validate-chainlists.js
Executable file
@@ -0,0 +1,176 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Validates chain configuration for Chainlists submission
|
||||
*/
|
||||
|
||||
import { readFileSync } from 'fs';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { dirname, resolve } from 'path';
|
||||
|
||||
// Emulate CommonJS __filename/__dirname in this ES module.
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

// Top-level keys that must be present in a Chainlists chain descriptor.
const REQUIRED_FIELDS = [
  'name',
  'chain',
  'chainId',
  'networkId',
  'rpc',
  'nativeCurrency'
];
|
||||
|
||||
/**
 * Validate a Chainlists chain descriptor JSON file for chain 138.
 *
 * Always exits the process: code 1 on read/parse failure or any
 * validation error, code 0 when valid (warnings are allowed and printed).
 *
 * @param {string} filePath - Path to the chain config JSON file.
 */
function validateChainConfig(filePath) {
  console.log(`\n🔍 Validating chain configuration: ${filePath}\n`);

  let config;
  try {
    const fileContent = readFileSync(filePath, 'utf-8');
    config = JSON.parse(fileContent);
  } catch (error) {
    console.error('❌ Error reading or parsing chain config file:');
    console.error(` ${error.message}`);
    process.exit(1);
  }

  const errors = [];   // fatal — cause exit(1)
  const warnings = []; // advisory — reported but never fatal

  // Validate required fields
  REQUIRED_FIELDS.forEach(field => {
    if (!(field in config)) {
      errors.push(`Missing required field: ${field}`);
    }
  });

  // Validate chainId — this tool is specific to chain 138.
  if (config.chainId !== 138) {
    errors.push(`chainId must be 138, got ${config.chainId}`);
  }

  // Validate networkId (mismatch is only a warning, not an error)
  if (config.networkId && config.networkId !== 138) {
    warnings.push(`networkId should match chainId (138), got ${config.networkId}`);
  }

  // Validate RPC URLs
  if (!Array.isArray(config.rpc) || config.rpc.length === 0) {
    errors.push('rpc must be a non-empty array');
  } else {
    config.rpc.forEach((url, index) => {
      if (typeof url !== 'string' || (!url.startsWith('http://') && !url.startsWith('https://'))) {
        errors.push(`rpc[${index}] must be a valid HTTP/HTTPS URL`);
      }
      // Plain HTTP is accepted but discouraged.
      if (url.startsWith('http://')) {
        warnings.push(`rpc[${index}] should use HTTPS, not HTTP: ${url}`);
      }
    });
  }

  // Validate nativeCurrency (presence itself is enforced above via
  // REQUIRED_FIELDS; these checks cover its shape and expected values)
  if (config.nativeCurrency) {
    if (!config.nativeCurrency.symbol) {
      errors.push('nativeCurrency.symbol is required');
    }
    if (typeof config.nativeCurrency.decimals !== 'number') {
      errors.push('nativeCurrency.decimals must be a number');
    }
    if (config.nativeCurrency.symbol !== 'ETH') {
      warnings.push(`Expected nativeCurrency.symbol to be "ETH", got "${config.nativeCurrency.symbol}"`);
    }
    if (config.nativeCurrency.decimals !== 18) {
      warnings.push(`Expected nativeCurrency.decimals to be 18, got ${config.nativeCurrency.decimals}`);
    }
  }

  // Validate explorers (optional but recommended)
  if (config.explorers && Array.isArray(config.explorers)) {
    config.explorers.forEach((explorer, index) => {
      if (!explorer.url) {
        errors.push(`explorers[${index}].url is required`);
      }
      if (explorer.url && !explorer.url.startsWith('https://')) {
        warnings.push(`explorers[${index}].url should use HTTPS: ${explorer.url}`);
      }
      if (!explorer.name) {
        warnings.push(`explorers[${index}].name is recommended`);
      }
      if (!explorer.standard) {
        warnings.push(`explorers[${index}].standard is recommended (e.g., "EIP3091")`);
      }
    });
  } else {
    warnings.push('No explorers configured (recommended for better UX)');
  }

  // Validate shortName
  if (config.shortName && typeof config.shortName !== 'string') {
    errors.push('shortName must be a string');
  } else if (!config.shortName) {
    warnings.push('shortName is recommended');
  }

  // Validate icon (optional)
  if (config.icon && !config.icon.startsWith('https://') && !config.icon.startsWith('ipfs://')) {
    warnings.push(`icon should use HTTPS or IPFS URL: ${config.icon}`);
  }

  // Report results — any error is fatal; warnings are informational.
  if (errors.length > 0) {
    console.error('❌ Validation failed!\n');
    console.error('Errors:');
    errors.forEach(error => console.error(` ❌ ${error}`));
    console.log('');
    if (warnings.length > 0) {
      console.log('⚠️ Warnings:');
      warnings.forEach(warning => console.log(` ⚠️ ${warning}`));
      console.log('');
    }
    process.exit(1);
  }

  console.log('✅ Chain configuration is valid!\n');

  if (warnings.length > 0) {
    console.log('⚠️ Warnings:');
    warnings.forEach(warning => console.log(` ⚠️ ${warning}`));
    console.log('');
  }

  // Human-readable summary of the accepted configuration.
  // (config.rpc is safe to dereference here — a missing/empty rpc array
  // would have produced an error and exited above.)
  console.log('📋 Configuration Summary:');
  console.log(` Name: ${config.name}`);
  console.log(` Chain: ${config.chain}`);
  console.log(` Short Name: ${config.shortName || '(not set)'}`);
  console.log(` Chain ID: ${config.chainId}`);
  console.log(` Network ID: ${config.networkId || '(not set)'}`);
  console.log(` RPC URLs: ${config.rpc.length}`);
  config.rpc.forEach((url, i) => console.log(` ${i + 1}. ${url}`));
  if (config.explorers && config.explorers.length > 0) {
    console.log(` Explorers: ${config.explorers.length}`);
    config.explorers.forEach((exp, i) => {
      console.log(` ${i + 1}. ${exp.name || '(unnamed)'}: ${exp.url}`);
      if (exp.standard) {
        console.log(` Standard: ${exp.standard}`);
      }
    });
  }
  if (config.nativeCurrency) {
    console.log(` Native Currency: ${config.nativeCurrency.symbol} (${config.nativeCurrency.decimals} decimals)`);
  }
  if (config.infoURL) {
    console.log(` Info URL: ${config.infoURL}`);
  }
  console.log('');

  process.exit(0);
}
|
||||
|
||||
// Main
|
||||
// Main — CLI entry point; defaults to the bundled chain-138 descriptor.
const filePath = process.argv[2] || resolve(__dirname, '../chainlists/chain-138.json');

// NOTE(review): filePath always receives the fallback default above, so
// this usage branch is unreachable in practice; retained as-is.
if (!filePath) {
  console.error('Usage: node validate-chainlists.js [path/to/chain.json]');
  process.exit(1);
}

validateChainConfig(filePath);
|
||||
|
||||
135
token-lists/scripts/validate-logos.js
Executable file
135
token-lists/scripts/validate-logos.js
Executable file
@@ -0,0 +1,135 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Logo URL Validator
|
||||
* Validates that all logoURI URLs are accessible and return image content
|
||||
*/
|
||||
|
||||
import { readFileSync } from 'fs';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { dirname, resolve } from 'path';
|
||||
|
||||
// Emulate CommonJS __filename/__dirname in this ES module.
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

// Maximum accepted logo payload (checked against the HEAD Content-Length).
const MAX_LOGO_SIZE = 500 * 1024; // 500KB
// Content-Type values accepted as image payloads.
const IMAGE_MIME_TYPES = ['image/png', 'image/jpeg', 'image/jpg', 'image/svg+xml', 'image/webp', 'image/gif'];
|
||||
|
||||
/**
 * Validate a single logo URI.
 *
 * HTTPS URLs are probed with a HEAD request and checked for status,
 * image Content-Type, and size; ipfs:// URIs only get a format check;
 * any other scheme is flagged.
 *
 * @param {string} logoURI - The logo URL to validate.
 * @param {string} tokenInfo - Human-readable token label. Currently unused
 *   in the body; kept so the caller-facing signature stays stable.
 * @returns {Promise<string[]>} Human-readable issues (empty array = OK).
 */
async function validateLogo(logoURI, tokenInfo) {
  const issues = [];

  // Check protocol
  if (!logoURI.startsWith('https://') && !logoURI.startsWith('ipfs://')) {
    issues.push(`URL should use HTTPS or IPFS (got: ${logoURI.substring(0, 20)}...)`);
  }

  // For HTTPS URLs, validate accessibility
  if (logoURI.startsWith('https://')) {
    try {
      const response = await fetch(logoURI, { method: 'HEAD' });

      if (!response.ok) {
        issues.push(`HTTP ${response.status}: ${response.statusText}`);
      } else {
        const contentType = response.headers.get('content-type');
        const contentLength = response.headers.get('content-length');

        if (contentType && !IMAGE_MIME_TYPES.some(mime => contentType.includes(mime))) {
          issues.push(`Invalid Content-Type: ${contentType} (expected image/*)`);
        }

        // FIX: parse the length once, with an explicit radix — the
        // original called parseInt twice and without a radix argument.
        if (contentLength) {
          const sizeBytes = Number.parseInt(contentLength, 10);
          if (Number.isFinite(sizeBytes) && sizeBytes > MAX_LOGO_SIZE) {
            issues.push(`Logo too large: ${(sizeBytes / 1024).toFixed(2)}KB (max ${MAX_LOGO_SIZE / 1024}KB)`);
          }
        }
      }
    } catch (error) {
      issues.push(`Failed to fetch: ${error.message}`);
    }
  } else if (logoURI.startsWith('ipfs://')) {
    // IPFS URLs are valid but we can't easily validate them
    // Just check format
    if (!logoURI.match(/^ipfs:\/\/[a-zA-Z0-9]+/)) {
      issues.push('Invalid IPFS URL format');
    }
  }

  return issues;
}
|
||||
|
||||
/**
 * Validate the list-level logoURI plus every token's logoURI in a token
 * list file. Logos are checked one at a time (sequential awaits).
 *
 * @param {string} filePath - Path to the token list JSON file.
 * @returns {Promise<number>} 0 when every logo is valid, 1 otherwise.
 *   Exits the process with code 1 if the file cannot be read or parsed.
 */
async function validateLogos(filePath) {
  console.log(`\n🖼️ Validating logos in: ${filePath}\n`);

  // Read token list file
  let tokenList;
  try {
    const fileContent = readFileSync(filePath, 'utf-8');
    tokenList = JSON.parse(fileContent);
  } catch (error) {
    console.error('❌ Error reading or parsing token list file:');
    console.error(` ${error.message}`);
    process.exit(1);
  }

  const results = [];  // one entry per logo that had at least one issue
  let totalIssues = 0; // total issue count across all logos

  // Validate top-level logoURI
  if (tokenList.logoURI) {
    console.log('Validating list logoURI...');
    const issues = await validateLogo(tokenList.logoURI, 'List');
    if (issues.length > 0) {
      results.push({ type: 'list', uri: tokenList.logoURI, issues });
      totalIssues += issues.length;
    }
  }

  // Validate token logos — sequential awaits (one HEAD request at a time)
  if (tokenList.tokens && Array.isArray(tokenList.tokens)) {
    for (const [index, token] of tokenList.tokens.entries()) {
      if (token.logoURI) {
        const tokenInfo = `${token.symbol || token.name} (Token[${index}])`;
        const issues = await validateLogo(token.logoURI, tokenInfo);
        if (issues.length > 0) {
          results.push({ type: 'token', token: tokenInfo, uri: token.logoURI, issues });
          totalIssues += issues.length;
        }
      }
    }
  }

  // Report results
  if (totalIssues === 0) {
    console.log('✅ All logos are valid!\n');
    return 0;
  }

  console.log(`Found ${totalIssues} logo issue(s):\n`);
  results.forEach(result => {
    if (result.type === 'list') {
      console.log(`❌ List logoURI: ${result.uri}`);
    } else {
      console.log(`❌ ${result.token}: ${result.uri}`);
    }
    result.issues.forEach(issue => {
      console.log(` - ${issue}`);
    });
    console.log('');
  });

  return 1;
}
|
||||
|
||||
// Main
|
||||
// Main — CLI entry point; defaults to the bundled token list.
const filePath = process.argv[2] || resolve(__dirname, '../lists/dbis-138.tokenlist.json');

// NOTE(review): filePath always receives the fallback default above, so
// this usage branch is unreachable in practice; retained as-is.
if (!filePath) {
  console.error('Usage: node validate-logos.js [path/to/token-list.json]');
  process.exit(1);
}

// Propagate the validator's exit code; any unexpected rejection is fatal.
validateLogos(filePath).then(exitCode => {
  process.exit(exitCode);
}).catch(error => {
  console.error('Unexpected error:', error);
  process.exit(1);
});
|
||||
|
||||
287
token-lists/scripts/validate-token-list.js
Executable file
287
token-lists/scripts/validate-token-list.js
Executable file
@@ -0,0 +1,287 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Enhanced Token List Validator
|
||||
* Validates token lists against the Uniswap Token Lists JSON schema
|
||||
* Based on: https://github.com/Uniswap/token-lists
|
||||
* Schema: https://uniswap.org/tokenlist.schema.json
|
||||
*
|
||||
* Enhanced with:
|
||||
* - EIP-55 checksum validation
|
||||
* - Duplicate detection
|
||||
* - Logo URL validation
|
||||
* - Chain ID strict validation
|
||||
* - Semantic versioning validation
|
||||
*/
|
||||
|
||||
import { readFileSync } from 'fs';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { dirname, resolve } from 'path';
|
||||
import { ethers } from 'ethers';
|
||||
|
||||
// Emulate CommonJS __filename/__dirname in this ES module.
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);

// Required chain ID — every token entry must declare exactly this chain.
const REQUIRED_CHAIN_ID = 138;

// Fetch schema from Uniswap — canonical token-list JSON schema URL,
// downloaded at runtime by fetchSchema().
const SCHEMA_URL = 'https://uniswap.org/tokenlist.schema.json';
|
||||
|
||||
/**
 * Download the Uniswap token-list JSON schema from SCHEMA_URL.
 *
 * @returns {Promise<object|null>} The parsed schema object, or null when
 *   the download fails (callers then fall back to basic validation).
 */
async function fetchSchema() {
  try {
    const response = await fetch(SCHEMA_URL);
    if (!response.ok) {
      throw new Error(`Failed to fetch schema: ${response.statusText}`);
    }
    const schema = await response.json();
    return schema;
  } catch (error) {
    // Non-fatal: report and let the caller degrade gracefully.
    console.error('Error fetching schema:', error.message);
    console.error('Falling back to basic validation...');
    return null;
  }
}
|
||||
|
||||
// Validate EIP-55 checksum
|
||||
function isChecksummed(address) {
|
||||
try {
|
||||
return ethers.isAddress(address) && address === ethers.getAddress(address);
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// Basic validation with enhanced checks
/**
 * Structurally validate a token list beyond the JSON schema: required fields,
 * chain-id pinning, address format and EIP-55 checksums, duplicate
 * address/symbol detection, decimals range, and logoURI sanity.
 *
 * @param {object} tokenList - parsed token-list JSON
 * @returns {{errors: string[], warnings: string[], valid: boolean}}
 *   `valid` is true only when no errors were collected; warnings never
 *   affect validity.
 */
function enhancedValidation(tokenList) {
  const errors = [];
  const warnings = [];
  const seenAddresses = new Set();
  const seenSymbols = new Map(); // chainId -> Set of symbols

  // Required fields
  if (!tokenList.name || typeof tokenList.name !== 'string') {
    errors.push('Missing or invalid "name" field');
  }

  if (!tokenList.version) {
    errors.push('Missing "version" field');
  } else {
    if (typeof tokenList.version.major !== 'number') {
      errors.push('version.major must be a number');
    }
    if (typeof tokenList.version.minor !== 'number') {
      errors.push('version.minor must be a number');
    }
    if (typeof tokenList.version.patch !== 'number') {
      errors.push('version.patch must be a number');
    }
  }

  if (!tokenList.tokens || !Array.isArray(tokenList.tokens)) {
    errors.push('Missing or invalid "tokens" array');
    // Without a token array there is nothing further to validate.
    return { errors, warnings, valid: false };
  }

  // Validate each token
  tokenList.tokens.forEach((token, index) => {
    const prefix = `Token[${index}]`;

    // Required token fields
    if (typeof token.chainId !== 'number') {
      errors.push(`${prefix}: Missing or invalid "chainId"`);
    } else if (token.chainId !== REQUIRED_CHAIN_ID) {
      // Strict chain ID validation: this list is pinned to a single chain.
      errors.push(`${prefix}: chainId must be ${REQUIRED_CHAIN_ID}, got ${token.chainId}`);
    }

    if (!token.address || typeof token.address !== 'string') {
      errors.push(`${prefix}: Missing or invalid "address"`);
    } else if (!/^0x[a-fA-F0-9]{40}$/.test(token.address)) {
      // Validate Ethereum address format before any checksum work.
      errors.push(`${prefix}: Invalid Ethereum address format: ${token.address}`);
    } else {
      // EIP-55 checksum validation
      if (!isChecksummed(token.address)) {
        errors.push(`${prefix}: Address not EIP-55 checksummed: ${token.address}`);
      }

      // Duplicate address detection — compared lowercase, since two entries
      // may differ only in checksum casing.
      const addressLower = token.address.toLowerCase();
      if (seenAddresses.has(addressLower)) {
        errors.push(`${prefix}: Duplicate address: ${token.address}`);
      }
      seenAddresses.add(addressLower);
    }

    if (!token.name || typeof token.name !== 'string') {
      errors.push(`${prefix}: Missing or invalid "name"`);
    }

    if (!token.symbol || typeof token.symbol !== 'string') {
      errors.push(`${prefix}: Missing or invalid "symbol"`);
    } else {
      // Symbol uniqueness per chainId (case-insensitive; warning only,
      // because symbol collisions are legal in token lists).
      const chainId = token.chainId || 0;
      if (!seenSymbols.has(chainId)) {
        seenSymbols.set(chainId, new Set());
      }
      const symbolSet = seenSymbols.get(chainId);
      if (symbolSet.has(token.symbol.toUpperCase())) {
        warnings.push(`${prefix}: Duplicate symbol "${token.symbol}" on chainId ${chainId}`);
      }
      symbolSet.add(token.symbol.toUpperCase());
    }

    if (typeof token.decimals !== 'number' || token.decimals < 0 || token.decimals > 255) {
      errors.push(`${prefix}: Invalid "decimals" (must be 0-255), got ${token.decimals}`);
    }

    // Optional fields (warnings)
    if (!token.logoURI) {
      warnings.push(`${prefix}: Missing "logoURI" (optional but recommended)`);
    } else if (typeof token.logoURI !== 'string') {
      warnings.push(`${prefix}: Invalid "logoURI" type`);
    } else if (!token.logoURI.startsWith('http://') &&
               !token.logoURI.startsWith('https://') &&
               !token.logoURI.startsWith('ipfs://')) {
      warnings.push(`${prefix}: Invalid "logoURI" format (should be HTTP/HTTPS/IPFS URL): ${token.logoURI}`);
    } else if (token.logoURI.startsWith('http://')) {
      // Scheme is known to be http/https/ipfs here, so plain http:// is the
      // only insecure case left. (Previously a redundant !https && !ipfs test.)
      warnings.push(`${prefix}: logoURI should use HTTPS (not HTTP): ${token.logoURI}`);
    }
  });

  return { errors, warnings, valid: errors.length === 0 };
}
|
||||
|
||||
/**
 * Validate a token-list JSON file and print a human-readable report.
 *
 * Flow: read and parse the file, try schema validation via the Uniswap schema
 * plus AJV (both optional — dynamic import), always run enhancedValidation(),
 * then print the report and terminate the process.
 *
 * @param {string} filePath - path to the token-list JSON file
 * @returns {Promise<never>} never resolves normally — exits with code 0 on
 *   success, 1 on parse failure or validation errors
 */
async function validateTokenList(filePath) {
  console.log(`\n🔍 Validating token list: ${filePath}\n`);

  // Read token list file
  let tokenList;
  try {
    const fileContent = readFileSync(filePath, 'utf-8');
    tokenList = JSON.parse(fileContent);
  } catch (error) {
    // Unreadable or malformed JSON is fatal.
    console.error('❌ Error reading or parsing token list file:');
    console.error(`   ${error.message}`);
    process.exit(1);
  }

  // Try to fetch and use Uniswap schema
  const schema = await fetchSchema();
  let validationResult;

  if (schema) {
    // Use AJV if available, otherwise fall back to enhanced validation
    try {
      // Try to use dynamic import for ajv (if installed)
      // NOTE(review): ajv/ajv-formats are optional dependencies — the catch
      // below absorbs a missing install.
      const { default: Ajv } = await import('ajv');
      const addFormats = (await import('ajv-formats')).default;

      const ajv = new Ajv({ allErrors: true, verbose: true });
      addFormats(ajv);
      const validate = ajv.compile(schema);
      const valid = validate(tokenList);

      if (valid) {
        // Schema validation passed, now run enhanced checks
        validationResult = enhancedValidation(tokenList);
      } else {
        // Merge schema errors with the enhanced checks' findings.
        const schemaErrors = validate.errors?.map(err => {
          const path = err.instancePath || err.schemaPath || '';
          return `${path}: ${err.message}`;
        }) || [];
        const enhanced = enhancedValidation(tokenList);
        validationResult = {
          errors: [...schemaErrors, ...enhanced.errors],
          warnings: enhanced.warnings,
          valid: false
        };
      }
    } catch (importError) {
      // AJV not available, use enhanced validation
      console.log('⚠️  AJV not available, using enhanced validation');
      validationResult = enhancedValidation(tokenList);
    }
  } else {
    // Schema fetch failed, use enhanced validation
    validationResult = enhancedValidation(tokenList);
  }

  // Display results
  if (validationResult.valid) {
    console.log('✅ Token list is valid!\n');

    // Display token list info
    console.log('📋 Token List Info:');
    console.log(`   Name: ${tokenList.name}`);
    if (tokenList.version) {
      console.log(`   Version: ${tokenList.version.major}.${tokenList.version.minor}.${tokenList.version.patch}`);
    }
    if (tokenList.timestamp) {
      console.log(`   Timestamp: ${tokenList.timestamp}`);
    }
    console.log(`   Tokens: ${tokenList.tokens.length}`);
    console.log('');

    // List tokens
    console.log('🪙 Tokens:');
    tokenList.tokens.forEach((token, index) => {
      console.log(`   ${index + 1}. ${token.symbol} (${token.name})`);
      console.log(`      Address: ${token.address}`);
      console.log(`      Chain ID: ${token.chainId}`);
      console.log(`      Decimals: ${token.decimals}`);
      if (token.logoURI) {
        console.log(`      Logo: ${token.logoURI}`);
      }
      console.log('');
    });

    // Warnings are shown even when the list is valid.
    if (validationResult.warnings.length > 0) {
      console.log('⚠️  Warnings:');
      validationResult.warnings.forEach(warning => {
        console.log(`   - ${warning}`);
      });
      console.log('');
    }

    process.exit(0);
  } else {
    console.error('❌ Token list validation failed!\n');

    if (validationResult.errors.length > 0) {
      console.error('Errors:');
      validationResult.errors.forEach(error => {
        console.error(`  ❌ ${error}`);
      });
      console.log('');
    }

    if (validationResult.warnings.length > 0) {
      console.log('Warnings:');
      validationResult.warnings.forEach(warning => {
        console.log(`  ⚠️  ${warning}`);
      });
      console.log('');
    }

    process.exit(1);
  }
}
|
||||
|
||||
// Main: default to the bundled chain-138 list when no path argument is given.
const filePath = process.argv[2] || resolve(__dirname, '../lists/dbis-138.tokenlist.json');

// filePath is always truthy (the || fallback is a non-empty string), so the
// old `if (!filePath)` usage check was unreachable and has been removed.
validateTokenList(filePath).catch(error => {
  console.error('Unexpected error:', error);
  process.exit(1);
});
|
||||
|
||||
284
token-lists/scripts/verify-on-chain.js
Executable file
284
token-lists/scripts/verify-on-chain.js
Executable file
@@ -0,0 +1,284 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* On-Chain Verification Script
|
||||
* Verifies token list entries against on-chain contracts using RPC calls
|
||||
*
|
||||
* RPC endpoints (fallback order):
|
||||
* 1. https://rpc-http-pub.d-bis.org (primary)
|
||||
* 2. https://rpc-core.d-bis.org (fallback)
|
||||
*/
|
||||
|
||||
import { readFileSync } from 'fs';
|
||||
import { fileURLToPath } from 'url';
|
||||
import { dirname, resolve } from 'path';
|
||||
import { ethers } from 'ethers';
|
||||
|
||||
// Recreate CommonJS-style __filename/__dirname, which ES modules don't provide.
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
|
||||
|
||||
// RPC endpoints tried in declaration order; the first one that connects and
// reports the expected chain ID wins (see getProvider).
const RPC_ENDPOINTS = [
  'https://rpc-http-pub.d-bis.org',
  'https://rpc-core.d-bis.org'
];

// Chain this token list is pinned to (138 decimal == 0x8a hex).
const REQUIRED_CHAIN_ID = 138;
// Hex form of the chain ID. NOTE(review): not referenced anywhere in this
// script's visible code — confirm it is needed before removing.
const REQUIRED_CHAIN_ID_HEX = '0x8a';

// ERC-20 ABI (minimal) — only the read-only views this script queries.
const ERC20_ABI = [
  'function decimals() view returns (uint8)',
  'function symbol() view returns (string)',
  'function name() view returns (string)',
  'function totalSupply() view returns (uint256)'
];

// Oracle ABI (Chainlink-compatible)
const ORACLE_ABI = [
  'function latestRoundData() view returns (uint80 roundId, int256 answer, uint256 startedAt, uint256 updatedAt, uint80 answeredInRound)',
  'function decimals() view returns (uint8)',
  'function description() view returns (string)'
];
|
||||
|
||||
/**
 * Open a JSON-RPC connection, trying each endpoint in RPC_ENDPOINTS in turn.
 *
 * An endpoint is accepted only if it reports chain ID REQUIRED_CHAIN_ID and
 * answers a block-number probe; otherwise the next endpoint is tried.
 *
 * @returns {Promise<ethers.JsonRpcProvider>} connected provider
 * @throws {Error} when every endpoint fails (message includes the last error)
 */
async function getProvider() {
  let latestFailure;

  for (const endpoint of RPC_ENDPOINTS) {
    try {
      const candidate = new ethers.JsonRpcProvider(endpoint);

      // The endpoint must serve the expected chain.
      const net = await candidate.getNetwork();
      const reportedChainId = Number(net.chainId);
      if (reportedChainId !== REQUIRED_CHAIN_ID) {
        throw new Error(`Chain ID mismatch: expected ${REQUIRED_CHAIN_ID}, got ${reportedChainId}`);
      }

      // Probe the node before committing to it.
      await candidate.getBlockNumber();

      console.log(`✅ Connected to RPC: ${endpoint} (Chain ID: ${reportedChainId})\n`);
      return candidate;
    } catch (error) {
      latestFailure = error;
      console.log(`⚠️  Failed to connect to ${endpoint}: ${error.message}`);
    }
  }

  throw new Error(`Failed to connect to any RPC endpoint. Last error: ${latestFailure?.message}`);
}
|
||||
|
||||
/**
 * Cross-check a single ERC-20 token entry against its on-chain contract.
 *
 * Checks, in order: contract code exists, decimals() matches the list
 * (error on mismatch), symbol() and name() match (warning only), and
 * totalSupply() is callable.
 *
 * @param {ethers.JsonRpcProvider} provider - connected RPC provider
 * @param {object} token - token-list entry
 * @param {number} index - position in the list (used in messages)
 * @returns {Promise<Array<{type: string, message: string}>>} findings with
 *   type 'error' | 'warning' | 'success'
 */
async function verifyERC20Token(provider, token, index) {
  const findings = [];
  const prefix = `Token[${index}] ${token.symbol || token.name}`;
  const report = (type, message) => findings.push({ type, message });

  try {
    // A list entry is only meaningful if code is deployed at the address.
    const bytecode = await provider.getCode(token.address);
    if (bytecode === '0x') {
      report('error', `${prefix}: No contract code at address ${token.address}`);
      return findings;
    }

    const erc20 = new ethers.Contract(token.address, ERC20_ABI, provider);

    // decimals() must match the list exactly — a mismatch breaks amount math.
    try {
      const onChainDecimals = await erc20.decimals();
      if (Number(onChainDecimals) === token.decimals) {
        report('success', `${prefix}: Decimals verified (${token.decimals})`);
      } else {
        report('error', `${prefix}: Decimals mismatch - list: ${token.decimals}, on-chain: ${onChainDecimals}`);
      }
    } catch (error) {
      report('warning', `${prefix}: Failed to read decimals: ${error.message}`);
    }

    // symbol() differences are cosmetic, so only warn.
    try {
      const onChainSymbol = await erc20.symbol();
      if (onChainSymbol !== token.symbol) {
        report('warning', `${prefix}: Symbol mismatch - list: "${token.symbol}", on-chain: "${onChainSymbol}"`);
      }
    } catch (error) {
      report('warning', `${prefix}: Failed to read symbol: ${error.message}`);
    }

    // name() differences are cosmetic, so only warn.
    try {
      const onChainName = await erc20.name();
      if (onChainName !== token.name) {
        report('warning', `${prefix}: Name mismatch - list: "${token.name}", on-chain: "${onChainName}"`);
      }
    } catch (error) {
      report('warning', `${prefix}: Failed to read name: ${error.message}`);
    }

    // totalSupply() acts as a liveness probe; its value is not checked.
    try {
      await erc20.totalSupply();
      report('success', `${prefix}: totalSupply() callable`);
    } catch (error) {
      report('warning', `${prefix}: totalSupply() failed: ${error.message}`);
    }
  } catch (error) {
    report('error', `${prefix}: Verification failed: ${error.message}`);
  }

  return findings;
}
|
||||
|
||||
/**
 * Cross-check an oracle/price-feed entry against its on-chain contract.
 *
 * Checks: contract code exists, latestRoundData() is callable (error when
 * not), and decimals() matches the list entry (error on mismatch).
 *
 * @param {ethers.JsonRpcProvider} provider - connected RPC provider
 * @param {object} token - token-list entry tagged as an oracle
 * @param {number} index - position in the list (used in messages)
 * @returns {Promise<Array<{type: string, message: string}>>} findings with
 *   type 'error' | 'warning' | 'success'
 */
async function verifyOracleToken(provider, token, index) {
  const findings = [];
  const prefix = `Token[${index}] ${token.symbol || token.name} (Oracle)`;
  const report = (type, message) => findings.push({ type, message });

  try {
    // A list entry is only meaningful if code is deployed at the address.
    const bytecode = await provider.getCode(token.address);
    if (bytecode === '0x') {
      report('error', `${prefix}: No contract code at address ${token.address}`);
      return findings;
    }

    const feed = new ethers.Contract(token.address, ORACLE_ABI, provider);

    // A working Chainlink-style feed must answer latestRoundData().
    try {
      await feed.latestRoundData();
      report('success', `${prefix}: latestRoundData() callable`);
    } catch (error) {
      report('error', `${prefix}: latestRoundData() failed: ${error.message}`);
    }

    // Feed decimals must match the list entry.
    try {
      const onChainDecimals = await feed.decimals();
      if (Number(onChainDecimals) === token.decimals) {
        report('success', `${prefix}: Decimals verified (${token.decimals})`);
      } else {
        report('error', `${prefix}: Decimals mismatch - list: ${token.decimals}, on-chain: ${onChainDecimals}`);
      }
    } catch (error) {
      report('warning', `${prefix}: Failed to read decimals: ${error.message}`);
    }
  } catch (error) {
    report('error', `${prefix}: Verification failed: ${error.message}`);
  }

  return findings;
}
|
||||
|
||||
/**
 * Whether a token-list entry represents a price oracle rather than an ERC-20.
 *
 * Oracle entries are flagged via the optional `tags` field.
 *
 * @param {object} token - token-list entry
 * @returns {boolean} true when tags contain 'oracle' or 'pricefeed'
 */
function isOracleToken(token) {
  // Boolean() so a missing `tags` yields false instead of leaking undefined
  // from the short-circuit (an is* function should always return a boolean).
  return Boolean(token.tags && (token.tags.includes('oracle') || token.tags.includes('pricefeed')));
}
|
||||
|
||||
/**
 * Verify every entry in a token-list file against on-chain state.
 *
 * Oracle-tagged entries are checked via verifyOracleToken(), all others via
 * verifyERC20Token(). RPC connection failure is fatal only when `required`
 * is true; otherwise verification is skipped with a warning.
 *
 * @param {string} filePath - path to the token-list JSON file
 * @param {boolean} [required=false] - treat RPC unavailability as failure
 * @returns {Promise<number>} process exit code: 0 on success/skip, 1 on
 *   verification errors. Exits the process directly (code 1) on file parse
 *   failure or on RPC failure in required mode.
 */
async function verifyOnChain(filePath, required = false) {
  console.log(`\n🔗 Verifying on-chain contracts: ${filePath}\n`);

  // Read token list file
  let tokenList;
  try {
    const fileContent = readFileSync(filePath, 'utf-8');
    tokenList = JSON.parse(fileContent);
  } catch (error) {
    console.error('❌ Error reading or parsing token list file:');
    console.error(`   ${error.message}`);
    process.exit(1);
  }

  // Connect to an RPC endpoint; failure handling depends on `required`.
  let provider;
  try {
    provider = await getProvider();
  } catch (error) {
    if (required) {
      console.error(`❌ ${error.message}`);
      console.error('On-chain verification is required but RPC connection failed.');
      process.exit(1);
    } else {
      console.log(`⚠️  ${error.message}`);
      console.log('Skipping on-chain verification (optional mode)\n');
      return 0;
    }
  }

  if (!tokenList.tokens || !Array.isArray(tokenList.tokens)) {
    console.log('No tokens to verify.\n');
    return 0;
  }

  const allResults = [];

  // Verify sequentially to keep RPC load and log output ordered.
  for (const [index, token] of tokenList.tokens.entries()) {
    let results;
    if (isOracleToken(token)) {
      results = await verifyOracleToken(provider, token, index);
    } else {
      results = await verifyERC20Token(provider, token, index);
    }
    allResults.push(...results);
  }

  // Report results
  const errors = allResults.filter(r => r.type === 'error');
  const warnings = allResults.filter(r => r.type === 'warning');
  const successes = allResults.filter(r => r.type === 'success');

  if (errors.length > 0) {
    console.log('❌ Errors:');
    errors.forEach(r => console.log(`   ${r.message}`));
    console.log('');
  }

  if (warnings.length > 0) {
    console.log('⚠️  Warnings:');
    warnings.forEach(r => console.log(`   ${r.message}`));
    console.log('');
  }

  if (successes.length > 0) {
    console.log('✅ Verified:');
    successes.forEach(r => console.log(`   ${r.message}`));
    console.log('');
  }

  // Only errors affect the exit code; warnings are informational.
  if (errors.length > 0) {
    console.log(`❌ Verification failed with ${errors.length} error(s)\n`);
    return 1;
  }

  console.log('✅ All on-chain verifications passed!\n');
  return 0;
}
|
||||
|
||||
// Main: the first non-flag CLI argument is the list path; --required makes
// RPC availability mandatory.
const args = process.argv.slice(2);
const filePath = args.find(arg => !arg.startsWith('--')) || resolve(__dirname, '../lists/dbis-138.tokenlist.json');
const required = args.includes('--required');

// filePath is always truthy (the || fallback is a non-empty string), so the
// old `if (!filePath)` usage check was unreachable and has been removed.
verifyOnChain(filePath, required).then(exitCode => {
  process.exit(exitCode);
}).catch(error => {
  console.error('Unexpected error:', error);
  process.exit(1);
});
|
||||
|
||||
Reference in New Issue
Block a user