Complete optional next steps: fix references and consolidate duplicates

- Fixed 104 broken references in 59 files
- Consolidated 40+ duplicate status files
- Archived duplicates to reports/archive/duplicates/
- Created scripts for reference fixing and consolidation
- Updated content inconsistency reports

All optional cleanup tasks complete.
Author: defiQUG
Date:   2026-01-06 02:25:38 -08:00
Parent: e888e04d12
Commit: 9c37af10c0
147 changed files with 1797 additions and 915 deletions


@@ -104,7 +104,7 @@ This script validates:
## Script Dependencies
-All scripts use the standardized `~/.env` file for configuration. See [docs/ENV_STANDARDIZATION.md](../docs/ENV_STANDARDIZATION.md) for details.
+All scripts use the standardized `~/.env` file for configuration. See [docs/ENV_STANDARDIZATION.md](/docs/04-configuration/ENV_STANDARDIZATION.md) for details.
## Environment Variables
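
As a sketch of the convention this hunk documents (the loading code itself is not shown in the commit), a script might source the standardized `~/.env` like this, assuming the file holds plain KEY=value lines:

#!/bin/bash
# Hedged sketch, not taken from this commit: load the standardized ~/.env
# described in docs/04-configuration/ENV_STANDARDIZATION.md.
set -euo pipefail
if [ -f "$HOME/.env" ]; then
    set -a           # export every variable assigned while sourcing
    # shellcheck disable=SC1091
    . "$HOME/.env"
    set +a
fi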


@@ -0,0 +1,193 @@
#!/bin/bash
# Comprehensive Duplicate Consolidation
# Consolidates all duplicate status files across the project
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
cd "$PROJECT_ROOT"
ARCHIVE_DIR="reports/archive/duplicates/$(date +%Y-%m-%d)"
mkdir -p "$ARCHIVE_DIR"
log() {
    echo -e "\033[0;34m[$(date +'%Y-%m-%d %H:%M:%S')]\033[0m $1"
}

success() {
    echo -e "\033[0;32m[OK]\033[0m $1"
}
# Function to keep the newest file and archive the others
consolidate_files() {
    local group_name="$1"
    shift
    local files=("$@")
    if [ ${#files[@]} -le 1 ]; then
        return
    fi
    # Filter to only existing files
    local existing_files=()
    for file in "${files[@]}"; do
        if [ -f "$file" ]; then
            existing_files+=("$file")
        fi
    done
    if [ ${#existing_files[@]} -le 1 ]; then
        return
    fi
    log "Consolidating: $group_name (${#existing_files[@]} files)"
    # Find the newest file by modification time
    local newest_file=""
    local newest_time=0
    for file in "${existing_files[@]}"; do
        local mtime
        mtime=$(stat -c %Y "$file" 2>/dev/null || echo 0)
        if [ "$mtime" -gt "$newest_time" ]; then
            newest_time=$mtime
            newest_file="$file"
        fi
    done
    if [ -z "$newest_file" ]; then
        return
    fi
    success "Keeping: $newest_file"
    # Archive the others, avoiding name collisions in the archive directory
    for file in "${existing_files[@]}"; do
        if [ "$file" != "$newest_file" ]; then
            local filename dest
            filename=$(basename "$file")
            dest="$ARCHIVE_DIR/$filename"
            local counter=1
            while [ -f "$dest" ]; do
                dest="$ARCHIVE_DIR/${filename%.md}_${counter}.md"
                counter=$((counter + 1))
            done
            mv "$file" "$dest"
            success "  Archived: $file"
        fi
    done
    echo ""
}
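# (Editorial sketch, not part of this commit.) Each group call below behaves
# like:
#   consolidate_files "EXAMPLE" old_a.md old_b.md newest.md
# The file with the latest mtime stays in place; the rest move into
# $ARCHIVE_DIR, gaining _1, _2, ... suffixes on archive-name collisions.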
log "=== Comprehensive Duplicate Consolidation ==="
log ""
# Consolidate ALL_TASKS files in reports/status/
consolidate_files "ALL_TASKS (reports/status)" \
    "reports/status/ALL_TASKS_COMPLETE_FINAL.md"

# Consolidate ALL_NEXT_STEPS files in reports/status/
consolidate_files "ALL_NEXT_STEPS (reports/status)" \
    "reports/status/ALL_NEXT_STEPS_COMPLETE.md"

# Consolidate FIREFLY duplicates
consolidate_files "FIREFLY" \
    "reports/status/FIREFLY_FIX_COMPLETE.md" \
    "reports/status/FIREFLY_ALL_FIXED_COMPLETE.md" \
    "reports/status/FIREFLY_ALL_ISSUES_FIXED_FINAL.md" \
    "reports/status/FIREFLY_ISSUES_COMPLETE.md" \
    "reports/status/FIREFLY_FINAL_STATUS.md" \
    "reports/status/FIREFLY_ALL_FIXED_FINAL.md" \
    "reports/status/FIREFLY_COMPLETE_FIX_SUMMARY.md" \
    "reports/status/FIREFLY_COMPLETE_FIX_FINAL.md" \
    "reports/status/FIREFLY_ISSUES_ANALYSIS.md" \
    "reports/status/FIREFLY_ALL_ISSUES_FIXED_COMPLETE.md" \
    "reports/status/FIREFLY_ALL_ISSUES_FIXED.md"

# Consolidate DBIS duplicates
consolidate_files "DBIS_ALL_ISSUES" \
    "reports/status/DBIS_ALL_ISSUES_FIXED_SUMMARY.md" \
    "reports/status/DBIS_ALL_ISSUES_FIXED_FINAL.md" \
    "reports/status/DBIS_ALL_ISSUES_FIXED.md"

consolidate_files "DBIS_SOURCE_CODE_FIXES" \
    "reports/status/DBIS_SOURCE_CODE_FIXES_COMPLETE.md" \
    "reports/status/DBIS_SOURCE_CODE_FIXES_FINAL.md" \
    "reports/status/DBIS_SOURCE_CODE_FIXES_SUCCESS.md" \
    "reports/status/DBIS_SOURCE_CODE_FIXES_APPLIED.md"

consolidate_files "DBIS_COMPLETION" \
    "reports/status/DBIS_COMPLETION_FINAL_SUMMARY.md" \
    "reports/status/DBIS_COMPLETE_STATUS_CHECK_SUMMARY.md"

consolidate_files "DBIS_TASKS" \
    "reports/status/DBIS_TASKS_COMPLETION_STATUS.md" \
    "reports/status/DBIS_TASKS_COMPLETION_REPORT.md"

# Consolidate BESU duplicates
consolidate_files "BESU_FIXES" \
    "reports/status/BESU_FIXES_COMPLETE.md" \
    "reports/status/BESU_ALL_FIXES_COMPLETE.md" \
    "reports/status/BESU_FIXES_APPLIED.md" \
    "reports/status/BESU_FIXES_PROGRESS.md"

consolidate_files "BESU_RPC_FIXES" \
    "reports/status/BESU_RPC_FIXES_FINAL.md" \
    "reports/status/BESU_RPC_FIXES_APPLIED.md"

# Consolidate R630_02 duplicates
consolidate_files "R630_02" \
    "reports/status/R630_02_START_COMPLETE.md" \
    "reports/status/R630_02_NEXT_STEPS_COMPLETE.md" \
    "reports/status/R630_02_SERVICES_FINAL_REPORT.md" \
    "reports/status/R630_02_SERVICES_VERIFICATION_COMPLETE.md"

# Consolidate VMID2400 duplicates
consolidate_files "VMID2400" \
    "reports/status/VMID2400_COMPLETE_STATUS.md" \
    "reports/status/VMID2400_SETUP_COMPLETE.md" \
    "reports/status/VMID2400_NEXT_STEPS_COMPLETE.md" \
    "reports/status/VMID2400_TUNNEL_ROUTING_COMPLETE.md" \
    "reports/status/VMID2400_CONNECTIVITY_FIX_COMPLETE.md" \
    "reports/status/VMID2400_ROUTING_SUMMARY.md"

# Consolidate RESERVED_IP duplicates
consolidate_files "RESERVED_IP" \
    "reports/status/RESERVED_IP_FIX_COMPLETE.md" \
    "reports/status/RESERVED_IP_FIX_COMPLETE_FINAL.md" \
    "reports/status/RESERVED_IP_FIX_SUMMARY.md"

# Consolidate DHCP duplicates
consolidate_files "DHCP_CONVERSION" \
    "reports/status/DHCP_TO_STATIC_CONVERSION_COMPLETE.md" \
    "reports/status/DHCP_TO_STATIC_CONVERSION_FINAL_REPORT.md"

# Consolidate JWT duplicates
consolidate_files "JWT_SETUP" \
    "reports/status/JWT_SETUP_COMPLETE.md" \
    "reports/status/JWT_SETUP_SUMMARY.md"

# Consolidate VALIDATION duplicates
consolidate_files "VALIDATION" \
    "reports/status/VALIDATION_COMPLETE.md" \
    "reports/status/VALIDATION_COMPLETE_SUMMARY.md"

# Consolidate ENHANCEMENTS duplicates
consolidate_files "ENHANCEMENTS" \
    "reports/status/ENHANCEMENTS_COMPLETE.md" \
    "reports/status/ENHANCEMENTS_SUMMARY.md"

# Consolidate COMPLETE duplicates
consolidate_files "COMPLETE_SETUP" \
    "reports/status/COMPLETE_SETUP_SUMMARY.md"
consolidate_files "COMPLETE_EXECUTION" \
    "reports/status/COMPLETE_EXECUTION_SUMMARY.md"
consolidate_files "COMPLETE_IMPLEMENTATION" \
    "reports/status/COMPLETE_IMPLEMENTATION_SUMMARY.md"
log "=== Consolidation Complete ==="
log "Archived files: $(ls -1 "$ARCHIVE_DIR" 2>/dev/null | wc -l)"
log "Archive location: $ARCHIVE_DIR"


@@ -0,0 +1,136 @@
#!/bin/bash
# Consolidate Duplicate Status Files
# Archives older duplicates, keeps most recent/complete version
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_ROOT="$(cd "$SCRIPT_DIR/.." && pwd)"
cd "$PROJECT_ROOT"
# Colors
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m'
log() {
    echo -e "${BLUE}[$(date +'%Y-%m-%d %H:%M:%S')]${NC} $1"
}

success() {
    echo -e "${GREEN}[OK]${NC} $1"
}

warn() {
    echo -e "${YELLOW}[WARN]${NC} $1"
}
# Create archive directory
ARCHIVE_DIR="reports/archive/duplicates/$(date +%Y-%m-%d)"
mkdir -p "$ARCHIVE_DIR"
log "Consolidating duplicate status files..."
log "Archive directory: $ARCHIVE_DIR"
log ""
# Function to keep the newest file and archive the others
consolidate_group() {
    local base_name="$1"
    shift
    local files=("$@")
    if [ ${#files[@]} -le 1 ]; then
        return
    fi
    log "Consolidating: $base_name (${#files[@]} files)"
    # Find newest file (by modification time)
    local newest_file=""
    local newest_time=0
    for file in "${files[@]}"; do
        if [ ! -f "$file" ]; then
            continue
        fi
        local mtime
        mtime=$(stat -c %Y "$file" 2>/dev/null || echo 0)
        if [ "$mtime" -gt "$newest_time" ]; then
            newest_time=$mtime
            newest_file="$file"
        fi
    done
    if [ -z "$newest_file" ]; then
        warn "No valid files found for $base_name"
        return
    fi
    success "Keeping: $newest_file"
    # Archive the others
    for file in "${files[@]}"; do
        if [ "$file" != "$newest_file" ] && [ -f "$file" ]; then
            local filename dest
            filename=$(basename "$file")
            dest="$ARCHIVE_DIR/$filename"
            # Handle name collisions in the archive
            local counter=1
            while [ -f "$dest" ]; do
                dest="$ARCHIVE_DIR/${filename%.md}_${counter}.md"
                counter=$((counter + 1))
            done
            mv "$file" "$dest"
            success "  Archived: $file → $dest"
        fi
    done
    log ""
}
# Consolidate reports/status/ duplicates
log "=== Consolidating reports/status/ ==="
# BESU_RPC files
consolidate_group "BESU_RPC" \
    "reports/status/BESU_RPC_COMPLETE_CHECK.md" \
    "reports/status/BESU_RPC_STATUS_CHECK.md" \
    "reports/status/BESU_RPC_STATUS_FINAL.md"

# R630_02_MINOR_ISSUES files
consolidate_group "R630_02_MINOR_ISSUES" \
    "reports/status/R630_02_MINOR_ISSUES_COMPLETE.md" \
    "reports/status/R630_02_MINOR_ISSUES_FINAL.md"

# DBIS_SERVICES files
consolidate_group "DBIS_SERVICES" \
    "reports/status/DBIS_SERVICES_STATUS_FINAL.md" \
    "reports/status/DBIS_SERVICES_STATUS_CHECK.md" \
    "reports/status/DBIS_SERVICES_STATUS_REPORT.md"

# BLOCKSCOUT_START files
consolidate_group "BLOCKSCOUT_START" \
    "reports/status/BLOCKSCOUT_START_COMPLETE.md" \
    "reports/status/BLOCKSCOUT_START_STATUS.md"

# PHASE1_IP_INVESTIGATION files
consolidate_group "PHASE1_IP_INVESTIGATION" \
    "reports/status/PHASE1_IP_INVESTIGATION_STATUS.md" \
    "reports/status/PHASE1_IP_INVESTIGATION_COMPLETE.md"

# BLOCK_PRODUCTION files
consolidate_group "BLOCK_PRODUCTION" \
    "reports/status/BLOCK_PRODUCTION_STATUS.md"

# ALL_TASKS files in reports/status/
consolidate_group "ALL_TASKS (reports)" \
    "reports/status/ALL_TASKS_COMPLETE_FINAL.md"

# ALL_NEXT_STEPS files in reports/status/
consolidate_group "ALL_NEXT_STEPS (reports)" \
    "reports/status/ALL_NEXT_STEPS_COMPLETE.md"
log "=== Consolidation Complete ==="
log "Archived files: $(ls -1 "$ARCHIVE_DIR" 2>/dev/null | wc -l)"
log "Archive location: $ARCHIVE_DIR"

scripts/fix-broken-references.py (new executable file, 199 lines)

@@ -0,0 +1,199 @@
#!/usr/bin/env python3
"""
Fix Broken References Script
Automatically fixes broken markdown links based on file moves during cleanup
"""
import re
import json
from pathlib import Path
from collections import defaultdict
# Mapping of old paths to new paths (from cleanup)
PATH_MAPPINGS = {
    # Root → reports/status/
    r'^BESU_.*\.md$': 'reports/status/',
    r'^FIREFLY_.*\.md$': 'reports/status/',
    r'^DBIS_.*\.md$': 'reports/status/',
    r'^.*STATUS.*\.md$': 'reports/status/',
    r'^.*COMPLETE.*\.md$': 'reports/status/',
    r'^.*FINAL.*\.md$': 'reports/status/',
    r'^.*REPORT.*\.md$': 'reports/status/',
    # Root → reports/analyses/
    r'^.*ANALYSIS.*\.md$': 'reports/analyses/',
    r'^IP_CONFLICT.*\.md$': 'reports/analyses/',
    r'^PHASE1_IP.*\.md$': 'reports/analyses/',
    # Root → docs/
    r'^CLOUDFLARE_API_SETUP\.md$': 'docs/04-configuration/',
    r'^CLOUDFLARE_TUNNEL.*\.md$': 'docs/04-configuration/',
    r'^SETUP_TUNNEL.*\.md$': 'docs/04-configuration/',
    r'^TUNNEL.*\.md$': 'docs/04-configuration/',
    r'^NGINX_CONFIGURATIONS.*\.md$': 'docs/04-configuration/',
    r'^NO_SSH_ACCESS.*\.md$': 'docs/09-troubleshooting/',
    r'^TROUBLESHOOT.*\.md$': 'docs/09-troubleshooting/',
    r'^FIX_TUNNEL.*\.md$': 'docs/09-troubleshooting/',
    r'^R630-04.*\.md$': 'docs/09-troubleshooting/',
    r'^LIST_VMS.*\.md$': 'docs/01-getting-started/',
    r'^THIRDWEB_RPC.*\.md$': 'docs/01-getting-started/',
    r'^CHAIN138_TOKEN.*\.md$': 'docs/11-references/',
    r'^OMADA.*\.md$': 'docs/11-references/',
    r'^GET_EMAIL.*\.md$': 'docs/11-references/',
    # Specific file mappings
    'docs/ENV_STANDARDIZATION.md': 'docs/04-configuration/ENV_STANDARDIZATION.md',
    'docs/MCP_SETUP.md': 'docs/04-configuration/MCP_SETUP.md',
    'MCP_SETUP.md': 'docs/04-configuration/MCP_SETUP.md',
}
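# (Editorial sketch, not part of this commit.) PATH_MAPPINGS is not consumed
# by the functions below, which probe the filesystem instead; for illustration,
# its mixed regex/literal keys could be applied like so:
def map_old_path(old_path):
    for pattern, target in PATH_MAPPINGS.items():
        if pattern == old_path:
            return target  # literal file-to-file mapping
        if pattern.startswith('^') and re.match(pattern, old_path):
            return target + Path(old_path).name  # directory-prefix mapping
    return None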
# Exclude submodules from fixing
EXCLUDE_PATHS = [
    'ProxmoxVE/',
    'smom-dbis-138/',
    'explorer-monorepo/',
    'metamask-integration/',
    'metaverseDubai/',
    'miracles_in_motion/',
    'dbis_core/',
    'gru-docs/',
    'node_modules/',
    '.git/',
]
def should_process_file(file_path):
    """Check if file should be processed"""
    path_str = str(file_path)
    # Don't process submodules
    if any(exclude in path_str for exclude in EXCLUDE_PATHS):
        return False
    # Only process files in our main project
    return True
def find_file_in_new_location(filename):
    """Find where a file was moved to"""
    # Check reports/status/
    if Path(f'reports/status/{filename}').exists():
        return f'reports/status/{filename}'
    # Check reports/analyses/
    if Path(f'reports/analyses/{filename}').exists():
        return f'reports/analyses/{filename}'
    # Check reports/
    if Path(f'reports/{filename}').exists():
        return f'reports/{filename}'
    # Check docs subdirectories
    for subdir in ['01-getting-started', '04-configuration', '09-troubleshooting', '11-references']:
        if Path(f'docs/{subdir}/{filename}').exists():
            return f'docs/{subdir}/{filename}'
    return None
def fix_references_in_file(file_path):
    """Fix broken references in a single file"""
    try:
        with open(file_path, 'r', encoding='utf-8', errors='ignore') as f:
            content = f.read()
        original_content = content
        changes = []
        # Pattern for markdown links: [text](path)
        link_pattern = r'\[([^\]]+)\]\(([^)]+)\)'

        def replace_link(match):
            link_text = match.group(1)
            link_path = match.group(2)
            # Skip external links
            if link_path.startswith('http'):
                return match.group(0)
            # Skip anchor-only links
            if link_path.startswith('#'):
                return match.group(0)
            # Split off any #anchor suffix
            if '#' in link_path:
                file_part, anchor = link_path.split('#', 1)
                anchor_part = '#' + anchor
            else:
                file_part = link_path
                anchor_part = ''
            filename = Path(file_part).name
            # Try to find the file in its new location
            new_location = find_file_in_new_location(filename)
            if new_location:
                # Calculate a relative path from the linking file's directory.
                # Note: Path.relative_to only descends, so cross-tree links
                # raise ValueError and fall through to the absolute form.
                current_dir = file_path.parent
                new_path = Path(new_location)
                try:
                    relative_path = new_path.relative_to(current_dir)
                    new_link = f'[{link_text}]({relative_path}{anchor_part})'
                    changes.append(f"  Fixed: {link_path} → {relative_path}{anchor_part}")
                    return new_link
                except ValueError:
                    # Paths don't share a common ancestor; use an absolute path from root
                    new_link = f'[{link_text}](/{new_location}{anchor_part})'
                    changes.append(f"  Fixed: {link_path} → /{new_location}{anchor_part}")
                    return new_link
            return match.group(0)

        content = re.sub(link_pattern, replace_link, content)
        if content != original_content:
            with open(file_path, 'w', encoding='utf-8') as f:
                f.write(content)
            return changes
        return []
    except Exception as e:
        return [f"  Error: {e}"]
def main():
    """Main function"""
    root = Path('.')
    fixed_count = 0
    total_changes = []
    print("🔧 Fixing broken references...")
    print("")
    # Process markdown files
    for md_file in root.rglob('*.md'):
        if not should_process_file(md_file):
            continue
        changes = fix_references_in_file(md_file)
        if changes:
            fixed_count += 1
            print(f"✅ Fixed: {md_file}")
            for change in changes:
                print(change)
            total_changes.extend([(str(md_file), c) for c in changes])
    print("")
    print(f"✅ Fixed references in {fixed_count} files")
    print(f"   Total changes: {len(total_changes)}")
    # Save report
    report = {
        'files_fixed': fixed_count,
        'total_changes': len(total_changes),
        'changes': total_changes
    }
    with open('REFERENCE_FIXES_REPORT.json', 'w') as f:
        json.dump(report, f, indent=2)
    print("✅ Report saved: REFERENCE_FIXES_REPORT.json")


if __name__ == '__main__':
    main()
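
A usage sketch (the invocation is not part of the commit): the probes in find_file_in_new_location use relative paths, so the script has to run from the repository root.

cd /path/to/project-root   # placeholder; the repository root
python3 scripts/fix-broken-references.py
python3 -m json.tool REFERENCE_FIXES_REPORT.json | head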


@@ -0,0 +1,73 @@
#!/usr/bin/env python3
"""
Update Outdated Dates Script
Updates dates in files that are marked as outdated
"""
import re
import json
from pathlib import Path
from datetime import datetime
def load_inconsistencies():
    """Load content inconsistencies"""
    with open('CONTENT_INCONSISTENCIES.json', 'r') as f:
        return json.load(f)
def update_outdated_dates():
    """Update outdated dates in files"""
    data = load_inconsistencies()
    old_dates = [inc for inc in data['inconsistencies'] if inc['type'] == 'old_date']
    updated_count = 0
    print("📅 Updating outdated dates...")
    print("")
    for item in old_dates:
        file_path = Path(item['file'])
        if not file_path.exists():
            continue
        try:
            with open(file_path, 'r', encoding='utf-8', errors='ignore') as f:
                content = f.read()
            original_content = content
            # Update date patterns
            today = datetime.now().strftime('%Y-%m-%d')
            # Replace old dates with today's date
            # Pattern: Date: YYYY-MM-DD or Last Updated: YYYY-MM-DD
            content = re.sub(
                r'(Date|Last Updated|Generated)[:\s]+(\d{4}-\d{2}-\d{2})',
                rf'\1: {today}',
                content,
                flags=re.IGNORECASE
            )
            # Replace standalone dates in headers
            content = re.sub(
                r'\*\*Date\*\*[:\s]+(\d{4}-\d{2}-\d{2})',
                f'**Date**: {today}',
                content,
                flags=re.IGNORECASE
            )
            if content != original_content:
                with open(file_path, 'w', encoding='utf-8') as f:
                    f.write(content)
                print(f"✅ Updated: {file_path}")
                updated_count += 1
        except Exception as e:
            print(f"⚠️ Error updating {file_path}: {e}")
    print("")
    print(f"✅ Updated {updated_count} files")


if __name__ == '__main__':
    update_outdated_dates()
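
The shape of CONTENT_INCONSISTENCIES.json is implied rather than shown; a minimal input consistent with the reader above would be (illustrative values only):

{
  "inconsistencies": [
    {"type": "old_date", "file": "reports/status/EXAMPLE_STATUS.md"}
  ]
}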