- Organized 252 files across project
- Root directory: 187 → 2 files (98.9% reduction)
- Moved configuration guides to docs/04-configuration/
- Moved troubleshooting guides to docs/09-troubleshooting/
- Moved quick start guides to docs/01-getting-started/
- Moved reports to reports/ directory
- Archived temporary files
- Generated comprehensive reports and documentation
- Created maintenance scripts and guides

All files organized according to established standards.
82 lines
3.1 KiB
Python
#!/usr/bin/env python3
|
|
"""
|
|
Consolidate Duplicate Status Files
|
|
Identifies and reports on duplicate status files for manual review
|
|
"""
|
|
|
|
import json
|
|
from pathlib import Path
|
|
from collections import defaultdict
|
|
|
|
def load_inconsistencies(path='CONTENT_INCONSISTENCIES.json'):
    """Load the content-inconsistencies JSON produced by the earlier scan.

    Args:
        path: JSON file to read. Defaults to the scanner's output file
            in the current working directory (backward compatible).

    Returns:
        The parsed JSON object; the rest of this script expects a dict
        with an 'inconsistencies' list.

    Raises:
        FileNotFoundError: if the scan output is missing.
        json.JSONDecodeError: if the file is not valid JSON.
    """
    # Explicit UTF-8: JSON is conventionally UTF-8, while open()'s default
    # encoding is platform-dependent (e.g. cp1252 on Windows).
    with open(path, 'r', encoding='utf-8') as f:
        return json.load(f)
|
# Static closing section of the report; indentation of the sub-bullets is
# part of the emitted Markdown.
_RECOMMENDATIONS = [
    "## Recommendations",
    "",
    "1. **For Conflicting Status**:",
    "   - Keep the most recent/complete status file",
    "   - Archive or delete older versions",
    "   - Update cross-references",
    "",
    "2. **For Duplicate Introductions**:",
    "   - Review files to determine if they're true duplicates",
    "   - If duplicates, keep one and archive others",
    "   - If templates, ensure they're in appropriate location",
    "",
]


def _format_conflict_section(conflicts):
    """Return the 'Conflicting Status Files' section as a list of lines."""
    lines = [
        "## Conflicting Status Files",
        "",
        "These files report status for the same component but have different statuses.",
        "Review and consolidate to a single source of truth.",
        "",
    ]
    for i, conflict in enumerate(conflicts, 1):
        lines.append(f"### Conflict {i}: {conflict['issue']}")
        lines.append("")
        lines.append("**Files:**")
        lines.extend(f"- `{file_path}`" for file_path in conflict['files'])
        lines.append("")
        lines.append("**Action:** Review these files and consolidate to a single status file.")
        lines.append("")
    return lines


def _format_duplicate_section(duplicates, limit=20):
    """Return the 'Duplicate Introductions' section, capped at `limit` sets."""
    lines = [
        "## Duplicate Introductions",
        "",
        "These files have identical first 10 lines (likely duplicates or templates).",
        "",
    ]
    for i, dup in enumerate(duplicates[:limit], 1):
        lines.append(f"### Duplicate Set {i}")
        lines.append("")
        lines.append("**Files:**")
        lines.extend(f"- `{file_path}`" for file_path in dup['files'])
        lines.append("")
    if len(duplicates) > limit:
        # Leading "\n" deliberately yields a blank line after the final join.
        lines.append(f"\n... and {len(duplicates) - limit} more duplicate sets")
    return lines


def generate_consolidation_report(data=None):
    """Build the Markdown consolidation report for duplicate status files.

    Args:
        data: Parsed inconsistencies dict (with an 'inconsistencies' list of
            entries carrying 'type', 'files' and, for conflicts, 'issue').
            When None (the default, backward compatible), it is loaded from
            CONTENT_INCONSISTENCIES.json via load_inconsistencies().

    Returns:
        The complete report as a single newline-joined string.
    """
    if data is None:
        data = load_inconsistencies()

    inconsistencies = data['inconsistencies']
    conflicts = [inc for inc in inconsistencies if inc['type'] == 'conflicting_status']
    duplicates = [inc for inc in inconsistencies if inc['type'] == 'duplicate_intro']

    report = [
        "# Duplicate Status Files - Consolidation Report",
        "",
        f"**Conflicting Status Files**: {len(conflicts)}",
        f"**Duplicate Introductions**: {len(duplicates)}",
        "",
    ]
    report.extend(_format_conflict_section(conflicts))
    report.extend(_format_duplicate_section(duplicates))
    report.append("")  # blank line between the duplicates section and recommendations
    report.extend(_RECOMMENDATIONS)
    return "\n".join(report)
|
if __name__ == '__main__':
    # Build the report and write it next to the script's working directory.
    report = generate_consolidation_report()
    # Explicit UTF-8 so the Markdown (and any non-ASCII file paths in it)
    # is written identically on every platform.
    with open('DUPLICATE_STATUS_CONSOLIDATION_REPORT.md', 'w', encoding='utf-8') as f:
        f.write(report)
    print("✅ Duplicate status consolidation report generated: DUPLICATE_STATUS_CONSOLIDATION_REPORT.md")