Initial commit

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Todd
2026-03-29 22:42:55 -04:00
commit 0d7b2b1aab
389 changed files with 280296 additions and 0 deletions

106
scripts/cleanup-old-logs.py Executable file
View File

@@ -0,0 +1,106 @@
#!/usr/bin/env python3
"""
Log Cleanup Script for Media Downloader
Removes log files older than 7 days
Usage: python3 scripts/cleanup-old-logs.py
Cron: 0 0 * * * /opt/media-downloader/venv/bin/python3 /opt/media-downloader/scripts/cleanup-old-logs.py
"""
import sys
from pathlib import Path
# Add parent directory to path for imports
sys.path.insert(0, str(Path(__file__).parent.parent))
from datetime import datetime, timedelta
import glob
from modules.universal_logger import get_logger
# Configuration
RETENTION_DAYS = 7  # delete log files whose mtime is older than this many days
LOG_DIR = Path("/opt/media-downloader/logs")  # NOTE: hardcoded deploy path — must match the cron install location
# Initialize logger
# NOTE(review): get_logger signature/behavior defined in modules.universal_logger — not visible here
logger = get_logger('LogCleanup')
def cleanup_old_logs():
    """Remove log files in LOG_DIR older than RETENTION_DAYS.

    Matches two naming schemes: dated logs (``YYYYMMDD_component.log``)
    and rotation suffixes (``component.log.1``, ``component.log.2``, ...).
    A file is deleted when its modification time is older than the cutoff.

    Returns:
        bool: True if the run completed with no errors, False if the log
        directory is missing or any file could not be processed/removed.
    """
    logger.info("LogCleanup", f"Starting log cleanup (retention: {RETENTION_DAYS} days)")

    # Check if log directory exists
    if not LOG_DIR.exists():
        logger.error("LogCleanup", f"Log directory not found: {LOG_DIR}")
        return False

    # Snapshot "now" once so every file's age is judged against the same
    # instant as the cutoff (the original re-read datetime.now() per file).
    now = datetime.now()
    cutoff_date = now - timedelta(days=RETENTION_DAYS)
    logger.debug("LogCleanup", f"Cutoff date: {cutoff_date.strftime('%Y-%m-%d %H:%M:%S')}")

    # Find all log files (the two patterns are disjoint, so no double-counting)
    datetime_logs = list(LOG_DIR.glob("[0-9]*_*.log"))  # YYYYMMDD_component.log
    rotated_logs = list(LOG_DIR.glob("*.log.*"))  # component.log.1, component.log.2, etc.
    all_logs = datetime_logs + rotated_logs
    logger.info("LogCleanup", f"Found {len(all_logs)} total log files to check")

    # Track cleanup stats
    removed_count = 0
    removed_size = 0
    skipped_count = 0
    error_count = 0

    # Process each log file
    for log_file in all_logs:
        try:
            # Single stat() call: the original called stat() twice, which is
            # an extra syscall and a window where mtime/size could disagree.
            st = log_file.stat()
            mtime = datetime.fromtimestamp(st.st_mtime)
            file_age_days = (now - mtime).days
            file_size = st.st_size
            if mtime < cutoff_date:
                # File is old enough to delete
                try:
                    log_file.unlink()
                    removed_count += 1
                    removed_size += file_size
                    logger.info("LogCleanup", f"Removed old log: {log_file.name} (age: {file_age_days} days, size: {file_size:,} bytes)")
                except FileNotFoundError:
                    # Already gone (e.g. removed by a concurrent run) — the
                    # goal (file absent) is achieved, so not an error.
                    logger.debug("LogCleanup", f"Already removed: {log_file.name}")
                except Exception as e:
                    error_count += 1
                    logger.error("LogCleanup", f"Failed to remove {log_file.name}: {e}")
            else:
                # File is still within retention period
                skipped_count += 1
                logger.debug("LogCleanup", f"Kept: {log_file.name} (age: {file_age_days} days)")
        except Exception as e:
            error_count += 1
            logger.error("LogCleanup", f"Error processing {log_file.name}: {e}")

    # Log summary
    if removed_count > 0:
        size_mb = removed_size / (1024 * 1024)
        logger.success("LogCleanup", f"Cleanup complete: Removed {removed_count} log file(s), freed {size_mb:.2f} MB")
    else:
        logger.info("LogCleanup", f"No old logs to clean up (all {skipped_count} logs are within {RETENTION_DAYS} days)")
    if error_count > 0:
        logger.warning("LogCleanup", f"Encountered {error_count} error(s) during cleanup")

    # Log final stats
    logger.info("LogCleanup", f"Summary: {removed_count} removed, {skipped_count} kept, {error_count} errors")
    return error_count == 0
def main():
    """Entry point: run the cleanup and exit with 0 on success, 1 on failure.

    `sys.exit` raises SystemExit (a BaseException), so the except clause
    below only fires on genuine errors from cleanup_old_logs(), never on
    the exit call itself.
    """
    exit_code = 1
    try:
        if cleanup_old_logs():
            exit_code = 0
    except Exception as e:
        logger.error("LogCleanup", f"Fatal error during log cleanup: {e}")
    sys.exit(exit_code)
# Run only when executed as a script (e.g. from cron), not when imported.
if __name__ == '__main__':
    main()