Initial commit

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Todd
2026-03-29 22:42:55 -04:00
commit 0d7b2b1aab
389 changed files with 280296 additions and 0 deletions

View File

@@ -0,0 +1,535 @@
"""
Stats Router
Handles statistics, monitoring, settings, and integrations:
- Dashboard statistics
- Downloader monitoring
- Settings management
- Immich integration
"""
import json
import sqlite3
import time
from typing import Dict, Optional
import requests
from fastapi import APIRouter, Depends, Request
from pydantic import BaseModel
from slowapi import Limiter
from slowapi.util import get_remote_address
from ..core.dependencies import get_current_user, require_admin, get_app_state
from ..core.exceptions import handle_exceptions, NotFoundError, ValidationError
from modules.universal_logger import get_logger
logger = get_logger('API')
router = APIRouter(prefix="/api", tags=["Stats & Monitoring"])
limiter = Limiter(key_func=get_remote_address)
# ============================================================================
# PYDANTIC MODELS
# ============================================================================
class SettingUpdate(BaseModel):
    """Request body for updating a single setting.

    NOTE(review): this model is declared but the PUT /settings/{key}
    endpoint below accepts a raw ``Dict`` instead — confirm whether the
    endpoint should be migrated to this model or the model removed.
    """
    # New value for the setting; any JSON-serializable scalar or container.
    value: dict | list | str | int | float | bool
    # Optional grouping category (e.g. 'monitoring').
    category: Optional[str] = None
    # Optional human-readable description of the setting.
    description: Optional[str] = None
# ============================================================================
# DASHBOARD STATISTICS
# ============================================================================
@router.get("/stats/dashboard")
@limiter.limit("60/minute")
@handle_exceptions
async def get_dashboard_stats(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Get comprehensive dashboard statistics.

    Aggregates across the downloads, video_downloads, file_inventory and
    recycle_bin tables and returns:
      - storage_by_platform: per-platform counts/sizes (final + review files)
      - downloads_per_day: daily counts for the last 30 days
      - content_types: download counts grouped by content_type
      - top_sources: top 10 (source, platform) pairs by count
      - totals: overall counts, sizes, recycle-bin and review counts
      - growth_rate: week-over-week percentage change in downloads
    """
    app_state = get_app_state()
    with app_state.db.get_connection() as conn:
        cursor = conn.cursor()

        # Download counts per platform (downloads + video_downloads combined).
        cursor.execute("""
            SELECT platform, SUM(cnt) as count FROM (
                SELECT platform, COUNT(*) as cnt FROM downloads GROUP BY platform
                UNION ALL
                SELECT platform, COUNT(*) as cnt FROM video_downloads GROUP BY platform
            ) GROUP BY platform
        """)
        platform_data = {}
        for row in cursor.fetchall():
            platform = row[0]
            if platform not in platform_data:
                platform_data[platform] = {'count': 0, 'size_bytes': 0}
            platform_data[platform]['count'] += row[1]

        # Storage sizes come from file_inventory (final + review locations),
        # which may include platforms with no rows in the download tables.
        cursor.execute("""
            SELECT platform, COALESCE(SUM(file_size), 0) as total_size
            FROM file_inventory
            WHERE location IN ('final', 'review')
            GROUP BY platform
        """)
        for row in cursor.fetchall():
            platform = row[0]
            if platform not in platform_data:
                platform_data[platform] = {'count': 0, 'size_bytes': 0}
            platform_data[platform]['size_bytes'] += row[1]

        # Only surface platforms that actually hold bytes, largest first.
        storage_by_platform = []
        for platform in sorted(platform_data.keys(), key=lambda p: platform_data[p]['size_bytes'], reverse=True):
            if platform_data[platform]['size_bytes'] > 0:
                storage_by_platform.append({
                    'platform': platform,
                    'count': platform_data[platform]['count'],
                    'size_bytes': platform_data[platform]['size_bytes'],
                    'size_mb': round(platform_data[platform]['size_bytes'] / 1024 / 1024, 2)
                })

        # Downloads per day (last 30 days), combining both download tables.
        cursor.execute("""
            SELECT date, SUM(count) as count FROM (
                SELECT DATE(download_date) as date, COUNT(*) as count
                FROM downloads
                WHERE download_date >= DATE('now', '-30 days')
                GROUP BY DATE(download_date)
                UNION ALL
                SELECT DATE(download_date) as date, COUNT(*) as count
                FROM video_downloads
                WHERE download_date >= DATE('now', '-30 days')
                GROUP BY DATE(download_date)
            ) GROUP BY date ORDER BY date
        """)
        downloads_per_day = [{'date': row[0], 'count': row[1]} for row in cursor.fetchall()]

        # Content type breakdown (downloads table only).
        cursor.execute("""
            SELECT
                content_type,
                COUNT(*) as count
            FROM downloads
            WHERE content_type IS NOT NULL
            GROUP BY content_type
            ORDER BY count DESC
        """)
        content_types = {row[0]: row[1] for row in cursor.fetchall()}

        # Top 10 sources by download count.
        cursor.execute("""
            SELECT
                source,
                platform,
                COUNT(*) as count
            FROM downloads
            WHERE source IS NOT NULL
            GROUP BY source, platform
            ORDER BY count DESC
            LIMIT 10
        """)
        top_sources = [{'source': row[0], 'platform': row[1], 'count': row[2]} for row in cursor.fetchall()]

        # Totals use file_inventory for accurate on-disk file counts/sizes.
        cursor.execute("""
            SELECT
                (SELECT COUNT(*) FROM file_inventory WHERE location IN ('final', 'review')) as total_downloads,
                (SELECT COALESCE(SUM(file_size), 0) FROM file_inventory WHERE location IN ('final', 'review')) as total_size,
                (SELECT COUNT(DISTINCT source) FROM downloads) +
                (SELECT COUNT(DISTINCT uploader) FROM video_downloads) as unique_sources,
                (SELECT COUNT(DISTINCT platform) FROM file_inventory) as platforms_used
        """)
        totals = cursor.fetchone()

        # Recycle bin and review counts are reported separately.
        cursor.execute("SELECT COUNT(*) FROM recycle_bin")
        recycle_count = cursor.fetchone()[0] or 0
        cursor.execute("SELECT COUNT(*) FROM file_inventory WHERE location = 'review'")
        review_count = cursor.fetchone()[0] or 0

        # Week-over-week growth rate, combining both download tables.
        cursor.execute("""
            SELECT
                (SELECT SUM(CASE WHEN download_date >= DATE('now', '-7 days') THEN 1 ELSE 0 END) FROM downloads) +
                (SELECT SUM(CASE WHEN download_date >= DATE('now', '-7 days') THEN 1 ELSE 0 END) FROM video_downloads) as this_week,
                (SELECT SUM(CASE WHEN download_date >= DATE('now', '-14 days') AND download_date < DATE('now', '-7 days') THEN 1 ELSE 0 END) FROM downloads) +
                (SELECT SUM(CASE WHEN download_date >= DATE('now', '-14 days') AND download_date < DATE('now', '-7 days') THEN 1 ELSE 0 END) FROM video_downloads) as last_week
        """)
        growth_row = cursor.fetchone()
        # BUG FIX: SUM() over an empty table yields SQL NULL, and NULL + NULL
        # stays NULL, so growth_row values can be None on a fresh database.
        # Comparing None > 0 raises TypeError in Python 3 — coalesce to 0 first.
        this_week = growth_row[0] if growth_row and growth_row[0] is not None else 0
        last_week = growth_row[1] if growth_row and growth_row[1] is not None else 0
        growth_rate = 0
        if last_week > 0:
            growth_rate = round(((this_week - last_week) / last_week) * 100, 1)

        return {
            'storage_by_platform': storage_by_platform,
            'downloads_per_day': downloads_per_day,
            'content_types': content_types,
            'top_sources': top_sources,
            'totals': {
                'total_downloads': totals[0] or 0,
                'total_size_bytes': totals[1] or 0,
                'total_size_gb': round((totals[1] or 0) / 1024 / 1024 / 1024, 2),
                'unique_sources': totals[2] or 0,
                'platforms_used': totals[3] or 0,
                'recycle_bin_count': recycle_count,
                'review_count': review_count
            },
            'growth_rate': growth_rate
        }
# ============================================================================
# FLARESOLVERR HEALTH CHECK
# ============================================================================
@router.get("/health/flaresolverr")
@limiter.limit("60/minute")
@handle_exceptions
async def check_flaresolverr_health(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Check FlareSolverr health status."""
    app_state = get_app_state()

    # Start from the default endpoint; the 'flaresolverr' setting may
    # override it. Any config-read problem falls back to the default.
    url = "http://localhost:8191/v1"
    try:
        with app_state.db.get_connection() as conn:
            db_cursor = conn.cursor()
            db_cursor.execute("SELECT value FROM settings WHERE key='flaresolverr'")
            row = db_cursor.fetchone()
            if row:
                cfg = json.loads(row[0])
                if 'url' in cfg:
                    url = cfg['url']
    except (sqlite3.Error, json.JSONDecodeError, KeyError):
        pass

    started = time.time()
    try:
        resp = requests.post(
            url,
            json={"cmd": "sessions.list"},
            timeout=5
        )
        elapsed_ms = round((time.time() - started) * 1000, 2)
        if resp.status_code == 200:
            return {
                'status': 'healthy',
                'url': url,
                'response_time_ms': elapsed_ms,
                'last_check': time.time(),
                'sessions': resp.json().get('sessions', [])
            }
        return {
            'status': 'unhealthy',
            'url': url,
            'response_time_ms': elapsed_ms,
            'last_check': time.time(),
            'error': f"HTTP {resp.status_code}: {resp.text}"
        }
    except requests.exceptions.ConnectionError:
        return {
            'status': 'offline',
            'url': url,
            'last_check': time.time(),
            'error': 'Connection refused - FlareSolverr may not be running'
        }
    except requests.exceptions.Timeout:
        return {
            'status': 'timeout',
            'url': url,
            'last_check': time.time(),
            'error': 'Request timed out after 5 seconds'
        }
    except Exception as exc:
        return {
            'status': 'error',
            'url': url,
            'last_check': time.time(),
            'error': str(exc)
        }
# ============================================================================
# MONITORING ENDPOINTS
# ============================================================================
@router.get("/monitoring/status")
@limiter.limit("100/minute")
@handle_exceptions
async def get_monitoring_status(
    request: Request,
    hours: int = 24,
    current_user: Dict = Depends(get_current_user)
):
    """Get downloader monitoring status over the given window (hours)."""
    from modules.downloader_monitor import get_monitor
    state = get_app_state()
    downloader_status = get_monitor(state.db, state.settings).get_downloader_status(hours=hours)
    return {
        "success": True,
        "downloaders": downloader_status,
        "window_hours": hours
    }
@router.get("/monitoring/history")
@limiter.limit("100/minute")
@handle_exceptions
async def get_monitoring_history(
    request: Request,
    downloader: Optional[str] = None,
    limit: int = 100,
    current_user: Dict = Depends(get_current_user)
):
    """Get download monitoring history.

    Query params:
        downloader: optional downloader name to filter by; when omitted,
            history across all downloaders is returned.
        limit: maximum number of rows returned (newest first).

    Returns a dict with ``success`` and ``history`` (list of monitor rows).
    """
    app_state = get_app_state()
    with app_state.db.get_connection() as conn:
        cursor = conn.cursor()
        if downloader:
            cursor.execute("""
                SELECT
                    id, downloader, username, timestamp, success,
                    file_count, error_message, alert_sent
                FROM download_monitor
                WHERE downloader = ?
                ORDER BY timestamp DESC
                LIMIT ?
            """, (downloader, limit))
        else:
            cursor.execute("""
                SELECT
                    id, downloader, username, timestamp, success,
                    file_count, error_message, alert_sent
                FROM download_monitor
                ORDER BY timestamp DESC
                LIMIT ?
            """, (limit,))
        history = []
        # NOTE(review): rows are accessed by column name, which assumes
        # get_connection() configures row_factory = sqlite3.Row — confirm;
        # other endpoints in this file index rows positionally.
        for row in cursor.fetchall():
            history.append({
                'id': row['id'],
                'downloader': row['downloader'],
                'username': row['username'],
                'timestamp': row['timestamp'],
                'success': bool(row['success']),
                'file_count': row['file_count'],
                'error_message': row['error_message'],
                'alert_sent': bool(row['alert_sent'])
            })
    return {
        "success": True,
        "history": history
    }
@router.delete("/monitoring/history")
@limiter.limit("10/minute")
@handle_exceptions
async def clear_monitoring_history(
    request: Request,
    days: int = 30,
    current_user: Dict = Depends(require_admin)
):
    """Clear monitoring logs older than the given number of days (admin only)."""
    from modules.downloader_monitor import get_monitor
    state = get_app_state()
    get_monitor(state.db, state.settings).clear_old_logs(days=days)
    return {
        "success": True,
        "message": f"Cleared logs older than {days} days"
    }
# ============================================================================
# SETTINGS ENDPOINTS
# ============================================================================
@router.get("/settings/{key}")
@limiter.limit("60/minute")
@handle_exceptions
async def get_setting(
    request: Request,
    key: str,
    current_user: Dict = Depends(get_current_user)
):
    """Get a specific setting value; 404 if the key is unknown."""
    setting_value = get_app_state().settings.get(key)
    # `is None` (not truthiness) so falsy values like False/0 are returned.
    if setting_value is None:
        raise NotFoundError(f"Setting '{key}' not found")
    return setting_value
@router.put("/settings/{key}")
@limiter.limit("30/minute")
@handle_exceptions
async def update_setting(
    request: Request,
    key: str,
    body: Dict,
    current_user: Dict = Depends(get_current_user)
):
    """Update a specific setting value.

    Body keys: 'value' (required), 'category' and 'description' (optional).
    """
    new_value = body.get('value')
    if new_value is None:
        raise ValidationError("Missing 'value' in request body")
    state = get_app_state()
    state.settings.set(
        key=key,
        value=new_value,
        category=body.get('category'),
        description=body.get('description'),
        updated_by=current_user.get('username', 'user')
    )
    return {
        "success": True,
        "message": f"Setting '{key}' updated successfully"
    }
# ============================================================================
# IMMICH INTEGRATION
# ============================================================================
@router.post("/immich/scan")
@limiter.limit("10/minute")
@handle_exceptions
async def trigger_immich_scan(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Trigger Immich library scan.

    Requires the 'immich' setting to be enabled and to contain api_url,
    api_key and library_id. Returns success=False (not an error status)
    when the integration is disabled, misconfigured, or unreachable.
    """
    cfg = get_app_state().settings.get('immich', {})

    # Guard clauses: disabled or incomplete configuration.
    if not cfg.get('enabled'):
        return {
            "success": False,
            "message": "Immich integration is not enabled"
        }
    api_url = cfg.get('api_url')
    api_key = cfg.get('api_key')
    library_id = cfg.get('library_id')
    if not (api_url and api_key and library_id):
        return {
            "success": False,
            "message": "Immich configuration incomplete (missing api_url, api_key, or library_id)"
        }

    try:
        resp = requests.post(
            f"{api_url}/libraries/{library_id}/scan",
            headers={'X-API-KEY': api_key},
            timeout=10
        )
    except requests.exceptions.RequestException as exc:
        return {
            "success": False,
            "message": f"Failed to connect to Immich: {str(exc)}"
        }

    if resp.status_code in [200, 201, 204]:
        return {
            "success": True,
            "message": f"Successfully triggered Immich scan for library {library_id}"
        }
    return {
        "success": False,
        "message": f"Immich scan request failed with status {resp.status_code}: {resp.text}"
    }
# ============================================================================
# ERROR MONITORING SETTINGS
# ============================================================================
class ErrorMonitoringSettings(BaseModel):
    """Request body for the error-monitoring settings endpoints."""
    # Master switch for error monitoring.
    enabled: bool = True
    # Whether to send push alerts for errors.
    push_alert_enabled: bool = True
    # Hours to wait before a push alert is sent.
    push_alert_delay_hours: int = 24
    # Whether to show an error banner on the dashboard.
    dashboard_banner_enabled: bool = True
    # Days to keep error records before cleanup.
    retention_days: int = 7
@router.get("/error-monitoring/settings")
@limiter.limit("60/minute")
@handle_exceptions
async def get_error_monitoring_settings(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Get error monitoring settings, falling back to the defaults below."""
    defaults = {
        'enabled': True,
        'push_alert_enabled': True,
        'push_alert_delay_hours': 24,
        'dashboard_banner_enabled': True,
        'retention_days': 7
    }
    return get_app_state().settings.get('error_monitoring', defaults)
@router.put("/error-monitoring/settings")
@limiter.limit("30/minute")
@handle_exceptions
async def update_error_monitoring_settings(
    request: Request,
    settings: ErrorMonitoringSettings,
    current_user: Dict = Depends(get_current_user)
):
    """Update error monitoring settings (validated by ErrorMonitoringSettings)."""
    payload = settings.model_dump()
    get_app_state().settings.set(
        key='error_monitoring',
        value=payload,
        category='monitoring',
        description='Error monitoring and alert settings',
        updated_by=current_user.get('username', 'user')
    )
    return {
        "success": True,
        "message": "Error monitoring settings updated",
        "settings": payload
    }