756
web/backend/routers/config.py
Normal file
@@ -0,0 +1,756 @@
"""
Config and Logs Router

Handles configuration and logging operations:
- Get/update application configuration
- Log viewing (single component, merged)
- Notification history and stats
- Changelog retrieval
"""

import json
import re
from datetime import datetime, timedelta
from typing import Dict, List, Optional

from fastapi import APIRouter, Body, Depends, HTTPException, Request
from pydantic import BaseModel
from slowapi import Limiter
from slowapi.util import get_remote_address

from ..core.dependencies import get_current_user, require_admin, get_app_state
from ..core.config import settings
from ..core.exceptions import (
    handle_exceptions,
    ValidationError,
    RecordNotFoundError
)
from ..core.responses import now_iso8601
from modules.universal_logger import get_logger

logger = get_logger('API')

router = APIRouter(prefix="/api", tags=["Configuration"])
limiter = Limiter(key_func=get_remote_address)

LOG_PATH = settings.PROJECT_ROOT / 'logs'


# ============================================================================
# PYDANTIC MODELS
# ============================================================================

class ConfigUpdate(BaseModel):
    config: Dict


class MergedLogsRequest(BaseModel):
    lines: int = 500
    components: List[str]
    around_time: Optional[str] = None  # ISO timestamp to center logs around
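
# Illustrative request body for POST /api/logs/merged (component names are
# hypothetical; any name returned in `available_components` is valid):
#
#   {
#       "lines": 200,
#       "components": ["scheduler", "downloader"],
#       "around_time": "2024-01-15T10:30:00"
#   }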


# ============================================================================
# CONFIGURATION ENDPOINTS
# ============================================================================

@router.get("/config")
@limiter.limit("100/minute")
@handle_exceptions
async def get_config(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Get current configuration."""
    app_state = get_app_state()
    return app_state.settings.get_all()


@router.put("/config")
@limiter.limit("20/minute")
@handle_exceptions
async def update_config(
    request: Request,
    current_user: Dict = Depends(require_admin),
    update: ConfigUpdate = Body(...)
):
    """
    Update configuration (admin only).

    Saves configuration to the database and updates in-memory state.
    """
    app_state = get_app_state()

    if not isinstance(update.config, dict):
        raise ValidationError("Invalid configuration format")

    logger.debug(f"Incoming config keys: {list(update.config.keys())}", module="Config")

    # Save to database
    for key, value in update.config.items():
        app_state.settings.set(key, value, category=key, updated_by='api')

    # Refresh in-memory config so other endpoints see updated values
    app_state.config = app_state.settings.get_all()

    # Broadcast update
    try:
        if hasattr(app_state, 'websocket_manager') and app_state.websocket_manager:
            await app_state.websocket_manager.broadcast({
                "type": "config_updated",
                "timestamp": now_iso8601()
            })
    except Exception as e:
        logger.debug(f"Failed to broadcast config update: {e}", module="Config")

    return {"success": True, "message": "Configuration updated"}


# ============================================================================
# LOG ENDPOINTS
# ============================================================================

@router.get("/logs")
@limiter.limit("100/minute")
@handle_exceptions
async def get_logs(
    request: Request,
    current_user: Dict = Depends(get_current_user),
    lines: int = 100,
    component: Optional[str] = None
):
    """Get recent log entries from the most recent log file."""
    if not LOG_PATH.exists():
        return {"logs": [], "available_components": []}

    all_log_files = []

    # Find date-stamped logs: YYYYMMDD_component.log or YYYYMMDD_HHMMSS_component.log
    seen_paths = set()
    for log_file in LOG_PATH.glob('*.log'):
        if '_' not in log_file.stem:
            continue
        parts = log_file.stem.split('_')
        if not parts[0].isdigit():
            continue
        try:
            stat_info = log_file.stat()
            if stat_info.st_size == 0:
                continue
            mtime = stat_info.st_mtime
            # YYYYMMDD_HHMMSS_component.log (3+ parts, first two numeric)
            if len(parts) >= 3 and parts[1].isdigit():
                comp_name = '_'.join(parts[2:])
            # YYYYMMDD_component.log (2+ parts, first numeric)
            elif len(parts) >= 2:
                comp_name = '_'.join(parts[1:])
            else:
                continue
            seen_paths.add(log_file)
            all_log_files.append({
                'path': log_file,
                'mtime': mtime,
                'component': comp_name
            })
        except OSError:
            pass

    # Also check for old-style logs (no date prefix)
    for log_file in LOG_PATH.glob('*.log'):
        if log_file in seen_paths:
            continue
        if '_' in log_file.stem and log_file.stem.split('_')[0].isdigit():
            continue
        try:
            stat_info = log_file.stat()
            if stat_info.st_size == 0:
                continue
            mtime = stat_info.st_mtime
            all_log_files.append({
                'path': log_file,
                'mtime': mtime,
                'component': log_file.stem
            })
        except OSError:
            pass

    if not all_log_files:
        return {"logs": [], "available_components": []}

    components = sorted(set(f['component'] for f in all_log_files))

    if component:
        log_files = [f for f in all_log_files if f['component'] == component]
    else:
        log_files = all_log_files

    if not log_files:
        return {"logs": [], "available_components": components}

    most_recent = max(log_files, key=lambda x: x['mtime'])

    try:
        with open(most_recent['path'], 'r', encoding='utf-8', errors='ignore') as f:
            all_lines = f.readlines()
        recent_lines = all_lines[-lines:]

        return {
            "logs": [line.strip() for line in recent_lines],
            "available_components": components,
            "current_component": most_recent['component'],
            "log_file": str(most_recent['path'].name)
        }
    except Exception as e:
        logger.error(f"Error reading log file: {e}", module="Logs")
        return {"logs": [], "available_components": components, "error": str(e)}


@router.post("/logs/merged")
@limiter.limit("100/minute")
@handle_exceptions
async def get_merged_logs(
    request: Request,
    body: MergedLogsRequest,
    current_user: Dict = Depends(get_current_user)
):
    """Get merged log entries from multiple components, sorted by timestamp."""
    lines = body.lines
    components = body.components

    if not LOG_PATH.exists():
        return {"logs": [], "available_components": [], "selected_components": []}

    all_log_files = []

    # Find date-stamped logs
    for log_file in LOG_PATH.glob('*_*.log'):
        try:
            stat_info = log_file.stat()
            if stat_info.st_size == 0:
                continue
            mtime = stat_info.st_mtime
            parts = log_file.stem.split('_')

            # Check the OLD format FIRST (YYYYMMDD_HHMMSS_component.log)
            if len(parts) >= 3 and parts[0].isdigit() and len(parts[0]) == 8 and parts[1].isdigit() and len(parts[1]) == 6:
                comp_name = '_'.join(parts[2:])
                all_log_files.append({
                    'path': log_file,
                    'mtime': mtime,
                    'component': comp_name
                })
            # Then check the NEW format (YYYYMMDD_component.log)
            elif len(parts) >= 2 and parts[0].isdigit() and len(parts[0]) == 8:
                comp_name = '_'.join(parts[1:])
                all_log_files.append({
                    'path': log_file,
                    'mtime': mtime,
                    'component': comp_name
                })
        except OSError:
            pass

    # Also check for old-style logs (no date prefix)
    for log_file in LOG_PATH.glob('*.log'):
        if '_' in log_file.stem and log_file.stem.split('_')[0].isdigit():
            continue
        try:
            stat_info = log_file.stat()
            if stat_info.st_size == 0:
                continue
            mtime = stat_info.st_mtime
            all_log_files.append({
                'path': log_file,
                'mtime': mtime,
                'component': log_file.stem
            })
        except OSError:
            pass

    if not all_log_files:
        return {"logs": [], "available_components": [], "selected_components": []}

    available_components = sorted(set(f['component'] for f in all_log_files))

    if not components:
        return {
            "logs": [],
            "available_components": available_components,
            "selected_components": []
        }

    selected_log_files = [f for f in all_log_files if f['component'] in components]

    if not selected_log_files:
        return {
            "logs": [],
            "available_components": available_components,
            "selected_components": components
        }

    all_logs_with_timestamps = []

    for comp in components:
        comp_files = [f for f in selected_log_files if f['component'] == comp]
        if not comp_files:
            continue

        most_recent = max(comp_files, key=lambda x: x['mtime'])

        try:
            with open(most_recent['path'], 'r', encoding='utf-8', errors='ignore') as f:
                all_lines = f.readlines()
            recent_lines = all_lines[-lines:]

            for line in recent_lines:
                line = line.strip()
                if not line:
                    continue

                # Match a leading timestamp with optional microseconds
                timestamp_match = re.match(r'^(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})(?:\.(\d+))?', line)

                if timestamp_match:
                    timestamp_str = timestamp_match.group(1)
                    microseconds = timestamp_match.group(2)
                    try:
                        timestamp = datetime.strptime(timestamp_str, '%Y-%m-%d %H:%M:%S')
                        # Add microseconds if present
                        if microseconds:
                            # Pad or truncate to the 6 digits datetime expects
                            microseconds = microseconds[:6].ljust(6, '0')
                            timestamp = timestamp.replace(microsecond=int(microseconds))
                        all_logs_with_timestamps.append({
                            'timestamp': timestamp,
                            'log': line
                        })
                    except ValueError:
                        all_logs_with_timestamps.append({
                            'timestamp': None,
                            'log': line
                        })
                else:
                    all_logs_with_timestamps.append({
                        'timestamp': None,
                        'log': line
                    })
        except Exception as e:
            logger.error(f"Error reading log file {most_recent['path']}: {e}", module="Logs")
            continue

    # Sort by timestamp; entries without one sort first
    sorted_logs = sorted(
        all_logs_with_timestamps,
        key=lambda x: x['timestamp'] if x['timestamp'] is not None else datetime.min
    )

    # If around_time is specified, center the logs around that timestamp
    if body.around_time:
        try:
            # Strip any trailing 'Z'/UTC offset so the naive result is
            # comparable to the naive timestamps parsed from log lines
            target_time = datetime.fromisoformat(body.around_time.replace('Z', '').replace('+00:00', ''))

            # Find logs within 10 minutes of the target time
            time_window = timedelta(minutes=10)
            filtered_logs = [
                entry for entry in sorted_logs
                if entry['timestamp'] is not None and
                abs((entry['timestamp'] - target_time).total_seconds()) <= time_window.total_seconds()
            ]

            # If we found logs near the target time, use those;
            # otherwise fall back to the closest entries overall
            if filtered_logs:
                merged_logs = [entry['log'] for entry in filtered_logs]
            else:
                # Rank all entries by distance from the target time
                logs_with_diff = [
                    (entry, abs((entry['timestamp'] - target_time).total_seconds()) if entry['timestamp'] else float('inf'))
                    for entry in sorted_logs
                ]
                logs_with_diff.sort(key=lambda x: x[1])
                # Take the closest entries, then restore chronological order
                closest_logs = logs_with_diff[:lines]
                closest_logs.sort(key=lambda x: x[0]['timestamp'] if x[0]['timestamp'] else datetime.min)
                merged_logs = [entry[0]['log'] for entry in closest_logs]
        except (ValueError, TypeError):
            # If parsing fails, fall back to normal behavior
            merged_logs = [entry['log'] for entry in sorted_logs]
            if len(merged_logs) > lines:
                merged_logs = merged_logs[-lines:]
    else:
        merged_logs = [entry['log'] for entry in sorted_logs]
        if len(merged_logs) > lines:
            merged_logs = merged_logs[-lines:]

    return {
        "logs": merged_logs,
        "available_components": available_components,
        "selected_components": components,
        "total_logs": len(merged_logs)
    }
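
# A minimal, self-contained sketch of the merge ordering above (log lines are
# made up; the regex and strptime format are the ones used in the handler):
#
#   import re
#   from datetime import datetime
#
#   lines = [
#       "2024-01-15 10:30:02.123 [scheduler] tick",
#       "2024-01-15 10:30:01 [downloader] started",
#   ]
#   ts = lambda s: datetime.strptime(
#       re.match(r'^(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})', s).group(1),
#       '%Y-%m-%d %H:%M:%S')
#   print(sorted(lines, key=ts))  # downloader line sorts before scheduler line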


# ============================================================================
# NOTIFICATION ENDPOINTS
# ============================================================================

@router.get("/notifications")
@limiter.limit("500/minute")
@handle_exceptions
async def get_notifications(
    request: Request,
    current_user: Dict = Depends(get_current_user),
    limit: int = 50,
    offset: int = 0,
    platform: Optional[str] = None,
    source: Optional[str] = None
):
    """Get notification history with pagination and filters."""
    app_state = get_app_state()

    with app_state.db.get_connection() as conn:
        cursor = conn.cursor()

        # Build the WHERE clause once so the count and page queries stay in sync
        where_clause = " WHERE 1=1"
        params = []

        if platform:
            where_clause += " AND platform = ?"
            params.append(platform)

        if source:
            # Map the standardized display name back to the stored value
            if source == 'YouTube Monitor':
                source = 'youtube_monitor'
            where_clause += " AND source = ?"
            params.append(source)

        # Get total count
        cursor.execute("SELECT COUNT(*) FROM notifications" + where_clause, params)
        result = cursor.fetchone()
        total = result[0] if result else 0

        # Fetch the requested page
        query = """
            SELECT id, platform, source, content_type, message, title,
                   priority, download_count, sent_at, status, metadata
            FROM notifications
        """ + where_clause + " ORDER BY sent_at DESC LIMIT ? OFFSET ?"
        cursor.execute(query, params + [limit, offset])
        rows = cursor.fetchall()

        notifications = []
        for row in rows:
            notifications.append({
                'id': row[0],
                'platform': row[1],
                'source': row[2],
                'content_type': row[3],
                'message': row[4],
                'title': row[5],
                'priority': row[6],
                'download_count': row[7],
                'sent_at': row[8],
                'status': row[9],
                'metadata': json.loads(row[10]) if row[10] else None
            })

        return {
            'notifications': notifications,
            'total': total,
            'limit': limit,
            'offset': offset
        }
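
# Illustrative usage (host hypothetical; filter values must match stored rows):
#
#   curl -H "Authorization: Bearer $TOKEN" \
#        "http://localhost:8000/api/notifications?limit=25&offset=50&platform=youtube"
#
# Pass source="YouTube Monitor" to filter on the standardized display name;
# it is mapped back to the stored value 'youtube_monitor' internally.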


@router.get("/notifications/stats")
@limiter.limit("500/minute")
@handle_exceptions
async def get_notification_stats(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Get notification statistics."""
    app_state = get_app_state()

    with app_state.db.get_connection() as conn:
        cursor = conn.cursor()

        # Total sent
        cursor.execute("SELECT COUNT(*) FROM notifications WHERE status = 'sent'")
        result = cursor.fetchone()
        total_sent = result[0] if result else 0

        # Total failed
        cursor.execute("SELECT COUNT(*) FROM notifications WHERE status = 'failed'")
        result = cursor.fetchone()
        total_failed = result[0] if result else 0

        # By platform (consolidate and filter)
        cursor.execute("""
            SELECT platform, COUNT(*) as count
            FROM notifications
            GROUP BY platform
            ORDER BY count DESC
        """)
        raw_platforms = {row[0]: row[1] for row in cursor.fetchall()}

        # Consolidate similar platforms and exclude system notifications
        by_platform = {}
        for platform, count in raw_platforms.items():
            if platform == 'system':
                continue
            # Consolidate forum -> forums
            if platform == 'forum':
                by_platform['forums'] = by_platform.get('forums', 0) + count
            # Consolidate fastdl -> instagram (fastdl is an Instagram download method)
            elif platform == 'fastdl':
                by_platform['instagram'] = by_platform.get('instagram', 0) + count
            # Standardize youtube_monitor/youtube_monitors -> youtube
            elif platform in ('youtube_monitor', 'youtube_monitors'):
                by_platform['youtube'] = by_platform.get('youtube', 0) + count
            else:
                by_platform[platform] = by_platform.get(platform, 0) + count

        # Recent 24h
        cursor.execute("""
            SELECT COUNT(*) FROM notifications
            WHERE sent_at >= datetime('now', '-1 day')
        """)
        result = cursor.fetchone()
        recent_24h = result[0] if result else 0

        # Unique sources for the filter dropdown
        cursor.execute("""
            SELECT DISTINCT source FROM notifications
            WHERE source IS NOT NULL AND source != ''
            ORDER BY source
        """)
        raw_sources = [row[0] for row in cursor.fetchall()]

        # Standardize source names and track special sources
        sources = []
        has_youtube_monitor = False
        has_log_errors = False
        for source in raw_sources:
            # Standardize youtube_monitor -> YouTube Monitor
            if source == 'youtube_monitor':
                has_youtube_monitor = True
            elif source == 'Log Errors':
                has_log_errors = True
            else:
                sources.append(source)

        # Put special sources at the top
        priority_sources = []
        if has_youtube_monitor:
            priority_sources.append('YouTube Monitor')
        if has_log_errors:
            priority_sources.append('Log Errors')
        sources = priority_sources + sources

        return {
            'total_sent': total_sent,
            'total_failed': total_failed,
            'by_platform': by_platform,
            'recent_24h': recent_24h,
            'sources': sources
        }
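
# Illustrative response shape (values made up):
#
#   {
#       "total_sent": 1240,
#       "total_failed": 3,
#       "by_platform": {"youtube": 800, "instagram": 300, "forums": 140},
#       "recent_24h": 27,
#       "sources": ["YouTube Monitor", "Log Errors", "other_source"]
#   }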


@router.delete("/notifications/{notification_id}")
@limiter.limit("100/minute")
@handle_exceptions
async def delete_notification(
    request: Request,
    notification_id: int,
    current_user: Dict = Depends(get_current_user)
):
    """Delete a single notification from history."""
    app_state = get_app_state()

    with app_state.db.get_connection(for_write=True) as conn:
        cursor = conn.cursor()

        # Check that the notification exists
        cursor.execute("SELECT id FROM notifications WHERE id = ?", (notification_id,))
        if not cursor.fetchone():
            raise RecordNotFoundError(
                "Notification not found",
                {"notification_id": notification_id}
            )

        # Delete the notification
        cursor.execute("DELETE FROM notifications WHERE id = ?", (notification_id,))
        conn.commit()

    return {
        'success': True,
        'message': 'Notification deleted',
        'notification_id': notification_id
    }
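
# Illustrative usage (host hypothetical; a missing id raises
# RecordNotFoundError, surfaced through @handle_exceptions):
#
#   curl -X DELETE -H "Authorization: Bearer $TOKEN" \
#        http://localhost:8000/api/notifications/42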


# ============================================================================
# CHANGELOG ENDPOINT
# ============================================================================

@router.get("/changelog")
@limiter.limit("100/minute")
@handle_exceptions
async def get_changelog(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Get changelog data from the JSON file."""
    changelog_path = settings.PROJECT_ROOT / "data" / "changelog.json"

    if not changelog_path.exists():
        return {"versions": []}

    with open(changelog_path, 'r', encoding='utf-8') as f:
        changelog_data = json.load(f)

    return {"versions": changelog_data}


# ============================================================================
# APPEARANCE CONFIG ENDPOINTS
# ============================================================================

class AppearanceConfigUpdate(BaseModel):
    tmdb_api_key: Optional[str] = None
    tmdb_enabled: bool = True
    tmdb_check_interval_hours: int = 12
    notify_new_appearances: bool = True
    notify_days_before: int = 1
    podcast_enabled: bool = False
    radio_enabled: bool = False
    podchaser_client_id: Optional[str] = None
    podchaser_client_secret: Optional[str] = None
    podchaser_api_key: Optional[str] = None
    podchaser_enabled: bool = False
    imdb_enabled: bool = True


@router.get("/config/appearance")
@limiter.limit("100/minute")
@handle_exceptions
async def get_appearance_config(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Get appearance tracking configuration."""
    db = get_app_state().db
    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute('''
                SELECT tmdb_api_key, tmdb_enabled, tmdb_check_interval_hours, tmdb_last_check,
                       notify_new_appearances, notify_days_before, podcast_enabled, radio_enabled,
                       podchaser_client_id, podchaser_client_secret, podchaser_api_key,
                       podchaser_enabled, podchaser_last_check, imdb_enabled
                FROM appearance_config
                WHERE id = 1
            ''')
            row = cursor.fetchone()

            if not row:
                # Initialize the config row if it does not exist yet
                cursor.execute('INSERT OR IGNORE INTO appearance_config (id) VALUES (1)')
                conn.commit()
                return {
                    "tmdb_api_key": None,
                    "tmdb_enabled": True,
                    "tmdb_check_interval_hours": 12,
                    "tmdb_last_check": None,
                    "notify_new_appearances": True,
                    "notify_days_before": 1,
                    "podcast_enabled": False,
                    "radio_enabled": False,
                    "podchaser_client_id": None,
                    "podchaser_client_secret": None,
                    "podchaser_api_key": None,
                    "podchaser_enabled": False,
                    "podchaser_last_check": None,
                    "imdb_enabled": True
                }

            return {
                "tmdb_api_key": row[0],
                "tmdb_enabled": bool(row[1]),
                "tmdb_check_interval_hours": row[2],
                "tmdb_last_check": row[3],
                "notify_new_appearances": bool(row[4]),
                "notify_days_before": row[5],
                "podcast_enabled": bool(row[6]),
                "radio_enabled": bool(row[7]),
                "podchaser_client_id": row[8] if len(row) > 8 else None,
                "podchaser_client_secret": row[9] if len(row) > 9 else None,
                "podchaser_api_key": row[10] if len(row) > 10 else None,
                "podchaser_enabled": bool(row[11]) if len(row) > 11 else False,
                "podchaser_last_check": row[12] if len(row) > 12 else None,
                "imdb_enabled": bool(row[13]) if len(row) > 13 else True
            }
    except Exception as e:
        logger.error(f"Error getting appearance config: {e}")
        raise HTTPException(status_code=500, detail=str(e))


@router.post("/config/appearance")
@limiter.limit("100/minute")
@handle_exceptions
async def update_appearance_config(
    request: Request,
    config: AppearanceConfigUpdate,
    current_user: Dict = Depends(get_current_user)
):
    """Update appearance tracking configuration."""
    db = get_app_state().db
    try:
        with db.get_connection(for_write=True) as conn:
            cursor = conn.cursor()

            # Update the singleton config row
            cursor.execute('''
                UPDATE appearance_config
                SET tmdb_api_key = ?,
                    tmdb_enabled = ?,
                    tmdb_check_interval_hours = ?,
                    notify_new_appearances = ?,
                    notify_days_before = ?,
                    podcast_enabled = ?,
                    radio_enabled = ?,
                    podchaser_client_id = ?,
                    podchaser_client_secret = ?,
                    podchaser_api_key = ?,
                    podchaser_enabled = ?,
                    imdb_enabled = ?,
                    updated_at = CURRENT_TIMESTAMP
                WHERE id = 1
            ''', (config.tmdb_api_key, config.tmdb_enabled, config.tmdb_check_interval_hours,
                  config.notify_new_appearances, config.notify_days_before,
                  config.podcast_enabled, config.radio_enabled,
                  config.podchaser_client_id, config.podchaser_client_secret,
                  config.podchaser_api_key, config.podchaser_enabled, config.imdb_enabled))

            conn.commit()

        return {
            "success": True,
            "message": "Appearance configuration updated successfully"
        }
    except Exception as e:
        logger.error(f"Error updating appearance config: {e}")
        raise HTTPException(status_code=500, detail=str(e))
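
# Illustrative request (host and key values hypothetical; unspecified fields
# fall back to the AppearanceConfigUpdate defaults):
#
#   curl -X POST http://localhost:8000/api/config/appearance \
#        -H "Authorization: Bearer $TOKEN" -H "Content-Type: application/json" \
#        -d '{"tmdb_api_key": "abc123", "tmdb_enabled": true, "notify_days_before": 2}'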