Fix DB paths, add auth to sensitive endpoints, misc bug fixes

- scheduler.py: Use full path for scheduler_state.db instead of relative name
- recycle.py: Use full path for thumbnails.db instead of relative name
- cloud_backup.py, maintenance.py, stats.py: Require admin for config/cleanup/settings endpoints
- press.py: Add auth to press image serving endpoint
- private_gallery.py: Fix _create_pg_job call and add missing secrets import
- appearances.py: Use sync httpx instead of asyncio.run for background thread HTTP call

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Todd
2026-03-30 08:25:00 -04:00
parent 7101c96b26
commit 523f91788e
8 changed files with 18 additions and 17 deletions

View File

@@ -1615,9 +1615,9 @@ def send_appearance_notification(notifier: PushoverNotifier, celebrity_name: str
# TMDb image URL format # TMDb image URL format
poster_url = f"https://image.tmdb.org/t/p/w500{poster_url_path}" poster_url = f"https://image.tmdb.org/t/p/w500{poster_url_path}"
# Download poster to temp file # Download poster to temp file (sync HTTP call since this runs in background thread)
import asyncio import httpx
response = asyncio.run(http_client.get(poster_url)) response = httpx.get(poster_url, timeout=10.0)
if response.status_code == 200: if response.status_code == 200:
# Create temp file - strip query params before getting extension # Create temp file - strip query params before getting extension

View File

@@ -25,7 +25,7 @@ from pydantic import BaseModel, Field
from slowapi import Limiter from slowapi import Limiter
from slowapi.util import get_remote_address from slowapi.util import get_remote_address
from ..core.dependencies import get_current_user, get_app_state from ..core.dependencies import get_current_user, require_admin, get_app_state
from modules.universal_logger import get_logger from modules.universal_logger import get_logger
logger = get_logger('CloudBackup') logger = get_logger('CloudBackup')
@@ -837,7 +837,7 @@ async def get_config(user=Depends(get_current_user)):
@router.put("/config") @router.put("/config")
async def update_config(update: CloudBackupConfigUpdate, user=Depends(get_current_user)): async def update_config(update: CloudBackupConfigUpdate, user=Depends(require_admin)):
"""Save cloud backup configuration and regenerate rclone.conf.""" """Save cloud backup configuration and regenerate rclone.conf."""
existing = _load_config() existing = _load_config()
update_dict = update.model_dump(exclude_unset=True) update_dict = update.model_dump(exclude_unset=True)

View File

@@ -15,7 +15,7 @@ from fastapi import APIRouter, Depends, Request, BackgroundTasks
from slowapi import Limiter from slowapi import Limiter
from slowapi.util import get_remote_address from slowapi.util import get_remote_address
from ..core.dependencies import get_current_user, get_app_state from ..core.dependencies import get_current_user, require_admin, get_app_state
from ..core.config import settings from ..core.config import settings
from ..core.responses import now_iso8601 from ..core.responses import now_iso8601
from ..core.exceptions import handle_exceptions from ..core.exceptions import handle_exceptions
@@ -63,7 +63,7 @@ async def cleanup_missing_files(
request: Request, request: Request,
background_tasks: BackgroundTasks, background_tasks: BackgroundTasks,
dry_run: bool = True, dry_run: bool = True,
current_user: Dict = Depends(get_current_user) current_user: Dict = Depends(require_admin)
): ):
""" """
Scan all database tables for file references and remove entries for missing files. Scan all database tables for file references and remove entries for missing files.

View File

@@ -1075,7 +1075,7 @@ def cache_press_image(image_url: str, use_flaresolverr: bool = False) -> Optiona
@router.get("/images/{filename}") @router.get("/images/{filename}")
async def serve_press_image(filename: str): async def serve_press_image(filename: str, current_user: Dict = Depends(get_current_user)):
"""Serve a cached press article image.""" """Serve a cached press article image."""
# Sanitize filename # Sanitize filename
if '/' in filename or '..' in filename: if '/' in filename or '..' in filename:

View File

@@ -21,6 +21,7 @@ import json
import mimetypes import mimetypes
import os import os
import re import re
import secrets
import shutil import shutil
import tempfile import tempfile
import time import time
@@ -6968,7 +6969,7 @@ async def migrate_to_chunked(
# Run migration in background thread # Run migration in background thread
job_id = f"pg_migrate_{secrets.token_hex(6)}" job_id = f"pg_migrate_{secrets.token_hex(6)}"
_update_pg_job(job_id, { _create_pg_job(job_id, {
'status': 'running', 'status': 'running',
'total_files': len(to_migrate), 'total_files': len(to_migrate),
'processed_files': 0, 'processed_files': 0,

View File

@@ -490,7 +490,7 @@ def _get_or_create_thumbnail(file_path: Path, media_type: str, content_hash: str
from datetime import datetime from datetime import datetime
try: try:
with sqlite3.connect('thumbnails', timeout=30.0) as conn: with sqlite3.connect(str(settings.PROJECT_ROOT / 'database' / 'thumbnails.db'), timeout=30.0) as conn:
cursor = conn.cursor() cursor = conn.cursor()
# 1. Try content hash first (new method - survives file moves) # 1. Try content hash first (new method - survives file moves)
@@ -546,7 +546,7 @@ def _get_or_create_thumbnail(file_path: Path, media_type: str, content_hash: str
file_mtime = file_path.stat().st_mtime if file_path.exists() else None file_mtime = file_path.stat().st_mtime if file_path.exists() else None
# Compute file_hash if not provided # Compute file_hash if not provided
thumb_file_hash = content_hash if content_hash else hashlib.sha256(str(file_path).encode()).hexdigest() thumb_file_hash = content_hash if content_hash else hashlib.sha256(str(file_path).encode()).hexdigest()
with sqlite3.connect('thumbnails') as conn: with sqlite3.connect(str(settings.PROJECT_ROOT / 'database' / 'thumbnails.db')) as conn:
conn.execute(""" conn.execute("""
INSERT OR REPLACE INTO thumbnails INSERT OR REPLACE INTO thumbnails
(file_hash, file_path, thumbnail_data, created_at, file_mtime) (file_hash, file_path, thumbnail_data, created_at, file_mtime)

View File

@@ -91,7 +91,7 @@ async def get_scheduler_status(
if forum_cfg.get('enabled', False): if forum_cfg.get('enabled', False):
enabled_forums.add(forum_cfg.get('name')) enabled_forums.add(forum_cfg.get('name'))
with sqlite3.connect('scheduler_state') as sched_conn: with sqlite3.connect(str(settings.PROJECT_ROOT / 'database' / 'scheduler_state.db')) as sched_conn:
cursor = sched_conn.cursor() cursor = sched_conn.cursor()
# Get all tasks # Get all tasks
@@ -332,7 +332,7 @@ async def pause_scheduler_task(
"""Pause a specific scheduler task.""" """Pause a specific scheduler task."""
app_state = get_app_state() app_state = get_app_state()
with sqlite3.connect('scheduler_state') as sched_conn: with sqlite3.connect(str(settings.PROJECT_ROOT / 'database' / 'scheduler_state.db')) as sched_conn:
cursor = sched_conn.cursor() cursor = sched_conn.cursor()
cursor.execute(""" cursor.execute("""
@@ -372,7 +372,7 @@ async def resume_scheduler_task(
"""Resume a paused scheduler task.""" """Resume a paused scheduler task."""
app_state = get_app_state() app_state = get_app_state()
with sqlite3.connect('scheduler_state') as sched_conn: with sqlite3.connect(str(settings.PROJECT_ROOT / 'database' / 'scheduler_state.db')) as sched_conn:
cursor = sched_conn.cursor() cursor = sched_conn.cursor()
cursor.execute(""" cursor.execute("""
@@ -412,7 +412,7 @@ async def skip_next_run(
"""Skip the next scheduled run by advancing next_run time.""" """Skip the next scheduled run by advancing next_run time."""
app_state = get_app_state() app_state = get_app_state()
with sqlite3.connect('scheduler_state') as sched_conn: with sqlite3.connect(str(settings.PROJECT_ROOT / 'database' / 'scheduler_state.db')) as sched_conn:
cursor = sched_conn.cursor() cursor = sched_conn.cursor()
# Get current task info # Get current task info
@@ -480,7 +480,7 @@ async def reschedule_task(
except ValueError: except ValueError:
raise HTTPException(status_code=400, detail="Invalid datetime format") raise HTTPException(status_code=400, detail="Invalid datetime format")
with sqlite3.connect('scheduler_state') as sched_conn: with sqlite3.connect(str(settings.PROJECT_ROOT / 'database' / 'scheduler_state.db')) as sched_conn:
cursor = sched_conn.cursor() cursor = sched_conn.cursor()
cursor.execute( cursor.execute(
"UPDATE scheduler_state SET next_run = ? WHERE task_id = ?", "UPDATE scheduler_state SET next_run = ? WHERE task_id = ?",

View File

@@ -396,7 +396,7 @@ async def update_setting(
request: Request, request: Request,
key: str, key: str,
body: Dict, body: Dict,
current_user: Dict = Depends(get_current_user) current_user: Dict = Depends(require_admin)
): ):
"""Update a specific setting value.""" """Update a specific setting value."""
app_state = get_app_state() app_state = get_app_state()