Files
media-downloader/web/backend/routers/video_queue.py
Todd 0d7b2b1aab Initial commit
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-29 22:42:55 -04:00

1885 lines
64 KiB
Python

"""
Video Download Queue Router
Unified queue for managing video downloads from all sources.
Supports:
- Adding/removing videos from queue
- Editing video metadata (title, date)
- Priority management
- Download progress tracking
- Queue processing with start/pause controls
- Integration with celebrity discovery and manual downloads
"""
import asyncio
import json
import re
import subprocess
import threading
from concurrent.futures import ThreadPoolExecutor
from datetime import datetime
from typing import Dict, List, Optional
from fastapi import APIRouter, BackgroundTasks, Depends, Query, Request
from pydantic import BaseModel
from slowapi import Limiter
from slowapi.util import get_remote_address
from ..core.dependencies import get_current_user, get_app_state
from ..core.config import settings
from ..core.exceptions import handle_exceptions, RecordNotFoundError, ValidationError
from modules.universal_logger import get_logger
# Module-level singletons: logger, the router mounted at /api/video-queue,
# and a rate limiter keyed by the caller's remote address.
logger = get_logger('API')
router = APIRouter(prefix="/api/video-queue", tags=["Video Queue"])
limiter = Limiter(key_func=get_remote_address)
# ============================================================================
# THUMBNAIL CACHING HELPER
# ============================================================================
async def cache_queue_thumbnail(platform: str, video_id: str, thumbnail_url: str, db) -> None:
    """Best-effort pre-fetch of a queue item's thumbnail into the database.

    Downloads the image and stores the raw bytes in
    video_download_queue.thumbnail_data so the Download Queue page renders
    without hitting the remote host. Every failure is swallowed on purpose:
    a missing thumbnail must never break the calling request.
    """
    if not thumbnail_url:
        return
    try:
        import httpx
        browser_headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
        }
        async with httpx.AsyncClient(timeout=10.0) as client:
            response = await client.get(thumbnail_url, headers=browser_headers)
            # Only persist a real payload (200 with a non-empty body)
            if response.status_code == 200 and response.content:
                with db.get_connection(for_write=True) as conn:
                    conn.cursor().execute('''
                UPDATE video_download_queue
                SET thumbnail_data = ?
                WHERE platform = ? AND video_id = ?
                ''', (response.content, platform, video_id))
                    conn.commit()
    except Exception:
        pass  # Caching is optional, don't fail the request
# ============================================================================
# QUEUE PROCESSOR STATE
# ============================================================================
class QueueProcessor:
    """Thread-safe state holder for the background download processor.

    Every mutator takes the internal lock so the snapshot returned by
    get_status() is always internally consistent.
    """
    def __init__(self):
        self.is_running = False
        self.is_paused = False
        self.current_item_id: Optional[int] = None
        self.current_video_id: Optional[str] = None
        self.current_title: Optional[str] = None
        self.processed_count = 0
        self.failed_count = 0
        self.started_at: Optional[str] = None
        self._task: Optional[asyncio.Task] = None
        self._lock = threading.Lock()

    def get_status(self) -> Dict:
        """Return a consistent snapshot of the processor state."""
        snapshot_fields = (
            "is_running", "is_paused", "current_item_id", "current_video_id",
            "current_title", "processed_count", "failed_count", "started_at",
        )
        with self._lock:
            return {name: getattr(self, name) for name in snapshot_fields}

    def start(self):
        """Mark the processor running (unpaused) and stamp the start time."""
        with self._lock:
            self.is_running = True
            self.is_paused = False
            self.started_at = datetime.now().isoformat()

    def pause(self):
        with self._lock:
            self.is_paused = True

    def resume(self):
        with self._lock:
            self.is_paused = False

    def stop(self):
        """Stop processing and clear the currently-active item."""
        with self._lock:
            self.is_running = False
            self.is_paused = False
            self.current_item_id = None
            self.current_video_id = None
            self.current_title = None

    def reset_counts(self):
        """Zero the processed/failed counters (e.g. for a fresh run)."""
        with self._lock:
            self.processed_count = 0
            self.failed_count = 0

    def set_current(self, item_id: int, video_id: str, title: str):
        """Record which queue item is being downloaded right now."""
        with self._lock:
            self.current_item_id = item_id
            self.current_video_id = video_id
            self.current_title = title

    def increment_processed(self):
        with self._lock:
            self.processed_count += 1

    def increment_failed(self):
        with self._lock:
            self.failed_count += 1
# Global queue processor instance
# Module-level singleton shared by every endpoint in this router.
queue_processor = QueueProcessor()
def trigger_immich_scan(app_state) -> bool:
    """Kick off an Immich library scan when the integration is configured.

    Returns True only when the scan endpoint accepted the request; False in
    every other case (integration disabled, incomplete config, HTTP error,
    or timeout).
    """
    import requests
    cfg = app_state.settings.get('immich', {})
    # Both the integration itself and the post-download scan must be on.
    if not cfg.get('enabled'):
        logger.debug("Immich not enabled, skipping scan", module="VideoQueue")
        return False
    if not cfg.get('scan_after_download'):
        logger.debug("Immich scan_after_download not enabled, skipping scan", module="VideoQueue")
        return False
    api_url = cfg.get('api_url')
    api_key = cfg.get('api_key')
    library_id = cfg.get('library_id')
    if not all([api_url, api_key, library_id]):
        logger.warning("Immich config incomplete (missing api_url, api_key, or library_id)", module="VideoQueue")
        return False
    try:
        resp = requests.post(
            f"{api_url}/libraries/{library_id}/scan",
            headers={'X-API-KEY': api_key},
            timeout=30
        )
        # Immich may answer 200/201/204 depending on version
        if resp.status_code in (200, 201, 204):
            logger.info(f"Successfully triggered Immich scan for library {library_id}", module="VideoQueue")
            return True
        logger.warning(f"Immich scan trigger failed: {resp.status_code}", module="VideoQueue")
        return False
    except requests.Timeout:
        logger.warning("Immich scan request timed out after 30 seconds", module="VideoQueue")
        return False
    except Exception as e:
        logger.error(f"Error triggering Immich scan: {e}", module="VideoQueue")
        return False
# ============================================================================
# PYDANTIC MODELS
# ============================================================================
class QueueItemAdd(BaseModel):
    """Request payload for adding one video to the download queue."""
    platform: str = 'youtube'            # source platform key, e.g. 'youtube', 'dailymotion'
    video_id: str                        # platform-native video identifier
    url: str                             # full video URL to download
    title: str                           # title as reported by the platform
    custom_title: Optional[str] = None   # user override; preferred over title when set
    channel_name: Optional[str] = None
    thumbnail: Optional[str] = None      # thumbnail URL; pre-cached on insert
    duration: Optional[int] = None       # presumably seconds — TODO confirm against producers
    upload_date: Optional[str] = None
    custom_date: Optional[str] = None    # user override; preferred over upload_date when set
    view_count: Optional[int] = None
    description: Optional[str] = None
    source_type: Optional[str] = None  # 'celebrity', 'manual', 'search'
    source_id: Optional[int] = None      # id of the originating record (e.g. celebrity profile)
    source_name: Optional[str] = None
    priority: int = 5                    # lower values are downloaded first (processor sorts ASC)
    metadata: Optional[Dict] = None      # arbitrary extra data, persisted as JSON text
class QueueItemUpdate(BaseModel):
    """Partial update for a queue item; only non-None fields are applied."""
    custom_title: Optional[str] = None  # empty string clears the override (stored as NULL)
    custom_date: Optional[str] = None   # empty string clears the override (stored as NULL)
    priority: Optional[int] = None
    status: Optional[str] = None
class BulkQueueAdd(BaseModel):
    """Request payload for enqueueing several videos at once."""
    items: List[QueueItemAdd]
class BulkQueueAction(BaseModel):
    """Bulk operation applied to a set of queue item ids."""
    ids: List[int]
    action: str  # 'remove', 'pause', 'resume', 'retry', 'prioritize'
    priority: Optional[int] = None  # required only when action == 'prioritize'
# ============================================================================
# HELPER FUNCTIONS
# ============================================================================
def format_queue_item(row) -> Dict:
    """Shape a video_download_queue row into the API response dict.

    Adds derived display_title/display_date fields (custom values win over
    platform values) and tolerates the max_resolution/max_width columns
    being absent on connections opened before the migration that added them.
    """
    def _optional_column(name):
        # Column may not exist in older schemas; treat missing as NULL.
        try:
            return row[name]
        except (IndexError, KeyError):
            return None

    original_title = row['title']
    override_title = row['custom_title']
    original_date = row['upload_date']
    override_date = row['custom_date']
    raw_metadata = row['metadata']
    return {
        'id': row['id'],
        'platform': row['platform'],
        'video_id': row['video_id'],
        'url': row['url'],
        'title': original_title,
        'custom_title': override_title,
        'display_title': override_title or original_title,
        'channel_name': row['channel_name'],
        'thumbnail': row['thumbnail'],
        'duration': row['duration'],
        'upload_date': original_date,
        'custom_date': override_date,
        'display_date': override_date or original_date,
        'view_count': row['view_count'],
        'max_resolution': _optional_column('max_resolution'),
        'max_width': _optional_column('max_width'),
        'description': row['description'],
        'source_type': row['source_type'],
        'source_id': row['source_id'],
        'source_name': row['source_name'],
        'priority': row['priority'],
        'status': row['status'],
        'progress': row['progress'],
        'file_path': row['file_path'],
        'file_size': row['file_size'],
        'error_message': row['error_message'],
        'attempts': row['attempts'],
        'added_at': row['added_at'],
        'started_at': row['started_at'],
        'completed_at': row['completed_at'],
        'metadata': json.loads(raw_metadata) if raw_metadata else None
    }
# ============================================================================
# QUEUE ENDPOINTS
# ============================================================================
@router.get("")
@limiter.limit("60/minute")
@handle_exceptions
async def get_queue(
    request: Request,
    status: Optional[str] = Query(None),
    source_type: Optional[str] = Query(None),
    search: Optional[str] = Query(None),
    sort: Optional[str] = Query('download_order'),
    limit: int = Query(50, ge=1, le=200),
    offset: int = Query(0, ge=0),
    current_user: Dict = Depends(get_current_user)
):
    """Get video download queue with filters.

    Query params:
        status: queue status filter; 'pending' also matches 'downloading'.
        source_type: origin filter ('celebrity', 'manual', 'search').
        search: substring match on title, url, platform, channel_name.
        sort: 'recently_added' (newest first) or anything else for
            download order (priority ASC, then oldest first — matches
            the processor's pick order).
        limit/offset: pagination window.

    Returns the page of items, the filtered total, and global per-status
    counts. NOTE: stats['total'] reflects the active filters, while the
    per-status counts are computed over the whole table.
    """
    app_state = get_app_state()
    with app_state.db.get_connection() as conn:
        cursor = conn.cursor()
        # Build the item query and its matching COUNT query in lockstep so
        # both see identical filters; `params` is shared between them.
        query = 'SELECT * FROM video_download_queue WHERE 1=1'
        count_query = 'SELECT COUNT(*) FROM video_download_queue WHERE 1=1'
        params = []
        if status:
            if status == 'pending':
                # Pending filter shows both pending and downloading items
                query += ' AND status IN (?, ?)'
                count_query += ' AND status IN (?, ?)'
                params.extend(['pending', 'downloading'])
            else:
                query += ' AND status = ?'
                count_query += ' AND status = ?'
                params.append(status)
        if source_type:
            query += ' AND source_type = ?'
            count_query += ' AND source_type = ?'
            params.append(source_type)
        if search:
            # Search in title, url, platform, and channel_name
            search_pattern = f'%{search}%'
            query += ' AND (title LIKE ? OR url LIKE ? OR platform LIKE ? OR channel_name LIKE ?)'
            count_query += ' AND (title LIKE ? OR url LIKE ? OR platform LIKE ? OR channel_name LIKE ?)'
            params.extend([search_pattern, search_pattern, search_pattern, search_pattern])
        # Get total count. Must run BEFORE limit/offset are appended to
        # params, since the count query has no LIMIT placeholders.
        cursor.execute(count_query, params)
        total = cursor.fetchone()[0]
        # Get items - sort order based on parameter
        if sort == 'recently_added':
            # Recently added first
            query += ' ORDER BY added_at DESC LIMIT ? OFFSET ?'
        else:
            # Download order (matches processor: priority ASC, oldest first)
            query += ' ORDER BY priority ASC, added_at ASC LIMIT ? OFFSET ?'
        params.extend([limit, offset])
        cursor.execute(query, params)
        items = [format_queue_item(row) for row in cursor.fetchall()]
        # Get stats (global, unfiltered per-status counts)
        cursor.execute('''
        SELECT status, COUNT(*) as count
        FROM video_download_queue
        GROUP BY status
        ''')
        status_counts = {row['status']: row['count'] for row in cursor.fetchall()}
        # Build stats object with all expected fields
        stats = {
            "total": total,
            "pending": status_counts.get('pending', 0),
            "downloading": status_counts.get('downloading', 0),
            "completed": status_counts.get('completed', 0),
            "failed": status_counts.get('failed', 0),
            "paused": status_counts.get('paused', 0)
        }
        return {
            "success": True,
            "items": items,
            "total": total,
            "stats": stats
        }
@router.get("/stats")
@limiter.limit("60/minute")
@handle_exceptions
async def get_queue_stats(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Return aggregate queue statistics: totals plus per-status and
    per-source breakdowns and the count of items added in the last day."""
    app_state = get_app_state()
    with app_state.db.get_connection() as conn:
        cursor = conn.cursor()
        # Per-status breakdown
        cursor.execute('''
        SELECT status, COUNT(*) as count
        FROM video_download_queue
        GROUP BY status
        ''')
        by_status = {r['status']: r['count'] for r in cursor.fetchall()}
        # Per-source breakdown; NULL source_type is reported as 'unknown'
        cursor.execute('''
        SELECT source_type, COUNT(*) as count
        FROM video_download_queue
        GROUP BY source_type
        ''')
        by_source = {r['source_type'] or 'unknown': r['count'] for r in cursor.fetchall()}
        # Overall total and items added within the last 24 hours
        cursor.execute('SELECT COUNT(*) FROM video_download_queue')
        total = cursor.fetchone()[0]
        cursor.execute('''
        SELECT COUNT(*) FROM video_download_queue
        WHERE added_at > datetime('now', '-24 hours')
        ''')
        last_24h = cursor.fetchone()[0]
    stats = {
        "total": total,
        "by_status": by_status,
        "by_source": by_source,
        "pending": by_status.get('pending', 0),
        "downloading": by_status.get('downloading', 0),
        "completed": by_status.get('completed', 0),
        "failed": by_status.get('failed', 0),
        "last_24h": last_24h
    }
    return {"success": True, "stats": stats}
# ============================================================================
# QUEUE SETTINGS ENDPOINTS (must be before /{queue_id} to avoid route conflict)
# ============================================================================
class QueueSettingsUpdate(BaseModel):
    """Partial update for queue settings; only non-None fields are applied."""
    download_delay_seconds: Optional[int] = None  # clamped to 0-120 by the endpoint
    base_directory: Optional[str] = None          # created on disk if missing
    stop_on_cookie_error: Optional[bool] = None
    send_cookie_notification: Optional[bool] = None
    auto_start_on_restart: Optional[bool] = None
@router.get("/settings")
@limiter.limit("60/minute")
@handle_exceptions
async def get_queue_settings(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Return queue settings: stored values overlaid on the defaults,
    with base_directory also honoring the legacy 'download_settings' row."""
    app_state = get_app_state()
    merged = {
        "download_delay_seconds": 15,
        "base_directory": "/opt/immich/md",
        "stop_on_cookie_error": True,
        "send_cookie_notification": True,
        "auto_start_on_restart": False
    }
    with app_state.db.get_connection() as conn:
        cursor = conn.cursor()
        # Primary storage location
        cursor.execute("SELECT value FROM settings WHERE key = 'video_queue'")
        primary = cursor.fetchone()
        if primary:
            try:
                merged.update(json.loads(primary[0]))
            except Exception:
                pass  # corrupt stored JSON: keep defaults
        # Legacy location: download_settings may carry base_directory
        cursor.execute("SELECT value FROM settings WHERE key = 'download_settings'")
        legacy = cursor.fetchone()
        if legacy:
            try:
                legacy_values = json.loads(legacy[0])
                if 'base_directory' in legacy_values:
                    merged['base_directory'] = legacy_values['base_directory']
            except Exception:
                pass
    return {
        "success": True,
        "settings": merged
    }
@router.post("/settings")
@limiter.limit("30/minute")
@handle_exceptions
async def update_queue_settings(
    request: Request,
    update: QueueSettingsUpdate,
    current_user: Dict = Depends(get_current_user)
):
    """Update video queue settings.

    Merges the submitted fields over the stored settings, validates them,
    and persists to the 'video_queue' settings row (and mirrors
    base_directory into 'download_settings' for universal_video_downloader
    compatibility).

    Raises:
        ValidationError: if base_directory cannot be created on disk.
    """
    import os
    app_state = get_app_state()
    # Defaults, then overlay whatever is already stored
    queue_settings = {
        "download_delay_seconds": 15,
        "base_directory": "/opt/immich/md",
        "stop_on_cookie_error": True,
        "send_cookie_notification": True,
        "auto_start_on_restart": False
    }
    with app_state.db.get_connection(for_write=True) as conn:
        cursor = conn.cursor()
        cursor.execute("SELECT value FROM settings WHERE key = 'video_queue'")
        row = cursor.fetchone()
        if row:
            try:
                queue_settings.update(json.loads(row[0]))
            except Exception:
                pass  # corrupt stored JSON: fall back to defaults
        # Apply submitted fields
        if update.download_delay_seconds is not None:
            # Clamp to a sane 0-120 second window
            queue_settings['download_delay_seconds'] = max(0, min(120, update.download_delay_seconds))
        if update.base_directory is not None:
            # Validate path exists or can be created
            base_dir = update.base_directory.strip()
            if base_dir:
                try:
                    os.makedirs(base_dir, exist_ok=True)
                    queue_settings['base_directory'] = base_dir
                except Exception as e:
                    # FIX: raise ValidationError (as the rest of this router
                    # does) instead of a bare ValueError, so the API error
                    # handler can surface it as a client error.
                    raise ValidationError(f"Cannot create directory {base_dir}: {e}")
        if update.stop_on_cookie_error is not None:
            queue_settings['stop_on_cookie_error'] = update.stop_on_cookie_error
        if update.send_cookie_notification is not None:
            queue_settings['send_cookie_notification'] = update.send_cookie_notification
        if update.auto_start_on_restart is not None:
            queue_settings['auto_start_on_restart'] = update.auto_start_on_restart
        # Save settings to video_queue key
        cursor.execute('''
        INSERT OR REPLACE INTO settings (key, value, value_type, category, description)
        VALUES ('video_queue', ?, 'json', 'queue', 'Video download queue settings')
        ''', (json.dumps(queue_settings),))
        # Also save to download_settings for universal_video_downloader compatibility
        download_settings = {"base_directory": queue_settings['base_directory']}
        cursor.execute('''
        INSERT OR REPLACE INTO settings (key, value, value_type, category, description)
        VALUES ('download_settings', ?, 'json', 'downloads', 'Download settings')
        ''', (json.dumps(download_settings),))
        conn.commit()
    logger.info(f"Queue settings updated: delay={queue_settings['download_delay_seconds']}s, base_dir={queue_settings['base_directory']}", module="VideoQueue")
    return {
        "success": True,
        "settings": queue_settings
    }
# Default anti-bot settings
# Conservative pacing/retry defaults for yt-dlp (see the 'antibot_settings'
# row description below); stored values overlay these when present.
DEFAULT_ANTIBOT_SETTINGS = {
    'browser': 'edge',           # one of: edge, chrome, firefox, safari, custom
    'custom_user_agent': '',     # presumably used when browser == 'custom' — confirm in downloader
    'limit_rate': '2M',
    'throttled_rate': '100K',
    'sleep_requests_min': 1,     # seconds; clamped >= 0 on update
    'sleep_requests_max': 3,     # clamped >= sleep_requests_min on update
    'retries': 10,               # clamped >= 1 on update
    'fragment_retries': 10,      # clamped >= 1 on update
    'concurrent_fragments': 1,   # clamped >= 1 on update
    'socket_timeout': 30,        # seconds; clamped >= 10 on update
    'enabled': True,
}
@router.get("/antibot-settings")
@limiter.limit("60/minute")
@handle_exceptions
async def get_antibot_settings(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Return anti-bot settings: stored values overlaid on the defaults."""
    app_state = get_app_state()
    merged = DEFAULT_ANTIBOT_SETTINGS.copy()
    with app_state.db.get_connection() as conn:
        cursor = conn.cursor()
        cursor.execute("SELECT value FROM settings WHERE key = 'antibot_settings'")
        row = cursor.fetchone()
    if row:
        try:
            merged.update(json.loads(row[0]))
        except Exception:
            pass  # corrupt stored JSON: fall back to defaults
    return {
        "success": True,
        "settings": merged
    }
class AntibotSettingsUpdate(BaseModel):
    """Partial update for anti-bot settings; only non-None fields are applied."""
    browser: Optional[str] = None             # edge/chrome/firefox/safari/custom
    custom_user_agent: Optional[str] = None
    limit_rate: Optional[str] = None
    throttled_rate: Optional[str] = None
    sleep_requests_min: Optional[int] = None  # clamped >= 0
    sleep_requests_max: Optional[int] = None  # clamped >= sleep_requests_min
    retries: Optional[int] = None             # clamped >= 1
    fragment_retries: Optional[int] = None    # clamped >= 1
    concurrent_fragments: Optional[int] = None  # clamped >= 1
    socket_timeout: Optional[int] = None      # clamped >= 10
    enabled: Optional[bool] = None
@router.post("/antibot-settings")
@limiter.limit("20/minute")
@handle_exceptions
async def update_antibot_settings(
    request: Request,
    update: AntibotSettingsUpdate,
    current_user: Dict = Depends(get_current_user)
):
    """Merge, normalize, and persist anti-bot protection settings."""
    app_state = get_app_state()
    merged = DEFAULT_ANTIBOT_SETTINGS.copy()
    with app_state.db.get_connection(for_write=True) as conn:
        cursor = conn.cursor()
        cursor.execute("SELECT value FROM settings WHERE key = 'antibot_settings'")
        row = cursor.fetchone()
        if row:
            try:
                merged.update(json.loads(row[0]))
            except Exception:
                pass  # corrupt stored JSON: defaults win
        # Overlay only the fields the client actually sent
        merged.update(update.model_dump(exclude_none=True))
        # Normalize out-of-range values instead of rejecting the request
        if merged.get('browser') not in ('edge', 'chrome', 'firefox', 'safari', 'custom'):
            merged['browser'] = 'edge'
        merged['sleep_requests_min'] = max(0, merged.get('sleep_requests_min', 0))
        merged['sleep_requests_max'] = max(merged.get('sleep_requests_max', 0), merged['sleep_requests_min'])
        merged['retries'] = max(1, merged.get('retries', 1))
        merged['fragment_retries'] = max(1, merged.get('fragment_retries', 1))
        merged['concurrent_fragments'] = max(1, merged.get('concurrent_fragments', 1))
        merged['socket_timeout'] = max(10, merged.get('socket_timeout', 10))
        # Persist the merged settings
        cursor.execute('''
        INSERT OR REPLACE INTO settings (key, value, value_type, category, description)
        VALUES ('antibot_settings', ?, 'json', 'queue', 'Anti-bot protection settings for yt-dlp')
        ''', (json.dumps(merged),))
        conn.commit()
    logger.info(f"Anti-bot settings updated: browser={merged['browser']}, enabled={merged['enabled']}", module="VideoQueue")
    return {
        "success": True,
        "settings": merged
    }
@router.post("/add")
@limiter.limit("30/minute")
@handle_exceptions
async def add_to_queue(
    request: Request,
    item: QueueItemAdd,
    current_user: Dict = Depends(get_current_user)
):
    """Add a single video to the download queue.

    Returns success=False (not an error) when the video is already queued.
    """
    app_state = get_app_state()
    insert_values = (
        item.platform, item.video_id, item.url, item.title, item.custom_title,
        item.channel_name, item.thumbnail, item.duration, item.upload_date,
        item.custom_date, item.view_count, item.description, item.source_type,
        item.source_id, item.source_name, item.priority,
        json.dumps(item.metadata) if item.metadata else None
    )
    with app_state.db.get_connection(for_write=True) as conn:
        cursor = conn.cursor()
        try:
            cursor.execute('''
            INSERT INTO video_download_queue (
                platform, video_id, url, title, custom_title, channel_name,
                thumbnail, duration, upload_date, custom_date, view_count,
                description, source_type, source_id, source_name, priority, metadata
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            ''', insert_values)
            conn.commit()
            queue_id = cursor.lastrowid
        except Exception as e:
            # Duplicate rows are rejected by a UNIQUE constraint
            if "UNIQUE constraint failed" in str(e):
                return {
                    "success": False,
                    "message": "Video already in queue"
                }
            raise
    # Pre-cache thumbnail for faster page loading
    if item.thumbnail:
        await cache_queue_thumbnail(item.platform, item.video_id, item.thumbnail, app_state.db)
    return {
        "success": True,
        "message": "Added to queue",
        "id": queue_id
    }
@router.post("/add-bulk")
@limiter.limit("10/minute")
@handle_exceptions
async def add_bulk_to_queue(
    request: Request,
    data: BulkQueueAdd,
    current_user: Dict = Depends(get_current_user)
):
    """Add multiple videos to the queue; duplicates are counted as skipped."""
    app_state = get_app_state()
    added = 0
    skipped = 0
    thumbs_to_cache = []  # (platform, video_id, thumbnail_url) of inserted rows
    with app_state.db.get_connection(for_write=True) as conn:
        cursor = conn.cursor()
        for entry in data.items:
            try:
                cursor.execute('''
                INSERT INTO video_download_queue (
                    platform, video_id, url, title, custom_title, channel_name,
                    thumbnail, duration, upload_date, custom_date, view_count,
                    description, source_type, source_id, source_name, priority, metadata
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                ''', (
                    entry.platform, entry.video_id, entry.url, entry.title, entry.custom_title,
                    entry.channel_name, entry.thumbnail, entry.duration, entry.upload_date,
                    entry.custom_date, entry.view_count, entry.description, entry.source_type,
                    entry.source_id, entry.source_name, entry.priority,
                    json.dumps(entry.metadata) if entry.metadata else None
                ))
            except Exception:
                # Most likely the UNIQUE constraint on duplicates
                skipped += 1
                continue
            added += 1
            if entry.thumbnail:
                thumbs_to_cache.append((entry.platform, entry.video_id, entry.thumbnail))
        conn.commit()
    # Pre-cache thumbnails for faster page loading (async, non-blocking)
    for platform, video_id, thumbnail_url in thumbs_to_cache:
        await cache_queue_thumbnail(platform, video_id, thumbnail_url, app_state.db)
    return {
        "success": True,
        "message": f"Added {added} videos, {skipped} already in queue",
        "added": added,
        "skipped": skipped
    }
@router.get("/{queue_id}")
@limiter.limit("60/minute")
@handle_exceptions
async def get_queue_item(
    request: Request,
    queue_id: int,
    current_user: Dict = Depends(get_current_user)
):
    """Fetch one queue item by primary key; 404 when the id is unknown."""
    app_state = get_app_state()
    with app_state.db.get_connection() as conn:
        cur = conn.cursor()
        cur.execute('SELECT * FROM video_download_queue WHERE id = ?', (queue_id,))
        row = cur.fetchone()
    if not row:
        raise RecordNotFoundError("Queue item not found")
    return {
        "success": True,
        "item": format_queue_item(row)
    }
@router.get("/{queue_id}/stream")
async def stream_downloaded_video(
    request: Request,
    queue_id: int,
    current_user: Dict = Depends(get_current_user)
):
    """
    Stream a downloaded video file.
    Returns the video file with proper Range support for seeking.
    Only works for completed downloads that have a file_path.

    Raises:
        RecordNotFoundError: unknown queue id or file missing on disk.
        ValidationError: download not completed yet.
    """
    import os
    from starlette.responses import StreamingResponse
    app_state = get_app_state()
    with app_state.db.get_connection() as conn:
        cursor = conn.cursor()
        cursor.execute('SELECT file_path, status FROM video_download_queue WHERE id = ?', (queue_id,))
        row = cursor.fetchone()
        if not row:
            raise RecordNotFoundError("Queue item not found")
        file_path = row['file_path']
        status = row['status']
    if status != 'completed':
        raise ValidationError("Video has not been downloaded yet")
    if not file_path or not os.path.exists(file_path):
        raise RecordNotFoundError("Downloaded file not found")
    file_size = os.path.getsize(file_path)
    # Determine content type from the extension, defaulting to mp4
    ext = os.path.splitext(file_path)[1].lower()
    content_type = {
        '.mp4': 'video/mp4',
        '.webm': 'video/webm',
        '.mkv': 'video/x-matroska',
        '.mov': 'video/quicktime',
        '.avi': 'video/x-msvideo',
    }.get(ext, 'video/mp4')
    # Handle Range requests for seeking ("bytes=start-end", RFC 7233)
    range_header = request.headers.get("Range")
    start = 0
    end = file_size - 1
    if range_header:
        try:
            range_match = range_header.replace("bytes=", "").split("-")
            if range_match[0]:
                start = int(range_match[0])
            if len(range_match) > 1 and range_match[1]:
                end = min(int(range_match[1]), file_size - 1)
        except ValueError:
            # FIX: a malformed Range header used to raise an unhandled
            # ValueError (500); fall back to serving the whole file instead.
            range_header = None
            start, end = 0, file_size - 1
        else:
            # FIX: clamp so an out-of-range or inverted request can't
            # produce a negative Content-Length.
            start = max(0, min(start, file_size - 1))
            if end < start:
                end = file_size - 1
    content_length = end - start + 1
    def file_stream_generator():
        # Stream the requested byte window in 64 KiB chunks
        with open(file_path, 'rb') as f:
            f.seek(start)
            remaining = content_length
            while remaining > 0:
                chunk = f.read(min(65536, remaining))
                if not chunk:
                    break
                remaining -= len(chunk)
                yield chunk
    headers = {
        "Accept-Ranges": "bytes",
        "Content-Length": str(content_length),
        "Cache-Control": "private, max-age=3600",
    }
    if range_header:
        # Partial content response for a (valid) Range request
        headers["Content-Range"] = f"bytes {start}-{end}/{file_size}"
        return StreamingResponse(
            file_stream_generator(),
            status_code=206,
            media_type=content_type,
            headers=headers
        )
    return StreamingResponse(
        file_stream_generator(),
        media_type=content_type,
        headers=headers
    )
@router.patch("/{queue_id}")
@limiter.limit("30/minute")
@handle_exceptions
async def update_queue_item(
    request: Request,
    queue_id: int,
    update: QueueItemUpdate,
    current_user: Dict = Depends(get_current_user)
):
    """Edit a queue item's custom title, custom date, priority, or status."""
    app_state = get_app_state()
    assignments = []
    values = []
    # Empty strings for the custom fields are normalized to NULL
    # (i.e. an empty value clears the override).
    if update.custom_title is not None:
        assignments.append('custom_title = ?')
        values.append(update.custom_title or None)
    if update.custom_date is not None:
        assignments.append('custom_date = ?')
        values.append(update.custom_date or None)
    if update.priority is not None:
        assignments.append('priority = ?')
        values.append(update.priority)
    if update.status is not None:
        assignments.append('status = ?')
        values.append(update.status)
    if not assignments:
        raise ValidationError("No fields to update")
    with app_state.db.get_connection(for_write=True) as conn:
        cursor = conn.cursor()
        cursor.execute(
            f"UPDATE video_download_queue SET {', '.join(assignments)} WHERE id = ?",
            values + [queue_id]
        )
        if cursor.rowcount == 0:
            raise RecordNotFoundError("Queue item not found")
        conn.commit()
        # Return the freshly updated row
        cursor.execute('SELECT * FROM video_download_queue WHERE id = ?', (queue_id,))
        row = cursor.fetchone()
    return {
        "success": True,
        "message": "Queue item updated",
        "item": format_queue_item(row)
    }
@router.delete("/{queue_id}")
@limiter.limit("30/minute")
@handle_exceptions
async def remove_from_queue(
    request: Request,
    queue_id: int,
    current_user: Dict = Depends(get_current_user)
):
    """Delete a single queue row; 404 when the id is unknown."""
    app_state = get_app_state()
    with app_state.db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        cur.execute('DELETE FROM video_download_queue WHERE id = ?', (queue_id,))
        if not cur.rowcount:
            raise RecordNotFoundError("Queue item not found")
        conn.commit()
    return {"success": True, "message": "Removed from queue"}
@router.post("/bulk-action")
@limiter.limit("10/minute")
@handle_exceptions
async def bulk_queue_action(
    request: Request,
    data: BulkQueueAction,
    current_user: Dict = Depends(get_current_user)
):
    """Perform bulk actions on queue items.

    Supported actions: 'remove', 'pause' (pending->paused), 'resume'
    (paused->pending), 'retry' (failed->pending, counters reset), and
    'prioritize' (requires data.priority). Unknown actions affect 0 items.
    """
    app_state = get_app_state()
    # FIX: an empty id list would render "IN ()", which SQLite rejects with
    # a syntax error (-> 500). Treat it as a no-op instead.
    if not data.ids:
        return {
            "success": True,
            "message": f"{data.action} applied to 0 items",
            "affected": 0
        }
    with app_state.db.get_connection(for_write=True) as conn:
        cursor = conn.cursor()
        affected = 0
        placeholders = ','.join(['?' for _ in data.ids])
        if data.action == 'remove':
            cursor.execute(f'DELETE FROM video_download_queue WHERE id IN ({placeholders})', data.ids)
            affected = cursor.rowcount
        elif data.action == 'pause':
            cursor.execute(f'''
            UPDATE video_download_queue
            SET status = 'paused'
            WHERE id IN ({placeholders}) AND status = 'pending'
            ''', data.ids)
            affected = cursor.rowcount
        elif data.action == 'resume':
            cursor.execute(f'''
            UPDATE video_download_queue
            SET status = 'pending'
            WHERE id IN ({placeholders}) AND status = 'paused'
            ''', data.ids)
            affected = cursor.rowcount
        elif data.action == 'retry':
            cursor.execute(f'''
            UPDATE video_download_queue
            SET status = 'pending', attempts = 0, error_message = NULL
            WHERE id IN ({placeholders}) AND status = 'failed'
            ''', data.ids)
            affected = cursor.rowcount
        elif data.action == 'prioritize':
            # Silently a no-op when priority was not supplied
            if data.priority is not None:
                cursor.execute(f'''
                UPDATE video_download_queue
                SET priority = ?
                WHERE id IN ({placeholders})
                ''', [data.priority] + data.ids)
                affected = cursor.rowcount
        conn.commit()
    return {
        "success": True,
        "message": f"{data.action} applied to {affected} items",
        "affected": affected
    }
@router.post("/clear-completed")
@limiter.limit("5/minute")
@handle_exceptions
async def clear_completed(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Delete every queue row whose status is 'completed'."""
    app_state = get_app_state()
    with app_state.db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        cur.execute("DELETE FROM video_download_queue WHERE status = 'completed'")
        removed = cur.rowcount
        conn.commit()
    return {
        "success": True,
        "message": f"Cleared {removed} completed items",
        "deleted": removed
    }
@router.post("/clear-failed")
@limiter.limit("5/minute")
@handle_exceptions
async def clear_failed(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Delete every queue row whose status is 'failed'."""
    app_state = get_app_state()
    with app_state.db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        cur.execute("DELETE FROM video_download_queue WHERE status = 'failed'")
        removed = cur.rowcount
        conn.commit()
    return {
        "success": True,
        "message": f"Cleared {removed} failed items",
        "deleted": removed
    }
# ============================================================================
# CELEBRITY DISCOVERY INTEGRATION
# ============================================================================
@router.post("/add-from-celebrity")
@limiter.limit("20/minute")
@handle_exceptions
async def add_from_celebrity_discovery(
    request: Request,
    video_ids: List[int],
    current_user: Dict = Depends(get_current_user)
):
    """Add videos from celebrity discovery to the download queue.

    Looks up the discovered-video rows, inserts each into the queue with
    source_type='celebrity', and flips the discovery row to 'queued'.
    Duplicates (already-queued videos) are counted as skipped.
    """
    app_state = get_app_state()
    added = 0
    skipped = 0
    # FIX: an empty id list would render "IN ()", which SQLite rejects with
    # a syntax error (-> 500). Short-circuit to a no-op response instead.
    if not video_ids:
        return {
            "success": True,
            "message": f"Added {added} videos to queue, {skipped} already in queue",
            "added": added,
            "skipped": skipped
        }
    with app_state.db.get_connection(for_write=True) as conn:
        cursor = conn.cursor()
        # Get the celebrity videos along with their owning profile
        placeholders = ','.join(['?' for _ in video_ids])
        cursor.execute(f'''
        SELECT v.*, cp.name as celebrity_name, cp.id as celeb_id
        FROM celebrity_discovered_videos v
        JOIN celebrity_profiles cp ON v.celebrity_id = cp.id
        WHERE v.id IN ({placeholders})
        ''', video_ids)
        for row in cursor.fetchall():
            try:
                cursor.execute('''
                INSERT INTO video_download_queue (
                    platform, video_id, url, title, channel_name, thumbnail,
                    duration, upload_date, view_count, description,
                    source_type, source_id, source_name, priority
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                ''', (
                    row['platform'], row['video_id'], row['url'], row['title'],
                    row['channel_name'], row['thumbnail'], row['duration'],
                    row['upload_date'], row['view_count'], row['description'],
                    'celebrity', row['celeb_id'], row['celebrity_name'], 5
                ))
                added += 1
                # Update celebrity video status to 'queued'
                cursor.execute('''
                UPDATE celebrity_discovered_videos
                SET status = 'queued', status_updated_at = ?
                WHERE id = ?
                ''', (datetime.now().isoformat(), row['id']))
            except Exception:
                # Most likely a UNIQUE-constraint duplicate in the queue
                skipped += 1
        conn.commit()
    return {
        "success": True,
        "message": f"Added {added} videos to queue, {skipped} already in queue",
        "added": added,
        "skipped": skipped
    }
# ============================================================================
# QUEUE PROCESSOR ENDPOINTS
# ============================================================================
async def ensure_video_thumbnail(app_state, platform: str, video_id: str, thumbnail_url: str = None):
    """
    Ensure video has a thumbnail stored in video_downloads and thumbnails.db cache.
    Fetches from YouTube URL if not present.

    Best-effort: all failures are logged at debug level and swallowed.
    """
    import httpx
    try:
        # Check if thumbnail already exists in video_downloads
        file_path = None
        with app_state.db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute(
                "SELECT thumbnail_data, file_path FROM video_downloads WHERE platform = ? AND video_id = ?",
                (platform, video_id)
            )
            row = cursor.fetchone()
            if row:
                file_path = row[1]
                if row[0] and len(row[0]) > 1000:
                    # Already has thumbnail in video_downloads, but sync to thumbnails.db
                    _sync_to_thumbnail_cache(file_path, row[0])
                    return
        # FIX: initialize before the platform branches. Previously only the
        # youtube branch assigned thumbnail_data up front, so a failed fetch
        # in the dailymotion/generic branches hit a NameError below (which
        # the outer except silently swallowed).
        thumbnail_data = None
        # Determine thumbnail URL - ALWAYS use standardized URL for known platforms
        # YouTube metadata often returns weird URLs with query params that don't work
        if platform == 'youtube':
            # Try maxresdefault first (1280x720, no black bars), fallback to hqdefault
            async with httpx.AsyncClient(timeout=15.0) as client:
                for quality in ['maxresdefault', 'hqdefault']:
                    thumbnail_url = f"https://i.ytimg.com/vi/{video_id}/{quality}.jpg"
                    response = await client.get(thumbnail_url)
                    if response.status_code == 200 and len(response.content) > 1000:
                        thumbnail_data = response.content
                        break
        elif platform == 'dailymotion':
            thumbnail_url = f"https://www.dailymotion.com/thumbnail/video/{video_id}"
            async with httpx.AsyncClient(timeout=15.0) as client:
                response = await client.get(thumbnail_url)
                if response.status_code == 200 and len(response.content) > 1000:
                    thumbnail_data = response.content
        elif thumbnail_url:
            async with httpx.AsyncClient(timeout=15.0) as client:
                response = await client.get(thumbnail_url)
                if response.status_code == 200 and len(response.content) > 1000:
                    thumbnail_data = response.content
        else:
            return  # No known thumbnail URL pattern
        if thumbnail_data:
            # Store in video_downloads
            with app_state.db.get_connection(for_write=True) as conn:
                cursor = conn.cursor()
                cursor.execute(
                    "UPDATE video_downloads SET thumbnail_data = ? WHERE platform = ? AND video_id = ?",
                    (thumbnail_data, platform, video_id)
                )
                conn.commit()
            # Also sync to thumbnails.db cache for Downloads/Media pages
            if file_path:
                _sync_to_thumbnail_cache(file_path, thumbnail_data)
            logger.debug(f"Stored thumbnail for {platform}/{video_id}", module="VideoQueue")
    except Exception as e:
        logger.debug(f"Failed to fetch thumbnail for {video_id}: {e}", module="VideoQueue")
def _sync_to_thumbnail_cache(file_path: str, thumbnail_data: bytes):
    """Sync a thumbnail to the thumbnails.db cache used by Downloads/Media pages."""
    import hashlib
    import sqlite3
    from datetime import datetime
    from pathlib import Path
    try:
        cache_db_path = settings.PROJECT_ROOT / 'database' / 'thumbnails.db'
        # MUST use SHA256 to match get_or_create_thumbnail() in media.py
        path_digest = hashlib.sha256(file_path.encode()).hexdigest()
        # Record mtime so stale cache entries can be detected later; missing
        # files are stored with mtime 0 rather than failing the sync.
        try:
            mtime = Path(file_path).stat().st_mtime
        except OSError:
            mtime = 0
        record = (path_digest, file_path, thumbnail_data, datetime.now().isoformat(), mtime)
        with sqlite3.connect(str(cache_db_path), timeout=10.0) as conn:
            conn.cursor().execute("""
                INSERT OR REPLACE INTO thumbnails
                (file_hash, file_path, thumbnail_data, created_at, file_mtime)
                VALUES (?, ?, ?, ?, ?)
            """, record)
            conn.commit()
    except Exception as e:
        logger.debug(f"Failed to sync thumbnail to cache: {e}", module="VideoQueue")
async def process_queue_item(item: Dict, app_state) -> Dict:
    """
    Process a single queue item end-to-end.

    Steps: mark the row 'downloading', write scraper cookies (if configured)
    to a Netscape-format temp file, run UniversalVideoDownloader in a worker
    thread, then mark the row 'completed' or 'failed' (cookie errors requeue
    the item as 'pending').

    Args:
        item: A video_download_queue row as a dict (id, video_id, url,
              platform, title, and optionally source_type/thumbnail).
        app_state: Application state providing the unified DB.

    Returns:
        A result dict: {'success': True} on success; on failure
        {'success': False} plus 'cookie_error': True and 'platform' when the
        downloader reported expired cookies.
    """
    import tempfile
    import os
    from modules.universal_video_downloader import UniversalVideoDownloader
    item_id = item['id']
    video_id = item['video_id']
    url = item['url']
    platform = item['platform']
    title = item['title']
    # Declared before try so the finally clause can always reference it
    cookies_file = None
    try:
        # Update status to downloading
        with app_state.db.get_connection(for_write=True) as conn:
            cursor = conn.cursor()
            cursor.execute('''
                UPDATE video_download_queue
                SET status = 'downloading', started_at = CURRENT_TIMESTAMP
                WHERE id = ?
            ''', (item_id,))
            conn.commit()
        # Publish the in-flight item so the status endpoint can report it
        queue_processor.set_current(item_id, video_id, title)
        logger.info(f"Queue: Starting download of {title[:50]}...", module="VideoQueue")
        # Determine which scraper to get cookies from based on platform
        # gallery-dl sites: erome, bunkr, cyberdrop, etc.
        gallery_dl_platforms = ['erome', 'bunkr', 'cyberdrop', 'coomer', 'kemono', 'fapello']
        if platform in gallery_dl_platforms:
            scraper_id = 'gallerydl'
            scraper_name = 'gallery-dl'
        else:
            scraper_id = 'ytdlp'
            scraper_name = 'yt-dlp'
        # Get cookies from scraper settings if available
        cookies_file = None
        try:
            with app_state.db.get_connection() as conn:
                cursor = conn.cursor()
                cursor.execute("SELECT cookies_json FROM scrapers WHERE id = ?", (scraper_id,))
                row = cursor.fetchone()
                if row and row[0]:
                    data = json.loads(row[0])
                    # Support both {"cookies": [...]} and [...] formats
                    if isinstance(data, dict) and 'cookies' in data:
                        cookies_list = data['cookies']
                    elif isinstance(data, list):
                        cookies_list = data
                    else:
                        cookies_list = []
                    if cookies_list:
                        # Write cookies to temp file in Netscape format
                        fd, cookies_file = tempfile.mkstemp(suffix='.txt', prefix=f'{scraper_id}_cookies_')
                        with os.fdopen(fd, 'w') as f:
                            f.write("# Netscape HTTP Cookie File\n")
                            for cookie in cookies_list:
                                # Format: domain, include_subdomains, path, secure, expiry, name, value
                                domain = cookie.get('domain', '')
                                # Netscape convention: leading-dot domains apply to subdomains
                                include_subdomains = 'TRUE' if domain.startswith('.') else 'FALSE'
                                path = cookie.get('path', '/')
                                secure = 'TRUE' if cookie.get('secure', False) else 'FALSE'
                                expiry = str(int(cookie.get('expirationDate', 0)))
                                name = cookie.get('name', '')
                                value = cookie.get('value', '')
                                f.write(f"{domain}\t{include_subdomains}\t{path}\t{secure}\t{expiry}\t{name}\t{value}\n")
                        logger.debug(f"Queue: Using {len(cookies_list)} cookies from {scraper_name} scraper", module="VideoQueue")
        except Exception as e:
            # Cookies are optional; proceed without them on any parse/IO error
            logger.debug(f"Queue: Could not load cookies: {e}", module="VideoQueue")
        # Create downloader and download
        downloader = UniversalVideoDownloader(platform=platform, unified_db=app_state.db, cookies_file=cookies_file)
        # Progress callback to update queue item
        def progress_callback(message, percentage, speed=None, eta=None):
            try:
                with app_state.db.get_connection(for_write=True) as conn:
                    cursor = conn.cursor()
                    cursor.execute('''
                        UPDATE video_download_queue
                        SET progress = ?
                        WHERE id = ?
                    ''', (int(percentage), item_id))
                    conn.commit()
            except Exception:
                pass  # Don't fail download due to progress update issues
        # Execute download in thread pool to avoid blocking the event loop
        import asyncio
        from concurrent.futures import ThreadPoolExecutor
        loop = asyncio.get_event_loop()
        with ThreadPoolExecutor(max_workers=1) as executor:
            success, file_path, metadata = await loop.run_in_executor(
                executor,
                lambda: downloader.download_video(url, progress_callback=progress_callback, update_activity=False)
            )
        if success:
            # Update status to completed
            with app_state.db.get_connection(for_write=True) as conn:
                cursor = conn.cursor()
                cursor.execute('''
                    UPDATE video_download_queue
                    SET status = 'completed', progress = 100, file_path = ?,
                        completed_at = CURRENT_TIMESTAMP
                    WHERE id = ?
                ''', (file_path, item_id))
                # Also update celebrity_discovered_videos if this came from there
                if item.get('source_type') == 'celebrity':
                    cursor.execute('''
                        UPDATE celebrity_discovered_videos
                        SET status = 'downloaded', status_updated_at = CURRENT_TIMESTAMP,
                            downloaded_path = ?
                        WHERE video_id = ?
                    ''', (file_path, video_id))
                # Mark in file_inventory so it doesn't show on Media dashboard card
                # (user already actioned this via Internet Discovery card)
                cursor.execute('''
                    UPDATE file_inventory
                    SET from_discovery = 1
                    WHERE file_path = ?
                ''', (file_path,))
                conn.commit()
            logger.info(f"Queue: Completed download of {title[:50]}", module="VideoQueue")
            # Fetch and store thumbnail if not already present
            await ensure_video_thumbnail(app_state, platform, video_id, item.get('thumbnail'))
            # Trigger Immich scan if configured
            trigger_immich_scan(app_state)
            return {'success': True}
        else:
            # Check if this is a cookie error
            if metadata and metadata.get('cookie_error'):
                # Requeue the item (set back to pending)
                with app_state.db.get_connection(for_write=True) as conn:
                    cursor = conn.cursor()
                    cursor.execute('''
                        UPDATE video_download_queue
                        SET status = 'pending', progress = 0, error_message = 'Cookie expired - requeued'
                        WHERE id = ?
                    ''', (item_id,))
                    conn.commit()
                logger.warning(f"Queue: Cookie error detected for {title[:50]}, requeued", module="VideoQueue")
                return {'success': False, 'cookie_error': True, 'platform': platform}
            else:
                # Re-raised so the generic failure path below records it
                raise Exception(metadata.get('error', 'Download returned failure') if metadata else 'Download returned failure')
    except Exception as e:
        error_msg = str(e)[:500]
        logger.error(f"Queue: Failed to download {title[:50]}: {error_msg}", module="VideoQueue")
        # Update status to failed
        with app_state.db.get_connection(for_write=True) as conn:
            cursor = conn.cursor()
            cursor.execute('''
                UPDATE video_download_queue
                SET status = 'failed', error_message = ?, attempts = attempts + 1
                WHERE id = ?
            ''', (error_msg, item_id))
            conn.commit()
        return {'success': False}
    finally:
        # Clean up temp cookies file
        if cookies_file:
            try:
                import os
                os.unlink(cookies_file)
            except Exception:
                pass
def _get_queue_settings_from_db(app_state) -> dict:
"""Get queue settings from database."""
try:
with app_state.db.get_connection() as conn:
cursor = conn.cursor()
cursor.execute("SELECT value FROM settings WHERE key = 'video_queue'")
row = cursor.fetchone()
if row:
return json.loads(row[0])
except Exception:
pass
return {
'download_delay_seconds': 15,
'stop_on_cookie_error': True,
'send_cookie_notification': True
}
async def send_cookie_expired_notification(platform: str, app_state):
    """
    Send a high-priority Pushover notification about expired cookies.

    Silently returns when queue notifications are disabled, Pushover is
    disabled, or credentials are missing. Never raises.

    Args:
        platform: Platform whose cookies expired (used in the message text).
        app_state: Application state providing DB and Pushover settings.
    """
    # Check if notifications are enabled in settings.
    # NOTE: renamed from 'settings' to avoid shadowing the module-level
    # config import (from ..core.config import settings).
    queue_settings = _get_queue_settings_from_db(app_state)
    if not queue_settings.get('send_cookie_notification', True):
        logger.debug("Cookie notification disabled in settings, skipping", module="VideoQueue")
        return
    try:
        from modules.pushover_notifier import PushoverNotifier
        # Get pushover config from settings
        pushover_config = app_state.settings.get('pushover', {})
        if not pushover_config.get('enabled'):
            logger.debug("Pushover notifications disabled", module="VideoQueue")
            return
        user_key = pushover_config.get('user_key')
        api_token = pushover_config.get('api_token')
        if not user_key or not api_token:
            logger.warning("Pushover credentials not configured", module="VideoQueue")
            return
        notifier = PushoverNotifier(
            user_key=user_key,
            api_token=api_token,
            enabled=True,
            default_priority=pushover_config.get('priority', 0),
            device=pushover_config.get('device'),
            include_image=pushover_config.get('include_image', True)
        )
        notifier.send_notification(
            title="⚠️ Download Queue Stopped",
            message=f"Cookies expired for {platform}. Queue paused - please update cookies and restart.",
            priority=1,  # High priority
            sound="siren"
        )
        logger.info(f"Sent cookie expiration notification for {platform}", module="VideoQueue")
    except Exception as e:
        logger.warning(f"Failed to send cookie notification: {e}", module="VideoQueue")
async def run_queue_processor(app_state):
    """
    Main queue processing loop.

    Repeatedly pulls the highest-priority pending row from
    video_download_queue and downloads it, until the queue is empty, the
    processor is stopped, or a cookie error stops it (per settings).
    Honors pause/resume via the shared queue_processor state.

    Args:
        app_state: Application state providing the unified DB.
    """
    logger.info("Queue processor started", module="VideoQueue")
    while queue_processor.is_running:
        # Check if paused
        if queue_processor.is_paused:
            await asyncio.sleep(1)
            continue
        # Get next pending item (ordered by priority, then added_at)
        with app_state.db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute('''
                SELECT * FROM video_download_queue
                WHERE status = 'pending'
                ORDER BY priority ASC, added_at ASC
                LIMIT 1
            ''')
            row = cursor.fetchone()
        if not row:
            # No more pending items
            logger.info("Queue processor: No more pending items", module="VideoQueue")
            queue_processor.stop()
            break
        item = dict(row)
        # Process the item; process_queue_item always returns a result dict
        result = await process_queue_item(item, app_state)
        if isinstance(result, dict) and result.get('success'):
            queue_processor.increment_processed()
        elif isinstance(result, dict) and result.get('cookie_error'):
            # Cookie expired - check settings for how to handle.
            # (Local renamed from 'settings' to avoid shadowing the
            # module-level config import.)
            platform = result.get('platform', 'unknown')
            queue_settings = _get_queue_settings_from_db(app_state)
            if queue_settings.get('stop_on_cookie_error', True):
                logger.warning(f"Queue: Cookie expired for {platform}, stopping queue", module="VideoQueue")
                # Stop the processor
                queue_processor.stop()
                # Send push notification
                await send_cookie_expired_notification(platform, app_state)
                break
            else:
                # Just log and continue with next item
                logger.warning(f"Queue: Cookie expired for {platform}, continuing (stop_on_cookie_error=False)", module="VideoQueue")
                queue_processor.increment_failed()
        else:
            queue_processor.increment_failed()
        # Configurable delay between downloads (default 15s) to avoid rate
        # limiting; reuses the shared settings helper instead of duplicating
        # the settings-table query inline.
        delay_seconds = _get_queue_settings_from_db(app_state).get('download_delay_seconds', 15)
        logger.debug(f"Queue: Waiting {delay_seconds}s before next download", module="VideoQueue")
        await asyncio.sleep(delay_seconds)
    logger.info(f"Queue processor stopped. Processed: {queue_processor.processed_count}, Failed: {queue_processor.failed_count}",
                module="VideoQueue")
@router.get("/processor/status")
@limiter.limit("60/minute")
@handle_exceptions
async def get_processor_status(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Get the current queue processor status."""
    app_state = get_app_state()
    # Augment the processor snapshot with the live pending count from the DB
    with app_state.db.get_connection() as conn:
        cur = conn.cursor()
        cur.execute("SELECT COUNT(*) FROM video_download_queue WHERE status = 'pending'")
        pending_count = cur.fetchone()[0]
    processor_status = queue_processor.get_status()
    processor_status["pending_count"] = pending_count
    return {"success": True, "processor": processor_status}
@router.post("/processor/start")
@limiter.limit("10/minute")
@handle_exceptions
async def start_processor(
    request: Request,
    background_tasks: BackgroundTasks,
    current_user: Dict = Depends(get_current_user)
):
    """Start processing the download queue."""
    # A paused processor is resumed rather than restarted
    if queue_processor.is_paused:
        queue_processor.resume()
        logger.info("Queue processor resumed", module="VideoQueue")
        return {
            "success": True,
            "message": "Queue processor resumed"
        }
    # Not paused: a running processor needs no action
    if queue_processor.is_running:
        return {
            "success": True,
            "message": "Queue processor is already running"
        }
    app_state = get_app_state()
    # Refuse to start when there is nothing to do
    with app_state.db.get_connection() as conn:
        cur = conn.cursor()
        cur.execute("SELECT COUNT(*) FROM video_download_queue WHERE status = 'pending'")
        pending_count = cur.fetchone()[0]
    if pending_count == 0:
        return {
            "success": False,
            "message": "No pending items in queue"
        }
    # Start the processor and kick off the loop as a background task
    queue_processor.start()
    queue_processor.reset_counts()
    asyncio.create_task(run_queue_processor(app_state))
    logger.info(f"Queue processor started with {pending_count} pending items", module="VideoQueue")
    return {
        "success": True,
        "message": f"Queue processor started with {pending_count} pending items"
    }
@router.post("/processor/pause")
@limiter.limit("10/minute")
@handle_exceptions
async def pause_processor(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Pause the queue processor (finishes current download first)."""
    # Guard clauses: nothing to pause, or already paused
    if not queue_processor.is_running:
        return {"success": False, "message": "Queue processor is not running"}
    if queue_processor.is_paused:
        return {"success": True, "message": "Queue processor is already paused"}
    queue_processor.pause()
    logger.info("Queue processor paused", module="VideoQueue")
    return {"success": True, "message": "Queue processor paused (current download will complete)"}
@router.post("/processor/stop")
@limiter.limit("10/minute")
@handle_exceptions
async def stop_processor(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Stop the queue processor completely."""
    # Stopping an idle processor is treated as success (idempotent)
    if not queue_processor.is_running:
        return {"success": True, "message": "Queue processor is not running"}
    queue_processor.stop()
    logger.info("Queue processor stopped", module="VideoQueue")
    return {"success": True, "message": "Queue processor stopped"}
class DownloadSelectedRequest(BaseModel):
    """Request body for /processor/start-selected: queue row IDs to download."""
    # IDs of video_download_queue rows, processed in the order given
    ids: List[int]
# Store selected IDs for the processor to use
# NOTE(review): appears unused in this module - selected IDs are passed
# directly to run_selected_queue_processor(); confirm before removing.
_selected_ids_to_process: List[int] = []
async def run_selected_queue_processor(app_state, selected_ids: List[int]):
    """
    Process only the selected queue items, in the order given.

    Each ID is looked up in video_download_queue; items that are missing or
    no longer 'pending' are skipped. Honors pause/stop via the shared
    queue_processor state and stops early on cookie errors when configured.

    Args:
        app_state: Application state providing the unified DB.
        selected_ids: video_download_queue row IDs to process, in order.
    """
    logger.info(f"Queue processor started for {len(selected_ids)} selected items", module="VideoQueue")
    last_index = len(selected_ids) - 1
    for position, item_id in enumerate(selected_ids):
        if not queue_processor.is_running:
            break
        # Check if paused (wait until resumed or stopped)
        while queue_processor.is_paused and queue_processor.is_running:
            await asyncio.sleep(1)
        if not queue_processor.is_running:
            break
        # Get the specific item
        with app_state.db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute('''
                SELECT * FROM video_download_queue
                WHERE id = ? AND status = 'pending'
            ''', (item_id,))
            row = cursor.fetchone()
        if not row:
            # Item not found or not pending, skip
            continue
        item = dict(row)
        # Process the item; process_queue_item always returns a result dict
        result = await process_queue_item(item, app_state)
        if isinstance(result, dict) and result.get('success'):
            queue_processor.increment_processed()
        elif isinstance(result, dict) and result.get('cookie_error'):
            # Cookie expired - check settings for how to handle.
            # (Local renamed from 'settings' to avoid shadowing the
            # module-level config import.)
            platform = result.get('platform', 'unknown')
            queue_settings = _get_queue_settings_from_db(app_state)
            if queue_settings.get('stop_on_cookie_error', True):
                logger.warning(f"Queue: Cookie expired for {platform}, stopping queue", module="VideoQueue")
                # Stop the processor
                queue_processor.stop()
                # Send push notification
                await send_cookie_expired_notification(platform, app_state)
                break
            else:
                # Just log and continue with next item
                logger.warning(f"Queue: Cookie expired for {platform}, continuing (stop_on_cookie_error=False)", module="VideoQueue")
                queue_processor.increment_failed()
        else:
            queue_processor.increment_failed()
        # BUGFIX: the old "remaining" test compared row IDs numerically
        # (i > item_id) and misused list.index() (wrong with duplicates),
        # so the inter-download delay could fire after the last item or be
        # mis-detected. Position in the list is the correct criterion.
        if position < last_index and queue_processor.is_running:
            delay_seconds = _get_queue_settings_from_db(app_state).get('download_delay_seconds', 15)
            logger.debug(f"Queue: Waiting {delay_seconds}s before next download", module="VideoQueue")
            await asyncio.sleep(delay_seconds)
    queue_processor.stop()
    logger.info(f"Queue processor stopped. Processed: {queue_processor.processed_count}, Failed: {queue_processor.failed_count}",
                module="VideoQueue")
@router.post("/processor/start-selected")
@limiter.limit("10/minute")
@handle_exceptions
async def start_processor_selected(
    request: Request,
    data: DownloadSelectedRequest,
    background_tasks: BackgroundTasks,
    current_user: Dict = Depends(get_current_user)
):
    """Start processing only selected queue items."""
    # Guard clauses: refuse while running, and refuse empty selections
    if queue_processor.is_running:
        return {
            "success": False,
            "message": "Queue processor is already running. Stop it first to start a new selection."
        }
    if not data.ids:
        return {
            "success": False,
            "message": "No items selected"
        }
    app_state = get_app_state()
    # Verify selected items exist and are pending
    placeholders = ','.join('?' for _ in data.ids)
    with app_state.db.get_connection() as conn:
        cur = conn.cursor()
        cur.execute(f'''
            SELECT COUNT(*) FROM video_download_queue
            WHERE id IN ({placeholders}) AND status = 'pending'
        ''', data.ids)
        pending_count = cur.fetchone()[0]
    if pending_count == 0:
        return {
            "success": False,
            "message": "No pending items in selection"
        }
    # Start the processor and hand the selection to the background loop
    queue_processor.start()
    queue_processor.reset_counts()
    asyncio.create_task(run_selected_queue_processor(app_state, data.ids))
    logger.info(f"Queue processor started for {pending_count} selected items", module="VideoQueue")
    return {
        "success": True,
        "message": f"Downloading {pending_count} selected items"
    }