Files
media-downloader/web/backend/routers/platforms.py
Todd 0d7b2b1aab Initial commit
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-29 22:42:55 -04:00

1476 lines
59 KiB
Python

"""
Platforms Router
Handles platform management operations:
- List all platforms and their status
- Trigger manual downloads for platforms
- Instagram-specific post downloads
"""
import asyncio
import re
import shutil
import tempfile
from datetime import datetime
from pathlib import Path
from typing import Dict, List, Optional
from fastapi import APIRouter, BackgroundTasks, Body, Depends, HTTPException, Request
from pydantic import BaseModel
from slowapi import Limiter
from slowapi.util import get_remote_address
from ..core.dependencies import get_current_user, get_app_state
from ..core.exceptions import handle_exceptions, ValidationError
from ..core.responses import now_iso8601
from modules.universal_logger import get_logger
# Module-level wiring shared by every endpoint in this router.
logger = get_logger('API')
router = APIRouter(prefix="/api", tags=["Platforms"])
# Rate limits are keyed on the caller's remote address.
limiter = Limiter(key_func=get_remote_address)
# Semaphore to limit concurrent Instagram post downloads to 1
_instagram_post_semaphore = asyncio.Semaphore(1)
_instagram_post_queue: List[str] = []  # Track queued posts for status
# ============================================================================
# PYDANTIC MODELS
# ============================================================================
class TriggerRequest(BaseModel):
    """Request body for POST /api/platforms/{platform}/trigger."""
    # Optional account name; only echoed back in the completion broadcast —
    # the CLI invocation itself always processes all configured accounts.
    username: Optional[str] = None
    # Accepted but not consumed by the visible handler — TODO confirm use.
    content_types: Optional[List[str]] = None
class InstagramPostRequest(BaseModel):
    """Request body for an Instagram single-post download.

    NOTE(review): the /instagram/download-post endpoint in this file reads
    its fields via Body(...) parameters instead of this model; this class
    appears unused by the visible handlers — confirm before removing.
    """
    # Post shortcode or full URL.
    post_id: str
    # Optional owner username.
    username: Optional[str] = None
# ============================================================================
# DOWNLOAD CACHE UTILITIES
# ============================================================================
def invalidate_download_cache():
    """Invalidate download-related caches after downloads complete.

    Best-effort: a cache failure must never break the download flow, so
    errors are logged at debug level and otherwise ignored. Each pattern
    is invalidated independently so one failure does not skip the rest.
    """
    # Uses the module-level get_app_state import; the previous local
    # re-import was redundant.
    app_state = get_app_state()
    cache = getattr(app_state, 'cache', None)
    if not cache:
        return
    # Downloads, stats and gallery views all render download-derived data,
    # so all three cache namespaces are dropped together.
    for pattern in ('downloads:*', 'stats:*', 'gallery:*'):
        try:
            cache.invalidate_pattern(pattern)
        except Exception as exc:
            logger.debug(f"Cache invalidation failed for {pattern}: {exc}", module="Platforms")
# ============================================================================
# PLATFORM STATUS ENDPOINTS
# ============================================================================
@router.get("/platforms/running")
@limiter.limit("100/minute")
@handle_exceptions
async def get_running_platforms(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Get list of currently running platform downloads"""
    app_state = get_app_state()
    # Fall back to an empty mapping when tracking has not been initialised.
    tracked = getattr(app_state, 'running_platform_downloads', {})
    running = [
        {
            'platform': name,
            'started_at': info.get('started_at'),
            'session_id': info.get('session_id'),
        }
        for name, info in tracked.items()
    ]
    return {"platforms": running, "count": len(running)}
@router.post("/platforms/{platform}/stop")
@limiter.limit("10/minute")
@handle_exceptions
async def stop_platform_download(
    request: Request,
    platform: str,
    current_user: Dict = Depends(get_current_user)
):
    """Stop a running platform download.

    Kills the tracked subprocess (children first, then the parent), removes
    the platform from the running-download registry, drops any associated
    scraper session, and emits a zeroed scraper_completed event so the
    frontend monitor clears its state.

    Raises:
        HTTPException 404: no tracking dict, or platform not running.
        HTTPException 500: the kill sequence itself failed.
    """
    app_state = get_app_state()
    if not hasattr(app_state, 'running_platform_downloads'):
        raise HTTPException(status_code=404, detail="No running downloads found")
    if platform not in app_state.running_platform_downloads:
        raise HTTPException(status_code=404, detail=f"Platform {platform} is not running")
    download_data = app_state.running_platform_downloads[platform]
    process = download_data.get('process')
    session_id = download_data.get('session_id')
    if process:
        try:
            import subprocess
            # Get the process ID
            pid = process.pid
            logger.info(f"Stopping {platform} download (PID: {pid})", module="Platforms")
            # Kill all child processes first using pkill
            # NOTE(review): pkill is POSIX/Linux-only — confirm deployment target.
            try:
                result = subprocess.run(['pkill', '-9', '-P', str(pid)],
                                        capture_output=True, timeout=2)
                logger.info(f"Killed child processes of PID {pid}", module="Platforms")
            except subprocess.TimeoutExpired:
                logger.warning(f"pkill timed out for PID {pid}, continuing", module="Platforms")
            except Exception as pkill_error:
                logger.warning(f"pkill failed for PID {pid}: {pkill_error}, continuing", module="Platforms")
            # Give processes time to die
            await asyncio.sleep(0.5)
            # Then kill the parent process
            try:
                process.kill()
                await asyncio.wait_for(process.wait(), timeout=5.0)
                logger.info(f"Stopped {platform} download successfully", module="Platforms")
            except asyncio.TimeoutError:
                # Kill was sent but reaping timed out; tracking is still
                # cleaned up below.
                logger.warning(f"Process {pid} did not terminate in time", module="Platforms")
            except ProcessLookupError:
                # Process exited between pkill and kill — treated as success.
                logger.info(f"Process {pid} already terminated", module="Platforms")
        except Exception as e:
            error_msg = str(e) if str(e) else repr(e)
            logger.error(f"Error stopping {platform} download: {error_msg}", module="Platforms", exc_info=True)
            raise HTTPException(status_code=500, detail=f"Failed to stop download: {error_msg}")
    # Remove from tracking
    app_state.running_platform_downloads.pop(platform, None)
    # Clean up scraper session if it exists
    if session_id and hasattr(app_state, 'active_scraper_sessions'):
        app_state.active_scraper_sessions.pop(session_id, None)
    # Emit scraper_completed event to update frontend
    if session_id and hasattr(app_state, 'scraper_event_emitter') and app_state.scraper_event_emitter:
        app_state.scraper_event_emitter.emit_scraper_completed(
            session_id=session_id,
            stats={
                'total_downloaded': 0,
                'moved': 0,
                'review': 0,
                'duplicates': 0,
                'failed': 0
            }
        )
    return {
        "success": True,
        "platform": platform,
        "message": f"{platform} download stopped"
    }
# ============================================================================
# SCRAPER MONITOR ENDPOINTS
# ============================================================================
@router.get("/scraper-sessions/active")
@limiter.limit("100/minute")
@handle_exceptions
async def get_active_scraper_sessions(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Get currently active scraping sessions for the scraping monitor page"""
    app_state = get_app_state()
    # Sessions live on app_state; default to an empty mapping when the
    # attribute has not been initialised yet.
    active = getattr(app_state, 'active_scraper_sessions', {})
    return {
        "sessions": list(active.values()),
        "count": len(active),
    }
# ============================================================================
# PLATFORM LIST ENDPOINT
# ============================================================================
@router.get("/platforms")
@limiter.limit("100/minute")
@handle_exceptions
async def get_platforms(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Get list of all platforms and their status."""
    app_state = get_app_state()
    config = app_state.settings.get_all() if app_state.settings else app_state.config
    display_names = {
        'instagram_unified': 'Instagram',
        'snapchat': 'Snapchat',
        'snapchat_client': 'Snapchat',
        'tiktok': 'TikTok',
        'forums': 'Forums',
        'coppermine': 'Coppermine',
    }
    # Which config key holds each platform's list of entries (for counting).
    entry_keys = {
        'instagram_unified': 'accounts',
        'snapchat': 'usernames',
        'snapchat_client': 'usernames',
        'tiktok': 'accounts',
        'forums': 'configs',
        'coppermine': 'galleries',
    }
    # Platforms whose check interval is stored on the FIRST list entry
    # rather than at the platform level.
    interval_entry_keys = {
        'instagram_unified': 'accounts',
        'tiktok': 'accounts',
        'forums': 'configs',
    }
    hidden_modules = config.get('hidden_modules', [])
    platforms = []
    for name in ['instagram_unified', 'snapchat', 'snapchat_client', 'tiktok', 'forums', 'coppermine']:
        if name in hidden_modules:
            continue
        platform_config = config.get(name, {})
        if name in interval_entry_keys:
            entries = platform_config.get(interval_entry_keys[name], [])
            check_interval = entries[0].get('check_interval_hours') if entries else None
        else:
            # snapchat, snapchat_client and coppermine keep the interval
            # at the platform level.
            check_interval = platform_config.get('check_interval_hours')
        platforms.append({
            "name": name,
            "type": "scheduled",
            "enabled": platform_config.get('enabled', False),
            "display_name": display_names.get(name, name.title()),
            "check_interval_hours": check_interval,
            "account_count": len(platform_config.get(entry_keys[name], [])),
            "config": platform_config,
        })
    logger.info(f"Returning {len(platforms)} scheduled platforms", module="Platforms")
    return platforms
# ============================================================================
# TRIGGER PLATFORM DOWNLOAD
# ============================================================================
@router.post("/platforms/{platform}/trigger")
@limiter.limit("10/minute")
@handle_exceptions
async def trigger_platform_download(
    request: Request,
    platform: str,
    trigger_data: TriggerRequest,
    background_tasks: BackgroundTasks,
    current_user: Dict = Depends(get_current_user)
):
    """Manually trigger a download for a platform.

    Spawns the ``media-downloader`` CLI as a subprocess in a FastAPI
    background task, streams its stderr lines to websocket clients as log
    messages, and pattern-matches known log lines to emit per-account
    progress events for the scraping monitor. Returns immediately with a
    task id; the actual download runs after the response is sent.

    Raises:
        ValidationError: when ``platform`` is not one of the known names.
    """
    valid_platforms = ['instagram_unified', 'fastdl', 'imginn', 'imginn_api', 'instagram_client', 'toolzu', 'snapchat', 'snapchat_client', 'tiktok', 'forums', 'coppermine']
    if platform not in valid_platforms:
        raise ValidationError(f"Invalid platform: {platform}")
    app_state = get_app_state()
    task_id = f"{platform}_{datetime.now().timestamp()}"
    async def run_download():
        try:
            session_id = f"{platform}_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
            # Get list of accounts/forums to process
            accounts_to_process = []
            # For forums, get ALL enabled forum names
            if platform == 'forums':
                forums_config = app_state.settings.get('forums', {})
                for forum_cfg in forums_config.get('configs', []):
                    if forum_cfg.get('enabled', False):
                        accounts_to_process.append(forum_cfg.get('name', 'Unknown'))
            # For Coppermine, get ALL galleries
            elif platform == 'coppermine':
                coppermine_config = app_state.settings.get('coppermine', {})
                for gallery in coppermine_config.get('galleries', []):
                    gallery_name = gallery.get('name', 'Unknown')
                    accounts_to_process.append(gallery_name)
            # For Instagram unified, get all accounts from unified config
            elif platform == 'instagram_unified':
                unified_config = app_state.settings.get('instagram_unified', {})
                for acc in unified_config.get('accounts', []):
                    if acc.get('username'):
                        accounts_to_process.append(acc['username'])
            # For other platforms, get ALL usernames
            elif platform in ['fastdl', 'imginn', 'imginn_api', 'instagram_client', 'toolzu', 'snapchat']:
                platform_config = app_state.settings.get(platform, {})
                accounts_to_process = platform_config.get('usernames', [])
                # For imginn/imginn_api/fastdl/instagram_client, also include phrase search usernames if enabled
                if platform in ['imginn', 'imginn_api', 'fastdl', 'instagram_client']:
                    phrase_config = platform_config.get('phrase_search', {})
                    if phrase_config.get('enabled'):
                        phrase_usernames = phrase_config.get('usernames', [])
                        # Add phrase search users to the list
                        accounts_to_process.extend(phrase_usernames)
            # For TikTok, get ALL accounts
            elif platform == 'tiktok':
                tiktok_config = app_state.settings.get('tiktok', {})
                accounts = tiktok_config.get('accounts', [])
                for acc in accounts:
                    if acc.get('enabled', True):
                        accounts_to_process.append(acc.get('username', 'Unknown'))
            # Emit ONE scraper_started event with the complete list
            # Map Instagram downloaders to "Instagram" for display
            display_platform = platform
            if platform in ['instagram_unified', 'fastdl', 'imginn', 'imginn_api', 'instagram_client', 'toolzu', 'instaloader']:
                display_platform = 'Instagram'
            if app_state.scraper_event_emitter:
                app_state.scraper_event_emitter.emit_scraper_started(
                    session_id=session_id,
                    platform=display_platform,
                    account='all',  # Match scheduler format
                    content_type="auto",
                    estimated_count=len(accounts_to_process),
                    accounts_list=accounts_to_process  # Pass the full list
                )
            # The CLI performs the actual downloading; stderr carries the
            # universal logger's console output, which we parse below.
            cmd = ["media-downloader", "--platform", platform]
            process = await asyncio.create_subprocess_exec(
                *cmd,
                stdout=asyncio.subprocess.PIPE,
                stderr=asyncio.subprocess.PIPE
            )
            # Track running process in app_state
            if hasattr(app_state, 'running_platform_downloads'):
                app_state.running_platform_downloads[platform] = {
                    'process': process,
                    'started_at': datetime.now().isoformat(),
                    'session_id': session_id
                }
            manager = getattr(app_state, 'websocket_manager', None)
            # Track progress by parsing log output
            # Note: Universal logger's console handler writes to stderr, not stdout
            current_account = None
            processed_count = 0  # Track how many accounts we've processed sequentially
            completed_accounts = []  # Track accounts that have finished processing
            if manager:
                async for line in process.stderr:
                    line_text = line.decode().strip()
                    # Broadcast log message
                    await manager.broadcast({
                        "type": "log",
                        "level": "info",
                        "message": line_text,
                        "platform": display_platform
                    })
                    # Parse log to detect current forum/account being processed
                    if app_state.scraper_event_emitter and accounts_to_process:
                        # NOTE(review): redundant — re is imported at module top.
                        import re
                        # Detect forum processing - match exact format: "Processing forum: ForumName"
                        if 'Processing forum:' in line_text:
                            forum_match = re.search(r'Processing forum:\s+(.+)', line_text)
                            if forum_match:
                                detected_forum = forum_match.group(1).strip()
                                logger.debug(f"Detected forum from log: '{detected_forum}'", module="ScrapingMonitor")
                                # Find matching account in our list
                                for account in accounts_to_process:
                                    logger.debug(f"Comparing '{detected_forum.lower()}' with '{account.lower()}'", module="ScrapingMonitor")
                                    # Substring match in either direction to tolerate
                                    # differences between configured and logged names.
                                    if account.lower() in detected_forum.lower() or detected_forum.lower() in account.lower():
                                        if current_account != account:
                                            # Mark previous account as completed
                                            if current_account and current_account not in completed_accounts:
                                                completed_accounts.append(current_account)
                                            current_account = account
                                            processed_count += 1
                                            logger.info(f"Forum progress: Now processing {account} ({processed_count}/{len(accounts_to_process)})", module="ScrapingMonitor")
                                            app_state.scraper_event_emitter.emit_scraper_progress(
                                                session_id=session_id,
                                                status=f"Checking forum thread: {account}",
                                                current=processed_count,
                                                total=len(accounts_to_process),
                                                current_account=account,
                                                completed_accounts=completed_accounts
                                            )
                                        break
                        # Detect Coppermine gallery processing - match exact format: "Processing Coppermine gallery: GalleryName"
                        elif 'Processing Coppermine gallery:' in line_text:
                            gallery_match = re.search(r'Processing Coppermine gallery:\s+(.+)', line_text)
                            if gallery_match:
                                detected_gallery = gallery_match.group(1).strip()
                                # Find matching gallery in our list
                                for account in accounts_to_process:
                                    if account.lower() in detected_gallery.lower() or detected_gallery.lower() in account.lower():
                                        if current_account != account:
                                            # Mark previous account as completed
                                            if current_account and current_account not in completed_accounts:
                                                completed_accounts.append(current_account)
                                            current_account = account
                                            processed_count += 1
                                            logger.info(f"Coppermine progress: Now processing {account} ({processed_count}/{len(accounts_to_process)})", module="ScrapingMonitor")
                                            app_state.scraper_event_emitter.emit_scraper_progress(
                                                session_id=session_id,
                                                status=f"Checking gallery: {account}",
                                                current=processed_count,
                                                total=len(accounts_to_process),
                                                current_account=account,
                                                completed_accounts=completed_accounts
                                            )
                                        break
                        # Detect TikTok user processing (format: "Downloading TikTok profile: @username")
                        elif 'Downloading TikTok profile: @' in line_text:
                            user_match = re.search(r'Downloading TikTok profile: @([a-zA-Z0-9_.]+)', line_text)
                            if user_match:
                                detected_user = user_match.group(1)
                                # Find matching account in our list
                                for account in accounts_to_process:
                                    if account.lower() == detected_user.lower():
                                        if current_account != account:
                                            # Mark previous account as completed
                                            if current_account and current_account not in completed_accounts:
                                                completed_accounts.append(current_account)
                                            current_account = account
                                            processed_count += 1
                                            logger.info(f"TikTok progress: Now processing {account} ({processed_count}/{len(accounts_to_process)})", module="ScrapingMonitor")
                                            app_state.scraper_event_emitter.emit_scraper_progress(
                                                session_id=session_id,
                                                status=f"Checking videos from @{account}",
                                                current=processed_count,
                                                total=len(accounts_to_process),
                                                current_account=account,
                                                completed_accounts=completed_accounts
                                            )
                                        break
                        # Detect Snapchat user processing (format: "Navigating to @username on domain")
                        elif 'Navigating to @' in line_text:
                            user_match = re.search(r'Navigating to @([a-zA-Z0-9_.]+)', line_text)
                            if user_match:
                                detected_user = user_match.group(1)
                                # Find matching account in our list
                                for account in accounts_to_process:
                                    if account.lower() == detected_user.lower():
                                        if current_account != account:
                                            # Mark previous account as completed
                                            if current_account and current_account not in completed_accounts:
                                                completed_accounts.append(current_account)
                                            current_account = account
                                            processed_count += 1
                                            logger.info(f"Snapchat progress: Now processing {account} ({processed_count}/{len(accounts_to_process)})", module="ScrapingMonitor")
                                            app_state.scraper_event_emitter.emit_scraper_progress(
                                                session_id=session_id,
                                                status=f"Checking stories from @{account}",
                                                current=processed_count,
                                                total=len(accounts_to_process),
                                                current_account=account,
                                                completed_accounts=completed_accounts
                                            )
                                        break
                        # Detect Instagram/FastDL user processing (format: "Processing Instagram {content_type} for @username")
                        elif 'Processing Instagram' in line_text and 'for @' in line_text:
                            user_match = re.search(r'Processing Instagram (\w+) for @([a-zA-Z0-9_.]+)', line_text)
                            if user_match:
                                content_type = user_match.group(1)  # posts, stories, reels, tagged
                                detected_user = user_match.group(2)
                                # Find matching account in our list
                                for account in accounts_to_process:
                                    if account.lower() == detected_user.lower():
                                        if current_account != account:
                                            # Mark previous account as completed
                                            if current_account and current_account not in completed_accounts:
                                                completed_accounts.append(current_account)
                                            current_account = account
                                            processed_count += 1
                                            logger.info(f"FastDL/Instagram progress: Now processing {account} ({processed_count}/{len(accounts_to_process)})", module="ScrapingMonitor")
                                        # Always update status with current content type
                                        status_msg = f"Checking {content_type} from @{account}"
                                        app_state.scraper_event_emitter.emit_scraper_progress(
                                            session_id=session_id,
                                            status=status_msg,
                                            current=processed_count,
                                            total=len(accounts_to_process),
                                            current_account=account,
                                            completed_accounts=completed_accounts
                                        )
                                        break
                        # Detect phrase search user processing (format: "Searching username for phrase matches")
                        elif 'for phrase matches' in line_text:
                            user_match = re.search(r'Searching ([a-zA-Z0-9_.]+) for phrase matches', line_text)
                            if user_match:
                                detected_user = user_match.group(1)
                                # Find matching account in our list
                                for account in accounts_to_process:
                                    if account.lower() == detected_user.lower():
                                        if current_account != account:
                                            # Mark previous account as completed
                                            if current_account and current_account not in completed_accounts:
                                                completed_accounts.append(current_account)
                                            current_account = account
                                            processed_count += 1
                                            logger.info(f"Phrase search progress: Now processing {account} ({processed_count}/{len(accounts_to_process)})", module="ScrapingMonitor")
                                            app_state.scraper_event_emitter.emit_scraper_progress(
                                                session_id=session_id,
                                                status=f"Searching phrases in: {account}",
                                                current=processed_count,
                                                total=len(accounts_to_process),
                                                current_account=account,
                                                completed_accounts=completed_accounts
                                            )
                                        break
            await process.wait()
            invalidate_download_cache()
            # Remove from running downloads
            if hasattr(app_state, 'running_platform_downloads'):
                app_state.running_platform_downloads.pop(platform, None)
            # Emit scraper_completed event once for the whole platform
            if app_state.scraper_event_emitter:
                app_state.scraper_event_emitter.emit_scraper_completed(
                    session_id=session_id,
                    stats={
                        'total_downloaded': 0,  # CLI doesn't report this
                        'moved': 0,
                        'review': 0,
                        'duplicates': 0,
                        'failed': 0 if process.returncode == 0 else 1
                    }
                )
            if manager:
                await manager.broadcast({
                    "type": "download_completed",
                    "platform": display_platform,
                    "username": trigger_data.username or "all",
                    "exit_code": process.returncode,
                    "timestamp": now_iso8601(),
                    "from_platform_page": True
                })
        except Exception as e:
            # Remove from running downloads
            if hasattr(app_state, 'running_platform_downloads'):
                app_state.running_platform_downloads.pop(platform, None)
            # Emit scraper_completed event with error
            if app_state.scraper_event_emitter:
                app_state.scraper_event_emitter.emit_scraper_completed(
                    session_id=session_id,
                    stats={
                        'total_downloaded': 0,
                        'moved': 0,
                        'review': 0,
                        'duplicates': 0,
                        'failed': 1
                    }
                )
            manager = getattr(app_state, 'websocket_manager', None)
            if manager:
                await manager.broadcast({
                    "type": "download_error",
                    "platform": platform,
                    "error": str(e),
                    "timestamp": now_iso8601()
                })
    background_tasks.add_task(run_download)
    return {
        "success": True,
        "task_id": task_id,
        "platform": platform,
        "message": f"Download triggered for {platform}"
    }
# ============================================================================
# INSTAGRAM POST DOWNLOAD
# ============================================================================
@router.post("/instagram/download-post")
@limiter.limit("20/minute")
@handle_exceptions
async def download_instagram_post(
    request: Request,
    background_tasks: BackgroundTasks,
    current_user: Dict = Depends(get_current_user),
    post_id: str = Body(..., description="Instagram post shortcode or full URL"),
    username: str = Body(None, description="Username (optional)")
):
    """Download a specific Instagram post using ImgInn and process through move manager.

    Accepts either a bare shortcode or a full instagram.com/imginn.com URL.
    Skips the download when the post is already in the downloads table with
    an existing file on disk. The actual work runs in a background task,
    serialized by the module-level semaphore (one post download at a time),
    and the blocking scraper code runs in a fresh worker thread.

    Raises:
        ValidationError: when a URL is given but no /p/<id> segment is found.
    """
    app_state = get_app_state()
    # Extract shortcode from URL if full URL provided
    shortcode = post_id
    if 'instagram.com' in post_id or 'imginn.com' in post_id:
        match = re.search(r'/p/([A-Za-z0-9_-]+)', post_id)
        if match:
            shortcode = match.group(1)
        else:
            raise ValidationError("Could not extract post ID from URL")
    # Check if already downloaded
    with app_state.db.get_connection() as conn:
        cursor = conn.cursor()
        cursor.execute('''
            SELECT id, filename, file_path FROM downloads
            WHERE media_id = ? AND platform = 'instagram'
        ''', (shortcode,))
        existing = cursor.fetchone()
    # Only treat it as a duplicate when the recorded file still exists.
    if existing and existing['file_path']:
        if Path(existing['file_path']).exists():
            return {
                "success": False,
                "message": f"Post {shortcode} already downloaded",
                "filename": existing['filename'],
                "file_path": existing['file_path']
            }
    task_id = f"instagram_post_{shortcode}_{datetime.now().timestamp()}"
    async def run_post_download():
        # Add to queue tracking
        _instagram_post_queue.append(shortcode)
        queue_position = len(_instagram_post_queue)
        # Wait for semaphore (only 1 download at a time)
        async with _instagram_post_semaphore:
            try:
                from modules.activity_status import get_activity_manager
                activity_manager = get_activity_manager(app_state.db)
                activity_manager.start_activity(
                    task_id=task_id,
                    platform='instagram',
                    account=username if username else f'post {shortcode}',
                    status='Downloading post'
                )
                activity_manager.update_status(f"Fetching post {shortcode}...")
                manager = getattr(app_state, 'websocket_manager', None)
                if manager:
                    await manager.broadcast({
                        "type": "download_started",
                        "platform": "instagram",
                        "username": username or f"post/{shortcode}",
                        "post_id": shortcode,
                        "timestamp": now_iso8601()
                    })
                import concurrent.futures
                def do_download():
                    # Synchronous worker: runs in a thread, must not touch
                    # the event loop.
                    from modules.imginn_module import ImgInnDownloader
                    from modules.move_module import MoveManager
                    imginn_config = app_state.settings.get('imginn', {})
                    posts_config = imginn_config.get('posts', {})
                    dest_path = posts_config.get('destination_path')
                    if not dest_path:
                        dest_path = "/opt/immich/md/social media/instagram/posts"
                        logger.warning("ImgInn posts destination not configured, using default", module="Instagram")
                    dest_base = Path(dest_path)
                    dest_base.mkdir(parents=True, exist_ok=True)
                    # Download into a temp dir first; MoveManager relocates
                    # files to their final destination.
                    temp_dir = Path(tempfile.mkdtemp(prefix='instagram_post_'))
                    try:
                        from datetime import datetime
                        downloader = ImgInnDownloader(unified_db=app_state.db)
                        post_url = f"https://imginn.com/p/{shortcode}/"
                        target_username = username or 'unknown'
                        # Generate session ID for tracking
                        session_id = f"imginn_{target_username}_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
                        # Emit scraper_started event
                        if app_state.scraper_event_emitter:
                            app_state.scraper_event_emitter.emit_scraper_started(
                                session_id=session_id,
                                platform='instagram',
                                account=target_username,
                                content_type='post',
                                estimated_count=1
                            )
                        downloaded = downloader.download_posts(
                            username=target_username,
                            specific_post_url=post_url,
                            output_dir=temp_dir,
                            days_back=365,
                            max_posts=1
                        )
                        if not downloaded:
                            return {"success": False, "message": "No files downloaded"}
                        # When the caller gave no username, recover it from the
                        # downloaded filename (assumes "<username>_..." naming —
                        # TODO confirm against ImgInnDownloader's file naming).
                        if target_username == 'unknown' and downloaded:
                            first_file = Path(downloaded[0])
                            parts = first_file.stem.split('_')
                            if parts:
                                target_username = parts[0]
                        move_manager = MoveManager(
                            unified_db=app_state.db,
                            event_emitter=app_state.scraper_event_emitter
                        )
                        # Set session context for real-time monitoring (use session_id from above)
                        move_manager.set_session_context(
                            platform='instagram',
                            account=target_username,
                            session_id=session_id
                        )
                        moved_files = []
                        # Create username subdirectory
                        user_dest = dest_base / target_username
                        user_dest.mkdir(parents=True, exist_ok=True)
                        for file_path in downloaded:
                            src = Path(file_path)
                            if src.exists():
                                dest = user_dest / src.name
                                move_manager.start_batch(
                                    platform='instagram',
                                    source=target_username,
                                    content_type='post'
                                )
                                success = move_manager.move_file(src, dest)
                                if success:
                                    moved_files.append(str(dest))
                                move_manager.end_batch()
                        # Emit scraper_completed event
                        if app_state.scraper_event_emitter:
                            app_state.scraper_event_emitter.emit_scraper_completed(
                                session_id=session_id,
                                stats={
                                    'total_downloaded': len(downloaded),
                                    'moved': len(moved_files),
                                    'review': 0,  # Would need to track separately
                                    'duplicates': 0,  # Would need to track separately
                                    'failed': len(downloaded) - len(moved_files)
                                }
                            )
                        return {
                            "success": True,
                            "message": f"Downloaded and processed {len(moved_files)} file(s)",
                            "files": moved_files,
                            "username": target_username
                        }
                    finally:
                        if temp_dir.exists():
                            shutil.rmtree(temp_dir, ignore_errors=True)
                # Use a fresh ThreadPoolExecutor to avoid Playwright sync API issues
                # Playwright detects if there's an event loop in the thread and errors
                loop = asyncio.get_event_loop()
                with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
                    result = await loop.run_in_executor(executor, do_download)
                activity_manager.stop_activity()
                invalidate_download_cache()
                if manager:
                    # Get username from result if available
                    completed_username = result.get('username') if result else None
                    await manager.broadcast({
                        "type": "download_completed",
                        "platform": "instagram",
                        "username": completed_username or username or f"post/{shortcode}",
                        "post_id": shortcode,
                        "result": result,
                        "timestamp": now_iso8601()
                    })
                return result
            except Exception as e:
                logger.error(f"Failed to download Instagram post {shortcode}: {e}")
                import traceback
                traceback.print_exc()
                # Best-effort: the activity may already be stopped.
                try:
                    activity_manager.stop_activity()
                except Exception:
                    pass
                manager = getattr(app_state, 'websocket_manager', None)
                if manager:
                    await manager.broadcast({
                        "type": "download_error",
                        "platform": "instagram",
                        "username": username or f"post/{shortcode}",
                        "post_id": shortcode,
                        "error": str(e),
                        "timestamp": now_iso8601()
                    })
            finally:
                # Remove from queue tracking
                if shortcode in _instagram_post_queue:
                    _instagram_post_queue.remove(shortcode)
    background_tasks.add_task(run_post_download)
    return {
        "success": True,
        "task_id": task_id,
        "post_id": shortcode,
        "message": f"Download started for post {shortcode}"
    }
# ============================================================================
# YOUTUBE CHANNEL MONITORS - Global Settings + Channel List
# ============================================================================
class YouTubeMonitorSettingsUpdate(BaseModel):
    """Update global YouTube monitor settings.

    All fields are optional; fields left as None are forwarded unchanged
    to YouTubeChannelMonitor.update_global_settings.
    """
    # Search phrases used by the monitor.
    phrases: Optional[List[str]] = None
    # How often to check channels, in hours.
    check_interval_hours: Optional[int] = None
    # Download quality preset.
    quality: Optional[str] = None
    # Master on/off switch for monitoring.
    enabled: Optional[bool] = None
    auto_start_queue: Optional[bool] = None
    notifications_enabled: Optional[bool] = None
    # Auto-pause channels inactive for this many months.
    auto_pause_threshold_months: Optional[int] = None
    # Slower re-check cadence for paused channels, in days.
    paused_check_interval_days: Optional[int] = None
    max_results_per_phrase: Optional[int] = None
class YouTubeChannelCreate(BaseModel):
    """Add a YouTube channel to monitor."""
    # Channel URL; must be unique across monitored channels.
    channel_url: str
    # Optional human-readable name; derived elsewhere when omitted.
    channel_name: Optional[str] = None
    # New channels are monitored immediately by default.
    enabled: bool = True
class YouTubeChannelUpdate(BaseModel):
    """Update a YouTube channel. All fields optional; None means unchanged."""
    channel_url: Optional[str] = None
    channel_name: Optional[str] = None
    enabled: Optional[bool] = None
    # Exempts the channel from auto-pause — TODO confirm the handler
    # actually applies this field.
    always_active: Optional[bool] = None
@router.get("/platforms/youtube-monitors")
@limiter.limit("100/minute")
@handle_exceptions
async def get_youtube_monitors(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Get YouTube monitor global settings and all channels."""
    from modules.youtube_channel_monitor import YouTubeChannelMonitor
    from ..core.config import Settings
    monitor = YouTubeChannelMonitor(str(Settings().DB_PATH))
    channels = monitor.get_all_channels()
    return {
        "success": True,
        "settings": monitor.get_global_settings(),
        "channels": channels,
        "count": len(channels),
    }
@router.put("/platforms/youtube-monitors/settings")
@limiter.limit("30/minute")
@handle_exceptions
async def update_youtube_monitor_settings(
    data: YouTubeMonitorSettingsUpdate,
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Update global YouTube monitor settings (phrases, interval, quality)."""
    from modules.youtube_channel_monitor import YouTubeChannelMonitor
    from ..core.config import Settings
    monitor = YouTubeChannelMonitor(str(Settings().DB_PATH))
    # Forward every field explicitly; None values mean "leave unchanged".
    payload = {
        'phrases': data.phrases,
        'check_interval_hours': data.check_interval_hours,
        'quality': data.quality,
        'enabled': data.enabled,
        'auto_start_queue': data.auto_start_queue,
        'notifications_enabled': data.notifications_enabled,
        'auto_pause_threshold_months': data.auto_pause_threshold_months,
        'paused_check_interval_days': data.paused_check_interval_days,
        'max_results_per_phrase': data.max_results_per_phrase,
    }
    success = monitor.update_global_settings(**payload)
    return {
        "success": success,
        "settings": monitor.get_global_settings(),
        "message": "YouTube monitor settings updated successfully"
    }
@router.post("/platforms/youtube-monitors/channels")
@limiter.limit("30/minute")
@handle_exceptions
async def add_youtube_channel(
    data: YouTubeChannelCreate,
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Add a new YouTube channel to monitor.

    Raises:
        ValidationError: when channel_url is missing, or the URL is
            already being monitored (UNIQUE constraint).
    """
    from modules.youtube_channel_monitor import YouTubeChannelMonitor
    from ..core.config import Settings
    logger.info(f"Adding YouTube channel: {data.channel_url}")
    if not data.channel_url:
        logger.error("channel_url is missing")
        raise ValidationError("channel_url is required")
    settings = Settings()
    monitor = YouTubeChannelMonitor(str(settings.DB_PATH))
    try:
        channel_id = monitor.add_channel(
            channel_url=data.channel_url,
            channel_name=data.channel_name,
            enabled=data.enabled
        )
        logger.info(f"Successfully added channel {channel_id}: {data.channel_url}")
    except Exception as e:
        logger.error(f"Error adding channel: {e}")
        # Translate the SQLite uniqueness violation into a validation
        # error; chain the original cause for easier debugging.
        if "UNIQUE constraint failed" in str(e):
            raise ValidationError("This channel URL is already being monitored") from e
        raise
    channel = monitor.get_channel(channel_id)
    return {
        "success": True,
        "channel": channel,
        "message": "YouTube channel added successfully"
    }
@router.put("/platforms/youtube-monitors/channels/{channel_id}")
@limiter.limit("30/minute")
@handle_exceptions
async def update_youtube_channel(
    channel_id: int,
    data: YouTubeChannelUpdate,
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Update a YouTube channel.

    Only fields present in the request body (non-None) are changed.

    Raises:
        HTTPException 404: channel does not exist.
    """
    from modules.youtube_channel_monitor import YouTubeChannelMonitor
    from ..core.config import Settings
    settings = Settings()
    monitor = YouTubeChannelMonitor(str(settings.DB_PATH))
    existing = monitor.get_channel(channel_id)
    if not existing:
        raise HTTPException(status_code=404, detail="Channel not found")
    updates = {}
    if data.channel_url is not None:
        updates['channel_url'] = data.channel_url
    if data.channel_name is not None:
        updates['channel_name'] = data.channel_name
    if data.enabled is not None:
        updates['enabled'] = data.enabled
    # Fix: always_active was accepted by the model but previously never
    # applied, so toggling it from the UI silently did nothing.
    if data.always_active is not None:
        updates['always_active'] = data.always_active
    if updates:
        monitor.update_channel(channel_id, **updates)
    channel = monitor.get_channel(channel_id)
    return {
        "success": True,
        "channel": channel,
        "message": "YouTube channel updated successfully"
    }
@router.delete("/platforms/youtube-monitors/channels/{channel_id}")
@limiter.limit("30/minute")
@handle_exceptions
async def delete_youtube_channel(
    channel_id: int,
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Delete a YouTube channel from monitoring."""
    from modules.youtube_channel_monitor import YouTubeChannelMonitor
    from ..core.config import Settings

    cfg = Settings()
    monitor = YouTubeChannelMonitor(str(cfg.DB_PATH))

    if not monitor.get_channel(channel_id):
        raise HTTPException(status_code=404, detail="Channel not found")

    deleted = monitor.delete_channel(channel_id)
    outcome = "YouTube channel deleted successfully" if deleted else "Failed to delete channel"
    return {
        "success": deleted,
        "message": outcome
    }
@router.post("/platforms/youtube-monitors/check-all")
@limiter.limit("5/minute")
@handle_exceptions
async def check_all_youtube_channels(
    request: Request,
    background_tasks: BackgroundTasks,
    current_user: Dict = Depends(get_current_user)
):
    """Trigger an immediate check for all enabled YouTube channels."""
    from modules.youtube_channel_monitor import YouTubeChannelMonitor
    from modules.activity_status import get_activity_manager
    from ..core.config import Settings

    cfg = Settings()
    state = get_app_state()
    activity = get_activity_manager(state.db)
    monitor = YouTubeChannelMonitor(str(cfg.DB_PATH), activity)

    # Without configured phrases there is nothing to match videos against.
    if not monitor.get_global_settings().get('phrases'):
        raise ValidationError("No match phrases configured. Please add phrases in settings first.")

    enabled_channels = monitor.get_enabled_channels()
    if not enabled_channels:
        raise ValidationError("No enabled channels to check")

    # The actual check runs after the response is sent.
    async def _run_all_checks():
        try:
            found = await monitor.check_all_now()
            logger.info(f"YouTube monitor check-all complete: {found} videos found")
        except Exception as e:
            logger.error(f"Error in YouTube monitor check-all: {e}")

    background_tasks.add_task(_run_all_checks)

    return {
        "success": True,
        "message": f"Checking {len(enabled_channels)} channels...",
        "channels_count": len(enabled_channels)
    }
@router.post("/platforms/youtube-monitors/channels/{channel_id}/check-now")
@limiter.limit("10/minute")
@handle_exceptions
async def check_youtube_channel_now(
    channel_id: int,
    request: Request,
    background_tasks: BackgroundTasks,
    current_user: Dict = Depends(get_current_user)
):
    """Trigger an immediate check for a single YouTube channel.

    The check itself runs as a background task; the response only confirms
    that the check was scheduled.

    Raises:
        HTTPException: 404 if the channel does not exist.
        ValidationError: If no match phrases are configured.
    """
    from modules.youtube_channel_monitor import YouTubeChannelMonitor
    from ..core.config import Settings

    settings = Settings()
    monitor = YouTubeChannelMonitor(str(settings.DB_PATH))

    existing = monitor.get_channel(channel_id)
    if not existing:
        raise HTTPException(status_code=404, detail="Channel not found")

    global_settings = monitor.get_global_settings()
    if not global_settings.get('phrases'):
        raise ValidationError("No match phrases configured. Please add phrases in settings first.")

    async def run_check():
        try:
            videos_found = await monitor.check_single_channel(channel_id)
            logger.info(f"YouTube channel {channel_id} check complete: {videos_found} videos found")
        except Exception as e:
            logger.error(f"Error checking YouTube channel {channel_id}: {e}")

    background_tasks.add_task(run_check)

    return {
        "success": True,
        # Fixed: was an f-string with no placeholders (ruff F541); the
        # rendered string is unchanged.
        "message": "Check started for channel",
        # Prefer the stored name, fall back to the URL if none was resolved.
        "channel_name": existing.get('channel_name') or existing.get('channel_url')
    }
@router.post("/platforms/youtube-monitors/check-selected")
@limiter.limit("10/minute")
@handle_exceptions
async def check_selected_youtube_channels(
    request: Request,
    background_tasks: BackgroundTasks,
    channel_ids: list[int] = Body(..., embed=True),
    current_user: Dict = Depends(get_current_user)
):
    """Trigger an immediate check for selected YouTube channels."""
    from modules.youtube_channel_monitor import YouTubeChannelMonitor
    from ..core.config import Settings

    monitor = YouTubeChannelMonitor(str(Settings().DB_PATH))

    if not monitor.get_global_settings().get('phrases'):
        raise ValidationError("No match phrases configured. Please add phrases in settings first.")

    # Reject the whole request up front if any requested id is unknown.
    for cid in channel_ids:
        if not monitor.get_channel(cid):
            raise HTTPException(status_code=404, detail=f"Channel {cid} not found")

    # Sequentially check each channel in the background; one failing
    # channel must not abort the rest of the batch.
    async def _check_batch():
        try:
            for cid in channel_ids:
                try:
                    found = await monitor.check_single_channel(cid)
                    logger.info(f"YouTube channel {cid} check complete: {found} videos found")
                except Exception as e:
                    logger.error(f"Error checking YouTube channel {cid}: {e}")
        except Exception as e:
            logger.error(f"Error in batch channel check: {e}")

    background_tasks.add_task(_check_batch)

    total = len(channel_ids)
    return {
        "success": True,
        "message": f"Check started for {total} channel(s)",
        "channel_count": total
    }
@router.get("/platforms/youtube-monitors/channels/{channel_id}/history")
@limiter.limit("100/minute")
@handle_exceptions
async def get_youtube_channel_history(
    channel_id: int,
    request: Request,
    limit: int = 50,
    current_user: Dict = Depends(get_current_user)
):
    """Get history for a YouTube channel."""
    from modules.youtube_channel_monitor import YouTubeChannelMonitor
    from ..core.config import Settings

    monitor = YouTubeChannelMonitor(str(Settings().DB_PATH))

    if not monitor.get_channel(channel_id):
        raise HTTPException(status_code=404, detail="Channel not found")

    entries = monitor.get_channel_history(channel_id, limit)
    return {
        "success": True,
        "history": entries,
        "count": len(entries)
    }
@router.get("/platforms/youtube-monitors/history")
@limiter.limit("100/minute")
@handle_exceptions
async def get_all_youtube_history(
    request: Request,
    limit: int = 100,
    current_user: Dict = Depends(get_current_user)
):
    """Get combined history for all YouTube channels."""
    from modules.youtube_channel_monitor import YouTubeChannelMonitor
    from ..core.config import Settings

    monitor = YouTubeChannelMonitor(str(Settings().DB_PATH))
    entries = monitor.get_all_history(limit)
    return {
        "success": True,
        "history": entries,
        "count": len(entries)
    }
# ============================================================================
# YOUTUBE CHANNEL MONITORS - v11.20.0 Status Management
# ============================================================================
@router.get("/platforms/youtube-monitors/channels")
@limiter.limit("300/minute")
@handle_exceptions
async def get_youtube_channels_filtered(
    request: Request,
    status: Optional[str] = None,
    always_active: Optional[str] = None,
    search: Optional[str] = None,
    sort_field: str = 'name',
    sort_ascending: bool = True,
    limit: Optional[int] = None,
    offset: int = 0,
    current_user: Dict = Depends(get_current_user)
):
    """Get YouTube channel monitors with filtering, searching, sorting, and pagination."""
    from modules.youtube_channel_monitor import YouTubeChannelMonitor
    from ..core.config import Settings

    monitor = YouTubeChannelMonitor(str(Settings().DB_PATH))

    # Map the query parameters onto the monitor's filter API.
    query = dict(
        status_filter=status,
        always_active_filter=always_active,
        search=search,
        sort_field=sort_field,
        sort_ascending=sort_ascending,
        limit=limit,
        offset=offset,
    )
    page = monitor.get_channels_filtered(**query)

    return {
        "success": True,
        "channels": page['channels'],
        "total": page['total']
    }
@router.get("/platforms/youtube-monitors/active")
@limiter.limit("100/minute")
@handle_exceptions
async def get_active_youtube_channels(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Get active YouTube channel monitors (deprecated - use /channels with status=active)."""
    from modules.youtube_channel_monitor import YouTubeChannelMonitor
    from ..core.config import Settings

    monitor = YouTubeChannelMonitor(str(Settings().DB_PATH))
    active = monitor.get_active_channels()
    return {
        "success": True,
        "channels": active,
        "count": len(active)
    }
@router.get("/platforms/youtube-monitors/paused")
@limiter.limit("100/minute")
@handle_exceptions
async def get_paused_youtube_channels(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Get paused YouTube channel monitors (deprecated - use /channels with status=paused_all)."""
    from modules.youtube_channel_monitor import YouTubeChannelMonitor
    from ..core.config import Settings

    monitor = YouTubeChannelMonitor(str(Settings().DB_PATH))
    paused = monitor.get_paused_channels()
    return {
        "success": True,
        "channels": paused,
        "count": len(paused)
    }
@router.get("/platforms/youtube-monitors/statistics")
@limiter.limit("300/minute")
@handle_exceptions
async def get_youtube_monitor_statistics(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Get YouTube monitor statistics."""
    from modules.youtube_channel_monitor import YouTubeChannelMonitor
    from ..core.config import Settings

    monitor = YouTubeChannelMonitor(str(Settings().DB_PATH))
    return {
        "success": True,
        "statistics": monitor.get_statistics()
    }
@router.post("/platforms/youtube-monitors/channels/{channel_id}/pause")
@limiter.limit("30/minute")
@handle_exceptions
async def pause_youtube_channel(
    request: Request,
    channel_id: int,
    reason: Optional[str] = None,
    current_user: Dict = Depends(get_current_user)
):
    """Manually pause a YouTube channel.

    Args:
        channel_id: ID of the channel to pause.
        reason: Optional free-text reason stored with the pause.

    Raises:
        HTTPException: 404 if the channel does not exist.
    """
    from modules.youtube_channel_monitor import YouTubeChannelMonitor
    from ..core.config import Settings

    settings = Settings()
    monitor = YouTubeChannelMonitor(str(settings.DB_PATH))

    existing = monitor.get_channel(channel_id)
    if not existing:
        raise HTTPException(status_code=404, detail="Channel not found")

    # auto=False marks this as a manual (user-initiated) pause.
    success = monitor.pause_channel(channel_id, reason=reason, auto=False)

    # Fixed: existing['channel_name'] could render "None" (or raise
    # KeyError) when no name is stored; fall back to the URL, matching
    # the check-now endpoint.
    display = existing.get('channel_name') or existing.get('channel_url')
    return {
        "success": success,
        "message": f"Channel '{display}' paused"
    }
@router.post("/platforms/youtube-monitors/channels/{channel_id}/resume")
@limiter.limit("30/minute")
@handle_exceptions
async def resume_youtube_channel(
    request: Request,
    channel_id: int,
    current_user: Dict = Depends(get_current_user)
):
    """Resume a paused YouTube channel.

    Raises:
        HTTPException: 404 if the channel does not exist.
    """
    from modules.youtube_channel_monitor import YouTubeChannelMonitor
    from ..core.config import Settings

    settings = Settings()
    monitor = YouTubeChannelMonitor(str(settings.DB_PATH))

    existing = monitor.get_channel(channel_id)
    if not existing:
        raise HTTPException(status_code=404, detail="Channel not found")

    success = monitor.resume_channel(channel_id)

    # Fixed: existing['channel_name'] could render "None" (or raise
    # KeyError) when no name is stored; fall back to the URL, matching
    # the check-now endpoint.
    display = existing.get('channel_name') or existing.get('channel_url')
    return {
        "success": success,
        "message": f"Channel '{display}' resumed"
    }
@router.post("/platforms/youtube-monitors/channels/{channel_id}/toggle-always-active")
@limiter.limit("30/minute")
@handle_exceptions
async def toggle_always_active(
    request: Request,
    channel_id: int,
    value: bool,
    current_user: Dict = Depends(get_current_user)
):
    """Toggle always_active flag for a channel.

    Args:
        channel_id: ID of the channel to update.
        value: New always_active state.

    Raises:
        HTTPException: 404 if the channel does not exist.
    """
    from modules.youtube_channel_monitor import YouTubeChannelMonitor
    from ..core.config import Settings

    settings = Settings()
    monitor = YouTubeChannelMonitor(str(settings.DB_PATH))

    existing = monitor.get_channel(channel_id)
    if not existing:
        raise HTTPException(status_code=404, detail="Channel not found")

    success = monitor.toggle_always_active(channel_id, value)

    # Fixed: existing['channel_name'] could render "None" (or raise
    # KeyError) when no name is stored; fall back to the URL, matching
    # the check-now endpoint.
    display = existing.get('channel_name') or existing.get('channel_url')
    return {
        "success": success,
        "message": f"Always active {'enabled' if value else 'disabled'} for '{display}'"
    }
@router.post("/platforms/youtube-monitors/check-paused")
@limiter.limit("5/minute")
@handle_exceptions
async def check_paused_channels_now(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Check paused channels for new activity."""
    from modules.youtube_channel_monitor import YouTubeChannelMonitor
    from ..core.config import Settings

    monitor = YouTubeChannelMonitor(str(Settings().DB_PATH))
    resumed_count = monitor.check_paused_channels_sync()

    if resumed_count > 0:
        outcome = f"{resumed_count} channels auto-resumed"
    else:
        outcome = "No channels resumed"

    return {
        "success": True,
        "resumed_count": resumed_count,
        "message": outcome
    }