- scheduler.py: Use full path for scheduler_state.db instead of relative name
- recycle.py: Use full path for thumbnails.db instead of relative name
- cloud_backup.py, maintenance.py, stats.py: Require admin for config/cleanup/settings endpoints
- press.py: Add auth to press image serving endpoint
- private_gallery.py: Fix _create_pg_job call and add missing secrets import
- appearances.py: Use sync httpx instead of asyncio.run for background thread HTTP call

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
3423 lines
148 KiB
Python
3423 lines
148 KiB
Python
"""API endpoints for celebrity appearances tracking"""
|
|
from fastapi import APIRouter, HTTPException, Depends, BackgroundTasks, Query, Body
|
|
from typing import List, Dict, Optional
|
|
from datetime import datetime, timedelta
|
|
import asyncio
|
|
import re
|
|
import os
|
|
import tempfile
|
|
import xml.etree.ElementTree as ET
|
|
from pydantic import BaseModel
|
|
from modules.unified_database import UnifiedDatabase
|
|
from modules.tmdb_client import TMDbClient
|
|
from modules.podchaser_client import PodchaserClient
|
|
from modules.taddy_client import TaddyClient
|
|
from modules.plex_client import PlexClient, PlexOAuth
|
|
from modules.universal_logger import get_logger
|
|
from modules.pushover_notifier import PushoverNotifier, create_notifier_from_config
|
|
from modules.settings_manager import SettingsManager
|
|
from modules.activity_status import get_activity_manager
|
|
from slowapi import Limiter
|
|
from slowapi.util import get_remote_address
|
|
from web.backend.core.http_client import http_client
|
|
from web.backend.core.dependencies import get_current_user, require_admin, get_app_state
|
|
|
|
logger = get_logger('Appearances')
|
|
router = APIRouter(prefix="/api/appearances", tags=["appearances"])
|
|
limiter = Limiter(key_func=get_remote_address)
|
|
|
|
# TMDB image base URL
|
|
TMDB_IMAGE_BASE = "https://image.tmdb.org/t/p/w500"
|
|
|
|
|
|
def _enrich_with_roles(appearances: list, conn) -> list:
|
|
"""Add all_roles list to appearances that have multiple credit types.
|
|
|
|
For each appearance with >1 credit type, fetches the distinct
|
|
(credit_type, character_name, job_title) from the DB.
|
|
"""
|
|
multi = [a for a in appearances if len(a.get("all_credit_types", [])) > 1]
|
|
if not multi:
|
|
return appearances
|
|
|
|
cursor = conn.cursor()
|
|
for a in multi:
|
|
cursor.execute("""
|
|
SELECT DISTINCT credit_type, character_name, job_title
|
|
FROM celebrity_appearances
|
|
WHERE celebrity_id = ? AND show_name = ? AND appearance_type = ?
|
|
""", (a["celebrity_id"], a["show_name"], a["appearance_type"]))
|
|
a["all_roles"] = [
|
|
{"credit_type": r[0], "character_name": r[1], "job_title": r[2]}
|
|
for r in cursor.fetchall() if r[0]
|
|
]
|
|
|
|
return appearances
|
|
|
|
|
|
async def cache_uncached_posters(db) -> int:
    """Download and store poster bytes for appearances missing cached data.

    Scans up to 50 rows that have a poster_url but no poster_data, fetches
    each image over HTTP, and writes the raw bytes back onto the row.
    Returns the number of posters cached (0 on any top-level failure).
    """
    try:
        with db.get_connection() as conn:
            reader = conn.cursor()
            reader.execute('''
                SELECT id, poster_url FROM celebrity_appearances
                WHERE poster_data IS NULL AND poster_url IS NOT NULL AND poster_url != ''
                LIMIT 50
            ''')
            pending = reader.fetchall()

            stored = 0
            for appearance_id, poster_url in [(r[0], r[1]) for r in pending]:
                try:
                    # Relative TMDB paths need the image base prepended;
                    # absolute URLs are used as-is.
                    if poster_url.startswith('http'):
                        image_url = poster_url
                    else:
                        image_url = f"{TMDB_IMAGE_BASE}{poster_url}"

                    response = await http_client.get(image_url)
                    if response.status_code == 200 and response.content:
                        # Separate short-lived write connection per poster.
                        with db.get_connection(for_write=True) as wconn:
                            wconn.cursor().execute(
                                'UPDATE celebrity_appearances SET poster_data = ? WHERE id = ?',
                                (response.content, appearance_id)
                            )
                            wconn.commit()
                        stored += 1
                except Exception:
                    pass  # Continue with next poster

                # Small delay to avoid rate limiting
                await asyncio.sleep(0.1)

            if stored > 0:
                logger.info(f"Pre-cached {stored} appearance posters")
            return stored
    except Exception as e:
        logger.warning(f"Error pre-caching posters: {e}")
        return 0
|
|
|
|
|
|
async def download_poster_image(poster_path: str) -> Optional[str]:
    """Fetch a TMDB poster into a temporary file and return the file path.

    Returns None when poster_path is empty or the download fails.
    The caller is responsible for deleting the returned temp file.
    """
    if not poster_path:
        return None

    try:
        # Absolute URLs pass through; bare TMDB paths get the image base.
        if poster_path.startswith('http'):
            full_url = poster_path
        else:
            full_url = f"{TMDB_IMAGE_BASE}{poster_path}"

        response = await http_client.get(full_url)
        if response.status_code != 200:
            logger.warning(f"Failed to download poster: {full_url} (status {response.status_code})")
            return None

        # Choose a file extension from the response content type (jpg default).
        content_type = response.headers.get('content-type', '')
        if 'png' in content_type:
            suffix = '.png'
        elif 'gif' in content_type:
            suffix = '.gif'
        else:
            suffix = '.jpg'

        # Write the bytes to a fresh temp file, closing the descriptor even
        # if the write fails partway.
        handle, temp_path = tempfile.mkstemp(suffix=suffix)
        try:
            os.write(handle, response.content)
        finally:
            os.close(handle)

        return temp_path

    except Exception as e:
        logger.error(f"Error downloading poster {poster_path}: {e}")
        return None
|
|
|
|
# Request models
|
|
class SyncRequest(BaseModel):
    """Request body for triggering an appearance sync."""
    # When None, sync every tracked celebrity; otherwise only these IDs.
    celebrity_ids: Optional[List[int]] = None
|
|
|
|
# Dependency
|
|
def get_db():
    """FastAPI dependency: return the shared UnifiedDatabase from app state."""
    return get_app_state().db
|
|
|
|
# Helper function to backfill audio URLs from RSS feeds
|
|
async def backfill_audio_urls_from_rss(celebrity_id: int, db: UnifiedDatabase):
    """Backfill missing podcast audio URLs by parsing Omny.fm RSS feeds.

    Finds Podcast appearances for *celebrity_id* whose audio_url is NULL and
    whose poster_url is hosted on omnycontent.com, derives the show's RSS
    feed URL from the poster URL, and matches feed episodes to DB rows by
    exact episode title.  Errors on one show are logged and do not stop
    processing of the remaining shows.
    """
    try:
        with db.get_connection(for_write=True) as conn:
            cursor = conn.cursor()

            # Get podcast shows for this celebrity that use Omny.fm and have missing audio URLs
            cursor.execute("""
                SELECT DISTINCT show_name, poster_url
                FROM celebrity_appearances
                WHERE celebrity_id = ?
                    AND appearance_type = 'Podcast'
                    AND audio_url IS NULL
                    AND poster_url LIKE '%omnycontent.com%'
            """, (celebrity_id,))

            shows = cursor.fetchall()
            if not shows:
                return

            for show_name, poster_url in shows:
                try:
                    # The Omny poster URL embeds the program and playlist IDs:
                    # /d/programs/<podcast_id>/<show_id>/...
                    match = re.search(r'/d/programs/([^/]+)/([^/]+)/', poster_url)
                    if not match:
                        continue

                    podcast_id = match.group(1)
                    show_id = match.group(2)
                    # NOTE(review): the third path segment is hard-coded; it looks
                    # like an Omny playlist GUID and presumably only works for
                    # feeds that share it -- confirm against the Omny URL scheme.
                    rss_url = f"https://www.omnycontent.com/d/playlist/{podcast_id}/{show_id}/ec2a36b6-26e2-41a9-b72b-af1f0112e1ed/podcast.rss"

                    # Fetch and parse RSS feed
                    response = await http_client.get(rss_url)
                    root = ET.fromstring(response.content)

                    # Map episode title -> enclosure (audio) URL from the feed.
                    episodes_in_feed = {}
                    for item in root.findall('.//item'):
                        title_elem = item.find('title')
                        enclosure_elem = item.find('enclosure')

                        if title_elem is not None and enclosure_elem is not None:
                            title = title_elem.text
                            audio_url = enclosure_elem.get('url')
                            if title and audio_url:
                                episodes_in_feed[title.strip()] = audio_url

                    if not episodes_in_feed:
                        continue

                    # Get episodes from database that need audio URLs
                    cursor.execute("""
                        SELECT id, episode_title
                        FROM celebrity_appearances
                        WHERE celebrity_id = ?
                            AND show_name = ?
                            AND appearance_type = 'Podcast'
                            AND audio_url IS NULL
                    """, (celebrity_id, show_name))

                    db_episodes = cursor.fetchall()
                    updated = 0

                    for db_id, episode_title in db_episodes:
                        # Exact title match only; feed titles were stripped above,
                        # DB titles are used as stored.
                        if episode_title in episodes_in_feed:
                            audio_url = episodes_in_feed[episode_title]
                            cursor.execute(
                                "UPDATE celebrity_appearances SET audio_url = ? WHERE id = ?",
                                (audio_url, db_id)
                            )
                            updated += 1

                    # Commit once per show, only if something changed.
                    if updated > 0:
                        conn.commit()
                        logger.info(f"Backfilled {updated} audio URLs from RSS feed for {show_name}")

                except Exception as e:
                    logger.error(f"Error backfilling audio URLs for {show_name}: {e}")
                    continue

    except Exception as e:
        logger.error(f"Error in backfill_audio_urls_from_rss: {e}")
|
|
|
|
@router.get("/upcoming")
async def get_upcoming_appearances(
    limit: int = Query(5, ge=1, le=100),
    days_ahead: int = Query(30, ge=1, le=3650),  # Up to 10 years for movies announced far in advance
    current_user: Dict = Depends(get_current_user),
    db: UnifiedDatabase = Depends(get_db)
):
    """
    Get upcoming appearances sorted by date.

    Returns the next `limit` appearances within the `days_ahead` window.
    TV shows, podcasts and movies are grouped per (celebrity, show, type)
    so multiple roles (acting + producing) collapse into one row; the row
    shown is the group's earliest upcoming episode.
    """
    try:
        cutoff_date = (datetime.now() + timedelta(days=days_ahead)).isoformat()

        # Two CTEs over the same WHERE window:
        #   ShowStats        - per-group distinct episode count plus the union
        #                      of credit types (GROUP_CONCAT).
        #   RankedAppearances- ROW_NUMBER picks one representative row per
        #                      group (earliest date) for TV/Podcast/Movie;
        #                      every other appearance type keeps its own row
        #                      (partitioned by its unique id).
        query = '''
            WITH ShowStats AS (
                SELECT
                    celebrity_id,
                    show_name,
                    appearance_type,
                    COUNT(DISTINCT COALESCE(season_number, 0) || '-' || COALESCE(episode_number, 0) || '-' || COALESCE(CAST(appearance_date AS TEXT), '')) as episode_count,
                    GROUP_CONCAT(DISTINCT credit_type) as all_credit_types,
                    MIN(appearance_date) as first_date
                FROM celebrity_appearances
                WHERE appearance_date >= date('now')
                    AND appearance_date <= ?
                    AND status = 'upcoming'
                GROUP BY celebrity_id, show_name, appearance_type
            ),
            RankedAppearances AS (
                SELECT
                    ca.*,
                    ROW_NUMBER() OVER (
                        PARTITION BY
                            CASE WHEN ca.appearance_type IN ('TV', 'Podcast', 'Movie')
                                THEN ca.celebrity_id || '-' || ca.show_name || '-' || ca.appearance_type
                                ELSE CAST(ca.id AS TEXT)
                            END
                        ORDER BY ca.appearance_date ASC, ca.season_number ASC, ca.episode_number ASC
                    ) as rn
                FROM celebrity_appearances ca
                WHERE
                    ca.appearance_date >= date('now')
                    AND ca.appearance_date <= ?
                    AND ca.status = 'upcoming'
            )
            SELECT
                r.id,
                r.celebrity_id,
                r.celebrity_name,
                r.appearance_type,
                r.show_name,
                r.episode_title,
                r.network,
                r.appearance_date,
                r.url,
                r.audio_url,
                r.watch_url,
                r.description,
                r.tmdb_show_id,
                r.season_number,
                r.episode_number,
                r.status,
                r.poster_url,
                COALESCE(ss.episode_count, 1) as episode_count,
                r.credit_type,
                r.character_name,
                r.job_title,
                r.plex_rating_key,
                r.plex_show_rating_key,
                ss.all_credit_types
            FROM RankedAppearances r
            LEFT JOIN ShowStats ss ON r.celebrity_id = ss.celebrity_id
                AND r.show_name = ss.show_name AND r.appearance_type = ss.appearance_type
            WHERE r.rn = 1
            ORDER BY r.appearance_date ASC
            LIMIT ?
        '''

        with db.get_connection() as conn:
            cursor = conn.cursor()
            # cutoff_date is bound twice: once for each CTE's WHERE clause.
            cursor.execute(query, (cutoff_date, cutoff_date, limit))
            rows = cursor.fetchall()

            appearances = []
            for row in rows:
                # row[23] = GROUP_CONCAT of distinct credit types; fall back to
                # this row's own credit_type (row[18]) when the join found none.
                all_credit_types = row[23].split(',') if row[23] else [row[18]] if row[18] else []

                # Columns are positional; indices follow the SELECT list above.
                appearances.append({
                    "id": row[0],
                    "celebrity_id": row[1],
                    "celebrity_name": row[2],
                    "appearance_type": row[3],
                    "show_name": row[4],
                    "episode_title": row[5],
                    "network": row[6],
                    "appearance_date": row[7],
                    "url": row[8],
                    "audio_url": row[9],
                    "watch_url": row[10],
                    "description": row[11],
                    "tmdb_show_id": row[12],
                    "season_number": row[13],
                    "episode_number": row[14],
                    "status": row[15],
                    "poster_url": row[16],
                    "episode_count": row[17],
                    "credit_type": row[18],
                    "character_name": row[19],
                    "job_title": row[20],
                    "plex_rating_key": row[21],
                    "plex_watch_url": None,  # No Plex links for upcoming - episodes haven't aired yet
                    "all_credit_types": all_credit_types,
                })

            # Expand per-role details for rows with multiple credit types.
            return _enrich_with_roles(appearances, conn)

    except Exception as e:
        logger.error(f"Error fetching upcoming appearances: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
@router.get("/aired")
async def get_aired_appearances(
    limit: int = Query(20, ge=1, le=100),
    days_back: int = Query(36500, ge=1, le=36500),  # 100 years - essentially all history
    current_user: Dict = Depends(get_current_user),
    db: UnifiedDatabase = Depends(get_db)
):
    """
    Get aired (past) appearances sorted by date (most recent first).

    Returns `limit` aired appearances within the `days_back` window
    (default: all history).  TV shows and podcasts are grouped per
    (celebrity, show), showing only the most recent episode plus the
    group's episode count; movies and other types keep one row each.
    Rows matched to the Plex library get web/app deep links.
    """
    try:
        cutoff_date = (datetime.now() - timedelta(days=days_back)).isoformat()

        # Use same grouping logic as /all endpoint - group TV and Podcasts by show.
        # ShowStats aggregates per group; RankedAppearances picks the newest
        # episode per group via ROW_NUMBER (rn = 1).
        query = '''
            WITH ShowStats AS (
                SELECT
                    celebrity_id,
                    show_name,
                    appearance_type,
                    COUNT(DISTINCT COALESCE(season_number, 0) || '-' || COALESCE(episode_number, 0) || '-' || COALESCE(CAST(appearance_date AS TEXT), '')) as episode_count,
                    GROUP_CONCAT(DISTINCT credit_type) as all_credit_types,
                    MAX(appearance_date) as last_date
                FROM celebrity_appearances
                WHERE appearance_type IN ('TV', 'Podcast')
                    AND appearance_date < datetime('now')
                    AND appearance_date >= ?
                    AND status = 'aired'
                GROUP BY celebrity_id, show_name, appearance_type
            ),
            RankedAppearances AS (
                SELECT
                    ca.*,
                    ROW_NUMBER() OVER (
                        PARTITION BY
                            CASE WHEN ca.appearance_type IN ('TV', 'Podcast')
                                THEN ca.celebrity_id || '-' || ca.show_name || '-' || ca.appearance_type
                                ELSE CAST(ca.id AS TEXT)
                            END
                        ORDER BY ca.appearance_date DESC, ca.season_number DESC, ca.episode_number DESC
                    ) as rn
                FROM celebrity_appearances ca
                WHERE
                    ca.appearance_date < datetime('now')
                    AND ca.appearance_date >= ?
                    AND ca.status = 'aired'
            )
            SELECT
                r.id,
                r.celebrity_id,
                r.celebrity_name,
                r.appearance_type,
                r.show_name,
                r.episode_title,
                r.network,
                CASE
                    WHEN r.appearance_type IN ('TV', 'Podcast') AND ss.episode_count > 1 THEN ss.last_date
                    ELSE r.appearance_date
                END as appearance_date,
                r.url,
                r.audio_url,
                r.watch_url,
                r.description,
                r.tmdb_show_id,
                r.season_number,
                r.episode_number,
                r.status,
                r.poster_url,
                COALESCE(ss.episode_count, 1) as episode_count,
                r.credit_type,
                r.character_name,
                r.job_title,
                r.plex_rating_key,
                r.plex_show_rating_key,
                ss.all_credit_types
            FROM RankedAppearances r
            LEFT JOIN ShowStats ss ON r.celebrity_id = ss.celebrity_id
                AND r.show_name = ss.show_name AND r.appearance_type = ss.appearance_type
            WHERE r.rn = 1
            ORDER BY appearance_date DESC
            LIMIT ?
        '''

        with db.get_connection() as conn:
            cursor = conn.cursor()
            # cutoff_date bound twice (once per CTE WHERE clause).
            cursor.execute(query, (cutoff_date, cutoff_date, limit))
            rows = cursor.fetchall()

            # Get Plex config for watch URLs; missing config just disables links.
            plex_url = None
            plex_machine_id = None
            try:
                cursor.execute("SELECT plex_url, plex_machine_id FROM appearance_config WHERE id = 1")
                plex_row = cursor.fetchone()
                if plex_row:
                    plex_url = plex_row[0]
                    plex_machine_id = plex_row[1]
            except Exception as e:
                logger.debug(f"Could not fetch Plex config: {e}")

            appearances = []
            for row in rows:
                plex_watch_url = None
                plex_app_url = None
                # NOTE(review): episode_count is assigned but unused below.
                episode_count = row[17]
                # Use show key for multi-episode shows, episode key for single episodes
                # Use episode rating_key if available, fall back to show rating_key
                plex_key = row[21] if row[21] else row[22]
                if plex_key and plex_url and plex_machine_id:
                    plex_watch_url = f"{plex_url}/web/index.html#!/server/{plex_machine_id}/details?key=/library/metadata/{plex_key}"
                    plex_app_url = f"plex://play?metadataKey=/library/metadata/{plex_key}&server={plex_machine_id}"

                # row[23] = GROUP_CONCAT of distinct credit types; fall back to
                # this row's own credit_type (row[18]) when the join found none.
                all_credit_types = row[23].split(',') if row[23] else [row[18]] if row[18] else []

                # Columns are positional; indices follow the SELECT list above.
                appearances.append({
                    "id": row[0],
                    "celebrity_id": row[1],
                    "celebrity_name": row[2],
                    "appearance_type": row[3],
                    "show_name": row[4],
                    "episode_title": row[5],
                    "network": row[6],
                    "appearance_date": row[7],
                    "url": row[8],
                    "audio_url": row[9],
                    "watch_url": row[10],
                    "description": row[11],
                    "tmdb_show_id": row[12],
                    "season_number": row[13],
                    "episode_number": row[14],
                    "status": row[15],
                    "poster_url": row[16],
                    "episode_count": row[17],
                    "credit_type": row[18],
                    "character_name": row[19],
                    "job_title": row[20],
                    "plex_rating_key": row[21],
                    "plex_watch_url": plex_watch_url,
                    "plex_app_url": plex_app_url,
                    "all_credit_types": all_credit_types,
                })

            # Expand per-role details for rows with multiple credit types.
            return _enrich_with_roles(appearances, conn)

    except Exception as e:
        logger.error(f"Error fetching aired appearances: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
@router.get("/all")
async def get_all_appearances(
    status: Optional[str] = Query(None),
    appearance_type: Optional[str] = Query(None),
    celebrity_name: Optional[str] = Query(None),
    credit_type: Optional[str] = Query(None, description="Filter by credit type: acting, directing, producing, writing, creator, guest"),
    search: Optional[str] = Query(None),
    date_filter: Optional[str] = Query(None),
    start_date: Optional[str] = Query(None),
    end_date: Optional[str] = Query(None),
    plex_only: bool = Query(False, description="Only show items available in Plex library"),
    current_user: Dict = Depends(get_current_user),
    db: UnifiedDatabase = Depends(get_db)
):
    """
    Get all appearances with optional server-side filtering.

    Groups every (celebrity, show, type) into one row (most recent episode,
    with an episode_count).  Filters: status, appearance_type,
    celebrity_name, credit_type, free-text search, date windows
    ('7days' / '30days' / 'custom' with start_date+end_date), and
    plex_only (rows matched to a specific Plex item).
    A filter value of 'all' (or None) means "do not filter on this field".
    """
    try:
        # Build WHERE clauses dynamically; every clause is parameterized,
        # so user input never reaches the SQL string itself.
        where_clauses = []
        params = []

        # Status filter
        if status and status != 'all':
            where_clauses.append("status = ?")
            params.append(status)

        # Appearance type filter
        if appearance_type and appearance_type != 'all':
            where_clauses.append("appearance_type = ?")
            params.append(appearance_type)

        # Celebrity filter
        if celebrity_name and celebrity_name != 'all':
            where_clauses.append("celebrity_name = ?")
            params.append(celebrity_name)

        # Credit type filter
        if credit_type and credit_type != 'all':
            where_clauses.append("credit_type = ?")
            params.append(credit_type)

        # Plex only filter - only show items with specific episode/movie key (not just show key)
        if plex_only:
            where_clauses.append("plex_rating_key IS NOT NULL")

        # Search filter - same pattern applied to four text columns.
        if search:
            where_clauses.append("(celebrity_name LIKE ? OR show_name LIKE ? OR network LIKE ? OR episode_title LIKE ?)")
            search_param = f"%{search}%"
            params.extend([search_param, search_param, search_param, search_param])

        # Date filter: fixed windows around "now", or an explicit range.
        if date_filter == '7days':
            where_clauses.append("appearance_date >= datetime('now', '-7 days') AND appearance_date <= datetime('now', '+7 days')")
        elif date_filter == '30days':
            where_clauses.append("appearance_date >= datetime('now', '-30 days') AND appearance_date <= datetime('now', '+30 days')")
        elif date_filter == 'custom' and start_date and end_date:
            where_clauses.append("appearance_date >= ? AND appearance_date <= ?")
            params.extend([start_date, end_date])

        # Build final query
        where_sql = " AND ".join(where_clauses) if where_clauses else "1=1"

        # Sort ascending for upcoming (soonest first), descending for others (most recent first)
        order_direction = "ASC" if status == "upcoming" else "DESC"

        # Grouping strategy:
        # - TV/Podcast: Group ALL episodes by show (including guests)
        # - Movies: Group multiple roles on same movie together
        # The same where_sql appears in both CTEs, so `params` is bound twice.
        query = f'''
            WITH ShowStats AS (
                SELECT
                    celebrity_id,
                    show_name,
                    appearance_type,
                    COUNT(DISTINCT COALESCE(season_number, 0) || '-' || COALESCE(episode_number, 0) || '-' || COALESCE(CAST(appearance_date AS TEXT), '')) as episode_count,
                    GROUP_CONCAT(DISTINCT credit_type) as all_credit_types,
                    MIN(appearance_date) as first_date,
                    MAX(appearance_date) as last_date
                FROM celebrity_appearances
                WHERE {where_sql}
                GROUP BY celebrity_id, show_name, appearance_type
            ),
            RankedAppearances AS (
                SELECT
                    id, celebrity_id, celebrity_name, appearance_type, show_name,
                    episode_title, network, appearance_date, url, audio_url, watch_url,
                    description, tmdb_show_id, season_number, episode_number, status,
                    created_at, updated_at, poster_url, credit_type, character_name,
                    job_title, plex_rating_key, plex_show_rating_key,
                    ROW_NUMBER() OVER (
                        PARTITION BY celebrity_id, show_name, appearance_type
                        ORDER BY appearance_date DESC, season_number DESC, episode_number DESC
                    ) as row_num
                FROM celebrity_appearances
                WHERE {where_sql}
            )
            SELECT
                r.id, r.celebrity_id, r.celebrity_name, r.appearance_type, r.show_name,
                r.episode_title, r.network,
                CASE
                    WHEN r.appearance_type IN ('TV', 'Podcast') AND ss.episode_count > 1 THEN ss.last_date
                    ELSE r.appearance_date
                END as appearance_date,
                r.url, r.audio_url, r.watch_url, r.description, r.tmdb_show_id,
                r.season_number, r.episode_number, r.status, r.created_at, r.updated_at,
                r.poster_url, COALESCE(ss.episode_count, 1) as episode_count,
                r.credit_type, r.character_name, r.job_title, r.plex_rating_key,
                r.plex_show_rating_key, ss.all_credit_types
            FROM RankedAppearances r
            LEFT JOIN ShowStats ss ON r.celebrity_id = ss.celebrity_id
                AND r.show_name = ss.show_name AND r.appearance_type = ss.appearance_type
            WHERE r.row_num = 1
            ORDER BY r.appearance_date {order_direction}
        '''

        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute(query, params + params)  # params twice for the 2 WHERE clauses
            rows = cursor.fetchall()

            # Get Plex config for generating watch URLs; failure just disables links.
            plex_url = None
            plex_machine_id = None
            try:
                cursor.execute("SELECT plex_url, plex_machine_id FROM appearance_config WHERE id = 1")
                plex_row = cursor.fetchone()
                if plex_row:
                    plex_url = plex_row[0]
                    plex_machine_id = plex_row[1]
            except Exception as e:
                logger.debug(f"Could not fetch Plex config: {e}")

            appearances = []
            for row in rows:
                plex_watch_url = None
                plex_app_url = None
                # NOTE(review): episode_count is assigned but unused below.
                episode_count = row[19]
                # Use show key for multi-episode shows, episode key for single episodes
                # Use episode rating_key if available, fall back to show rating_key
                plex_key = row[23] if row[23] else row[24]
                if plex_key and plex_url and plex_machine_id:
                    # Web URL for browser
                    plex_watch_url = f"{plex_url}/web/index.html#!/server/{plex_machine_id}/details?key=/library/metadata/{plex_key}"
                    # App URL for opening in Plex app directly
                    plex_app_url = f"plex://play?metadataKey=/library/metadata/{plex_key}&server={plex_machine_id}"

                # row[25] = GROUP_CONCAT of distinct credit types; fall back to
                # this row's own credit_type (row[20]) when the join found none.
                all_credit_types = row[25].split(',') if row[25] else [row[20]] if row[20] else []

                # Columns are positional; indices follow the SELECT list above.
                appearances.append({
                    "id": row[0],
                    "celebrity_id": row[1],
                    "celebrity_name": row[2],
                    "appearance_type": row[3],
                    "show_name": row[4],
                    "episode_title": row[5],
                    "network": row[6],
                    "appearance_date": row[7],
                    "url": row[8],
                    "audio_url": row[9],
                    "watch_url": row[10],
                    "description": row[11],
                    "tmdb_show_id": row[12],
                    "season_number": row[13],
                    "episode_number": row[14],
                    "status": row[15],
                    "created_at": row[16],
                    "updated_at": row[17],
                    "poster_url": row[18],
                    "episode_count": row[19],
                    "credit_type": row[20],
                    "character_name": row[21],
                    "job_title": row[22],
                    "plex_rating_key": row[23],
                    "plex_watch_url": plex_watch_url,
                    "plex_app_url": plex_app_url,
                    "all_credit_types": all_credit_types,
                })

            # Expand per-role details for rows with multiple credit types.
            return _enrich_with_roles(appearances, conn)

    except Exception as e:
        logger.error(f"Error fetching all appearances: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
@router.get("/tv/grouped")
async def get_tv_grouped(
    status: Optional[str] = Query(None),
    role_filter: Optional[str] = Query(None),
    credit_type: Optional[str] = Query(None, description="Filter by credit type: acting, directing, producing, writing, creator"),
    days_back: int = Query(36500, ge=1, le=36500),
    days_ahead: int = Query(90, ge=1, le=365),
    limit: int = Query(100, ge=1, le=1000),
    current_user: Dict = Depends(get_current_user),
    db: UnifiedDatabase = Depends(get_db)
):
    """
    Get TV appearances grouped by show name.

    Similar to podcast grouping: one row per show containing the most
    recent episode, the distinct-episode count, and the concatenated set
    of roles/credit types.  Optional filters: status, role_filter
    (substring match on role_type), and credit_type (exact match).
    Window is [now - days_back, now + days_ahead].

    NOTE(review): grouping partitions by show_name only (not celebrity),
    so two tracked celebrities on the same show share one row -- presumably
    intentional for this view; confirm against the frontend.
    """
    try:
        # Date range
        start_date = (datetime.now() - timedelta(days=days_back)).isoformat()
        end_date = (datetime.now() + timedelta(days=days_ahead)).isoformat()

        # Build WHERE clauses (always TV + date window; filters appended).
        where_clauses = [
            "appearance_type = 'TV'",
            "appearance_date >= ?",
            "appearance_date <= ?"
        ]
        params = [start_date, end_date]

        if status and status != 'all':
            where_clauses.append("status = ?")
            params.append(status)

        if role_filter and role_filter != 'all':
            where_clauses.append("role_type LIKE ?")
            params.append(f"%{role_filter}%")

        if credit_type and credit_type != 'all':
            where_clauses.append("credit_type = ?")
            params.append(credit_type)

        where_sql = " AND ".join(where_clauses)

        # Group by show, get most recent episode and count.
        # where_sql appears in both CTEs, so `params` is bound twice below.
        query = f'''
            WITH RankedShows AS (
                SELECT
                    id,
                    celebrity_id,
                    celebrity_name,
                    show_name,
                    episode_title,
                    network,
                    appearance_date,
                    season_number,
                    episode_number,
                    status,
                    role_type,
                    tmdb_show_id,
                    description,
                    poster_url,
                    credit_type,
                    character_name,
                    job_title,
                    plex_rating_key,
                    ROW_NUMBER() OVER (
                        PARTITION BY show_name
                        ORDER BY appearance_date DESC
                    ) as row_num
                FROM celebrity_appearances
                WHERE {where_sql}
            ),
            ShowStats AS (
                SELECT
                    show_name,
                    COUNT(DISTINCT COALESCE(season_number, 0) || '-' || COALESCE(episode_number, 0) || '-' || COALESCE(CAST(appearance_date AS TEXT), '')) as episode_count,
                    MIN(appearance_date) as first_appearance,
                    MAX(appearance_date) as last_appearance,
                    GROUP_CONCAT(DISTINCT role_type) as all_roles,
                    GROUP_CONCAT(DISTINCT credit_type) as all_credit_types
                FROM celebrity_appearances
                WHERE {where_sql}
                GROUP BY show_name
            )
            SELECT
                r.id,
                r.celebrity_id,
                r.celebrity_name,
                r.show_name,
                r.episode_title,
                r.network,
                r.appearance_date,
                r.season_number,
                r.episode_number,
                r.status,
                r.role_type,
                r.tmdb_show_id,
                r.description,
                r.poster_url,
                s.episode_count,
                s.first_appearance,
                s.last_appearance,
                s.all_roles,
                r.credit_type,
                r.character_name,
                r.job_title,
                r.plex_rating_key,
                s.all_credit_types
            FROM RankedShows r
            INNER JOIN ShowStats s ON r.show_name = s.show_name
            WHERE r.row_num = 1
            ORDER BY r.appearance_date DESC
            LIMIT ?
        '''

        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute(query, params + params + [limit])
            rows = cursor.fetchall()

            shows = []
            for row in rows:
                # Columns are positional; indices follow the SELECT list above.
                shows.append({
                    "id": row[0],
                    "celebrity_id": row[1],
                    "celebrity_name": row[2],
                    "show_name": row[3],
                    "latest_episode_title": row[4],
                    "network": row[5],
                    "latest_appearance_date": row[6],
                    "latest_season": row[7],
                    "latest_episode": row[8],
                    "status": row[9],
                    "latest_role": row[10],
                    "tmdb_show_id": row[11],
                    "description": row[12],
                    "poster_url": row[13],
                    "episode_count": row[14],
                    "first_appearance": row[15],
                    "last_appearance": row[16],
                    "all_roles": row[17],  # Comma-separated list of all roles
                    "credit_type": row[18],
                    "character_name": row[19],
                    "job_title": row[20],
                    "plex_rating_key": row[21],
                    # Split GROUP_CONCAT; fall back to this row's credit_type.
                    "all_credit_types": row[22].split(',') if row[22] else [row[18]] if row[18] else [],
                })

            return shows

    except Exception as e:
        logger.error(f"Error fetching grouped TV shows: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
@router.get("/config")
async def get_appearance_config(
    current_user: Dict = Depends(require_admin),
    db: UnifiedDatabase = Depends(get_db)
):
    """Get appearance tracking configuration.

    Admin-only: the config row includes secrets (TMDB/Podchaser/Taddy API
    keys and the Plex token), so it must not be readable by ordinary
    authenticated users.  This matches the other config endpoints that
    were locked down to require_admin.

    Raises:
        HTTPException 404 if the singleton config row (id = 1) is missing.
        HTTPException 500 on database errors.
    """
    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute('''
                SELECT tmdb_enabled, tmdb_check_interval_hours,
                       notify_new_appearances, notify_days_before,
                       sync_past_movies, tmdb_api_key, tmdb_last_check,
                       podcast_enabled, radio_enabled,
                       podchaser_api_key, podchaser_enabled, podchaser_last_check,
                       plex_url, plex_token,
                       notify_include_poster, notify_plex_matches,
                       taddy_user_id, taddy_api_key, taddy_enabled, taddy_last_check,
                       taddy_lookback_days, taddy_max_results, plex_sync_enabled,
                       taddy_user_id_2, taddy_api_key_2
                FROM appearance_config WHERE id = 1
            ''')
            row = cursor.fetchone()

            if not row:
                raise HTTPException(status_code=404, detail="Configuration not found")

            # Positional mapping follows the SELECT list above.  Integer flag
            # columns are coerced to bool; NULL flags fall back to a default
            # (False for most, True for the two notify_* display flags).
            return {
                "tmdb_enabled": bool(row[0]),
                "tmdb_check_interval_hours": row[1],
                "notify_new_appearances": bool(row[2]),
                "notify_days_before": row[3],
                "sync_past_movies": bool(row[4]),
                "tmdb_api_key": row[5],
                "tmdb_last_check": row[6],
                "podcast_enabled": bool(row[7]) if row[7] is not None else False,
                "radio_enabled": bool(row[8]) if row[8] is not None else False,
                "podchaser_api_key": row[9],
                "podchaser_enabled": bool(row[10]) if row[10] is not None else False,
                "podchaser_last_check": row[11],
                "plex_url": row[12],
                "plex_token": row[13],
                "notify_include_poster": bool(row[14]) if row[14] is not None else True,
                "notify_plex_matches": bool(row[15]) if row[15] is not None else True,
                "taddy_user_id": row[16],
                "taddy_api_key": row[17],
                "taddy_enabled": bool(row[18]) if row[18] is not None else False,
                "taddy_last_check": row[19],
                # Defaults: ~2 years of lookback, 250 results max.
                "taddy_lookback_days": row[20] or 730,
                "taddy_max_results": row[21] or 250,
                "plex_sync_enabled": bool(row[22]) if row[22] is not None else False,
                "taddy_user_id_2": row[23],
                "taddy_api_key_2": row[24],
            }

    except HTTPException:
        # Re-raise intentional HTTP errors (404) without wrapping them in a 500.
        raise
    except Exception as e:
        logger.error(f"Error fetching appearance config: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
@router.put("/config")
async def update_appearance_config(
    config: Dict = Body(...),
    current_user: Dict = Depends(require_admin),
    db: UnifiedDatabase = Depends(get_db)
):
    """Update appearance tracking configuration.

    Only fields present in the request body are updated; unknown keys are
    ignored (matching the previous per-field checks). Boolean fields are
    normalized to 0/1 for SQLite storage. Requires admin privileges since
    this mutates global service configuration (API keys, sync toggles,
    notification settings), consistent with the other admin-only config
    endpoints.

    Returns:
        {"success": True, "message": "Configuration updated"}

    Raises:
        HTTPException: 500 on database errors.
    """
    # Fields stored as 0/1 integer flags.
    bool_fields = {
        "tmdb_enabled", "notify_new_appearances", "sync_past_movies",
        "notify_include_poster", "notify_plex_matches",
        "taddy_enabled", "plex_sync_enabled",
    }
    # Fields stored verbatim (strings / integers).
    value_fields = {
        "tmdb_check_interval_hours", "notify_days_before",
        "taddy_user_id", "taddy_api_key",
        "taddy_lookback_days", "taddy_max_results",
        "taddy_user_id_2", "taddy_api_key_2",
    }

    try:
        # Build update query from the whitelisted fields only; column names
        # are interpolated but always come from the fixed sets above, so the
        # SQL cannot be injected via request keys.
        updates = []
        params = []
        for field, value in config.items():
            if field in bool_fields:
                updates.append(f"{field} = ?")
                params.append(1 if value else 0)
            elif field in value_fields:
                updates.append(f"{field} = ?")
                params.append(value)

        if updates:
            with db.get_connection(for_write=True) as conn:
                cursor = conn.cursor()
                query = (
                    f"UPDATE appearance_config SET {', '.join(updates)}, "
                    "updated_at = CURRENT_TIMESTAMP WHERE id = 1"
                )
                cursor.execute(query, params)
                conn.commit()

        return {"success": True, "message": "Configuration updated"}

    except Exception as e:
        logger.error(f"Error updating appearance config: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
|
|
# ==================== APPEARANCE NOTIFICATIONS (must be before /{appearance_id}) ====================
|
|
|
|
@router.get("/notifications")
async def get_appearance_notifications(
    limit: int = Query(50, ge=1, le=200),
    offset: int = Query(0, ge=0),
    notification_type: Optional[str] = Query(None, description="Filter by type: reminder, new_appearance, plex_match"),
    current_user: Dict = Depends(get_current_user),
    db: UnifiedDatabase = Depends(get_db)
):
    """Get appearance notification history.

    Returns one page of notifications (newest first, ordered by sent_at and
    falling back to created_at), optionally filtered by notification_type.
    Each entry carries Plex web/app deep-links when the linked appearance has
    a Plex rating key and the Plex server config is available.
    """
    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()

            # Two parallel WHERE lists: the COUNT query runs against the bare
            # table, the page query JOINs and therefore needs the "n." alias.
            where_clauses = ["1=1"]
            where_clauses_aliased = ["1=1"]
            params = []

            if notification_type:
                where_clauses.append("notification_type = ?")
                where_clauses_aliased.append("n.notification_type = ?")
                params.append(notification_type)

            where_sql = " AND ".join(where_clauses)
            where_sql_aliased = " AND ".join(where_clauses_aliased)

            # Get total count (unpaginated) for the client's pager.
            cursor.execute(f'''
                SELECT COUNT(*) FROM appearance_notifications
                WHERE {where_sql}
            ''', params)
            total = cursor.fetchone()[0]

            # Get Plex config for building watch URLs. Best-effort: if the
            # config row/columns are missing, deep-links are simply omitted.
            plex_url = None
            plex_machine_id = None
            try:
                cursor.execute("SELECT plex_url, plex_machine_id FROM appearance_config WHERE id = 1")
                plex_row = cursor.fetchone()
                if plex_row:
                    plex_url = plex_row[0]
                    plex_machine_id = plex_row[1]
            except Exception as e:
                logger.debug(f"Could not fetch Plex config: {e}")

            # Get notifications with Plex rating key from appearances.
            # LEFT JOIN so notifications whose appearance was deleted still show.
            cursor.execute(f'''
                SELECT n.id, n.appearance_id, n.celebrity_name, n.show_name, n.appearance_type,
                       n.appearance_date, n.notification_type, n.message, n.poster_url, n.sent_at,
                       a.plex_rating_key
                FROM appearance_notifications n
                LEFT JOIN celebrity_appearances a ON n.appearance_id = a.id
                WHERE {where_sql_aliased}
                ORDER BY COALESCE(n.sent_at, n.created_at) DESC
                LIMIT ? OFFSET ?
            ''', params + [limit, offset])

            notifications = []
            for row in cursor.fetchall():
                # Build deep-links only when all three pieces are present.
                plex_watch_url = None
                plex_app_url = None
                plex_rating_key = row[10]
                if plex_rating_key and plex_url and plex_machine_id:
                    plex_watch_url = f"{plex_url}/web/index.html#!/server/{plex_machine_id}/details?key=/library/metadata/{plex_rating_key}"
                    plex_app_url = f"plex://play?metadataKey=/library/metadata/{plex_rating_key}&server={plex_machine_id}"

                notifications.append({
                    "id": row[0],
                    "appearance_id": row[1],
                    "celebrity_name": row[2],
                    "show_name": row[3],
                    "appearance_type": row[4],
                    "appearance_date": row[5],
                    "notification_type": row[6],
                    "message": row[7],
                    "poster_url": row[8],
                    "sent_at": row[9],
                    "plex_watch_url": plex_watch_url,
                    "plex_app_url": plex_app_url,
                })

            return {
                "success": True,
                "notifications": notifications,
                "total": total,
                "limit": limit,
                "offset": offset
            }
    except Exception as e:
        logger.error(f"Error getting appearance notifications: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
|
|
@router.get("/notifications/stats")
async def get_appearance_notification_stats(
    current_user: Dict = Depends(get_current_user),
    db: UnifiedDatabase = Depends(get_db)
):
    """Return aggregate notification counts: all-time total, today's count,
    and a per-type breakdown."""
    try:
        with db.get_connection() as conn:
            cur = conn.cursor()

            # Per-type breakdown.
            cur.execute('''
                SELECT notification_type, COUNT(*) as count
                FROM appearance_notifications
                GROUP BY notification_type
            ''')
            type_counts = dict(cur.fetchall())

            # Notifications sent today (server-local SQLite date).
            cur.execute('''
                SELECT COUNT(*) FROM appearance_notifications
                WHERE date(sent_at) = date('now')
            ''')
            sent_today = cur.fetchone()[0]

            # All-time total.
            cur.execute('SELECT COUNT(*) FROM appearance_notifications')
            grand_total = cur.fetchone()[0]

        return {
            "success": True,
            "stats": {
                "total": grand_total,
                "today": sent_today,
                "by_type": type_counts,
            },
        }
    except Exception as e:
        logger.error(f"Error getting notification stats: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
|
|
@router.post("/notifications/send-reminders")
async def trigger_appearance_reminders(
    current_user: Dict = Depends(get_current_user),
    db: UnifiedDatabase = Depends(get_db)
):
    """Manually trigger appearance reminder check"""
    try:
        # Delegate to the shared reminder routine and pass its result through.
        return await send_appearance_reminders(db)
    except Exception as e:
        logger.error(f"Error triggering reminders: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
|
|
@router.get("/{appearance_id}")
async def get_appearance_details(
    appearance_id: int,
    current_user: Dict = Depends(get_current_user),
    db: UnifiedDatabase = Depends(get_db)
):
    """Get full details for a single appearance"""
    # Single source of truth for both the SELECT list and the response keys.
    fields = (
        "id", "celebrity_id", "celebrity_name", "appearance_type", "show_name",
        "episode_title", "network", "appearance_date", "announcement_date",
        "url", "watch_url", "description", "tmdb_show_id", "tmdb_episode_id",
        "season_number", "episode_number", "status", "notified",
        "created_at", "updated_at",
    )
    query = f"SELECT {', '.join(fields)} FROM celebrity_appearances WHERE id = ?"

    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute(query, (appearance_id,))
            row = cursor.fetchone()

        if row is None:
            raise HTTPException(status_code=404, detail="Appearance not found")

        # Column order matches the fields tuple above, so zip is safe.
        return dict(zip(fields, row))

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error fetching appearance {appearance_id}: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
|
|
@router.get("/poster/{appearance_id}")
async def get_appearance_poster(
    appearance_id: int,
    current_user: Dict = Depends(get_current_user),
    db: UnifiedDatabase = Depends(get_db)
):
    """
    Get cached poster for an appearance.
    Falls back to fetching and caching if not available.

    Serves the BLOB in celebrity_appearances.poster_data when present.
    Otherwise downloads poster_url (an absolute http(s) URL, or a TMDb path
    prefixed with TMDB_IMAGE_BASE), writes the bytes back into the row, and
    serves them. Raises 404 when the appearance is missing or no usable
    poster can be obtained.
    """
    from fastapi.responses import Response

    with db.get_connection() as conn:
        cursor = conn.cursor()
        cursor.execute(
            'SELECT poster_data, poster_url FROM celebrity_appearances WHERE id = ?',
            (appearance_id,)
        )
        row = cursor.fetchone()

    if not row:
        raise HTTPException(status_code=404, detail="Appearance not found")

    poster_data = row[0]
    poster_url = row[1]

    # Serve cached data if available. Responses are marked immutable and
    # cacheable for a day so the browser rarely re-requests.
    if poster_data:
        return Response(
            content=poster_data,
            media_type='image/jpeg',
            headers={'Cache-Control': 'public, max-age=86400, immutable'}
        )

    # Fetch and cache if not available
    if poster_url:
        try:
            if poster_url.startswith('http'):
                url = poster_url
            else:
                # TMDb stores relative poster paths; build the full CDN URL.
                url = f"{TMDB_IMAGE_BASE}{poster_url}"

            response = await http_client.get(url)
            if response.status_code == 200 and response.content:
                # Cache in database (separate write connection, opened only
                # after the read connection above has been released).
                with db.get_connection(for_write=True) as conn:
                    cursor = conn.cursor()
                    cursor.execute(
                        'UPDATE celebrity_appearances SET poster_data = ? WHERE id = ?',
                        (response.content, appearance_id)
                    )
                    conn.commit()

                return Response(
                    content=response.content,
                    media_type='image/jpeg',
                    headers={'Cache-Control': 'public, max-age=86400, immutable'}
                )
        except Exception as e:
            # Download/caching is best-effort; fall through to the 404 below.
            logger.warning(f"Failed to fetch poster for appearance {appearance_id}: {e}")

    raise HTTPException(status_code=404, detail="Poster not available")
|
|
|
|
|
|
@router.get("/show/episodes")
async def get_show_episodes(
    celebrity_id: int = Query(...),
    show_name: str = Query(...),
    appearance_type: str = Query('Podcast'),  # 'Podcast' or 'TV'
    current_user: Dict = Depends(get_current_user),
    db: UnifiedDatabase = Depends(get_db)
):
    """Get all episodes for a specific show (podcast or TV).
    Groups multiple credit types on the same episode into one entry.

    Returns a list of episode dicts, newest first. When the person holds
    several credits on the same episode (e.g. actor and producer), those rows
    are merged into one entry: the first row's fields win and every distinct
    credit is collected in all_credit_types / all_roles.
    """
    try:
        query = '''
            SELECT
                id,
                episode_title,
                appearance_date,
                season_number,
                episode_number,
                url,
                audio_url,
                watch_url,
                description,
                status,
                role_type,
                credit_type,
                character_name,
                job_title,
                plex_rating_key
            FROM celebrity_appearances
            WHERE celebrity_id = ?
              AND show_name = ?
              AND appearance_type = ?
            ORDER BY appearance_date DESC, season_number DESC, episode_number DESC
        '''

        with db.get_connection() as conn:
            cursor = conn.cursor()

            # Get plex config for generating watch URLs
            cursor.execute("SELECT plex_url, plex_machine_id FROM appearance_config WHERE id = 1")
            config_row = cursor.fetchone()
            plex_url = config_row[0] if config_row else None
            plex_machine_id = config_row[1] if config_row else None

            cursor.execute(query, (celebrity_id, show_name, appearance_type))
            rows = cursor.fetchall()

            # Group rows by episode (season_number + episode_number + appearance_date)
            # to combine multiple credit types into one entry
            episode_map = {}  # key -> episode dict
            for row in rows:
                # NULL season/episode/date collapse to 0 / '' so rows that
                # differ only by credit still land on the same key.
                sn = row[3] or 0
                en = row[4] or 0
                ep_date = row[2] or ''
                key = f"{sn}-{en}-{ep_date}"

                if key not in episode_map:
                    # First row for this episode: build the full entry.
                    plex_rating_key = row[14]
                    plex_watch_url = None
                    plex_app_url = None
                    if plex_rating_key and plex_url and plex_machine_id:
                        plex_watch_url = f"{plex_url}/web/index.html#!/server/{plex_machine_id}/details?key=/library/metadata/{plex_rating_key}"
                        plex_app_url = f"plex://play?metadataKey=/library/metadata/{plex_rating_key}&server={plex_machine_id}"

                    episode_map[key] = {
                        "id": row[0],
                        "episode_title": row[1],
                        "appearance_date": row[2],
                        "season_number": row[3],
                        "episode_number": row[4],
                        "url": row[5],
                        "audio_url": row[6],
                        "watch_url": row[7],
                        "description": row[8],
                        "status": row[9],
                        "role_type": row[10],
                        "credit_type": row[11],
                        "character_name": row[12],
                        "job_title": row[13],
                        "plex_rating_key": plex_rating_key,
                        "plex_watch_url": plex_watch_url,
                        "plex_app_url": plex_app_url,
                        "all_credit_types": [row[11]] if row[11] else [],
                        "all_roles": [{"credit_type": row[11], "character_name": row[12], "job_title": row[13]}] if row[11] else [],
                    }
                else:
                    # Duplicate episode row carrying an extra credit type:
                    # append it to the aggregate lists (deduplicated by type).
                    ep = episode_map[key]
                    if row[11] and row[11] not in ep["all_credit_types"]:
                        ep["all_credit_types"].append(row[11])
                        ep["all_roles"].append({"credit_type": row[11], "character_name": row[12], "job_title": row[13]})

            return list(episode_map.values())

    except Exception as e:
        logger.error(f"Error fetching episodes for show {show_name}: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
@router.post("/sync")
async def sync_appearances(
    background_tasks: BackgroundTasks,
    request: SyncRequest = Body(default=SyncRequest()),
    current_user: Dict = Depends(get_current_user),
    db: UnifiedDatabase = Depends(get_db)
):
    """
    Trigger TMDb sync for all or specific celebrities
    Runs in background.
    Manual syncs do NOT send push notifications (only scheduled syncs do).
    """
    ids = request.celebrity_ids

    # from_scheduler=False marks this as a manual sync, suppressing push
    # notifications (only the scheduler passes True).
    background_tasks.add_task(
        sync_tmdb_appearances,
        celebrity_ids=ids,
        from_scheduler=False
    )

    target = len(ids) if ids else "all"
    return {
        "success": True,
        "message": f"Syncing appearances for {target} celebrities"
    }
|
|
|
|
|
|
@router.get("/sync/status")
async def get_sync_status(current_user: Dict = Depends(get_current_user)):
    """Get the current sync progress status from database (works across processes)"""
    task = get_activity_manager().get_background_task("tmdb_sync")

    # No active sync: report an idle/default snapshot.
    if not (task and task.get('active')):
        return {
            "is_running": False,
            "current_celebrity": None,
            "celebrities_processed": 0,
            "total_celebrities": 0,
            "new_appearances": 0,
            "started_at": None,
            "last_update": None,
            "phase": None,
            "current_show": None,
            "shows_processed": 0,
            "total_shows": 0,
            "current_podcast": None,
            "podcasts_processed": 0,
            "total_podcasts": 0,
        }

    extra = task.get('extra_data', {}) or {}
    progress = task.get('progress', {})
    return {
        "is_running": True,
        "current_celebrity": extra.get('current_celebrity'),
        "celebrities_processed": progress.get('current', 0),
        "total_celebrities": progress.get('total', 0),
        "new_appearances": extra.get('new_appearances', 0),
        "started_at": task.get('start_time'),
        "last_update": task.get('updated_at'),
        "phase": extra.get('phase'),
        "current_show": extra.get('current_show'),
        "shows_processed": extra.get('shows_processed', 0),
        "total_shows": extra.get('total_shows', 0),
        "current_podcast": extra.get('current_podcast'),
        "podcasts_processed": extra.get('podcasts_processed', 0),
        "total_podcasts": extra.get('total_podcasts', 0),
    }
|
|
|
|
|
|
@router.put("/{appearance_id}/status")
async def update_appearance_status(
    appearance_id: int,
    status: str,
    current_user: Dict = Depends(get_current_user),
    db: UnifiedDatabase = Depends(get_db)
):
    """Update appearance status (upcoming/aired/watched/ignored)"""
    valid_statuses = ["upcoming", "aired", "watched", "ignored"]
    # Reject unknown statuses up front with a 400.
    if status not in valid_statuses:
        raise HTTPException(
            status_code=400,
            detail=f"Invalid status. Must be one of: {valid_statuses}"
        )

    try:
        with db.get_connection(for_write=True) as conn:
            cur = conn.cursor()
            cur.execute('''
                UPDATE celebrity_appearances
                SET status = ?, updated_at = CURRENT_TIMESTAMP
                WHERE id = ?
            ''', (status, appearance_id))
            conn.commit()
            affected = cur.rowcount

        # rowcount of zero means no row matched the id.
        if affected == 0:
            raise HTTPException(status_code=404, detail="Appearance not found")

        return {"success": True, "status": status}

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error updating appearance status: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
@router.post("/update-status")
async def update_appearance_statuses(
    current_user: Dict = Depends(get_current_user),
    db: UnifiedDatabase = Depends(get_db)
):
    """Manually trigger status update for past appearances"""
    try:
        # Synchronous helper: flips past 'upcoming' rows to 'aired'.
        count = update_past_appearances_status(db)
        return {
            "success": True,
            "updated_count": count,
            "message": f"Updated {count} appearance(s) to 'aired' status",
        }
    except Exception as e:
        logger.error(f"Error in update status endpoint: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
# Helper function for notifications
|
|
def send_appearance_notification(notifier: PushoverNotifier, celebrity_name: str, appearance: Dict, db: UnifiedDatabase = None) -> bool:
    """
    Send a Pushover notification for a new celebrity appearance

    Builds an HTML-formatted message (show/network/episode/date/summary),
    optionally downloads the poster to a temp file so it can be attached,
    sends via Pushover, and — when a db is provided and the send succeeded —
    records the notification in appearance_notifications. The temp poster
    file is removed on both the success and error paths. Runs in a
    background thread, hence the synchronous httpx call for the poster.

    Args:
        notifier: PushoverNotifier instance
        celebrity_name: Name of the celebrity
        appearance: Appearance data dict
        db: UnifiedDatabase instance for recording notification (optional)

    Returns:
        True if notification sent successfully
    """
    import tempfile
    import os
    from web.backend.core.http_client import http_client

    # Defined before the try so the except-block cleanup can reference it.
    poster_path = None

    try:
        # Format the air date
        air_date_str = appearance.get("appearance_date", "Unknown date")
        try:
            air_date = datetime.fromisoformat(air_date_str)
            formatted_date = air_date.strftime("%B %d, %Y")  # "January 4, 2026"
            day_of_week = air_date.strftime("%A")  # "Monday"
            time_until = air_date - datetime.now()
            days_until = time_until.days
        except (ValueError, TypeError):
            # Unparseable/missing date: show the raw string, no countdown.
            formatted_date = air_date_str
            day_of_week = ""
            days_until = 0

        # Build title based on appearance type
        appearance_type = appearance.get("appearance_type", "TV")
        show_name = appearance.get("show_name") or appearance.get("movie_name", "Unknown")

        if appearance_type == "Movie":
            title = f"🎬 New Movie: {celebrity_name}"
        elif appearance_type == "Podcast":
            title = f"🎙️ New Podcast: {celebrity_name}"
        else:
            title = f"📺 New TV Appearance: {celebrity_name}"

        # Build rich HTML message with all details
        message_parts = []

        # Show/Movie/Podcast info
        if appearance_type == "Movie":
            message_parts.append(f"<b>🎬 Movie:</b> {show_name}")
        elif appearance_type == "Podcast":
            message_parts.append(f"<b>🎙️ Podcast:</b> {show_name}")
        else:
            message_parts.append(f"<b>📺 Show:</b> {show_name}")

        if appearance.get("network"):
            message_parts.append(f"<b>📡 Network:</b> {appearance['network']}")

        # Episode details (SxxExx prefix when season/episode are known)
        if appearance.get("episode_title"):
            episode_info = appearance["episode_title"]
            if appearance.get("season_number") and appearance.get("episode_number"):
                episode_info = f"S{appearance['season_number']}E{appearance['episode_number']}: {episode_info}"
            message_parts.append(f"<b>🎞️ Episode:</b> {episode_info}")
        elif appearance.get("season_number") and appearance.get("episode_number"):
            message_parts.append(f"<b>🎞️ Episode:</b> S{appearance['season_number']}E{appearance['episode_number']}")

        # Air/Release date with countdown
        if appearance_type == "Movie":
            date_label = "Releases"
        elif appearance_type == "Podcast":
            date_label = "Published"
        else:
            date_label = "Airs"
        if days_until > 0:
            if days_until == 1:
                countdown = "Tomorrow!"
            else:
                countdown = f"In {days_until} days"
            message_parts.append(f"<b>📅 {date_label}:</b> {day_of_week}, {formatted_date} ({countdown})")
        else:
            message_parts.append(f"<b>📅 {date_label}:</b> {day_of_week}, {formatted_date}")

        # Description if available
        if appearance.get("description"):
            desc = appearance["description"]
            # Truncate description if too long
            if len(desc) > 200:
                desc = desc[:197] + "..."
            message_parts.append(f"\n<b>📝 Summary:</b> {desc}")

        # Discovery timestamp
        now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        message_parts.append(f"\n<b>⏰ Discovered:</b> {now}")

        message = "\n".join(message_parts)

        # Download poster if available
        poster_path = None
        poster_url_path = appearance.get("poster_url")
        if poster_url_path:
            try:
                # Check if it's already a full URL (podcasts) or a TMDb path
                if poster_url_path.startswith("http"):
                    poster_url = poster_url_path
                else:
                    # TMDb image URL format
                    poster_url = f"https://image.tmdb.org/t/p/w500{poster_url_path}"

                # Download poster to temp file (sync HTTP call since this runs in background thread)
                import httpx
                response = httpx.get(poster_url, timeout=10.0)

                if response.status_code == 200:
                    # Create temp file - strip query params before getting extension
                    url_path = poster_url.split('?')[0]
                    suffix = os.path.splitext(url_path)[1] or '.jpg'
                    fd, poster_path = tempfile.mkstemp(suffix=suffix, prefix='tmdb_poster_')

                    # Write image data
                    with os.fdopen(fd, 'wb') as f:
                        f.write(response.content)

                    logger.debug(f"Downloaded poster to {poster_path}")
                else:
                    logger.warning(f"Failed to download poster: HTTP {response.status_code} for {poster_url}")
            except Exception as e:
                # Poster download is best-effort; send text-only on failure.
                logger.warning(f"Failed to download poster: {e}")
                poster_path = None

        # Send notification with poster image
        result = notifier.send_notification(
            title=title,
            message=message,
            html=True,
            priority=0,  # Normal priority
            image_path=poster_path
        )

        # Cleanup temp file
        if poster_path and os.path.exists(poster_path):
            try:
                os.remove(poster_path)
            except Exception as e:
                logger.warning(f"Failed to remove temp poster file: {e}")

        # Record notification to database if successful and db provided
        if result and db:
            try:
                with db.get_connection(for_write=True) as conn:
                    cursor = conn.cursor()
                    full_message = f"{title}\n{message}"
                    cursor.execute('''
                        INSERT INTO appearance_notifications
                        (appearance_id, celebrity_name, show_name, appearance_type,
                         appearance_date, notification_type, message, poster_url,
                         is_sent, sent_at)
                        VALUES (?, ?, ?, ?, ?, 'new_appearance', ?, ?, 1, CURRENT_TIMESTAMP)
                    ''', (
                        appearance.get('id'),
                        celebrity_name,
                        show_name,
                        appearance_type,
                        # Store date-only (YYYY-MM-DD) when a full timestamp was given.
                        air_date_str[:10] if air_date_str and len(air_date_str) >= 10 else air_date_str,
                        full_message,
                        appearance.get('poster_url')
                    ))
                    conn.commit()
            except Exception as e:
                # Bookkeeping failure must not turn a sent notification into an error.
                logger.warning(f"Failed to record appearance notification: {e}")

        return result

    except Exception as e:
        logger.error(f"Error sending appearance notification: {e}")

        # Cleanup temp file on error
        if poster_path and os.path.exists(poster_path):
            try:
                os.remove(poster_path)
            except OSError:
                pass  # File cleanup failure is non-critical

        return False
|
|
|
|
def update_past_appearances_status(db: UnifiedDatabase) -> int:
    """
    Update status of appearances that have passed from 'upcoming' to 'aired'

    Args:
        db: UnifiedDatabase instance

    Returns:
        Number of appearances updated (0 on error)
    """
    try:
        with db.get_connection(for_write=True) as conn:
            cur = conn.cursor()
            # Comparing against date('now') means a show is only flipped to
            # 'aired' once its air day is fully over.
            cur.execute('''
                UPDATE celebrity_appearances
                SET status = 'aired', updated_at = CURRENT_TIMESTAMP
                WHERE appearance_date < date('now')
                AND status = 'upcoming'
            ''')
            n_updated = cur.rowcount
            conn.commit()

        if n_updated > 0:
            logger.info(f"Updated {n_updated} past appearance(s) to 'aired' status")

        return n_updated

    except Exception as e:
        logger.error(f"Error updating past appearances status: {e}")
        return 0
|
|
|
|
# Background task
|
|
async def sync_tmdb_appearances(celebrity_ids: Optional[List[int]] = None, from_scheduler: bool = False, db=None):
|
|
"""
|
|
Sync TMDb appearances for celebrities
|
|
1. Get TMDb API key from config
|
|
2. For each celebrity, fetch upcoming TV appearances
|
|
3. Insert/update celebrity_appearances table
|
|
|
|
Args:
|
|
celebrity_ids: Optional list of celebrity IDs to sync (None = all)
|
|
from_scheduler: If True, send push notifications for new appearances.
|
|
Only the scheduler should set this to True.
|
|
db: Optional database instance (required when called from scheduler)
|
|
"""
|
|
if db is None:
|
|
db = get_app_state().db
|
|
activity_manager = get_activity_manager()
|
|
|
|
# Track extra data for the sync progress
|
|
sync_extra_data = {
|
|
"current_celebrity": None,
|
|
"new_appearances": 0,
|
|
"phase": None,
|
|
"current_show": None,
|
|
"shows_processed": 0,
|
|
"total_shows": 0,
|
|
"current_podcast": None,
|
|
"podcasts_processed": 0,
|
|
"total_podcasts": 0,
|
|
}
|
|
|
|
# Initialize progress tracking in database (works across processes)
|
|
activity_manager.start_background_task(
|
|
task_id="tmdb_sync",
|
|
task_type="tmdb_sync",
|
|
display_name="TMDB Sync",
|
|
status="Starting",
|
|
extra_data=sync_extra_data
|
|
)
|
|
|
|
try:
|
|
# Get TMDb, Podchaser, and Taddy config and notification settings
|
|
with db.get_connection() as conn:
|
|
cursor = conn.cursor()
|
|
cursor.execute('''
|
|
SELECT tmdb_api_key, tmdb_enabled, notify_new_appearances,
|
|
podchaser_api_key, podchaser_enabled, sync_past_movies,
|
|
podchaser_client_id, podchaser_client_secret,
|
|
taddy_user_id, taddy_api_key, taddy_enabled,
|
|
taddy_lookback_days, taddy_max_results,
|
|
taddy_user_id_2, taddy_api_key_2,
|
|
plex_url, plex_token, plex_sync_enabled
|
|
FROM appearance_config WHERE id = 1
|
|
''')
|
|
row = cursor.fetchone()
|
|
|
|
if not row:
|
|
logger.error("Appearance config not found")
|
|
return
|
|
|
|
(tmdb_api_key, tmdb_enabled, notify_enabled, podchaser_api_key,
|
|
podchaser_enabled, sync_past_movies, podchaser_client_id, podchaser_client_secret,
|
|
taddy_user_id, taddy_api_key, taddy_enabled,
|
|
taddy_lookback_days, taddy_max_results,
|
|
taddy_user_id_2, taddy_api_key_2,
|
|
plex_url, plex_token, plex_sync_enabled) = row
|
|
|
|
# Use defaults if not set
|
|
taddy_lookback_days = taddy_lookback_days or 730
|
|
taddy_max_results = taddy_max_results or 250
|
|
|
|
# Check if at least one service is enabled
|
|
if not tmdb_enabled and not podchaser_enabled and not taddy_enabled and not plex_sync_enabled:
|
|
logger.info("All appearance tracking services disabled in config")
|
|
return
|
|
|
|
# Initialize Pushover notifier if notifications enabled AND this is a scheduled sync
|
|
# Manual syncs from the web UI should NOT send push notifications
|
|
notifier = None
|
|
if notify_enabled and from_scheduler:
|
|
try:
|
|
settings_manager = SettingsManager(str(db.db_path))
|
|
config = settings_manager.get_all()
|
|
notifier = create_notifier_from_config(config, unified_db=db)
|
|
if notifier:
|
|
logger.info("Pushover notifier initialized for appearance notifications (scheduled sync)")
|
|
else:
|
|
logger.info("Pushover not configured or disabled, skipping notifications")
|
|
except Exception as e:
|
|
logger.warning(f"Failed to initialize Pushover notifier: {e}")
|
|
notifier = None
|
|
elif notify_enabled and not from_scheduler:
|
|
logger.debug("Skipping notifications for manual sync")
|
|
|
|
# Initialize API clients
|
|
tmdb = TMDbClient(tmdb_api_key) if tmdb_enabled and tmdb_api_key else None
|
|
|
|
# Initialize Podchaser client - prefer client credentials (OAuth2) over api_key
|
|
podchaser = None
|
|
if podchaser_enabled:
|
|
if podchaser_client_id and podchaser_client_secret:
|
|
try:
|
|
podchaser = await PodchaserClient.from_client_credentials(
|
|
podchaser_client_id, podchaser_client_secret
|
|
)
|
|
logger.info("Podchaser client initialized with OAuth2 client credentials")
|
|
except Exception as e:
|
|
logger.error(f"Failed to initialize Podchaser client: {e}")
|
|
elif podchaser_api_key:
|
|
podchaser = PodchaserClient(podchaser_api_key)
|
|
logger.info("Podchaser client initialized with API key")
|
|
|
|
# Initialize Taddy client with optional fallback account
|
|
taddy = None
|
|
if taddy_enabled and taddy_user_id and taddy_api_key:
|
|
taddy = TaddyClient(
|
|
taddy_user_id, taddy_api_key,
|
|
user_id_2=taddy_user_id_2, api_key_2=taddy_api_key_2
|
|
)
|
|
if taddy.has_fallback:
|
|
logger.info("Taddy client initialized with fallback account")
|
|
else:
|
|
logger.info("Taddy client initialized")
|
|
|
|
# Initialize Plex client for library-based discovery
|
|
plex = None
|
|
if plex_sync_enabled and plex_url and plex_token:
|
|
try:
|
|
plex = PlexClient(plex_url, plex_token)
|
|
# Test connection
|
|
if await plex.test_connection():
|
|
logger.info("Plex client initialized for appearance discovery")
|
|
else:
|
|
logger.warning("Plex connection failed - disabling Plex sync for this run")
|
|
plex = None
|
|
except Exception as e:
|
|
logger.error(f"Failed to initialize Plex client: {e}")
|
|
plex = None
|
|
|
|
# Get celebrities to sync
|
|
query = '''
|
|
SELECT id, name, tmdb_person_id, podchaser_creator_id
|
|
FROM celebrity_profiles
|
|
WHERE enabled = 1
|
|
'''
|
|
|
|
if celebrity_ids:
|
|
placeholders = ','.join('?' * len(celebrity_ids))
|
|
query += f' AND id IN ({placeholders})'
|
|
params = celebrity_ids
|
|
else:
|
|
params = []
|
|
|
|
with db.get_connection() as conn:
|
|
cursor = conn.cursor()
|
|
cursor.execute(query, params)
|
|
celebrities = cursor.fetchall()
|
|
|
|
logger.info(f"Syncing appearances for {len(celebrities)} celebrities (TMDb: {tmdb is not None}, Podchaser: {podchaser is not None}, Taddy: {taddy is not None}, Plex: {plex is not None})")
|
|
|
|
# Update total count for progress tracking
|
|
activity_manager.update_background_task(
|
|
"tmdb_sync", "Processing celebrities",
|
|
progress_current=0, progress_total=len(celebrities),
|
|
extra_data=sync_extra_data
|
|
)
|
|
|
|
# Track celebrities processed locally
|
|
celebrities_processed = 0
|
|
total_new_appearances = 0
|
|
|
|
# Crash recovery checkpoint
|
|
from modules.task_checkpoint import TaskCheckpoint
|
|
checkpoint = TaskCheckpoint('appearances:tmdb_sync', 'background')
|
|
checkpoint.start(total_items=len(celebrities))
|
|
if checkpoint.is_recovering():
|
|
logger.info("TMDb sync: recovering — skipping already-synced celebrities")
|
|
|
|
# Process each celebrity
|
|
for celeb_id, celeb_name, tmdb_person_id, podchaser_creator_id in celebrities:
|
|
try:
|
|
if checkpoint.is_completed(str(celeb_id)):
|
|
celebrities_processed += 1
|
|
continue
|
|
|
|
checkpoint.set_current(str(celeb_id))
|
|
|
|
# Update progress
|
|
sync_extra_data["current_celebrity"] = celeb_name
|
|
activity_manager.update_background_task(
|
|
"tmdb_sync", f"Processing {celeb_name}",
|
|
progress_current=celebrities_processed, progress_total=len(celebrities),
|
|
extra_data=sync_extra_data
|
|
)
|
|
|
|
new_appearances = [] # Track all new appearances from all sources
|
|
|
|
# ===== TMDb Sync =====
|
|
if tmdb:
|
|
# Get or search for TMDb person ID
|
|
if not tmdb_person_id:
|
|
person = await tmdb.search_person(celeb_name)
|
|
if person:
|
|
tmdb_person_id = person["id"]
|
|
|
|
# Save person ID
|
|
with db.get_connection(for_write=True) as conn:
|
|
cursor = conn.cursor()
|
|
cursor.execute('''
|
|
UPDATE celebrity_profiles
|
|
SET tmdb_person_id = ?, tmdb_last_sync = CURRENT_TIMESTAMP
|
|
WHERE id = ?
|
|
''', (tmdb_person_id, celeb_id))
|
|
conn.commit()
|
|
else:
|
|
logger.warning(f"Could not find TMDb person for: {celeb_name}")
|
|
# Skip this celebrity for TMDb only
|
|
tmdb_person_id = None
|
|
|
|
# Find upcoming TV and movie appearances if we have TMDb ID
|
|
if tmdb_person_id:
|
|
# Get movies based on sync_past_movies setting
|
|
if sync_past_movies:
|
|
# Get COMPLETE filmography (all past and future movies AND TV)
|
|
logger.info(f"Syncing complete filmography (past + future) for {celeb_name}")
|
|
|
|
# Get all movies with credit types
|
|
sync_extra_data["phase"] = "fetching_movies"
|
|
activity_manager.update_background_task(
|
|
"tmdb_sync", f"Fetching movies for {celeb_name}",
|
|
progress_current=celebrities_processed, progress_total=len(celebrities),
|
|
extra_data=sync_extra_data
|
|
)
|
|
all_movies = await tmdb.find_all_movie_appearances(tmdb_person_id)
|
|
movie_appearances = [m for m in all_movies if m.get("status") == "upcoming"]
|
|
aired_movies = [m for m in all_movies if m.get("status") == "aired"]
|
|
|
|
# Get upcoming TV (detects next episode to air on any credited show)
|
|
sync_extra_data["phase"] = "fetching_tv"
|
|
activity_manager.update_background_task(
|
|
"tmdb_sync", f"Fetching TV for {celeb_name}",
|
|
progress_current=celebrities_processed, progress_total=len(celebrities),
|
|
extra_data=sync_extra_data
|
|
)
|
|
tv_appearances = await tmdb.find_upcoming_tv_appearances(tmdb_person_id)
|
|
|
|
# Progress callback for TV shows
|
|
def tv_progress_callback(current_show, shows_done, total):
|
|
sync_extra_data["current_show"] = current_show
|
|
sync_extra_data["shows_processed"] = shows_done
|
|
sync_extra_data["total_shows"] = total
|
|
activity_manager.update_background_task(
|
|
"tmdb_sync", f"Processing {current_show}",
|
|
progress_current=celebrities_processed, progress_total=len(celebrities),
|
|
extra_data=sync_extra_data
|
|
)
|
|
|
|
# Get historical/aired TV (episode-level for guests and regular cast)
|
|
all_tv = await tmdb.find_all_tv_appearances_with_credits(tmdb_person_id, celeb_name, tv_progress_callback)
|
|
aired_tv_appearances = [t for t in all_tv if t.get("status") == "aired"]
|
|
sync_extra_data["current_show"] = None # Clear after TV done
|
|
|
|
# Scan episode titles for shows where credits aren't always populated
|
|
# (e.g., Celebrity IOU, Who Do You Think You Are, etc.)
|
|
sync_extra_data["phase"] = "scanning_episode_titles"
|
|
activity_manager.update_background_task(
|
|
"tmdb_sync", f"Scanning episode titles for {celeb_name}",
|
|
progress_current=celebrities_processed, progress_total=len(celebrities),
|
|
extra_data=sync_extra_data
|
|
)
|
|
title_appearances = await tmdb.find_appearances_by_episode_title(celeb_name)
|
|
if title_appearances:
|
|
logger.info(f"Found {len(title_appearances)} episode title matches for {celeb_name}")
|
|
# Add to TV appearances (will be deduplicated by unique index)
|
|
tv_appearances.extend([t for t in title_appearances if t.get("status") == "upcoming"])
|
|
aired_tv_appearances.extend([t for t in title_appearances if t.get("status") == "aired"])
|
|
|
|
sync_extra_data["phase"] = "saving"
|
|
activity_manager.update_background_task(
|
|
"tmdb_sync", f"Saving appearances for {celeb_name}",
|
|
progress_current=celebrities_processed, progress_total=len(celebrities),
|
|
extra_data=sync_extra_data
|
|
)
|
|
else:
|
|
# Only get upcoming movies and TV
|
|
logger.info(f"Syncing upcoming only for {celeb_name}")
|
|
sync_extra_data["phase"] = "fetching"
|
|
activity_manager.update_background_task(
|
|
"tmdb_sync", f"Fetching upcoming for {celeb_name}",
|
|
progress_current=celebrities_processed, progress_total=len(celebrities),
|
|
extra_data=sync_extra_data
|
|
)
|
|
tv_appearances = await tmdb.find_upcoming_tv_appearances(tmdb_person_id)
|
|
upcoming_only = await tmdb.find_upcoming_movie_appearances(tmdb_person_id)
|
|
movie_appearances = upcoming_only
|
|
aired_movies = []
|
|
aired_tv_appearances = []
|
|
|
|
# Scan episode titles for upcoming shows
|
|
sync_extra_data["phase"] = "scanning_episode_titles"
|
|
activity_manager.update_background_task(
|
|
"tmdb_sync", f"Scanning episode titles for {celeb_name}",
|
|
progress_current=celebrities_processed, progress_total=len(celebrities),
|
|
extra_data=sync_extra_data
|
|
)
|
|
title_appearances = await tmdb.find_appearances_by_episode_title(celeb_name, lookback_days=7, lookahead_days=90)
|
|
if title_appearances:
|
|
logger.info(f"Found {len(title_appearances)} episode title matches for {celeb_name}")
|
|
tv_appearances.extend([t for t in title_appearances if t.get("status") == "upcoming"])
|
|
|
|
sync_extra_data["phase"] = "saving"
|
|
activity_manager.update_background_task(
|
|
"tmdb_sync", f"Saving appearances for {celeb_name}",
|
|
progress_current=celebrities_processed, progress_total=len(celebrities),
|
|
extra_data=sync_extra_data
|
|
)
|
|
else:
|
|
tv_appearances = []
|
|
movie_appearances = []
|
|
aired_movies = []
|
|
aired_tv_appearances = []
|
|
|
|
# Insert/update TMDb appearances
|
|
with db.get_connection(for_write=True) as conn:
|
|
cursor = conn.cursor()
|
|
|
|
# Process TV appearances
|
|
for app in tv_appearances:
|
|
# Get credit info (new fields, default to 'acting' for cast credits)
|
|
credit_type = app.get("credit_type", "acting")
|
|
character_name = app.get("character_name")
|
|
job_title = app.get("job_title")
|
|
|
|
# Check if this appearance already exists
|
|
cursor.execute('''
|
|
SELECT id FROM celebrity_appearances
|
|
WHERE celebrity_id = ?
|
|
AND appearance_type = 'TV'
|
|
AND tmdb_show_id = ?
|
|
AND season_number = ?
|
|
AND episode_number = ?
|
|
AND credit_type = ?
|
|
''', (celeb_id, app["tmdb_show_id"], app["season_number"], app["episode_number"], credit_type))
|
|
|
|
existing = cursor.fetchone()
|
|
is_new = existing is None
|
|
|
|
# Insert or update the appearance
|
|
cursor.execute('''
|
|
INSERT INTO celebrity_appearances (
|
|
celebrity_id, celebrity_name, appearance_type,
|
|
show_name, episode_title, network,
|
|
appearance_date, tmdb_show_id, season_number,
|
|
episode_number, tmdb_episode_id, description,
|
|
poster_url, announcement_date, credit_type,
|
|
character_name, job_title
|
|
) VALUES (?, ?, 'TV', ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, ?, ?, ?)
|
|
ON CONFLICT(celebrity_id, appearance_type, tmdb_show_id, season_number, episode_number, credit_type) WHERE appearance_type = 'TV'
|
|
DO UPDATE SET
|
|
episode_title = excluded.episode_title,
|
|
appearance_date = excluded.appearance_date,
|
|
description = excluded.description,
|
|
poster_url = excluded.poster_url,
|
|
character_name = excluded.character_name,
|
|
job_title = excluded.job_title,
|
|
updated_at = CURRENT_TIMESTAMP
|
|
''', (
|
|
celeb_id, celeb_name, app["show_name"],
|
|
app["episode_title"], app["network"], app["appearance_date"],
|
|
app["tmdb_show_id"], app["season_number"], app["episode_number"],
|
|
app["tmdb_episode_id"], app["description"], app.get("poster_url"),
|
|
credit_type, character_name, job_title
|
|
))
|
|
|
|
# Track new appearances for notification
|
|
if is_new:
|
|
new_appearances.append({
|
|
**app,
|
|
"appearance_type": "TV"
|
|
})
|
|
|
|
# Process aired TV appearances (historical data)
|
|
for app in aired_tv_appearances:
|
|
# Get credit info (new fields, default to 'acting' for cast credits)
|
|
credit_type = app.get("credit_type", "acting")
|
|
character_name = app.get("character_name")
|
|
job_title = app.get("job_title")
|
|
|
|
# Check if this appearance already exists
|
|
cursor.execute('''
|
|
SELECT id FROM celebrity_appearances
|
|
WHERE celebrity_id = ?
|
|
AND appearance_type = 'TV'
|
|
AND tmdb_show_id = ?
|
|
AND season_number = ?
|
|
AND episode_number = ?
|
|
AND credit_type = ?
|
|
''', (celeb_id, app["tmdb_show_id"], app["season_number"], app["episode_number"], credit_type))
|
|
|
|
existing = cursor.fetchone()
|
|
|
|
# Insert or update the appearance with status='aired'
|
|
cursor.execute('''
|
|
INSERT INTO celebrity_appearances (
|
|
celebrity_id, celebrity_name, appearance_type,
|
|
show_name, episode_title, network,
|
|
appearance_date, tmdb_show_id, season_number,
|
|
episode_number, tmdb_episode_id, description,
|
|
poster_url, status, announcement_date, credit_type,
|
|
character_name, job_title
|
|
) VALUES (?, ?, 'TV', ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 'aired', CURRENT_TIMESTAMP, ?, ?, ?)
|
|
ON CONFLICT(celebrity_id, appearance_type, tmdb_show_id, season_number, episode_number, credit_type) WHERE appearance_type = 'TV'
|
|
DO UPDATE SET
|
|
episode_title = excluded.episode_title,
|
|
appearance_date = excluded.appearance_date,
|
|
description = excluded.description,
|
|
poster_url = excluded.poster_url,
|
|
character_name = excluded.character_name,
|
|
job_title = excluded.job_title,
|
|
status = 'aired',
|
|
updated_at = CURRENT_TIMESTAMP
|
|
''', (
|
|
celeb_id, celeb_name, app["show_name"],
|
|
app["episode_title"], app["network"], app["appearance_date"],
|
|
app["tmdb_show_id"], app["season_number"], app["episode_number"],
|
|
app["tmdb_episode_id"], app["description"], app.get("poster_url"),
|
|
credit_type, character_name, job_title
|
|
))
|
|
|
|
# Don't notify for historical appearances (only notify for new upcoming ones)
|
|
|
|
# Process movie appearances
|
|
for movie in movie_appearances:
|
|
# Get credit info (new fields, default to 'acting' for cast credits)
|
|
credit_type = movie.get("credit_type", "acting")
|
|
character_name = movie.get("character_name")
|
|
job_title = movie.get("job_title")
|
|
|
|
# Check if this movie already exists
|
|
cursor.execute('''
|
|
SELECT id FROM celebrity_appearances
|
|
WHERE celebrity_id = ?
|
|
AND appearance_type = 'Movie'
|
|
AND tmdb_show_id = ?
|
|
AND credit_type = ?
|
|
''', (celeb_id, movie["tmdb_movie_id"], credit_type))
|
|
|
|
existing = cursor.fetchone()
|
|
is_new = existing is None
|
|
|
|
# Insert or update the movie appearance
|
|
# Use partial index for movies (celebrity_id, appearance_type, tmdb_show_id, credit_type)
|
|
cursor.execute('''
|
|
INSERT INTO celebrity_appearances (
|
|
celebrity_id, celebrity_name, appearance_type,
|
|
show_name, network,
|
|
appearance_date, tmdb_show_id, description,
|
|
poster_url, announcement_date, credit_type,
|
|
character_name, job_title
|
|
) VALUES (?, ?, 'Movie', ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP, ?, ?, ?)
|
|
ON CONFLICT(celebrity_id, appearance_type, tmdb_show_id, credit_type) WHERE appearance_type = 'Movie'
|
|
DO UPDATE SET
|
|
show_name = excluded.show_name,
|
|
appearance_date = excluded.appearance_date,
|
|
description = excluded.description,
|
|
poster_url = excluded.poster_url,
|
|
character_name = excluded.character_name,
|
|
job_title = excluded.job_title,
|
|
updated_at = CURRENT_TIMESTAMP
|
|
''', (
|
|
celeb_id, celeb_name, movie["movie_name"],
|
|
movie["studio"], movie["release_date"],
|
|
movie["tmdb_movie_id"], movie["description"],
|
|
movie.get("poster_url"), credit_type, character_name, job_title
|
|
))
|
|
|
|
# Track new movie appearances for notification
|
|
if is_new:
|
|
new_appearances.append({
|
|
**movie,
|
|
"appearance_type": "Movie",
|
|
"show_name": movie["movie_name"] # Add for notification compatibility
|
|
})
|
|
|
|
# Process aired movie appearances (historical filmography)
|
|
for movie in aired_movies:
|
|
# Get credit info (new fields, default to 'acting' for cast credits)
|
|
credit_type = movie.get("credit_type", "acting")
|
|
character_name = movie.get("character_name")
|
|
job_title = movie.get("job_title")
|
|
|
|
# Insert or update the aired movie
|
|
cursor.execute('''
|
|
INSERT INTO celebrity_appearances (
|
|
celebrity_id, celebrity_name, appearance_type,
|
|
show_name, network,
|
|
appearance_date, tmdb_show_id, description,
|
|
poster_url, status, announcement_date, credit_type,
|
|
character_name, job_title
|
|
) VALUES (?, ?, 'Movie', ?, ?, ?, ?, ?, ?, 'aired', CURRENT_TIMESTAMP, ?, ?, ?)
|
|
ON CONFLICT(celebrity_id, appearance_type, tmdb_show_id, credit_type) WHERE appearance_type = 'Movie'
|
|
DO UPDATE SET
|
|
show_name = excluded.show_name,
|
|
appearance_date = excluded.appearance_date,
|
|
description = excluded.description,
|
|
poster_url = excluded.poster_url,
|
|
character_name = excluded.character_name,
|
|
job_title = excluded.job_title,
|
|
status = 'aired',
|
|
updated_at = CURRENT_TIMESTAMP
|
|
''', (
|
|
celeb_id, celeb_name, movie["movie_name"],
|
|
movie["studio"], movie["release_date"],
|
|
movie["tmdb_movie_id"], movie["description"],
|
|
movie.get("poster_url"), credit_type, character_name, job_title
|
|
))
|
|
# Don't notify for historical movies
|
|
|
|
conn.commit()
|
|
total_count = len(tv_appearances) + len(movie_appearances) + len(aired_movies)
|
|
logger.info(f"Synced {total_count} TMDb appearances for {celeb_name} ({len(tv_appearances)} upcoming TV, {len(movie_appearances)} upcoming movies, {len(aired_movies)} aired movies)")
|
|
|
|
# ===== Podchaser Sync =====
|
|
if podchaser:
|
|
sync_extra_data["phase"] = "fetching_podcasts"
|
|
activity_manager.update_background_task(
|
|
"tmdb_sync", f"Fetching podcasts for {celeb_name}",
|
|
progress_current=celebrities_processed, progress_total=len(celebrities),
|
|
extra_data=sync_extra_data
|
|
)
|
|
|
|
# Get or search for Podchaser creator ID
|
|
if not podchaser_creator_id:
|
|
creator = await podchaser.search_creator(celeb_name)
|
|
if creator:
|
|
podchaser_creator_id = creator["pcid"]
|
|
|
|
# Save creator ID
|
|
with db.get_connection(for_write=True) as conn:
|
|
cursor = conn.cursor()
|
|
cursor.execute('''
|
|
UPDATE celebrity_profiles
|
|
SET podchaser_creator_id = ?, podchaser_last_sync = CURRENT_TIMESTAMP
|
|
WHERE id = ?
|
|
''', (podchaser_creator_id, celeb_id))
|
|
conn.commit()
|
|
else:
|
|
logger.warning(f"Could not find Podchaser creator for: {celeb_name}")
|
|
podchaser_creator_id = None
|
|
|
|
# Find upcoming podcast guest appearances if we have creator ID
|
|
if podchaser_creator_id:
|
|
podcast_appearances = await podchaser.find_upcoming_podcast_appearances(
|
|
podchaser_creator_id,
|
|
creator_name=celeb_name
|
|
)
|
|
else:
|
|
podcast_appearances = []
|
|
|
|
# Insert/update podcast appearances
|
|
if podcast_appearances:
|
|
sync_extra_data["total_podcasts"] = len(podcast_appearances)
|
|
sync_extra_data["podcasts_processed"] = 0
|
|
with db.get_connection(for_write=True) as conn:
|
|
cursor = conn.cursor()
|
|
|
|
for idx, podcast in enumerate(podcast_appearances):
|
|
sync_extra_data["current_podcast"] = podcast.get("podcast_name", "Unknown Podcast")
|
|
sync_extra_data["podcasts_processed"] = idx
|
|
activity_manager.update_background_task(
|
|
"tmdb_sync", f"Processing podcast {podcast.get('podcast_name', 'Unknown')}",
|
|
progress_current=celebrities_processed, progress_total=len(celebrities),
|
|
extra_data=sync_extra_data
|
|
)
|
|
# Check if this podcast episode already exists
|
|
cursor.execute('''
|
|
SELECT id FROM celebrity_appearances
|
|
WHERE celebrity_id = ?
|
|
AND appearance_type = 'Podcast'
|
|
AND tmdb_show_id = ?
|
|
''', (celeb_id, podcast["podchaser_episode_id"]))
|
|
|
|
existing = cursor.fetchone()
|
|
is_new = existing is None
|
|
|
|
# Insert or update the podcast appearance
|
|
# Using tmdb_show_id column to store podchaser_episode_id for consistency
|
|
cursor.execute('''
|
|
INSERT INTO celebrity_appearances (
|
|
celebrity_id, celebrity_name, appearance_type,
|
|
show_name, episode_title, network,
|
|
appearance_date, tmdb_show_id, description,
|
|
poster_url, url, audio_url, announcement_date
|
|
) VALUES (?, ?, 'Podcast', ?, ?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
|
|
ON CONFLICT(celebrity_id, appearance_type, tmdb_show_id) WHERE appearance_type = 'Podcast'
|
|
DO UPDATE SET
|
|
episode_title = excluded.episode_title,
|
|
show_name = excluded.show_name,
|
|
appearance_date = excluded.appearance_date,
|
|
description = excluded.description,
|
|
poster_url = excluded.poster_url,
|
|
url = excluded.url,
|
|
audio_url = excluded.audio_url,
|
|
updated_at = CURRENT_TIMESTAMP
|
|
''', (
|
|
celeb_id, celeb_name, podcast["podcast_name"],
|
|
podcast["episode_title"], podcast.get("role"), podcast["air_date"],
|
|
podcast["podchaser_episode_id"], podcast.get("description"),
|
|
podcast.get("poster_url"), podcast.get("episode_url"), podcast.get("audio_url")
|
|
))
|
|
|
|
# Track new podcast appearances for notification
|
|
if is_new:
|
|
new_appearances.append({
|
|
**podcast,
|
|
"appearance_type": "Podcast",
|
|
"show_name": podcast["podcast_name"],
|
|
"appearance_date": podcast["air_date"]
|
|
})
|
|
|
|
conn.commit()
|
|
sync_extra_data["podcasts_processed"] = len(podcast_appearances)
|
|
logger.info(f"Synced {len(podcast_appearances)} Podchaser podcast appearances for {celeb_name}")
|
|
|
|
# Backfill missing audio URLs from RSS feeds for Omny.fm podcasts
|
|
await backfill_audio_urls_from_rss(celeb_id, db)
|
|
|
|
# Clear podcast progress after Podchaser sync
|
|
sync_extra_data["current_podcast"] = None
|
|
|
|
# ===== Taddy Sync =====
|
|
if taddy:
|
|
sync_extra_data["phase"] = "fetching_podcasts_taddy"
|
|
activity_manager.update_background_task(
|
|
"tmdb_sync", f"Fetching Taddy podcasts for {celeb_name}",
|
|
progress_current=celebrities_processed, progress_total=len(celebrities),
|
|
extra_data=sync_extra_data
|
|
)
|
|
|
|
try:
|
|
# Search for podcast appearances by celebrity name
|
|
# Calculate max_pages from max_results (25 per page)
|
|
max_pages = (taddy_max_results + 24) // 25
|
|
taddy_appearances = await taddy.search_podcast_appearances(
|
|
celeb_name,
|
|
lookback_days=taddy_lookback_days,
|
|
lookahead_days=30,
|
|
max_pages=max_pages
|
|
)
|
|
|
|
if taddy_appearances:
|
|
sync_extra_data["total_podcasts"] = len(taddy_appearances)
|
|
sync_extra_data["podcasts_processed"] = 0
|
|
with db.get_connection(for_write=True) as conn:
|
|
cursor = conn.cursor()
|
|
|
|
for idx, podcast in enumerate(taddy_appearances):
|
|
sync_extra_data["current_podcast"] = podcast.get("show_name", "Unknown Podcast")
|
|
sync_extra_data["podcasts_processed"] = idx
|
|
activity_manager.update_background_task(
|
|
"tmdb_sync", f"Processing Taddy podcast {podcast.get('show_name', 'Unknown')}",
|
|
progress_current=celebrities_processed, progress_total=len(celebrities),
|
|
extra_data=sync_extra_data
|
|
)
|
|
# Use taddy_episode_uuid as unique identifier
|
|
taddy_uuid = podcast.get("taddy_episode_uuid")
|
|
if not taddy_uuid:
|
|
continue
|
|
|
|
# Check if this podcast episode already exists (by episode title + show name as fallback)
|
|
cursor.execute('''
|
|
SELECT id FROM celebrity_appearances
|
|
WHERE celebrity_id = ?
|
|
AND appearance_type = 'Podcast'
|
|
AND show_name = ?
|
|
AND episode_title = ?
|
|
''', (celeb_id, podcast["show_name"], podcast["episode_title"]))
|
|
|
|
existing = cursor.fetchone()
|
|
is_new = existing is None
|
|
|
|
if is_new:
|
|
# Insert new podcast appearance
|
|
cursor.execute('''
|
|
INSERT INTO celebrity_appearances (
|
|
celebrity_id, celebrity_name, appearance_type,
|
|
show_name, episode_title, appearance_date,
|
|
description, poster_url, audio_url, url, status,
|
|
credit_type, character_name, announcement_date
|
|
) VALUES (?, ?, 'Podcast', ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
|
|
''', (
|
|
celeb_id, celeb_name, podcast["show_name"],
|
|
podcast["episode_title"], podcast["appearance_date"],
|
|
podcast.get("description"), podcast.get("poster_url"),
|
|
podcast.get("audio_url"), podcast.get("url"),
|
|
podcast.get("status", "aired"),
|
|
podcast.get("credit_type", "guest"), podcast.get("character_name", "Self")
|
|
))
|
|
|
|
# Track new podcast appearances for notification
|
|
new_appearances.append({
|
|
**podcast,
|
|
"appearance_type": "Podcast"
|
|
})
|
|
|
|
conn.commit()
|
|
sync_extra_data["podcasts_processed"] = len(taddy_appearances)
|
|
logger.info(f"Synced {len(taddy_appearances)} Taddy podcast appearances for {celeb_name}")
|
|
|
|
except Exception as e:
|
|
logger.error(f"Taddy sync error for {celeb_name}: {e}")
|
|
|
|
# Clear podcast progress after Taddy sync
|
|
sync_extra_data["current_podcast"] = None
|
|
|
|
# ===== Plex Library Sync =====
|
|
# Discover movies/TV shows featuring this celebrity from Plex library
|
|
# For TV shows: matches at EPISODE level (only episodes in Plex get plex_rating_key)
|
|
# For Movies: matches the movie entry
|
|
if plex:
|
|
sync_extra_data["phase"] = "fetching_plex"
|
|
activity_manager.update_background_task(
|
|
"tmdb_sync", f"Scanning Plex library for {celeb_name}",
|
|
progress_current=celebrities_processed, progress_total=len(celebrities),
|
|
extra_data=sync_extra_data
|
|
)
|
|
|
|
# Helper to normalize show names for fuzzy matching
|
|
def normalize_show_name(name: str) -> str:
|
|
"""Strip country codes and years for matching, but keep subtitle after colon"""
|
|
if not name:
|
|
return ""
|
|
# Remove (US), (UK), (AU), etc.
|
|
normalized = re.sub(r'\s*\([A-Z]{2}\)\s*$', '', name)
|
|
# Remove year in parentheses at end
|
|
normalized = re.sub(r'\s*\(\d{4}\)\s*$', '', normalized)
|
|
# Don't strip colon parts - they're often meaningful (e.g., "Searching for Mexico")
|
|
return normalized.strip()
|
|
|
|
try:
|
|
plex_appearances = await plex.search_by_actor(celeb_name)
|
|
|
|
if plex_appearances:
|
|
plex_episodes_updated = 0
|
|
plex_movies_updated = 0
|
|
|
|
with db.get_connection(for_write=True) as conn:
|
|
cursor = conn.cursor()
|
|
|
|
for app in plex_appearances:
|
|
plex_show_key = app.get("plex_rating_key")
|
|
if not plex_show_key:
|
|
continue
|
|
|
|
show_name = app.get("show_name")
|
|
appearance_type = app.get("appearance_type", "Movie")
|
|
plex_library_id = app.get("plex_library_id")
|
|
normalized_plex = normalize_show_name(show_name)
|
|
|
|
if appearance_type == "TV":
|
|
# ===== TV SHOWS: Episode-level matching =====
|
|
# Get all episodes from Plex for this show
|
|
plex_episodes = await plex.get_all_episodes(plex_show_key)
|
|
|
|
if not plex_episodes:
|
|
logger.debug(f"No episodes found in Plex for '{show_name}'")
|
|
continue
|
|
|
|
# Get all TMDb episodes for this show (exact or normalized match only)
|
|
cursor.execute('''
|
|
SELECT id, season_number, episode_number
|
|
FROM celebrity_appearances
|
|
WHERE celebrity_id = ?
|
|
AND appearance_type = 'TV'
|
|
AND (show_name = ? OR show_name = ?)
|
|
AND season_number IS NOT NULL
|
|
AND episode_number IS NOT NULL
|
|
AND plex_rating_key IS NULL
|
|
''', (celeb_id, show_name, normalized_plex))
|
|
|
|
tmdb_episodes = cursor.fetchall()
|
|
|
|
# Match each TMDb episode to Plex episode
|
|
for row in tmdb_episodes:
|
|
app_id, season, episode = row
|
|
plex_ep = plex_episodes.get((season, episode))
|
|
|
|
if plex_ep:
|
|
# Episode exists in Plex - update with episode-specific rating key
|
|
cursor.execute('''
|
|
UPDATE celebrity_appearances
|
|
SET plex_rating_key = ?,
|
|
plex_show_rating_key = ?,
|
|
plex_library_id = ?,
|
|
updated_at = CURRENT_TIMESTAMP
|
|
WHERE id = ?
|
|
''', (plex_ep['ratingKey'], plex_show_key, plex_library_id, app_id))
|
|
plex_episodes_updated += 1
|
|
|
|
logger.debug(f"Matched {plex_episodes_updated} episodes for '{show_name}' "
|
|
f"(Plex has {len(plex_episodes)} eps, TMDb has {len(tmdb_episodes)} eps)")
|
|
|
|
else:
|
|
# ===== MOVIES: Direct matching =====
|
|
# Check if already linked
|
|
cursor.execute('''
|
|
SELECT id FROM celebrity_appearances
|
|
WHERE celebrity_id = ? AND plex_rating_key = ?
|
|
''', (celeb_id, plex_show_key))
|
|
|
|
if cursor.fetchone():
|
|
continue
|
|
|
|
# Try exact match
|
|
cursor.execute('''
|
|
UPDATE celebrity_appearances
|
|
SET plex_rating_key = ?,
|
|
plex_library_id = ?,
|
|
updated_at = CURRENT_TIMESTAMP
|
|
WHERE celebrity_id = ?
|
|
AND appearance_type = 'Movie'
|
|
AND show_name = ?
|
|
AND plex_rating_key IS NULL
|
|
''', (plex_show_key, plex_library_id, celeb_id, show_name))
|
|
|
|
if cursor.rowcount > 0:
|
|
plex_movies_updated += cursor.rowcount
|
|
continue
|
|
|
|
# Try normalized match (exact normalized name only)
|
|
if normalized_plex and normalized_plex != show_name:
|
|
cursor.execute('''
|
|
UPDATE celebrity_appearances
|
|
SET plex_rating_key = ?,
|
|
plex_library_id = ?,
|
|
updated_at = CURRENT_TIMESTAMP
|
|
WHERE celebrity_id = ?
|
|
AND appearance_type = 'Movie'
|
|
AND plex_rating_key IS NULL
|
|
AND show_name = ?
|
|
''', (plex_show_key, plex_library_id, celeb_id, normalized_plex))
|
|
|
|
if cursor.rowcount > 0:
|
|
plex_movies_updated += cursor.rowcount
|
|
continue
|
|
|
|
conn.commit()
|
|
logger.info(f"Plex sync for {celeb_name}: {plex_episodes_updated} episodes, "
|
|
f"{plex_movies_updated} movies updated (matching only, no new entries)")
|
|
|
|
except Exception as e:
|
|
logger.error(f"Plex sync error for {celeb_name}: {e}")
|
|
|
|
# Send notifications for all newly discovered appearances
|
|
if notifier and new_appearances:
|
|
for app in new_appearances:
|
|
try:
|
|
send_appearance_notification(notifier, celeb_name, app, db)
|
|
logger.info(f"Sent notification for new appearance: {celeb_name} on {app.get('show_name')}")
|
|
except Exception as e:
|
|
logger.error(f"Failed to send notification for {celeb_name} appearance: {e}")
|
|
|
|
# Rate limiting
|
|
await asyncio.sleep(1)
|
|
|
|
# Update progress after successful sync
|
|
celebrities_processed += 1
|
|
total_new_appearances += len(new_appearances)
|
|
sync_extra_data["new_appearances"] = total_new_appearances
|
|
activity_manager.update_background_task(
|
|
"tmdb_sync", f"Completed {celeb_name}",
|
|
progress_current=celebrities_processed, progress_total=len(celebrities),
|
|
extra_data=sync_extra_data
|
|
)
|
|
|
|
checkpoint.mark_completed(str(celeb_id))
|
|
|
|
except Exception as e:
|
|
logger.error(f"Error syncing {celeb_name}: {e}")
|
|
celebrities_processed += 1
|
|
activity_manager.update_background_task(
|
|
"tmdb_sync", f"Error syncing {celeb_name}",
|
|
progress_current=celebrities_processed, progress_total=len(celebrities),
|
|
extra_data=sync_extra_data
|
|
)
|
|
checkpoint.mark_completed(str(celeb_id))
|
|
continue
|
|
|
|
# Checkpoint complete
|
|
checkpoint.finish()
|
|
|
|
# Update status of past appearances to 'aired'
|
|
update_past_appearances_status(db)
|
|
|
|
# Pre-cache posters for appearances without cached data
|
|
await cache_uncached_posters(db)
|
|
|
|
# Update last check time for enabled services
|
|
with db.get_connection(for_write=True) as conn:
|
|
cursor = conn.cursor()
|
|
|
|
updates = []
|
|
if tmdb:
|
|
updates.append("tmdb_last_check = CURRENT_TIMESTAMP")
|
|
if podchaser:
|
|
updates.append("podchaser_last_check = CURRENT_TIMESTAMP")
|
|
if taddy:
|
|
updates.append("taddy_last_check = CURRENT_TIMESTAMP")
|
|
if plex:
|
|
updates.append("plex_last_check = CURRENT_TIMESTAMP")
|
|
|
|
if updates:
|
|
query = f"UPDATE appearance_config SET {', '.join(updates)} WHERE id = 1"
|
|
cursor.execute(query)
|
|
conn.commit()
|
|
|
|
services = []
|
|
if tmdb:
|
|
services.append("TMDb")
|
|
if podchaser:
|
|
services.append("Podchaser")
|
|
if taddy:
|
|
services.append("Taddy")
|
|
if plex:
|
|
services.append("Plex")
|
|
|
|
logger.info(f"Appearance sync completed for: {', '.join(services)}")
|
|
|
|
# Mark sync as complete
|
|
activity_manager.stop_background_task("tmdb_sync")
|
|
|
|
except Exception as e:
|
|
logger.error(f"Fatal error in sync_tmdb_appearances: {e}")
|
|
activity_manager.stop_background_task("tmdb_sync")
|
|
|
|
|
|
# ============================================================================
|
|
# PLEX INTEGRATION ENDPOINTS
|
|
# ============================================================================
|
|
|
|
# Plex OAuth state (in-memory for simplicity, could use Redis for production)
# Maps a Plex PIN id -> {'code': <pin code>, 'created_at': <ISO-8601 timestamp>}.
# Entries are created by start_plex_auth and removed by check_plex_auth once
# the user completes authentication. Being process-local, state is lost on
# restart and is not shared across workers.
plex_oauth_state: Dict[int, Dict] = {}
|
|
|
|
class PlexConfigRequest(BaseModel):
    """Request body for manually configuring a Plex server connection."""
    # Base URL of the Plex server (e.g. "http://host:32400").
    plex_url: str
    # Plex authentication token used to talk to that server.
    plex_token: str
|
|
|
|
class PlexServerSelectRequest(BaseModel):
    """Request body for saving the Plex server selected after the OAuth flow."""
    # Connection URL of the chosen server.
    server_url: str
    # Server-scoped access token obtained during OAuth.
    server_token: str
    # Human-readable server name (used in response messages).
    server_name: str
    # Plex machine identifier of the server.
    # NOTE(review): not currently persisted by save_plex_auth — confirm intent.
    machine_identifier: str
|
|
|
|
@router.post("/plex/auth/start")
async def start_plex_auth(
    current_user: Dict = Depends(get_current_user)
):
    """
    Start Plex OAuth flow. Returns a PIN and auth URL.
    User should be redirected to the auth_url to authenticate.

    Returns:
        dict with pin_id, auth_url, expires_at and a user-facing message.

    Raises:
        HTTPException 500 if the Plex PIN could not be created.
    """
    try:
        # Prune stale pending PINs. check_plex_auth only removes an entry on
        # successful authentication, so abandoned attempts would otherwise
        # accumulate in plex_oauth_state for the lifetime of the process.
        # Plex PINs themselves expire well within this window.
        cutoff = datetime.now() - timedelta(minutes=30)
        stale_ids = [
            pid for pid, state in plex_oauth_state.items()
            if datetime.fromisoformat(state['created_at']) < cutoff
        ]
        for pid in stale_ids:
            plex_oauth_state.pop(pid, None)

        oauth = PlexOAuth()
        pin_data = await oauth.create_pin()

        if not pin_data:
            raise HTTPException(status_code=500, detail="Failed to create Plex authentication PIN")

        # Store PIN for later verification by check_plex_auth
        plex_oauth_state[pin_data['id']] = {
            'code': pin_data['code'],
            'created_at': datetime.now().isoformat(),
        }

        return {
            "success": True,
            "pin_id": pin_data['id'],
            "auth_url": pin_data['auth_url'],
            "expires_at": pin_data['expires_at'],
            "message": "Open the auth_url in a new tab to sign in to Plex"
        }

    except HTTPException:
        # Re-raise FastAPI errors untouched so status codes survive.
        raise
    except Exception as e:
        logger.error(f"Error starting Plex auth: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
|
|
@router.get("/plex/auth/check/{pin_id}")
async def check_plex_auth(
    pin_id: int,
    current_user: Dict = Depends(get_current_user)
):
    """
    Check if Plex authentication is complete.
    Returns the auth token and available servers if successful.
    """
    try:
        # Only PINs issued by start_plex_auth are valid here.
        if pin_id not in plex_oauth_state:
            raise HTTPException(status_code=404, detail="PIN not found or expired")

        oauth_client = PlexOAuth()
        auth_token = await oauth_client.check_pin(pin_id)

        # User has not finished signing in yet — caller should poll again.
        if not auth_token:
            return {
                "success": False,
                "authenticated": False,
                "message": "Waiting for authentication..."
            }

        # Token obtained: resolve account details and reachable servers.
        account = await oauth_client.get_user_info(auth_token)
        server_list = await oauth_client.get_user_servers(auth_token)

        # The PIN is single-use; drop it from the pending-state map.
        plex_oauth_state.pop(pin_id, None)

        return {
            "success": True,
            "authenticated": True,
            "token": auth_token,
            "user": account,
            "servers": server_list,
            "message": f"Authenticated as {account.get('username', 'Unknown')}"
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error checking Plex auth: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
|
|
@router.post("/plex/auth/save")
async def save_plex_auth(
    config: PlexServerSelectRequest,
    current_user: Dict = Depends(get_current_user),
    db: UnifiedDatabase = Depends(get_db)
):
    """
    Save the selected Plex server configuration after OAuth.
    """
    try:
        # Persist the chosen server first: OAuth already proved the token
        # works, so the config is worth keeping even if the direct
        # reachability probe below fails.
        with db.get_connection(for_write=True) as conn:
            conn.cursor().execute('''
                UPDATE appearance_config
                SET plex_url = ?, plex_token = ?, updated_at = CURRENT_TIMESTAMP
                WHERE id = 1
            ''', (config.server_url, config.server_token))
            conn.commit()

        # Probe the connection (optional - OAuth already validated the token)
        client = PlexClient(config.server_url, config.server_token)
        reachable = await client.test_connection()

        if reachable:
            return {
                "success": True,
                "message": f"Connected to {config.server_name}",
                "server_name": config.server_name,
                "connection_verified": True
            }

        # OAuth succeeded, so the token is valid. A failed connection test is
        # typically a network issue (e.g., server behind NAT, firewall, or
        # using relay) rather than bad credentials.
        logger.warning(f"Plex connection test failed for {config.server_name}, but OAuth was successful. Config saved.")
        return {
            "success": True,
            "message": f"Connected to {config.server_name} (direct connection unavailable, may use relay)",
            "server_name": config.server_name,
            "connection_verified": False
        }

    except Exception as e:
        logger.error(f"Error saving Plex auth config: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
|
|
@router.post("/plex/configure")
async def configure_plex(
    config: PlexConfigRequest,
    current_user: Dict = Depends(get_current_user),
    db: UnifiedDatabase = Depends(get_db)
):
    """Save Plex connection settings"""
    try:
        # Manual (non-OAuth) configuration: write URL + token straight
        # into the singleton config row.
        params = (config.plex_url, config.plex_token)
        with db.get_connection(for_write=True) as conn:
            cur = conn.cursor()
            cur.execute('''
                UPDATE appearance_config
                SET plex_url = ?, plex_token = ?, updated_at = CURRENT_TIMESTAMP
                WHERE id = 1
            ''', params)
            conn.commit()

        return {"success": True, "message": "Plex configuration saved"}

    except Exception as e:
        logger.error(f"Error saving Plex configuration: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
@router.get("/plex/test")
async def test_plex_connection(
    current_user: Dict = Depends(get_current_user),
    db: UnifiedDatabase = Depends(get_db)
):
    """Test Plex connection"""
    try:
        # Pull stored credentials; both URL and token must be present.
        with db.get_connection() as conn:
            cur = conn.cursor()
            cur.execute('SELECT plex_url, plex_token FROM appearance_config WHERE id = 1')
            creds = cur.fetchone()

        if not creds or not creds[0] or not creds[1]:
            return {"success": False, "message": "Plex not configured"}

        client = PlexClient(creds[0], creds[1])
        if not await client.test_connection():
            return {"success": False, "message": "Failed to connect to Plex server"}

        # Connected — enrich the response with server identity details.
        server_info = await client.get_server_identity()
        return {
            "success": True,
            "message": f"Connected to {server_info.get('friendlyName', 'Plex Server')}",
            "server_name": server_info.get('friendlyName'),
            "version": server_info.get('version')
        }

    except Exception as e:
        logger.error(f"Error testing Plex connection: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
@router.get("/plex/libraries")
async def get_plex_libraries(
    current_user: Dict = Depends(get_current_user),
    db: UnifiedDatabase = Depends(get_db)
):
    """Get list of Plex libraries"""
    try:
        # Stored credentials are required before we can enumerate libraries.
        with db.get_connection() as conn:
            cur = conn.cursor()
            cur.execute('SELECT plex_url, plex_token FROM appearance_config WHERE id = 1')
            creds = cur.fetchone()

        if not creds or not creds[0] or not creds[1]:
            raise HTTPException(status_code=400, detail="Plex not configured")

        client = PlexClient(creds[0], creds[1])
        sections = await client.get_libraries()

        return {
            "success": True,
            "libraries": sections
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting Plex libraries: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
@router.post("/plex/match")
async def match_appearances_to_plex(
    background_tasks: BackgroundTasks,
    current_user: Dict = Depends(get_current_user),
    db: UnifiedDatabase = Depends(get_db)
):
    """Match appearances to Plex library items (runs in background)"""
    try:
        # Validate Plex is configured before queueing the work.
        with db.get_connection() as conn:
            cur = conn.cursor()
            cur.execute('SELECT plex_url, plex_token FROM appearance_config WHERE id = 1')
            creds = cur.fetchone()

        if not creds or not creds[0] or not creds[1]:
            raise HTTPException(status_code=400, detail="Plex not configured")

        # Manual runs don't send notifications
        background_tasks.add_task(match_plex_appearances_task, send_notifications=False)

        return {
            "success": True,
            "message": "Plex matching started in background"
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error starting Plex matching: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
async def match_plex_appearances_task(send_notifications: bool = True):
    """Background task to match appearances to Plex library.

    Loads up to 500 unmatched Movie/TV appearances, asks PlexClient to match
    them, persists each match incrementally via the save_match callback, and
    (for scheduled runs) pushes up to 5 Pushover notifications for brand-new
    matches.

    Args:
        send_notifications: True for scheduled runs (notifications allowed);
            manual runs pass False to suppress them.
    """
    # Runs outside the request cycle, so resolve the DB from app state
    # instead of FastAPI dependency injection.
    db = get_app_state().db

    try:
        # Get Plex config
        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute('SELECT plex_url, plex_token FROM appearance_config WHERE id = 1')
            row = cursor.fetchone()

        if not row or not row[0] or not row[1]:
            logger.error("Plex not configured for matching")
            return

        plex_url, plex_token = row

        plex = PlexClient(plex_url, plex_token)

        # Get unmatched appearances OR TV shows missing show key (movies and TV shows)
        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute('''
                SELECT id, appearance_type, show_name, tmdb_show_id, appearance_date,
                       season_number, episode_number
                FROM celebrity_appearances
                WHERE (plex_rating_key IS NULL
                       OR (appearance_type = 'TV' AND plex_show_rating_key IS NULL))
                  AND appearance_type IN ('Movie', 'TV')
                ORDER BY appearance_date DESC
                LIMIT 500
            ''')
            rows = cursor.fetchall()

        if not rows:
            logger.info("No unmatched appearances to process")
            return

        # Reshape DB rows into the dict format batch_match_appearances expects.
        # The same show_name / tmdb id columns serve double duty depending on
        # whether the row is a TV episode or a movie.
        appearances = []
        for row in rows:
            app = {
                'id': row[0],
                'appearance_type': row[1],
                'show_name': row[2] if row[1] == 'TV' else None,
                'movie_name': row[2] if row[1] == 'Movie' else None,
                'tmdb_show_id': row[3] if row[1] == 'TV' else None,
                'tmdb_movie_id': row[3] if row[1] == 'Movie' else None,
                'release_date': row[4],
                'season_number': row[5],
                'episode_number': row[6],
            }
            appearances.append(app)

        logger.info(f"Matching {len(appearances)} appearances to Plex library")

        # Match to Plex with incremental saves for real-time updates
        total_matched = 0
        new_matches = []  # Track new matches for notifications
        batch_size = 10  # Save every 10 matches

        async def save_match(appearance_id: int, match_info: dict):
            # Per-match callback invoked by batch_match_appearances: persists
            # the Plex keys immediately (so progress survives interruption)
            # and collects details for brand-new matches for the notification
            # pass after the batch completes.
            nonlocal total_matched
            with db.get_connection(for_write=True) as conn:
                cursor = conn.cursor()

                # Check if this is a NEW match (not previously matched)
                cursor.execute('SELECT plex_rating_key FROM celebrity_appearances WHERE id = ?', (appearance_id,))
                existing = cursor.fetchone()
                is_new_match = existing and existing[0] is None

                cursor.execute('''
                    UPDATE celebrity_appearances
                    SET plex_rating_key = ?, plex_library_id = ?, plex_show_rating_key = ?, updated_at = CURRENT_TIMESTAMP
                    WHERE id = ?
                ''', (match_info['plex_rating_key'], match_info.get('plex_library_id'), match_info.get('plex_show_rating_key'), appearance_id))
                conn.commit()

                # If new match, get details for notification
                # Only notify if we have an actual episode/movie key (not just show key)
                # For TV shows, plex_rating_key is the episode key - if None, episode not in Plex
                has_actual_match = match_info.get('plex_rating_key') is not None
                if is_new_match and has_actual_match:
                    cursor.execute('''
                        SELECT cp.name, ca.show_name, ca.appearance_type, ca.appearance_date, ca.poster_url
                        FROM celebrity_appearances ca
                        JOIN celebrity_profiles cp ON ca.celebrity_id = cp.id
                        WHERE ca.id = ?
                    ''', (appearance_id,))
                    match_details = cursor.fetchone()
                    if match_details:
                        new_matches.append({
                            'appearance_id': appearance_id,
                            'celebrity_name': match_details[0],
                            'show_name': match_details[1],
                            'appearance_type': match_details[2],
                            'appearance_date': match_details[3],
                            'poster_url': match_details[4]
                        })

            total_matched += 1
            if total_matched % batch_size == 0:
                logger.info(f"Plex matching progress: {total_matched} matched so far")

        # NOTE(review): return value is unused — save_match already persisted
        # everything incrementally.
        matches = await plex.batch_match_appearances(appearances, on_match=save_match)

        logger.info(f"Matched {total_matched} appearances to Plex library")

        # Send notifications for new matches (limit to 5 to avoid spam)
        # Only send notifications for scheduled runs, not manual runs
        if new_matches and send_notifications:
            # Check if Plex match notifications are enabled
            with db.get_connection() as conn:
                cursor = conn.cursor()
                cursor.execute('SELECT notify_plex_matches, notify_include_poster FROM appearance_config WHERE id = 1')
                notify_config = cursor.fetchone()
                # Both flags default to True when the row/column is NULL.
                plex_notify_enabled = notify_config[0] if notify_config and notify_config[0] is not None else True
                include_poster = notify_config[1] if notify_config and notify_config[1] is not None else True

            if plex_notify_enabled:
                logger.info(f"Sending notifications for {len(new_matches)} new Plex matches")
                notifications_sent = 0
                for match in new_matches[:5]:  # Limit to 5 notifications
                    await send_plex_match_notification(
                        celebrity_name=match['celebrity_name'],
                        show_name=match['show_name'],
                        appearance_type=match['appearance_type'],
                        appearance_date=match['appearance_date'],
                        appearance_id=match['appearance_id'],
                        poster_url=match.get('poster_url') if include_poster else None,
                        db=db
                    )
                    notifications_sent += 1

                if len(new_matches) > 5:
                    logger.info(f"Skipped {len(new_matches) - 5} additional match notifications to avoid spam")
            else:
                logger.info(f"Plex match notifications disabled, skipping {len(new_matches)} matches")

    except Exception as e:
        logger.error(f"Error in Plex matching task: {e}")
|
|
|
|
|
|
@router.get("/plex/stats")
async def get_plex_match_stats(
    current_user: Dict = Depends(get_current_user),
    db: UnifiedDatabase = Depends(get_db)
):
    """Get Plex matching statistics"""
    try:
        with db.get_connection() as conn:
            cur = conn.cursor()

            # Get total matchable appearances (Movies and TV only)
            cur.execute('''
                SELECT COUNT(*) FROM celebrity_appearances
                WHERE appearance_type IN ('Movie', 'TV')
            ''')
            total = cur.fetchone()[0]

            # Get matched count
            cur.execute('''
                SELECT COUNT(*) FROM celebrity_appearances
                WHERE plex_rating_key IS NOT NULL
                  AND appearance_type IN ('Movie', 'TV')
            ''')
            matched = cur.fetchone()[0]

            # Get breakdown by type
            cur.execute('''
                SELECT appearance_type,
                       COUNT(*) as total,
                       SUM(CASE WHEN plex_rating_key IS NOT NULL THEN 1 ELSE 0 END) as matched
                FROM celebrity_appearances
                WHERE appearance_type IN ('Movie', 'TV')
                GROUP BY appearance_type
            ''')
            breakdown = {
                kind: {'total': kind_total, 'matched': kind_matched}
                for kind, kind_total, kind_matched in cur.fetchall()
            }

        pct = round((matched / total * 100) if total > 0 else 0, 1)
        return {
            "total": total,
            "matched": matched,
            "unmatched": total - matched,
            "percentage": pct,
            "breakdown": breakdown
        }

    except Exception as e:
        logger.error(f"Error getting Plex stats: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
|
|
@router.get("/plex/watch-url/{appearance_id}")
async def get_plex_watch_url(
    appearance_id: int,
    current_user: Dict = Depends(get_current_user),
    db: UnifiedDatabase = Depends(get_db)
):
    """Get Plex watch URL for an appearance.

    Returns a deep link into Plex for the matched item. Responds with
    success=False (not an error) when the appearance is unmatched or
    Plex is not configured; 404 only when the appearance itself is missing.
    """
    try:
        with db.get_connection() as conn:
            cursor = conn.cursor()

            # Fetch the appearance and the config in separate queries. The
            # previous implicit cross join (appearances, appearance_config
            # WHERE c.id = 1) returned zero rows whenever the config row was
            # missing, which surfaced as a misleading 404 "Appearance not
            # found" even for valid appearances.
            cursor.execute(
                'SELECT plex_rating_key FROM celebrity_appearances WHERE id = ?',
                (appearance_id,)
            )
            appearance = cursor.fetchone()

            if not appearance:
                raise HTTPException(status_code=404, detail="Appearance not found")

            cursor.execute('SELECT plex_url, plex_token FROM appearance_config WHERE id = 1')
            config = cursor.fetchone()

        rating_key = appearance[0]

        if not rating_key:
            return {"success": False, "message": "Appearance not matched to Plex"}

        if not config or not config[0]:
            return {"success": False, "message": "Plex not configured"}

        plex_url, plex_token = config
        plex = PlexClient(plex_url, plex_token)
        server_info = await plex.get_server_identity()

        # Prefer the full app.plex.tv URL (needs the machine identifier);
        # fall back to the direct-server URL if identity lookup failed.
        if server_info:
            watch_url = plex.get_full_watch_url(rating_key, server_info.get('machineIdentifier'))
        else:
            watch_url = plex.get_watch_url(rating_key)

        return {
            "success": True,
            "watch_url": watch_url,
            "rating_key": rating_key
        }

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error getting Plex watch URL: {e}")
        raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
|
|
# ==================== HELPER FUNCTIONS FOR NOTIFICATIONS ====================
|
|
|
|
async def send_appearance_reminders(db: UnifiedDatabase = None) -> Dict:
    """
    Check for upcoming appearances and send push notification reminders.
    This function is called at midnight daily by the scheduler.

    Args:
        db: Optional database handle; falls back to the app-state DB when
            called from the scheduler (no dependency injection available).

    Returns:
        Dict with success flag, human-readable message, and sent count.
    """
    if db is None:
        db = get_app_state().db

    try:
        with db.get_connection(for_write=True) as conn:
            cursor = conn.cursor()

            # Get notification settings
            cursor.execute('''
                SELECT notify_new_appearances, notify_days_before, notify_include_poster
                FROM appearance_config
                WHERE id = 1
            ''')
            config = cursor.fetchone()

            if not config or not config[0]:
                logger.info("Appearance notifications are disabled")
                return {"success": True, "message": "Notifications disabled", "sent": 0}

            # NOTE(review): notify_days_before is read but currently unused —
            # the query below is hard-coded to same-day reminders.
            notify_days_before = config[1] or 1
            include_poster = config[2] if config[2] is not None else True

            # Get Pushover config from SettingsManager
            settings_manager = SettingsManager(str(db.db_path))
            pushover_config = settings_manager.get('pushover', {})

            if not pushover_config.get('enabled'):
                logger.info("Pushover notifications disabled or not configured")
                return {"success": True, "message": "Pushover disabled or not configured", "sent": 0}

            if not pushover_config.get('user_key') or not pushover_config.get('api_token'):
                logger.info("Pushover credentials not configured")
                return {"success": True, "message": "Pushover credentials not configured", "sent": 0}

            # Get appearances happening TODAY only (reminders sent at midnight)
            # Only get appearances we haven't already sent reminders for
            # (the LEFT JOIN + an.id IS NULL filter excludes appearances that
            # already have a 'reminder' row in appearance_notifications).
            cursor.execute('''
                SELECT DISTINCT
                    ca.id,
                    cp.name as celebrity_name,
                    ca.show_name,
                    ca.appearance_type,
                    ca.appearance_date,
                    ca.episode_title,
                    ca.network,
                    ca.poster_url
                FROM celebrity_appearances ca
                JOIN celebrity_profiles cp ON ca.celebrity_id = cp.id
                LEFT JOIN appearance_notifications an ON (
                    an.appearance_id = ca.id
                    AND an.notification_type = 'reminder'
                )
                WHERE ca.status = 'upcoming'
                  AND ca.appearance_date = date('now')
                  AND an.id IS NULL
                ORDER BY ca.appearance_date ASC
            ''')

            upcoming = cursor.fetchall()

            if not upcoming:
                logger.info("No upcoming appearances to remind about")
                return {"success": True, "message": "No upcoming appearances", "sent": 0}

            # Create notifier
            notifier = PushoverNotifier(
                user_key=pushover_config.get('user_key'),
                api_token=pushover_config.get('api_token'),
                enabled=True,
                default_priority=0,
                include_image=pushover_config.get('include_image', True),
                unified_db=db
            )

            sent_count = 0

            for row in upcoming:
                appearance_id, celebrity_name, show_name, appearance_type, appearance_date, episode_title, network, poster_url = row

                # Format the message
                date_obj = datetime.strptime(appearance_date, '%Y-%m-%d')
                days_until = (date_obj.date() - datetime.now().date()).days

                # The TOMORROW / "in N days" branches are kept for safety,
                # though the query above only selects today's appearances.
                if days_until == 0:
                    time_str = "TODAY"
                elif days_until == 1:
                    time_str = "TOMORROW"
                else:
                    time_str = f"in {days_until} days"

                title = f"📅 {celebrity_name} Appearance {time_str}"

                if appearance_type == 'TV':
                    if episode_title:
                        message = f"{show_name}: {episode_title}"
                    else:
                        message = show_name
                    if network:
                        message += f" on {network}"
                elif appearance_type == 'Movie':
                    message = f"{show_name} (Movie)"
                elif appearance_type == 'Podcast':
                    message = f"🎙️ {show_name}"
                else:
                    message = show_name

                message += f"\n📆 {date_obj.strftime('%B %d, %Y')}"

                # Download poster image if available and enabled
                poster_temp_path = None
                if poster_url and include_poster:
                    poster_temp_path = await download_poster_image(poster_url)

                # Send notification with poster image
                try:
                    success = notifier.send_notification(
                        title=title,
                        message=message,
                        priority=0,
                        image_path=poster_temp_path
                    )
                finally:
                    # Clean up temp poster file
                    if poster_temp_path and os.path.exists(poster_temp_path):
                        try:
                            os.unlink(poster_temp_path)
                        except Exception:
                            pass

                if success:
                    sent_count += 1
                    # Record in database with full formatted message so the
                    # LEFT JOIN above suppresses a second reminder for this
                    # appearance.
                    full_message = f"📅 {celebrity_name} Appearance {time_str}\n{message}"
                    cursor.execute('''
                        INSERT INTO appearance_notifications
                        (appearance_id, celebrity_name, show_name, appearance_type,
                         appearance_date, notification_type, message, poster_url,
                         is_sent, sent_at)
                        VALUES (?, ?, ?, ?, ?, 'reminder', ?, ?, 1, CURRENT_TIMESTAMP)
                    ''', (appearance_id, celebrity_name, show_name, appearance_type,
                          appearance_date, full_message, poster_url))
                    conn.commit()
                    logger.info(f"Sent reminder for {celebrity_name} - {show_name} on {appearance_date}")

        return {
            "success": True,
            "message": f"Sent {sent_count} reminders",
            "sent": sent_count,
            "total_upcoming": len(upcoming)
        }

    except Exception as e:
        logger.error(f"Error sending appearance reminders: {e}")
        import traceback
        logger.error(traceback.format_exc())
        return {"success": False, "error": str(e), "sent": 0}
|
|
|
|
|
|
async def send_plex_match_notification(
    celebrity_name: str,
    show_name: str,
    appearance_type: str,
    appearance_date: str,
    appearance_id: int,
    poster_url: Optional[str],
    db: UnifiedDatabase
) -> bool:
    """Send push notification when a new Plex match is found.

    Args:
        celebrity_name: Name used in the notification title.
        show_name: Show/movie title for the body.
        appearance_type: 'Movie' or 'TV' (shown verbatim in the body).
        appearance_date: ISO-ish date string; only the first 10 chars are parsed.
        appearance_id: FK recorded in appearance_notifications.
        poster_url: Optional poster image to attach; downloaded to a temp file.
        db: Database used for Pushover settings and the notification record.

    Returns:
        True if the notification was sent (and recorded), False otherwise.
    """
    try:
        # Get Pushover config from SettingsManager
        settings_manager = SettingsManager(str(db.db_path))
        pushover_config = settings_manager.get('pushover', {})

        if not pushover_config.get('enabled'):
            return False

        if not pushover_config.get('user_key') or not pushover_config.get('api_token'):
            return False

        # Create notifier
        notifier = PushoverNotifier(
            user_key=pushover_config.get('user_key'),
            api_token=pushover_config.get('api_token'),
            enabled=True,
            default_priority=-1,  # Low priority for plex matches
            include_image=True,
            unified_db=db
        )

        title = f"🎬 New Plex Match: {celebrity_name}"
        message = f"{show_name} ({appearance_type})"
        if appearance_date:
            try:
                date_obj = datetime.strptime(appearance_date[:10], '%Y-%m-%d')
                message += f"\n📆 {date_obj.strftime('%B %d, %Y')}"
            except ValueError:
                # Unparseable date: skip the date line rather than fail.
                logger.debug(f"Could not parse date: {appearance_date}")
        message += "\n✅ Available in Plex"

        # Download poster image if available
        poster_temp_path = None
        if poster_url:
            poster_temp_path = await download_poster_image(poster_url)

        # Send notification with poster image
        try:
            success = notifier.send_notification(
                title=title,
                message=message,
                priority=-1,
                image_path=poster_temp_path
            )
        finally:
            # Clean up temp poster file
            if poster_temp_path and os.path.exists(poster_temp_path):
                try:
                    os.unlink(poster_temp_path)
                except Exception:
                    pass

        if success:
            # Record notification with full formatted message. The write
            # connection is opened only now — previously it was held across
            # the poster download and the Pushover HTTP call, pinning the
            # writer for the duration of network I/O.
            full_message = f"🎬 New Plex Match: {celebrity_name}\n{message}"
            with db.get_connection(for_write=True) as conn:
                cursor = conn.cursor()
                cursor.execute('''
                    INSERT INTO appearance_notifications
                    (appearance_id, celebrity_name, show_name, appearance_type,
                     appearance_date, notification_type, message, poster_url,
                     is_sent, sent_at)
                    VALUES (?, ?, ?, ?, ?, 'plex_match', ?, ?, 1, CURRENT_TIMESTAMP)
                ''', (appearance_id, celebrity_name, show_name, appearance_type,
                      appearance_date[:10] if appearance_date else None,
                      full_message, poster_url))
                conn.commit()
            logger.info(f"Sent Plex match notification for {celebrity_name} - {show_name}")

        return success

    except Exception as e:
        logger.error(f"Error sending Plex match notification: {e}")
        return False
|