"""
Dashboard API Router

Provides endpoints for dashboard-specific data like recent items across different locations.
"""
from fastapi import APIRouter, Depends, Request
|
|
from typing import Dict, Any, Optional
|
|
from slowapi import Limiter
|
|
from slowapi.util import get_remote_address
|
|
from ..core.dependencies import get_current_user, get_app_state
|
|
from ..core.exceptions import handle_exceptions
|
|
from modules.universal_logger import get_logger
|
|
|
|
# Router for the dashboard-specific endpoints, mounted under /api/dashboard.
router = APIRouter(prefix="/api/dashboard", tags=["dashboard"])

# Module logger on the shared 'API' channel.
logger = get_logger('API')

# Per-client rate limiter keyed on the caller's remote address.
limiter = Limiter(key_func=get_remote_address)
|
|
|
|
|
|
@router.get("/recent-items")
@limiter.limit("60/minute")
@handle_exceptions
async def get_recent_items(
    request: Request,
    limit: int = 20,
    since_id: Optional[int] = None,
    current_user=Depends(get_current_user)
) -> Dict[str, Any]:
    """
    Get NEW items from Media, Review, and Internet Discovery for dashboard cards.

    Uses file_inventory.id for ordering since it monotonically increases with
    insertion order. download_date from the downloads table is included for
    display but not used for ordering (batch downloads can interleave timestamps).

    Args:
        request: Incoming request (required by the slowapi rate limiter).
        limit: Max items per category. Values below 1 are coerced to 1 —
            a negative LIMIT in SQLite means "unlimited".
        since_id: Optional file_inventory ID - only return items with id > this value.
        current_user: Injected authenticated user (acts as an auth gate only).

    Returns:
        Dict with 'media', 'review' and 'internet_discovery' keys, each holding
        a total 'count' and up to `limit` 'items' sorted most-recently-added first.
    """
    # Guard: a non-positive LIMIT would be interpreted by SQLite as "no limit".
    limit = max(1, limit)

    # Compare against None rather than truthiness so since_id=0 is honoured.
    # Only the static clause text is interpolated; the value stays bound via "?".
    since_sql = "AND fi.id > ?" if since_id is not None else ""
    since_params = (since_id,) if since_id is not None else ()

    app_state = get_app_state()
    with app_state.db.get_connection() as conn:
        cursor = conn.cursor()

        # --- Media items (location='final') --------------------------------
        # ORDER BY fi.id DESC — id is monotonically increasing and reflects
        # insertion order. download_date is shown but NOT used for ordering.
        media_where = f"""
            fi.location = 'final'
            {since_sql}
            AND (fi.moved_from_review IS NULL OR fi.moved_from_review = 0)
            AND (fi.from_discovery IS NULL OR fi.from_discovery = 0)
        """
        cursor.execute(f"""
            SELECT fi.id, fi.file_path, fi.filename, fi.source, fi.platform, fi.content_type,
                   fi.file_size, COALESCE(d.download_date, fi.created_date) as added_at,
                   fi.width, fi.height
            FROM file_inventory fi
            LEFT JOIN downloads d ON d.filename = fi.filename
            WHERE {media_where}
            ORDER BY fi.id DESC
            LIMIT ?
        """, (*since_params, limit))

        media_items = [
            {
                'id': row[0],
                'file_path': row[1],
                'filename': row[2],
                'source': row[3],
                'platform': row[4],
                'media_type': row[5],
                'file_size': row[6],
                'added_at': row[7],
                'width': row[8],
                'height': row[9],
            }
            for row in cursor.fetchall()
        ]

        # Total count of new media items (same filter, no LIMIT).
        cursor.execute(
            f"SELECT COUNT(*) FROM file_inventory fi WHERE {media_where}",
            since_params,
        )
        media_count = cursor.fetchone()[0]

        # --- Review items (location='review') ------------------------------
        review_where = f"""
            fi.location = 'review'
            {since_sql}
            AND (fi.moved_from_media IS NULL OR fi.moved_from_media = 0)
        """
        cursor.execute(f"""
            SELECT fi.id, fi.file_path, fi.filename, fi.source, fi.platform, fi.content_type,
                   fi.file_size, COALESCE(d.download_date, fi.created_date) as added_at,
                   fi.width, fi.height,
                   CASE WHEN fr.id IS NOT NULL THEN 1 ELSE 0 END as face_scanned,
                   fr.has_match as face_matched, fr.confidence as face_confidence, fr.matched_person
            FROM file_inventory fi
            LEFT JOIN downloads d ON d.filename = fi.filename
            LEFT JOIN face_recognition_scans fr ON fi.file_path = fr.file_path
            WHERE {review_where}
            ORDER BY fi.id DESC
            LIMIT ?
        """, (*since_params, limit))

        review_items = []
        for row in cursor.fetchall():
            # Face-recognition details exist only once the file has been scanned.
            face_recognition = None
            if row[10]:  # face_scanned
                face_recognition = {
                    'scanned': True,
                    'matched': bool(row[11]) if row[11] is not None else False,
                    'confidence': row[12],
                    'matched_person': row[13],
                }

            review_items.append({
                'id': row[0],
                'file_path': row[1],
                'filename': row[2],
                'source': row[3],
                'platform': row[4],
                'media_type': row[5],
                'file_size': row[6],
                'added_at': row[7],
                'width': row[8],
                'height': row[9],
                'face_recognition': face_recognition,
            })

        # Total count of new review items (same filter, no LIMIT).
        cursor.execute(
            f"SELECT COUNT(*) FROM file_inventory fi WHERE {review_where}",
            since_params,
        )
        review_count = cursor.fetchone()[0]

        # --- Internet Discovery (celebrity_discovered_videos, status='new') --
        internet_discovery_items = []
        internet_discovery_count = 0
        try:
            cursor.execute("""
                SELECT
                    v.id, v.video_id, v.title, v.thumbnail, v.channel_name,
                    v.platform, v.duration, v.max_resolution, v.status,
                    v.discovered_at, v.url, v.view_count, v.upload_date,
                    c.name as celebrity_name
                FROM celebrity_discovered_videos v
                LEFT JOIN celebrity_profiles c ON v.celebrity_id = c.id
                WHERE v.status = 'new'
                ORDER BY v.id DESC
                LIMIT ?
            """, (limit,))

            internet_discovery_items = [
                {
                    'id': row[0],
                    'video_id': row[1],
                    'title': row[2],
                    'thumbnail': row[3],
                    'channel_name': row[4],
                    'platform': row[5],
                    'duration': row[6],
                    'max_resolution': row[7],
                    'status': row[8],
                    'discovered_at': row[9],
                    'url': row[10],
                    'view_count': row[11],
                    'upload_date': row[12],
                    'celebrity_name': row[13],
                }
                for row in cursor.fetchall()
            ]

            # Total count for internet discovery.
            cursor.execute("SELECT COUNT(*) FROM celebrity_discovered_videos WHERE status = 'new'")
            internet_discovery_count = cursor.fetchone()[0]
        except Exception as e:
            # Best-effort: the table might not exist if the celebrity
            # discovery feature has never been used.
            logger.warning(f"Could not fetch internet discovery items: {e}", module="Dashboard")

    return {
        'media': {
            'count': media_count,
            'items': media_items
        },
        'review': {
            'count': review_count,
            'items': review_items
        },
        'internet_discovery': {
            'count': internet_discovery_count,
            'items': internet_discovery_items
        }
    }
|
|
|
|
|
|
@router.get("/dismissed-cards")
@limiter.limit("60/minute")
@handle_exceptions
async def get_dismissed_cards(
    request: Request,
    user=Depends(get_current_user)
) -> Dict[str, Any]:
    """
    Get the user's dismissed dashboard card IDs.

    Args:
        request: Incoming request (required by the slowapi rate limiter).
        user: Injected authenticated user; preferences are keyed by username.

    Returns:
        Mapping of card name ('media', 'review', 'internet_discovery') to the
        last dismissed ID (None for cards never dismissed). The default
        mapping is also returned when no preference row exists or the stored
        JSON is unreadable — a corrupt row must not break the dashboard.
    """
    import json

    app_state = get_app_state()
    user_id = user.get('username', 'default')
    default = {'media': None, 'review': None, 'internet_discovery': None}

    with app_state.db.get_connection() as conn:
        cursor = conn.cursor()
        cursor.execute("""
            SELECT preference_value FROM user_preferences
            WHERE user_id = ? AND preference_key = 'dashboard_dismissed_cards'
        """, (user_id,))
        row = cursor.fetchone()

    if row and row[0]:
        try:
            return json.loads(row[0])
        except (json.JSONDecodeError, TypeError) as e:
            # Fall back to "nothing dismissed" instead of surfacing a 500;
            # log so the bad row can be investigated.
            logger.warning(
                f"Invalid dismissed-cards preference for user {user_id}: {e}",
                module="Dashboard"
            )

    return default
|
|
|
|
|
|
@router.post("/dismissed-cards")
@limiter.limit("30/minute")
@handle_exceptions
async def set_dismissed_cards(
    request: Request,
    data: Dict[str, Any],
    user=Depends(get_current_user)
) -> Dict[str, str]:
    """
    Persist the user's dismissed dashboard card IDs.

    Serializes the request payload to JSON and upserts it into
    user_preferences under the 'dashboard_dismissed_cards' key for the
    current user, stamping updated_at on every write.
    """
    import json

    username = user.get('username', 'default')
    serialized = json.dumps(data)

    app_state = get_app_state()
    with app_state.db.get_connection(for_write=True) as conn:
        # Upsert: insert a fresh row, or overwrite the existing value for
        # this (user, key) pair and refresh its timestamp.
        conn.cursor().execute("""
            INSERT INTO user_preferences (user_id, preference_key, preference_value, updated_at)
            VALUES (?, 'dashboard_dismissed_cards', ?, CURRENT_TIMESTAMP)
            ON CONFLICT(user_id, preference_key) DO UPDATE SET
                preference_value = excluded.preference_value,
                updated_at = CURRENT_TIMESTAMP
        """, (username, serialized))

    return {'status': 'ok'}
|