Files
media-downloader/web/backend/routers/private_gallery.py
Todd 0d7b2b1aab Initial commit
Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
2026-03-29 22:42:55 -04:00

7630 lines
276 KiB
Python

"""
Private Gallery Router
API endpoints for the Private Gallery feature:
- Authentication (setup, unlock, lock, change-password)
- Configuration management
- Relationship types CRUD
- Persons CRUD
- Media CRUD with encryption
- File upload/copy with encryption
- File serving with on-the-fly decryption
- Batch operations
- Export (decrypted downloads)
- Albums (auto-generated from persons)
- Statistics
"""
import asyncio
import hashlib
import json
import mimetypes
import os
import re
import shutil
import tempfile
import time
import uuid
from datetime import datetime, date, timedelta
from io import BytesIO
from pathlib import Path
import threading
from threading import Lock
from typing import Dict, List, Optional, Any
from zipfile import ZipFile
from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException, Query, Request, Response, UploadFile, File, Form, Header
from fastapi.responses import StreamingResponse, FileResponse
from pydantic import BaseModel, Field
from slowapi import Limiter
from slowapi.util import get_remote_address
from ..core.dependencies import get_current_user, get_app_state
from ..core.exceptions import handle_exceptions, NotFoundError, ValidationError, AuthError
from ..core.responses import message_response, now_iso8601
from modules.universal_logger import get_logger
from modules.private_gallery_crypto import get_private_gallery_crypto
# Module-level singletons shared by every endpoint in this router
logger = get_logger('API')
router = APIRouter(prefix="/api/private-gallery", tags=["Private Gallery"])
# Rate limiting keyed by the client's remote address
limiter = Limiter(key_func=get_remote_address)
# Custom header for private gallery auth
PRIVATE_GALLERY_TOKEN_HEADER = "X-Private-Gallery-Token"
# In-memory cache for decrypted post lists (avoids re-decrypting all posts on every page)
_posts_cache: Dict[str, Any] = {}
_posts_cache_time: Dict[str, float] = {}  # cache_key -> timestamp
_posts_cache_version: int = 0
_posts_cache_lock = Lock()
_POSTS_CACHE_TTL = 30  # seconds — ensures external DB changes (e.g. reddit monitor) are picked up

def _invalidate_posts_cache():
    """Drop every cached decrypted post list and bump the cache version."""
    global _posts_cache_version
    with _posts_cache_lock:
        _posts_cache.clear()
        _posts_cache_time.clear()
        _posts_cache_version += 1
# In-memory config cache (avoids DB query on every thumbnail/file request)
_config_cache: Dict[str, Any] = {}
_config_cache_time: float = 0
_config_cache_lock = Lock()
_CONFIG_CACHE_TTL = 30  # seconds

def _invalidate_config_cache():
    """Forget the cached config snapshot (call after any config write)."""
    global _config_cache_time
    with _config_cache_lock:
        _config_cache.clear()
        _config_cache_time = 0
# In-memory LRU cache for decrypted thumbnails
# Thumbnails are ~20KB each, 2000 items ≈ 40MB max memory
from collections import OrderedDict
_thumb_cache: OrderedDict = OrderedDict()  # storage_id -> (etag, bytes)
_thumb_cache_lock = Lock()
_THUMB_CACHE_MAX = 2000

def _thumb_cache_get(storage_id: str):
    """Return the cached (etag, bytes) pair or None; a hit becomes most-recently-used."""
    with _thumb_cache_lock:
        entry = _thumb_cache.get(storage_id)
        if entry is not None:
            _thumb_cache.move_to_end(storage_id)
        return entry

def _thumb_cache_put(storage_id: str, etag: str, data: bytes):
    """Insert or refresh a thumbnail, evicting the least-recently-used entry when full."""
    with _thumb_cache_lock:
        if storage_id in _thumb_cache:
            _thumb_cache.move_to_end(storage_id)
        elif len(_thumb_cache) >= _THUMB_CACHE_MAX:
            # OrderedDict keeps insertion/usage order: first item is the LRU
            _thumb_cache.popitem(last=False)
        _thumb_cache[storage_id] = (etag, data)

def _thumb_cache_invalidate(storage_id: str):
    """Remove a single thumbnail from the cache (no-op when absent)."""
    with _thumb_cache_lock:
        _thumb_cache.pop(storage_id, None)
# ============================================================================
# JOB TRACKING FOR BACKGROUND PROCESSING
# ============================================================================
_pg_jobs: Dict[str, Dict] = {}
_pg_jobs_lock = Lock()

def _create_pg_job(job_id: str, total_files: int, operation: str):
    """Register a new background job in the 'processing' state."""
    fresh = {
        'id': job_id,
        'status': 'processing',
        'operation': operation,
        'total_files': total_files,
        'processed_files': 0,
        'success_count': 0,
        'failed_count': 0,
        'duplicate_count': 0,
        'results': [],
        'current_file': None,
        'current_phase': None,
        'bytes_downloaded': 0,
        'bytes_total': 0,
        'started_at': datetime.now().isoformat(),
        'completed_at': None
    }
    with _pg_jobs_lock:
        _pg_jobs[job_id] = fresh

def _update_pg_job(job_id: str, updates: Dict):
    """Merge *updates* into an existing job's state; unknown ids are ignored."""
    with _pg_jobs_lock:
        job = _pg_jobs.get(job_id)
        if job is not None:
            job.update(updates)

def _get_pg_job(job_id: str) -> Optional[Dict]:
    """Return a shallow snapshot of a job's state, or None if unknown."""
    with _pg_jobs_lock:
        job = _pg_jobs.get(job_id)
        return None if job is None else dict(job)

def _cleanup_old_pg_jobs():
    """Drop completed jobs whose completion timestamp is over an hour old."""
    now = datetime.now()
    with _pg_jobs_lock:
        stale = []
        for jid, job in _pg_jobs.items():
            finished_at = job.get('completed_at')
            if not finished_at:
                continue
            try:
                finished = datetime.fromisoformat(finished_at)
            except (ValueError, TypeError):
                # Unparseable timestamps are left alone, matching prior behavior
                continue
            if (now - finished).total_seconds() > 3600:
                stale.append(jid)
        for jid in stale:
            del _pg_jobs[jid]
# ============================================================================
# PYDANTIC MODELS
# ============================================================================
# --- Authentication request bodies ---
class SetupRequest(BaseModel):
    # First-time setup password; 8-char minimum enforced at the API boundary
    password: str = Field(..., min_length=8)
class UnlockRequest(BaseModel):
    password: str
class ChangePasswordRequest(BaseModel):
    current_password: str
    new_password: str = Field(..., min_length=8)
# --- Configuration ---
class ConfigUpdateRequest(BaseModel):
    """Partial config update: only fields that are provided get written."""
    storage_path: Optional[str] = None
    thumbnail_path: Optional[str] = None
    organize_by_person: Optional[bool] = None
    organize_by_date: Optional[bool] = None
    auto_lock_minutes: Optional[int] = None
    duplicate_auto_select_distance: Optional[int] = Field(None, ge=0, le=12)
    min_import_resolution: Optional[int] = Field(None, ge=0, le=10000)
# --- Relationships / persons ---
class RelationshipRequest(BaseModel):
    name: str = Field(..., min_length=1, max_length=50)
    color: Optional[str] = "#6366f1"
class PersonRequest(BaseModel):
    name: str = Field(..., min_length=1, max_length=100)
    sort_name: Optional[str] = None
    relationship_id: int
    default_tag_ids: Optional[List[int]] = None
class PersonUpdateRequest(BaseModel):
    """Partial person update; None fields are left unchanged."""
    name: Optional[str] = None
    sort_name: Optional[str] = None
    relationship_id: Optional[int] = None
    default_tag_ids: Optional[List[int]] = None
# --- Media / posts ---
class MediaUpdateRequest(BaseModel):
    description: Optional[str] = None
    person_id: Optional[int] = None
    media_date: Optional[str] = None
    tag_ids: Optional[List[int]] = None
class PostUpdateRequest(BaseModel):
    description: Optional[str] = None
    person_id: Optional[int] = None
    media_date: Optional[str] = None
    tag_ids: Optional[List[int]] = None
class AttachMediaRequest(BaseModel):
    media_ids: List[int]
# --- Import sources ---
class CopyRequest(BaseModel):
    """Import files already on disk by copying them into encrypted storage."""
    source_paths: List[str]
    person_id: int
    tag_ids: List[int] = []
    media_date: Optional[str] = None
    description: Optional[str] = None
    original_filenames: Optional[Dict[str, str]] = None  # source_path -> original filename
class ImportUrlRequest(BaseModel):
    urls: List[str]
    person_id: int
    tag_ids: List[int] = []
    media_date: Optional[str] = None
    description: Optional[str] = None
class ImportDirectoryRequest(BaseModel):
    directory_path: str
    person_id: int
    tag_ids: List[int] = []
    media_date: Optional[str] = None
    description: Optional[str] = None
    recursive: bool = False
class FeaturesUpdateRequest(BaseModel):
    """Request body for updating enabled features."""
    enabled_features: List[str] = Field(..., description="List of enabled feature paths")
    feature_order: Optional[Dict[str, List[str]]] = Field(None, description="Order of features by group")
    feature_labels: Optional[Dict[str, str]] = Field(None, description="Custom labels for features: {'/path': 'Custom Label'}")
    group_order: Optional[List[str]] = Field(None, description="Order of groups: ['media', 'video', ...]")
# --- Batch operations ---
class BatchDeleteRequest(BaseModel):
    media_ids: List[int]
class BatchTagsRequest(BaseModel):
    media_ids: List[int]
    add_tag_ids: Optional[List[int]] = None
    remove_tag_ids: Optional[List[int]] = None
class BatchReadStatusRequest(BaseModel):
    post_ids: List[int]
    is_read: bool
class BatchDateRequest(BaseModel):
    media_ids: List[int]
    new_date: str
class BatchPersonRequest(BaseModel):
    media_ids: List[int]
    person_id: int
# --- Export ---
class ExportBatchRequest(BaseModel):
    media_ids: List[int]
    organize_by_date: Optional[bool] = False
    organize_by_person: Optional[bool] = False
class ExportAllRequest(BaseModel):
    organize_by_date: Optional[bool] = True
    organize_by_person: Optional[bool] = True
    include_metadata: Optional[bool] = True
# --- Posts / tags ---
class PostCreateRequest(BaseModel):
    person_id: int
    tag_ids: Optional[List[int]] = None
    media_date: Optional[str] = None
    description: Optional[str] = None
    media_ids: Optional[List[int]] = None
class TagCreateRequest(BaseModel):
    name: str = Field(..., min_length=1, max_length=100)
    color: Optional[str] = '#6b7280'
    description: Optional[str] = None
class TagUpdateRequest(BaseModel):
    name: Optional[str] = None
    color: Optional[str] = None
    description: Optional[str] = None
# --- Per-domain import auth (credentials are encrypted at rest) ---
class ImportAuthCreateRequest(BaseModel):
    domain: str = Field(..., min_length=1, max_length=253)
    auth_type: str = Field(default='basic', pattern='^(basic|cookies|both)$')
    username: Optional[str] = None
    password: Optional[str] = None
    cookies: Optional[List[dict]] = None
    user_agent: Optional[str] = None
    notes: Optional[str] = None
class ImportAuthUpdateRequest(BaseModel):
    domain: Optional[str] = None
    auth_type: Optional[str] = Field(default=None, pattern='^(basic|cookies|both)$')
    username: Optional[str] = None
    password: Optional[str] = None
    cookies: Optional[List[dict]] = None
    user_agent: Optional[str] = None
    notes: Optional[str] = None
# --- Person groups ---
class PersonGroupCreate(BaseModel):
    name: str
    description: Optional[str] = None
    min_resolution: Optional[int] = Field(0, ge=0, le=10000)
class PersonGroupUpdate(BaseModel):
    name: Optional[str] = None
    description: Optional[str] = None
    min_resolution: Optional[int] = Field(None, ge=0, le=10000)
class PersonGroupMemberAdd(BaseModel):
    person_id: int
class PersonGroupTagMemberAdd(BaseModel):
    tag_id: int
class PersonGroupRelationshipMemberAdd(BaseModel):
    relationship_id: int
class PersonGroupExcludedTagAdd(BaseModel):
    tag_id: int
# --- Reddit monitor ---
class RedditCommunityCreate(BaseModel):
    subreddit_name: str
    person_id: int
class RedditCommunityUpdate(BaseModel):
    subreddit_name: Optional[str] = None
    person_id: Optional[int] = None
    enabled: Optional[bool] = None
class RedditMonitorSettingsUpdate(BaseModel):
    enabled: Optional[bool] = None
    check_interval_hours: Optional[int] = None
    lookback_days: Optional[int] = None
class RedditCookiesUpload(BaseModel):
    cookies_json: str  # JSON string of cookies array
# --- Scraper accounts ---
class ScraperAccountCreate(BaseModel):
    username: str
    person_id: int
class ScraperAccountUpdate(BaseModel):
    person_id: Optional[int] = None
    enabled: Optional[bool] = None
# ============================================================================
# HELPER FUNCTIONS
# ============================================================================
def _get_crypto():
    """Return the shared private-gallery crypto instance (process-wide singleton)."""
    return get_private_gallery_crypto()
def _get_db():
    """Return the unified database handle from the shared application state."""
    return get_app_state().db
def _get_config(db) -> Dict[str, Any]:
    """Get private gallery configuration (cached for 30s).

    Reads all rows of private_media_config, coercing 'true'/'false' to bool
    and all-digit strings to int. A copy is returned so callers cannot mutate
    the shared cached snapshot.
    """
    global _config_cache_time
    now = time.monotonic()
    # Fast path: serve a copy of the cached snapshot while it is still fresh
    with _config_cache_lock:
        if _config_cache and (now - _config_cache_time) < _CONFIG_CACHE_TTL:
            return dict(_config_cache)
    config = {}
    # Cache miss or stale: re-read everything from the DB (deliberately outside the lock)
    with db.get_connection() as conn:
        cursor = conn.cursor()
        cursor.execute('SELECT key, value FROM private_media_config')
        for row in cursor.fetchall():
            value = row['value']
            # Convert string booleans and numbers
            if value is None:
                pass
            elif value == 'true':
                value = True
            elif value == 'false':
                value = False
            elif value.isdigit():
                value = int(value)
            config[row['key']] = value
    # Publish the fresh snapshot for subsequent callers
    with _config_cache_lock:
        _config_cache.clear()
        _config_cache.update(config)
        _config_cache_time = now
    return config
def _get_import_auth_for_url(db, crypto, url: str) -> Optional[Dict]:
    """Find matching import auth for a URL. Suffix matching, most-specific first.

    Args:
        db: Unified database handle.
        crypto: Private gallery crypto instance (must be unlocked).
        url: URL being imported.

    Returns:
        Dict with 'auth_type' plus whichever of 'username', 'password',
        'cookies', 'user_agent' are stored (decrypted), or None when no
        configured domain matches the URL's hostname.
    """
    from urllib.parse import urlparse
    try:
        parsed = urlparse(url)
        hostname = (parsed.hostname or '').lower().strip('.')
    except Exception:
        return None
    if not hostname:
        return None
    with db.get_connection() as conn:
        cursor = conn.cursor()
        # Longest domain first so 'img.example.com' wins over 'example.com'
        cursor.execute('SELECT * FROM private_gallery_import_auth ORDER BY LENGTH(domain) DESC')
        rows = cursor.fetchall()
    for row in rows:
        domain = row['domain']
        # Exact hostname match or subdomain suffix match
        if hostname == domain or hostname.endswith('.' + domain):
            result = {
                'auth_type': row['auth_type'],
            }
            if row['encrypted_username']:
                result['username'] = crypto.decrypt_field(row['encrypted_username'])
            if row['encrypted_password']:
                result['password'] = crypto.decrypt_field(row['encrypted_password'])
            if row['encrypted_cookies_json']:
                try:
                    result['cookies'] = json.loads(crypto.decrypt_field(row['encrypted_cookies_json']))
                except Exception as e:
                    # Fix: previous `except (json.JSONDecodeError, Exception)` was
                    # redundant — Exception already covers decrypt and parse errors.
                    logger.debug(f"Error decrypting cookies: {e}", module="PrivateGallery")
            if row['encrypted_user_agent']:
                result['user_agent'] = crypto.decrypt_field(row['encrypted_user_agent'])
            return result
    return None
def _set_config(db, key: str, value: Any) -> None:
    """Persist a single config key/value and drop the cached config snapshot."""
    # Booleans are stored as the strings 'true'/'false'; everything else via str()
    stored = ('true' if value else 'false') if isinstance(value, bool) else str(value)
    with db.get_connection(for_write=True) as conn:
        cursor = conn.cursor()
        cursor.execute('''
            INSERT OR REPLACE INTO private_media_config (key, value, updated_at)
            VALUES (?, ?, CURRENT_TIMESTAMP)
        ''', (key, stored))
        conn.commit()
    _invalidate_config_cache()
def _verify_gallery_token(
    token: str = Header(None, alias=PRIVATE_GALLERY_TOKEN_HEADER),
    _token: str = Query(None, description="Token via query param for img/video tags")
) -> Dict:
    """Verify the private gallery session token.
    Accepts token from either header (for API calls) or query param (for img/video tags).
    Refreshes session expiry on each valid request (activity-based timeout).

    Raises:
        AuthError: when no token is supplied, the token is invalid/expired,
            or the gallery's encryption has not been initialized (locked).
    """
    # Use header token first, fall back to query param
    actual_token = token or _token
    if not actual_token:
        raise AuthError("Private gallery authentication required")
    crypto = _get_crypto()
    session = crypto.verify_session(actual_token)
    if not session:
        raise AuthError("Invalid or expired session token")
    # A valid session is not enough: the in-memory key must also be loaded
    if not crypto.is_initialized():
        raise AuthError("Gallery is locked")
    # Refresh session expiry on activity
    db = _get_db()
    config = _get_config(db)
    auto_lock = config.get('auto_lock_minutes', 30)
    crypto.refresh_session(actual_token, auto_lock)
    return session
def _get_file_hash(file_path: Path) -> str:
    """Return the hex SHA-256 digest of the file at *file_path*, read in 64KB chunks."""
    digest = hashlib.sha256()
    with open(file_path, 'rb') as fh:
        while chunk := fh.read(65536):
            digest.update(chunk)
    return digest.hexdigest()
def _compute_perceptual_hash(file_path: Path) -> Optional[str]:
    """Compute a 16x16 dHash perceptual hash for an image or video file.

    Videos are sampled at their midpoint frame via OpenCV. Returns None when
    the extension is unsupported, optional dependencies (imagehash/PIL/cv2)
    are missing, or any decoding step fails.
    """
    try:
        import imagehash
        from PIL import Image
    except ImportError:
        return None
    ext = file_path.suffix.lower().lstrip('.')
    image_exts = {'jpg', 'jpeg', 'png', 'gif', 'webp', 'bmp', 'tiff', 'heic', 'heif', 'avif'}
    video_exts = {'mp4', 'mov', 'avi', 'mkv', 'webm', 'm4v', 'wmv', 'flv'}
    pil_image = None
    frame = None
    frame_rgb = None
    try:
        if ext in video_exts:
            try:
                import cv2
            except ImportError:
                return None
            cap = cv2.VideoCapture(str(file_path))
            if not cap.isOpened():
                return None
            # Seek to the middle of the clip for a representative frame
            total_frames = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
            cap.set(cv2.CAP_PROP_POS_FRAMES, int(total_frames * 0.5))
            grabbed, frame = cap.read()
            cap.release()
            if not grabbed or frame is None:
                return None
            frame_rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
            pil_image = Image.fromarray(frame_rgb)
        elif ext in image_exts:
            pil_image = Image.open(file_path)
        else:
            return None
        return str(imagehash.dhash(pil_image, hash_size=16))
    except Exception:
        return None
    finally:
        # Release image/frame buffers promptly — these can be large
        if pil_image is not None:
            pil_image.close()
        del pil_image, frame_rgb, frame
def _get_file_info(file_path: Path) -> Dict[str, Any]:
    """Classify a file by extension and probe its metadata.

    Returns a dict with 'file_type' ('image'/'video'/'other'), 'mime_type'
    (falling back to application/octet-stream), and best-effort 'width',
    'height', and 'duration' (None when unavailable).
    """
    ext = file_path.suffix.lower().lstrip('.')
    mime_type, _ = mimetypes.guess_type(str(file_path))
    mime_type = mime_type or 'application/octet-stream'
    image_exts = {'jpg', 'jpeg', 'png', 'gif', 'webp', 'bmp', 'tiff', 'heic', 'heif', 'avif'}
    video_exts = {'mp4', 'mov', 'avi', 'mkv', 'webm', 'm4v', 'wmv', 'flv'}
    if ext in image_exts:
        file_type = 'image'
    elif ext in video_exts:
        file_type = 'video'
    else:
        file_type = 'other'
    info: Dict[str, Any] = {
        'file_type': file_type,
        'mime_type': mime_type,
        'width': None,
        'height': None,
        'duration': None,
    }
    if file_type == 'image':
        # Dimensions via PIL; any failure leaves width/height as None
        try:
            from PIL import Image
            with Image.open(file_path) as img:
                info['width'], info['height'] = img.size
        except Exception:
            pass
    elif file_type == 'video':
        # Dimensions and duration via ffprobe; best-effort only
        try:
            import subprocess
            probe = subprocess.run([
                'ffprobe', '-v', 'quiet', '-print_format', 'json',
                '-show_streams', '-show_format', str(file_path)
            ], capture_output=True, text=True, timeout=30)
            if probe.returncode == 0:
                import json
                meta = json.loads(probe.stdout)
                video_stream = next(
                    (s for s in meta.get('streams', []) if s.get('codec_type') == 'video'),
                    None)
                if video_stream is not None:
                    info['width'] = video_stream.get('width')
                    info['height'] = video_stream.get('height')
                duration = meta.get('format', {}).get('duration')
                if duration:
                    info['duration'] = float(duration)
        except Exception:
            pass
    return info
def _generate_thumbnail(file_path: Path, output_path: Path, file_type: str) -> bool:
    """Generate a thumbnail for an image or video.

    Args:
        file_path: Source media file (already decrypted on disk).
        output_path: Destination thumbnail path; parent dirs are created.
        file_type: 'image' or 'video'; any other value is unsupported.

    Returns:
        True on success, False on failure or unsupported file_type.
        (Previously an unsupported file_type fell through and returned an
        implicit None; it now returns an explicit False — still falsy.)
    """
    try:
        output_path.parent.mkdir(parents=True, exist_ok=True)
        if file_type == 'image':
            from PIL import Image, ImageOps
            with Image.open(file_path) as img:
                # Apply EXIF orientation before resizing
                img = ImageOps.exif_transpose(img)
                img.thumbnail((400, 400))
                # Convert to RGB if necessary (for JPEG)
                if img.mode in ('RGBA', 'P'):
                    img = img.convert('RGB')
                img.save(output_path, 'JPEG', quality=85)
            return True
        elif file_type == 'video':
            import subprocess
            # Grab a single frame at t=1s, scaled to fit 400px wide
            result = subprocess.run([
                'ffmpeg', '-y', '-i', str(file_path),
                '-ss', '00:00:01', '-vframes', '1',
                '-vf', 'scale=400:-1:force_original_aspect_ratio=decrease',
                str(output_path)
            ], capture_output=True, timeout=30)
            return result.returncode == 0 and output_path.exists()
        return False
    except Exception as e:
        logger.error(f"Thumbnail generation failed: {e}")
        return False
def _extract_date_from_filename(filename: str) -> Optional[str]:
    """Extract date and optionally time from filename patterns.

    Returns "YYYY-MM-DDTHH:MM:SS" when a time component is found, otherwise
    "YYYY-MM-DD", or None when nothing plausible matches. Only years
    2000-2100 are accepted. Candidates are validated against the real
    calendar via datetime() — the previous range-only check (day <= 31)
    accepted impossible dates such as Feb 31.
    """
    # Patterns with date and time (6 groups: year, month, day, hour, minute, second)
    patterns_with_time = [
        # IMG_20260115_143022.jpg or video_20260115_143022.mp4
        r'(?:IMG|VID|video|photo)?[-_]?(\d{4})(\d{2})(\d{2})[-_](\d{2})(\d{2})(\d{2})',
        # 2026-01-15_14-30-22.jpg or 2026_01_15_14_30_22.jpg
        r'(\d{4})[-_](\d{2})[-_](\d{2})[-_](\d{2})[-_](\d{2})[-_](\d{2})',
        # 20260115143022.jpg (all digits)
        r'(\d{4})(\d{2})(\d{2})(\d{2})(\d{2})(\d{2})',
    ]
    # Patterns with date only (3 groups: year, month, day)
    patterns_date_only = [
        r'(\d{4})[-_]?(\d{2})[-_]?(\d{2})',  # 2026-01-15 or 20260115
        r'IMG[-_]?(\d{4})(\d{2})(\d{2})',  # IMG_20260115
        r'(\d{2})[-_](\d{2})[-_](\d{4})',  # 15-01-2026
    ]
    # First try patterns with time
    for pattern in patterns_with_time:
        match = re.search(pattern, filename)
        if not match:
            continue
        groups = match.groups()
        try:
            year, month, day = int(groups[0]), int(groups[1]), int(groups[2])
            hour, minute, second = int(groups[3]), int(groups[4]), int(groups[5])
            if 2000 <= year <= 2100:
                # Raises ValueError for impossible dates/times -> try next pattern
                datetime(year, month, day, hour, minute, second)
                return f"{year:04d}-{month:02d}-{day:02d}T{hour:02d}:{minute:02d}:{second:02d}"
        except (ValueError, IndexError):
            continue
    # Fall back to date-only patterns
    for pattern in patterns_date_only:
        match = re.search(pattern, filename)
        if not match:
            continue
        groups = match.groups()
        try:
            # A 4-digit first group means year-first ordering; else day-first
            if len(groups[0]) == 4:
                year, month, day = int(groups[0]), int(groups[1]), int(groups[2])
            else:
                day, month, year = int(groups[0]), int(groups[1]), int(groups[2])
            if 2000 <= year <= 2100:
                datetime(year, month, day)
                return f"{year:04d}-{month:02d}-{day:02d}"
        except (ValueError, IndexError):
            continue
    return None
def _extract_date_from_exif(file_path: Path) -> Optional[str]:
    """Extract date and time from EXIF metadata.

    Returns "YYYY-MM-DD" or "YYYY-MM-DDTHH:MM:SS" from the first matching
    date tag found, or None when the file has no usable EXIF data.
    """
    try:
        from PIL import Image
        from PIL.ExifTags import TAGS
        with Image.open(file_path) as img:
            # NOTE(review): _getexif() is a private PIL API; returns None for
            # many formats — verify against the installed Pillow version
            exif = img._getexif()
            if exif:
                for tag_id, value in exif.items():
                    tag = TAGS.get(tag_id, tag_id)
                    if tag in ('DateTimeOriginal', 'DateTime', 'DateTimeDigitized'):
                        if value:
                            # Format: "2026:01:15 14:30:22"
                            parts = value.split(' ')
                            date_part = parts[0].replace(':', '-')
                            if len(parts) > 1 and parts[1]:
                                return f"{date_part}T{parts[1]}"
                            return date_part
    except Exception:
        # Best-effort: any decode/IO failure simply means "no date found"
        pass
    return None
# ============================================================================
# JOB STATUS ENDPOINT
# ============================================================================
@router.get("/job-status/{job_id}")
@limiter.limit("120/minute")
@handle_exceptions
async def get_gallery_job_status(
request: Request,
job_id: str,
current_user: Dict = Depends(get_current_user),
session: Dict = Depends(_verify_gallery_token)
):
"""Get the status of a private gallery background job."""
_cleanup_old_pg_jobs()
job = _get_pg_job(job_id)
if not job:
raise NotFoundError(f"Job '{job_id}' not found")
return job
# ============================================================================
# AUTHENTICATION ENDPOINTS (No gallery auth required)
# ============================================================================
@router.get("/status")
@limiter.limit("60/minute")
@handle_exceptions
async def get_status(
request: Request,
current_user: Dict = Depends(get_current_user)
):
"""Check if setup is complete and current lock status."""
db = _get_db()
crypto = _get_crypto()
config = _get_config(db)
# Check if the user has a valid session token (not just if crypto is initialized)
is_unlocked = False
token = request.headers.get('X-Private-Gallery-Token')
if token and crypto.is_initialized():
session = crypto.verify_session(token)
is_unlocked = session is not None
# Refresh session expiry on status check (keeps session alive during active use)
if is_unlocked:
auto_lock = config.get('auto_lock_minutes', 30)
crypto.refresh_session(token, auto_lock)
return {
"is_setup_complete": config.get('is_setup_complete', False),
"is_unlocked": is_unlocked,
"active_sessions": crypto.get_active_session_count()
}
@router.post("/setup")
@limiter.limit("5/minute")
@handle_exceptions
async def setup_gallery(
request: Request,
body: SetupRequest,
current_user: Dict = Depends(get_current_user)
):
"""First-time setup - create password and encryption salt."""
db = _get_db()
crypto = _get_crypto()
config = _get_config(db)
if config.get('is_setup_complete', False):
raise ValidationError("Gallery is already set up")
# Hash password
password_hash = crypto.hash_password(body.password)
_set_config(db, 'password_hash', password_hash)
# Generate and store encryption salt
salt = crypto.generate_salt()
import base64
_set_config(db, 'encryption_salt', base64.b64encode(salt).decode('utf-8'))
# Create default relationship types
relationships = [
('Friend', '#10b981'),
('Family', '#3b82f6'),
('Coworker', '#f59e0b'),
('Acquaintance', '#8b5cf6'),
('Other', '#6b7280')
]
# Initialize encryption to encrypt relationship names
crypto.initialize_encryption(body.password, salt)
with db.get_connection(for_write=True) as conn:
cursor = conn.cursor()
for name, color in relationships:
encrypted_name = crypto.encrypt_field(name)
cursor.execute('''
INSERT INTO private_media_relationships (encrypted_name, color)
VALUES (?, ?)
''', (encrypted_name, color))
conn.commit()
# Create storage directories
storage_path = Path(config.get('storage_path', '/opt/immich/private'))
(storage_path / 'data').mkdir(parents=True, exist_ok=True)
(storage_path / 'thumbs').mkdir(parents=True, exist_ok=True)
# Mark setup complete
_set_config(db, 'is_setup_complete', True)
# Create session token
auto_lock = config.get('auto_lock_minutes', 30)
token = crypto.create_session(current_user.get('sub', 'user'), auto_lock)
logger.info("Private gallery setup completed")
return {
"message": "Gallery setup complete",
"token": token
}
@router.post("/unlock")
@limiter.limit("10/minute")
@handle_exceptions
async def unlock_gallery(
request: Request,
body: UnlockRequest,
current_user: Dict = Depends(get_current_user)
):
"""Unlock the gallery with password."""
db = _get_db()
crypto = _get_crypto()
config = _get_config(db)
if not config.get('is_setup_complete', False):
raise ValidationError("Gallery is not set up yet")
# Verify password (use 403 not 401 so frontend doesn't clear main auth session)
password_hash = config.get('password_hash', '')
if not crypto.verify_password(body.password, password_hash):
raise HTTPException(status_code=403, detail="Invalid password")
# Get salt and initialize encryption
import base64
salt = base64.b64decode(config.get('encryption_salt', ''))
crypto.initialize_encryption(body.password, salt)
# Create session token
auto_lock = config.get('auto_lock_minutes', 30)
token = crypto.create_session(current_user.get('sub', 'user'), auto_lock)
logger.info("Private gallery unlocked")
# Export key files for background services (Reddit monitor + scraper bridge)
try:
from modules.private_gallery_crypto import export_key_to_file
from modules.scraper_gallery_bridge import SCRAPER_BRIDGE_KEY_FILE
export_key_to_file(SCRAPER_BRIDGE_KEY_FILE)
except Exception:
pass
# One-time migration: convert single-shot encrypted files >50MB to chunked format
# for streaming playback. Runs once per API process on first unlock.
if getattr(unlock_gallery, '_migration_done', False):
return {"message": "Gallery unlocked", "token": token}
unlock_gallery._migration_done = True
def _auto_migrate_chunked():
try:
storage_path = Path(config.get('storage_path', '/opt/immich/private'))
data_path = storage_path / 'data'
min_size = 50 * 1024 * 1024
with db.get_connection() as conn:
cursor = conn.cursor()
cursor.execute('SELECT id, storage_id, file_size FROM private_media WHERE file_size > ? ORDER BY file_size ASC', (min_size,))
rows = cursor.fetchall()
to_migrate = []
for row in rows:
enc_file = data_path / f"{row['storage_id']}.enc"
if enc_file.exists() and not crypto._is_chunked_format(enc_file):
to_migrate.append(row)
if not to_migrate:
return
logger.info(f"Auto-migrating {len(to_migrate)} files to chunked encryption format")
for row in to_migrate:
enc_file = data_path / f"{row['storage_id']}.enc"
try:
if crypto.re_encrypt_to_chunked(enc_file):
logger.info(f"Migrated ID {row['id']} ({row['file_size']/1e6:.0f}MB) to chunked format")
except Exception as e:
logger.error(f"Migration failed for ID {row['id']}: {e}")
logger.info("Chunked encryption migration complete")
except Exception as e:
logger.error(f"Auto-migration failed: {e}")
import threading
threading.Thread(target=_auto_migrate_chunked, daemon=True).start()
return {
"message": "Gallery unlocked",
"token": token
}
@router.post("/lock")
@limiter.limit("30/minute")
@handle_exceptions
async def lock_gallery(
request: Request,
token: str = Header(None, alias=PRIVATE_GALLERY_TOKEN_HEADER),
current_user: Dict = Depends(get_current_user)
):
"""Lock the gallery and invalidate session."""
crypto = _get_crypto()
if token:
crypto.invalidate_session(token)
crypto.invalidate_all_sessions()
crypto.clear_encryption()
logger.info("Private gallery locked")
return message_response("Gallery locked")
@router.post("/change-password")
@limiter.limit("5/minute")
@handle_exceptions
async def change_password(
request: Request,
body: ChangePasswordRequest,
current_user: Dict = Depends(get_current_user),
session: Dict = Depends(_verify_gallery_token)
):
"""Change the gallery password."""
db = _get_db()
crypto = _get_crypto()
config = _get_config(db)
# Verify current password
password_hash = config.get('password_hash', '')
if not crypto.verify_password(body.current_password, password_hash):
raise AuthError("Current password is incorrect")
# Hash new password
new_hash = crypto.hash_password(body.new_password)
_set_config(db, 'password_hash', new_hash)
# Note: We keep the same salt to avoid re-encrypting all data
# The encryption key will be different when unlocked with new password
# This means existing encrypted data needs to be re-encrypted
# For simplicity in this implementation, we'll keep the salt the same
# In a production system, you might want to re-encrypt all data
logger.info("Private gallery password changed")
return message_response("Password changed successfully")
# ============================================================================
# CONFIGURATION ENDPOINTS
# ============================================================================
@router.get("/config")
@limiter.limit("60/minute")
@handle_exceptions
async def get_config(
request: Request,
current_user: Dict = Depends(get_current_user),
session: Dict = Depends(_verify_gallery_token)
):
"""Get gallery configuration."""
db = _get_db()
config = _get_config(db)
# Remove sensitive fields
safe_config = {k: v for k, v in config.items()
if k not in ('password_hash', 'encryption_salt')}
return {"config": safe_config}
@router.put("/config")
@limiter.limit("30/minute")
@handle_exceptions
async def update_config(
request: Request,
body: ConfigUpdateRequest,
current_user: Dict = Depends(get_current_user),
session: Dict = Depends(_verify_gallery_token)
):
"""Update gallery configuration."""
db = _get_db()
if body.storage_path is not None:
_set_config(db, 'storage_path', body.storage_path)
Path(body.storage_path).mkdir(parents=True, exist_ok=True)
if body.thumbnail_path is not None:
_set_config(db, 'thumbnail_path', body.thumbnail_path)
Path(body.thumbnail_path).mkdir(parents=True, exist_ok=True)
if body.organize_by_person is not None:
_set_config(db, 'organize_by_person', body.organize_by_person)
if body.organize_by_date is not None:
_set_config(db, 'organize_by_date', body.organize_by_date)
if body.auto_lock_minutes is not None:
_set_config(db, 'auto_lock_minutes', body.auto_lock_minutes)
if body.duplicate_auto_select_distance is not None:
_set_config(db, 'duplicate_auto_select_distance', body.duplicate_auto_select_distance)
if body.min_import_resolution is not None:
_set_config(db, 'min_import_resolution', body.min_import_resolution)
return message_response("Configuration updated")
# ============================================================================
# FEATURES ENDPOINTS
# ============================================================================
# Default list of all available features (all enabled by default)
# These paths must match App.tsx menu items
# NOTE(review): this list is the source of truth for the "auto-enable new
# features" logic in get_features — keep it in sync with the frontend routes.
ALL_FEATURES = [
    # Primary nav (Media section)
    '/downloads',
    '/gallery',
    '/review',
    # Video dropdown
    '/videos',
    '/celebrities',
    '/queue',
    '/video/channel-monitors',
    # Tools dropdown
    '/import',
    '/faces',
    '/scheduler',
    '/appearances',
    '/press',
    '/discovery',
    '/recycle-bin',
    # Analytics dropdown
    '/analytics',
    '/health',
    '/monitoring',
    '/notifications',
    # Paid Content dropdown
    '/paid-content',
    '/paid-content/feed',
    '/paid-content/gallery',
    '/paid-content/messages',
    '/paid-content/creators',
    '/paid-content/add',
    '/paid-content/queue',
    '/paid-content/notifications',
    '/paid-content/settings',
    '/paid-content/watch-later',
    '/paid-content/analytics',
    '/paid-content/recycle',
    # Private Gallery
    '/private-gallery',
    '/private-gallery/config',
    # System dropdown (always enabled - can't disable config)
    '/config',
    '/platforms',
    '/scrapers',
    '/logs',
    '/changelog',
]
@router.get("/features")
@limiter.limit("60/minute")
@handle_exceptions
async def get_features(
    request: Request,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Get list of enabled features.
    Returns both the list of all available features and which ones are enabled,
    plus saved ordering, custom labels, and group ordering. Every setting lives
    in the config table as a JSON-encoded string; missing or corrupt values
    fall back to sensible defaults.
    """
    import json
    db = _get_db()
    config = _get_config(db)

    def _load_json(key, fallback):
        # Decode a JSON config value; absence or corruption yields `fallback`.
        raw = config.get(key)
        if raw and isinstance(raw, str):
            try:
                return json.loads(raw)
            except json.JSONDecodeError:
                pass
        return fallback

    raw_enabled = config.get('enabled_features')
    if raw_enabled and isinstance(raw_enabled, str):
        try:
            enabled_features = json.loads(raw_enabled)
            # Auto-enable only truly new features: anything not present in the
            # snapshot of features known at last save time.
            known_features = _load_json('known_features', [])
            for path in ALL_FEATURES:
                if path not in enabled_features and path not in known_features:
                    enabled_features.append(path)
            # Filter out stale/removed features
            enabled_features = [p for p in enabled_features if p in ALL_FEATURES]
        except json.JSONDecodeError:
            enabled_features = ALL_FEATURES.copy()
    else:
        enabled_features = ALL_FEATURES.copy()
    return {
        "all_features": ALL_FEATURES,
        "enabled_features": enabled_features,
        "feature_order": _load_json('feature_order', {}),
        "feature_labels": _load_json('feature_labels', {}),
        "group_order": _load_json('group_order', [])
    }
@router.put("/features")
@limiter.limit("30/minute")
@handle_exceptions
async def update_features(
    request: Request,
    body: FeaturesUpdateRequest,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Update the list of enabled features.
    Only features in the ALL_FEATURES list can be enabled/disabled.
    System features like /config cannot be disabled.
    """
    import json
    db = _get_db()
    # Drop unknown paths silently, then deduplicate preserving first occurrence.
    enabled = list(dict.fromkeys(f for f in body.enabled_features if f in ALL_FEATURES))
    # /config must always remain enabled so the user can't lock themselves out.
    if '/config' not in enabled:
        enabled.append('/config')
    _set_config(db, 'enabled_features', json.dumps(enabled))
    # Snapshot the feature set known at save time; later saves use it to tell
    # genuinely new features apart from deliberately disabled ones.
    _set_config(db, 'known_features', json.dumps(ALL_FEATURES.copy()))
    # Ordering, labels and group order default to empty when not provided.
    feature_order = body.feature_order or {}
    _set_config(db, 'feature_order', json.dumps(feature_order))
    feature_labels = body.feature_labels or {}
    _set_config(db, 'feature_labels', json.dumps(feature_labels))
    group_order = body.group_order or []
    _set_config(db, 'group_order', json.dumps(group_order))
    return {
        "message": "Features updated",
        "enabled_features": enabled,
        "feature_order": feature_order,
        "feature_labels": feature_labels,
        "group_order": group_order
    }
@router.get("/features/public")
@limiter.limit("120/minute")
@handle_exceptions
async def get_features_public(
    request: Request,
    current_user: Dict = Depends(get_current_user)
):
    """Get enabled features without requiring private gallery auth.
    This endpoint is used by the frontend to determine which menu items to show.
    It doesn't expose the full feature list, just which ones are enabled,
    along with saved ordering, labels, and group ordering.
    """
    import json
    db = _get_db()
    config = _get_config(db)

    def _cfg_json(key, default):
        # Parse a JSON-encoded config value; bad/missing data yields `default`.
        raw = config.get(key)
        if raw and isinstance(raw, str):
            try:
                return json.loads(raw)
            except json.JSONDecodeError:
                pass
        return default

    raw_enabled = config.get('enabled_features')
    if raw_enabled and isinstance(raw_enabled, str):
        try:
            enabled_features = json.loads(raw_enabled)
            # Features added to the code after the user's last save get enabled
            # automatically; previously-known ones keep their saved state.
            known_features = _cfg_json('known_features', [])
            for path in ALL_FEATURES:
                if path not in enabled_features and path not in known_features:
                    enabled_features.append(path)
            # Prune entries for routes that no longer exist.
            enabled_features = [p for p in enabled_features if p in ALL_FEATURES]
        except json.JSONDecodeError:
            enabled_features = ALL_FEATURES.copy()
    else:
        enabled_features = ALL_FEATURES.copy()
    return {
        "enabled_features": enabled_features,
        "feature_order": _cfg_json('feature_order', {}),
        "feature_labels": _cfg_json('feature_labels', {}),
        "group_order": _cfg_json('group_order', [])
    }
@router.post("/features/reset")
@limiter.limit("10/minute")
@handle_exceptions
async def reset_features(
    request: Request,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Reset all feature settings to defaults.
    This enables all features, clears custom labels, and resets ordering.
    """
    import json
    db = _get_db()
    # Defaults: everything enabled, no custom ordering, labels, or group order.
    defaults = {
        'enabled_features': ALL_FEATURES,
        'feature_order': {},
        'feature_labels': {},
        'group_order': [],
    }
    for key, value in defaults.items():
        _set_config(db, key, json.dumps(value))
    return {
        "message": "Features reset to defaults",
        "enabled_features": ALL_FEATURES,
        "feature_order": {},
        "feature_labels": {},
        "group_order": []
    }
# ============================================================================
# RELATIONSHIP ENDPOINTS
# ============================================================================
@router.get("/relationships")
@limiter.limit("60/minute")
@handle_exceptions
async def get_relationships(
    request: Request,
    assigned_only: Optional[bool] = None,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Get all relationship types. If assigned_only=true, only return relationships that have persons assigned."""
    db = _get_db()
    crypto = _get_crypto()
    with db.get_connection() as conn:
        cursor = conn.cursor()
        if assigned_only:
            cursor.execute('''
                SELECT DISTINCT r.id, r.encrypted_name, r.color, r.created_at
                FROM private_media_relationships r
                WHERE r.id IN (
                    SELECT relationship_id FROM private_media_persons
                )
                ORDER BY r.id
            ''')
        else:
            cursor.execute('''
                SELECT id, encrypted_name, color, created_at
                FROM private_media_relationships
                ORDER BY id
            ''')
        rows = cursor.fetchall()
    relationships = [
        {
            'id': rec['id'],
            'name': crypto.decrypt_field(rec['encrypted_name']),
            'color': rec['color'],
            'created_at': rec['created_at'],
        }
        for rec in rows
    ]
    # Names only exist after decryption, so the final ordering happens here.
    relationships.sort(key=lambda r: r['name'].lower())
    return {"relationships": relationships}
@router.post("/relationships")
@limiter.limit("30/minute")
@handle_exceptions
async def create_relationship(
    request: Request,
    body: RelationshipRequest,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Create a new relationship type."""
    db = _get_db()
    crypto = _get_crypto()
    # Only the ciphertext of the name ever reaches the database.
    name_ciphertext = crypto.encrypt_field(body.name)
    with db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        cur.execute('''
            INSERT INTO private_media_relationships (encrypted_name, color)
            VALUES (?, ?)
        ''', (name_ciphertext, body.color))
        new_id = cur.lastrowid
        conn.commit()
    return {
        "id": new_id,
        "name": body.name,
        "color": body.color
    }
@router.put("/relationships/{relationship_id}")
@limiter.limit("30/minute")
@handle_exceptions
async def update_relationship(
    request: Request,
    relationship_id: int,
    body: RelationshipRequest,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Update a relationship type."""
    db = _get_db()
    crypto = _get_crypto()
    name_ciphertext = crypto.encrypt_field(body.name)
    with db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        cur.execute('''
            UPDATE private_media_relationships
            SET encrypted_name = ?, color = ?
            WHERE id = ?
        ''', (name_ciphertext, body.color, relationship_id))
        # rowcount == 0 means no row matched the id.
        if cur.rowcount == 0:
            raise NotFoundError(f"Relationship {relationship_id} not found")
        conn.commit()
    return message_response("Relationship updated")
@router.delete("/relationships/{relationship_id}")
@limiter.limit("30/minute")
@handle_exceptions
async def delete_relationship(
    request: Request,
    relationship_id: int,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Delete a relationship type (fails if persons exist)."""
    db = _get_db()
    with db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        # Refuse deletion while any person still references this relationship.
        cur.execute('''
            SELECT COUNT(*) as count FROM private_media_persons
            WHERE relationship_id = ?
        ''', (relationship_id,))
        in_use = cur.fetchone()['count']
        if in_use > 0:
            raise ValidationError("Cannot delete relationship type that has persons assigned")
        cur.execute('DELETE FROM private_media_relationships WHERE id = ?', (relationship_id,))
        if cur.rowcount == 0:
            raise NotFoundError(f"Relationship {relationship_id} not found")
        conn.commit()
    return message_response("Relationship deleted")
# ============================================================================
# PERSON ENDPOINTS
# ============================================================================
@router.get("/persons")
@limiter.limit("60/minute")
@handle_exceptions
async def get_persons(
    request: Request,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Get all persons with relationship info and their default tag ids.

    Names are stored encrypted, so rows are decrypted here and the final
    ordering (by sort_name, falling back to display name) happens in Python.
    Default tags are fetched with a single batch query rather than one query
    per person.
    """
    db = _get_db()
    crypto = _get_crypto()
    # Ensure default tags table exists (migration safety)
    with db.get_connection(for_write=True) as conn:
        conn.cursor().execute('''
            CREATE TABLE IF NOT EXISTS private_media_person_default_tags (
                person_id INTEGER NOT NULL, tag_id INTEGER NOT NULL,
                PRIMARY KEY (person_id, tag_id),
                FOREIGN KEY (person_id) REFERENCES private_media_persons(id) ON DELETE CASCADE,
                FOREIGN KEY (tag_id) REFERENCES private_gallery_tags(id) ON DELETE CASCADE
            )
        ''')
        conn.commit()
    with db.get_connection() as conn:
        cursor = conn.cursor()
        cursor.execute('''
            SELECT p.id, p.encrypted_name, p.encrypted_sort_name,
                   p.relationship_id, p.created_at, p.updated_at,
                   r.encrypted_name as rel_encrypted_name, r.color as rel_color
            FROM private_media_persons p
            JOIN private_media_relationships r ON p.relationship_id = r.id
            ORDER BY p.encrypted_sort_name, p.encrypted_name
        ''')
        rows = cursor.fetchall()
        # Batch-query default tags for all persons
        cursor.execute('SELECT person_id, tag_id FROM private_media_person_default_tags')
        default_tags_rows = cursor.fetchall()
    # Build person_id -> [tag_ids] map (setdefault avoids the manual
    # key-initialization dance).
    default_tags_map: Dict[int, List[int]] = {}
    for dt_row in default_tags_rows:
        default_tags_map.setdefault(dt_row['person_id'], []).append(dt_row['tag_id'])
    persons = [
        {
            'id': row['id'],
            'name': crypto.decrypt_field(row['encrypted_name']),
            'sort_name': crypto.decrypt_field(row['encrypted_sort_name']) if row['encrypted_sort_name'] else None,
            'relationship_id': row['relationship_id'],
            'relationship': {
                'id': row['relationship_id'],
                'name': crypto.decrypt_field(row['rel_encrypted_name']),
                'color': row['rel_color']
            },
            'default_tag_ids': default_tags_map.get(row['id'], []),
            'created_at': row['created_at'],
            'updated_at': row['updated_at']
        }
        for row in rows
    ]
    # Sort by decrypted name (the SQL ORDER BY only gives a stable ciphertext
    # order; it cannot sort by plaintext).
    persons.sort(key=lambda p: (p.get('sort_name') or p['name']).lower())
    return {"persons": persons}
@router.post("/persons")
@limiter.limit("30/minute")
@handle_exceptions
async def create_person(
    request: Request,
    body: PersonRequest,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Create a new person, optionally seeding their default tag ids."""
    db = _get_db()
    crypto = _get_crypto()
    name_ct = crypto.encrypt_field(body.name)
    # An absent/empty sort name is stored as NULL, not an encrypted "".
    sort_name_ct = crypto.encrypt_field(body.sort_name) if body.sort_name else None
    with db.get_connection(for_write=True) as conn:
        cursor = conn.cursor()
        # Verify relationship exists
        cursor.execute('SELECT id FROM private_media_relationships WHERE id = ?', (body.relationship_id,))
        if cursor.fetchone() is None:
            raise ValidationError(f"Relationship {body.relationship_id} not found")
        cursor.execute('''
            INSERT INTO private_media_persons (encrypted_name, encrypted_sort_name, relationship_id)
            VALUES (?, ?, ?)
        ''', (name_ct, sort_name_ct, body.relationship_id))
        person_id = cursor.lastrowid
        if body.default_tag_ids:
            # Table may not exist on older databases (migration safety).
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS private_media_person_default_tags (
                    person_id INTEGER NOT NULL, tag_id INTEGER NOT NULL,
                    PRIMARY KEY (person_id, tag_id),
                    FOREIGN KEY (person_id) REFERENCES private_media_persons(id) ON DELETE CASCADE,
                    FOREIGN KEY (tag_id) REFERENCES private_gallery_tags(id) ON DELETE CASCADE
                )
            ''')
            for tag_id in body.default_tag_ids:
                cursor.execute('''
                    INSERT OR IGNORE INTO private_media_person_default_tags (person_id, tag_id)
                    VALUES (?, ?)
                ''', (person_id, tag_id))
        conn.commit()
    return {
        "id": person_id,
        "name": body.name,
        "sort_name": body.sort_name,
        "relationship_id": body.relationship_id,
        "default_tag_ids": body.default_tag_ids or []
    }
@router.put("/persons/{person_id}")
@limiter.limit("30/minute")
@handle_exceptions
async def update_person(
    request: Request,
    person_id: int,
    body: PersonUpdateRequest,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Update a person's name, sort name, relationship and/or default tags.

    When default_tag_ids is provided, the stored set is replaced wholesale
    (delete-and-reinsert) and the new tags are also applied to every existing
    post belonging to this person.

    Fix: previously a request containing ONLY default_tag_ids for a
    nonexistent person "succeeded" silently (no existence check on that path);
    the person's existence is now verified up front so it returns 404.

    Raises:
        ValidationError: if the body contains nothing to update.
        NotFoundError: if the person does not exist.
    """
    db = _get_db()
    crypto = _get_crypto()
    updates = []
    params = []
    if body.name is not None:
        updates.append("encrypted_name = ?")
        params.append(crypto.encrypt_field(body.name))
    if body.sort_name is not None:
        updates.append("encrypted_sort_name = ?")
        # An empty sort name clears the stored value rather than encrypting "".
        params.append(crypto.encrypt_field(body.sort_name) if body.sort_name else None)
    if body.relationship_id is not None:
        updates.append("relationship_id = ?")
        params.append(body.relationship_id)
    if not updates and body.default_tag_ids is None:
        raise ValidationError("No fields to update")
    with db.get_connection(for_write=True) as conn:
        cursor = conn.cursor()
        # Verify existence up front so tags-only updates also 404 correctly.
        cursor.execute('SELECT id FROM private_media_persons WHERE id = ?', (person_id,))
        if not cursor.fetchone():
            raise NotFoundError(f"Person {person_id} not found")
        if updates:
            updates.append("updated_at = CURRENT_TIMESTAMP")
            params.append(person_id)
            cursor.execute(f'''
                UPDATE private_media_persons
                SET {", ".join(updates)}
                WHERE id = ?
            ''', params)
        # Sync default tags (delete-and-reinsert)
        if body.default_tag_ids is not None:
            # Ensure table exists (migration safety)
            cursor.execute('''
                CREATE TABLE IF NOT EXISTS private_media_person_default_tags (
                    person_id INTEGER NOT NULL, tag_id INTEGER NOT NULL,
                    PRIMARY KEY (person_id, tag_id),
                    FOREIGN KEY (person_id) REFERENCES private_media_persons(id) ON DELETE CASCADE,
                    FOREIGN KEY (tag_id) REFERENCES private_gallery_tags(id) ON DELETE CASCADE
                )
            ''')
            cursor.execute('DELETE FROM private_media_person_default_tags WHERE person_id = ?', (person_id,))
            cursor.executemany('''
                INSERT OR IGNORE INTO private_media_person_default_tags (person_id, tag_id)
                VALUES (?, ?)
            ''', [(person_id, tag_id) for tag_id in body.default_tag_ids])
            # Apply the new default tags to all existing posts for this person.
            cursor.execute('SELECT id FROM private_media_posts WHERE person_id = ?', (person_id,))
            post_ids = [row['id'] for row in cursor.fetchall()]
            cursor.executemany('''
                INSERT OR IGNORE INTO private_media_post_tags (post_id, tag_id)
                VALUES (?, ?)
            ''', [(post_id, tag_id) for post_id in post_ids for tag_id in body.default_tag_ids])
        conn.commit()
    return message_response("Person updated")
@router.delete("/persons/{person_id}")
@limiter.limit("30/minute")
@handle_exceptions
async def delete_person(
    request: Request,
    person_id: int,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Delete a person (media will have person_id set to NULL)."""
    db = _get_db()
    with db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        cur.execute('DELETE FROM private_media_persons WHERE id = ?', (person_id,))
        # A zero rowcount means the id never existed.
        if not cur.rowcount:
            raise NotFoundError(f"Person {person_id} not found")
        conn.commit()
    return message_response("Person deleted")
# ============================================================================
# PERSON GROUP ENDPOINTS (Encrypted)
# ============================================================================
@router.get("/person-groups")
@limiter.limit("60/minute")
@handle_exceptions
async def list_person_groups(
    request: Request,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """List all person groups with member counts."""
    db = _get_db()
    crypto = _get_crypto()
    with db.get_connection() as conn:
        cursor = conn.cursor()
        cursor.execute('''
            SELECT g.id, g.encrypted_name, g.encrypted_description, g.created_at, g.updated_at,
                   g.min_resolution,
                   (SELECT COUNT(*) FROM private_media_person_group_members WHERE group_id = g.id) as member_count,
                   (SELECT COUNT(*) FROM private_media_person_group_tag_members WHERE group_id = g.id) as tag_member_count,
                   (SELECT COUNT(*) FROM private_media_person_group_relationship_members WHERE group_id = g.id) as relationship_member_count,
                   (SELECT COUNT(*) FROM private_media_person_group_excluded_tags WHERE group_id = g.id) as excluded_tag_count
            FROM private_media_person_groups g
            ORDER BY g.created_at DESC
        ''')
        rows = cursor.fetchall()
    groups = [
        {
            'id': rec['id'],
            'name': crypto.decrypt_field(rec['encrypted_name']),
            'description': crypto.decrypt_field(rec['encrypted_description']) if rec['encrypted_description'] else None,
            'member_count': rec['member_count'],
            'tag_member_count': rec['tag_member_count'],
            'relationship_member_count': rec['relationship_member_count'],
            'excluded_tag_count': rec['excluded_tag_count'],
            'min_resolution': rec['min_resolution'] or 0,
            'created_at': rec['created_at'],
            'updated_at': rec['updated_at'],
        }
        for rec in rows
    ]
    # Names are only known after decryption, so sort in Python.
    groups.sort(key=lambda g: g['name'].lower())
    return {"groups": groups}
@router.get("/person-groups/{group_id}")
@limiter.limit("60/minute")
@handle_exceptions
async def get_person_group(
    request: Request,
    group_id: int,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Get a single person group with its members.

    Returns the group plus four member lists: person members, tag members,
    relationship members, and excluded tags — each decrypted and sorted by
    name (persons by sort_name with a display-name fallback).
    """
    db = _get_db()
    crypto = _get_crypto()

    def _decrypted_entry(rec):
        # Common decrypted shape shared by tag / relationship / excluded-tag rows.
        return {
            'id': rec['id'],
            'name': crypto.decrypt_field(rec['encrypted_name']),
            'color': rec['color'],
            'added_at': rec['added_at'],
        }

    with db.get_connection() as conn:
        cursor = conn.cursor()
        cursor.execute('''
            SELECT id, encrypted_name, encrypted_description, min_resolution, created_at, updated_at
            FROM private_media_person_groups WHERE id = ?
        ''', (group_id,))
        row = cursor.fetchone()
        if not row:
            raise NotFoundError(f"Person group {group_id} not found")
        group = {
            'id': row['id'],
            'name': crypto.decrypt_field(row['encrypted_name']),
            'description': crypto.decrypt_field(row['encrypted_description']) if row['encrypted_description'] else None,
            'min_resolution': row['min_resolution'] or 0,
            'created_at': row['created_at'],
            'updated_at': row['updated_at'],
        }
        # Fetch person members
        cursor.execute('''
            SELECT p.id, p.encrypted_name, p.encrypted_sort_name, p.relationship_id,
                   r.encrypted_name as rel_encrypted_name, r.color as rel_color,
                   m.added_at
            FROM private_media_person_group_members m
            JOIN private_media_persons p ON m.person_id = p.id
            JOIN private_media_relationships r ON p.relationship_id = r.id
            WHERE m.group_id = ?
            ORDER BY p.encrypted_name
        ''', (group_id,))
        member_rows = cursor.fetchall()
        # Fetch tag members
        cursor.execute('''
            SELECT t.id, t.encrypted_name, t.color, tm.added_at
            FROM private_media_person_group_tag_members tm
            JOIN private_gallery_tags t ON tm.tag_id = t.id
            WHERE tm.group_id = ?
        ''', (group_id,))
        tag_member_rows = cursor.fetchall()
        # Fetch relationship members
        cursor.execute('''
            SELECT r.id, r.encrypted_name, r.color, rm.added_at
            FROM private_media_person_group_relationship_members rm
            JOIN private_media_relationships r ON rm.relationship_id = r.id
            WHERE rm.group_id = ?
        ''', (group_id,))
        rel_member_rows = cursor.fetchall()
        # Fetch excluded tags
        cursor.execute('''
            SELECT t.id, t.encrypted_name, t.color, et.added_at
            FROM private_media_person_group_excluded_tags et
            JOIN private_gallery_tags t ON et.tag_id = t.id
            WHERE et.group_id = ?
        ''', (group_id,))
        excluded_tag_rows = cursor.fetchall()
    members = [
        {
            'id': mr['id'],
            'name': crypto.decrypt_field(mr['encrypted_name']),
            'sort_name': crypto.decrypt_field(mr['encrypted_sort_name']) if mr['encrypted_sort_name'] else None,
            'relationship_id': mr['relationship_id'],
            'relationship': {
                'id': mr['relationship_id'],
                'name': crypto.decrypt_field(mr['rel_encrypted_name']),
                'color': mr['rel_color'],
            },
            'added_at': mr['added_at'],
        }
        for mr in member_rows
    ]
    members.sort(key=lambda m: (m.get('sort_name') or m['name']).lower())

    def _by_name(entry):
        return entry['name'].lower()

    tag_members = sorted((_decrypted_entry(r) for r in tag_member_rows), key=_by_name)
    relationship_members = sorted((_decrypted_entry(r) for r in rel_member_rows), key=_by_name)
    excluded_tags = sorted((_decrypted_entry(r) for r in excluded_tag_rows), key=_by_name)
    group['members'] = members
    group['member_count'] = len(members)
    group['tag_members'] = tag_members
    group['tag_member_count'] = len(tag_members)
    group['relationship_members'] = relationship_members
    group['relationship_member_count'] = len(relationship_members)
    group['excluded_tags'] = excluded_tags
    group['excluded_tag_count'] = len(excluded_tags)
    return group
@router.post("/person-groups")
@limiter.limit("30/minute")
@handle_exceptions
async def create_person_group(
    request: Request,
    body: PersonGroupCreate,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Create a new person group."""
    db = _get_db()
    crypto = _get_crypto()
    name_ct = crypto.encrypt_field(body.name)
    # Description is optional; NULL is stored when absent.
    desc_ct = crypto.encrypt_field(body.description) if body.description else None
    min_res = body.min_resolution or 0
    with db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        cur.execute('''
            INSERT INTO private_media_person_groups (encrypted_name, encrypted_description, min_resolution)
            VALUES (?, ?, ?)
        ''', (name_ct, desc_ct, min_res))
        group_id = cur.lastrowid
        conn.commit()
    return {
        "group": {
            "id": group_id,
            "name": body.name,
            "description": body.description,
            "min_resolution": min_res,
            "member_count": 0,
        },
        "message": "Group created"
    }
@router.put("/person-groups/{group_id}")
@limiter.limit("30/minute")
@handle_exceptions
async def update_person_group(
    request: Request,
    group_id: int,
    body: PersonGroupUpdate,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Update a person group."""
    db = _get_db()
    crypto = _get_crypto()
    with db.get_connection(for_write=True) as conn:
        cursor = conn.cursor()
        # Check existence
        cursor.execute('SELECT id FROM private_media_person_groups WHERE id = ?', (group_id,))
        if cursor.fetchone() is None:
            raise NotFoundError(f"Person group {group_id} not found")
        # Build SET clause only from the fields actually provided.
        assignments = []
        values = []
        if body.name is not None:
            assignments.append("encrypted_name = ?")
            values.append(crypto.encrypt_field(body.name))
        if body.description is not None:
            assignments.append("encrypted_description = ?")
            values.append(crypto.encrypt_field(body.description))
        if body.min_resolution is not None:
            assignments.append("min_resolution = ?")
            values.append(body.min_resolution)
        if assignments:
            assignments.append("updated_at = CURRENT_TIMESTAMP")
            values.append(group_id)
            cursor.execute(f'UPDATE private_media_person_groups SET {", ".join(assignments)} WHERE id = ?', values)
        conn.commit()
    return {"success": True, "message": "Group updated"}
@router.delete("/person-groups/{group_id}")
@limiter.limit("30/minute")
@handle_exceptions
async def delete_person_group(
    request: Request,
    group_id: int,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Delete a person group."""
    db = _get_db()
    with db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        cur.execute('DELETE FROM private_media_person_groups WHERE id = ?', (group_id,))
        # Nothing deleted means the id never existed.
        if not cur.rowcount:
            raise NotFoundError(f"Person group {group_id} not found")
        conn.commit()
    return {"success": True, "message": "Group deleted"}
@router.post("/person-groups/{group_id}/members")
@limiter.limit("60/minute")
@handle_exceptions
async def add_person_to_group(
    request: Request,
    group_id: int,
    body: PersonGroupMemberAdd,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Add a person to a group."""
    db = _get_db()
    with db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        # Verify group exists
        cur.execute('SELECT id FROM private_media_person_groups WHERE id = ?', (group_id,))
        if cur.fetchone() is None:
            raise NotFoundError(f"Person group {group_id} not found")
        # INSERT OR IGNORE keeps the call idempotent for existing members.
        cur.execute('''
            INSERT OR IGNORE INTO private_media_person_group_members (group_id, person_id)
            VALUES (?, ?)
        ''', (group_id, body.person_id))
        conn.commit()
    return {"success": True, "message": "Person added to group"}
@router.delete("/person-groups/{group_id}/members/{person_id}")
@limiter.limit("60/minute")
@handle_exceptions
async def remove_person_from_group(
    request: Request,
    group_id: int,
    person_id: int,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Remove a person from a group."""
    db = _get_db()
    # Idempotent: removing a non-member succeeds without error.
    with db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        cur.execute('DELETE FROM private_media_person_group_members WHERE group_id = ? AND person_id = ?', (group_id, person_id))
        conn.commit()
    return {"success": True, "message": "Person removed from group"}
@router.post("/person-groups/{group_id}/tags")
@limiter.limit("60/minute")
@handle_exceptions
async def add_tag_to_person_group(
    request: Request,
    group_id: int,
    body: PersonGroupTagMemberAdd,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Add a tag to a person group."""
    db = _get_db()
    with db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        # The group must exist; the insert itself is idempotent.
        cur.execute('SELECT id FROM private_media_person_groups WHERE id = ?', (group_id,))
        if cur.fetchone() is None:
            raise NotFoundError(f"Person group {group_id} not found")
        cur.execute('''
            INSERT OR IGNORE INTO private_media_person_group_tag_members (group_id, tag_id)
            VALUES (?, ?)
        ''', (group_id, body.tag_id))
        conn.commit()
    return {"success": True, "message": "Tag added to group"}
@router.delete("/person-groups/{group_id}/tags/{tag_id}")
@limiter.limit("60/minute")
@handle_exceptions
async def remove_tag_from_person_group(
    request: Request,
    group_id: int,
    tag_id: int,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Remove a tag from a person group."""
    db = _get_db()
    # Idempotent: removing a tag that isn't a member succeeds silently.
    with db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        cur.execute('DELETE FROM private_media_person_group_tag_members WHERE group_id = ? AND tag_id = ?', (group_id, tag_id))
        conn.commit()
    return {"success": True, "message": "Tag removed from group"}
@router.post("/person-groups/{group_id}/relationships")
@limiter.limit("60/minute")
@handle_exceptions
async def add_relationship_to_person_group(
    request: Request,
    group_id: int,
    body: PersonGroupRelationshipMemberAdd,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Add a relationship to a person group."""
    db = _get_db()
    with db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        # The group must exist; the insert itself is idempotent.
        cur.execute('SELECT id FROM private_media_person_groups WHERE id = ?', (group_id,))
        if cur.fetchone() is None:
            raise NotFoundError(f"Person group {group_id} not found")
        cur.execute('''
            INSERT OR IGNORE INTO private_media_person_group_relationship_members (group_id, relationship_id)
            VALUES (?, ?)
        ''', (group_id, body.relationship_id))
        conn.commit()
    return {"success": True, "message": "Relationship added to group"}
@router.delete("/person-groups/{group_id}/relationships/{relationship_id}")
@limiter.limit("60/minute")
@handle_exceptions
async def remove_relationship_from_person_group(
    request: Request,
    group_id: int,
    relationship_id: int,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Remove a relationship from a person group."""
    db = _get_db()
    # Idempotent: deleting a non-member relationship succeeds silently.
    with db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        cur.execute('DELETE FROM private_media_person_group_relationship_members WHERE group_id = ? AND relationship_id = ?', (group_id, relationship_id))
        conn.commit()
    return {"success": True, "message": "Relationship removed from group"}
@router.post("/person-groups/{group_id}/excluded-tags")
@limiter.limit("60/minute")
@handle_exceptions
async def add_excluded_tag_to_person_group(
    request: Request,
    group_id: int,
    body: PersonGroupExcludedTagAdd,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Add an excluded tag to a person group."""
    db = _get_db()
    with db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        # The group must exist; the insert itself is idempotent.
        cur.execute('SELECT id FROM private_media_person_groups WHERE id = ?', (group_id,))
        if cur.fetchone() is None:
            raise NotFoundError(f"Person group {group_id} not found")
        cur.execute('''
            INSERT OR IGNORE INTO private_media_person_group_excluded_tags (group_id, tag_id)
            VALUES (?, ?)
        ''', (group_id, body.tag_id))
        conn.commit()
    return {"success": True, "message": "Excluded tag added to group"}
@router.delete("/person-groups/{group_id}/excluded-tags/{tag_id}")
@limiter.limit("60/minute")
@handle_exceptions
async def remove_excluded_tag_from_person_group(
    request: Request,
    group_id: int,
    tag_id: int,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Remove an excluded tag from a person group."""
    db = _get_db()
    # Idempotent: removing a tag that isn't excluded succeeds silently.
    with db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        cur.execute('DELETE FROM private_media_person_group_excluded_tags WHERE group_id = ? AND tag_id = ?', (group_id, tag_id))
        conn.commit()
    return {"success": True, "message": "Excluded tag removed from group"}
# ============================================================================
# TAG ENDPOINTS (Encrypted private_gallery_tags)
# ============================================================================
# Process-lifetime guard so the sync below runs at most once per process.
_tags_migrated = False
def _ensure_private_gallery_tags_migrated(db, crypto):
    """Sync all tags from paid_content_tags to private_gallery_tags, preserving IDs."""
    global _tags_migrated
    if _tags_migrated:
        return
    with db.get_connection(for_write=True) as conn:
        cursor = conn.cursor()
        # Create the table if it doesn't exist
        cursor.execute('''
            CREATE TABLE IF NOT EXISTS private_gallery_tags (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                encrypted_name TEXT NOT NULL,
                color TEXT DEFAULT '#6b7280',
                encrypted_description TEXT,
                created_at TEXT DEFAULT CURRENT_TIMESTAMP
            )
        ''')
        # IDs already present in the private gallery must not be overwritten.
        cursor.execute('SELECT id FROM private_gallery_tags')
        existing = {rec['id'] for rec in cursor.fetchall()}
        # Copy every paid-content tag that is still missing, keeping its id so
        # cross-table references stay valid; names/descriptions get encrypted.
        cursor.execute('SELECT id, name, color, description FROM paid_content_tags')
        synced = 0
        for src in cursor.fetchall():
            if src['id'] not in existing:
                enc_name = crypto.encrypt_field(src['name'])
                enc_desc = crypto.encrypt_field(src['description']) if src['description'] else None
                cursor.execute('''
                    INSERT INTO private_gallery_tags (id, encrypted_name, color, encrypted_description)
                    VALUES (?, ?, ?, ?)
                ''', (src['id'], enc_name, src['color'], enc_desc))
                synced += 1
        if synced > 0:
            conn.commit()
            logger.info(f"Synced {synced} new tags from paid_content_tags to private_gallery_tags")
    _tags_migrated = True
@router.get("/tags")
@limiter.limit("60/minute")
@handle_exceptions
async def get_tags(
    request: Request,
    assigned_only: Optional[bool] = None,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Get all encrypted tags. If assigned_only=true, only return tags used in private gallery."""
    db = _get_db()
    crypto = _get_crypto()
    # Lazily sync tags over from paid_content_tags on first use.
    _ensure_private_gallery_tags_migrated(db, crypto)
    with db.get_connection() as conn:
        cur = conn.cursor()
        if assigned_only:
            # Restrict to tags referenced by at least one media item or post.
            cur.execute('''
                SELECT t.id, t.encrypted_name, t.color, t.encrypted_description, t.created_at,
                       (SELECT COUNT(*) FROM private_media_tags mt WHERE mt.tag_id = t.id) +
                       (SELECT COUNT(*) FROM private_media_post_tags pt WHERE pt.tag_id = t.id) as usage_count
                FROM private_gallery_tags t
                WHERE t.id IN (
                    SELECT tag_id FROM private_media_tags
                    UNION
                    SELECT tag_id FROM private_media_post_tags
                )
            ''')
        else:
            cur.execute('''
                SELECT id, encrypted_name, color, encrypted_description, created_at,
                       (SELECT COUNT(*) FROM private_media_tags mt WHERE mt.tag_id = private_gallery_tags.id) +
                       (SELECT COUNT(*) FROM private_media_post_tags pt WHERE pt.tag_id = private_gallery_tags.id) as usage_count
                FROM private_gallery_tags
            ''')
        fetched = cur.fetchall()
    # Decrypt the name/description of every row for the response payload.
    tags = [
        {
            'id': rec['id'],
            'name': crypto.decrypt_field(rec['encrypted_name']),
            'color': rec['color'],
            'description': crypto.decrypt_field(rec['encrypted_description']) if rec['encrypted_description'] else None,
            'created_at': rec['created_at'],
            'usage_count': rec['usage_count'],
        }
        for rec in fetched
    ]
    # Case-insensitive alphabetical order (cannot ORDER BY an encrypted column).
    tags = sorted(tags, key=lambda t: t['name'].lower())
    return {"tags": tags}
@router.post("/tags")
@limiter.limit("30/minute")
@handle_exceptions
async def create_tag(
    request: Request,
    body: TagCreateRequest,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Create a new encrypted tag."""
    db = _get_db()
    crypto = _get_crypto()
    # Name/description are encrypted at rest; color stays plaintext.
    enc_name = crypto.encrypt_field(body.name)
    enc_desc = crypto.encrypt_field(body.description) if body.description else None
    with db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        cur.execute('''
            INSERT INTO private_gallery_tags (encrypted_name, color, encrypted_description)
            VALUES (?, ?, ?)
        ''', (enc_name, body.color, enc_desc))
        new_id = cur.lastrowid
        conn.commit()
    # Echo back the plaintext values the client supplied.
    return {
        "id": new_id,
        "name": body.name,
        "color": body.color,
        "description": body.description,
    }
@router.put("/tags/{tag_id}")
@limiter.limit("30/minute")
@handle_exceptions
async def update_tag(
    request: Request,
    tag_id: int,
    body: TagUpdateRequest,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Update an encrypted tag."""
    db = _get_db()
    crypto = _get_crypto()
    # Collect SET fragments and bind values only for the fields provided.
    set_parts = []
    args = []
    if body.name is not None:
        set_parts.append("encrypted_name = ?")
        args.append(crypto.encrypt_field(body.name))
    if body.color is not None:
        set_parts.append("color = ?")
        args.append(body.color)
    if body.description is not None:
        # An empty description clears the stored value (NULL).
        set_parts.append("encrypted_description = ?")
        args.append(crypto.encrypt_field(body.description) if body.description else None)
    if not set_parts:
        raise ValidationError("No fields to update")
    args.append(tag_id)
    with db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        cur.execute(f'''
            UPDATE private_gallery_tags
            SET {", ".join(set_parts)}
            WHERE id = ?
        ''', args)
        if cur.rowcount == 0:
            raise NotFoundError(f"Tag {tag_id} not found")
        conn.commit()
    return message_response("Tag updated")
@router.delete("/tags/{tag_id}")
@limiter.limit("30/minute")
@handle_exceptions
async def delete_tag(
    request: Request,
    tag_id: int,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Delete an encrypted tag. Junction table CASCADE handles cleanup."""
    db = _get_db()
    with db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        cur.execute('DELETE FROM private_gallery_tags WHERE id = ?', (tag_id,))
        removed = cur.rowcount
        if not removed:
            raise NotFoundError(f"Tag {tag_id} not found")
        conn.commit()
    return message_response("Tag deleted")
# ============================================================================
# IMPORT AUTH ENDPOINTS
# ============================================================================
def _normalize_domain(domain: str) -> str:
"""Normalize domain: strip protocol/path, lowercase, strip leading dots."""
domain = domain.strip().lower()
# Strip protocol if present
if '://' in domain:
domain = domain.split('://', 1)[1]
# Strip path/query
domain = domain.split('/')[0].split('?')[0].split('#')[0]
# Strip port
domain = domain.split(':')[0]
# Strip leading/trailing dots
domain = domain.strip('.')
return domain
@router.get("/import-auth")
@limiter.limit("60/minute")
@handle_exceptions
async def get_import_auth(
    request: Request,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """List all import auth entries. Never exposes decrypted credentials."""
    db = _get_db()
    crypto = _get_crypto()
    with db.get_connection() as conn:
        cur = conn.cursor()
        cur.execute('SELECT * FROM private_gallery_import_auth ORDER BY domain')
        records = cur.fetchall()
    entries = []
    for rec in records:
        # Best-effort cookie count: undecryptable/malformed payloads count as 0.
        num_cookies = 0
        if rec['encrypted_cookies_json']:
            try:
                parsed = json.loads(crypto.decrypt_field(rec['encrypted_cookies_json']))
                if isinstance(parsed, list):
                    num_cookies = len(parsed)
            except Exception:
                pass
        # Only a truncated preview of the user agent is exposed — never the
        # username, password, or cookie values themselves.
        ua_preview = None
        if rec['encrypted_user_agent']:
            try:
                ua = crypto.decrypt_field(rec['encrypted_user_agent'])
                ua_preview = ua[:50] + '...' if len(ua) > 50 else ua
            except Exception:
                pass
        entries.append({
            'id': rec['id'],
            'domain': rec['domain'],
            'auth_type': rec['auth_type'],
            'has_username': bool(rec['encrypted_username']),
            'has_password': bool(rec['encrypted_password']),
            'cookies_count': num_cookies,
            'user_agent': ua_preview,
            'notes': rec['notes'],
            'created_at': rec['created_at'],
            'updated_at': rec['updated_at'],
        })
    return {"entries": entries}
@router.post("/import-auth")
@limiter.limit("30/minute")
@handle_exceptions
async def create_import_auth(
    request: Request,
    body: ImportAuthCreateRequest,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Create a new import auth entry."""
    db = _get_db()
    crypto = _get_crypto()
    domain = _normalize_domain(body.domain)
    if not domain:
        raise ValidationError("Invalid domain")
    # Each auth_type carries mandatory credential fields.
    needs_basic = body.auth_type in ('basic', 'both')
    if needs_basic and (not body.username or not body.password):
        raise ValidationError("Username and password are required for basic/both auth type")
    needs_cookies = body.auth_type in ('cookies', 'both')
    if needs_cookies and not body.cookies:
        raise ValidationError("Cookies are required for cookies/both auth type")
    # Encrypt every credential field before it touches the database.
    enc_user = crypto.encrypt_field(body.username) if body.username else None
    enc_pass = crypto.encrypt_field(body.password) if body.password else None
    enc_cookies = crypto.encrypt_field(json.dumps(body.cookies)) if body.cookies else None
    enc_ua = crypto.encrypt_field(body.user_agent) if body.user_agent else None
    with db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        try:
            cur.execute('''
                INSERT INTO private_gallery_import_auth
                (domain, auth_type, encrypted_username, encrypted_password, encrypted_cookies_json, encrypted_user_agent, notes)
                VALUES (?, ?, ?, ?, ?, ?, ?)
            ''', (domain, body.auth_type, enc_user, enc_pass, enc_cookies, enc_ua, body.notes))
            entry_id = cur.lastrowid
            conn.commit()
        except Exception as e:
            # Map the UNIQUE(domain) constraint violation to a friendly 400.
            if 'UNIQUE' in str(e).upper():
                raise ValidationError(f"Domain '{domain}' already has an auth entry")
            raise
    return {"id": entry_id, "domain": domain, "auth_type": body.auth_type}
@router.put("/import-auth/{entry_id}")
@limiter.limit("30/minute")
@handle_exceptions
async def update_import_auth(
    request: Request,
    entry_id: int,
    body: ImportAuthUpdateRequest,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Update an import auth entry."""
    db = _get_db()
    crypto = _get_crypto()
    # Collect SET fragments and bind values for the fields provided.
    set_parts = []
    args = []
    if body.domain is not None:
        # Re-normalize on change so lookups stay canonical.
        normalized = _normalize_domain(body.domain)
        if not normalized:
            raise ValidationError("Invalid domain")
        set_parts.append("domain = ?")
        args.append(normalized)
    if body.auth_type is not None:
        set_parts.append("auth_type = ?")
        args.append(body.auth_type)
    # Empty strings / empty collections clear the stored value (NULL).
    if body.username is not None:
        set_parts.append("encrypted_username = ?")
        args.append(crypto.encrypt_field(body.username) if body.username else None)
    if body.password is not None:
        set_parts.append("encrypted_password = ?")
        args.append(crypto.encrypt_field(body.password) if body.password else None)
    if body.cookies is not None:
        set_parts.append("encrypted_cookies_json = ?")
        args.append(crypto.encrypt_field(json.dumps(body.cookies)) if body.cookies else None)
    if body.user_agent is not None:
        set_parts.append("encrypted_user_agent = ?")
        args.append(crypto.encrypt_field(body.user_agent) if body.user_agent else None)
    if body.notes is not None:
        set_parts.append("notes = ?")
        args.append(body.notes)
    if not set_parts:
        raise ValidationError("No fields to update")
    # Always bump the modification timestamp on a successful update.
    set_parts.append("updated_at = CURRENT_TIMESTAMP")
    args.append(entry_id)
    with db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        cur.execute(f'''
            UPDATE private_gallery_import_auth
            SET {", ".join(set_parts)}
            WHERE id = ?
        ''', args)
        if cur.rowcount == 0:
            raise NotFoundError(f"Import auth entry {entry_id} not found")
        conn.commit()
    return message_response("Import auth updated")
@router.delete("/import-auth/{entry_id}")
@limiter.limit("30/minute")
@handle_exceptions
async def delete_import_auth(
    request: Request,
    entry_id: int,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Delete an import auth entry."""
    db = _get_db()
    with db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        cur.execute('DELETE FROM private_gallery_import_auth WHERE id = ?', (entry_id,))
        removed = cur.rowcount
        if removed == 0:
            raise NotFoundError(f"Import auth entry {entry_id} not found")
        conn.commit()
    return message_response("Import auth deleted")
# ============================================================================
# MEDIA ENDPOINTS
# ============================================================================
@router.get("/media")
@limiter.limit("120/minute")
@handle_exceptions
async def get_media(
    request: Request,
    offset: int = Query(0, ge=0),
    limit: int = Query(50, ge=1, le=500),
    person_id: Optional[int] = None,
    person_group_id: Optional[int] = None,
    relationship_id: Optional[int] = None,
    tag_ids: Optional[str] = None,
    file_type: Optional[str] = None,
    search: Optional[str] = None,
    sort_by: str = Query("media_date", regex="^(media_date|created_at|filename)$"),
    sort_order: str = Query("desc", regex="^(asc|desc)$"),
    date_from: Optional[str] = None,
    date_to: Optional[str] = None,
    has_tags: Optional[bool] = None,
    has_description: Optional[bool] = None,
    include_attached: Optional[bool] = None,
    shuffle: Optional[bool] = None,
    shuffle_seed: Optional[int] = None,
    unread_only: Optional[bool] = None,
    min_resolution: Optional[int] = Query(None, ge=0, le=10000),
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Get media items with filtering and pagination.

    Filtering strategy: predicates on plaintext columns (person, group,
    relationship, tags, file_type, resolution, read state) are pushed into
    SQL; predicates on encrypted columns (``search``, ``date_from``/
    ``date_to``, ``has_description``) can only be applied after decryption,
    which forces the slow path that loads and decrypts every matching row
    before filtering/sorting/paginating in Python.

    Three query paths:
      * shuffle: deterministic pseudo-random order derived from
        ``shuffle_seed`` (default 42), paginated in SQL.
        NOTE(review): on this path the decrypted-field filters (search,
        date range, has_description) are NOT applied — confirm this is
        intended.
      * fast: no decrypted-field filter/sort needed -> LIMIT/OFFSET in SQL.
      * slow: fetch all rows, decrypt, filter/sort/paginate in Python.

    Returns a dict with ``items`` (each carrying decrypted fields, person
    info, tags, and thumbnail/stream/file URLs), ``total``, ``offset``,
    ``limit`` and ``has_more``. ``include_attached`` is accepted but ignored
    (see comment below).
    """
    db = _get_db()
    crypto = _get_crypto()
    # Ensure private_gallery_tags table exists and is migrated
    _ensure_private_gallery_tags_migrated(db, crypto)
    # Build query - we fetch all then filter in Python since many fields are encrypted
    with db.get_connection() as conn:
        cursor = conn.cursor()
        # Build WHERE clauses for non-encrypted fields
        where_clauses = []
        params = []
        # include_attached parameter is no longer used — media always appears
        # in the gallery regardless of being attached to another post
        if person_id is not None:
            where_clauses.append("m.person_id = ?")
            params.append(person_id)
        if person_group_id is not None:
            # WHO filter: persons + relationships (pass through if none defined)
            where_clauses.append("""(
                (
                    NOT EXISTS (SELECT 1 FROM private_media_person_group_members WHERE group_id = ?)
                    AND NOT EXISTS (SELECT 1 FROM private_media_person_group_relationship_members WHERE group_id = ?)
                )
                OR m.person_id IN (SELECT person_id FROM private_media_person_group_members WHERE group_id = ?)
                OR m.person_id IN (SELECT p2.id FROM private_media_persons p2 WHERE p2.relationship_id IN (SELECT relationship_id FROM private_media_person_group_relationship_members WHERE group_id = ?))
            )""")
            params.extend([person_group_id, person_group_id, person_group_id, person_group_id])
            # TAG filter: included tags (pass through if none defined)
            where_clauses.append("""(
                NOT EXISTS (SELECT 1 FROM private_media_person_group_tag_members WHERE group_id = ?)
                OR m.id IN (SELECT mt.media_id FROM private_media_tags mt WHERE mt.tag_id IN (SELECT tag_id FROM private_media_person_group_tag_members WHERE group_id = ?))
            )""")
            params.extend([person_group_id, person_group_id])
            # EXCLUDE filter: excluded tags
            where_clauses.append("""
                m.id NOT IN (SELECT mt2.media_id FROM private_media_tags mt2 WHERE mt2.tag_id IN (SELECT tag_id FROM private_media_person_group_excluded_tags WHERE group_id = ?))
            """)
            params.append(person_group_id)
        if file_type and file_type != 'all':
            where_clauses.append("m.file_type = ?")
            params.append(file_type)
        if tag_ids:
            # tag_ids arrives as a comma-separated string; non-numeric parts are dropped.
            tag_id_list = [int(t) for t in tag_ids.split(',') if t.strip().isdigit()]
            if tag_id_list:
                placeholders = ','.join('?' * len(tag_id_list))
                # A media item matches if it is tagged directly OR its parent post is tagged.
                where_clauses.append(f'''
                    (m.id IN (SELECT media_id FROM private_media_tags WHERE tag_id IN ({placeholders}))
                    OR m.post_id IN (SELECT post_id FROM private_media_post_tags WHERE tag_id IN ({placeholders})))
                ''')
                params.extend(tag_id_list)
                params.extend(tag_id_list)
        if relationship_id is not None:
            where_clauses.append("p.relationship_id = ?")
            params.append(relationship_id)
        # Filter by has_tags (SQL-based for efficiency)
        if has_tags is True:
            where_clauses.append("EXISTS (SELECT 1 FROM private_media_tags WHERE media_id = m.id)")
        elif has_tags is False:
            where_clauses.append("NOT EXISTS (SELECT 1 FROM private_media_tags WHERE media_id = m.id)")
        if unread_only:
            where_clauses.append("post.is_read = 0")
        # Merge explicit min_resolution with person group's min_resolution (use higher)
        effective_min_res = min_resolution or 0
        if person_group_id is not None:
            cursor.execute('SELECT min_resolution FROM private_media_person_groups WHERE id = ?', (person_group_id,))
            group_row = cursor.fetchone()
            if group_row and group_row['min_resolution']:
                effective_min_res = max(effective_min_res, group_row['min_resolution'])
        if effective_min_res > 0:
            # Resolution gate applies to images only; videos always pass.
            where_clauses.append("(m.file_type != 'image' OR (m.width >= ? AND m.height >= ?))")
            params.extend([effective_min_res, effective_min_res])
        where_sql = " AND ".join(where_clauses) if where_clauses else "1=1"
        # Get total count (before pagination)
        count_sql = f'''
            SELECT COUNT(*) as total
            FROM private_media m
            LEFT JOIN private_media_posts post ON m.post_id = post.id
            LEFT JOIN private_media_persons p ON m.person_id = p.id
            WHERE {where_sql}
        '''
        cursor.execute(count_sql, params)
        total = cursor.fetchone()['total']
        # Shuffle mode: deterministic shuffle using PostgreSQL md5 hash
        # NOTE(review): md5(...) and the ::text casts are PostgreSQL syntax,
        # while other statements in this file use SQLite-style idioms
        # (INSERT OR IGNORE) — confirm which backend db.get_connection targets.
        if shuffle:
            seed = str(shuffle_seed if shuffle_seed is not None else 42)
            total_filtered = total
            query = f'''
                SELECT m.*, p.encrypted_name as person_encrypted_name,
                       p.relationship_id, r.encrypted_name as rel_encrypted_name, r.color as rel_color,
                       post.created_at as post_created_at, post.encrypted_media_date as post_encrypted_media_date
                FROM private_media m
                LEFT JOIN private_media_posts post ON m.post_id = post.id
                LEFT JOIN private_media_persons p ON m.person_id = p.id
                LEFT JOIN private_media_relationships r ON p.relationship_id = r.id
                WHERE {where_sql}
                ORDER BY md5(m.id::text || ?::text), m.id
                LIMIT ? OFFSET ?
            '''
            cursor.execute(query, params + [seed, limit, offset])
            rows = cursor.fetchall()
            # Skip re-sorting below — shuffle order is authoritative
            can_sql_paginate = True
        # Optimization: when no encrypted-field filters are active (no search, date_from,
        # date_to, has_description) and sort is by created_at, paginate in SQL directly
        # to avoid decrypting every row in the database.
        elif not shuffle:
            needs_decrypt_filter = bool(search or date_from or date_to or has_description is not None)
            needs_decrypt_sort = sort_by in ('media_date', 'filename')
            can_sql_paginate = not needs_decrypt_filter and not needs_decrypt_sort
        if not shuffle and can_sql_paginate:
            # Fast path: SQL-level pagination (sort_by=created_at only)
            count_sql = f'''
                SELECT COUNT(*) as total
                FROM private_media m
                LEFT JOIN private_media_posts post ON m.post_id = post.id
                LEFT JOIN private_media_persons p ON m.person_id = p.id
                WHERE {where_sql}
            '''
            cursor.execute(count_sql, params)
            # Positional access here (other call sites use the 'total' key).
            total_filtered = cursor.fetchone()[0]
            order_dir = sort_order.upper() if sort_order.upper() in ('ASC', 'DESC') else 'DESC'
            # Sort by post.created_at to exactly match the posts endpoint sort order,
            # then m.id ASC within each post to match attachment display order
            order_clause = f'post.created_at {order_dir}, m.id ASC'
            query = f'''
                SELECT m.*, p.encrypted_name as person_encrypted_name,
                       p.relationship_id, r.encrypted_name as rel_encrypted_name, r.color as rel_color,
                       post.created_at as post_created_at, post.encrypted_media_date as post_encrypted_media_date
                FROM private_media m
                LEFT JOIN private_media_posts post ON m.post_id = post.id
                LEFT JOIN private_media_persons p ON m.person_id = p.id
                LEFT JOIN private_media_relationships r ON p.relationship_id = r.id
                WHERE {where_sql}
                ORDER BY {order_clause}
                LIMIT ? OFFSET ?
            '''
            cursor.execute(query, params + [limit, offset])
            rows = cursor.fetchall()
        elif not shuffle:
            # Slow path: fetch all, decrypt, filter, sort, then paginate in Python
            # Also fetch post-level fields for sorting by post.media_date (matching posts endpoint)
            query = f'''
                SELECT m.*, p.encrypted_name as person_encrypted_name,
                       p.relationship_id, r.encrypted_name as rel_encrypted_name, r.color as rel_color,
                       post.created_at as post_created_at, post.encrypted_media_date as post_encrypted_media_date
                FROM private_media m
                LEFT JOIN private_media_posts post ON m.post_id = post.id
                LEFT JOIN private_media_persons p ON m.person_id = p.id
                LEFT JOIN private_media_relationships r ON p.relationship_id = r.id
                WHERE {where_sql}
                ORDER BY post.created_at DESC, m.id ASC
            '''
            cursor.execute(query, params)
            rows = cursor.fetchall()
    # Decrypt and process items
    items = []
    config = _get_config(db)
    # NOTE(review): storage_path appears unused in the rest of this function —
    # confirm whether it can be dropped or is kept for parity with other endpoints.
    storage_path = Path(config.get('storage_path', '/opt/immich/private'))
    for row in rows:
        item = dict(row)
        # Decrypt fields
        item['filename'] = crypto.decrypt_field(item['encrypted_filename'])
        item['description'] = crypto.decrypt_field(item['encrypted_description']) if item['encrypted_description'] else None
        item['media_date'] = crypto.decrypt_field(item['encrypted_media_date'])
        item['source_path'] = crypto.decrypt_field(item['encrypted_source_path']) if item['encrypted_source_path'] else None
        # Decrypt post-level media_date for sorting (to match posts endpoint sort order)
        item['post_media_date'] = crypto.decrypt_field(item['post_encrypted_media_date']) if item.get('post_encrypted_media_date') else None
        # Remove encrypted fields
        del item['encrypted_filename']
        del item['encrypted_description']
        del item['encrypted_media_date']
        del item['encrypted_source_path']
        if 'post_encrypted_media_date' in item:
            del item['post_encrypted_media_date']
        # Add person info
        if item['person_encrypted_name']:
            item['person'] = {
                'id': item['person_id'],
                'name': crypto.decrypt_field(item['person_encrypted_name']),
                'relationship': {
                    'id': item['relationship_id'],
                    'name': crypto.decrypt_field(item['rel_encrypted_name']) if item['rel_encrypted_name'] else None,
                    'color': item['rel_color']
                }
            }
        else:
            item['person'] = None
        del item['person_encrypted_name']
        del item['rel_encrypted_name']
        del item['rel_color']
        # Add URLs
        item['thumbnail_url'] = f"/api/private-gallery/thumbnail/{item['id']}"
        item['stream_url'] = f"/api/private-gallery/stream/{item['id']}"
        item['file_url'] = f"/api/private-gallery/file/{item['id']}"
        items.append(item)
    # Re-sort items within each post by id ASC to match the posts endpoint and paginated attachments endpoint
    # Skip re-sorting in shuffle mode — shuffle order is authoritative
    if items and not shuffle:
        # Preserve the first-seen order of posts, then sort media inside each post.
        post_order = []
        seen = set()
        by_post = {}
        for item in items:
            pid = item.get('post_id')
            if pid not in seen:
                post_order.append(pid)
                seen.add(pid)
                by_post[pid] = []
            by_post[pid].append(item)
        for pid in by_post:
            by_post[pid].sort(key=lambda x: x.get('id', 0))
        items = []
        for pid in post_order:
            items.extend(by_post[pid])
    # Python-side filtering/sorting/pagination only runs on the slow path;
    # on the fast and shuffle paths total_filtered was already computed in SQL.
    if not can_sql_paginate:
        # Apply date filtering (on decrypted dates)
        if date_from:
            items = [i for i in items if i['media_date'] >= date_from]
        if date_to:
            items = [i for i in items if i['media_date'] <= date_to]
        # Apply search (on decrypted fields) - each word must match in filename or description
        if search:
            words = search.lower().split()
            if words:
                items = [i for i in items if all(
                    w in (i.get('filename') or '').lower() or w in (i.get('description') or '').lower()
                    for w in words
                )]
        # Filter by has_description (after decryption since description is encrypted)
        if has_description is True:
            items = [i for i in items if i.get('description') and i['description'].strip()]
        elif has_description is False:
            items = [i for i in items if not i.get('description') or not i['description'].strip()]
        # Sort by decrypted field — use post-level media_date to match posts endpoint sort
        if sort_by == 'media_date':
            items.sort(key=lambda x: (x.get('post_media_date') or x.get('media_date') or '', x.get('post_created_at') or ''), reverse=(sort_order == 'desc'))
        elif sort_by == 'filename':
            items.sort(key=lambda x: (x.get('filename') or '').lower(), reverse=(sort_order == 'desc'))
        elif sort_by == 'created_at' and sort_order == 'asc':
            items.reverse()  # SQL default is DESC, reverse for ASC
        # Apply pagination
        total_filtered = len(items)
        items = items[offset:offset + limit]
    # Get tags for each item
    media_ids = [i['id'] for i in items]
    if media_ids:
        with db.get_connection() as conn:
            cursor = conn.cursor()
            placeholders = ','.join('?' * len(media_ids))
            cursor.execute(f'''
                SELECT mt.media_id, t.id, t.encrypted_name, t.color
                FROM private_media_tags mt
                JOIN private_gallery_tags t ON mt.tag_id = t.id
                WHERE mt.media_id IN ({placeholders})
            ''', media_ids)
            tags_by_media = {}
            for row in cursor.fetchall():
                media_id = row['media_id']
                if media_id not in tags_by_media:
                    tags_by_media[media_id] = []
                tags_by_media[media_id].append({
                    'id': row['id'],
                    'name': crypto.decrypt_field(row['encrypted_name']),
                    'color': row['color']
                })
            for item in items:
                item['tags'] = tags_by_media.get(item['id'], [])
    # Clean up internal sort fields before response
    for item in items:
        item.pop('post_media_date', None)
        item.pop('post_created_at', None)
        item.pop('post_encrypted_media_date', None)
    return {
        "items": items,
        "total": total_filtered,
        "offset": offset,
        "limit": limit,
        "has_more": offset + limit < total_filtered
    }
@router.get("/posts")
@limiter.limit("120/minute")
@handle_exceptions
async def get_posts(
request: Request,
offset: int = Query(0, ge=0),
limit: int = Query(50, ge=1, le=200),
person_id: Optional[int] = None,
person_group_id: Optional[int] = None,
relationship_id: Optional[int] = None,
tag_ids: Optional[str] = None,
file_type: Optional[str] = None,
search: Optional[str] = None,
sort_by: str = Query("media_date", regex="^(media_date|created_at)$"),
sort_order: str = Query("desc", regex="^(asc|desc)$"),
date_from: Optional[str] = None,
date_to: Optional[str] = None,
has_tags: Optional[bool] = None,
has_description: Optional[bool] = None,
unread_only: Optional[bool] = None,
current_user: Dict = Depends(get_current_user),
session: Dict = Depends(_verify_gallery_token)
):
"""Get posts with their grouped media items."""
db = _get_db()
crypto = _get_crypto()
# Ensure private_gallery_tags table exists and is migrated
_ensure_private_gallery_tags_migrated(db, crypto)
# Build cache key from SQL-level filter params
cache_key = f"{person_id}|{person_group_id}|{relationship_id}|{tag_ids}|{has_tags}|{file_type}|{unread_only}|{_posts_cache_version}"
# Check cache for decrypted base post list (with TTL)
cached = _posts_cache.get(cache_key)
if cached is not None:
cache_age = time.time() - _posts_cache_time.get(cache_key, 0)
if cache_age > _POSTS_CACHE_TTL:
cached = None # expired
if cached is not None:
all_posts = [dict(p) for p in cached] # shallow copy each dict
else:
with db.get_connection() as conn:
cursor = conn.cursor()
# Build WHERE clauses for posts
where_clauses = []
params = []
if person_id is not None:
where_clauses.append("post.person_id = ?")
params.append(person_id)
if person_group_id is not None:
# WHO filter: persons + relationships (pass through if none defined)
where_clauses.append("""(
(
NOT EXISTS (SELECT 1 FROM private_media_person_group_members WHERE group_id = ?)
AND NOT EXISTS (SELECT 1 FROM private_media_person_group_relationship_members WHERE group_id = ?)
)
OR post.person_id IN (SELECT person_id FROM private_media_person_group_members WHERE group_id = ?)
OR post.person_id IN (SELECT p2.id FROM private_media_persons p2 WHERE p2.relationship_id IN (SELECT relationship_id FROM private_media_person_group_relationship_members WHERE group_id = ?))
)""")
params.extend([person_group_id, person_group_id, person_group_id, person_group_id])
# TAG filter: included tags (pass through if none defined)
where_clauses.append("""(
NOT EXISTS (SELECT 1 FROM private_media_person_group_tag_members WHERE group_id = ?)
OR post.id IN (SELECT DISTINCT m_tg.post_id FROM private_media m_tg JOIN private_media_tags mt_tg ON mt_tg.media_id = m_tg.id WHERE mt_tg.tag_id IN (SELECT tag_id FROM private_media_person_group_tag_members WHERE group_id = ?) AND m_tg.post_id IS NOT NULL)
)""")
params.extend([person_group_id, person_group_id])
# EXCLUDE filter: excluded tags
where_clauses.append("""
post.id NOT IN (SELECT DISTINCT m_ex.post_id FROM private_media m_ex JOIN private_media_tags mt_ex ON mt_ex.media_id = m_ex.id WHERE mt_ex.tag_id IN (SELECT tag_id FROM private_media_person_group_excluded_tags WHERE group_id = ?) AND m_ex.post_id IS NOT NULL)
""")
params.append(person_group_id)
if relationship_id is not None:
where_clauses.append("p.relationship_id = ?")
params.append(relationship_id)
if tag_ids:
tag_id_list = [int(t) for t in tag_ids.split(',') if t.strip().isdigit()]
if tag_id_list:
placeholders = ','.join('?' * len(tag_id_list))
where_clauses.append(f'''
post.id IN (
SELECT post_id FROM private_media_post_tags WHERE tag_id IN ({placeholders})
UNION
SELECT DISTINCT m.post_id FROM private_media m
JOIN private_media_tags mt ON mt.media_id = m.id
WHERE mt.tag_id IN ({placeholders}) AND m.post_id IS NOT NULL
)
''')
params.extend(tag_id_list)
params.extend(tag_id_list)
if has_tags is True:
where_clauses.append("EXISTS (SELECT 1 FROM private_media_post_tags WHERE post_id = post.id)")
elif has_tags is False:
where_clauses.append("NOT EXISTS (SELECT 1 FROM private_media_post_tags WHERE post_id = post.id)")
# Filter by file_type (posts that have at least one media item of that type)
if file_type and file_type != 'all':
where_clauses.append(f'''
EXISTS (SELECT 1 FROM private_media m WHERE m.post_id = post.id AND m.file_type = ?)
''')
params.append(file_type)
if unread_only:
where_clauses.append("post.is_read = 0")
where_sql = " AND ".join(where_clauses) if where_clauses else "1=1"
# Get posts with person info
query = f'''
SELECT post.*, p.encrypted_name as person_encrypted_name,
p.relationship_id, r.encrypted_name as rel_encrypted_name, r.color as rel_color
FROM private_media_posts post
LEFT JOIN private_media_persons p ON post.person_id = p.id
LEFT JOIN private_media_relationships r ON p.relationship_id = r.id
WHERE {where_sql}
ORDER BY post.created_at DESC
'''
cursor.execute(query, params)
rows = cursor.fetchall()
# Decrypt and process posts
all_posts = []
for row in rows:
post = dict(row)
# Decrypt post fields
post['description'] = crypto.decrypt_field(post['encrypted_description']) if post['encrypted_description'] else None
post['media_date'] = crypto.decrypt_field(post['encrypted_media_date'])
# Remove encrypted fields
del post['encrypted_description']
del post['encrypted_media_date']
# Add person info
if post.get('person_encrypted_name'):
post['person'] = {
'id': post['person_id'],
'name': crypto.decrypt_field(post['person_encrypted_name']),
'relationship': {
'id': post['relationship_id'],
'name': crypto.decrypt_field(post['rel_encrypted_name']) if post.get('rel_encrypted_name') else None,
'color': post.get('rel_color')
}
}
else:
post['person'] = None
# Clean up temp fields
for key in ['person_encrypted_name', 'rel_encrypted_name', 'rel_color']:
if key in post:
del post[key]
all_posts.append(post)
# Store in cache with timestamp
with _posts_cache_lock:
_posts_cache[cache_key] = all_posts
_posts_cache_time[cache_key] = time.time()
all_posts = [dict(p) for p in all_posts] # work on copies
posts = all_posts
# Apply date filtering (on decrypted dates)
if date_from:
posts = [p for p in posts if p['media_date'] >= date_from]
if date_to:
posts = [p for p in posts if p['media_date'] <= date_to]
# Apply search (on decrypted fields) - each word must match in description
if search:
words = search.lower().split()
if words:
posts = [p for p in posts if all(
w in (p.get('description') or '').lower()
for w in words
)]
# Filter by has_description (after decryption)
if has_description is True:
posts = [p for p in posts if p.get('description') and p['description'].strip()]
elif has_description is False:
posts = [p for p in posts if not p.get('description') or not p['description'].strip()]
# Sort by decrypted field, with created_at as tiebreaker
if sort_by == 'media_date':
posts.sort(key=lambda x: (x.get('media_date') or '', x.get('created_at') or ''), reverse=(sort_order == 'desc'))
elif sort_order == 'asc':
posts.reverse() # Default order is DESC from SQL, reverse for ASC
# Apply pagination
total_filtered = len(posts)
# Count total media items across ALL filtered posts (before pagination)
all_filtered_post_ids = [p['id'] for p in posts]
total_media = 0
if all_filtered_post_ids:
with db.get_connection() as conn:
cursor = conn.cursor()
placeholders = ','.join('?' * len(all_filtered_post_ids))
if file_type and file_type != 'all':
cursor.execute(f'''
SELECT COUNT(*) as cnt FROM private_media
WHERE post_id IN ({placeholders}) AND file_type = ?
''', all_filtered_post_ids + [file_type])
else:
cursor.execute(f'''
SELECT COUNT(*) as cnt FROM private_media
WHERE post_id IN ({placeholders})
''', all_filtered_post_ids)
total_media = cursor.fetchone()['cnt']
posts = posts[offset:offset + limit]
# Get media items for each post (capped at 100 per post for performance)
ATTACHMENT_CAP = 100
post_ids = [p['id'] for p in posts]
media_by_post = {}
media_count_by_post = {}
media_type_counts_by_post = {}
if post_ids:
with db.get_connection() as conn:
cursor = conn.cursor()
placeholders = ','.join('?' * len(post_ids))
file_type_clause = ''
file_type_params = []
if file_type and file_type != 'all':
file_type_clause = ' AND m.file_type = ?'
file_type_params = [file_type]
# Get total counts per post with image/video breakdown (cheap — no decryption)
cursor.execute(f'''
SELECT m.post_id, COUNT(*) as cnt,
SUM(CASE WHEN m.file_type = 'image' THEN 1 ELSE 0 END) as image_count,
SUM(CASE WHEN m.file_type = 'video' THEN 1 ELSE 0 END) as video_count
FROM private_media m
WHERE m.post_id IN ({placeholders}){file_type_clause}
GROUP BY m.post_id
''', list(post_ids) + file_type_params)
for row in cursor.fetchall():
media_count_by_post[row['post_id']] = row['cnt']
media_type_counts_by_post[row['post_id']] = {
'image_count': row['image_count'],
'video_count': row['video_count']
}
# Fetch capped media per post using window function
media_query_params = list(post_ids) + file_type_params
cursor.execute(f'''
SELECT * FROM (
SELECT m.*, p.encrypted_name as person_encrypted_name,
p.relationship_id, r.encrypted_name as rel_encrypted_name, r.color as rel_color,
ROW_NUMBER() OVER (PARTITION BY m.post_id ORDER BY m.id ASC) as rn
FROM private_media m
LEFT JOIN private_media_persons p ON m.person_id = p.id
LEFT JOIN private_media_relationships r ON p.relationship_id = r.id
WHERE m.post_id IN ({placeholders}){file_type_clause}
) WHERE rn <= {ATTACHMENT_CAP}
''', media_query_params)
for row in cursor.fetchall():
media = dict(row)
post_id = media['post_id']
del media['rn']
# Decrypt media fields
media['filename'] = crypto.decrypt_field(media['encrypted_filename'])
media['description'] = crypto.decrypt_field(media['encrypted_description']) if media['encrypted_description'] else None
media['media_date'] = crypto.decrypt_field(media['encrypted_media_date'])
media['source_path'] = crypto.decrypt_field(media['encrypted_source_path']) if media['encrypted_source_path'] else None
# Remove encrypted fields
for key in ['encrypted_filename', 'encrypted_description', 'encrypted_media_date', 'encrypted_source_path']:
if key in media:
del media[key]
# Add URLs
media['thumbnail_url'] = f"/api/private-gallery/thumbnail/{media['id']}"
media['stream_url'] = f"/api/private-gallery/stream/{media['id']}"
media['file_url'] = f"/api/private-gallery/file/{media['id']}"
# Clean up temp fields
for key in ['person_encrypted_name', 'rel_encrypted_name', 'rel_color']:
if key in media:
del media[key]
if post_id not in media_by_post:
media_by_post[post_id] = []
media_by_post[post_id].append(media)
# Sort attachments within each post by id ascending (matches ROW_NUMBER window and paginated endpoint)
for pid in media_by_post:
media_by_post[pid].sort(key=lambda x: x.get('id', 0))
# Get tags for each post
tags_by_post = {}
if post_ids:
with db.get_connection() as conn:
cursor = conn.cursor()
placeholders = ','.join('?' * len(post_ids))
cursor.execute(f'''
SELECT pt.post_id, t.id, t.encrypted_name, t.color
FROM private_media_post_tags pt
JOIN private_gallery_tags t ON pt.tag_id = t.id
WHERE pt.post_id IN ({placeholders})
''', post_ids)
for row in cursor.fetchall():
post_id = row['post_id']
if post_id not in tags_by_post:
tags_by_post[post_id] = []
tags_by_post[post_id].append({
'id': row['id'],
'name': crypto.decrypt_field(row['encrypted_name']),
'color': row['color']
})
# Attach media and tags to posts
for post in posts:
post['attachments'] = media_by_post.get(post['id'], [])
post['tags'] = tags_by_post.get(post['id'], [])
post['attachment_count'] = media_count_by_post.get(post['id'], 0)
type_counts = media_type_counts_by_post.get(post['id'], {})
post['image_count'] = type_counts.get('image_count', 0)
post['video_count'] = type_counts.get('video_count', 0)
return {
"posts": posts,
"total": total_filtered,
"total_media": total_media,
"offset": offset,
"limit": limit,
"has_more": offset + limit < total_filtered
}
@router.get("/new-posts-count")
@limiter.limit("120/minute")
@handle_exceptions
async def get_new_posts_count(
    request: Request,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Return the number of posts still flagged as unread."""
    db = _get_db()
    with db.get_connection() as conn:
        cur = conn.cursor()
        cur.execute('SELECT COUNT(*) as cnt FROM private_media_posts WHERE is_read = 0')
        unread = cur.fetchone()['cnt']
    return {"count": unread}
@router.post("/mark-posts-seen")
@limiter.limit("30/minute")
@handle_exceptions
async def mark_posts_seen(
    request: Request,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Mark all unread posts as read.

    Returns {"updated": N} where N is the number of rows flipped.
    """
    db = _get_db()
    # Use the write connection for consistency with the other mutating endpoints
    # (update_post/delete_post use for_write=True).
    with db.get_connection(for_write=True) as conn:
        cursor = conn.cursor()
        cursor.execute('UPDATE private_media_posts SET is_read = 1 WHERE is_read = 0')
        updated = cursor.rowcount
        conn.commit()
    # Cached decrypted post lists embed is_read, so drop them
    _invalidate_posts_cache()
    return {"updated": updated}
@router.put("/posts/batch/read-status")
@limiter.limit("60/minute")
@handle_exceptions
async def batch_update_read_status(
    request: Request,
    body: BatchReadStatusRequest,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Update read status for multiple posts.

    Body: post_ids + is_read flag. Returns {"updated": N}.
    """
    if not body.post_ids:
        return {"updated": 0}
    db = _get_db()
    is_read_val = 1 if body.is_read else 0
    # Use the write connection for consistency with the other mutating endpoints.
    with db.get_connection(for_write=True) as conn:
        cursor = conn.cursor()
        placeholders = ','.join('?' * len(body.post_ids))
        cursor.execute(
            f'UPDATE private_media_posts SET is_read = ? WHERE id IN ({placeholders})',
            [is_read_val] + body.post_ids
        )
        updated = cursor.rowcount
        conn.commit()
    _invalidate_posts_cache()
    return {"updated": updated}
@router.put("/posts/{post_id}/read-status")
@limiter.limit("120/minute")
@handle_exceptions
async def update_post_read_status(
    request: Request,
    post_id: int,
    body: dict,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Update read status for a single post.

    Body: {"is_read": <truthy>}. Missing/falsy values mark the post unread.
    Raises NotFoundError if the post does not exist.
    """
    db = _get_db()
    is_read_val = 1 if body.get('is_read') else 0
    # Use the write connection for consistency with the other mutating endpoints.
    with db.get_connection(for_write=True) as conn:
        cursor = conn.cursor()
        cursor.execute('UPDATE private_media_posts SET is_read = ? WHERE id = ?', (is_read_val, post_id))
        if cursor.rowcount == 0:
            raise NotFoundError(f"Post {post_id} not found")
        conn.commit()
    _invalidate_posts_cache()
    return {"post_id": post_id, "is_read": is_read_val}
@router.get("/posts/{post_id}/attachments")
@limiter.limit("120/minute")
@handle_exceptions
async def get_post_attachments(
    request: Request,
    post_id: int,
    offset: int = Query(0, ge=0),
    limit: int = Query(100, ge=1, le=500),
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Get paginated attachments for a post."""
    db = _get_db()
    crypto = _get_crypto()
    encrypted_cols = ('encrypted_filename', 'encrypted_description',
                      'encrypted_media_date', 'encrypted_source_path')
    join_cols = ('person_encrypted_name', 'rel_encrypted_name', 'rel_color')
    with db.get_connection() as conn:
        cursor = conn.cursor()
        # 404 if the post itself is missing
        cursor.execute('SELECT id FROM private_media_posts WHERE id = ?', (post_id,))
        if cursor.fetchone() is None:
            raise NotFoundError(f"Post {post_id} not found")
        cursor.execute('''
            SELECT m.*, p.encrypted_name as person_encrypted_name,
                   p.relationship_id, r.encrypted_name as rel_encrypted_name, r.color as rel_color
            FROM private_media m
            LEFT JOIN private_media_persons p ON m.person_id = p.id
            LEFT JOIN private_media_relationships r ON p.relationship_id = r.id
            WHERE m.post_id = ?
        ''', (post_id,))
        rows = cursor.fetchall()
    decrypted = []
    for row in rows:
        media = dict(row)
        # Decrypt the sensitive columns into plain fields
        media['filename'] = crypto.decrypt_field(media['encrypted_filename'])
        media['description'] = crypto.decrypt_field(media['encrypted_description']) if media['encrypted_description'] else None
        media['media_date'] = crypto.decrypt_field(media['encrypted_media_date'])
        media['source_path'] = crypto.decrypt_field(media['encrypted_source_path']) if media['encrypted_source_path'] else None
        # Strip encrypted columns and join-only helper columns from the response
        for key in encrypted_cols + join_cols:
            media.pop(key, None)
        mid = media['id']
        media['thumbnail_url'] = f"/api/private-gallery/thumbnail/{mid}"
        media['stream_url'] = f"/api/private-gallery/stream/{mid}"
        media['file_url'] = f"/api/private-gallery/file/{mid}"
        decrypted.append(media)
    # Stable ordering: id ascending, matching the posts endpoint's ROW_NUMBER window
    decrypted.sort(key=lambda m: m.get('id', 0))
    total = len(decrypted)
    page = decrypted[offset:offset + limit]
    return {
        "items": page,
        "total": total,
        "offset": offset,
        "limit": limit,
        "has_more": offset + limit < total
    }
@router.put("/posts/{post_id}")
@limiter.limit("60/minute")
@handle_exceptions
async def update_post(
    request: Request,
    post_id: int,
    body: PostUpdateRequest,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Update a post's metadata.

    Only fields present in the body are touched:
      - description: re-encrypted (empty string clears it to NULL)
      - person_id: values <= 0 clear the association
      - media_date: re-encrypted
      - tag_ids: full replacement of the post's tag set

    Raises NotFoundError if the post does not exist.
    """
    db = _get_db()
    crypto = _get_crypto()
    # Verify post exists before doing any work
    with db.get_connection() as conn:
        cursor = conn.cursor()
        cursor.execute('SELECT id FROM private_media_posts WHERE id = ?', (post_id,))
        if not cursor.fetchone():
            raise NotFoundError(f"Post {post_id} not found")
    updates = []
    params = []
    if body.description is not None:
        updates.append("encrypted_description = ?")
        params.append(crypto.encrypt_field(body.description) if body.description else None)
    if body.person_id is not None:
        updates.append("person_id = ?")
        params.append(body.person_id if body.person_id > 0 else None)
    if body.media_date is not None:
        updates.append("encrypted_media_date = ?")
        params.append(crypto.encrypt_field(body.media_date))
    if updates:
        updates.append("updated_at = CURRENT_TIMESTAMP")
        params.append(post_id)
        with db.get_connection(for_write=True) as conn:
            cursor = conn.cursor()
            cursor.execute(f'''
                UPDATE private_media_posts
                SET {", ".join(updates)}
                WHERE id = ?
            ''', params)
            conn.commit()
    # Replace the tag set if provided (None = leave tags untouched)
    if body.tag_ids is not None:
        with db.get_connection(for_write=True) as conn:
            cursor = conn.cursor()
            cursor.execute('DELETE FROM private_media_post_tags WHERE post_id = ?', (post_id,))
            for tag_id in body.tag_ids:
                # INSERT OR IGNORE: duplicate tag_ids in the request must not
                # raise (consistent with create_post)
                cursor.execute('''
                    INSERT OR IGNORE INTO private_media_post_tags (post_id, tag_id)
                    VALUES (?, ?)
                ''', (post_id, tag_id))
            conn.commit()
    _invalidate_posts_cache()
    return message_response("Post updated")
@router.delete("/posts/{post_id}")
@limiter.limit("30/minute")
@handle_exceptions
async def delete_post(
    request: Request,
    post_id: int,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Delete a post, its media rows, tag links, and on-disk encrypted files.

    Raises NotFoundError only when the post has no media AND does not exist.
    File deletion is best-effort: missing files and disk I/O errors are
    logged and skipped.
    """
    db = _get_db()
    # Collect storage ids of all media attached to the post
    with db.get_connection() as conn:
        cursor = conn.cursor()
        cursor.execute('SELECT storage_id FROM private_media WHERE post_id = ?', (post_id,))
        media_rows = cursor.fetchall()
    if not media_rows:
        # Post may legitimately have no media — but 404 if it doesn't exist at all
        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute('SELECT id FROM private_media_posts WHERE id = ?', (post_id,))
            if not cursor.fetchone():
                raise NotFoundError(f"Post {post_id} not found")
    # Get storage path
    config = _get_config(db)
    storage_path = Path(config.get('storage_path', '/opt/immich/private'))
    thumbs_path = storage_path / 'thumbs'
    # Delete files (ignore errors — files may be missing or on a failed disk).
    # No exists() pre-check: it can hang ~6s per call on I/O-error disks and
    # blow the request timeout; unlink and swallow FileNotFoundError instead.
    for row in media_rows:
        storage_id = row['storage_id']
        for path in [
            storage_path / 'data' / f"{storage_id}.enc",
            thumbs_path / f"{storage_id}.enc",
        ]:
            try:
                path.unlink()
            except FileNotFoundError:
                pass
            except OSError as e:
                logger.warning(f"Could not delete {path}: {e}")
    # Delete from database (explicit deletes rather than relying on FK cascades)
    with db.get_connection(for_write=True) as conn:
        cursor = conn.cursor()
        cursor.execute('DELETE FROM private_media WHERE post_id = ?', (post_id,))
        cursor.execute('DELETE FROM private_media_post_tags WHERE post_id = ?', (post_id,))
        cursor.execute('DELETE FROM private_media_posts WHERE id = ?', (post_id,))
        conn.commit()
    _invalidate_posts_cache()
    return message_response("Post deleted")
@router.post("/posts")
@limiter.limit("30/minute")
@handle_exceptions
async def create_post(
    request: Request,
    body: PostCreateRequest,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Create a new post, optionally attaching existing media."""
    db = _get_db()
    crypto = _get_crypto()
    final_date = body.media_date or date.today().isoformat()
    with db.get_connection(for_write=True) as conn:
        cursor = conn.cursor()
        # The target person must exist
        cursor.execute('SELECT id FROM private_media_persons WHERE id = ?', (body.person_id,))
        if cursor.fetchone() is None:
            raise ValidationError(f"Person {body.person_id} not found")
        # Insert the post itself (description is optional)
        encrypted_desc = crypto.encrypt_field(body.description) if body.description else None
        cursor.execute('''
            INSERT INTO private_media_posts (
                person_id, encrypted_description, encrypted_media_date
            ) VALUES (?, ?, ?)
        ''', (body.person_id, encrypted_desc, crypto.encrypt_field(final_date)))
        post_id = cursor.lastrowid
        # Tag links (duplicates in the request are ignored)
        for tag_id in (body.tag_ids or []):
            cursor.execute('''
                INSERT OR IGNORE INTO private_media_post_tags (post_id, tag_id)
                VALUES (?, ?)
            ''', (post_id, tag_id))
        # Move any requested existing media onto the new post
        attached = 0
        for media_id in (body.media_ids or []):
            cursor.execute('SELECT id, post_id FROM private_media WHERE id = ?', (media_id,))
            found = cursor.fetchone()
            if found is None:
                continue
            previous_post = found['post_id']
            if previous_post is not None:
                # Remember where the media originally lived (first move only)
                cursor.execute('SELECT original_post_id FROM private_media WHERE id = ?', (media_id,))
                orig = cursor.fetchone()
                if orig and orig['original_post_id'] is None:
                    cursor.execute('UPDATE private_media SET original_post_id = ? WHERE id = ?', (previous_post, media_id))
            cursor.execute('UPDATE private_media SET post_id = ?, updated_at = CURRENT_TIMESTAMP WHERE id = ?', (post_id, media_id))
            attached += 1
        conn.commit()
    _invalidate_posts_cache()
    return {"post_id": post_id, "attached": attached}
def _scan_duplicates_background(job_id: str, post_id: int, person_id: int):
    """Background task to scan for perceptual duplicates across a person's media.

    Runs in a daemon thread; progress and results are reported through the
    private-gallery job store via _update_pg_job. ``post_id`` is unused here
    but kept for signature compatibility with the endpoint that spawns this
    scan (the scan always covers ALL of the person's posts).

    Phases (surfaced in the job's ``current_phase`` field):
      1. ``hashing``   — backfill missing perceptual hashes by decrypting each
                         file to a temp path and hashing it.
      2. ``comparing`` — pairwise hamming distance over the hashes; union-find
                         merges items within the threshold into groups.
    """
    db = _get_db()
    crypto = _get_crypto()
    config = _get_config(db)
    storage_path = Path(config.get('storage_path', '/opt/immich/private'))
    data_path = storage_path / 'data'
    try:
        # Get all image/video media belonging to the person's posts
        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute('''
                SELECT id, post_id, storage_id, encrypted_filename, file_type, perceptual_hash, width, height
                FROM private_media
                WHERE post_id IN (SELECT id FROM private_media_posts WHERE person_id = ?)
                AND file_type IN ('image', 'video')
            ''', (person_id,))
            all_media = cursor.fetchall()
        total = len(all_media)
        _update_pg_job(job_id, {'total_files': total, 'current_phase': 'hashing'})
        if total < 2:
            _update_pg_job(job_id, {
                'status': 'completed',
                'scan_results': [],
                'total_scanned': total,
                'total_groups': 0,
                'completed_at': datetime.now().isoformat(),
                'message': f'Not enough media to compare ({total} found)'
            })
            return
        # Phase 1: backfill perceptual hashes for rows that lack one
        media_list = []
        for idx, row in enumerate(all_media):
            phash = row['perceptual_hash']
            media_id = row['id']
            if not phash:
                encrypted_file = data_path / f"{row['storage_id']}.enc"
                if encrypted_file.exists():
                    try:
                        temp_dir = Path(tempfile.gettempdir())
                        orig_name = crypto.decrypt_field(row['encrypted_filename']) or ''
                        # Preserve a plausible extension so the hasher picks the right decoder
                        ext = Path(orig_name).suffix or ('.jpg' if row['file_type'] == 'image' else '.mp4' if row['file_type'] == 'video' else '')
                        temp_file = temp_dir / f"pg_phash_{row['storage_id']}{ext}"
                        crypto.decrypt_file(encrypted_file, temp_file)
                        phash = _compute_perceptual_hash(temp_file)
                        temp_file.unlink(missing_ok=True)
                        if phash:
                            # Persist the hash so future scans skip the decrypt step
                            with db.get_connection(for_write=True) as conn:
                                cursor = conn.cursor()
                                cursor.execute('UPDATE private_media SET perceptual_hash = ? WHERE id = ?', (phash, media_id))
                                conn.commit()
                    except Exception:
                        # Best effort: one undecodable file must not abort the scan
                        pass
            if phash:
                media_list.append({
                    'id': media_id,
                    'post_id': row['post_id'],
                    'storage_id': row['storage_id'],
                    'filename': crypto.decrypt_field(row['encrypted_filename']),
                    'file_type': row['file_type'],
                    'perceptual_hash': phash,
                    'width': row['width'],
                    'height': row['height'],
                })
            _update_pg_job(job_id, {
                'processed_files': idx + 1,
                'success_count': len(media_list),
            })
        # Phase 2: compare all pairs using hamming distance
        total_pairs = len(media_list) * (len(media_list) - 1) // 2
        _update_pg_job(job_id, {'current_phase': 'comparing', 'total_pairs': total_pairs, 'compared_pairs': 0})
        # Pre-parse hex hashes to ints so each pair is one XOR + popcount
        hash_ints = {}
        for m in media_list:
            try:
                hash_ints[m['id']] = int(m['perceptual_hash'], 16)
            except (ValueError, TypeError):
                pass
        threshold = 12  # max hamming distance treated as a duplicate
        # Union-find (path halving) over media ids
        parent = {m['id']: m['id'] for m in media_list}
        def find(x):
            while parent[x] != x:
                parent[x] = parent[parent[x]]
                x = parent[x]
            return x
        def union(x, y):
            px, py = find(x), find(y)
            if px != py:
                parent[px] = py
        pair_distances = {}
        compared = 0
        for i in range(len(media_list)):
            id_i = media_list[i]['id']
            if id_i not in hash_ints:
                # Unparseable hash: count the skipped pairs so progress stays accurate
                compared += len(media_list) - i - 1
                continue
            hi = hash_ints[id_i]
            for j in range(i + 1, len(media_list)):
                id_j = media_list[j]['id']
                compared += 1
                if id_j not in hash_ints:
                    continue
                dist = bin(hi ^ hash_ints[id_j]).count('1')
                if dist <= threshold:
                    union(id_i, id_j)
                    # Store both orientations for easy lookup when building results
                    pair_distances[(id_i, id_j)] = dist
                    pair_distances[(id_j, id_i)] = dist
            # Update progress periodically
            if i % 50 == 0:
                _update_pg_job(job_id, {'compared_pairs': compared})
        # Collect union-find components
        groups: Dict[int, list] = {}
        for m in media_list:
            root = find(m['id'])
            if root not in groups:
                groups[root] = []
            groups[root].append(m)
        duplicate_groups = []
        for group in groups.values():
            if len(group) > 1:
                items = []
                for m in group:
                    # Distances only exist for pairs that actually matched
                    distances_to_others = {}
                    for other in group:
                        if other['id'] != m['id']:
                            key = (m['id'], other['id'])
                            if key in pair_distances:
                                distances_to_others[str(other['id'])] = pair_distances[key]
                    items.append({
                        'media_id': m['id'],
                        'post_id': m['post_id'],
                        'storage_id': m['storage_id'],
                        'filename': m['filename'],
                        'file_type': m['file_type'],
                        'width': m['width'],
                        'height': m['height'],
                        'distances': distances_to_others,
                    })
                duplicate_groups.append({'items': items})
        results_data = {
            'scan_results': duplicate_groups,
            'total_scanned': len(media_list),
            'total_groups': len(duplicate_groups),
        }
        _update_pg_job(job_id, {
            'status': 'completed',
            'completed_at': datetime.now().isoformat(),
            **results_data,
        })
    except Exception as e:
        logger.error(f"[ScanDuplicates] Background scan failed: {e}", module="PrivateGallery")
        _update_pg_job(job_id, {
            'status': 'completed',
            'scan_results': [],
            'total_scanned': 0,
            'total_groups': 0,
            'completed_at': datetime.now().isoformat(),
            'message': f'Scan failed: {str(e)}'
        })
def _scan_all_duplicates_background(job_id: str):
    """Background task to scan for perceptual duplicates across ALL persons' media.

    Runs in a daemon thread; progress and results are reported through the
    private-gallery job store via _update_pg_job. Comparisons happen only
    WITHIN each person's media — media of different persons is never compared.

    Phases (surfaced in the job's ``current_phase`` field):
      1. ``hashing``   — backfill missing perceptual hashes by decrypting each
                         file to a temp path and hashing it.
      2. ``comparing`` — per-person pairwise hamming distance; union-find
                         merges items within the threshold into groups.
    """
    db = _get_db()
    crypto = _get_crypto()
    config = _get_config(db)
    storage_path = Path(config.get('storage_path', '/opt/immich/private'))
    data_path = storage_path / 'data'
    try:
        # Build a person_id -> decrypted display name lookup for result labels
        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute('SELECT id, encrypted_name FROM private_media_persons')
            person_rows = cursor.fetchall()
        person_names = {}
        for p in person_rows:
            try:
                person_names[p['id']] = crypto.decrypt_field(p['encrypted_name']) or f"Person {p['id']}"
            except Exception:
                # Undecryptable name: fall back to a generic label
                person_names[p['id']] = f"Person {p['id']}"
        # Fetch ALL image/video media whose post has a person assigned
        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute('''
                SELECT m.id, m.post_id, m.storage_id, m.encrypted_filename, m.file_type,
                       m.perceptual_hash, m.width, m.height, post.person_id
                FROM private_media m
                JOIN private_media_posts post ON m.post_id = post.id
                WHERE post.person_id IS NOT NULL
                AND m.file_type IN ('image', 'video')
            ''')
            all_media = cursor.fetchall()
        total = len(all_media)
        _update_pg_job(job_id, {'total_files': total, 'current_phase': 'hashing'})
        if total < 2:
            _update_pg_job(job_id, {
                'status': 'completed',
                'scan_results': [],
                'total_scanned': total,
                'total_groups': 0,
                'completed_at': datetime.now().isoformat(),
                'message': f'Not enough media to compare ({total} found)'
            })
            return
        # Phase 1: Backfill perceptual hashes for rows that lack one
        media_list = []
        for idx, row in enumerate(all_media):
            phash = row['perceptual_hash']
            media_id = row['id']
            if not phash:
                encrypted_file = data_path / f"{row['storage_id']}.enc"
                if encrypted_file.exists():
                    try:
                        temp_dir = Path(tempfile.gettempdir())
                        orig_name = crypto.decrypt_field(row['encrypted_filename']) or ''
                        # Preserve a plausible extension so the hasher picks the right decoder
                        ext = Path(orig_name).suffix or ('.jpg' if row['file_type'] == 'image' else '.mp4' if row['file_type'] == 'video' else '')
                        temp_file = temp_dir / f"pg_phash_{row['storage_id']}{ext}"
                        crypto.decrypt_file(encrypted_file, temp_file)
                        phash = _compute_perceptual_hash(temp_file)
                        temp_file.unlink(missing_ok=True)
                        if phash:
                            # Persist the hash so future scans skip the decrypt step
                            with db.get_connection(for_write=True) as conn:
                                cursor = conn.cursor()
                                cursor.execute('UPDATE private_media SET perceptual_hash = ? WHERE id = ?', (phash, media_id))
                                conn.commit()
                    except Exception:
                        # Best effort: one undecodable file must not abort the scan
                        pass
            if phash:
                media_list.append({
                    'id': media_id,
                    'post_id': row['post_id'],
                    'person_id': row['person_id'],
                    'storage_id': row['storage_id'],
                    'filename': crypto.decrypt_field(row['encrypted_filename']),
                    'file_type': row['file_type'],
                    'perceptual_hash': phash,
                    'width': row['width'],
                    'height': row['height'],
                })
            _update_pg_job(job_id, {
                'processed_files': idx + 1,
                'success_count': len(media_list),
            })
        # Phase 2: Compare within each person group only
        person_groups: Dict[int, list] = {}
        for m in media_list:
            pid = m['person_id']
            if pid not in person_groups:
                person_groups[pid] = []
            person_groups[pid].append(m)
        # Total pair count across all person groups (for progress reporting)
        total_pairs = 0
        for group in person_groups.values():
            n = len(group)
            total_pairs += n * (n - 1) // 2
        _update_pg_job(job_id, {'current_phase': 'comparing', 'total_pairs': total_pairs, 'compared_pairs': 0})
        threshold = 12  # max hamming distance treated as a duplicate
        # Union-find (path halving) over all media ids
        parent = {m['id']: m['id'] for m in media_list}
        def find(x):
            while parent[x] != x:
                parent[x] = parent[parent[x]]
                x = parent[x]
            return x
        def union(x, y):
            px, py = find(x), find(y)
            if px != py:
                parent[px] = py
        pair_distances = {}
        compared = 0
        # Pre-parse hex hashes to ints so each pair is one XOR + popcount
        hash_ints = {}
        for m in media_list:
            try:
                hash_ints[m['id']] = int(m['perceptual_hash'], 16)
            except (ValueError, TypeError):
                pass
        for pid, group in person_groups.items():
            pname = person_names.get(pid, f"Person {pid}")
            _update_pg_job(job_id, {'current_person': pname})
            for i in range(len(group)):
                id_i = group[i]['id']
                if id_i not in hash_ints:
                    # Unparseable hash: count skipped pairs so progress stays accurate
                    compared += len(group) - i - 1
                    continue
                hi = hash_ints[id_i]
                for j in range(i + 1, len(group)):
                    id_j = group[j]['id']
                    compared += 1
                    if id_j not in hash_ints:
                        continue
                    dist = bin(hi ^ hash_ints[id_j]).count('1')
                    if dist <= threshold:
                        union(id_i, id_j)
                        # Store both orientations for easy lookup when building results
                        pair_distances[(id_i, id_j)] = dist
                        pair_distances[(id_j, id_i)] = dist
                if i % 50 == 0:
                    _update_pg_job(job_id, {'compared_pairs': compared})
        # Collect union-find components
        groups: Dict[int, list] = {}
        for m in media_list:
            root = find(m['id'])
            if root not in groups:
                groups[root] = []
            groups[root].append(m)
        duplicate_groups = []
        for group in groups.values():
            if len(group) > 1:
                # All items in a group share the same person_id (we only compare within person)
                pid = group[0]['person_id']
                pname = person_names.get(pid, f"Person {pid}")
                items = []
                for m in group:
                    # Distances only exist for pairs that actually matched
                    distances_to_others = {}
                    for other in group:
                        if other['id'] != m['id']:
                            key = (m['id'], other['id'])
                            if key in pair_distances:
                                distances_to_others[str(other['id'])] = pair_distances[key]
                    items.append({
                        'media_id': m['id'],
                        'post_id': m['post_id'],
                        'person_id': m['person_id'],
                        'person_name': pname,
                        'storage_id': m['storage_id'],
                        'filename': m['filename'],
                        'file_type': m['file_type'],
                        'width': m['width'],
                        'height': m['height'],
                        'distances': distances_to_others,
                    })
                duplicate_groups.append({'items': items, 'person_name': pname})
        results_data = {
            'scan_results': duplicate_groups,
            'total_scanned': len(media_list),
            'total_groups': len(duplicate_groups),
        }
        _update_pg_job(job_id, {
            'status': 'completed',
            'completed_at': datetime.now().isoformat(),
            **results_data,
        })
    except Exception as e:
        logger.error(f"[ScanAllDuplicates] Background scan failed: {e}", module="PrivateGallery")
        _update_pg_job(job_id, {
            'status': 'completed',
            'scan_results': [],
            'total_scanned': 0,
            'total_groups': 0,
            'completed_at': datetime.now().isoformat(),
            'message': f'Scan failed: {str(e)}'
        })
@router.post("/posts/{post_id}/scan-duplicates")
@limiter.limit("5/minute")
@handle_exceptions
async def scan_duplicates_for_person(
    request: Request,
    post_id: int,
    background_tasks: BackgroundTasks,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Start a background scan for perceptual duplicates across this post's person's media.

    Returns {"job_id": ...} to poll for progress, or job_id=None when the
    post has no person assigned. Raises NotFoundError for an unknown post.
    """
    db = _get_db()
    # Resolve the post's person — the scan covers all of that person's media
    with db.get_connection() as conn:
        cursor = conn.cursor()
        cursor.execute('SELECT person_id FROM private_media_posts WHERE id = ?', (post_id,))
        post_row = cursor.fetchone()
        if not post_row:
            raise NotFoundError(f"Post {post_id} not found")
        person_id = post_row['person_id']
    if not person_id:
        return {"job_id": None, "message": "Post has no person assigned"}
    job_id = f"scan-{post_id}-{uuid.uuid4().hex[:8]}"
    _create_pg_job(job_id, 0, 'scan-duplicates')
    _update_pg_job(job_id, {'person_id': person_id, 'current_phase': 'starting'})
    # threading is imported at module level; run the scan in a daemon thread
    thread = threading.Thread(target=_scan_duplicates_background, args=(job_id, post_id, person_id), daemon=True)
    thread.start()
    return {"job_id": job_id}
@router.post("/scan-all-duplicates")
@limiter.limit("2/minute")
@handle_exceptions
async def scan_all_duplicates(
    request: Request,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Start a background scan for perceptual duplicates across ALL persons' media.

    Returns {"job_id": ...} to poll for progress. The background worker
    obtains its own DB handle, so no connection is opened here.
    """
    job_id = f"scan-all-{uuid.uuid4().hex[:8]}"
    _create_pg_job(job_id, 0, 'scan-all-duplicates')
    _update_pg_job(job_id, {'current_phase': 'starting'})
    # threading is imported at module level; run the scan in a daemon thread
    thread = threading.Thread(target=_scan_all_duplicates_background, args=(job_id,), daemon=True)
    thread.start()
    return {"job_id": job_id}
@router.post("/posts/cleanup-empty")
@limiter.limit("30/minute")
@handle_exceptions
async def cleanup_empty_posts(
    request: Request,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Delete posts that have no media and no description.

    Body: {"post_ids": [...]} — only the listed posts are considered.
    Posts tagged 'reddit' skip the description check (deleted even when a
    description exists). Per-post failures are logged and skipped.
    Returns {"deleted_post_ids": [...]}.
    """
    body = await request.json()
    post_ids = body.get('post_ids', [])
    if not post_ids or not isinstance(post_ids, list):
        return {"deleted_post_ids": []}
    db = _get_db()
    crypto = _get_crypto()  # hoisted out of the loop — loop-invariant
    deleted = []
    for pid in post_ids:
        try:
            with db.get_connection() as conn:
                cursor = conn.cursor()
                # A post with any media is never considered empty
                cursor.execute('SELECT COUNT(*) as cnt FROM private_media WHERE post_id = ?', (pid,))
                media_count = cursor.fetchone()['cnt']
                if media_count > 0:
                    continue
                cursor.execute('SELECT id FROM private_media_posts WHERE id = ?', (pid,))
                post_row = cursor.fetchone()
                if not post_row:
                    continue
                # Check if post is tagged 'reddit' (skip description check for reddit posts)
                is_reddit = False
                cursor.execute('''
                    SELECT t.encrypted_name FROM private_media_post_tags pt
                    JOIN private_gallery_tags t ON t.id = pt.tag_id
                    WHERE pt.post_id = ?
                ''', (pid,))
                for tag_row in cursor.fetchall():
                    try:
                        tag_name = crypto.decrypt_field(tag_row['encrypted_name'])
                        if tag_name and tag_name.lower() == 'reddit':
                            is_reddit = True
                            break
                    except Exception:
                        # Undecryptable tag name: ignore and keep checking
                        continue
                if not is_reddit:
                    # For non-reddit posts, only delete if no description
                    cursor.execute('SELECT encrypted_description FROM private_media_posts WHERE id = ?', (pid,))
                    row = cursor.fetchone()
                    if row and row['encrypted_description']:
                        desc = crypto.decrypt_field(row['encrypted_description'])
                        if desc and desc.strip():
                            continue
            # Post has no media (and no blocking description) — delete it
            with db.get_connection(for_write=True) as conn:
                cursor = conn.cursor()
                cursor.execute('DELETE FROM private_media_post_tags WHERE post_id = ?', (pid,))
                cursor.execute('DELETE FROM private_media_posts WHERE id = ?', (pid,))
                conn.commit()
            deleted.append(pid)
        except Exception as e:
            logger.error(f"[CleanupEmpty] Failed to check/delete post {pid}: {e}", module="PrivateGallery")
    if deleted:
        _invalidate_posts_cache()
    return {"deleted_post_ids": deleted}
@router.post("/posts/{post_id}/attach-media")
@limiter.limit("30/minute")
@handle_exceptions
async def attach_media_to_post(
    request: Request,
    post_id: int,
    body: AttachMediaRequest,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Attach existing media items to a post by copying them (originals remain on their source post).

    Each copy gets a fresh storage_id. The encrypted data and thumbnail files
    are hard-linked where possible (shared disk space, independent deletion),
    falling back to a real copy when linking fails (e.g. cross-device).
    Unknown media ids and media already on the target post are skipped.
    Returns {"attached": N}.
    """
    db = _get_db()
    config = _get_config(db)
    storage_path = Path(config.get('storage_path', '/opt/immich/private'))
    data_path = storage_path / 'data'
    thumbs_path = storage_path / 'thumbs'
    with db.get_connection(for_write=True) as conn:
        cursor = conn.cursor()
        # Verify target post exists
        cursor.execute('SELECT id FROM private_media_posts WHERE id = ?', (post_id,))
        if not cursor.fetchone():
            raise NotFoundError(f"Post {post_id} not found")
        attached = 0
        for media_id in body.media_ids:
            # Get full media record to copy
            cursor.execute('''
                SELECT storage_id, encrypted_filename, encrypted_description,
                       file_hash, file_size, file_type, mime_type,
                       width, height, duration, person_id,
                       encrypted_media_date, source_type, post_id
                FROM private_media WHERE id = ?
            ''', (media_id,))
            row = cursor.fetchone()
            if not row:
                continue
            # Skip if already on target post
            if row['post_id'] == post_id:
                continue
            new_storage_id = str(uuid.uuid4())
            # Hard-link the encrypted data file; shutil (module-level import)
            # copy2 is the cross-device fallback
            src_data = data_path / f"{row['storage_id']}.enc"
            dst_data = data_path / f"{new_storage_id}.enc"
            if src_data.exists():
                try:
                    os.link(str(src_data), str(dst_data))
                except OSError:
                    shutil.copy2(str(src_data), str(dst_data))
            # Same treatment for the encrypted thumbnail
            src_thumb = thumbs_path / f"{row['storage_id']}.enc"
            dst_thumb = thumbs_path / f"{new_storage_id}.enc"
            if src_thumb.exists():
                try:
                    os.link(str(src_thumb), str(dst_thumb))
                except OSError:
                    shutil.copy2(str(src_thumb), str(dst_thumb))
            # Insert new media record pointing to the target post;
            # original_post_id records where the media came from
            cursor.execute('''
                INSERT INTO private_media (
                    post_id, storage_id, encrypted_filename, encrypted_description,
                    file_hash, file_size, file_type, mime_type,
                    width, height, duration, person_id,
                    encrypted_media_date, source_type, original_post_id
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            ''', (
                post_id, new_storage_id, row['encrypted_filename'], row['encrypted_description'],
                row['file_hash'], row['file_size'], row['file_type'], row['mime_type'],
                row['width'], row['height'], row['duration'], row['person_id'],
                row['encrypted_media_date'], row['source_type'], row['post_id']
            ))
            attached += 1
        conn.commit()
    return {"attached": attached}
@router.get("/media/{media_id}")
@limiter.limit("120/minute")
@handle_exceptions
async def get_media_item(
request: Request,
media_id: int,
current_user: Dict = Depends(get_current_user),
session: Dict = Depends(_verify_gallery_token)
):
"""Get a single media item."""
db = _get_db()
crypto = _get_crypto()
# Ensure private_gallery_tags table exists and is migrated
_ensure_private_gallery_tags_migrated(db, crypto)
with db.get_connection() as conn:
cursor = conn.cursor()
cursor.execute('''
SELECT m.*, p.encrypted_name as person_encrypted_name,
p.relationship_id, r.encrypted_name as rel_encrypted_name, r.color as rel_color
FROM private_media m
LEFT JOIN private_media_persons p ON m.person_id = p.id
LEFT JOIN private_media_relationships r ON p.relationship_id = r.id
WHERE m.id = ?
''', (media_id,))
row = cursor.fetchone()
if not row:
raise NotFoundError(f"Media {media_id} not found")
item = dict(row)
# Decrypt fields
item['filename'] = crypto.decrypt_field(item['encrypted_filename'])
item['description'] = crypto.decrypt_field(item['encrypted_description']) if item['encrypted_description'] else None
item['media_date'] = crypto.decrypt_field(item['encrypted_media_date'])
item['source_path'] = crypto.decrypt_field(item['encrypted_source_path']) if item['encrypted_source_path'] else None
del item['encrypted_filename']
del item['encrypted_description']
del item['encrypted_media_date']
del item['encrypted_source_path']
# Add person info
if item['person_encrypted_name']:
item['person'] = {
'id': item['person_id'],
'name': crypto.decrypt_field(item['person_encrypted_name']),
'relationship': {
'id': item['relationship_id'],
'name': crypto.decrypt_field(item['rel_encrypted_name']) if item['rel_encrypted_name'] else None,
'color': item['rel_color']
}
}
else:
item['person'] = None
del item['person_encrypted_name']
del item['rel_encrypted_name']
del item['rel_color']
# Get tags
cursor.execute('''
SELECT t.id, t.encrypted_name, t.color
FROM private_media_tags mt
JOIN private_gallery_tags t ON mt.tag_id = t.id
WHERE mt.media_id = ?
''', (media_id,))
item['tags'] = [{
'id': r['id'],
'name': crypto.decrypt_field(r['encrypted_name']),
'color': r['color']
} for r in cursor.fetchall()]
# Add URLs
item['thumbnail_url'] = f"/api/private-gallery/thumbnail/{item['id']}"
item['stream_url'] = f"/api/private-gallery/stream/{item['id']}"
item['file_url'] = f"/api/private-gallery/file/{item['id']}"
return item
@router.put("/media/{media_id}")
@limiter.limit("60/minute")
@handle_exceptions
async def update_media(
request: Request,
media_id: int,
body: MediaUpdateRequest,
current_user: Dict = Depends(get_current_user),
session: Dict = Depends(_verify_gallery_token)
):
"""Update media metadata."""
db = _get_db()
crypto = _get_crypto()
updates = []
params = []
if body.description is not None:
updates.append("encrypted_description = ?")
params.append(crypto.encrypt_field(body.description) if body.description else None)
if body.person_id is not None:
updates.append("person_id = ?")
params.append(body.person_id if body.person_id > 0 else None)
if body.media_date is not None:
updates.append("encrypted_media_date = ?")
params.append(crypto.encrypt_field(body.media_date))
if updates:
updates.append("updated_at = CURRENT_TIMESTAMP")
params.append(media_id)
with db.get_connection(for_write=True) as conn:
cursor = conn.cursor()
cursor.execute(f'''
UPDATE private_media
SET {", ".join(updates)}
WHERE id = ?
''', params)
if cursor.rowcount == 0:
raise NotFoundError(f"Media {media_id} not found")
conn.commit()
# Update tags if provided
if body.tag_ids is not None:
with db.get_connection(for_write=True) as conn:
cursor = conn.cursor()
# Remove existing tags
cursor.execute('DELETE FROM private_media_tags WHERE media_id = ?', (media_id,))
# Add new tags
for tag_id in body.tag_ids:
cursor.execute('''
INSERT INTO private_media_tags (media_id, tag_id)
VALUES (?, ?)
''', (media_id, tag_id))
conn.commit()
return message_response("Media updated")
def _cleanup_empty_reddit_posts(db, crypto, storage_path: Path):
    """Delete posts that have no media attachments and are tagged 'reddit'.

    Args:
        db: Database handle providing get_connection().
        crypto: Field-level crypto used to decrypt stored tag names.
        storage_path: Gallery storage root (currently unused; kept for
            call-site compatibility).

    Returns:
        Number of posts deleted; 0 on any failure — this is best-effort
        cleanup and never raises.
    """
    try:
        # Locate the 'reddit' tag by decrypting each tag name (names are stored encrypted,
        # so there is no way to match in SQL)
        reddit_tag_id = None
        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute("SELECT id, encrypted_name FROM private_gallery_tags")
            for row in cursor.fetchall():
                try:
                    name = crypto.decrypt_field(row['encrypted_name'])
                    if name and name.lower() == 'reddit':
                        reddit_tag_id = row['id']
                        break
                except Exception:
                    continue
        if reddit_tag_id is None:
            return 0
        # Find posts tagged 'reddit' that have zero media
        with db.get_connection() as conn:
            cursor = conn.cursor()
            cursor.execute('''
                SELECT p.id FROM private_media_posts p
                JOIN private_media_post_tags pt ON pt.post_id = p.id
                WHERE pt.tag_id = ?
                AND NOT EXISTS (SELECT 1 FROM private_media m WHERE m.post_id = p.id)
            ''', (reddit_tag_id,))
            empty_posts = [row['id'] for row in cursor.fetchall()]
        if not empty_posts:
            return 0
        # Delete each empty post and its tag links in one transaction
        with db.get_connection(for_write=True) as conn:
            cursor = conn.cursor()
            for post_id in empty_posts:
                cursor.execute('DELETE FROM private_media_post_tags WHERE post_id = ?', (post_id,))
                cursor.execute('DELETE FROM private_media_posts WHERE id = ?', (post_id,))
            conn.commit()
        _invalidate_posts_cache()
        logger.info(f"Cleaned up {len(empty_posts)} empty reddit-tagged posts", module="PrivateGallery")
        return len(empty_posts)
    except Exception as e:
        logger.error(f"Failed to cleanup empty reddit posts: {e}", module="PrivateGallery")
        return 0
@router.delete("/media/{media_id}")
@limiter.limit("60/minute")
@handle_exceptions
async def delete_media(
request: Request,
media_id: int,
current_user: Dict = Depends(get_current_user),
session: Dict = Depends(_verify_gallery_token)
):
"""Delete a media item and its encrypted files."""
db = _get_db()
config = _get_config(db)
storage_path = Path(config.get('storage_path', '/opt/immich/private'))
with db.get_connection(for_write=True) as conn:
cursor = conn.cursor()
# Get storage_id
cursor.execute('SELECT storage_id FROM private_media WHERE id = ?', (media_id,))
row = cursor.fetchone()
if not row:
raise NotFoundError(f"Media {media_id} not found")
storage_id = row['storage_id']
# Delete files
data_file = storage_path / 'data' / f"{storage_id}.enc"
thumb_file = storage_path / 'thumbs' / f"{storage_id}.enc"
if data_file.exists():
data_file.unlink()
if thumb_file.exists():
thumb_file.unlink()
# Delete database record (tags cascade)
cursor.execute('DELETE FROM private_media WHERE id = ?', (media_id,))
conn.commit()
_thumb_cache_invalidate(storage_id)
_invalidate_posts_cache()
# Clean up empty reddit-tagged posts
crypto = _get_crypto()
_cleanup_empty_reddit_posts(db, crypto, storage_path)
return message_response("Media deleted")
@router.post("/media/batch-delete")
@limiter.limit("120/minute")
@handle_exceptions
async def batch_delete_media(
request: Request,
body: BatchDeleteRequest,
current_user: Dict = Depends(get_current_user),
session: Dict = Depends(_verify_gallery_token)
):
"""Delete multiple media items."""
db = _get_db()
config = _get_config(db)
storage_path = Path(config.get('storage_path', '/opt/immich/private'))
deleted = 0
with db.get_connection(for_write=True) as conn:
cursor = conn.cursor()
for media_id in body.media_ids:
cursor.execute('SELECT storage_id FROM private_media WHERE id = ?', (media_id,))
row = cursor.fetchone()
if row:
storage_id = row['storage_id']
# Delete files (skip exists() — hangs on I/O error disks)
for path in [
storage_path / 'data' / f"{storage_id}.enc",
storage_path / 'thumbs' / f"{storage_id}.enc",
]:
try:
path.unlink()
except FileNotFoundError:
pass
except OSError as e:
logger.warning(f"Could not delete {path}: {e}")
cursor.execute('DELETE FROM private_media WHERE id = ?', (media_id,))
_thumb_cache_invalidate(storage_id)
deleted += 1
conn.commit()
_invalidate_posts_cache()
# Clean up empty reddit-tagged posts
crypto = _get_crypto()
cleaned = _cleanup_empty_reddit_posts(db, crypto, storage_path)
return {"deleted": deleted, "empty_reddit_posts_cleaned": cleaned}
# ============================================================================
# UPLOAD/COPY ENDPOINTS
# ============================================================================
def _upload_to_gallery_background(job_id, file_infos, post_id, person_id, tag_id_list, final_date):
    """Background task to process uploaded files into the private gallery.

    Per-file pipeline: hash-based duplicate check (scoped to *person_id* when
    set), minimum-resolution filter for images, date extraction
    (EXIF -> filename -> file mtime -> *final_date*), thumbnail generation,
    encryption of the file and thumbnail into the storage tree, then a DB
    insert linked to *post_id*. Progress and per-file outcomes are streamed
    to the frontend via _update_pg_job. If nothing imports successfully, the
    pre-created (now empty) post is deleted.

    Args:
        job_id: Progress-job id created by the upload endpoint.
        file_infos: List of dicts with 'temp_path' and 'original_filename'.
        post_id: Post row grouping all imported media (may be deleted if empty).
        person_id: Person to attach media to, or None.
        tag_id_list: Tag ids applied to every imported media row.
        final_date: Fallback ISO date when no better date can be extracted.
    """
    db = _get_db()
    crypto = _get_crypto()
    config = _get_config(db)
    storage_path = Path(config.get('storage_path', '/opt/immich/private'))
    data_path = storage_path / 'data'
    thumbs_path = storage_path / 'thumbs'
    data_path.mkdir(parents=True, exist_ok=True)
    thumbs_path.mkdir(parents=True, exist_ok=True)
    # Per-file outcome dicts surfaced to the frontend through the job record
    results = []
    uploaded_media_ids = []
    success_count = 0
    failed_count = 0
    duplicate_count = 0
    skipped_count = 0
    for idx, fi in enumerate(file_infos):
        temp_file = Path(fi['temp_path'])
        original_filename = fi['original_filename']
        _update_pg_job(job_id, {
            'current_file': original_filename,
            'processed_files': idx
        })
        try:
            # Calculate hash for duplicate detection
            file_hash = _get_file_hash(temp_file)
            # Check for duplicates (scoped by person)
            with db.get_connection() as conn:
                cursor = conn.cursor()
                if person_id:
                    cursor.execute('SELECT id, person_id FROM private_media WHERE file_hash = ? AND person_id = ?', (file_hash, person_id))
                else:
                    cursor.execute('SELECT id, person_id FROM private_media WHERE file_hash = ?', (file_hash,))
                existing = cursor.fetchone()
            if existing:
                # Duplicate: discard the temp file and report the existing row id
                temp_file.unlink(missing_ok=True)
                duplicate_count += 1
                results.append({
                    'filename': original_filename,
                    'status': 'duplicate',
                    'existing_id': existing['id']
                })
                _update_pg_job(job_id, {
                    'results': list(results),
                    'duplicate_count': duplicate_count,
                    'processed_files': idx + 1
                })
                continue
            # Get file info
            file_info = _get_file_info(temp_file)
            file_size = temp_file.stat().st_size
            # Skip low-resolution images
            min_res = int(config.get('min_import_resolution', 0) or 0)
            if min_res > 0 and file_info['file_type'] == 'image':
                w = file_info.get('width') or 0
                h = file_info.get('height') or 0
                if w < min_res or h < min_res:
                    temp_file.unlink(missing_ok=True)
                    skipped_count += 1
                    results.append({
                        'filename': original_filename,
                        'status': 'skipped',
                        'reason': f'Low resolution ({w}x{h}, min {min_res}px)'
                    })
                    _update_pg_job(job_id, {
                        'results': list(results),
                        'skipped_count': skipped_count,
                        'processed_files': idx + 1
                    })
                    continue
            # Compute perceptual hash before encryption
            perceptual_hash = _compute_perceptual_hash(temp_file)
            # Try to extract date from EXIF or filename
            item_date = _extract_date_from_exif(temp_file)
            if not item_date:
                item_date = _extract_date_from_filename(original_filename)
            if not item_date:
                # Fall back to the file's modification time
                try:
                    from datetime import datetime as dt
                    mtime = temp_file.stat().st_mtime
                    item_date = dt.fromtimestamp(mtime).strftime('%Y-%m-%dT%H:%M:%S')
                except Exception:
                    pass
            if not item_date:
                item_date = final_date
            # Generate storage ID
            storage_id = str(uuid.uuid4())
            # Generate thumbnail first (before encrypting original)
            temp_dir = Path(tempfile.gettempdir())
            temp_thumb = temp_dir / f"pg_thumb_{storage_id}.jpg"
            _generate_thumbnail(temp_file, temp_thumb, file_info['file_type'])
            # Encrypt the original file
            encrypted_file = data_path / f"{storage_id}.enc"
            if not crypto.encrypt_file(temp_file, encrypted_file):
                raise Exception("Encryption failed")
            # Encrypt thumbnail if it exists
            if temp_thumb.exists():
                encrypted_thumb = thumbs_path / f"{storage_id}.enc"
                crypto.encrypt_file(temp_thumb, encrypted_thumb)
                temp_thumb.unlink()
            # Clean up temp file
            temp_file.unlink(missing_ok=True)
            # Insert into database with post_id reference
            with db.get_connection(for_write=True) as conn:
                cursor = conn.cursor()
                cursor.execute('''
                    INSERT INTO private_media (
                        post_id, storage_id, encrypted_filename, encrypted_description,
                        file_hash, file_size, file_type, mime_type,
                        width, height, duration, person_id,
                        encrypted_media_date, source_type, perceptual_hash
                    ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                ''', (
                    post_id,
                    storage_id,
                    crypto.encrypt_field(original_filename),
                    None,
                    file_hash,
                    file_size,
                    file_info['file_type'],
                    file_info['mime_type'],
                    file_info['width'],
                    file_info['height'],
                    file_info['duration'],
                    person_id,
                    crypto.encrypt_field(item_date),
                    'upload',
                    perceptual_hash
                ))
                media_id = cursor.lastrowid
                uploaded_media_ids.append(media_id)
                for tag_id in tag_id_list:
                    cursor.execute('''
                        INSERT OR IGNORE INTO private_media_tags (media_id, tag_id)
                        VALUES (?, ?)
                    ''', (media_id, tag_id))
                conn.commit()
            success_count += 1
            results.append({
                'id': media_id,
                'filename': original_filename,
                'status': 'created',
                'media_date': item_date
            })
        except Exception as e:
            # Any failure is recorded per-file; the batch keeps going
            logger.error(f"Upload failed for {original_filename}: {e}")
            failed_count += 1
            temp_file.unlink(missing_ok=True)
            results.append({
                'filename': original_filename,
                'status': 'failed',
                'error': str(e)
            })
        _update_pg_job(job_id, {
            'results': list(results),
            'success_count': success_count,
            'failed_count': failed_count,
            'duplicate_count': duplicate_count,
            'skipped_count': skipped_count,
            'processed_files': idx + 1
        })
    # If no files were successfully uploaded, delete the empty post
    if not uploaded_media_ids and post_id:
        with db.get_connection(for_write=True) as conn:
            cursor = conn.cursor()
            cursor.execute('DELETE FROM private_media_post_tags WHERE post_id = ?', (post_id,))
            cursor.execute('DELETE FROM private_media_posts WHERE id = ?', (post_id,))
            conn.commit()
        _invalidate_posts_cache()
    _update_pg_job(job_id, {
        'status': 'completed',
        'completed_at': datetime.now().isoformat(),
        'current_file': None,
        'post_id': post_id if uploaded_media_ids else None
    })
@router.post("/upload")
@limiter.limit("30/minute")
@handle_exceptions
async def upload_media(
request: Request,
background_tasks: BackgroundTasks,
files: List[UploadFile] = File(default=[]),
person_id: int = Form(...),
tag_ids: Optional[str] = Form(None),
media_date: Optional[str] = Form(None),
description: Optional[str] = Form(None),
current_user: Dict = Depends(get_current_user),
session: Dict = Depends(_verify_gallery_token)
):
"""Upload media files with encryption. All files are grouped into a single post."""
db = _get_db()
crypto = _get_crypto()
# Parse tag IDs (optional)
tag_id_list = []
if tag_ids:
tag_id_list = [int(t) for t in tag_ids.split(',') if t.strip().isdigit()]
# Determine the media date for the post
final_date = media_date
if not final_date:
final_date = date.today().isoformat()
# Save all uploaded files to temp before responding (UploadFile objects become invalid after response)
file_infos = []
temp_dir = Path(tempfile.gettempdir())
for file in files:
temp_path = temp_dir / f"pg_upload_{uuid.uuid4()}{Path(file.filename).suffix}"
with open(temp_path, 'wb') as f:
shutil.copyfileobj(file.file, f)
file_infos.append({
'temp_path': str(temp_path),
'original_filename': file.filename,
'file_size': temp_path.stat().st_size
})
# Create the post to group all media items
post_id = None
with db.get_connection(for_write=True) as conn:
cursor = conn.cursor()
cursor.execute('''
INSERT INTO private_media_posts (
person_id, encrypted_description, encrypted_media_date
) VALUES (?, ?, ?)
''', (
person_id,
crypto.encrypt_field(description) if description else None,
crypto.encrypt_field(final_date)
))
post_id = cursor.lastrowid
for tag_id in tag_id_list:
cursor.execute('''
INSERT OR IGNORE INTO private_media_post_tags (post_id, tag_id)
VALUES (?, ?)
''', (post_id, tag_id))
conn.commit()
# Create job and launch background task
job_id = f"pg_upload_{uuid.uuid4().hex[:12]}"
_create_pg_job(job_id, len(file_infos), 'upload')
background_tasks.add_task(
_upload_to_gallery_background,
job_id,
file_infos,
post_id,
person_id,
tag_id_list,
final_date
)
return {
"job_id": job_id,
"post_id": post_id,
"status": "processing",
"total_files": len(file_infos)
}
def _is_forum_thread_url(url):
"""Quick check if URL matches a Discourse thread pattern (no network request)."""
from urllib.parse import urlparse
path = urlparse(url).path
return bool(re.match(r'/t/(?:[^/]+/)?(\d+)(?:/\d+)?$', path))
def _resolve_forum_thread_urls(url):
    """If URL is a Discourse forum thread, scrape it and return list of image URLs. Otherwise return None.

    Returns:
        dict with 'urls' (de-duplicated lightbox image hrefs, in post order)
        and 'title' (topic title) when images are found; None when the URL
        is not a Discourse thread, the fetch fails, or no images exist.
    """
    from urllib.parse import urlparse
    parsed = urlparse(url)
    path = parsed.path
    # Detect Discourse thread URL pattern: /t/slug/topic_id or /t/topic_id
    discourse_match = re.match(r'/t/(?:[^/]+/)?(\d+)(?:/\d+)?$', path)
    if not discourse_match:
        return None
    topic_id = discourse_match.group(1)
    base_url = f"{parsed.scheme}://{parsed.netloc}"
    try:
        from curl_cffi import requests as cf_requests
        from bs4 import BeautifulSoup
        # Chrome impersonation via curl_cffi — presumably to pass bot checks
        # on protected Discourse hosts; plain requests is not used here
        session = cf_requests.Session(impersonate='chrome')
        # Fetch topic JSON
        r = session.get(f"{base_url}/t/{topic_id}.json", timeout=30)
        if r.status_code != 200:
            logger.warning(f"Forum thread fetch failed: {r.status_code}")
            return None
        data = r.json()
        stream = data.get('post_stream', {})
        posts = stream.get('posts', [])
        all_ids = stream.get('stream', [])
        loaded_ids = {p['id'] for p in posts}
        # Fetch remaining posts if needed (Discourse returns ~20 per page)
        remaining = [pid for pid in all_ids if pid not in loaded_ids]
        if remaining:
            for i in range(0, len(remaining), 20):
                chunk = remaining[i:i+20]
                params = '&'.join(f'post_ids[]={pid}' for pid in chunk)
                r2 = session.get(f"{base_url}/t/{topic_id}/posts.json?{params}", timeout=30)
                if r2.status_code == 200:
                    extra = r2.json().get('post_stream', {}).get('posts', [])
                    posts.extend(extra)
        # Extract full-size image URLs from lightbox links
        image_urls = []
        seen = set()
        for post in posts:
            # 'cooked' is the rendered HTML body of the post
            cooked = post.get('cooked', '')
            soup = BeautifulSoup(cooked, 'html.parser')
            for a in soup.find_all('a', class_='lightbox'):
                href = a.get('href', '')
                if href and href not in seen:
                    seen.add(href)
                    image_urls.append(href)
        title = data.get('title', '')
        logger.info(f"Forum thread {topic_id}: found {len(image_urls)} media URLs from {len(posts)} posts")
        return {'urls': image_urls, 'title': title} if image_urls else None
    except Exception as e:
        # Best-effort scraper: any failure degrades to "not resolvable"
        logger.error(f"Forum thread scrape failed: {e}")
        return None
def _is_erome_album_url(url):
"""Quick check if URL matches an erome.com album pattern."""
from urllib.parse import urlparse
parsed = urlparse(url)
return parsed.netloc in ('www.erome.com', 'erome.com') and bool(re.match(r'/a/[A-Za-z0-9]+$', parsed.path))
def _resolve_erome_album_urls(url):
    """If URL is an erome album, scrape it and return dict with media URLs and title. Otherwise return None.

    Returns:
        dict with 'urls' (de-duplicated image/video URLs in page order) and
        'title' (album title, possibly empty) when media is found; None when
        the URL is not an erome album, the fetch fails, or no media exists.
    """
    from urllib.parse import urlparse
    parsed = urlparse(url)
    if parsed.netloc not in ('www.erome.com', 'erome.com'):
        return None
    if not re.match(r'/a/[A-Za-z0-9]+$', parsed.path):
        return None
    try:
        import requests as req_lib
        from bs4 import BeautifulSoup
        # Browser-like UA and erome Referer on the album fetch — presumably
        # needed to avoid being blocked as a bare client; verify if changing
        headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36',
            'Referer': 'https://www.erome.com/'
        }
        r = req_lib.get(url, headers=headers, timeout=30)
        if r.status_code != 200:
            logger.warning(f"Erome album fetch failed: {r.status_code}")
            return None
        soup = BeautifulSoup(r.text, 'html.parser')
        title_el = soup.find('h1', class_='album-title-page')
        title = title_el.text.strip() if title_el else ''
        media_urls = []
        seen = set()
        # Each media item sits in a div.media-group; images carry data-src on
        # div.img, videos carry data-html plus a <source> element
        for mg in soup.find_all('div', class_='media-group'):
            img_div = mg.find('div', class_='img')
            if not img_div:
                continue
            data_src = img_div.get('data-src')
            data_html = img_div.get('data-html')
            if data_src and not data_html:
                # Image
                if data_src not in seen:
                    seen.add(data_src)
                    media_urls.append(data_src)
            elif data_html:
                # Video - get first <source> src
                source = mg.find('source')
                if source and source.get('src') and source['src'] not in seen:
                    seen.add(source['src'])
                    media_urls.append(source['src'])
        logger.info(f"Erome album {parsed.path}: found {len(media_urls)} media URLs")
        return {'urls': media_urls, 'title': title} if media_urls else None
    except Exception as e:
        # Best-effort scraper: any failure degrades to "not resolvable"
        logger.error(f"Erome album scrape failed: {e}")
        return None
_VENV_BIN = '/opt/media-downloader/venv/bin'
_TUBE_SITE_DOMAINS = {
'xhamster.com', 'xhamster.one', 'xhamster.desi', 'xhamster2.com', 'xhamster3.com',
'xvideos.com', 'xvideos2.com', 'xvideos.es',
'motherless.com',
}
def _is_tube_site_url(url):
"""Quick check if URL is from a supported tube/media site."""
from urllib.parse import urlparse
domain = urlparse(url).netloc.lower().replace('www.', '')
return domain in _TUBE_SITE_DOMAINS
def _resolve_tube_site_urls(url):
    """Resolve tube site URL into media URLs using gallery-dl/yt-dlp.

    Tries gallery-dl's JSON dump first; falls back to yt-dlp metadata when
    gallery-dl yields nothing.

    Returns:
        dict with 'urls', 'title', 'use_ytdlp' (True when the content looks
        like video and should be downloaded via yt-dlp), and 'original_url';
        None when neither tool can resolve the URL.
    """
    import subprocess, json
    try:
        # gallery-dl -j returns JSON with [type, url_or_info, metadata]
        result = subprocess.run(
            [f'{_VENV_BIN}/gallery-dl', '-j', url],
            capture_output=True, text=True, timeout=120
        )
        if result.returncode == 0 and result.stdout.strip():
            data = json.loads(result.stdout)
            media_urls = []
            title = ''
            for entry in data:
                # type 3 = downloadable URL entry: [3, url, metadata]
                if isinstance(entry, list) and len(entry) >= 3 and entry[0] == 3:
                    media_urls.append(entry[1])  # direct URL
                    if not title:
                        title = entry[2].get('title', '') or entry[2].get('album', '')
                # type 2 = directory/collection metadata entry: [2, info]
                elif isinstance(entry, list) and len(entry) >= 2 and entry[0] == 2:
                    if not title and isinstance(entry[1], dict):
                        title = entry[1].get('title', '') or entry[1].get('album', '')
            if media_urls:
                # Treat the result as video if any URL entry has a video
                # extension or is explicitly typed as video
                is_video = any(
                    entry[2].get('extension', '') in ('mp4', 'webm', 'mkv')
                    or entry[2].get('type', '') == 'video'
                    for entry in data
                    if isinstance(entry, list) and len(entry) >= 3 and entry[0] == 3
                )
                return {'urls': media_urls, 'title': title, 'use_ytdlp': is_video, 'original_url': url}
    except Exception as e:
        logger.debug(f"gallery-dl resolve failed for {url}: {e}")
    # Fallback: yt-dlp for video metadata
    try:
        result = subprocess.run(
            [f'{_VENV_BIN}/yt-dlp', '--dump-json', '--no-download', '--no-warnings', url],
            capture_output=True, text=True, timeout=60
        )
        if result.returncode == 0 and result.stdout.strip():
            # --dump-json may emit one JSON object per line; use the first
            info = json.loads(result.stdout.strip().split('\n')[0])
            title = info.get('title', '')
            return {'urls': [url], 'title': title, 'use_ytdlp': True, 'original_url': url}
    except Exception as e:
        logger.debug(f"yt-dlp resolve failed for {url}: {e}")
    return None
def _import_urls_to_gallery_background(job_id, urls, post_id, person_id, tag_id_list, final_date, description):
"""Background task to download URLs and import them into the private gallery."""
import requests as req_lib
from modules.paid_content.file_host_downloader import FileHostDownloader
db = _get_db()
crypto = _get_crypto()
config = _get_config(db)
storage_path = Path(config.get('storage_path', '/opt/immich/private'))
data_path = storage_path / 'data'
thumbs_path = storage_path / 'thumbs'
data_path.mkdir(parents=True, exist_ok=True)
thumbs_path.mkdir(parents=True, exist_ok=True)
downloader = FileHostDownloader()
results = []
imported_media_ids = []
success_count = 0
failed_count = 0
duplicate_count = 0
skipped_count = 0
# Deduplicate URLs in the input list
seen_urls = set()
unique_urls = []
for u in urls:
if u not in seen_urls:
seen_urls.add(u)
unique_urls.append(u)
urls = unique_urls
# Resolve page URLs (forum threads, erome albums) into individual media URLs
resolved_urls = []
url_post_ids = {} # maps resolved URL -> post_id for album/thread-specific posts
url_referers = {} # maps resolved URL -> Referer header needed for download
has_resolvable = any(_is_forum_thread_url(u) or _is_erome_album_url(u) or _is_tube_site_url(u) for u in urls)
if has_resolvable:
_update_pg_job(job_id, {'current_phase': 'resolving'})
for u in urls:
# Try each resolver in order
page_result = _resolve_forum_thread_urls(u)
referer = None
if not page_result:
page_result = _resolve_erome_album_urls(u)
if page_result:
referer = 'https://www.erome.com/'
if not page_result and _is_tube_site_url(u):
page_result = _resolve_tube_site_urls(u)
if page_result and page_result.get('use_ytdlp'):
# Mark ytdlp URLs for special download handling
for media_url in page_result['urls']:
url_referers[media_url] = '__ytdlp__'
if page_result:
# Create a separate post for this album/thread
album_title = page_result['title']
album_post_id = None
try:
with db.get_connection(for_write=True) as conn:
cursor = conn.cursor()
cursor.execute('''
INSERT INTO private_media_posts (
person_id, encrypted_description, encrypted_media_date
) VALUES (?, ?, ?)
''', (
person_id,
crypto.encrypt_field(album_title) if album_title else None,
crypto.encrypt_field(final_date)
))
album_post_id = cursor.lastrowid
for tag_id in tag_id_list:
cursor.execute('''
INSERT OR IGNORE INTO private_media_post_tags (post_id, tag_id)
VALUES (?, ?)
''', (album_post_id, tag_id))
conn.commit()
except Exception as e:
logger.error(f"Failed to create post for '{album_title}': {e}")
album_post_id = post_id # fall back to original post
for media_url in page_result['urls']:
url_post_ids[media_url] = album_post_id
if referer:
url_referers[media_url] = referer
resolved_urls.append(media_url)
else:
resolved_urls.append(u)
urls = resolved_urls
# If ALL urls came from threads, delete the original (now-unused) post
non_thread_urls = [u for u in urls if u not in url_post_ids]
if url_post_ids and not non_thread_urls and post_id:
try:
with db.get_connection(for_write=True) as conn:
cursor = conn.cursor()
cursor.execute('DELETE FROM private_media_post_tags WHERE post_id = ?', (post_id,))
cursor.execute('DELETE FROM private_media_posts WHERE id = ?', (post_id,))
conn.commit()
post_id = None
except Exception:
pass
# Update job total and resolved filenames so frontend can rebuild items
resolved_filenames = [u.rstrip('/').split('/')[-1].split('?')[0] or f'url-{i+1}' for i, u in enumerate(urls)]
_update_pg_job(job_id, {
'total_files': len(urls),
'resolved_filenames': resolved_filenames,
'current_phase': 'downloading'
})
# Build set of already-imported filenames for duplicate detection
# Only count as duplicate if the encrypted data file actually exists on disk
existing_filenames = set()
try:
with db.get_connection() as conn:
cursor = conn.cursor()
cursor.execute('SELECT storage_id, encrypted_filename FROM private_media WHERE encrypted_filename IS NOT NULL')
for row in cursor.fetchall():
try:
decrypted_name = crypto.decrypt_field(row['encrypted_filename'])
if decrypted_name:
enc_file = data_path / f"{row['storage_id']}.enc"
if enc_file.exists():
existing_filenames.add(decrypted_name)
except Exception:
pass
except Exception:
pass
for idx, url in enumerate(urls):
url_basename = url.rstrip('/').split('/')[-1].split('?')[0] or f'url-{idx + 1}'
_update_pg_job(job_id, {
'current_file': url_basename,
'current_phase': 'downloading',
'bytes_downloaded': 0,
'bytes_total': 0,
'processed_files': idx
})
# Check for duplicate filename
if url_basename in existing_filenames:
duplicate_count += 1
results.append({
'filename': url_basename,
'status': 'duplicate',
'reason': 'File already exists'
})
logger.info(f"Skipping duplicate file: {url_basename}")
continue
temp_dir = Path(tempfile.mkdtemp(prefix='pg_import_'))
downloaded_files = []
try:
if url_referers.get(url) == '__ytdlp__':
# Download via yt-dlp (handles HLS/DASH streams, format merging)
import subprocess
_update_pg_job(job_id, {'current_phase': 'downloading', 'current_file': url_basename})
yt_result = subprocess.run(
[f'{_VENV_BIN}/yt-dlp', '-o', str(temp_dir / '%(title).100B.%(ext)s'),
'--no-playlist', '--no-warnings', '--no-progress', url],
capture_output=True, text=True, timeout=600
)
if yt_result.returncode != 0:
raise Exception(f'yt-dlp failed: {yt_result.stderr[:200]}')
downloaded_files = [f for f in temp_dir.iterdir() if f.is_file() and not f.name.startswith('.')]
if not downloaded_files:
raise Exception('yt-dlp produced no output files')
# Try file host downloader
elif (host := downloader.detect_host(url)):
loop = asyncio.new_event_loop()
try:
result = loop.run_until_complete(downloader.download_url(url, temp_dir))
finally:
loop.close()
if result.get('success') and result.get('files'):
downloaded_files = [Path(f) for f in result['files']]
else:
raise Exception(result.get('error', 'Download failed'))
else:
# Direct download - check for domain-specific auth
import_auth = _get_import_auth_for_url(db, crypto, url)
base_headers = {
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36'
}
if url in url_referers:
base_headers['Referer'] = url_referers[url]
base_kwargs = {}
if import_auth:
if import_auth.get('user_agent'):
base_headers['User-Agent'] = import_auth['user_agent']
if import_auth.get('username') and import_auth.get('password'):
base_kwargs['auth'] = (import_auth['username'], import_auth['password'])
if import_auth.get('cookies'):
base_kwargs['cookies'] = {c['name']: c['value'] for c in import_auth['cookies']}
# HEAD request to get file size and check range support
cf_fallback = False
try:
head_resp = req_lib.head(url, headers=base_headers, timeout=30,
allow_redirects=True, **base_kwargs)
head_resp.raise_for_status()
except Exception as head_err:
status_code = getattr(getattr(head_err, 'response', None), 'status_code', 0)
if status_code == 403 or '403' in str(head_err):
# Cloudflare-protected - fall back to curl_cffi
logger.info(f"HEAD request got 403, falling back to curl_cffi for {url}")
cf_fallback = True
else:
raise
if cf_fallback:
from curl_cffi import requests as cf_requests
cf_resp = cf_requests.get(url, impersonate='chrome', timeout=120)
cf_resp.raise_for_status()
ct = cf_resp.headers.get('Content-Type', '').lower()
if not (ct.startswith('image/') or ct.startswith('video/') or ct == 'application/octet-stream'):
raise Exception(f'Unsupported content type: {ct}')
filename = url_basename
save_path = temp_dir / filename
save_path.write_bytes(cf_resp.content)
downloaded_files = [save_path]
_update_pg_job(job_id, {
'current_phase': 'processing',
'bytes_downloaded': len(cf_resp.content),
'bytes_total': len(cf_resp.content)
})
else:
content_type = head_resp.headers.get('Content-Type', '').lower()
if not (content_type.startswith('image/') or content_type.startswith('video/') or content_type == 'application/octet-stream'):
raise Exception(f'Unsupported content type: {content_type}')
# Determine filename from URL or content-disposition
filename = url_basename
cd = head_resp.headers.get('Content-Disposition', '')
if 'filename=' in cd:
fn_match = re.search(r'filename="?([^";\n]+)"?', cd)
if fn_match:
filename = fn_match.group(1).strip()
bytes_total = int(head_resp.headers.get('Content-Length', 0) or 0)
accepts_ranges = head_resp.headers.get('Accept-Ranges', '').lower() == 'bytes'
save_path = temp_dir / filename
if bytes_total:
_update_pg_job(job_id, {'bytes_total': bytes_total})
NUM_THREADS = 5
MAX_RETRIES = 3
STALL_TIMEOUT = 30 # seconds with no data = stalled
if accepts_ranges and bytes_total > 0:
# Multi-threaded segmented download with stall detection and retry
import threading
segment_size = bytes_total // NUM_THREADS
segments = []
for i in range(NUM_THREADS):
start = i * segment_size
end = bytes_total - 1 if i == NUM_THREADS - 1 else (i + 1) * segment_size - 1
segments.append((start, end))
segment_files = [temp_dir / f".part{i}" for i in range(NUM_THREADS)]
segment_progress = [0] * NUM_THREADS
segment_errors = [None] * NUM_THREADS
progress_lock = threading.Lock()
def download_segment(seg_idx, byte_start, byte_end, out_path):
expected_size = byte_end - byte_start + 1
bytes_written = 0
for attempt in range(MAX_RETRIES):
try:
resume_start = byte_start + bytes_written
if resume_start > byte_end:
break
seg_headers = {**base_headers, 'Range': f'bytes={resume_start}-{byte_end}'}
r = req_lib.get(url, headers=seg_headers, stream=True,
timeout=(30, STALL_TIMEOUT), **base_kwargs)
r.raise_for_status()
# Verify server honored the Range request
if r.status_code != 206:
raise Exception(f"Server returned {r.status_code} instead of 206 for range request")
mode = 'ab' if bytes_written > 0 else 'wb'
with open(out_path, mode) as f:
for chunk in r.iter_content(chunk_size=65536):
f.write(chunk)
bytes_written += len(chunk)
with progress_lock:
segment_progress[seg_idx] = bytes_written
# Verify segment received the expected number of bytes
if bytes_written != expected_size:
raise Exception(f"Segment size mismatch: expected {expected_size} bytes, got {bytes_written}")
break # Success
except Exception as e:
if attempt < MAX_RETRIES - 1:
logger.warning(f"Segment {seg_idx} stalled/failed (attempt {attempt+1}), retrying from byte {byte_start + bytes_written}: {e}")
time.sleep(2 ** attempt)
else:
segment_errors[seg_idx] = e
threads = []
for i, (start, end) in enumerate(segments):
t = threading.Thread(target=download_segment,
args=(i, start, end, segment_files[i]))
t.start()
threads.append(t)
# Monitor progress while threads run
while any(t.is_alive() for t in threads):
time.sleep(0.3)
with progress_lock:
total_downloaded = sum(segment_progress)
_update_pg_job(job_id, {'bytes_downloaded': total_downloaded})
for t in threads:
t.join()
# Check for errors
for i, err in enumerate(segment_errors):
if err:
raise Exception(f'Segment {i} failed after {MAX_RETRIES} attempts: {err}')
# Combine segments into final file
with open(save_path, 'wb') as fout:
for seg_file in segment_files:
with open(seg_file, 'rb') as fin:
while True:
chunk = fin.read(1024 * 1024)
if not chunk:
break
fout.write(chunk)
seg_file.unlink()
# Verify combined file size matches expected total
actual_size = save_path.stat().st_size
if actual_size != bytes_total:
save_path.unlink(missing_ok=True)
raise Exception(f'Downloaded file size mismatch: expected {bytes_total} bytes, got {actual_size}')
_update_pg_job(job_id, {'bytes_downloaded': bytes_total})
else:
# Single-threaded download with stall detection and retry
bytes_downloaded = 0
last_progress_update = 0
for attempt in range(MAX_RETRIES):
try:
dl_headers = {**base_headers}
if bytes_downloaded > 0 and accepts_ranges:
dl_headers['Range'] = f'bytes={bytes_downloaded}-'
resp = req_lib.get(url, headers=dl_headers, stream=True,
timeout=(30, STALL_TIMEOUT), **base_kwargs)
resp.raise_for_status()
# On resume, verify server honored the Range request
if bytes_downloaded > 0 and accepts_ranges and resp.status_code != 206:
raise Exception(f"Server returned {resp.status_code} instead of 206 for range resume")
mode = 'ab' if bytes_downloaded > 0 else 'wb'
with open(save_path, mode) as f:
for chunk in resp.iter_content(chunk_size=65536):
f.write(chunk)
bytes_downloaded += len(chunk)
if bytes_downloaded - last_progress_update >= 262144:
_update_pg_job(job_id, {'bytes_downloaded': bytes_downloaded})
last_progress_update = bytes_downloaded
break # Success
except Exception as e:
if attempt < MAX_RETRIES - 1:
logger.warning(f"Download stalled/failed (attempt {attempt+1}), retrying from byte {bytes_downloaded}: {e}")
time.sleep(2 ** attempt)
else:
raise
# Verify final file size if Content-Length was known
if bytes_total > 0:
actual_size = save_path.stat().st_size
if actual_size != bytes_total:
save_path.unlink(missing_ok=True)
raise Exception(f'Downloaded file size mismatch: expected {bytes_total} bytes, got {actual_size}')
_update_pg_job(job_id, {'bytes_downloaded': bytes_downloaded})
downloaded_files = [save_path]
# Update phase to processing
_update_pg_job(job_id, {'current_phase': 'processing'})
# Process each downloaded file
for dl_file in downloaded_files:
dl_filename = dl_file.name
try:
file_hash = _get_file_hash(dl_file)
# Duplicate check (scoped by person)
with db.get_connection() as conn:
cursor = conn.cursor()
if person_id:
cursor.execute('SELECT id, person_id FROM private_media WHERE file_hash = ? AND person_id = ?', (file_hash, person_id))
else:
cursor.execute('SELECT id, person_id FROM private_media WHERE file_hash = ?', (file_hash,))
existing = cursor.fetchone()
if existing:
duplicate_count += 1
results.append({
'filename': dl_filename,
'status': 'duplicate',
'existing_id': existing['id']
})
continue
file_info = _get_file_info(dl_file)
file_size = dl_file.stat().st_size
# Skip low-resolution images
min_res = int(config.get('min_import_resolution', 0) or 0)
if min_res > 0 and file_info['file_type'] == 'image':
w = file_info.get('width') or 0
h = file_info.get('height') or 0
if w < min_res or h < min_res:
skipped_count += 1
results.append({
'filename': dl_filename,
'status': 'skipped',
'reason': f'Low resolution ({w}x{h}, min {min_res}px)'
})
continue
# Compute perceptual hash before encryption
perceptual_hash = _compute_perceptual_hash(dl_file)
# Extract date (EXIF first, then filename, then fallback)
item_date = _extract_date_from_exif(dl_file)
if not item_date:
item_date = _extract_date_from_filename(dl_filename)
if not item_date:
item_date = final_date
storage_id = str(uuid.uuid4())
# Generate thumbnail
_update_pg_job(job_id, {'current_phase': 'thumbnail'})
temp_thumb = temp_dir / f"pg_thumb_{storage_id}.jpg"
_generate_thumbnail(dl_file, temp_thumb, file_info['file_type'])
# Encrypt original
_update_pg_job(job_id, {'current_phase': 'encrypting'})
encrypted_file = data_path / f"{storage_id}.enc"
if not crypto.encrypt_file(dl_file, encrypted_file):
raise Exception(f"Encryption failed for {dl_filename}")
# Encrypt thumbnail
if temp_thumb.exists():
encrypted_thumb = thumbs_path / f"{storage_id}.enc"
crypto.encrypt_file(temp_thumb, encrypted_thumb)
# Insert into database
item_post_id = url_post_ids.get(url, post_id)
with db.get_connection(for_write=True) as conn:
cursor = conn.cursor()
cursor.execute('''
INSERT INTO private_media (
post_id, storage_id, encrypted_filename, encrypted_description,
file_hash, file_size, file_type, mime_type,
width, height, duration, person_id,
encrypted_media_date, source_type, encrypted_source_path,
perceptual_hash
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
''', (
item_post_id,
storage_id,
crypto.encrypt_field(dl_filename),
None,
file_hash,
file_size,
file_info['file_type'],
file_info['mime_type'],
file_info['width'],
file_info['height'],
file_info['duration'],
person_id,
crypto.encrypt_field(item_date),
'import',
crypto.encrypt_field(url),
perceptual_hash
))
media_id = cursor.lastrowid
imported_media_ids.append(media_id)
for tag_id in tag_id_list:
cursor.execute('''
INSERT OR IGNORE INTO private_media_tags (media_id, tag_id)
VALUES (?, ?)
''', (media_id, tag_id))
conn.commit()
success_count += 1
existing_filenames.add(dl_filename)
results.append({
'id': media_id,
'filename': dl_filename,
'status': 'created',
'media_date': item_date
})
except Exception as e:
logger.error(f"Import processing failed for {dl_filename}: {e}")
failed_count += 1
results.append({
'filename': dl_filename,
'status': 'failed',
'error': str(e)
})
except Exception as e:
logger.error(f"Import download failed for {url}: {e}")
failed_count += 1
results.append({
'filename': url_basename,
'status': 'failed',
'error': str(e)
})
finally:
# Clean up temp dir
shutil.rmtree(temp_dir, ignore_errors=True)
_update_pg_job(job_id, {
'results': list(results),
'success_count': success_count,
'failed_count': failed_count,
'duplicate_count': duplicate_count,
'skipped_count': skipped_count,
'processed_files': idx + 1
})
# Delete empty posts (original + any thread posts with no successful imports)
all_post_ids = set()
if post_id:
all_post_ids.add(post_id)
all_post_ids.update(url_post_ids.values())
used_post_ids = set()
if imported_media_ids:
try:
with db.get_connection() as conn:
cursor = conn.cursor()
placeholders = ','.join('?' for _ in imported_media_ids)
cursor.execute(f'SELECT DISTINCT post_id FROM private_media WHERE id IN ({placeholders})', imported_media_ids)
used_post_ids = {row['post_id'] for row in cursor.fetchall() if row['post_id']}
except Exception:
pass
empty_post_ids = all_post_ids - used_post_ids
if empty_post_ids:
with db.get_connection(for_write=True) as conn:
cursor = conn.cursor()
for pid in empty_post_ids:
cursor.execute('DELETE FROM private_media_post_tags WHERE post_id = ?', (pid,))
cursor.execute('DELETE FROM private_media_posts WHERE id = ?', (pid,))
conn.commit()
_invalidate_posts_cache()
_update_pg_job(job_id, {
'status': 'completed',
'completed_at': datetime.now().isoformat(),
'current_file': None,
'post_id': post_id if imported_media_ids else None
})
@router.post("/import-urls")
@limiter.limit("30/minute")
@handle_exceptions
async def import_urls(
    request: Request,
    body: ImportUrlRequest,
    background_tasks: BackgroundTasks,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Import media from URLs (file hosts or direct links)."""
    if not body.urls:
        raise ValidationError("At least one URL is required")
    db = _get_db()
    crypto = _get_crypto()
    # Post-level date: explicit request value, otherwise today's ISO date.
    group_date = body.media_date or date.today().isoformat()
    encrypted_desc = crypto.encrypt_field(body.description) if body.description else None
    # One post groups everything imported by this request.
    with db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        cur.execute('''
            INSERT INTO private_media_posts (
                person_id, encrypted_description, encrypted_media_date
            ) VALUES (?, ?, ?)
        ''', (body.person_id, encrypted_desc, crypto.encrypt_field(group_date)))
        new_post_id = cur.lastrowid
        cur.executemany('''
            INSERT OR IGNORE INTO private_media_post_tags (post_id, tag_id)
            VALUES (?, ?)
        ''', [(new_post_id, tag_id) for tag_id in body.tag_ids])
        conn.commit()
    # Register a job record, then hand the downloads to a background task.
    job_id = f"pg_import_{uuid.uuid4().hex[:12]}"
    _create_pg_job(job_id, len(body.urls), 'import')
    background_tasks.add_task(
        _import_urls_to_gallery_background,
        job_id,
        body.urls,
        new_post_id,
        body.person_id,
        body.tag_ids,
        group_date,
        body.description
    )
    return {
        "job_id": job_id,
        "post_id": new_post_id,
        "status": "processing",
        "total_files": len(body.urls)
    }
def _copy_to_gallery_background(job_id, source_paths, post_id, person_id, tag_ids, media_date, original_filenames, post_date):
    """Background task: copy local files into the encrypted private gallery.

    For each path in *source_paths* this pipeline:
      1. hashes the file and skips duplicates (scoped to *person_id* when set),
      2. rejects images below the configured ``min_import_resolution``,
      3. computes a perceptual hash and resolves a per-file media date
         (explicit *media_date* > EXIF > filename > file mtime > *post_date*),
      4. generates a thumbnail, encrypts both original and thumbnail to disk,
      5. records the item in ``private_media`` and attaches *tag_ids*.

    Progress and per-file results are published via ``_update_pg_job(job_id, ...)``.
    If nothing was copied successfully, the pre-created post *post_id* is
    deleted so no empty post is left behind.

    Args:
        job_id: Identifier of the pre-created progress job.
        source_paths: Absolute file paths (strings) to copy in.
        post_id: Pre-created ``private_media_posts`` row grouping this batch.
        person_id: Optional person to attach every item to.
        tag_ids: Tag ids to link to each successfully copied item.
        media_date: Optional explicit date overriding per-file detection.
        original_filenames: Optional {source_path: display_name} mapping
            (used when uploads were staged under temp names).
        post_date: Fallback date when no per-file date can be determined.
    """
    db = _get_db()
    crypto = _get_crypto()
    config = _get_config(db)
    # Encrypted originals live under data/, encrypted thumbnails under thumbs/.
    storage_path = Path(config.get('storage_path', '/opt/immich/private'))
    data_path = storage_path / 'data'
    thumbs_path = storage_path / 'thumbs'
    data_path.mkdir(parents=True, exist_ok=True)
    thumbs_path.mkdir(parents=True, exist_ok=True)
    results = []
    copied_media_ids = []
    success_count = 0
    failed_count = 0
    duplicate_count = 0
    skipped_count = 0
    for idx, source_path_str in enumerate(source_paths):
        source_path = Path(source_path_str)
        # Prefer the caller-supplied display name over the on-disk name.
        filename = original_filenames.get(source_path_str, source_path.name) if original_filenames else source_path.name
        _update_pg_job(job_id, {
            'current_file': filename,
            'processed_files': idx
        })
        if not source_path.exists():
            failed_count += 1
            results.append({
                'path': source_path_str,
                'filename': filename,
                'status': 'failed',
                'error': 'File not found'
            })
            _update_pg_job(job_id, {
                'results': list(results),
                'failed_count': failed_count,
                'processed_files': idx + 1
            })
            continue
        try:
            # Calculate hash for duplicate detection
            file_hash = _get_file_hash(source_path)
            # Check for duplicates (scoped by person)
            with db.get_connection() as conn:
                cursor = conn.cursor()
                if person_id:
                    cursor.execute('SELECT id, person_id FROM private_media WHERE file_hash = ? AND person_id = ?', (file_hash, person_id))
                else:
                    cursor.execute('SELECT id, person_id FROM private_media WHERE file_hash = ?', (file_hash,))
                existing = cursor.fetchone()
            if existing:
                duplicate_count += 1
                results.append({
                    'path': source_path_str,
                    'filename': filename,
                    'status': 'duplicate',
                    'existing_id': existing['id']
                })
                _update_pg_job(job_id, {
                    'results': list(results),
                    'duplicate_count': duplicate_count,
                    'processed_files': idx + 1
                })
                continue
            # Get file info (type, mime, dimensions, duration)
            file_info = _get_file_info(source_path)
            file_size = source_path.stat().st_size
            # Skip low-resolution images
            min_res = int(config.get('min_import_resolution', 0) or 0)
            if min_res > 0 and file_info['file_type'] == 'image':
                w = file_info.get('width') or 0
                h = file_info.get('height') or 0
                if w < min_res or h < min_res:
                    skipped_count += 1
                    results.append({
                        'path': source_path_str,
                        'filename': filename,
                        'status': 'skipped',
                        'reason': f'Low resolution ({w}x{h}, min {min_res}px)'
                    })
                    _update_pg_job(job_id, {
                        'results': list(results),
                        'skipped_count': skipped_count,
                        'processed_files': idx + 1
                    })
                    continue
            # Compute perceptual hash before encryption (plaintext needed)
            perceptual_hash = _compute_perceptual_hash(source_path)
            # Determine media date per file: explicit arg > EXIF > filename
            # > file mtime > post-level fallback date.
            final_date = media_date
            if not final_date:
                final_date = _extract_date_from_exif(source_path)
            if not final_date:
                final_date = _extract_date_from_filename(source_path.name)
            if not final_date:
                try:
                    from datetime import datetime as dt
                    mtime = source_path.stat().st_mtime
                    final_date = dt.fromtimestamp(mtime).strftime('%Y-%m-%dT%H:%M:%S')
                except Exception:
                    pass
            if not final_date:
                final_date = post_date
            # Generate storage ID (opaque name of the encrypted blobs on disk)
            storage_id = str(uuid.uuid4())
            # Generate thumbnail first (from the still-unencrypted source)
            temp_dir = Path(tempfile.gettempdir())
            temp_thumb = temp_dir / f"pg_thumb_{storage_id}.jpg"
            _generate_thumbnail(source_path, temp_thumb, file_info['file_type'])
            # Encrypt the original file
            encrypted_file = data_path / f"{storage_id}.enc"
            if not crypto.encrypt_file(source_path, encrypted_file):
                raise Exception("Encryption failed")
            # Encrypt thumbnail if it exists
            if temp_thumb.exists():
                encrypted_thumb = thumbs_path / f"{storage_id}.enc"
                crypto.encrypt_file(temp_thumb, encrypted_thumb)
                temp_thumb.unlink()
            # Insert into database with post_id reference
            with db.get_connection(for_write=True) as conn:
                cursor = conn.cursor()
                cursor.execute('''
                    INSERT INTO private_media (
                        post_id, storage_id, encrypted_filename, encrypted_description,
                        file_hash, file_size, file_type, mime_type,
                        width, height, duration, person_id,
                        encrypted_media_date, source_type, encrypted_source_path,
                        perceptual_hash
                    ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                ''', (
                    post_id,
                    storage_id,
                    crypto.encrypt_field(filename),
                    None,
                    file_hash,
                    file_size,
                    file_info['file_type'],
                    file_info['mime_type'],
                    file_info['width'],
                    file_info['height'],
                    file_info['duration'],
                    person_id,
                    crypto.encrypt_field(final_date),
                    'copy',
                    crypto.encrypt_field(source_path_str),
                    perceptual_hash
                ))
                media_id = cursor.lastrowid
                copied_media_ids.append(media_id)
                for tag_id in tag_ids:
                    cursor.execute('''
                        INSERT OR IGNORE INTO private_media_tags (media_id, tag_id)
                        VALUES (?, ?)
                    ''', (media_id, tag_id))
                conn.commit()
            success_count += 1
            results.append({
                'id': media_id,
                'path': source_path_str,
                'filename': filename,
                'status': 'created',
                'media_date': final_date
            })
        except Exception as e:
            # Per-file failure: record it and keep processing the rest.
            logger.error(f"Copy failed for {source_path_str}: {e}")
            failed_count += 1
            results.append({
                'path': source_path_str,
                'filename': filename,
                'status': 'failed',
                'error': str(e)
            })
        _update_pg_job(job_id, {
            'results': list(results),
            'success_count': success_count,
            'failed_count': failed_count,
            'duplicate_count': duplicate_count,
            'skipped_count': skipped_count,
            'processed_files': idx + 1
        })
    # If no files were successfully copied, delete the empty post
    if not copied_media_ids and post_id:
        with db.get_connection(for_write=True) as conn:
            cursor = conn.cursor()
            cursor.execute('DELETE FROM private_media_post_tags WHERE post_id = ?', (post_id,))
            cursor.execute('DELETE FROM private_media_posts WHERE id = ?', (post_id,))
            conn.commit()
    _invalidate_posts_cache()
    _update_pg_job(job_id, {
        'status': 'completed',
        'completed_at': datetime.now().isoformat(),
        'current_file': None,
        'post_id': post_id if copied_media_ids else None
    })
@router.post("/copy")
@limiter.limit("30/minute")
@handle_exceptions
async def copy_to_gallery(
    request: Request,
    body: CopyRequest,
    background_tasks: BackgroundTasks,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Copy files from other locations to the gallery."""
    db = _get_db()
    crypto = _get_crypto()
    # Post-level date defaults to today when the request omits one.
    group_date = body.media_date or date.today().isoformat()
    encrypted_desc = crypto.encrypt_field(body.description) if body.description else None
    # Create the post that will own every file copied by this request.
    with db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        cur.execute('''
            INSERT INTO private_media_posts (
                person_id, encrypted_description, encrypted_media_date
            ) VALUES (?, ?, ?)
        ''', (body.person_id, encrypted_desc, crypto.encrypt_field(group_date)))
        new_post_id = cur.lastrowid
        cur.executemany('''
            INSERT OR IGNORE INTO private_media_post_tags (post_id, tag_id)
            VALUES (?, ?)
        ''', [(new_post_id, tag_id) for tag_id in body.tag_ids])
        conn.commit()
    # Launch the copy/encrypt pipeline in the background; return the job handle.
    job_id = f"pg_copy_{uuid.uuid4().hex[:12]}"
    _create_pg_job(job_id, len(body.source_paths), 'copy')
    background_tasks.add_task(
        _copy_to_gallery_background,
        job_id,
        body.source_paths,
        new_post_id,
        body.person_id,
        body.tag_ids,
        body.media_date,
        body.original_filenames or {},
        group_date
    )
    return {
        "job_id": job_id,
        "post_id": new_post_id,
        "status": "processing",
        "total_files": len(body.source_paths)
    }
@router.post("/import-directory")
@limiter.limit("30/minute")
@handle_exceptions
async def import_directory(
    request: Request,
    body: ImportDirectoryRequest,
    background_tasks: BackgroundTasks,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Import all media files from a server directory into the gallery."""
    dir_path = Path(body.directory_path)
    if not dir_path.exists():
        raise HTTPException(status_code=400, detail=f"Directory not found: {body.directory_path}")
    if not dir_path.is_dir():
        raise HTTPException(status_code=400, detail=f"Path is not a directory: {body.directory_path}")
    # Recognized media extensions (images + videos).
    media_extensions = {
        '.jpg', '.jpeg', '.png', '.gif', '.webp', '.bmp', '.tiff', '.avif',
        '.mkv', '.avi', '.wmv', '.flv', '.webm', '.mov', '.mp4', '.m4v', '.ts'
    }
    if body.recursive:
        # Walk the tree; files are sorted per directory for deterministic order.
        source_paths = [
            str(Path(root) / fname)
            for root, _dirs, filenames in os.walk(dir_path)
            for fname in sorted(filenames)
            if Path(fname).suffix.lower() in media_extensions
        ]
    else:
        source_paths = [
            str(entry)
            for entry in sorted(dir_path.iterdir())
            if entry.is_file() and entry.suffix.lower() in media_extensions
        ]
    if not source_paths:
        raise HTTPException(status_code=400, detail="No media files found in the specified directory")
    db = _get_db()
    crypto = _get_crypto()
    # Post-level date defaults to today.
    group_date = body.media_date or date.today().isoformat()
    # One post groups everything imported from this directory.
    with db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        cur.execute('''
            INSERT INTO private_media_posts (
                person_id, encrypted_description, encrypted_media_date
            ) VALUES (?, ?, ?)
        ''', (
            body.person_id,
            crypto.encrypt_field(body.description) if body.description else None,
            crypto.encrypt_field(group_date)
        ))
        new_post_id = cur.lastrowid
        cur.executemany('''
            INSERT OR IGNORE INTO private_media_post_tags (post_id, tag_id)
            VALUES (?, ?)
        ''', [(new_post_id, tag_id) for tag_id in body.tag_ids])
        conn.commit()
    # Reuse the copy pipeline in the background (no display-name overrides).
    job_id = f"pg_import_{uuid.uuid4().hex[:12]}"
    _create_pg_job(job_id, len(source_paths), 'directory')
    background_tasks.add_task(
        _copy_to_gallery_background,
        job_id,
        source_paths,
        new_post_id,
        body.person_id,
        body.tag_ids,
        body.media_date,
        {},
        group_date
    )
    return {
        "job_id": job_id,
        "post_id": new_post_id,
        "status": "processing",
        "total_files": len(source_paths)
    }
@router.get("/list-directory")
@limiter.limit("60/minute")
@handle_exceptions
async def list_directory(
    request: Request,
    path: str = Query(..., description="Server directory path to list"),
    recursive: bool = Query(False, description="Recursively scan subdirectories"),
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """List media files in a server directory for preview before import."""
    dir_path = Path(path)
    if not dir_path.exists():
        raise HTTPException(status_code=400, detail=f"Path not found: {path}")
    if not dir_path.is_dir():
        raise HTTPException(status_code=400, detail=f"Path is not a directory: {path}")
    media_extensions = {
        '.jpg', '.jpeg', '.png', '.gif', '.webp', '.bmp', '.tiff', '.avif',
        '.mkv', '.avi', '.wmv', '.flv', '.webm', '.mov', '.mp4', '.m4v', '.ts'
    }
    video_extensions = {'.mkv', '.avi', '.wmv', '.flv', '.webm', '.mov', '.mp4', '.m4v', '.ts'}
    files = []
    subdirectories = []
    total_size = 0

    def _record(file_path, display_name):
        # Append one media entry and accumulate the running byte total.
        nonlocal total_size
        size = file_path.stat().st_size
        total_size += size
        files.append({
            'name': display_name,
            'size': size,
            'type': 'video' if file_path.suffix.lower() in video_extensions else 'image'
        })

    try:
        if recursive:
            for root, dirs, filenames in os.walk(dir_path):
                rel_root = Path(root).relative_to(dir_path)
                # Only first-level subdirectories are surfaced in the listing.
                if root == str(dir_path):
                    subdirectories.extend(sorted(dirs))
                for fname in sorted(filenames):
                    fpath = Path(root) / fname
                    if fpath.suffix.lower() in media_extensions:
                        label = fname if str(rel_root) == '.' else str(rel_root / fname)
                        _record(fpath, label)
        else:
            for entry in sorted(dir_path.iterdir()):
                if entry.is_dir():
                    subdirectories.append(entry.name)
                elif entry.is_file() and entry.suffix.lower() in media_extensions:
                    _record(entry, entry.name)
    except PermissionError:
        raise HTTPException(status_code=403, detail=f"Permission denied: {path}")
    return {
        "path": path,
        "files": files,
        "subdirectories": subdirectories,
        "file_count": len(files),
        "total_size": total_size
    }
@router.post("/regenerate-thumbnails")
@limiter.limit("5/minute")
@handle_exceptions
async def regenerate_thumbnails(
    request: Request,
    missing_only: bool = Query(False, description="Only regenerate missing thumbnails"),
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Regenerate thumbnails. Use missing_only=true to only fix missing ones.

    With missing_only=true every media row (images and videos) is considered,
    but only items whose encrypted thumbnail is absent on disk are processed;
    otherwise all image thumbnails are rebuilt. Each item is decrypted to a
    temp file, thumbnailed, and the thumbnail re-encrypted. Decrypted temp
    files are always removed (finally), and the in-memory thumbnail cache is
    cleared at the end so clients see the new thumbnails.

    Returns counts: total rows considered, regenerated, skipped, and per-item
    errors.
    """
    db = _get_db()
    crypto = _get_crypto()
    config = _get_config(db)
    storage_path = Path(config.get('storage_path', '/opt/immich/private'))
    data_path = storage_path / 'data'
    thumbs_path = storage_path / 'thumbs'
    with db.get_connection() as conn:
        cursor = conn.cursor()
        if missing_only:
            cursor.execute("SELECT id, storage_id, file_type, mime_type FROM private_media")
        else:
            # Full regeneration is limited to images (video thumbs are costly).
            cursor.execute("SELECT id, storage_id, file_type, mime_type FROM private_media WHERE file_type = 'image'")
        media_rows = cursor.fetchall()
    fixed = 0
    skipped = 0
    errors = []
    temp_dir = Path(tempfile.gettempdir())
    for row in media_rows:
        storage_id = row['storage_id']
        file_type = (row['file_type'] or 'image') if missing_only else 'image'
        encrypted_file = data_path / f"{storage_id}.enc"
        encrypted_thumb = thumbs_path / f"{storage_id}.enc"
        if not encrypted_file.exists():
            skipped += 1
            continue
        if missing_only and encrypted_thumb.exists():
            skipped += 1
            continue
        # Bind temp paths before the try so the finally-cleanup is always
        # well-defined (previously the except-path rebuilt the original's
        # path from `ext`, which was unbound on a first-iteration failure
        # and stale on later ones).
        temp_original = None
        temp_thumb = temp_dir / f"pg_thumb_{storage_id}.jpg"
        try:
            # Determine file extension from mime_type so PIL/ffmpeg can
            # identify the decrypted payload.
            mime = row['mime_type'] or ''
            ext = mimetypes.guess_extension(mime) or ('.mp4' if file_type == 'video' else '.jpg')
            if ext == '.jpe':
                ext = '.jpg'
            temp_original = temp_dir / f"pg_regen_{storage_id}{ext}"
            # Decrypt original to temp, regenerate thumbnail, re-encrypt it.
            crypto.decrypt_file(encrypted_file, temp_original)
            if _generate_thumbnail(temp_original, temp_thumb, file_type):
                crypto.encrypt_file(temp_thumb, encrypted_thumb)
                fixed += 1
            else:
                errors.append({'id': row['id'], 'error': f'Thumbnail generation returned false ({file_type})'})
        except Exception as e:
            errors.append({'id': row['id'], 'error': str(e)})
        finally:
            # Never leave decrypted material behind in the temp dir.
            for leftover in (temp_original, temp_thumb):
                if leftover is not None and leftover.exists():
                    leftover.unlink()
    # Drop cached plaintext thumbnails so subsequent requests re-decrypt.
    with _thumb_cache_lock:
        _thumb_cache.clear()
    logger.info(f"Thumbnail regeneration complete: {fixed} regenerated, {skipped} skipped, {len(errors)} errors")
    return {
        'total': len(media_rows),
        'regenerated': fixed,
        'skipped': skipped,
        'errors': errors
    }
# ============================================================================
# FILE SERVING ENDPOINTS
# ============================================================================
@router.get("/thumbnail/{media_id}")
@limiter.limit("300/minute")
@handle_exceptions
async def get_thumbnail(
    request: Request,
    media_id: int,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Get decrypted thumbnail with in-memory LRU cache and ETag support."""
    db = _get_db()
    crypto = _get_crypto()
    config = _get_config(db)

    def _respond(etag, payload):
        # Shared conditional-request handling for both the cached and
        # freshly-decrypted paths: 304 on ETag match, 200 with body otherwise.
        common_headers = {
            "Cache-Control": "private, max-age=86400, stale-while-revalidate=604800",
            "ETag": f'"{etag}"',
        }
        if_none_match = request.headers.get('if-none-match')
        if if_none_match and if_none_match.strip('" ') == etag:
            return Response(status_code=304, headers=common_headers)
        return Response(content=payload, media_type="image/jpeg", headers=common_headers)

    with db.get_connection() as conn:
        cur = conn.cursor()
        cur.execute('SELECT storage_id FROM private_media WHERE id = ?', (media_id,))
        record = cur.fetchone()
    if not record:
        raise NotFoundError(f"Media {media_id} not found")
    storage_id = record['storage_id']
    # Fast path: serve straight from the in-memory LRU (no disk I/O, no AES).
    cached_entry = _thumb_cache_get(storage_id)
    if cached_entry:
        return _respond(cached_entry[0], cached_entry[1])
    storage_path = Path(config.get('storage_path', '/opt/immich/private'))
    thumb_file = storage_path / 'thumbs' / f"{storage_id}.enc"
    if not thumb_file.exists():
        raise NotFoundError("Thumbnail not available")
    plaintext = crypto.decrypt_file_streaming(thumb_file)
    if not plaintext:
        raise NotFoundError("Failed to decrypt thumbnail")
    # Content hash as ETag — md5 over a ~20KB JPEG is cheap.
    etag = hashlib.md5(plaintext).hexdigest()
    _thumb_cache_put(storage_id, etag, plaintext)
    return _respond(etag, plaintext)
@router.get("/file/{media_id}")
@limiter.limit("120/minute")
@handle_exceptions
async def get_file(
    request: Request,
    media_id: int,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Get decrypted full file."""
    db = _get_db()
    crypto = _get_crypto()
    config = _get_config(db)
    with db.get_connection() as conn:
        cur = conn.cursor()
        cur.execute('''
            SELECT storage_id, encrypted_filename, mime_type, file_size
            FROM private_media WHERE id = ?
        ''', (media_id,))
        record = cur.fetchone()
    if not record:
        raise NotFoundError(f"Media {media_id} not found")
    storage_id = record['storage_id']
    filename = crypto.decrypt_field(record['encrypted_filename'])
    mime_type = record['mime_type']
    file_size = record['file_size']
    storage_path = Path(config.get('storage_path', '/opt/immich/private'))
    data_file = storage_path / 'data' / f"{storage_id}.enc"
    if not data_file.exists():
        raise NotFoundError("File not available")
    # The blob for a storage_id is immutable, so the id itself is the ETag.
    etag = storage_id
    if_none_match = request.headers.get('if-none-match')
    if if_none_match and if_none_match.strip('" ') == etag:
        return Response(status_code=304, headers={
            "ETag": f'"{etag}"',
            "Cache-Control": "private, max-age=86400, stale-while-revalidate=604800",
        })
    # Content-Disposition per RFC 6266: ASCII fallback plus UTF-8 encoded form.
    from urllib.parse import quote
    safe_filename = filename.replace('"', "'").encode('ascii', 'replace').decode('ascii')
    encoded_filename = quote(filename, safe='')
    headers = {
        "Cache-Control": "private, max-age=86400, stale-while-revalidate=604800",
        "Content-Disposition": f"inline; filename=\"{safe_filename}\"; filename*=UTF-8''{encoded_filename}",
        "ETag": f'"{etag}"',
    }
    if file_size:
        headers["Content-Length"] = str(file_size)
    # Decrypt lazily in chunks so large files never sit whole in memory.
    return StreamingResponse(
        crypto.decrypt_file_generator(data_file),
        media_type=mime_type,
        headers=headers
    )
@router.get("/stream/{media_id}")
@limiter.limit("120/minute")
@handle_exceptions
async def stream_video(
    request: Request,
    media_id: int,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Stream a decrypted video file, honoring HTTP Range requests for seeking.

    Range requests are answered with 206 + Content-Range based on the
    plaintext size recorded in the DB at import time. If that size is
    missing or zero (e.g. a legacy row), range handling is skipped and the
    full file is streamed without Content-Length, instead of the previous
    behavior of computing ``None - 1`` (TypeError) or emitting
    ``Content-Length: None``.
    """
    db = _get_db()
    crypto = _get_crypto()
    config = _get_config(db)
    with db.get_connection() as conn:
        cursor = conn.cursor()
        cursor.execute('''
            SELECT storage_id, encrypted_filename, mime_type, file_size
            FROM private_media WHERE id = ?
        ''', (media_id,))
        row = cursor.fetchone()
    if not row:
        raise NotFoundError(f"Media {media_id} not found")
    storage_id = row['storage_id']
    mime_type = row['mime_type']
    total_size = row['file_size']  # Original plaintext size from DB (may be NULL)
    storage_path = Path(config.get('storage_path', '/opt/immich/private'))
    data_file = storage_path / 'data' / f"{storage_id}.enc"
    if not data_file.exists():
        raise NotFoundError("File not available")
    # Handle Range requests for video seeking — only when the size is known.
    range_header = request.headers.get('range')
    if range_header and total_size:
        match = re.match(r'bytes=(\d+)-(\d*)', range_header)
        if match:
            start = int(match.group(1))
            end = int(match.group(2)) if match.group(2) else total_size - 1
            # Clamp both ends into the valid byte range.
            start = min(start, total_size - 1)
            end = min(end, total_size - 1)
            content_length = end - start + 1
            return StreamingResponse(
                crypto.decrypt_file_range_generator(data_file, start, end),
                status_code=206,
                media_type=mime_type,
                headers={
                    "Content-Range": f"bytes {start}-{end}/{total_size}",
                    "Accept-Ranges": "bytes",
                    "Content-Length": str(content_length),
                    "Cache-Control": "private, max-age=3600"
                }
            )
    # Full file request — stream chunks without loading everything into memory.
    headers = {"Cache-Control": "private, max-age=3600"}
    if total_size:
        # Only advertise range support / a length when the size is trustworthy.
        headers["Accept-Ranges"] = "bytes"
        headers["Content-Length"] = str(total_size)
    return StreamingResponse(
        crypto.decrypt_file_generator(data_file),
        media_type=mime_type,
        headers=headers
    )
# ============================================================================
# BATCH OPERATIONS
# ============================================================================
@router.put("/media/batch/tags")
@limiter.limit("30/minute")
@handle_exceptions
async def batch_update_tags(
    request: Request,
    body: BatchTagsRequest,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Add or remove tags from multiple items."""
    db = _get_db()
    added = 0
    removed = 0
    # Normalize optional lists so the loops below are unconditional.
    tags_to_add = body.add_tag_ids or []
    tags_to_remove = body.remove_tag_ids or []
    with db.get_connection(for_write=True) as conn:
        cursor = conn.cursor()
        for media_id in body.media_ids:
            for tag_id in tags_to_add:
                try:
                    cursor.execute('''
                        INSERT OR IGNORE INTO private_media_tags (media_id, tag_id)
                        VALUES (?, ?)
                    ''', (media_id, tag_id))
                    added += cursor.rowcount
                except Exception:
                    # Best-effort: one bad (media, tag) pair must not abort the batch.
                    pass
            for tag_id in tags_to_remove:
                cursor.execute('''
                    DELETE FROM private_media_tags
                    WHERE media_id = ? AND tag_id = ?
                ''', (media_id, tag_id))
                removed += cursor.rowcount
        conn.commit()
    return {"added": added, "removed": removed}
@router.put("/media/batch/date")
@limiter.limit("30/minute")
@handle_exceptions
async def batch_update_date(
    request: Request,
    body: BatchDateRequest,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Update the (encrypted) media date for multiple items.

    Uses a single UPDATE ... WHERE id IN (...) statement instead of one
    round-trip per id, and short-circuits an empty id list (which would
    otherwise produce an invalid `IN ()` clause).

    Returns {"updated": <number of rows actually changed>}.
    """
    db = _get_db()
    crypto = _get_crypto()
    if not body.media_ids:
        return {"updated": 0}
    encrypted_date = crypto.encrypt_field(body.new_date)
    # Same dynamic-placeholder pattern used elsewhere in this module.
    placeholders = ','.join('?' for _ in body.media_ids)
    with db.get_connection(for_write=True) as conn:
        cursor = conn.cursor()
        cursor.execute(f'''
            UPDATE private_media
            SET encrypted_media_date = ?, updated_at = CURRENT_TIMESTAMP
            WHERE id IN ({placeholders})
        ''', (encrypted_date, *body.media_ids))
        updated = cursor.rowcount
        conn.commit()
    return {"updated": updated}
@router.put("/media/batch/person")
@limiter.limit("30/minute")
@handle_exceptions
async def batch_update_person(
    request: Request,
    body: BatchPersonRequest,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Reassign every item in the batch to the given person."""
    db = _get_db()
    touched = 0
    with db.get_connection(for_write=True) as conn:
        cur = conn.cursor()
        for item_id in body.media_ids:
            cur.execute('''
                UPDATE private_media
                SET person_id = ?, updated_at = CURRENT_TIMESTAMP
                WHERE id = ?
            ''', (body.person_id, item_id))
            touched += cur.rowcount
        conn.commit()
    return {"updated": touched}
# ============================================================================
# EXPORT ENDPOINTS
# ============================================================================
@router.get("/export/{media_id}")
@limiter.limit("60/minute")
@handle_exceptions
async def export_single(
    request: Request,
    media_id: int,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Download one media item, decrypted, under its original filename."""
    db = _get_db()
    crypto = _get_crypto()
    config = _get_config(db)
    with db.get_connection() as conn:
        cur = conn.cursor()
        cur.execute('''
            SELECT storage_id, encrypted_filename, mime_type
            FROM private_media WHERE id = ?
        ''', (media_id,))
        record = cur.fetchone()
    if not record:
        raise NotFoundError(f"Media {media_id} not found")
    original_name = crypto.decrypt_field(record['encrypted_filename'])
    base = Path(config.get('storage_path', '/opt/immich/private'))
    enc_path = base / 'data' / f"{record['storage_id']}.enc"
    if not enc_path.exists():
        raise NotFoundError("File not available")
    # Content-Disposition needs an ASCII fallback plus an RFC 5987 UTF-8 form.
    from urllib.parse import quote
    ascii_name = original_name.replace('"', "'").encode('ascii', 'replace').decode('ascii')
    utf8_name = quote(original_name, safe='')
    # Stream decrypted chunks; the whole file never lives in memory at once.
    return StreamingResponse(
        crypto.decrypt_file_generator(enc_path),
        media_type=record['mime_type'],
        headers={
            "Content-Disposition": f"attachment; filename=\"{ascii_name}\"; filename*=UTF-8''{utf8_name}"
        }
    )
def _unique_zip_path(folder_parts, filename, used):
    """Return a ZIP entry path built from *folder_parts* + *filename* that is
    not already in *used*.

    ZipFile happily stores duplicate entry names, but most extractors keep
    only the last one — so collisions get a " (2)", " (3)", ... suffix
    inserted before the extension.
    """
    candidate = '/'.join(folder_parts + [filename])
    if candidate not in used:
        return candidate
    stem, dot, ext = filename.rpartition('.')
    n = 2
    while True:
        alt = f"{stem} ({n}).{ext}" if dot else f"{filename} ({n})"
        candidate = '/'.join(folder_parts + [alt])
        if candidate not in used:
            return candidate
        n += 1
@router.post("/export/batch")
@limiter.limit("10/minute")
@handle_exceptions
async def export_batch(
    request: Request,
    body: ExportBatchRequest,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Export multiple files as a ZIP.

    Files can optionally be grouped into person and/or date folders.
    Entry names are de-duplicated so items sharing a filename do not
    silently overwrite each other on extraction. Missing or
    undecryptable items are skipped.
    """
    db = _get_db()
    crypto = _get_crypto()
    config = _get_config(db)
    storage_path = Path(config.get('storage_path', '/opt/immich/private'))
    # Create ZIP in memory
    zip_buffer = BytesIO()
    used_paths = set()
    with ZipFile(zip_buffer, 'w') as zf:
        with db.get_connection() as conn:
            cursor = conn.cursor()
            for media_id in body.media_ids:
                cursor.execute('''
                    SELECT m.storage_id, m.encrypted_filename, m.encrypted_media_date,
                           p.encrypted_name as person_name
                    FROM private_media m
                    LEFT JOIN private_media_persons p ON m.person_id = p.id
                    WHERE m.id = ?
                ''', (media_id,))
                row = cursor.fetchone()
                if not row:
                    continue
                storage_id = row['storage_id']
                filename = crypto.decrypt_field(row['encrypted_filename'])
                media_date = crypto.decrypt_field(row['encrypted_media_date'])
                person_name = crypto.decrypt_field(row['person_name']) if row['person_name'] else 'Unknown'
                data_file = storage_path / 'data' / f"{storage_id}.enc"
                if not data_file.exists():
                    continue
                decrypted = crypto.decrypt_file_streaming(data_file)
                if not decrypted:
                    continue
                # Build path in ZIP (optional person/date folders)
                path_parts = []
                if body.organize_by_person:
                    path_parts.append(person_name.replace('/', '_'))
                if body.organize_by_date:
                    path_parts.append(media_date)
                zip_path = _unique_zip_path(path_parts, filename, used_paths)
                used_paths.add(zip_path)
                zf.writestr(zip_path, decrypted)
    zip_buffer.seek(0)
    return Response(
        content=zip_buffer.getvalue(),
        media_type="application/zip",
        headers={
            "Content-Disposition": f'attachment; filename="private_gallery_export_{datetime.now().strftime("%Y%m%d_%H%M%S")}.zip"'
        }
    )
# ============================================================================
# ALBUMS (Auto-generated from persons)
# ============================================================================
@router.get("/albums")
@limiter.limit("60/minute")
@handle_exceptions
async def get_albums(
    request: Request,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """List person-based albums, each with item count and a cover thumbnail."""
    db = _get_db()
    crypto = _get_crypto()
    with db.get_connection() as conn:
        cursor = conn.cursor()
        # Get persons with media counts
        cursor.execute('''
            SELECT p.id, p.encrypted_name, p.relationship_id,
                   r.encrypted_name as rel_encrypted_name, r.color as rel_color,
                   COUNT(m.id) as item_count,
                   (SELECT id FROM private_media WHERE person_id = p.id ORDER BY created_at DESC LIMIT 1) as latest_media_id
            FROM private_media_persons p
            JOIN private_media_relationships r ON p.relationship_id = r.id
            LEFT JOIN private_media m ON m.person_id = p.id
            GROUP BY p.id
            ORDER BY p.encrypted_name
        ''')
        person_rows = cursor.fetchall()

    def _to_album(r):
        cover = f"/api/private-gallery/thumbnail/{r['latest_media_id']}" if r['latest_media_id'] else None
        return {
            'person_id': r['id'],
            'person_name': crypto.decrypt_field(r['encrypted_name']),
            'relationship': {
                'id': r['relationship_id'],
                'name': crypto.decrypt_field(r['rel_encrypted_name']),
                'color': r['rel_color']
            },
            'item_count': r['item_count'],
            'cover_thumbnail_url': cover
        }

    albums = [_to_album(r) for r in person_rows]
    # Order by the plaintext name — the DB only ever sees ciphertext.
    albums.sort(key=lambda a: a['person_name'].lower())
    return {"albums": albums}
# ============================================================================
# STATS ENDPOINT
# ============================================================================
@router.post("/reparse-dates")
@limiter.limit("5/minute")
@handle_exceptions
async def reparse_dates(
    request: Request,
    current_user: Dict = Depends(get_current_user),
):
    """Re-extract dates from original filenames for all media items.
    Requires gallery to be unlocked (encryption key in memory) but does not require gallery token."""
    db = _get_db()
    crypto = _get_crypto()
    if not crypto.is_initialized():
        raise AuthError("Gallery must be unlocked to reparse dates")
    changed = 0
    samples = []
    with db.get_connection(for_write=True) as conn:
        cursor = conn.cursor()
        cursor.execute('SELECT id, encrypted_filename, encrypted_media_date FROM private_media')
        all_rows = cursor.fetchall()
        for record in all_rows:
            name = crypto.decrypt_field(record['encrypted_filename'])
            current_date = crypto.decrypt_field(record['encrypted_media_date'])
            # Attempt to derive a better date from the original filename.
            candidate = _extract_date_from_filename(name)
            # Keep a handful of before/after samples for troubleshooting.
            if len(samples) < 5:
                samples.append({
                    "id": record['id'],
                    "filename": name,
                    "old_date": current_date,
                    "extracted_date": candidate,
                    "will_update": bool(candidate and candidate != current_date)
                })
            if candidate and candidate != current_date:
                cursor.execute(
                    'UPDATE private_media SET encrypted_media_date = ?, updated_at = CURRENT_TIMESTAMP WHERE id = ?',
                    (crypto.encrypt_field(candidate), record['id'])
                )
                changed += 1
        conn.commit()
    return {
        "message": f"Updated {changed} media items with re-parsed dates",
        "updated": changed,
        "total": len(all_rows),
        "debug_samples": samples
    }
@router.get("/stats")
@limiter.limit("60/minute")
@handle_exceptions
async def get_stats(
    request: Request,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Get gallery statistics."""
    db = _get_db()
    config = _get_config(db)
    with db.get_connection() as conn:
        cur = conn.cursor()
        cur.execute('SELECT COUNT(*) as total FROM private_media')
        item_total = cur.fetchone()['total']
        cur.execute('SELECT COALESCE(SUM(file_size), 0) as total FROM private_media')
        size_total = cur.fetchone()['total']
        cur.execute('''
            SELECT file_type, COUNT(*) as count
            FROM private_media
            GROUP BY file_type
        ''')
        type_counts = {r['file_type']: r['count'] for r in cur.fetchall()}
        cur.execute('''
            SELECT p.id, COUNT(m.id) as count
            FROM private_media_persons p
            LEFT JOIN private_media m ON m.person_id = p.id
            GROUP BY p.id
        ''')
        person_counts = {r['id']: r['count'] for r in cur.fetchall()}
        cur.execute('SELECT COUNT(*) as total FROM private_media_persons')
        person_total = cur.fetchone()['total']
        cur.execute('''
            SELECT COUNT(*) as count FROM private_media
            WHERE created_at >= datetime('now', '-7 days')
        ''')
        week_count = cur.fetchone()['count']
    # On-disk usage of encrypted payloads + thumbnails; best-effort only.
    base = Path(config.get('storage_path', '/opt/immich/private'))
    try:
        disk_used = (
            sum(f.stat().st_size for f in (base / 'data').glob('*.enc'))
            + sum(f.stat().st_size for f in (base / 'thumbs').glob('*.enc'))
        )
    except Exception:
        disk_used = 0
    return {
        "total_items": item_total,
        "total_size": size_total,
        "total_persons": person_total,
        "by_type": type_counts,
        "by_person_count": person_counts,
        "recent_7d": week_count,
        "storage_used": disk_used
    }
@router.post("/migrate-chunked")
@limiter.limit("2/minute")
@handle_exceptions
async def migrate_to_chunked(
    request: Request,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Re-encrypt single-shot files >50MB to chunked format for streaming.

    Scans the encrypted data directory for large files still in the
    single-shot format, then re-encrypts them on a background daemon
    thread, reporting progress via the private-gallery job store.
    Returns immediately with a job id (or status "done" if nothing needs
    migrating).
    """
    # Local import: `secrets` is not part of the module-level import block.
    import secrets
    db = _get_db()
    crypto = _get_crypto()
    config = _get_config(db)
    storage_path = Path(config.get('storage_path', '/opt/immich/private'))
    data_path = storage_path / 'data'
    min_size = 50 * 1024 * 1024  # 50MB threshold
    # Find files that need migration (smallest first so progress shows quickly)
    with db.get_connection() as conn:
        cursor = conn.cursor()
        cursor.execute('SELECT id, storage_id, file_size FROM private_media WHERE file_size > ? ORDER BY file_size ASC', (min_size,))
        rows = cursor.fetchall()
    to_migrate = []
    for row in rows:
        enc_file = data_path / f"{row['storage_id']}.enc"
        if enc_file.exists() and not crypto._is_chunked_format(enc_file):
            to_migrate.append(row)
    if not to_migrate:
        return {"status": "done", "message": "No files need migration", "migrated": 0}
    # Run migration in background thread
    job_id = f"pg_migrate_{secrets.token_hex(6)}"
    _update_pg_job(job_id, {
        'status': 'running',
        'total_files': len(to_migrate),
        'processed_files': 0,
        'migrated': 0,
        'failed': 0,
        'current_file': ''
    })

    def _run_migration():
        # Runs off the event loop; re-encryption is CPU/disk heavy.
        migrated = 0
        failed = 0
        for idx, row in enumerate(to_migrate):
            enc_file = data_path / f"{row['storage_id']}.enc"
            _update_pg_job(job_id, {
                'current_file': f"ID {row['id']} ({row['file_size'] / 1e6:.0f}MB)",
                'processed_files': idx
            })
            try:
                if crypto.re_encrypt_to_chunked(enc_file):
                    migrated += 1
                    logger.info(f"Migrated ID {row['id']} ({row['file_size']/1e6:.0f}MB) to chunked format")
                else:
                    failed += 1
            except Exception as e:
                logger.error(f"Migration failed for ID {row['id']}: {e}")
                failed += 1
            _update_pg_job(job_id, {
                'processed_files': idx + 1,
                'migrated': migrated,
                'failed': failed
            })
        _invalidate_posts_cache()
        _update_pg_job(job_id, {
            'status': 'completed',
            'processed_files': len(to_migrate),
            'migrated': migrated,
            'failed': failed
        })

    # `threading` is already imported at module level; no local re-import needed.
    threading.Thread(target=_run_migration, daemon=True).start()
    return {
        "status": "started",
        "job_id": job_id,
        "total_files": len(to_migrate),
        "total_size_mb": sum(r['file_size'] for r in to_migrate) / 1e6
    }
# ============================================================================
# REDDIT COMMUNITY MONITOR ENDPOINTS
# ============================================================================
def _get_reddit_monitor(with_activity_manager: bool = False):
    """Build a RedditCommunityMonitor bound to the gallery database.

    When *with_activity_manager* is True, the shared activity status
    manager is wired in so background checks report progress.
    """
    db = _get_db()
    if hasattr(db, 'db_path'):
        db_path = str(Path(db.db_path))
    else:
        # Fallback: resolve the default database location relative to this file.
        db_path = str(Path(__file__).parent.parent.parent / 'database' / 'media_downloader.db')
    from modules.reddit_community_monitor import RedditCommunityMonitor
    manager = None
    if with_activity_manager:
        from modules.activity_status import get_activity_manager
        app_state = get_app_state()
        manager = get_activity_manager(app_state.db if app_state else None)
    return RedditCommunityMonitor(db_path, manager)
@router.get("/reddit/settings")
@handle_exceptions
async def get_reddit_settings(
    request: Request,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Get Reddit monitor settings."""
    monitor = _get_reddit_monitor()
    crypto = _get_crypto()
    settings = monitor.get_settings()
    # Report whether encrypted cookies exist without exposing their content.
    settings['has_cookies'] = monitor.has_cookies(crypto)
    return {"settings": settings}
@router.put("/reddit/settings")
@handle_exceptions
async def update_reddit_settings(
    request: Request,
    data: RedditMonitorSettingsUpdate,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Update Reddit monitor settings."""
    monitor = _get_reddit_monitor()
    crypto = _get_crypto()
    updates = {field: value for field, value in data.model_dump().items() if value is not None}
    # Enabling exports the encryption key to disk so the background job can
    # decrypt while the gallery is locked; disabling removes that key file.
    if 'enabled' in updates:
        from modules.private_gallery_crypto import export_key_to_file, delete_key_file
        from modules.reddit_community_monitor import REDDIT_MONITOR_KEY_FILE
        if not updates['enabled']:
            delete_key_file(REDDIT_MONITOR_KEY_FILE)
        else:
            if not crypto.is_initialized():
                raise AuthError("Gallery must be unlocked to enable Reddit monitor")
            if not export_key_to_file(REDDIT_MONITOR_KEY_FILE):
                raise ValidationError("Failed to export encryption key for background monitoring")
    if not monitor.update_settings(**updates):
        raise ValidationError("Failed to update settings")
    return {"message": "Reddit monitor settings updated"}
@router.put("/reddit/cookies")
@handle_exceptions
async def upload_reddit_cookies(
    request: Request,
    data: RedditCookiesUpload,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Upload JSON cookies for Reddit authentication. Stored encrypted."""
    crypto = _get_crypto()
    if not crypto.is_initialized():
        raise AuthError("Gallery must be unlocked to upload cookies")
    # The payload must parse as a JSON array (browser cookie-export format).
    try:
        decoded = json.loads(data.cookies_json)
    except json.JSONDecodeError:
        raise ValidationError("Invalid JSON")
    if not isinstance(decoded, list):
        raise ValidationError("Cookies must be a JSON array")
    monitor = _get_reddit_monitor()
    if not monitor.save_cookies(crypto, data.cookies_json):
        raise ValidationError("Failed to save cookies")
    return {"message": "Cookies saved"}
@router.delete("/reddit/cookies")
@handle_exceptions
async def delete_reddit_cookies(
    request: Request,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Delete stored Reddit cookies."""
    _get_reddit_monitor().delete_cookies()
    return {"message": "Cookies deleted"}
@router.get("/reddit/communities")
@handle_exceptions
async def get_reddit_communities(
    request: Request,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Get all Reddit community mappings with decrypted person names."""
    monitor = _get_reddit_monitor()
    crypto = _get_crypto()

    def _try_decrypt(ciphertext, fallback):
        # Decryption failures are reported via the fallback, never raised.
        try:
            return crypto.decrypt_field(ciphertext)
        except Exception:
            return fallback

    result = []
    for c in monitor.get_all_communities():
        entry = {
            'id': c['id'],
            'subreddit_name': c['subreddit_name'],
            'person_id': c['person_id'],
            'enabled': bool(c['enabled']),
            'last_checked': c['last_checked'],
            'total_media_found': c['total_media_found'],
            'created_at': c['created_at'],
            'updated_at': c['updated_at'],
        }
        person_cipher = c.get('person_encrypted_name')
        entry['person_name'] = _try_decrypt(person_cipher, '[Decryption Error]') if person_cipher else 'Unknown'
        rel_cipher = c.get('relationship_encrypted_name')
        entry['relationship_name'] = _try_decrypt(rel_cipher, '') if rel_cipher else ''
        entry['relationship_color'] = c.get('relationship_color', '#6b7280')
        result.append(entry)
    return {"communities": result}
@router.post("/reddit/communities")
@handle_exceptions
async def add_reddit_community(
    request: Request,
    data: RedditCommunityCreate,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Add a new Reddit community mapping."""
    monitor = _get_reddit_monitor()
    try:
        new_id = monitor.add_community(data.subreddit_name, data.person_id)
    except Exception as e:
        # Surface the duplicate-mapping case as a friendly validation error.
        if 'UNIQUE constraint' in str(e):
            raise ValidationError("This subreddit is already mapped to this person")
        raise
    return {"id": new_id, "message": "Community added"}
@router.put("/reddit/communities/{community_id}")
@handle_exceptions
async def update_reddit_community(
    request: Request,
    community_id: int,
    data: RedditCommunityUpdate,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Update a Reddit community mapping."""
    monitor = _get_reddit_monitor()
    changes = {field: value for field, value in data.model_dump().items() if value is not None}
    if not monitor.update_community(community_id, **changes):
        raise NotFoundError("Community not found")
    return {"message": "Community updated"}
@router.delete("/reddit/communities/{community_id}")
@handle_exceptions
async def delete_reddit_community(
    request: Request,
    community_id: int,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Delete a Reddit community mapping."""
    monitor = _get_reddit_monitor()
    if not monitor.delete_community(community_id):
        raise NotFoundError("Community not found")
    return {"message": "Community deleted"}
@router.post("/reddit/check-now")
@handle_exceptions
async def trigger_reddit_check(
    request: Request,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Trigger an immediate Reddit community check in the background.

    The check runs on a daemon thread with its own event loop, so this
    request returns immediately.
    """
    monitor = _get_reddit_monitor(with_activity_manager=True)
    # Re-export key file to ensure the background worker can decrypt.
    crypto = _get_crypto()
    if crypto.is_initialized():
        from modules.private_gallery_crypto import export_key_to_file
        from modules.reddit_community_monitor import REDDIT_MONITOR_KEY_FILE
        export_key_to_file(REDDIT_MONITOR_KEY_FILE)

    def run_check():
        try:
            # asyncio/threading are module-level imports; a fresh loop is
            # needed because this runs outside the server's event loop.
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            try:
                count = loop.run_until_complete(monitor.check_all_now())
                logger.info(f"Reddit manual check complete: {count} new media", module="PrivateGallery")
                if count > 0:
                    _invalidate_posts_cache()
            finally:
                loop.close()
        except Exception as e:
            logger.error(f"Reddit manual check failed: {e}", module="PrivateGallery")

    threading.Thread(target=run_check, daemon=True).start()
    return {"message": "Reddit check started in background"}
@router.post("/reddit/communities/{community_id}/download-all")
@handle_exceptions
async def download_full_reddit_community(
    request: Request,
    community_id: int,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Download all available media from a Reddit community.

    Runs on a daemon thread with its own event loop; the request
    returns immediately.

    Raises:
        NotFoundError: if the community mapping does not exist.
    """
    monitor = _get_reddit_monitor(with_activity_manager=True)
    community = monitor.get_community(community_id)
    if not community:
        raise NotFoundError("Community not found")
    # Re-export key file so the background worker can decrypt.
    crypto = _get_crypto()
    if crypto.is_initialized():
        from modules.private_gallery_crypto import export_key_to_file
        from modules.reddit_community_monitor import REDDIT_MONITOR_KEY_FILE
        export_key_to_file(REDDIT_MONITOR_KEY_FILE)

    def run_download():
        try:
            # asyncio/threading are module-level imports; no local re-import.
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            try:
                count = loop.run_until_complete(monitor.download_full_community(community_id))
                logger.info(f"Reddit full download complete: {count} media from r/{community['subreddit_name']}", module="PrivateGallery")
                if count > 0:
                    _invalidate_posts_cache()
            finally:
                loop.close()
        except Exception as e:
            logger.error(f"Reddit full download failed: {e}", module="PrivateGallery")

    threading.Thread(target=run_download, daemon=True).start()
    return {"message": f"Full download started for r/{community['subreddit_name']}"}
@router.post("/reddit/communities/{community_id}/check")
@handle_exceptions
async def check_single_reddit_community(
    request: Request,
    community_id: int,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Check a single Reddit community for new posts.

    Runs on a daemon thread with its own event loop; the request
    returns immediately.

    Raises:
        NotFoundError: if the community mapping does not exist.
    """
    monitor = _get_reddit_monitor(with_activity_manager=True)
    community = monitor.get_community(community_id)
    if not community:
        raise NotFoundError("Community not found")
    # Re-export key file so the background worker can decrypt.
    crypto = _get_crypto()
    if crypto.is_initialized():
        from modules.private_gallery_crypto import export_key_to_file
        from modules.reddit_community_monitor import REDDIT_MONITOR_KEY_FILE
        export_key_to_file(REDDIT_MONITOR_KEY_FILE)

    def run_check():
        try:
            # asyncio/threading are module-level imports; no local re-import.
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            try:
                count = loop.run_until_complete(monitor.check_single_community(community_id))
                logger.info(f"Reddit single community check complete: {count} new media from r/{community['subreddit_name']}", module="PrivateGallery")
                if count > 0:
                    _invalidate_posts_cache()
            finally:
                loop.close()
        except Exception as e:
            logger.error(f"Reddit single community check failed: {e}", module="PrivateGallery")

    threading.Thread(target=run_check, daemon=True).start()
    return {"message": f"Check started for r/{community['subreddit_name']}"}
@router.post("/reddit/communities/check-by-person/{person_id}")
@handle_exceptions
async def check_reddit_communities_by_person(
    request: Request,
    person_id: int,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Check all Reddit communities for a specific person.

    Runs on a daemon thread with its own event loop; the request
    returns immediately.
    """
    monitor = _get_reddit_monitor(with_activity_manager=True)
    # Re-export key file so the background worker can decrypt.
    crypto = _get_crypto()
    if crypto.is_initialized():
        from modules.private_gallery_crypto import export_key_to_file
        from modules.reddit_community_monitor import REDDIT_MONITOR_KEY_FILE
        export_key_to_file(REDDIT_MONITOR_KEY_FILE)

    def run_check():
        try:
            # asyncio/threading are module-level imports; no local re-import.
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            try:
                count = loop.run_until_complete(monitor.check_communities_by_person(person_id))
                logger.info(f"Reddit person check complete: {count} new media for person {person_id}", module="PrivateGallery")
                if count > 0:
                    _invalidate_posts_cache()
            finally:
                loop.close()
        except Exception as e:
            logger.error(f"Reddit person check failed: {e}", module="PrivateGallery")

    threading.Thread(target=run_check, daemon=True).start()
    return {"message": f"Check started for all communities of person {person_id}"}
@router.get("/reddit/history/{community_id}")
@handle_exceptions
async def get_reddit_history(
    request: Request,
    community_id: int,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Get download history for a Reddit community."""
    entries = _get_reddit_monitor().get_history(community_id)
    return {"history": entries}
# ============================================================================
# SCRAPER ACCOUNT MAPPINGS (Instagram, TikTok, Snapchat)
# ============================================================================
VALID_SCRAPER_PLATFORMS = ('instagram', 'tiktok', 'snapchat')
def _validate_scraper_platform(platform: str):
if platform not in VALID_SCRAPER_PLATFORMS:
raise ValidationError(f"Invalid platform: {platform}. Must be one of: {', '.join(VALID_SCRAPER_PLATFORMS)}")
@router.get("/scraper-accounts/{platform}/available")
@handle_exceptions
async def get_available_scraper_accounts(
    request: Request,
    platform: str,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Get de-duplicated list of accounts from all scrapers for a platform."""
    _validate_scraper_platform(platform)
    db = _get_db()
    # Prefer the live settings manager; fall back to the static app config.
    app_state = get_app_state()
    cfg = {}
    if getattr(app_state, 'settings_manager', None):
        cfg = app_state.settings_manager.get_all()
    elif hasattr(app_state, 'config'):
        cfg = app_state.config or {}
    from modules.scraper_gallery_bridge import get_available_accounts
    return {"accounts": get_available_accounts(platform, cfg, db)}
@router.get("/scraper-accounts/{platform}")
@handle_exceptions
async def get_scraper_accounts(
    request: Request,
    platform: str,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """List all scraper account mappings for a platform."""
    _validate_scraper_platform(platform)
    db = _get_db()
    crypto = _get_crypto()
    import sqlite3
    conn = sqlite3.connect(db.db_path, timeout=10)
    conn.row_factory = sqlite3.Row
    try:
        cur = conn.cursor()
        cur.execute('''
            SELECT sa.*, p.encrypted_name as person_encrypted_name,
                   r.encrypted_name as relationship_encrypted_name,
                   r.color as relationship_color
            FROM private_media_scraper_accounts sa
            JOIN private_media_persons p ON sa.person_id = p.id
            LEFT JOIN private_media_relationships r ON p.relationship_id = r.id
            WHERE sa.platform = ?
            ORDER BY sa.created_at DESC
        ''', (platform,))
        mapping_rows = cur.fetchall()
    finally:
        conn.close()

    def _decrypt_or(ciphertext, fallback):
        # Never let a decryption failure break the whole listing.
        try:
            return crypto.decrypt_field(ciphertext)
        except Exception:
            return fallback

    accounts = []
    for r in mapping_rows:
        rel_cipher = r['relationship_encrypted_name']
        accounts.append({
            'id': r['id'],
            'platform': r['platform'],
            'username': r['username'],
            'person_id': r['person_id'],
            'enabled': bool(r['enabled']),
            'last_imported_at': r['last_imported_at'],
            'total_media_imported': r['total_media_imported'],
            'created_at': r['created_at'],
            'updated_at': r['updated_at'],
            'person_name': _decrypt_or(r['person_encrypted_name'], '[Decryption Error]'),
            'relationship_name': _decrypt_or(rel_cipher, None) if rel_cipher else None,
            'relationship_color': r['relationship_color'],
        })
    return {"accounts": accounts}
@router.post("/scraper-accounts/{platform}")
@handle_exceptions
async def add_scraper_account(
    request: Request,
    platform: str,
    data: ScraperAccountCreate,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Add a new scraper account → person mapping."""
    _validate_scraper_platform(platform)
    db = _get_db()
    # Normalise: trim whitespace, lowercase, drop any leading '@'.
    username = data.username.strip().lower().lstrip('@')
    if not username:
        raise ValidationError("Username is required")
    import sqlite3
    conn = sqlite3.connect(db.db_path, timeout=10)
    try:
        timestamp = datetime.now().isoformat()
        cursor = conn.cursor()
        try:
            cursor.execute('''
                INSERT INTO private_media_scraper_accounts (platform, username, person_id, created_at, updated_at)
                VALUES (?, ?, ?, ?, ?)
            ''', (platform, username, data.person_id, timestamp, timestamp))
            conn.commit()
        except sqlite3.IntegrityError as e:
            detail = str(e)
            if 'UNIQUE constraint' in detail:
                raise ValidationError("This account is already mapped to this person")
            if 'FOREIGN KEY constraint' in detail:
                raise ValidationError("Person not found")
            raise
        # Export key file so scheduler can import even when gallery is locked
        crypto = _get_crypto()
        if crypto.is_initialized():
            from modules.private_gallery_crypto import export_key_to_file
            from modules.scraper_gallery_bridge import SCRAPER_BRIDGE_KEY_FILE
            export_key_to_file(SCRAPER_BRIDGE_KEY_FILE)
        return {"id": cursor.lastrowid, "message": "Account mapping added"}
    finally:
        conn.close()
@router.put("/scraper-accounts/{platform}/{account_id}")
@handle_exceptions
async def update_scraper_account(
    request: Request,
    platform: str,
    account_id: int,
    data: ScraperAccountUpdate,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Update a scraper account mapping."""
    _validate_scraper_platform(platform)
    db = _get_db()
    # Build the SET clause from whichever optional fields were supplied.
    set_clauses = []
    values = []
    if data.person_id is not None:
        set_clauses.append("person_id = ?")
        values.append(data.person_id)
    if data.enabled is not None:
        set_clauses.append("enabled = ?")
        values.append(1 if data.enabled else 0)
    if not set_clauses:
        raise ValidationError("No fields to update")
    set_clauses.append("updated_at = ?")
    values.append(datetime.now().isoformat())
    values.extend([account_id, platform])
    import sqlite3
    conn = sqlite3.connect(db.db_path, timeout=10)
    try:
        cursor = conn.cursor()
        cursor.execute(
            f'UPDATE private_media_scraper_accounts SET {", ".join(set_clauses)} WHERE id = ? AND platform = ?',
            values
        )
        conn.commit()
        if cursor.rowcount == 0:
            raise NotFoundError("Account mapping not found")
        return {"message": "Account mapping updated"}
    finally:
        conn.close()
@router.delete("/scraper-accounts/{platform}/{account_id}")
@handle_exceptions
async def delete_scraper_account(
    request: Request,
    platform: str,
    account_id: int,
    current_user: Dict = Depends(get_current_user),
    session: Dict = Depends(_verify_gallery_token)
):
    """Delete a scraper account mapping."""
    _validate_scraper_platform(platform)
    db = _get_db()
    import sqlite3
    conn = sqlite3.connect(db.db_path, timeout=10)
    try:
        cur = conn.cursor()
        cur.execute(
            'DELETE FROM private_media_scraper_accounts WHERE id = ? AND platform = ?',
            (account_id, platform)
        )
        conn.commit()
        deleted = cur.rowcount
    finally:
        conn.close()
    if deleted == 0:
        raise NotFoundError("Account mapping not found")
    return {"message": "Account mapping deleted"}