873
modules/private_gallery_crypto.py
Normal file
873
modules/private_gallery_crypto.py
Normal file
@@ -0,0 +1,873 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Private Gallery Encryption Module
|
||||
|
||||
Provides security features for the Private Gallery:
|
||||
- Password hashing with bcrypt
|
||||
- Key derivation with Argon2id
|
||||
- File encryption/decryption with AES-256-GCM
|
||||
- Field encryption with Fernet
|
||||
- Session token management
|
||||
"""
|
||||
|
||||
import os
|
||||
import secrets
|
||||
import hashlib
|
||||
import base64
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Optional, Dict, Tuple
|
||||
from pathlib import Path
|
||||
from threading import Lock
|
||||
|
||||
try:
|
||||
import bcrypt
|
||||
except ImportError:
|
||||
bcrypt = None
|
||||
|
||||
try:
|
||||
from argon2 import PasswordHasher
|
||||
from argon2.low_level import hash_secret_raw, Type
|
||||
ARGON2_AVAILABLE = True
|
||||
except ImportError:
|
||||
ARGON2_AVAILABLE = False
|
||||
|
||||
try:
|
||||
from cryptography.fernet import Fernet
|
||||
from cryptography.hazmat.primitives.ciphers.aead import AESGCM
|
||||
from cryptography.hazmat.primitives import hashes
|
||||
from cryptography.hazmat.primitives.kdf.pbkdf2 import PBKDF2HMAC
|
||||
CRYPTO_AVAILABLE = True
|
||||
except ImportError:
|
||||
CRYPTO_AVAILABLE = False
|
||||
|
||||
from modules.universal_logger import get_logger
|
||||
|
||||
logger = get_logger('PrivateGalleryCrypto')
|
||||
|
||||
|
||||
class PrivateGalleryCrypto:
    """
    Handles all encryption operations for the Private Gallery.

    Security features:
    - Passwords hashed with bcrypt (cost factor 12)
    - Encryption key derived from password using Argon2id
    - Files encrypted with AES-256-GCM
    - Database fields encrypted with Fernet (AES-128-CBC + HMAC)
    - Session tokens with configurable timeout
    """

    # Argon2id parameters (OWASP recommended)
    ARGON2_TIME_COST = 3
    ARGON2_MEMORY_COST = 65536  # 64 MiB
    ARGON2_PARALLELISM = 4
    ARGON2_HASH_LENGTH = 32  # 256 bits for AES-256

    # AES-GCM parameters
    AES_KEY_SIZE = 32  # 256 bits
    AES_NONCE_SIZE = 12  # 96 bits (GCM recommended)
    AES_TAG_SIZE = 16  # 128 bits

    # Encryption chunk size for streaming large files
    CHUNK_SIZE = 8 * 1024 * 1024  # 8 MB chunks
    CHUNKED_THRESHOLD = 50 * 1024 * 1024  # Use chunked encryption for files > 50 MB
    CHUNKED_MAGIC = b'\x01PGCE'  # Magic bytes: version 1, Private Gallery Chunked Encryption

    def __init__(self):
        """Set up the empty session table and locked-state placeholders.

        Cipher objects are NOT created here; they are populated by
        initialize_encryption() after a successful unlock.

        Raises:
            ImportError: if the `cryptography` package is missing. bcrypt and
                argon2-cffi are optional and merely trigger fallbacks.
        """
        self._sessions: Dict[str, Dict] = {}  # token -> {expiry, username, created_at}
        self._session_lock = Lock()
        self._derived_key: Optional[bytes] = None
        self._fernet: Optional[Fernet] = None
        self._aesgcm: Optional[AESGCM] = None

        # Check dependencies
        if not bcrypt:
            logger.warning("bcrypt not available - password hashing will use fallback")
        if not ARGON2_AVAILABLE:
            logger.warning("argon2-cffi not available - key derivation will use PBKDF2")
        if not CRYPTO_AVAILABLE:
            raise ImportError("cryptography library required for Private Gallery")
|
||||
|
||||
# =========================================================================
|
||||
# PASSWORD HASHING (bcrypt)
|
||||
# =========================================================================
|
||||
|
||||
def hash_password(self, password: str) -> str:
|
||||
"""
|
||||
Hash a password using bcrypt with cost factor 12.
|
||||
|
||||
Args:
|
||||
password: Plain text password
|
||||
|
||||
Returns:
|
||||
bcrypt hash string (includes salt)
|
||||
"""
|
||||
if bcrypt:
|
||||
salt = bcrypt.gensalt(rounds=12)
|
||||
hashed = bcrypt.hashpw(password.encode('utf-8'), salt)
|
||||
return hashed.decode('utf-8')
|
||||
else:
|
||||
# Fallback to PBKDF2 if bcrypt not available
|
||||
salt = secrets.token_bytes(16)
|
||||
kdf = PBKDF2HMAC(
|
||||
algorithm=hashes.SHA256(),
|
||||
length=32,
|
||||
salt=salt,
|
||||
iterations=600000,
|
||||
)
|
||||
key = kdf.derive(password.encode('utf-8'))
|
||||
return f"pbkdf2${base64.b64encode(salt).decode()}${base64.b64encode(key).decode()}"
|
||||
|
||||
    def verify_password(self, password: str, password_hash: str) -> bool:
        """
        Verify a password against its hash.

        Supports both bcrypt hashes and the "pbkdf2$<salt>$<key>" fallback
        format produced by hash_password() when bcrypt is unavailable.

        Args:
            password: Plain text password to check
            password_hash: Stored hash to verify against

        Returns:
            True if password matches; False on mismatch, malformed hash, or
            when a bcrypt hash is stored but bcrypt is not installed.
        """
        try:
            if password_hash.startswith('pbkdf2$'):
                # PBKDF2 fallback hash: "pbkdf2$<b64 salt>$<b64 derived key>"
                # (standard base64 never contains '$', so split is safe)
                parts = password_hash.split('$')
                if len(parts) != 3:
                    return False
                salt = base64.b64decode(parts[1])
                stored_key = base64.b64decode(parts[2])
                # Parameters must mirror hash_password()'s fallback branch.
                kdf = PBKDF2HMAC(
                    algorithm=hashes.SHA256(),
                    length=32,
                    salt=salt,
                    iterations=600000,
                )
                try:
                    # kdf.verify raises on mismatch; any failure => non-match
                    kdf.verify(password.encode('utf-8'), stored_key)
                    return True
                except Exception:
                    return False
            elif bcrypt:
                return bcrypt.checkpw(
                    password.encode('utf-8'),
                    password_hash.encode('utf-8')
                )
            else:
                # bcrypt-format hash stored but the library is unavailable
                return False
        except Exception as e:
            logger.error(f"Password verification failed: {e}")
            return False
|
||||
|
||||
# =========================================================================
|
||||
# KEY DERIVATION (Argon2id or PBKDF2)
|
||||
# =========================================================================
|
||||
|
||||
    def derive_key(self, password: str, salt: bytes) -> bytes:
        """
        Derive an encryption key from password using Argon2id, or
        PBKDF2-HMAC-SHA256 when argon2-cffi is not installed.

        NOTE(review): the two KDFs produce different keys for the same
        (password, salt); installing or removing argon2-cffi after data has
        been encrypted would make existing data undecryptable — confirm
        deployment keeps the KDF stable.

        Args:
            password: User's password
            salt: Random salt (should be stored)

        Returns:
            32-byte derived key for AES-256
        """
        if ARGON2_AVAILABLE:
            key = hash_secret_raw(
                secret=password.encode('utf-8'),
                salt=salt,
                time_cost=self.ARGON2_TIME_COST,
                memory_cost=self.ARGON2_MEMORY_COST,
                parallelism=self.ARGON2_PARALLELISM,
                hash_len=self.ARGON2_HASH_LENGTH,
                type=Type.ID  # Argon2id
            )
            return key
        else:
            # Fallback to PBKDF2 with high iterations
            kdf = PBKDF2HMAC(
                algorithm=hashes.SHA256(),
                length=self.AES_KEY_SIZE,
                salt=salt,
                iterations=600000,  # OWASP recommended minimum
            )
            return kdf.derive(password.encode('utf-8'))
|
||||
|
||||
def generate_salt(self) -> bytes:
|
||||
"""Generate a cryptographically secure random salt."""
|
||||
return secrets.token_bytes(16)
|
||||
|
||||
    def initialize_encryption(self, password: str, salt: bytes) -> None:
        """
        Initialize encryption with derived key.
        Must be called after successful unlock.

        Args:
            password: User's password
            salt: Stored salt for key derivation
        """
        self._derived_key = self.derive_key(password, salt)

        # Initialize Fernet for field encryption
        # Fernet requires a 32-byte key, base64-encoded
        # NOTE(review): the same 32-byte key backs both Fernet and AES-GCM;
        # separate subkeys would be cleaner, but changing this would break
        # decryption of existing data — confirm before touching.
        fernet_key = base64.urlsafe_b64encode(self._derived_key)
        self._fernet = Fernet(fernet_key)

        # Initialize AES-GCM for file encryption
        self._aesgcm = AESGCM(self._derived_key)

        logger.info("Encryption initialized successfully")
|
||||
|
||||
def clear_encryption(self) -> None:
|
||||
"""Clear encryption keys from memory (on lock)."""
|
||||
self._derived_key = None
|
||||
self._fernet = None
|
||||
self._aesgcm = None
|
||||
logger.info("Encryption keys cleared")
|
||||
|
||||
def is_initialized(self) -> bool:
|
||||
"""Check if encryption is initialized (unlocked)."""
|
||||
return self._derived_key is not None
|
||||
|
||||
# =========================================================================
|
||||
# FIELD ENCRYPTION (Fernet - for database fields)
|
||||
# =========================================================================
|
||||
|
||||
def encrypt_field(self, plaintext: str) -> str:
|
||||
"""
|
||||
Encrypt a database field value.
|
||||
|
||||
Args:
|
||||
plaintext: Plain text to encrypt
|
||||
|
||||
Returns:
|
||||
Base64-encoded encrypted string
|
||||
"""
|
||||
if not self._fernet:
|
||||
raise RuntimeError("Encryption not initialized - call initialize_encryption first")
|
||||
|
||||
if not plaintext:
|
||||
return ""
|
||||
|
||||
encrypted = self._fernet.encrypt(plaintext.encode('utf-8'))
|
||||
return base64.urlsafe_b64encode(encrypted).decode('utf-8')
|
||||
|
||||
def decrypt_field(self, ciphertext: str) -> str:
|
||||
"""
|
||||
Decrypt a database field value.
|
||||
|
||||
Args:
|
||||
ciphertext: Base64-encoded encrypted string
|
||||
|
||||
Returns:
|
||||
Decrypted plain text
|
||||
"""
|
||||
if not self._fernet:
|
||||
raise RuntimeError("Encryption not initialized - call initialize_encryption first")
|
||||
|
||||
if not ciphertext:
|
||||
return ""
|
||||
|
||||
try:
|
||||
encrypted = base64.urlsafe_b64decode(ciphertext.encode('utf-8'))
|
||||
decrypted = self._fernet.decrypt(encrypted)
|
||||
return decrypted.decode('utf-8')
|
||||
except Exception as e:
|
||||
logger.error(f"Field decryption failed: {e}")
|
||||
return "[Decryption Error]"
|
||||
|
||||
# =========================================================================
|
||||
# FILE ENCRYPTION (AES-256-GCM)
|
||||
# =========================================================================
|
||||
|
||||
    def encrypt_file(self, input_path: Path, output_path: Path) -> bool:
        """
        Encrypt a file using AES-256-GCM.

        Small files (<=50MB): single-shot format
            [12-byte nonce][encrypted data + 16-byte tag]

        Large files (>50MB): chunked format for memory efficiency
            [5-byte magic 0x01PGCE][4-byte chunk_size BE]
            [12-byte nonce][4-byte encrypted_len BE][encrypted chunk + 16-byte tag] (repeated)

        Args:
            input_path: Path to plaintext file
            output_path: Path for encrypted output

        Returns:
            True if successful; False on any error (logged, partial output removed)

        Raises:
            RuntimeError: if encryption has not been initialized
        """
        if not self._aesgcm:
            raise RuntimeError("Encryption not initialized")

        try:
            file_size = input_path.stat().st_size
            output_path.parent.mkdir(parents=True, exist_ok=True)

            if file_size <= self.CHUNKED_THRESHOLD:
                # Small file: single-shot encryption (backward compatible)
                nonce = secrets.token_bytes(self.AES_NONCE_SIZE)
                with open(input_path, 'rb') as f:
                    plaintext = f.read()
                ciphertext = self._aesgcm.encrypt(nonce, plaintext, None)
                with open(output_path, 'wb') as f:
                    f.write(nonce)
                    f.write(ciphertext)
            else:
                # Large file: chunked encryption
                import struct
                with open(input_path, 'rb') as fin, open(output_path, 'wb') as fout:
                    # Write header
                    fout.write(self.CHUNKED_MAGIC)
                    fout.write(struct.pack('>I', self.CHUNK_SIZE))

                    # Encrypt in chunks; a fresh random nonce per chunk keeps
                    # each (key, nonce) pair unique as GCM requires.
                    while True:
                        chunk = fin.read(self.CHUNK_SIZE)
                        if not chunk:
                            break
                        nonce = secrets.token_bytes(self.AES_NONCE_SIZE)
                        encrypted_chunk = self._aesgcm.encrypt(nonce, chunk, None)
                        # Write chunk: nonce + encrypted data (includes GCM tag)
                        fout.write(nonce)
                        fout.write(struct.pack('>I', len(encrypted_chunk)))
                        fout.write(encrypted_chunk)

            return True

        except Exception as e:
            logger.error(f"File encryption failed: {e}")
            # Clean up partial output
            if output_path.exists():
                try:
                    output_path.unlink()
                except Exception:
                    pass
            return False
|
||||
|
||||
def _is_chunked_format(self, input_path: Path) -> bool:
|
||||
"""Check if an encrypted file uses the chunked format."""
|
||||
try:
|
||||
with open(input_path, 'rb') as f:
|
||||
magic = f.read(len(self.CHUNKED_MAGIC))
|
||||
return magic == self.CHUNKED_MAGIC
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
    def decrypt_file(self, input_path: Path, output_path: Optional[Path] = None) -> Optional[bytes]:
        """
        Decrypt a file encrypted with AES-256-GCM.
        Handles both single-shot and chunked formats (detected by magic bytes).

        Args:
            input_path: Path to encrypted file
            output_path: Optional path to write decrypted file

        Returns:
            Decrypted bytes if output_path is None; None after a successful
            write to output_path; also None on any error (logged).

        Raises:
            RuntimeError: if encryption has not been initialized
        """
        if not self._aesgcm:
            raise RuntimeError("Encryption not initialized")

        try:
            if self._is_chunked_format(input_path):
                return self._decrypt_file_chunked(input_path, output_path)

            # Single-shot format: [nonce][ciphertext+tag]
            with open(input_path, 'rb') as f:
                nonce = f.read(self.AES_NONCE_SIZE)
                if len(nonce) != self.AES_NONCE_SIZE:
                    raise ValueError("Invalid encrypted file: missing nonce")
                ciphertext = f.read()

            # AESGCM.decrypt raises if the tag check fails (tamper/wrong key)
            plaintext = self._aesgcm.decrypt(nonce, ciphertext, None)

            if output_path:
                output_path.parent.mkdir(parents=True, exist_ok=True)
                with open(output_path, 'wb') as f:
                    f.write(plaintext)
                return None

            return plaintext

        except Exception as e:
            logger.error(f"File decryption failed: {e}")
            return None
|
||||
|
||||
    def _decrypt_file_chunked(self, input_path: Path, output_path: Optional[Path] = None) -> Optional[bytes]:
        """Decrypt a chunked-format encrypted file.

        Streams one chunk at a time: writes to output_path when given,
        otherwise accumulates the plaintext in memory.

        Returns:
            Decrypted bytes if output_path is None; None after writing to
            output_path; also None on any error (logged).
        """
        import struct

        try:
            # parts collects plaintext only when returning bytes; None means
            # "stream to output_path" instead.
            parts = [] if output_path is None else None

            with open(input_path, 'rb') as fin:
                # Read header
                magic = fin.read(len(self.CHUNKED_MAGIC))
                if magic != self.CHUNKED_MAGIC:
                    raise ValueError("Invalid chunked file header")
                chunk_size_bytes = fin.read(4)
                # chunk_size from header (informational, actual sizes are per-chunk)
                struct.unpack('>I', chunk_size_bytes)

                fout = None
                if output_path:
                    output_path.parent.mkdir(parents=True, exist_ok=True)
                    fout = open(output_path, 'wb')

                try:
                    while True:
                        # Read chunk: [12-byte nonce][4-byte encrypted_len][encrypted data]
                        nonce = fin.read(self.AES_NONCE_SIZE)
                        if len(nonce) == 0:
                            break  # EOF
                        if len(nonce) != self.AES_NONCE_SIZE:
                            raise ValueError("Truncated chunk nonce")

                        enc_len_bytes = fin.read(4)
                        if len(enc_len_bytes) != 4:
                            raise ValueError("Truncated chunk length")
                        enc_len = struct.unpack('>I', enc_len_bytes)[0]

                        encrypted_chunk = fin.read(enc_len)
                        if len(encrypted_chunk) != enc_len:
                            raise ValueError("Truncated chunk data")

                        # Raises if this chunk's GCM tag does not verify
                        decrypted_chunk = self._aesgcm.decrypt(nonce, encrypted_chunk, None)

                        if fout:
                            fout.write(decrypted_chunk)
                        else:
                            parts.append(decrypted_chunk)
                finally:
                    if fout:
                        fout.close()

            if output_path:
                return None
            return b''.join(parts)

        except Exception as e:
            logger.error(f"Chunked file decryption failed for {input_path}: {type(e).__name__}: {e}")
            return None
|
||||
|
||||
    def re_encrypt_to_chunked(self, file_path: Path) -> bool:
        """
        Re-encrypt a single-shot encrypted file to chunked format in-place.

        AES-GCM authenticates the whole ciphertext, so the single-shot source
        must first be decrypted in full (the plaintext is briefly held in
        memory); the chunked rewrite is then produced 8 MB at a time and
        atomically replaces the original file.

        Args:
            file_path: Path to the single-shot encrypted file

        Returns:
            True if successful, False if already chunked or on error

        Raises:
            RuntimeError: if encryption has not been initialized
        """
        if not self._aesgcm:
            raise RuntimeError("Encryption not initialized")

        if self._is_chunked_format(file_path):
            return False  # Already chunked

        import struct
        # Random suffix avoids collisions between concurrent re-encryptions
        temp_path = file_path.with_suffix(f'.enc.{secrets.token_hex(4)}.tmp')

        try:
            # Decrypt the single-shot file fully (required by AES-GCM)
            with open(file_path, 'rb') as f:
                nonce = f.read(self.AES_NONCE_SIZE)
                if len(nonce) != self.AES_NONCE_SIZE:
                    raise ValueError("Invalid encrypted file")
                ciphertext = f.read()

            plaintext = self._aesgcm.decrypt(nonce, ciphertext, None)
            del ciphertext  # Free memory

            # Write chunked format to temp file
            with open(temp_path, 'wb') as fout:
                fout.write(self.CHUNKED_MAGIC)
                fout.write(struct.pack('>I', self.CHUNK_SIZE))

                offset = 0
                while offset < len(plaintext):
                    chunk = plaintext[offset:offset + self.CHUNK_SIZE]
                    offset += len(chunk)
                    # Fresh nonce per chunk, matching encrypt_file()'s format
                    chunk_nonce = secrets.token_bytes(self.AES_NONCE_SIZE)
                    encrypted_chunk = self._aesgcm.encrypt(chunk_nonce, chunk, None)
                    fout.write(chunk_nonce)
                    fout.write(struct.pack('>I', len(encrypted_chunk)))
                    fout.write(encrypted_chunk)

            del plaintext  # Free memory

            # Atomic replace
            temp_path.replace(file_path)
            return True

        except Exception as e:
            logger.error(f"Re-encryption to chunked failed for {file_path}: {e}")
            if temp_path.exists():
                try:
                    temp_path.unlink()
                except Exception:
                    pass
            return False
|
||||
|
||||
def decrypt_file_streaming(self, input_path: Path) -> Optional[bytes]:
|
||||
"""
|
||||
Decrypt a file and return bytes for streaming.
|
||||
Only suitable for small files (single-shot format, ≤50MB).
|
||||
For large chunked files, use decrypt_file_generator() instead.
|
||||
|
||||
Args:
|
||||
input_path: Path to encrypted file
|
||||
|
||||
Returns:
|
||||
Decrypted bytes or None on error
|
||||
"""
|
||||
return self.decrypt_file(input_path, output_path=None)
|
||||
|
||||
    def decrypt_file_generator(self, input_path: Path):
        """
        Generator that yields decrypted chunks for streaming large files.
        For chunked files, yields one decrypted chunk at a time (~8MB each).
        For single-shot files, yields the entire content at once.

        Args:
            input_path: Path to encrypted file

        Yields:
            bytes: Decrypted data chunks

        Raises:
            RuntimeError: if encryption has not been initialized
            ValueError: if the encrypted file is truncated or malformed
        """
        import struct

        if not self._aesgcm:
            raise RuntimeError("Encryption not initialized")

        if self._is_chunked_format(input_path):
            with open(input_path, 'rb') as fin:
                # Skip header (magic + 4-byte chunk_size)
                fin.read(len(self.CHUNKED_MAGIC))
                fin.read(4)

                while True:
                    nonce = fin.read(self.AES_NONCE_SIZE)
                    if len(nonce) == 0:
                        break  # clean EOF
                    if len(nonce) != self.AES_NONCE_SIZE:
                        raise ValueError("Truncated chunk nonce")

                    enc_len_bytes = fin.read(4)
                    if len(enc_len_bytes) != 4:
                        raise ValueError("Truncated chunk length")
                    enc_len = struct.unpack('>I', enc_len_bytes)[0]

                    encrypted_chunk = fin.read(enc_len)
                    if len(encrypted_chunk) != enc_len:
                        raise ValueError("Truncated chunk data")

                    yield self._aesgcm.decrypt(nonce, encrypted_chunk, None)
        else:
            # Single-shot: yield everything at once (≤50MB)
            with open(input_path, 'rb') as f:
                nonce = f.read(self.AES_NONCE_SIZE)
                if len(nonce) != self.AES_NONCE_SIZE:
                    raise ValueError("Invalid encrypted file: missing nonce")
                ciphertext = f.read()
                yield self._aesgcm.decrypt(nonce, ciphertext, None)
|
||||
|
||||
def decrypt_file_range_generator(self, input_path: Path, start: int, end: int):
|
||||
"""
|
||||
Generator that yields only the decrypted bytes for a specific byte range.
|
||||
For chunked files, only decrypts the necessary chunks and slices them.
|
||||
For single-shot files, decrypts all and slices.
|
||||
|
||||
Args:
|
||||
input_path: Path to encrypted file
|
||||
start: Start byte offset (inclusive)
|
||||
end: End byte offset (inclusive)
|
||||
|
||||
Yields:
|
||||
bytes: Decrypted data for the requested range
|
||||
"""
|
||||
import struct
|
||||
|
||||
if not self._aesgcm:
|
||||
raise RuntimeError("Encryption not initialized")
|
||||
|
||||
if not self._is_chunked_format(input_path):
|
||||
# Single-shot: decrypt all and slice (file is ≤50MB)
|
||||
with open(input_path, 'rb') as f:
|
||||
nonce = f.read(self.AES_NONCE_SIZE)
|
||||
ciphertext = f.read()
|
||||
plaintext = self._aesgcm.decrypt(nonce, ciphertext, None)
|
||||
yield plaintext[start:end + 1]
|
||||
return
|
||||
|
||||
chunk_size = self.CHUNK_SIZE
|
||||
first_chunk = start // chunk_size
|
||||
last_chunk = end // chunk_size
|
||||
|
||||
# Header: 5 magic + 4 chunk_size = 9 bytes
|
||||
header_size = len(self.CHUNKED_MAGIC) + 4
|
||||
# Each full encrypted chunk: 12 nonce + 4 length + (chunk_size + 16 tag)
|
||||
enc_chunk_stride = self.AES_NONCE_SIZE + 4 + chunk_size + self.AES_TAG_SIZE
|
||||
|
||||
with open(input_path, 'rb') as fin:
|
||||
for chunk_idx in range(first_chunk, last_chunk + 1):
|
||||
# Seek to this chunk's position in the encrypted file
|
||||
fin.seek(header_size + chunk_idx * enc_chunk_stride)
|
||||
|
||||
nonce = fin.read(self.AES_NONCE_SIZE)
|
||||
if len(nonce) == 0:
|
||||
break
|
||||
if len(nonce) != self.AES_NONCE_SIZE:
|
||||
raise ValueError("Truncated chunk nonce")
|
||||
|
||||
enc_len_bytes = fin.read(4)
|
||||
if len(enc_len_bytes) != 4:
|
||||
raise ValueError("Truncated chunk length")
|
||||
enc_len = struct.unpack('>I', enc_len_bytes)[0]
|
||||
|
||||
encrypted_chunk = fin.read(enc_len)
|
||||
if len(encrypted_chunk) != enc_len:
|
||||
raise ValueError("Truncated chunk data")
|
||||
|
||||
decrypted_chunk = self._aesgcm.decrypt(nonce, encrypted_chunk, None)
|
||||
|
||||
# Calculate which part of this chunk we need
|
||||
chunk_start_byte = chunk_idx * chunk_size
|
||||
slice_start = max(start - chunk_start_byte, 0)
|
||||
slice_end = min(end - chunk_start_byte + 1, len(decrypted_chunk))
|
||||
|
||||
yield decrypted_chunk[slice_start:slice_end]
|
||||
|
||||
# =========================================================================
|
||||
# SESSION MANAGEMENT
|
||||
# =========================================================================
|
||||
|
||||
def create_session(self, username: str = "user", timeout_minutes: int = 30) -> str:
|
||||
"""
|
||||
Create a new session token.
|
||||
|
||||
Args:
|
||||
username: Username for the session
|
||||
timeout_minutes: Session timeout in minutes
|
||||
|
||||
Returns:
|
||||
Session token string
|
||||
"""
|
||||
token = secrets.token_urlsafe(32)
|
||||
expiry = datetime.now() + timedelta(minutes=timeout_minutes)
|
||||
|
||||
with self._session_lock:
|
||||
self._sessions[token] = {
|
||||
'expiry': expiry,
|
||||
'username': username,
|
||||
'created_at': datetime.now()
|
||||
}
|
||||
|
||||
logger.info(f"Created session for {username}, expires in {timeout_minutes} minutes")
|
||||
return token
|
||||
|
||||
def verify_session(self, token: str) -> Optional[Dict]:
|
||||
"""
|
||||
Verify a session token is valid and not expired.
|
||||
|
||||
Args:
|
||||
token: Session token to verify
|
||||
|
||||
Returns:
|
||||
Session info dict if valid, None otherwise
|
||||
"""
|
||||
with self._session_lock:
|
||||
session = self._sessions.get(token)
|
||||
|
||||
if not session:
|
||||
return None
|
||||
|
||||
if datetime.now() > session['expiry']:
|
||||
# Expired - remove it
|
||||
del self._sessions[token]
|
||||
return None
|
||||
|
||||
return session
|
||||
|
||||
def refresh_session(self, token: str, timeout_minutes: int = 30) -> bool:
|
||||
"""
|
||||
Refresh a session's expiry time.
|
||||
|
||||
Args:
|
||||
token: Session token to refresh
|
||||
timeout_minutes: New timeout in minutes
|
||||
|
||||
Returns:
|
||||
True if refreshed, False if token invalid
|
||||
"""
|
||||
with self._session_lock:
|
||||
session = self._sessions.get(token)
|
||||
|
||||
if not session:
|
||||
return False
|
||||
|
||||
if datetime.now() > session['expiry']:
|
||||
del self._sessions[token]
|
||||
return False
|
||||
|
||||
session['expiry'] = datetime.now() + timedelta(minutes=timeout_minutes)
|
||||
return True
|
||||
|
||||
def invalidate_session(self, token: str) -> bool:
|
||||
"""
|
||||
Invalidate a session token (logout/lock).
|
||||
|
||||
Args:
|
||||
token: Session token to invalidate
|
||||
|
||||
Returns:
|
||||
True if invalidated, False if not found
|
||||
"""
|
||||
with self._session_lock:
|
||||
if token in self._sessions:
|
||||
del self._sessions[token]
|
||||
return True
|
||||
return False
|
||||
|
||||
def invalidate_all_sessions(self) -> int:
|
||||
"""
|
||||
Invalidate all sessions (master lock).
|
||||
|
||||
Returns:
|
||||
Number of sessions invalidated
|
||||
"""
|
||||
with self._session_lock:
|
||||
count = len(self._sessions)
|
||||
self._sessions.clear()
|
||||
return count
|
||||
|
||||
def cleanup_expired_sessions(self) -> int:
|
||||
"""
|
||||
Remove all expired sessions.
|
||||
|
||||
Returns:
|
||||
Number of sessions removed
|
||||
"""
|
||||
with self._session_lock:
|
||||
now = datetime.now()
|
||||
expired = [t for t, s in self._sessions.items() if now > s['expiry']]
|
||||
for token in expired:
|
||||
del self._sessions[token]
|
||||
return len(expired)
|
||||
|
||||
def get_active_session_count(self) -> int:
|
||||
"""Get count of active (non-expired) sessions."""
|
||||
self.cleanup_expired_sessions()
|
||||
return len(self._sessions)
|
||||
|
||||
|
||||
# Global instance
# Lazily created by get_private_gallery_crypto(); _crypto_lock guards creation
# so concurrent first calls cannot construct two instances.
_crypto_instance: Optional[PrivateGalleryCrypto] = None
_crypto_lock = Lock()
|
||||
|
||||
|
||||
def get_private_gallery_crypto() -> PrivateGalleryCrypto:
    """Return the process-wide crypto singleton, creating it on first use."""
    global _crypto_instance
    with _crypto_lock:
        if _crypto_instance is None:
            _crypto_instance = PrivateGalleryCrypto()
        return _crypto_instance
|
||||
|
||||
|
||||
def export_key_to_file(path: str) -> bool:
    """
    Save the current derived key from the global crypto instance to a file.

    The file is created with mode 0600 from the start (via os.open) so the
    key material is never readable by other users, even briefly, and the
    final file is installed with an atomic rename.

    Args:
        path: File path to write the key material to

    Returns:
        True if successful
    """
    import json as _json

    crypto = get_private_gallery_crypto()
    if not crypto.is_initialized() or crypto._derived_key is None:
        logger.warning("Cannot export key: encryption not initialized")
        return False

    try:
        key_data = {
            'derived_key': base64.b64encode(crypto._derived_key).decode('utf-8')
        }
        key_path = Path(path)
        key_path.parent.mkdir(parents=True, exist_ok=True)

        # Write atomically via temp file; create with 0600 up front instead of
        # chmod-after-write, which left a window with default permissions.
        tmp_path = key_path.with_suffix('.tmp')
        fd = os.open(str(tmp_path), os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o600)
        with os.fdopen(fd, 'w') as f:
            _json.dump(key_data, f)
        # Enforce the mode even if tmp_path pre-existed with looser permissions
        os.chmod(str(tmp_path), 0o600)
        tmp_path.replace(key_path)

        logger.info(f"Exported encryption key to {path}")
        return True
    except Exception as e:
        logger.error(f"Failed to export key to {path}: {e}")
        return False
|
||||
|
||||
|
||||
def load_key_from_file(path: str) -> Optional[PrivateGalleryCrypto]:
    """
    Load a derived key from a file and return an initialized crypto instance.

    Mirrors PrivateGalleryCrypto.initialize_encryption(): the stored key
    backs both Fernet (field encryption) and AES-GCM (file encryption).

    Args:
        path: File path containing the key material

    Returns:
        Initialized PrivateGalleryCrypto instance, or None if the file is
        missing, unreadable, or malformed (errors are logged)
    """
    import json as _json

    key_path = Path(path)
    if not key_path.exists():
        return None

    try:
        with open(key_path, 'r') as f:
            key_data = _json.load(f)

        derived_key = base64.b64decode(key_data['derived_key'])

        crypto = PrivateGalleryCrypto()
        crypto._derived_key = derived_key

        # Initialize Fernet for field encryption
        fernet_key = base64.urlsafe_b64encode(derived_key)
        crypto._fernet = Fernet(fernet_key)

        # Initialize AES-GCM for file encryption
        crypto._aesgcm = AESGCM(derived_key)

        return crypto
    except Exception as e:
        logger.error(f"Failed to load key from {path}: {e}")
        return None
|
||||
|
||||
|
||||
def delete_key_file(path: str) -> bool:
    """
    Delete the key file at *path* if it exists.

    Returns:
        True on success (including when the file was already absent),
        False if the deletion raised.
    """
    key_path = Path(path)
    try:
        if key_path.exists():
            key_path.unlink()
            logger.info(f"Deleted key file {path}")
        return True
    except Exception as e:
        logger.error(f"Failed to delete key file {path}: {e}")
        return False
|
||||
Reference in New Issue
Block a user