Updates galore. Improved folder structure, componentized, and notifications upon completion.

This commit is contained in:
Brian McGonagill 2026-03-17 14:01:35 -05:00
parent b48784e2ad
commit 7e0502ca40
33 changed files with 3565 additions and 728 deletions

37
app/__init__.py Normal file
View file

@ -0,0 +1,37 @@
"""
app/__init__.py
===============
Flask application factory.
Usage
-----
from app import create_app
flask_app = create_app()
Gunicorn (wsgi.py) calls create_app() once at startup.
The dev-server entry point (run.py) does the same.
"""
from flask import Flask
from .config import BASE_DIR, MEDIA_ROOT
from .db import init_db
from .routes import register_routes
def create_app() -> Flask:
    """
    Application factory: build, wire up, and return the Flask app.

    Initialises the SQLite settings store and registers every route before
    returning, so both Gunicorn (wsgi.py) and the dev server (run.py) can
    simply call create_app() once at startup.
    """
    application = Flask(
        __name__,
        template_folder=str(BASE_DIR / 'templates'),
        static_folder=str(BASE_DIR / 'static'),
    )
    init_db()                      # create the SQLite settings table if needed
    register_routes(application)   # attach all URL handlers
    return application

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

51
app/config.py Normal file
View file

@ -0,0 +1,51 @@
"""
app/config.py
=============
Central configuration and the path-jail helper used by every other module.
All tuneable values can be overridden via environment variables:
MEDIA_ROOT Root directory the application may read/write (default: /media)
DB_PATH Path to the SQLite database file (default: <project>/videopress.db)
PORT TCP port Gunicorn listens on (default: 8080)
LOG_LEVEL Gunicorn log verbosity (default: info)
"""
import os
from pathlib import Path
# ---------------------------------------------------------------------------
# Paths
# ---------------------------------------------------------------------------
PACKAGE_DIR = Path(__file__).resolve().parent   # .../app/
BASE_DIR = PACKAGE_DIR.parent                   # .../videocompressor/

# Every file-system operation in the application is restricted to MEDIA_ROOT.
MEDIA_ROOT = Path(os.environ.get('MEDIA_ROOT', '/media')).resolve()

# ---------------------------------------------------------------------------
# Path-jail helper
# ---------------------------------------------------------------------------
def safe_path(raw: str) -> Path:
    """
    Resolve *raw* to an absolute path and assert it is inside MEDIA_ROOT.

    Parameters
    ----------
    raw : str
        User-supplied path; may be relative, contain ``..`` components,
        or traverse symlinks.

    Returns
    -------
    Path
        The fully resolved path on success.

    Raises
    ------
    PermissionError
        If the path cannot be resolved, or would escape MEDIA_ROOT
        (including symlink traversal and ../../ attacks).
    """
    try:
        resolved = Path(raw).resolve()
    except Exception as exc:
        # Chain the original error so the real cause survives in the
        # traceback (PEP 3134) instead of being silently discarded.
        raise PermissionError(f"Invalid path: {raw!r}") from exc
    # Path.is_relative_to (3.9+) replaces the fragile string-prefix test;
    # it correctly allows MEDIA_ROOT itself and cannot be fooled by a
    # sibling directory whose name merely starts with the root's name.
    if not resolved.is_relative_to(MEDIA_ROOT):
        raise PermissionError(
            f"Access denied: '{resolved}' is outside the allowed "
            f"media root ({MEDIA_ROOT})."
        )
    return resolved

142
app/db.py Normal file
View file

@ -0,0 +1,142 @@
"""
app/db.py
=========
Lightweight SQLite-backed key/value settings store.
The database file is created automatically on first use beside the
application package, or at the path set by the DB_PATH environment
variable (useful for Docker volume persistence).
Public API
----------
init_db() create the table if it doesn't exist (call at startup)
get_setting(key) return the stored string value, or None
save_setting(key, val) upsert a key/value pair
get_all_settings() return all rows as {key: value}
delete_setting(key) remove a key (used to clear optional fields)
"""
import os
import sqlite3
import threading
from pathlib import Path
from .config import BASE_DIR
# ---------------------------------------------------------------------------
# Database location
# ---------------------------------------------------------------------------
# Default: videocompressor/videopress.db — sits beside the app/ package.
# Override with the DB_PATH env var (e.g. to a Docker-mounted volume path).
DB_PATH = Path(os.environ.get('DB_PATH', str(BASE_DIR / 'videopress.db')))
# SQLite connections are not thread-safe across threads; use a per-thread
# connection via threading.local() so each worker greenlet/thread gets its own.
_local = threading.local()
# Serialises first-time schema creation across threads (used by init_db()).
_INIT_LOCK = threading.Lock()
# Process-wide flag flipped once the schema has been created; guarded by
# _INIT_LOCK so init_db() is safe to call from any thread.
_initialised = False
def _connect() -> sqlite3.Connection:
    """Return the calling thread's cached SQLite connection, creating it lazily."""
    conn = getattr(_local, 'conn', None)
    if conn is None:
        conn = sqlite3.connect(str(DB_PATH), check_same_thread=False)
        conn.row_factory = sqlite3.Row
        # WAL mode allows concurrent reads alongside a single writer
        conn.execute('PRAGMA journal_mode=WAL')
        conn.execute('PRAGMA foreign_keys=ON')
        _local.conn = conn
    return conn
# ---------------------------------------------------------------------------
# Schema
# ---------------------------------------------------------------------------
def init_db() -> None:
    """
    Create the settings table if it does not already exist.

    Also creates the parent directory of DB_PATH if needed, and verifies the
    directory is writable *before* SQLite tries to create the file, so the
    user gets an actionable message instead of an opaque SQLite error.
    Safe to call multiple times — idempotent (guarded by _INIT_LOCK and the
    module-level _initialised flag).

    Raises
    ------
    PermissionError
        If the database directory cannot be created or is not writable,
        with Docker-specific remediation hints in the message.
    """
    global _initialised
    with _INIT_LOCK:
        if _initialised:
            return
        # Ensure the directory exists before SQLite tries to create the file.
        # This handles the case where the Docker volume mount creates ./data
        # as root before the container user can write to it.
        db_dir = DB_PATH.parent
        try:
            db_dir.mkdir(parents=True, exist_ok=True)
        except PermissionError as exc:
            # BUG FIX: chain the original error (PEP 3134) so the underlying
            # cause is preserved in the traceback instead of being dropped.
            raise PermissionError(
                f"Cannot create database directory '{db_dir}'. "
                f"If running in Docker, create the directory on the host first "
                f"and ensure it is writable by UID 1000:\n"
                f"  mkdir -p {db_dir} && chown 1000:1000 {db_dir}"
            ) from exc
        # Test that we can actually write to the directory before SQLite tries
        test_file = db_dir / '.write_test'
        try:
            test_file.touch()
            test_file.unlink()
        except PermissionError as exc:
            raise PermissionError(
                f"Database directory '{db_dir}' is not writable by the current user. "
                f"If running in Docker, fix permissions on the host:\n"
                f"  chown 1000:1000 {db_dir}"
            ) from exc
        conn = _connect()
        conn.execute("""
            CREATE TABLE IF NOT EXISTS settings (
                key TEXT PRIMARY KEY,
                value TEXT NOT NULL
            )
        """)
        conn.commit()
        _initialised = True
# ---------------------------------------------------------------------------
# CRUD helpers
# ---------------------------------------------------------------------------
def get_setting(key: str) -> str | None:
    """Look up *key* in the settings table; None when absent."""
    init_db()
    cursor = _connect().execute(
        'SELECT value FROM settings WHERE key = ?', (key,)
    )
    row = cursor.fetchone()
    if row is None:
        return None
    return row['value']
def save_setting(key: str, value: str) -> None:
    """Upsert *key* → *value* into the settings table."""
    init_db()
    upsert_sql = (
        'INSERT INTO settings (key, value) VALUES (?, ?)'
        ' ON CONFLICT(key) DO UPDATE SET value = excluded.value'
    )
    db = _connect()
    db.execute(upsert_sql, (key, value))
    db.commit()
def delete_setting(key: str) -> None:
    """Delete *key* from the store; a missing key is a silent no-op."""
    init_db()
    db = _connect()
    db.execute('DELETE FROM settings WHERE key = ?', (key,))
    db.commit()
def get_all_settings() -> dict[str, str]:
    """Fetch every stored key/value pair as a plain dict."""
    init_db()
    cursor = _connect().execute('SELECT key, value FROM settings')
    return {record['key']: record['value'] for record in cursor.fetchall()}

349
app/jobs.py Normal file
View file

@ -0,0 +1,349 @@
"""
app/jobs.py
===========
In-process job store and the ffmpeg compression worker thread.
Design note: job state is kept in a plain dict protected by a threading.Lock.
This is intentional — VideoPress uses a single Gunicorn worker process
(required for SSE streaming with gevent), so cross-process state sharing is
not needed. If you ever move to multiple workers, replace `active_jobs` with
a Redis-backed store and remove the threading.Lock.
Public API
----------
active_jobs : dict {job_id -> job_dict}
job_lock : Lock protects mutations to active_jobs
push_event() : append an SSE event to a job's event queue
run_compression_job(): worker called in a daemon thread
"""
import os
import subprocess
import threading
import time
from pathlib import Path
from .notify import send_completion_email
# ---------------------------------------------------------------------------
# Job store
# ---------------------------------------------------------------------------
# job_id -> job dict (fields as described in the module docstring: files,
# events, per-job 'lock', 'cancelled' flag, etc.). In-process only — a
# single Gunicorn worker process is assumed.
active_jobs: dict = {}
# Guards insertion/lookup in active_jobs; each job additionally carries its
# own 'lock' for mutating that job's state.
job_lock = threading.Lock()
# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------
def push_event(job: dict, event: dict) -> None:
    """Thread-safely append *event* to the job's event list."""
    guard = job['lock']
    with guard:
        job['events'].append(event)
def _choose_encoder(codec: str) -> tuple[str, bool]:
"""
Return (ffmpeg_encoder_name, is_hevc) for the given source codec string.
HEVC / H.265 sources are re-encoded with libx265 to preserve efficiency.
Everything else uses libx264 (universally supported, always available).
"""
normalised = codec.lower()
is_hevc = normalised in ('hevc', 'h265', 'x265')
encoder = 'libx265' if is_hevc else 'libx264'
return encoder, is_hevc
def _build_ffmpeg_cmd(
src: str,
out: str,
video_k: int,
is_hevc: bool,
encoder: str,
) -> list[str]:
"""
Build the ffmpeg command list for one file.
libx264 accepts -maxrate / -bufsize directly.
libx265 requires those same constraints via -x265-params because its
CLI option names differ from the generic ffmpeg flags.
Both use AAC audio at 128 kbps.
-movflags +faststart is only meaningful for MP4 containers but is
silently ignored for MKV / MOV / etc., so it is always included.
"""
if is_hevc:
vbv_maxrate = int(video_k * 1.5)
vbv_bufsize = video_k * 2
encoder_opts = [
'-c:v', encoder,
'-b:v', f'{video_k}k',
'-x265-params', f'vbv-maxrate={vbv_maxrate}:vbv-bufsize={vbv_bufsize}',
]
else:
encoder_opts = [
'-c:v', encoder,
'-b:v', f'{video_k}k',
'-maxrate', f'{int(video_k * 1.5)}k',
'-bufsize', f'{video_k * 2}k',
]
return [
'ffmpeg', '-y', '-i', src,
*encoder_opts,
'-c:a', 'aac', '-b:a', '128k',
'-movflags', '+faststart',
'-progress', 'pipe:1', '-nostats',
out,
]
def _get_duration(filepath: str) -> float:
"""Return the duration of *filepath* in seconds, or 0.0 on failure."""
try:
probe = subprocess.run(
['ffprobe', '-v', 'error',
'-show_entries', 'format=duration',
'-of', 'default=noprint_wrappers=1:nokey=1',
filepath],
capture_output=True, text=True, timeout=30,
)
return float(probe.stdout.strip()) if probe.stdout.strip() else 0.0
except Exception:
return 0.0
def _send_notification(job: dict, email_results: list[dict], cancelled: bool) -> None:
    """Email the requested recipient (if any) and record a 'notify' event either way."""
    recipient = job.get('notify_email', '')
    if not recipient:
        return
    sent, error = send_completion_email(recipient, email_results, cancelled)
    if sent:
        text = f'Notification sent to {recipient}.'
    else:
        text = f'Could not send notification: {error}'
    push_event(job, {'type': 'notify', 'success': sent, 'message': text})
# ---------------------------------------------------------------------------
# Compression worker
# ---------------------------------------------------------------------------
def run_compression_job(job_id: str) -> None:
    """
    Worker function executed in a daemon thread for each compression job.
    Iterates over the file list, runs ffmpeg for each file, streams progress
    events, and sends an email notification when finished (if requested).

    Parameters
    ----------
    job_id : str
        Key into the module-level active_jobs store; returns immediately
        if no job exists under this id.
    """
    # Look up the job under the global store lock; later mutations use the
    # job's own finer-grained 'lock'.
    with job_lock:
        job = active_jobs.get(job_id)
    if not job:
        return
    files = job['files']
    suffix = job['suffix']
    total = job['total']
    push_event(job, {
        'type': 'start',
        'total': total,
        'message': f'Starting compression of {total} file(s)',
    })
    for idx, file_info in enumerate(files):
        # ── Cancellation check ────────────────────────────────────────────
        # Snapshot the flag under the lock, act on it outside, so events
        # are never pushed while holding the job lock.
        with job['lock']:
            cancelled = job['cancelled']
        if cancelled:
            _handle_cancel(job, idx)
            return
        # ── Per-file setup ────────────────────────────────────────────────
        src_path = file_info['path']
        # 1 Mbps fallback when the scanner supplied no target bitrate.
        target_bitrate = file_info.get('target_bit_rate_bps', 1_000_000)
        src_codec = file_info.get('codec', 'unknown')
        p = Path(src_path)
        # Output lands beside the source as <stem><suffix><ext>.
        out_path = str(p.parent / (p.stem + suffix + p.suffix))
        encoder, is_hevc = _choose_encoder(src_codec)
        # ffmpeg takes kbps; floored at 200k so tiny targets stay usable.
        video_k = max(int(target_bitrate / 1000), 200)
        push_event(job, {
            'type': 'file_start',
            'index': idx,
            'total': total,
            'filename': p.name,
            'output': out_path,
            'encoder': encoder,
            'message': f'Compressing ({idx + 1}/{total}): {p.name} [{encoder}]',
        })
        duration_secs = _get_duration(src_path)
        cmd = _build_ffmpeg_cmd(src_path, out_path, video_k, is_hevc, encoder)
        # ── Run ffmpeg ────────────────────────────────────────────────────
        try:
            # NOTE(review): stderr=PIPE is only drained after exit (in
            # _push_file_error); assumes -nostats keeps stderr small enough
            # not to fill the pipe buffer — confirm for very chatty runs.
            proc = subprocess.Popen(
                cmd,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                text=True,
                bufsize=1,
            )
            # Expose the handle so the cancel route can terminate it.
            with job['lock']:
                job['process'] = proc
            _stream_progress(job, proc, idx, duration_secs)
            proc.wait()
            with job['lock']:
                cancelled = job['cancelled']
            if cancelled:
                # Remove the partially written output before reporting.
                _remove_partial(out_path)
                _handle_cancel(job, idx)
                return
            if proc.returncode != 0:
                _push_file_error(job, idx, p.name, proc)
            else:
                _push_file_done(job, idx, p.name, out_path, file_info)
            with job['lock']:
                job['current_index'] = idx + 1
        except Exception as exc:
            # A per-file failure is reported but does not abort the batch.
            push_event(job, {
                'type': 'file_error',
                'index': idx,
                'filename': p.name,
                'message': f'Exception: {exc}',
            })
    # ── All files processed ───────────────────────────────────────────────
    push_event(job, {
        'type': 'done',
        'message': f'All {total} file(s) processed.',
    })
    with job['lock']:
        job['status'] = 'done'
        all_events = list(job['events'])
    # Summarise outcomes from the event log for the email notification;
    # done outside the lock because _send_notification re-acquires it.
    completed = [{'status': 'done', **e} for e in all_events if e.get('type') == 'file_done']
    errored = [{'status': 'error', **e} for e in all_events if e.get('type') == 'file_error']
    _send_notification(job, completed + errored, cancelled=False)
# ---------------------------------------------------------------------------
# Private sub-helpers
# ---------------------------------------------------------------------------
def _stream_progress(
    job: dict,
    proc: subprocess.Popen,
    idx: int,
    duration_secs: float,
) -> None:
    """Parse ffmpeg's -progress key=value stream and emit progress events."""
    for raw_line in proc.stdout:
        with job['lock']:
            if job['cancelled']:
                proc.terminate()
                return
        text = raw_line.strip()
        if '=' not in text:
            continue
        key, _, value = text.partition('=')
        key = key.strip()
        value = value.strip()
        if key == 'out_time_ms' and duration_secs > 0:
            # NOTE: despite the name, ffmpeg reports out_time_ms in
            # microseconds — hence the 1e6 divisor.
            try:
                elapsed = int(value) / 1_000_000
                percent = min(100.0, (elapsed / duration_secs) * 100)
            except (ValueError, ZeroDivisionError):
                continue
            push_event(job, {
                'type': 'progress',
                'index': idx,
                'percent': round(percent, 1),
                'elapsed_secs': round(elapsed, 1),
                'duration_secs': round(duration_secs, 1),
            })
        elif key == 'progress' and value == 'end':
            push_event(job, {
                'type': 'progress',
                'index': idx,
                'percent': 100.0,
                'elapsed_secs': duration_secs,
                'duration_secs': duration_secs,
            })
def _remove_partial(path: str) -> None:
try:
if os.path.exists(path):
os.remove(path)
except OSError:
pass
def _handle_cancel(job: dict, idx: int) -> None:
    """Record the cancellation, mark the job cancelled, then notify (if asked)."""
    push_event(job, {'type': 'cancelled', 'message': 'Compression cancelled by user'})
    with job['lock']:
        job['status'] = 'cancelled'
        events_snapshot = list(job['events'])
    # Summarise outside the lock: _send_notification pushes an event, which
    # re-acquires job['lock'].
    done = [{'status': 'done', **ev} for ev in events_snapshot if ev.get('type') == 'file_done']
    failed = [{'status': 'error', **ev} for ev in events_snapshot if ev.get('type') == 'file_error']
    _send_notification(job, done + failed, cancelled=True)
def _push_file_error(
    job: dict,
    idx: int,
    filename: str,
    proc: subprocess.Popen,
) -> None:
    """Emit a file_error event carrying the last 500 chars of ffmpeg's stderr."""
    stderr_tail = ''
    try:
        stderr_tail = proc.stderr.read()[-500:]
    except Exception:
        stderr_tail = ''
    push_event(job, {
        'type': 'file_error',
        'index': idx,
        'filename': filename,
        'message': f'ffmpeg exited with code {proc.returncode}',
        'detail': stderr_tail,
    })
def _push_file_done(
    job: dict,
    idx: int,
    filename: str,
    out_path: str,
    file_info: dict,
) -> None:
    """Emit a file_done event with the output size and percentage saved.

    Falls back to zeros when the output file cannot be stat'ed.
    """
    try:
        out_sz = os.path.getsize(out_path)
        out_gb = round(out_sz / (1024 ** 3), 3)
        orig_sz = file_info.get('size_bytes', 0)
        reduction = round((1 - out_sz / orig_sz) * 100, 1) if orig_sz else 0
    except OSError:
        out_gb = 0
        reduction = 0
    push_event(job, {
        'type': 'file_done',
        'index': idx,
        'filename': filename,
        'output': out_path,
        'output_size_gb': out_gb,
        'reduction_pct': reduction,
        # BUG FIX: the message previously contained the literal placeholder
        # '(unknown)'; report the real output size instead.
        'message': f'Completed: {out_gb} GB, saved {reduction}%',
    })

140
app/media.py Normal file
View file

@ -0,0 +1,140 @@
"""
app/media.py
============
File-system scanning and FFprobe metadata helpers.
Public API
----------
VIDEO_EXTENSIONS : frozenset of lowercase video file suffixes
get_video_info() : run ffprobe on a single file, return a metadata dict
list_video_files(): walk a directory tree and return files above a size floor
"""
import json
import os
import subprocess
from pathlib import Path
# ---------------------------------------------------------------------------
# Constants
# ---------------------------------------------------------------------------
# Lowercase file suffixes treated as video when scanning a directory tree.
# A frozenset gives immutability plus O(1) membership tests during the walk.
VIDEO_EXTENSIONS: frozenset[str] = frozenset({
    '.mp4', '.mkv', '.mov', '.avi', '.wmv', '.flv',
    '.webm', '.m4v', '.mpg', '.mpeg', '.ts', '.mts',
    '.m2ts', '.vob', '.ogv', '.3gp', '.3g2',
})
# ---------------------------------------------------------------------------
# FFprobe helper
# ---------------------------------------------------------------------------
def get_video_info(filepath: str) -> dict | None:
"""
Use ffprobe to get duration, total bitrate, codec, and dimensions.
Returns a dict with the keys below, or None if ffprobe fails.
Bitrate resolution order (handles HEVC/MKV where the stream-level
bit_rate field is absent):
1. Stream-level bit_rate present for H.264/MP4, often missing for HEVC
2. Format-level bit_rate reliable for all containers
3. Derived from size / duration final fallback
Returned keys
-------------
duration, bit_rate_bps, bit_rate_mbps,
target_bit_rate_bps, target_bit_rate_mbps,
size_bytes, size_gb, codec, width, height
"""
cmd = [
'ffprobe', '-v', 'error',
'-select_streams', 'v:0',
'-show_entries',
'format=duration,bit_rate,size:stream=codec_name,width,height,bit_rate',
'-of', 'json',
filepath,
]
try:
result = subprocess.run(cmd, capture_output=True, text=True, timeout=30)
if result.returncode != 0:
return None
data = json.loads(result.stdout)
fmt = data.get('format', {})
stream = (data.get('streams') or [{}])[0]
duration = float(fmt.get('duration', 0))
size_bytes = int(fmt.get('size', 0))
codec = stream.get('codec_name', 'unknown')
width = stream.get('width', 0)
height = stream.get('height', 0)
stream_br = int(stream.get('bit_rate') or 0)
format_br = int(fmt.get('bit_rate') or 0)
if stream_br > 0:
bit_rate = stream_br
elif format_br > 0:
bit_rate = format_br
elif duration > 0:
bit_rate = int((size_bytes * 8) / duration)
else:
bit_rate = 0
# Target ≈ 1/3 of the total bitrate; reserve 128 kbps for audio.
audio_bps = 128_000
video_bps = bit_rate - audio_bps if bit_rate > audio_bps else bit_rate
target_video_bps = max(int(video_bps / 3), 200_000)
return {
'duration': duration,
'bit_rate_bps': bit_rate,
'bit_rate_mbps': round(bit_rate / 1_000_000, 2),
'target_bit_rate_bps': target_video_bps,
'target_bit_rate_mbps': round(target_video_bps / 1_000_000, 2),
'size_bytes': size_bytes,
'size_gb': round(size_bytes / (1024 ** 3), 3),
'codec': codec,
'width': width,
'height': height,
}
except Exception:
return None
# ---------------------------------------------------------------------------
# Directory scanner
# ---------------------------------------------------------------------------
def list_video_files(directory: Path, min_size_gb: float) -> list[dict]:
    """
    Recursively walk *directory* and return video files of at least
    *min_size_gb* gigabytes. Hidden (dot-prefixed) directories are pruned.

    Each entry is a dict with: path, name, size_bytes, size_gb.
    Raises PermissionError if the root directory is inaccessible.
    """
    size_floor = min_size_gb * (1024 ** 3)
    found: list[dict] = []
    try:
        for root, dirnames, filenames in os.walk(directory):
            dirnames[:] = [d for d in dirnames if not d.startswith('.')]
            for name in filenames:
                if Path(name).suffix.lower() not in VIDEO_EXTENSIONS:
                    continue
                full = os.path.join(root, name)
                try:
                    nbytes = os.path.getsize(full)
                except OSError:
                    # File vanished or is unreadable — skip it.
                    continue
                if nbytes >= size_floor:
                    found.append({
                        'path': full,
                        'name': name,
                        'size_bytes': nbytes,
                        'size_gb': round(nbytes / (1024 ** 3), 3),
                    })
    except PermissionError as exc:
        raise PermissionError(f"Cannot access directory: {exc}") from exc
    return found

329
app/notify.py Normal file
View file

@ -0,0 +1,329 @@
"""
app/notify.py
=============
Email notification helper for compression job completion.
Delivery uses SMTP settings stored in SQLite (via app.db).
If no SMTP settings have been configured, the send call returns an
informative error rather than silently failing.
Public API
----------
get_smtp_config() -> dict with all SMTP fields (safe for the UI)
send_completion_email(to, results, cancelled) -> (ok: bool, error: str)
SMTP settings keys (stored in the 'settings' table)
----------------------------------------------------
smtp_host hostname or IP of the SMTP server
smtp_port port number (str)
smtp_security 'tls' (STARTTLS) | 'ssl' (SMTPS) | 'none'
smtp_user login username (optional)
smtp_password login password (optional, stored as-is)
smtp_from From: address used in sent mail
"""
import smtplib
import socket
import ssl
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import formatdate, make_msgid
from .db import get_setting
# ---------------------------------------------------------------------------
# SMTP config helper
# ---------------------------------------------------------------------------
def get_smtp_config() -> dict:
    """
    Browser-safe view of the stored SMTP settings.

    The raw password is never included — only the boolean 'password_set'
    flag indicating whether one is stored — so the dict may be serialised
    and sent to the UI as-is.
    """
    def fetch(key: str, fallback: str = '') -> str:
        return get_setting(key) or fallback

    return {
        'host': fetch('smtp_host'),
        'port': fetch('smtp_port', '587'),
        'security': fetch('smtp_security', 'tls'),
        'user': fetch('smtp_user'),
        'from_addr': fetch('smtp_from'),
        'password_set': bool(get_setting('smtp_password')),
    }
def _load_smtp_config() -> dict:
    """Full SMTP config including the raw password — server-side use only."""
    port_value = get_setting('smtp_port') or 587
    return {
        'host': get_setting('smtp_host') or '',
        'port': int(port_value),
        'security': get_setting('smtp_security') or 'tls',
        'user': get_setting('smtp_user') or '',
        'password': get_setting('smtp_password') or '',
        'from_addr': get_setting('smtp_from') or '',
    }
# ---------------------------------------------------------------------------
# Send helper
# ---------------------------------------------------------------------------
def send_completion_email(
    to_address: str,
    results: list[dict],
    cancelled: bool,
) -> tuple[bool, str]:
    """
    Send a job-completion notification to *to_address* using the SMTP
    settings stored in SQLite.

    Parameters
    ----------
    to_address : recipient address (minimally validated — must contain '@')
    results    : per-file dicts carrying 'status' ('done' | 'error') plus
                 the display fields consumed by the body builders
    cancelled  : True when the job was cancelled mid-run

    Returns
    -------
    (success, error_message) — error_message is '' on success, otherwise a
    human-readable explanation suitable for display in the UI.
    """
    if not to_address or '@' not in to_address:
        return False, 'Invalid recipient email address'
    cfg = _load_smtp_config()
    if not cfg['host']:
        return False, (
            'No SMTP server configured. '
            'Please add your SMTP settings in the ⚙ Settings panel.'
        )
    if not cfg['from_addr']:
        return False, (
            'No From address configured. '
            'Please add your SMTP settings in the ⚙ Settings panel.'
        )
    # ── Build message ─────────────────────────────────────────────────────
    done_files = [r for r in results if r.get('status') == 'done']
    error_files = [r for r in results if r.get('status') == 'error']
    total = len(results)
    hostname = socket.getfqdn()
    if cancelled:
        subject = (f'VideoPress: compression cancelled '
                   f'({len(done_files)}/{total} completed) on {hostname}')
    elif error_files:
        subject = (f'VideoPress: compression complete with '
                   f'{len(error_files)} error(s) on {hostname}')
    else:
        subject = (f'VideoPress: compression complete — '
                   f'{total} file(s) processed on {hostname}')
    msg = MIMEMultipart('alternative')
    msg['Subject'] = subject
    msg['From'] = cfg['from_addr']
    msg['To'] = to_address
    msg['Date'] = formatdate(localtime=True)
    msg['Message-ID'] = make_msgid(domain=hostname)
    # Plain part first, HTML last — clients render the last part they support.
    msg.attach(MIMEText(
        _build_plain(hostname, cancelled, done_files, error_files, total),
        'plain', 'utf-8',
    ))
    msg.attach(MIMEText(
        _build_html(hostname, subject, cancelled, done_files, error_files, total),
        'html', 'utf-8',
    ))
    # ── Connect and send ──────────────────────────────────────────────────
    try:
        security = cfg['security'].lower()
        host = cfg['host']
        port = cfg['port']
        if security == 'ssl':
            # SMTPS — wrap in SSL from the start (port 465 typically)
            context = ssl.create_default_context()
            server = smtplib.SMTP_SSL(host, port, context=context, timeout=15)
        else:
            # Plain or STARTTLS (port 587 typically)
            server = smtplib.SMTP(host, port, timeout=15)
            server.ehlo()
            if security == 'tls':
                context = ssl.create_default_context()
                server.starttls(context=context)
                server.ehlo()
        with server:
            if cfg['user'] and cfg['password']:
                server.login(cfg['user'], cfg['password'])
            server.sendmail(cfg['from_addr'], [to_address], msg.as_bytes())
        return True, ''
    except smtplib.SMTPAuthenticationError:
        return False, (
            'Authentication failed — check your username and password. '
            'For Gmail/Google Workspace, use an App Password rather than '
            'your account password.'
        )
    except smtplib.SMTPConnectError as exc:
        return False, (
            f'Could not connect to {host}:{port}. '
            f'Check the host, port, and security setting. ({exc})'
        )
    except smtplib.SMTPRecipientsRefused as exc:
        refused = ', '.join(exc.recipients.keys())
        return False, f'Recipient address rejected by server: {refused}'
    except smtplib.SMTPSenderRefused as exc:
        return False, (
            f'From address "{cfg["from_addr"]}" was rejected by the server. '
            f'Ensure it matches your authenticated account. ({exc.smtp_error.decode(errors="replace")})'
        )
    except smtplib.SMTPException as exc:
        return False, f'SMTP error: {exc}'
    except ssl.SSLError as exc:
        # BUG FIX: a separator was missing between the address and the hint,
        # producing e.g. "host:465try changing the Security setting".
        return False, (
            f'SSL/TLS error connecting to {host}:{port} — '
            f'try changing the Security setting. ({exc})'
        )
    except TimeoutError:
        return False, (
            f'Connection to {host}:{port} timed out. '
            f'Check the host and port, and that the server is reachable.'
        )
    except OSError as exc:
        # BUG FIX: a separator was missing between the address and the error
        # text, producing e.g. "host:587[Errno -2] Name or service not known".
        return False, (
            f'Network error connecting to {host}:{port} — {exc}. '
            f'Check the hostname and that the server is reachable.'
        )
    except Exception as exc:
        return False, f'Unexpected error: {exc}'
# ---------------------------------------------------------------------------
# Email body builders
# ---------------------------------------------------------------------------
def _build_plain(hostname, cancelled, done_files, error_files, total) -> str:
lines = [
'VideoPress Compression Report',
f'Host : {hostname}',
f'Status : {"Cancelled" if cancelled else "Complete"}',
f'Files : {len(done_files)} succeeded, {len(error_files)} failed, {total} total',
'',
]
if done_files:
lines.append('Completed files:')
for r in done_files:
lines.append(
f"{r.get('filename','?')} "
f"({r.get('output_size_gb','?')} GB, "
f"-{r.get('reduction_pct','?')}%)"
)
lines.append('')
if error_files:
lines.append('Failed files:')
for r in error_files:
lines.append(
f"{r.get('filename','?')} "
f"{r.get('message','unknown error')}"
)
lines.append('')
lines += ['', 'Sent by VideoPress FFmpeg Compressor']
return '\n'.join(lines)
def _build_html(hostname, subject, cancelled, done_files, error_files, total) -> str:
    """
    Render the HTML body of the completion email.

    All styling is inlined (many email clients strip <style> blocks):
    summary cards for status/total/succeeded (plus a Failed card when
    relevant), followed by per-file tables for completed and errored files.
    """
    # Green for a clean run, amber for cancelled, red when errors occurred.
    status_colour = (
        '#166534' if not cancelled and not error_files
        else '#92400e' if cancelled
        else '#991b1b'
    )
    status_label = (
        'Cancelled' if cancelled
        else 'Complete ✓' if not error_files
        else 'Complete with errors'
    )
    def file_rows(files, icon, bg):
        # One <tr> per file; 'done' rows show size and savings, error rows
        # show the failure message instead.
        rows = ''
        for r in files:
            detail = (
                f"{r.get('output_size_gb','?')} GB &nbsp;·&nbsp; "
                f"-{r.get('reduction_pct','?')}%"
                if r.get('status') == 'done'
                else r.get('message', 'unknown error')
            )
            rows += (
                f'<tr style="background:{bg}">'
                f'<td style="padding:6px 10px;font-size:1.1em">{icon}</td>'
                f'<td style="padding:6px 10px;font-family:monospace;font-size:.9em">'
                f'{r.get("filename","?")}</td>'
                f'<td style="padding:6px 10px;color:#555;font-size:.85em">{detail}</td>'
                f'</tr>'
            )
        return rows
    done_rows = file_rows(done_files, '', '#f0fdf4')
    error_rows = file_rows(error_files, '', '#fef2f2')
    # The Failed summary card is only rendered when something failed.
    error_cell = (
        f'<div><div style="font-size:.7em;text-transform:uppercase;'
        f'letter-spacing:.06em;color:#6b7280;font-weight:700">Failed</div>'
        f'<div style="font-size:1.3em;font-weight:700;color:#991b1b">'
        f'{len(error_files)}</div></div>'
    ) if error_files else ''
    done_section = (
        f'<h2 style="font-size:1em;color:#166534;margin:0 0 8px">Completed</h2>'
        f'<table style="width:100%;border-collapse:collapse;margin-bottom:20px">'
        f'{done_rows}</table>'
    ) if done_files else ''
    error_section = (
        f'<h2 style="font-size:1em;color:#991b1b;margin:0 0 8px">Errors</h2>'
        f'<table style="width:100%;border-collapse:collapse;margin-bottom:20px">'
        f'{error_rows}</table>'
    ) if error_files else ''
    return f"""<!DOCTYPE html>
<html lang="en">
<head><meta charset="UTF-8"><title>{subject}</title></head>
<body style="font-family:system-ui,sans-serif;background:#f9fafb;margin:0;padding:24px">
<div style="max-width:640px;margin:0 auto;background:#fff;border-radius:10px;
box-shadow:0 2px 8px rgba(0,0,0,.08);overflow:hidden">
<div style="background:#1a1a18;padding:20px 28px">
<span style="color:#f97316;font-size:1.4em"></span>
<span style="color:#f5f5f2;font-size:1.15em;font-weight:700;
letter-spacing:.03em;margin-left:10px">
Video<strong style="color:#f97316">Press</strong>
</span>
</div>
<div style="padding:28px">
<h1 style="margin:0 0 4px;font-size:1.2em;color:#111">Compression Run Report</h1>
<p style="margin:0 0 20px;color:#6b7280;font-size:.9em">Host: <code>{hostname}</code></p>
<div style="background:#f3f4f6;border-radius:8px;padding:16px 20px;
margin-bottom:24px;display:flex;gap:32px;flex-wrap:wrap">
<div>
<div style="font-size:.7em;text-transform:uppercase;letter-spacing:.06em;
color:#6b7280;font-weight:700">Status</div>
<div style="font-size:1.3em;font-weight:700;color:{status_colour}">{status_label}</div>
</div>
<div>
<div style="font-size:.7em;text-transform:uppercase;letter-spacing:.06em;
color:#6b7280;font-weight:700">Total</div>
<div style="font-size:1.3em;font-weight:700;color:#111">{total}</div>
</div>
<div>
<div style="font-size:.7em;text-transform:uppercase;letter-spacing:.06em;
color:#6b7280;font-weight:700">Succeeded</div>
<div style="font-size:1.3em;font-weight:700;color:#166534">{len(done_files)}</div>
</div>
{error_cell}
</div>
{done_section}
{error_section}
<hr style="border:none;border-top:1px solid #e5e7eb;margin:24px 0 16px">
<p style="color:#9ca3af;font-size:.78em;margin:0">Sent by VideoPress FFmpeg Compressor</p>
</div>
</div>
</body>
</html>"""

470
app/routes.py Normal file
View file

@ -0,0 +1,470 @@
"""
app/routes.py
=============
All Flask route handlers. Registered on the app object via register_routes()
which is called by the application factory in app/__init__.py.
Routes
------
GET / index page
GET /api/config server configuration (media_root)
GET /api/browse?path= directory listing
POST /api/scan scan for video files
POST /api/compress/start start a compression job
GET /api/compress/progress/<id> SSE progress stream
POST /api/compress/cancel/<id> cancel a running job
"""
import json
import time
import threading
from pathlib import Path
from flask import Flask, Response, jsonify, render_template, request, stream_with_context
from .config import MEDIA_ROOT, safe_path
from .db import get_all_settings, save_setting, delete_setting
from .media import get_video_info, list_video_files
from .jobs import active_jobs, job_lock, run_compression_job
from .notify import get_smtp_config, send_completion_email
def fmttime(seconds: float) -> str:
    """Render *seconds* as M:SS, or H:MM:SS once an hour is reached."""
    total = int(seconds)
    hours, remainder = divmod(total, 3600)
    minutes, secs = divmod(remainder, 60)
    if hours:
        return f"{hours}:{minutes:02d}:{secs:02d}"
    return f"{minutes}:{secs:02d}"
def register_routes(app: Flask) -> None:
    """Attach all routes to *app*.

    Called once by the application factory in app/__init__.py; every
    handler below is a closure registered on the passed-in Flask app.
    """
    # ── UI ────────────────────────────────────────────────────────────────
    @app.route('/')
    def index():
        # media_root is injected into the template so the frontend knows
        # the browsing jail root without an extra API round-trip.
        return render_template('index.html', media_root=str(MEDIA_ROOT))

    # ── Config ────────────────────────────────────────────────────────────
    @app.route('/api/config')
    def api_config():
        """Return server-side settings the frontend needs at startup."""
        return jsonify({'media_root': str(MEDIA_ROOT)})
    # ── SMTP settings ─────────────────────────────────────────────────────
    @app.route('/api/settings/smtp', methods=['GET'])
    def smtp_settings_get():
        """
        Return current SMTP settings (password is never sent, only a flag
        indicating whether one is stored).
        """
        # get_smtp_config() performs the password masking server-side.
        return jsonify(get_smtp_config())
@app.route('/api/settings/smtp', methods=['POST'])
def smtp_settings_save():
"""
Save SMTP settings to SQLite. Only fields present in the request
body are updated; omitting 'password' leaves the stored password
unchanged (useful when the user edits other fields but doesn't want
to re-enter the password).
"""
data = request.get_json(silent=True) or {}
# Fields whose DB key matches smtp_{field} exactly
for field in ('host', 'port', 'security'):
if field in data:
value = str(data[field]).strip()
if not value:
return jsonify({'error': f"'{field}' cannot be empty"}), 400
save_setting(f'smtp_{field}', value)
# from_addr is stored as 'smtp_from' (not 'smtp_from_addr')
if 'from_addr' in data:
value = str(data['from_addr']).strip()
if not value:
return jsonify({'error': "'from_addr' cannot be empty"}), 400
save_setting('smtp_from', value)
# Optional fields
if 'user' in data:
val = str(data['user']).strip()
if val:
save_setting('smtp_user', val)
else:
delete_setting('smtp_user')
# Password: only update if a non-empty value is explicitly sent
if 'password' in data and str(data['password']).strip():
save_setting('smtp_password', str(data['password']).strip())
return jsonify({'ok': True, 'config': get_smtp_config()})
@app.route('/api/settings/smtp/test', methods=['POST'])
def smtp_settings_test():
"""
Send a test email using the currently saved SMTP settings.
Always returns HTTP 200 SMTP failures are reported in the
JSON body as {ok: false, message: "..."} so the browser can
display the exact error without interference from proxies or
the browser's own error handling for 5xx responses.
"""
data = request.get_json(silent=True) or {}
test_to = data.get('to', '').strip()
if not test_to or '@' not in test_to:
return jsonify({'ok': False, 'message': 'Please enter a valid recipient address.'}), 400
ok, err = send_completion_email(
to_address = test_to,
results = [{
'status': 'done',
'filename': 'test_video.mp4',
'output_size_gb': 1.2,
'reduction_pct': 33,
}],
cancelled = False,
)
if ok:
return jsonify({'ok': True, 'message': f'Test email sent to {test_to}.'})
# Always 200 — the caller checks data.ok, not the HTTP status
return jsonify({'ok': False, 'message': err})
# ── Directory browser ─────────────────────────────────────────────────
@app.route('/api/browse')
def browse_directory():
raw = request.args.get('path', str(MEDIA_ROOT))
try:
path = safe_path(raw)
except PermissionError as exc:
return jsonify({'error': str(exc)}), 403
if not path.exists():
return jsonify({'error': 'Path does not exist'}), 404
if not path.is_dir():
return jsonify({'error': 'Not a directory'}), 400
try:
entries = [
{'name': e.name, 'path': str(e), 'is_dir': e.is_dir()}
for e in sorted(
path.iterdir(),
key=lambda e: (not e.is_dir(), e.name.lower()),
)
if not e.name.startswith('.')
]
parent = str(path.parent) if path != MEDIA_ROOT else None
return jsonify({
'current': str(path),
'parent': parent,
'entries': entries,
'media_root': str(MEDIA_ROOT),
})
except PermissionError:
return jsonify({'error': 'Permission denied'}), 403
# ── File scanner ──────────────────────────────────────────────────────
@app.route('/api/scan', methods=['POST'])
def scan_directory():
data = request.get_json(silent=True) or {}
raw_dir = data.get('directory', '')
min_size_gb = float(data.get('min_size_gb', 1.0))
if not raw_dir:
return jsonify({'error': 'No directory provided'}), 400
try:
directory = safe_path(raw_dir)
except PermissionError as exc:
return jsonify({'error': str(exc)}), 403
if not directory.is_dir():
return jsonify({'error': 'Invalid directory'}), 400
try:
files = list_video_files(directory, min_size_gb)
except PermissionError as exc:
return jsonify({'error': str(exc)}), 403
enriched = []
for f in files:
info = get_video_info(f['path'])
if info:
f.update(info)
else:
# Rough fallback: assume a 90-minute feature film
bps = int((f['size_bytes'] * 8) / (90 * 60))
f.update({
'bit_rate_bps': bps,
'bit_rate_mbps': round(bps / 1_000_000, 2),
'target_bit_rate_bps': max(bps // 3, 200_000),
'target_bit_rate_mbps': round(max(bps // 3, 200_000) / 1_000_000, 2),
'duration': 0,
'codec': 'unknown',
'width': 0,
'height': 0,
})
enriched.append(f)
enriched.sort(key=lambda x: x['size_bytes'], reverse=True)
return jsonify({'files': enriched, 'count': len(enriched)})
    # ── Compression — status snapshot (for reconnect/reload) ─────────────
    @app.route('/api/compress/status/<job_id>')
    def compression_status(job_id):
        """
        Return a complete point-in-time snapshot of a job's state.

        This is used when the browser reconnects after losing the SSE stream
        (page reload, tab backgrounded, network blip). The frontend replays
        this snapshot to rebuild the full progress UI, then re-attaches the
        live SSE stream from where it left off.

        Response shape
        --------------
        {
            job_id, status, total, current_index,
            files: [ {path, name, ...original file info} ],
            file_states: [          # one entry per file, index-aligned
                {
                    status: 'waiting' | 'running' | 'done' | 'error',
                    percent: 0-100,
                    detail: str,    # time elapsed / output size / error msg
                    filename, output, reduction_pct, output_size_gb (done only)
                    message (error only)
                }
            ],
            done_count: int,
            event_count: int        # total events stored; SSE stream resumes from here
        }
        """
        with job_lock:
            job = active_jobs.get(job_id)
        if not job:
            return jsonify({'error': 'Job not found'}), 404

        # Copy everything we need under the per-job lock, then replay
        # outside it so the worker thread is not blocked meanwhile.
        with job['lock']:
            events = list(job['events'])
            status = job['status']
            total = job['total']
            current_index = job['current_index']
            files = job['files']

        # Replay the event log to reconstruct per-file state
        # (every file starts as 'waiting' and is promoted by events).
        file_states = [
            {'status': 'waiting', 'percent': 0, 'detail': '', 'filename': f.get('name', '')}
            for f in files
        ]
        done_count = 0
        for evt in events:
            t = evt.get('type')
            idx = evt.get('index')
            if t == 'file_start' and idx is not None:
                file_states[idx].update({
                    'status': 'running',
                    'percent': 0,
                    'detail': '',
                    # Keep the seeded name if the event carries none.
                    'filename': evt.get('filename', file_states[idx]['filename']),
                    'output': evt.get('output', ''),
                    'encoder': evt.get('encoder', ''),
                })
            elif t == 'progress' and idx is not None:
                file_states[idx].update({
                    'status': 'running',
                    'percent': evt.get('percent', 0),
                    # Elapsed/total timer, shown only once a duration is known.
                    'detail': (
                        f"{fmttime(evt.get('elapsed_secs',0))} / "
                        f"{fmttime(evt.get('duration_secs',0))}"
                        if evt.get('duration_secs', 0) > 0 else ''
                    ),
                })
            elif t == 'file_done' and idx is not None:
                done_count += 1
                file_states[idx].update({
                    'status': 'done',
                    'percent': 100,
                    'detail': (f"{evt.get('output_size_gb','?')} GB "
                               f"saved {evt.get('reduction_pct','?')}%"),
                    'filename': evt.get('filename', ''),
                    'output': evt.get('output', ''),
                    'reduction_pct': evt.get('reduction_pct', 0),
                    'output_size_gb': evt.get('output_size_gb', 0),
                })
            elif t == 'file_error' and idx is not None:
                file_states[idx].update({
                    'status': 'error',
                    'percent': 0,
                    'detail': evt.get('message', 'Unknown error'),
                    'message': evt.get('message', ''),
                })
        return jsonify({
            'job_id': job_id,
            'status': status,
            'total': total,
            'current_index': current_index,
            'done_count': done_count,
            'event_count': len(events),
            'files': files,
            'file_states': file_states,
        })
# ── Compression — list active jobs (for page-load auto-reconnect) ─────
@app.route('/api/compress/active')
def list_active_jobs():
"""
Return a list of jobs that are currently running or recently finished.
The frontend calls this on page load to detect whether a job is in
progress and should be reconnected to.
"""
with job_lock:
jobs = list(active_jobs.values())
result = []
for job in jobs:
with job['lock']:
result.append({
'job_id': job['id'],
'status': job['status'],
'total': job['total'],
'current_index': job['current_index'],
})
# Most recent first
result.sort(key=lambda j: j['job_id'], reverse=True)
return jsonify({'jobs': result})
# ── Compression — start ───────────────────────────────────────────────
@app.route('/api/compress/start', methods=['POST'])
def start_compression():
data = request.get_json(silent=True) or {}
files = data.get('files', [])
suffix = data.get('suffix', '_new')
notify_email = data.get('notify_email', '').strip()
if not files:
return jsonify({'error': 'No files provided'}), 400
if notify_email and (len(notify_email) > 254 or '@' not in notify_email):
return jsonify({'error': 'Invalid notification email address'}), 400
for f in files:
try:
safe_path(f.get('path', ''))
except PermissionError as exc:
return jsonify({'error': str(exc)}), 403
job_id = f"job_{int(time.time() * 1000)}"
job = {
'id': job_id,
'files': files,
'suffix': suffix,
'notify_email': notify_email,
'status': 'running',
'current_index': 0,
'total': len(files),
'events': [],
'process': None,
'cancelled': False,
'lock': threading.Lock(),
}
with job_lock:
active_jobs[job_id] = job
threading.Thread(
target=run_compression_job,
args=(job_id,),
daemon=True,
).start()
return jsonify({'job_id': job_id})
    # ── Compression — SSE progress stream ─────────────────────────────────
    @app.route('/api/compress/progress/<job_id>')
    def compression_progress(job_id):
        """
        Server-Sent Events stream for real-time job progress.

        Query param: ?from=N  start streaming from event index N (default 0).
        On reconnect the client passes the last event index it saw so it only
        receives new events, not a full replay of the history.

        Compatible with Gunicorn + gevent: time.sleep() yields the greenlet
        rather than blocking a real OS thread.
        """
        try:
            start_from = int(request.args.get('from', 0))
        except (TypeError, ValueError):
            # A malformed ?from= simply falls back to a full replay.
            start_from = 0

        def event_stream():
            last_idx = start_from
            while True:
                with job_lock:
                    job = active_jobs.get(job_id)
                if not job:
                    yield (
                        f"data: {json.dumps({'type': 'error', 'message': 'Job not found'})}\n\n"
                    )
                    return
                with job['lock']:
                    # Copy the new slice under the lock; serialise and
                    # yield outside it so the worker is not blocked.
                    new_events = job['events'][last_idx:]
                    last_idx += len(new_events)
                    status = job['status']
                for event in new_events:
                    yield f"data: {json.dumps(event)}\n\n"
                # Stop only when the job is finished AND fully drained.
                if status in ('done', 'cancelled', 'error') and not new_events:
                    break
                time.sleep(0.25)

        return Response(
            stream_with_context(event_stream()),
            mimetype='text/event-stream',
            headers={
                'Cache-Control': 'no-cache',
                # Ask nginx-style proxies not to buffer the event stream.
                'X-Accel-Buffering': 'no',
            },
        )
    # ── Compression — cancel ──────────────────────────────────────────────
    @app.route('/api/compress/cancel/<job_id>', methods=['POST'])
    def cancel_compression(job_id):
        """
        Request cancellation of a running job.

        Sets the job's 'cancelled' flag (for the worker thread to observe)
        and terminates the job's active subprocess, escalating to kill()
        if it has not exited after one second.
        """
        with job_lock:
            job = active_jobs.get(job_id)
        if not job:
            return jsonify({'error': 'Job not found'}), 404
        with job['lock']:
            job['cancelled'] = True
            proc = job.get('process')
        if proc and proc.poll() is None:
            try:
                proc.terminate()
                time.sleep(1)
                # Escalate if terminate() was ignored.
                if proc.poll() is None:
                    proc.kill()
            except Exception:
                # Best-effort: the process may have exited in the meantime.
                pass
        return jsonify({'status': 'cancellation requested'})