"""
|
|
app/routes.py
|
|
=============
|
|
All Flask route handlers. Registered on the app object via register_routes()
|
|
which is called by the application factory in app/__init__.py.
|
|
|
|
Routes
|
|
------
|
|
GET / → index page
|
|
GET /api/config → server configuration (media_root)
|
|
GET /api/browse?path=… → directory listing
|
|
POST /api/scan → scan for video files
|
|
POST /api/compress/start → start a compression job
|
|
GET /api/compress/progress/<id> → SSE progress stream
|
|
POST /api/compress/cancel/<id> → cancel a running job
|
|
"""
|
|
|
|
import json
|
|
import time
|
|
import threading
|
|
from pathlib import Path
|
|
|
|
from flask import Flask, Response, jsonify, render_template, request, stream_with_context
|
|
|
|
from .config import MEDIA_ROOT, safe_path
|
|
from .db import get_all_settings, save_setting, delete_setting
|
|
from .media import get_video_info, list_video_files
|
|
from .jobs import active_jobs, job_lock, run_compression_job
|
|
from .notify import get_smtp_config, send_completion_email
|
|
|
|
|
|
def fmttime(seconds: float) -> str:
    """Format *seconds* as M:SS, or H:MM:SS once an hour is reached."""
    whole = int(seconds)
    hours, remainder = divmod(whole, 3600)
    minutes, secs = divmod(remainder, 60)
    if hours:
        return f"{hours}:{minutes:02d}:{secs:02d}"
    return f"{minutes}:{secs:02d}"
|
def register_routes(app: Flask) -> None:
    """Attach all routes to *app*.

    Called exactly once by the application factory; every handler below
    is a closure over *app* and the module-level configuration.
    """

    # ── UI ────────────────────────────────────────────────────────────────

    @app.route('/')
    def index():
        """Render the single-page UI, passing the media root for display."""
        return render_template('index.html', media_root=str(MEDIA_ROOT))
|
    # ── Config ────────────────────────────────────────────────────────────

    @app.route('/api/config')
    def api_config():
        """Return server-side settings the frontend needs at startup."""
        # Only the media root for now; extend this dict as the UI grows.
        return jsonify({'media_root': str(MEDIA_ROOT)})
|
    # ── SMTP settings ─────────────────────────────────────────────────────

    @app.route('/api/settings/smtp', methods=['GET'])
    def smtp_settings_get():
        """
        Return current SMTP settings. The stored password itself is never
        sent to the browser — get_smtp_config() only includes a flag
        indicating whether one is stored.
        """
        return jsonify(get_smtp_config())
|
@app.route('/api/settings/smtp', methods=['POST'])
|
|
def smtp_settings_save():
|
|
"""
|
|
Save SMTP settings to SQLite. Only fields present in the request
|
|
body are updated; omitting 'password' leaves the stored password
|
|
unchanged (useful when the user edits other fields but doesn't want
|
|
to re-enter the password).
|
|
"""
|
|
data = request.get_json(silent=True) or {}
|
|
|
|
# Fields whose DB key matches smtp_{field} exactly
|
|
for field in ('host', 'port', 'security'):
|
|
if field in data:
|
|
value = str(data[field]).strip()
|
|
if not value:
|
|
return jsonify({'error': f"'{field}' cannot be empty"}), 400
|
|
save_setting(f'smtp_{field}', value)
|
|
|
|
# from_addr is stored as 'smtp_from' (not 'smtp_from_addr')
|
|
if 'from_addr' in data:
|
|
value = str(data['from_addr']).strip()
|
|
if not value:
|
|
return jsonify({'error': "'from_addr' cannot be empty"}), 400
|
|
save_setting('smtp_from', value)
|
|
|
|
# Optional fields
|
|
if 'user' in data:
|
|
val = str(data['user']).strip()
|
|
if val:
|
|
save_setting('smtp_user', val)
|
|
else:
|
|
delete_setting('smtp_user')
|
|
|
|
# Password: only update if a non-empty value is explicitly sent
|
|
if 'password' in data and str(data['password']).strip():
|
|
save_setting('smtp_password', str(data['password']).strip())
|
|
|
|
return jsonify({'ok': True, 'config': get_smtp_config()})
|
|
|
|
    @app.route('/api/settings/smtp/test', methods=['POST'])
    def smtp_settings_test():
        """
        Send a test email using the currently saved SMTP settings.

        Input validation failures (missing or malformed recipient) return
        HTTP 400. Actual SMTP failures are reported with HTTP 200 and
        {ok: false, message: "..."} in the JSON body so the browser can
        display the exact error without interference from proxies or the
        browser's own error handling for 5xx responses.
        """
        data = request.get_json(silent=True) or {}
        test_to = data.get('to', '').strip()

        # Minimal sanity check only — real address validation is the
        # SMTP server's job.
        if not test_to or '@' not in test_to:
            return jsonify({'ok': False, 'message': 'Please enter a valid recipient address.'}), 400

        # Send a representative fake "completed file" result so the user
        # sees what a real notification will look like.
        ok, err = send_completion_email(
            to_address = test_to,
            results = [{
                'status': 'done',
                'filename': 'test_video.mp4',
                'output_size_gb': 1.2,
                'reduction_pct': 33,
            }],
            cancelled = False,
        )

        if ok:
            return jsonify({'ok': True, 'message': f'Test email sent to {test_to}.'})

        # SMTP failure: still 200 — the caller checks data.ok, not the HTTP status
        return jsonify({'ok': False, 'message': err})
|
# ── Directory browser ─────────────────────────────────────────────────
|
|
|
|
@app.route('/api/browse')
|
|
def browse_directory():
|
|
raw = request.args.get('path', str(MEDIA_ROOT))
|
|
try:
|
|
path = safe_path(raw)
|
|
except PermissionError as exc:
|
|
return jsonify({'error': str(exc)}), 403
|
|
|
|
if not path.exists():
|
|
return jsonify({'error': 'Path does not exist'}), 404
|
|
if not path.is_dir():
|
|
return jsonify({'error': 'Not a directory'}), 400
|
|
|
|
try:
|
|
entries = [
|
|
{'name': e.name, 'path': str(e), 'is_dir': e.is_dir()}
|
|
for e in sorted(
|
|
path.iterdir(),
|
|
key=lambda e: (not e.is_dir(), e.name.lower()),
|
|
)
|
|
if not e.name.startswith('.')
|
|
]
|
|
parent = str(path.parent) if path != MEDIA_ROOT else None
|
|
return jsonify({
|
|
'current': str(path),
|
|
'parent': parent,
|
|
'entries': entries,
|
|
'media_root': str(MEDIA_ROOT),
|
|
})
|
|
except PermissionError:
|
|
return jsonify({'error': 'Permission denied'}), 403
|
|
|
|
# ── File scanner ──────────────────────────────────────────────────────
|
|
|
|
@app.route('/api/scan', methods=['POST'])
|
|
def scan_directory():
|
|
data = request.get_json(silent=True) or {}
|
|
raw_dir = data.get('directory', '')
|
|
min_size_gb = float(data.get('min_size_gb', 1.0))
|
|
|
|
if not raw_dir:
|
|
return jsonify({'error': 'No directory provided'}), 400
|
|
try:
|
|
directory = safe_path(raw_dir)
|
|
except PermissionError as exc:
|
|
return jsonify({'error': str(exc)}), 403
|
|
if not directory.is_dir():
|
|
return jsonify({'error': 'Invalid directory'}), 400
|
|
|
|
try:
|
|
files = list_video_files(directory, min_size_gb)
|
|
except PermissionError as exc:
|
|
return jsonify({'error': str(exc)}), 403
|
|
|
|
enriched = []
|
|
for f in files:
|
|
info = get_video_info(f['path'])
|
|
if info:
|
|
f.update(info)
|
|
else:
|
|
# Rough fallback: assume a 90-minute feature film
|
|
bps = int((f['size_bytes'] * 8) / (90 * 60))
|
|
f.update({
|
|
'bit_rate_bps': bps,
|
|
'bit_rate_mbps': round(bps / 1_000_000, 2),
|
|
'target_bit_rate_bps': max(bps // 3, 200_000),
|
|
'target_bit_rate_mbps': round(max(bps // 3, 200_000) / 1_000_000, 2),
|
|
'duration': 0,
|
|
'codec': 'unknown',
|
|
'width': 0,
|
|
'height': 0,
|
|
})
|
|
enriched.append(f)
|
|
|
|
enriched.sort(key=lambda x: x['size_bytes'], reverse=True)
|
|
return jsonify({'files': enriched, 'count': len(enriched)})
|
|
|
|
    # ── Compression — status snapshot (for reconnect/reload) ─────────────

    @app.route('/api/compress/status/<job_id>')
    def compression_status(job_id):
        """
        Return a complete point-in-time snapshot of a job's state.

        This is used when the browser reconnects after losing the SSE stream
        (page reload, tab backgrounded, network blip). The frontend replays
        this snapshot to rebuild the full progress UI, then re-attaches the
        live SSE stream from where it left off.

        Response shape
        --------------
        {
          job_id, status, total, current_index,
          files: [ {path, name, ...original file info} ],
          file_states: [          # one entry per file, index-aligned
            {
              status: 'waiting' | 'running' | 'done' | 'error',
              percent: 0-100,
              detail: str,        # time elapsed / output size / error msg
              filename, output, reduction_pct, output_size_gb (done only)
              message (error only)
            }
          ],
          done_count: int,
          event_count: int        # total events stored; SSE stream resumes from here
        }
        """
        # Two-step locking: the registry lock only guards the dict lookup;
        # the per-job lock guards a consistent copy of the job's fields.
        with job_lock:
            job = active_jobs.get(job_id)
        if not job:
            return jsonify({'error': 'Job not found'}), 404

        # Copy everything we need under the lock, then release it before
        # the (potentially long) replay below.
        with job['lock']:
            events = list(job['events'])
            status = job['status']
            total = job['total']
            current_index = job['current_index']
            files = job['files']

        # Replay the event log to reconstruct per-file state
        file_states = [
            {'status': 'waiting', 'percent': 0, 'detail': '', 'filename': f.get('name', '')}
            for f in files
        ]
        done_count = 0

        # Events are append-only and in chronological order, so the last
        # event for each index wins — exactly what .update() gives us.
        for evt in events:
            t = evt.get('type')
            idx = evt.get('index')

            if t == 'file_start' and idx is not None:
                file_states[idx].update({
                    'status': 'running',
                    'percent': 0,
                    'detail': '',
                    # file_start may carry a corrected filename; keep the
                    # original one when it doesn't.
                    'filename': evt.get('filename', file_states[idx]['filename']),
                    'output': evt.get('output', ''),
                    'encoder': evt.get('encoder', ''),
                })

            elif t == 'progress' and idx is not None:
                file_states[idx].update({
                    'status': 'running',
                    'percent': evt.get('percent', 0),
                    # "elapsed / total" — omitted when the duration is
                    # unknown (duration_secs == 0).
                    'detail': (
                        f"{fmttime(evt.get('elapsed_secs',0))} / "
                        f"{fmttime(evt.get('duration_secs',0))}"
                        if evt.get('duration_secs', 0) > 0 else ''
                    ),
                })

            elif t == 'file_done' and idx is not None:
                done_count += 1
                file_states[idx].update({
                    'status': 'done',
                    'percent': 100,
                    'detail': (f"{evt.get('output_size_gb','?')} GB "
                               f"saved {evt.get('reduction_pct','?')}%"),
                    'filename': evt.get('filename', ''),
                    'output': evt.get('output', ''),
                    'reduction_pct': evt.get('reduction_pct', 0),
                    'output_size_gb': evt.get('output_size_gb', 0),
                })

            elif t == 'file_error' and idx is not None:
                file_states[idx].update({
                    'status': 'error',
                    'percent': 0,
                    'detail': evt.get('message', 'Unknown error'),
                    'message': evt.get('message', ''),
                })

        return jsonify({
            'job_id': job_id,
            'status': status,
            'total': total,
            'current_index': current_index,
            'done_count': done_count,
            # SSE clients resume from this index (?from=N)
            'event_count': len(events),
            'files': files,
            'file_states': file_states,
        })
|
# ── Compression — list active jobs (for page-load auto-reconnect) ─────
|
|
|
|
@app.route('/api/compress/active')
|
|
def list_active_jobs():
|
|
"""
|
|
Return a list of jobs that are currently running or recently finished.
|
|
The frontend calls this on page load to detect whether a job is in
|
|
progress and should be reconnected to.
|
|
"""
|
|
with job_lock:
|
|
jobs = list(active_jobs.values())
|
|
|
|
result = []
|
|
for job in jobs:
|
|
with job['lock']:
|
|
result.append({
|
|
'job_id': job['id'],
|
|
'status': job['status'],
|
|
'total': job['total'],
|
|
'current_index': job['current_index'],
|
|
})
|
|
|
|
# Most recent first
|
|
result.sort(key=lambda j: j['job_id'], reverse=True)
|
|
return jsonify({'jobs': result})
|
|
|
|
# ── Compression — start ───────────────────────────────────────────────
|
|
|
|
@app.route('/api/compress/start', methods=['POST'])
|
|
def start_compression():
|
|
data = request.get_json(silent=True) or {}
|
|
files = data.get('files', [])
|
|
suffix = data.get('suffix', '_new')
|
|
notify_email = data.get('notify_email', '').strip()
|
|
|
|
if not files:
|
|
return jsonify({'error': 'No files provided'}), 400
|
|
|
|
if notify_email and (len(notify_email) > 254 or '@' not in notify_email):
|
|
return jsonify({'error': 'Invalid notification email address'}), 400
|
|
|
|
for f in files:
|
|
try:
|
|
safe_path(f.get('path', ''))
|
|
except PermissionError as exc:
|
|
return jsonify({'error': str(exc)}), 403
|
|
|
|
job_id = f"job_{int(time.time() * 1000)}"
|
|
job = {
|
|
'id': job_id,
|
|
'files': files,
|
|
'suffix': suffix,
|
|
'notify_email': notify_email,
|
|
'status': 'running',
|
|
'current_index': 0,
|
|
'total': len(files),
|
|
'events': [],
|
|
'process': None,
|
|
'cancelled': False,
|
|
'lock': threading.Lock(),
|
|
}
|
|
with job_lock:
|
|
active_jobs[job_id] = job
|
|
|
|
threading.Thread(
|
|
target=run_compression_job,
|
|
args=(job_id,),
|
|
daemon=True,
|
|
).start()
|
|
return jsonify({'job_id': job_id})
|
|
|
|
    # ── Compression — SSE progress stream ─────────────────────────────────

    @app.route('/api/compress/progress/<job_id>')
    def compression_progress(job_id):
        """
        Server-Sent Events stream for real-time job progress.

        Query param: ?from=N — start streaming from event index N (default 0).
        On reconnect the client passes the last event index it saw so it only
        receives new events, not a full replay of the history.

        Compatible with Gunicorn + gevent: time.sleep() yields the greenlet
        rather than blocking a real OS thread.
        """
        # Malformed ?from values fall back to a full replay from 0.
        try:
            start_from = int(request.args.get('from', 0))
        except (TypeError, ValueError):
            start_from = 0

        def event_stream():
            last_idx = start_from
            while True:
                # Look the job up on every iteration: it may be removed
                # from the registry while we are streaming.
                with job_lock:
                    job = active_jobs.get(job_id)
                if not job:
                    yield (
                        f"data: {json.dumps({'type': 'error', 'message': 'Job not found'})}\n\n"
                    )
                    return

                # Copy new events under the job lock, but never yield while
                # holding it — a slow client must not block the worker.
                with job['lock']:
                    new_events = job['events'][last_idx:]
                    last_idx += len(new_events)
                    status = job['status']

                for event in new_events:
                    yield f"data: {json.dumps(event)}\n\n"

                # Only stop once the job is finished AND the backlog has
                # been fully drained (status is read under the same lock as
                # the events, so 'and not new_events' guarantees that).
                if status in ('done', 'cancelled', 'error') and not new_events:
                    break

                # Poll interval: cheap enough for the UI, gentle on the lock.
                time.sleep(0.25)

        return Response(
            stream_with_context(event_stream()),
            mimetype='text/event-stream',
            headers={
                'Cache-Control': 'no-cache',
                # Tell nginx not to buffer the stream, or events would
                # arrive in one burst at the end.
                'X-Accel-Buffering': 'no',
            },
        )
|
# ── Compression — cancel ──────────────────────────────────────────────
|
|
|
|
@app.route('/api/compress/cancel/<job_id>', methods=['POST'])
|
|
def cancel_compression(job_id):
|
|
with job_lock:
|
|
job = active_jobs.get(job_id)
|
|
if not job:
|
|
return jsonify({'error': 'Job not found'}), 404
|
|
|
|
with job['lock']:
|
|
job['cancelled'] = True
|
|
proc = job.get('process')
|
|
|
|
if proc and proc.poll() is None:
|
|
try:
|
|
proc.terminate()
|
|
time.sleep(1)
|
|
if proc.poll() is None:
|
|
proc.kill()
|
|
except Exception:
|
|
pass
|
|
|
|
return jsonify({'status': 'cancellation requested'})
|