import os

from flask import Flask, jsonify, request
from werkzeug.exceptions import RequestEntityTooLarge

from .extensions import db, login_manager
from .models import AppSettings, User
from .cli import ensure_db_command, init_db_command


def create_app():
    app = Flask(__name__, instance_relative_config=True)

    # Basic config
    # Flask's default config already contains SECRET_KEY=None, so setdefault() would leave it as
    # None; assign it directly and prefer the environment value when one is set.
    app.config["SECRET_KEY"] = os.environ.get("SECRET_KEY") or "dev-secret-change-me"
    app.config.setdefault(
        "SQLALCHEMY_DATABASE_URI",
        os.environ.get("DATABASE_URL", "sqlite:///" + os.path.join(app.instance_path, "signage.sqlite")),
    )
    app.config.setdefault("SQLALCHEMY_TRACK_MODIFICATIONS", False)
    app.config.setdefault("UPLOAD_FOLDER", os.path.join(app.root_path, "static", "uploads"))
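    # Example (illustrative only): the same factory can be pointed at another database purely via
    # the environment; the values below are placeholders, not project defaults:
    #   export SECRET_KEY="something-long-and-random"
    #   export DATABASE_URL="postgresql://signage:secret@db.example.com/signage"
    # Anything not overridden falls back to the defaults above.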

    # Target output resolution for cropped images.
    # This is used by the client-side cropper (to generate an upload) and by the server-side
    # image processing (to cap the resulting WEBP size).
    #
    # Defaults to Full HD landscape (1920x1080). Portrait is derived by swapping.
    # Override via env vars, e.g.:
    #   IMAGE_CROP_TARGET_W=1920
    #   IMAGE_CROP_TARGET_H=1080
    def _env_int(name: str, default: int) -> int:
        # Read an integer from the environment, falling back to `default`; clamp to at least 1.
        try:
            v = int(os.environ.get(name, "") or default)
        except (TypeError, ValueError):
            v = default
        return max(1, v)

    app.config.setdefault("IMAGE_CROP_TARGET_W", _env_int("IMAGE_CROP_TARGET_W", 1920))
    app.config.setdefault("IMAGE_CROP_TARGET_H", _env_int("IMAGE_CROP_TARGET_H", 1080))
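    # With the defaults above, a portrait display is therefore targeted at 1080x1920 (the
    # landscape values swapped); there is no separate portrait setting.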

    # NOTE: Videos may be at most 250 MB.
    # Flask's MAX_CONTENT_LENGTH applies to the full request payload, and multipart encoding adds
    # overhead on top of the file itself, so the cap sits slightly above 250 MB. Legitimate uploads
    # with extra form fields still pass, while clearly oversized requests are rejected early.
    app.config.setdefault("MAX_CONTENT_LENGTH", 260 * 1024 * 1024)  # ~260 MB request cap

    # Explicit per-video validation lives in the upload route; this app-wide cap is a safety net,
    # as sketched below.
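    # A per-file check in the upload route might look roughly like this (illustrative sketch only;
    # the real route and its field/variable names live elsewhere in this project):
    #
    #     video = request.files["video"]        # hypothetical field name
    #     video.stream.seek(0, os.SEEK_END)
    #     size = video.stream.tell()
    #     video.stream.seek(0)
    #     if size > 250 * 1024 * 1024:
    #         abort(413, description="Videos must be 250MB or smaller.")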

    os.makedirs(app.instance_path, exist_ok=True)
    os.makedirs(app.config["UPLOAD_FOLDER"], exist_ok=True)

    # Init extensions
    db.init_app(app)
    login_manager.init_app(app)
    login_manager.login_view = "auth.login"

    # Lightweight migration(s) for SQLite DBs created before new columns existed.
    # This avoids requiring Alembic for this small project.
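    # The pattern below is the same for every table: read the existing column names via
    # PRAGMA table_info(<table>), then ALTER TABLE ... ADD COLUMN for anything the current
    # models expect but the database lacks. Everything runs best-effort inside one try block,
    # so a failure rolls back and the app still starts.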
    with app.app_context():
        try:
            uri = app.config.get("SQLALCHEMY_DATABASE_URI", "") or ""
            if uri.startswith("sqlite:"):
                cols = [r[1] for r in db.session.execute(db.text("PRAGMA table_info(user)")).fetchall()]
                if "email" not in cols:
                    db.session.execute(db.text("ALTER TABLE user ADD COLUMN email VARCHAR(255)"))
                    # Best-effort unique index (SQLite doesn't support adding unique constraints after the fact).
                    db.session.execute(db.text("CREATE UNIQUE INDEX IF NOT EXISTS ix_user_email ON user (email)"))
                    db.session.commit()

                # Displays: ensure optional description column exists.
                display_cols = [
                    r[1] for r in db.session.execute(db.text("PRAGMA table_info(display)")).fetchall()
                ]
                if "description" not in display_cols:
                    db.session.execute(db.text("ALTER TABLE display ADD COLUMN description VARCHAR(200)"))
                    db.session.commit()

                # Displays: optional transition between slides (none|fade|slide)
                if "transition" not in display_cols:
                    db.session.execute(db.text("ALTER TABLE display ADD COLUMN transition VARCHAR(20)"))
                    db.session.commit()

                # Displays: per-display overlay toggle
                if "show_overlay" not in display_cols:
                    db.session.execute(
                        db.text("ALTER TABLE display ADD COLUMN show_overlay BOOLEAN NOT NULL DEFAULT 0")
                    )
                    db.session.commit()

                # Displays: optional ticker tape (RSS headlines)
                if "ticker_enabled" not in display_cols:
                    db.session.execute(
                        db.text("ALTER TABLE display ADD COLUMN ticker_enabled BOOLEAN NOT NULL DEFAULT 0")
                    )
                    db.session.commit()
                if "ticker_rss_url" not in display_cols:
                    db.session.execute(db.text("ALTER TABLE display ADD COLUMN ticker_rss_url VARCHAR(1000)"))
                    db.session.commit()
                if "ticker_color" not in display_cols:
                    db.session.execute(db.text("ALTER TABLE display ADD COLUMN ticker_color VARCHAR(32)"))
                    db.session.commit()
                if "ticker_bg_color" not in display_cols:
                    db.session.execute(db.text("ALTER TABLE display ADD COLUMN ticker_bg_color VARCHAR(32)"))
                    db.session.commit()
                if "ticker_bg_opacity" not in display_cols:
                    db.session.execute(db.text("ALTER TABLE display ADD COLUMN ticker_bg_opacity INTEGER"))
                    db.session.commit()
                if "ticker_font_family" not in display_cols:
                    db.session.execute(db.text("ALTER TABLE display ADD COLUMN ticker_font_family VARCHAR(120)"))
                    db.session.commit()
                if "ticker_font_size_px" not in display_cols:
                    db.session.execute(db.text("ALTER TABLE display ADD COLUMN ticker_font_size_px INTEGER"))
                    db.session.commit()
                if "ticker_speed" not in display_cols:
                    db.session.execute(db.text("ALTER TABLE display ADD COLUMN ticker_speed INTEGER"))
                    db.session.commit()

                # Companies: optional per-company storage quota
                company_cols = [
                    r[1] for r in db.session.execute(db.text("PRAGMA table_info(company)")).fetchall()
                ]
                if "storage_max_bytes" not in company_cols:
                    db.session.execute(db.text("ALTER TABLE company ADD COLUMN storage_max_bytes BIGINT"))
                    db.session.commit()

                # Companies: optional overlay file path
                if "overlay_file_path" not in company_cols:
                    db.session.execute(db.text("ALTER TABLE company ADD COLUMN overlay_file_path VARCHAR(400)"))
                    db.session.commit()

                # Companies: ticker tape settings (RSS + styling)
                if "ticker_rss_url" not in company_cols:
                    db.session.execute(db.text("ALTER TABLE company ADD COLUMN ticker_rss_url VARCHAR(1000)"))
                    db.session.commit()
                if "ticker_color" not in company_cols:
                    db.session.execute(db.text("ALTER TABLE company ADD COLUMN ticker_color VARCHAR(32)"))
                    db.session.commit()
                if "ticker_bg_color" not in company_cols:
                    db.session.execute(db.text("ALTER TABLE company ADD COLUMN ticker_bg_color VARCHAR(32)"))
                    db.session.commit()
                if "ticker_bg_opacity" not in company_cols:
                    db.session.execute(db.text("ALTER TABLE company ADD COLUMN ticker_bg_opacity INTEGER"))
                    db.session.commit()
                if "ticker_font_family" not in company_cols:
                    db.session.execute(db.text("ALTER TABLE company ADD COLUMN ticker_font_family VARCHAR(120)"))
                    db.session.commit()
                if "ticker_font_size_px" not in company_cols:
                    db.session.execute(db.text("ALTER TABLE company ADD COLUMN ticker_font_size_px INTEGER"))
                    db.session.commit()
                if "ticker_speed" not in company_cols:
                    db.session.execute(db.text("ALTER TABLE company ADD COLUMN ticker_speed INTEGER"))
                    db.session.commit()

                # AppSettings: create settings table if missing.
                # (PRAGMA returns empty if the table doesn't exist.)
                settings_cols = [
                    r[1] for r in db.session.execute(db.text("PRAGMA table_info(app_settings)")).fetchall()
                ]
                if not settings_cols:
                    AppSettings.__table__.create(db.engine, checkfirst=True)

                # AppSettings: add public_domain column if missing. A freshly created table already
                # includes it via the model definition, hence the settings_cols guard.
                if settings_cols and "public_domain" not in settings_cols:
                    db.session.execute(db.text("ALTER TABLE app_settings ADD COLUMN public_domain VARCHAR(255)"))
                    db.session.commit()

                # DisplayPlaylist: create association table for multi-playlist displays.
                dp_cols = [
                    r[1] for r in db.session.execute(db.text("PRAGMA table_info(display_playlist)")).fetchall()
                ]
                if not dp_cols:
                    # Keep the schema compatible with older DBs that include an autoincrement id and position.
                    db.session.execute(
                        db.text(
                            """
                            CREATE TABLE IF NOT EXISTS display_playlist (
                                id INTEGER PRIMARY KEY,
                                display_id INTEGER NOT NULL,
                                playlist_id INTEGER NOT NULL,
                                position INTEGER NOT NULL DEFAULT 1,
                                created_at DATETIME NOT NULL,
                                UNIQUE(display_id, playlist_id),
                                FOREIGN KEY(display_id) REFERENCES display (id),
                                FOREIGN KEY(playlist_id) REFERENCES playlist (id)
                            )
                            """
                        )
                    )
                    db.session.commit()
                else:
                    # Best-effort column additions for older/newer variants.
                    if "position" not in dp_cols:
                        db.session.execute(
                            db.text("ALTER TABLE display_playlist ADD COLUMN position INTEGER NOT NULL DEFAULT 1")
                        )
                        db.session.commit()
                    if "created_at" not in dp_cols:
                        # SQLite's ALTER TABLE ... ADD COLUMN rejects non-constant defaults such as
                        # CURRENT_TIMESTAMP, so add the column plain and backfill existing rows instead.
                        db.session.execute(db.text("ALTER TABLE display_playlist ADD COLUMN created_at DATETIME"))
                        db.session.execute(
                            db.text("UPDATE display_playlist SET created_at = CURRENT_TIMESTAMP WHERE created_at IS NULL")
                        )
                        db.session.commit()
                    if "id" not in dp_cols:
                        # Cannot add a PRIMARY KEY via ALTER TABLE; keep the column nullable for compatibility.
                        db.session.execute(db.text("ALTER TABLE display_playlist ADD COLUMN id INTEGER"))
                        db.session.commit()
                    # Ensure the uniqueness index exists (no-op if already present).
                    db.session.execute(
                        db.text(
                            "CREATE UNIQUE INDEX IF NOT EXISTS uq_display_playlist_display_playlist ON display_playlist (display_id, playlist_id)"
                        )
                    )
                    db.session.commit()

                # Playlists: schedule + priority flags
                playlist_cols = [
                    r[1] for r in db.session.execute(db.text("PRAGMA table_info(playlist)")).fetchall()
                ]
                if "schedule_start" not in playlist_cols:
                    db.session.execute(db.text("ALTER TABLE playlist ADD COLUMN schedule_start DATETIME"))
                    db.session.commit()
                if "schedule_end" not in playlist_cols:
                    db.session.execute(db.text("ALTER TABLE playlist ADD COLUMN schedule_end DATETIME"))
                    db.session.commit()
                if "is_priority" not in playlist_cols:
                    db.session.execute(
                        db.text("ALTER TABLE playlist ADD COLUMN is_priority BOOLEAN NOT NULL DEFAULT 0")
                    )
                    db.session.commit()
        except Exception:
            # Schema upkeep is best-effort; never block app startup on it.
            db.session.rollback()

    @login_manager.user_loader
    def load_user(user_id: str):
        return db.session.get(User, int(user_id))
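
    # Note: Flask-Login stores the user id in the session as a string, hence the int() conversion
    # above; db.session.get() returns None for unknown ids, which Flask-Login treats as anonymous.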

    # CLI
    app.cli.add_command(ensure_db_command)
    app.cli.add_command(init_db_command)
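    # These become `flask <command>` subcommands on this app's CLI group; the exact command
    # names come from the click definitions in cli.py.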

    # Blueprints
    from .routes.auth import bp as auth_bp
    from .routes.admin import bp as admin_bp
    from .routes.company import bp as company_bp
    from .routes.display import bp as display_bp
    from .routes.api import bp as api_bp

    app.register_blueprint(auth_bp)
    app.register_blueprint(admin_bp)
    app.register_blueprint(company_bp)
    app.register_blueprint(display_bp)
    app.register_blueprint(api_bp)

    # Home
    from flask import redirect, url_for
    from flask_login import current_user

    @app.get("/")
    def index():
        if not current_user.is_authenticated:
            return redirect(url_for("auth.login"))
        if current_user.is_admin:
            return redirect(url_for("admin.dashboard"))
        return redirect(url_for("company.dashboard"))

    @app.errorhandler(RequestEntityTooLarge)
    def handle_request_too_large(e):
        """Return a user-friendly message when uploads exceed MAX_CONTENT_LENGTH."""
        # Keep behavior consistent with our AJAX endpoints.
        wants_json = (
            (request.headers.get("X-Requested-With") == "XMLHttpRequest")
            or ("application/json" in (request.headers.get("Accept") or ""))
            or request.is_json
            or (request.form.get("response") == "json")
        )
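        # Example: an XHR upload typically sends "X-Requested-With: XMLHttpRequest" (or accepts
        # application/json), so an oversized request gets the JSON payload below with status 413;
        # a plain HTML form post falls through to the flash-and-redirect branch instead.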

        msg = "Upload too large. Videos must be 250MB or smaller."
        if wants_json:
            return jsonify({"ok": False, "error": msg}), 413

        # For non-AJAX form posts, redirect back with a flash message. Keep the redirect's normal
        # 3xx status: browsers won't follow a Location header on a 413 response.
        from flask import flash, redirect

        flash(msg, "danger")
        return redirect(request.referrer or url_for("company.dashboard"))

    return app