Release 1.6
@@ -2,6 +2,10 @@ from datetime import datetime, timedelta
import hashlib
import json
import time
import re
from urllib.parse import urlparse
from urllib.request import Request, urlopen
from xml.etree import ElementTree as ET

from flask import Blueprint, Response, abort, jsonify, request, stream_with_context, url_for

@@ -15,6 +19,10 @@ bp = Blueprint("api", __name__, url_prefix="/api")
MAX_ACTIVE_SESSIONS_PER_DISPLAY = 3
SESSION_TTL_SECONDS = 90

# RSS ticker cache (in-memory; OK for this small app; avoids hammering feeds)
TICKER_CACHE_TTL_SECONDS = 120
_TICKER_CACHE: dict[str, dict] = {}
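# Keyed by feed URL; each entry has the shape {"ts": <epoch seconds>, "titles": [<headline>, ...]}.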


def _is_playlist_active_now(p: Playlist, now_utc: datetime) -> bool:
    """Return True if playlist is active based on its optional schedule window."""
@@ -168,6 +176,127 @@ def _playlist_signature(display: Display) -> tuple[int | None, str]:
    return None, hashlib.sha1(raw.encode("utf-8")).hexdigest()


def _is_http_url_allowed(url: str) -> bool:
    """Basic SSRF hardening: only allow http(s) and disallow obvious local targets."""

    try:
        u = urlparse(url)
    except Exception:
        return False

    if u.scheme not in {"http", "https"}:
        return False

    host = (u.hostname or "").strip().lower()
    if not host:
        return False

    # Block localhost and common local domains.
    if host in {"localhost", "127.0.0.1", "::1"}:
        return False

    # Block RFC1918-ish and link-local targets when host is an IP.
    # Note: this is best-effort; proper SSRF protection would require DNS resolution too.
    if re.match(r"^\d+\.\d+\.\d+\.\d+$", host):
        parts = [int(x) for x in host.split(".")]
        if parts[0] == 10:
            return False
        if parts[0] == 127:
            return False
        if parts[0] == 169 and parts[1] == 254:
            return False
        if parts[0] == 192 and parts[1] == 168:
            return False
        if parts[0] == 172 and 16 <= parts[1] <= 31:
            return False

    return True
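# Illustrative sketch: a stricter check could resolve the hostname and vet every
# resolved address with the stdlib ipaddress module, which also covers IPv6:
#
#     import ipaddress, socket
#     def _resolves_to_private(host: str) -> bool:
#         try:
#             infos = socket.getaddrinfo(host, None)
#         except OSError:
#             return True  # fail closed when resolution fails
#         addrs = [ipaddress.ip_address(info[4][0]) for info in infos]
#         return any(a.is_private or a.is_loopback or a.is_link_local for a in addrs)
#
# Note also that urlopen() follows redirects, so an allowed URL can still
# redirect to an internal target; a per-hop check would be needed to close that.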


def _strip_text(s: str) -> str:
    """Trim and collapse internal whitespace runs to single spaces."""
    s = (s or "").strip()
    s = re.sub(r"\s+", " ", s)
    return s


def _fetch_rss_titles(url: str, *, limit: int = 20) -> list[str]:
    """Fetch RSS/Atom titles from a feed URL.

    We intentionally avoid adding dependencies (feedparser) for this project.
    This implementation is tolerant enough for typical RSS2/Atom feeds.
    """
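    # An explicit User-Agent helps with feed servers that reject urllib's default one.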
    req = Request(
        url,
        headers={
            "User-Agent": "SignageTicker/1.0 (+https://example.invalid)",
            "Accept": "application/rss+xml, application/atom+xml, application/xml, text/xml, */*",
        },
        method="GET",
    )

    with urlopen(req, timeout=8) as resp:
        # Basic size cap (avoid reading huge responses into memory)
        raw = resp.read(2_000_000)  # 2MB

    try:
        root = ET.fromstring(raw)
    except Exception:
        return []

    titles: list[str] = []

    # RSS2: <rss><channel><item><title>
    for el in root.findall(".//item/title"):
        t = _strip_text("".join(el.itertext()))
        if t:
            titles.append(t)

    # Atom: <feed><entry><title>
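    # The {*} wildcard matches elements in any XML namespace (Python 3.8+).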
    if not titles:
        for el in root.findall(".//{*}entry/{*}title"):
            t = _strip_text("".join(el.itertext()))
            if t:
                titles.append(t)

    # Some feeds may have <channel><title> etc.; we only want entry titles.
    # Deduplicate while preserving order.
    deduped: list[str] = []
    seen = set()
    for t in titles:
        if t in seen:
            continue
        seen.add(t)
        deduped.append(t)
        if len(deduped) >= limit:
            break

    return deduped
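
# Illustrative usage (the feed URL below is hypothetical):
#
#     _fetch_rss_titles("https://news.example.com/rss", limit=5)
#     # -> ["First headline", "Second headline", ...]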

def _get_ticker_titles_cached(url: str) -> tuple[list[str], bool]:
    """Return (titles, from_cache)."""

    now = time.time()
    key = (url or "").strip()
    if not key:
        return [], True

    entry = _TICKER_CACHE.get(key)
    if entry and (now - float(entry.get("ts") or 0)) < TICKER_CACHE_TTL_SECONDS:
        return (entry.get("titles") or []), True

    titles: list[str] = []
    try:
        if _is_http_url_allowed(key):
            titles = _fetch_rss_titles(key)
    except Exception:
        titles = []
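    # A failed or disallowed fetch caches an empty list, so a dead feed is
    # retried at most once per TTL window rather than on every poll.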
    _TICKER_CACHE[key] = {"ts": now, "titles": titles}
    return titles, False


@bp.get("/display/<token>/playlist")
def display_playlist(token: str):
    display = Display.query.filter_by(token=token).first()
@@ -188,6 +317,17 @@ def display_playlist(token: str):
    if not ok:
        return resp

    ticker_cfg = {
        "enabled": bool(display.ticker_enabled),
        "rss_url": display.ticker_rss_url,
        "color": display.ticker_color,
        "bg_color": display.ticker_bg_color,
        "bg_opacity": display.ticker_bg_opacity,
        "font_family": display.ticker_font_family,
        "font_size_px": display.ticker_font_size_px,
        "speed": display.ticker_speed,
    }

    # Determine active playlists. If display_playlist has any rows, use those.
    # Otherwise fall back to the legacy assigned_playlist_id.
    mapped_ids = [
@@ -209,6 +349,7 @@ def display_playlist(token: str):
            "display": display.name,
            "transition": display.transition or "none",
            "overlay_src": overlay_src,
            "ticker": ticker_cfg,
            "playlists": [],
            "items": [],
        }
@@ -273,12 +414,48 @@ def display_playlist(token: str):
            "display": display.name,
            "transition": display.transition or "none",
            "overlay_src": overlay_src,
            "ticker": ticker_cfg,
            "playlists": [{"id": p.id, "name": p.name} for p in ordered_playlists],
            "items": items,
        }
    )


@bp.get("/display/<token>/ticker")
def display_ticker(token: str):
    """Return ticker headlines for a display.

    We keep it separate from /playlist so the player can refresh headlines on its own interval.
    """
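    # Example response shape (illustrative values):
    #   {"enabled": true, "rss_url": "https://...", "headlines": ["..."], "cached": false}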
    display = Display.query.filter_by(token=token).first()
    if not display:
        abort(404)

    # Enforce concurrent session limit the same way as /playlist.
    sid = request.args.get("sid")
    ok, resp = _enforce_and_touch_display_session(display, sid)
    if not ok:
        return resp

    if not display.ticker_enabled:
        return jsonify({"enabled": False, "headlines": []})

    rss_url = (display.ticker_rss_url or "").strip()
    if not rss_url:
        return jsonify({"enabled": True, "headlines": []})

    titles, from_cache = _get_ticker_titles_cached(rss_url)
    return jsonify(
        {
            "enabled": True,
            "rss_url": rss_url,
            "headlines": titles,
            "cached": bool(from_cache),
        }
    )


@bp.get("/display/<token>/events")
def display_events(token: str):
    """Server-Sent Events stream to notify the player when its playlist changes."""