2026-01-23 20:48:30 +01:00
parent ea3d0164f2
commit 7f0092ff10
9 changed files with 346 additions and 3 deletions

@@ -11,7 +11,13 @@ from werkzeug.utils import secure_filename
from PIL import Image, ImageOps
from ..extensions import db
from ..uploads import abs_upload_path, ensure_company_upload_dir, get_company_upload_bytes, is_valid_upload_relpath
from ..uploads import (
    abs_upload_path,
    compute_storage_usage,
    ensure_company_upload_dir,
    get_company_upload_bytes,
    is_valid_upload_relpath,
)
from ..models import Company, Display, DisplaySession, Playlist, PlaylistItem, User
from ..email_utils import send_email
from ..auth_tokens import make_password_reset_token
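
For context, compute_storage_usage is newly imported from ..uploads but its body is not part of this diff. Judging from how it is called below (keyword-only used_bytes/max_bytes arguments, and the max_bytes, used_percent and is_exceeded keys read from its result), a minimal sketch could look like the following; the real helper in ..uploads may well differ.

# Sketch only, not part of this commit: a plausible compute_storage_usage.
def compute_storage_usage(*, used_bytes: int, max_bytes: int | None) -> dict:
    limit = int(max_bytes) if max_bytes else None  # treat None/0 as "no quota"
    result = {"used_bytes": int(used_bytes), "max_bytes": limit,
              "used_percent": None, "is_exceeded": False}
    if limit:
        result["used_percent"] = round(min(used_bytes / limit, 1.0) * 100, 1)
        result["is_exceeded"] = used_bytes >= limit
    return result
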
@@ -194,6 +200,12 @@ def _format_bytes(num: int) -> str:
    return f"{size:.1f} {units[idx]}"


def _storage_limit_error_message(*, storage_max_human: str | None) -> str:
    if storage_max_human:
        return f"Storage limit reached. Maximum allowed storage is {storage_max_human}. Please delete items to free space."
    return "Storage limit reached. Please delete items to free space."


@bp.get("/my-company")
@login_required
def my_company():
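
Only the final return statement of _format_bytes is visible in the hunk above. For readers without the full file, a reconstruction consistent with that line (assuming 1024-based units) might look like this; the actual implementation may differ.

# Sketch only, not part of this commit: a _format_bytes consistent with the
# return statement shown above.
def _format_bytes(num: int) -> str:
    units = ["B", "KB", "MB", "GB", "TB"]
    size = float(num or 0)
    idx = 0
    while size >= 1024 and idx < len(units) - 1:
        size /= 1024
        idx += 1
    return f"{size:.1f} {units[idx]}"
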
@@ -226,6 +238,9 @@ def my_company():
    upload_root = current_app.config["UPLOAD_FOLDER"]
    used_bytes = get_company_upload_bytes(upload_root, company.id)
    usage = compute_storage_usage(used_bytes=used_bytes, max_bytes=company.storage_max_bytes)
    max_human = _format_bytes(usage["max_bytes"]) if usage.get("max_bytes") else None

    users = User.query.filter_by(company_id=company.id, is_admin=False).order_by(User.email.asc()).all()
    return render_template(
@@ -240,6 +255,9 @@ def my_company():
            "active_sessions": active_sessions,
            "storage_bytes": used_bytes,
            "storage_human": _format_bytes(used_bytes),
            "storage_max_bytes": usage.get("max_bytes"),
            "storage_max_human": max_human,
            "storage_used_percent": usage.get("used_percent"),
        },
    )
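
With the hypothetical compute_storage_usage sketch above, the new template fields work out roughly as follows for an illustrative company with a 1 GB quota (the numbers are made up, not taken from the commit).

# Illustration only, using the sketches above; values are invented.
used_bytes = 750 * 1024 * 1024                      # 750 MB of existing uploads
usage = compute_storage_usage(used_bytes=used_bytes, max_bytes=1024 ** 3)
# usage["max_bytes"]    -> 1073741824
# usage["used_percent"] -> 73.2
# usage["is_exceeded"]  -> False
# _format_bytes(usage["max_bytes"]) -> "1.0 GB" with the 1024-based sketch above
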
@@ -489,7 +507,29 @@ def add_playlist_item(playlist_id: int):
        position=pos,
    )

    # Enforce storage quota for uploads (image/video).
    # Webpage/YouTube items do not consume local storage.
    # Note: querying the DB triggers an autoflush by default, and because `item` has not
    # yet been added to the session, SQLAlchemy may emit warnings about relationship
    # operations. Autoflush is therefore disabled while checking the quota.
    with db.session.no_autoflush:
        company = db.session.get(Company, current_user.company_id)
        if not company:
            abort(404)
        upload_root = current_app.config["UPLOAD_FOLDER"]
        used_bytes = get_company_upload_bytes(upload_root, company.id)
        usage = compute_storage_usage(used_bytes=used_bytes, max_bytes=company.storage_max_bytes)
        storage_max_human = _format_bytes(usage["max_bytes"]) if usage.get("max_bytes") else None

    if item_type in ("image", "video"):
        if usage.get("is_exceeded"):
            msg = _storage_limit_error_message(storage_max_human=storage_max_human)
            if wants_json:
                return _json_error(msg, 403)
            flash(msg, "danger")
            return redirect(url_for("company.playlist_detail", playlist_id=playlist_id))

    f = request.files.get("file")
    if not f or not f.filename:
        if wants_json:
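
The quota pre-check above (read current usage, compare against the limit, build a human-readable maximum) is essentially repeated by the post-save checks in the next hunks. If it were factored out, a helper along these lines would do; the name _company_quota_state is hypothetical and not part of the commit.

# Hypothetical helper, not part of this commit: compute quota state in one place.
def _company_quota_state(company, upload_root) -> tuple[bool, str | None]:
    """Return (is_exceeded, human-readable maximum or None) for a company."""
    used_bytes = get_company_upload_bytes(upload_root, company.id)
    usage = compute_storage_usage(used_bytes=used_bytes, max_bytes=company.storage_max_bytes)
    max_human = _format_bytes(usage["max_bytes"]) if usage.get("max_bytes") else None
    return bool(usage.get("is_exceeded")), max_human
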
@@ -516,6 +556,30 @@ def add_playlist_item(playlist_id: int):
                current_user.company_id,
                crop_mode=crop_mode,
            )

            # Post-save quota check for images as well.
            # (The final image size can't be reliably estimated before compression.)
            if company.storage_max_bytes is not None and int(company.storage_max_bytes or 0) > 0:
                try:
                    used_after = get_company_upload_bytes(upload_root, company.id)
                except Exception:
                    used_after = None
                if used_after is not None:
                    usage_after = compute_storage_usage(
                        used_bytes=used_after,
                        max_bytes=company.storage_max_bytes,
                    )
                    if usage_after.get("is_exceeded"):
                        # Remove the saved file and reject the item.
                        try:
                            _try_delete_upload(item.file_path, upload_root)
                        except Exception:
                            pass
                        msg = _storage_limit_error_message(storage_max_human=storage_max_human)
                        if wants_json:
                            return _json_error(msg, 403)
                        flash(msg, "danger")
                        return redirect(url_for("company.playlist_detail", playlist_id=playlist_id))
        except Exception:
            if wants_json:
                return _json_error("Failed to process image upload", 500)
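
The post-save check above reappears almost verbatim for video uploads in the next hunk, so the duplicated logic could be pulled into a shared helper. A minimal sketch, leaving file cleanup and the HTTP response to the caller; the name _exceeds_quota_after_save is hypothetical and not part of the commit.

# Hypothetical helper, not part of this commit: shared post-save quota check.
def _exceeds_quota_after_save(company, upload_root) -> bool:
    if not (company.storage_max_bytes and int(company.storage_max_bytes) > 0):
        return False  # no quota configured
    try:
        used_after = get_company_upload_bytes(upload_root, company.id)
    except Exception:
        return False  # if usage cannot be read, fail open rather than reject
    usage_after = compute_storage_usage(used_bytes=used_after, max_bytes=company.storage_max_bytes)
    return bool(usage_after.get("is_exceeded"))
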
@@ -558,6 +622,29 @@ def add_playlist_item(playlist_id: int):
        save_path = os.path.join(company_dir, unique)
        f.save(save_path)

        # Post-save quota check: clients may not report the upload size reliably.
        # If the quota is exceeded after saving, delete the file and reject the item.
        if company.storage_max_bytes is not None and int(company.storage_max_bytes or 0) > 0:
            try:
                used_after = get_company_upload_bytes(upload_root, company.id)
            except Exception:
                used_after = None
            if used_after is not None:
                usage_after = compute_storage_usage(
                    used_bytes=used_after,
                    max_bytes=company.storage_max_bytes,
                )
                if usage_after.get("is_exceeded"):
                    try:
                        os.remove(save_path)
                    except OSError:
                        pass
                    msg = _storage_limit_error_message(storage_max_human=storage_max_human)
                    if wants_json:
                        return _json_error(msg, 403)
                    flash(msg, "danger")
                    return redirect(url_for("company.playlist_detail", playlist_id=playlist_id))

        # Safety check: validate using the actual saved file size.
        # (Some clients/framework layers don't reliably report per-part size.)
        try:
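
The hunk is cut off right after the try: that begins the size validation mentioned in the comment above. A sketch of what such a check typically looks like follows; the limit name MAX_VIDEO_UPLOAD_BYTES, the helper name and the exact responses are assumptions, not taken from this commit.

# Sketch only, not part of this commit: a possible post-save size validation for
# the video branch; MAX_VIDEO_UPLOAD_BYTES and the responses are assumed.
def _reject_if_too_large(save_path, playlist_id, wants_json):
    try:
        actual_size = os.path.getsize(save_path)
    except OSError:
        return None  # size unknown; let later checks decide
    if actual_size <= MAX_VIDEO_UPLOAD_BYTES:
        return None
    try:
        os.remove(save_path)  # don't keep an oversized file on disk
    except OSError:
        pass
    if wants_json:
        return _json_error("Uploaded file is too large", 413)
    flash("Uploaded file is too large.", "danger")
    return redirect(url_for("company.playlist_detail", playlist_id=playlist_id))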