Dashboard Add top-right user dropdown using Syncfusion DropDownButton: shows username + role; menu entries “Profil” and “Abmelden”. Replace custom dropdown logic with Syncfusion component; position at header’s right edge. Update /logout page to call backend logout and redirect to /login (reliable user switching). Build/Config Add @syncfusion/ej2-react-splitbuttons and @syncfusion/ej2-splitbuttons dependencies. Update Vite optimizeDeps.include to pre-bundle splitbuttons and avoid import-analysis errors. Docs README: Rework Architecture Overview with clearer data flow: Listener consumes MQTT (discovery/heartbeats) and updates API. Scheduler reads from API and publishes events via MQTT to clients. Clients send via MQTT and receive via MQTT. Worker receives commands directly from API and reports results back (no MQTT). Explicit note: MariaDB is accessed exclusively by the API Server; Dashboard never talks to DB directly. README: Add SplitButtons to “Syncfusion Components Used”; add troubleshooting steps for @syncfusion/ej2-react-splitbuttons import issues (optimizeDeps + volume reset). Copilot instructions: Document header user menu and splitbuttons technical notes (deps, optimizeDeps, dev-container node_modules volume). Program info Bump to 2025.1.0-alpha.10 with changelog: UI: Header user menu (DropDownButton with username/role; Profil/Abmelden). Frontend: Syncfusion SplitButtons integration + Vite pre-bundling config. Fix: Added README guidance for splitbuttons import errors. No breaking changes.
533 lines
21 KiB
Python
533 lines
21 KiB
Python
from flask import Blueprint, request, jsonify
|
|
from server.permissions import editor_or_higher
|
|
from server.database import Session
|
|
from models.models import Event, EventMedia, MediaType, EventException
|
|
from datetime import datetime, timezone, timedelta
|
|
from sqlalchemy import and_
|
|
from dateutil.rrule import rrulestr
|
|
from dateutil.tz import UTC
|
|
import sys
|
|
sys.path.append('/workspace')
|
|
|
|
# Blueprint bundling all /api/events endpoints registered by the API server.
events_bp = Blueprint("events", __name__, url_prefix="/api/events")
|
|
|
|
|
|
def get_icon_for_type(event_type):
    """Return the Lucide icon name for an event type string.

    Unknown or ``None`` types map to the empty string so callers can
    always embed the result directly in the payload.
    """
    icon_by_type = {
        "presentation": "Presentation",
        "website": "Globe",
        "video": "Video",
        "message": "MessageSquare",
        "webuntis": "School",
    }
    return icon_by_type.get(event_type, "")
|
|
|
|
|
|
@events_bp.route("", methods=["GET"])
def get_events():
    """Return all events as Syncfusion Scheduler payload dictionaries.

    Query parameters:
        group_id      -- optional; restrict to events of one group.
        show_inactive -- "1" to include events whose end time already passed.

    Recurrence is always expanded client-side by Syncfusion; the backend only
    ships the RRULE plus EXDATE tokens for skipped occurrences.

    Side effect: events whose end lies in the past are flagged inactive and
    the change is committed.
    """
    session = Session()
    try:
        group_id = request.args.get("group_id")
        # Checkbox logic: only "1" enables inactive events.
        show_inactive = request.args.get("show_inactive", "0") == "1"

        now = datetime.now(timezone.utc)
        events_query = session.query(Event)
        if group_id:
            events_query = events_query.filter(Event.group_id == int(group_id))
        events = events_query.all()

        result = []
        deactivated_any = False  # commit once after the loop, not per event
        for e in events:
            # Normalize naive end timestamps to UTC before comparing with `now`.
            if e.end and e.end.tzinfo is None:
                end_dt = e.end.replace(tzinfo=timezone.utc)
            else:
                end_dt = e.end

            # Auto-deactivate events that are already over.
            if end_dt and end_dt < now and e.is_active:
                e.is_active = False
                deactivated_any = True
            if not (show_inactive or e.is_active):
                continue

            # Gather exceptions for this event.
            all_exceptions = session.query(EventException).filter(
                EventException.event_id == e.id
            ).all()

            # Build RecurrenceException (EXDATE) tokens for skipped occurrences
            # only (detached occurrences are real Event rows, not synthetic).
            recurrence_exception = None
            if all_exceptions:
                base_start = e.start.astimezone(UTC) if e.start.tzinfo else e.start.replace(tzinfo=UTC)
                tokens = []
                for ex in all_exceptions:
                    if ex.is_skipped:
                        exd = ex.exception_date
                        # EXDATE must carry the exact time the occurrence would
                        # have happened, in the master's start-of-day clock.
                        occ_dt = datetime(
                            exd.year, exd.month, exd.day,
                            base_start.hour, base_start.minute, base_start.second,
                            tzinfo=UTC
                        )
                        # Compact ISO without separators (yyyyMMddThhmmssZ), RFC 5545.
                        tokens.append(occ_dt.strftime('%Y%m%dT%H%M%SZ'))
                if tokens:
                    recurrence_exception = ','.join(tokens)

            result.append({
                "Id": str(e.id),
                "GroupId": e.group_id,
                "Subject": e.title,
                "Description": getattr(e, 'description', None),
                "StartTime": e.start.isoformat() if e.start else None,
                "EndTime": e.end.isoformat() if e.end else None,
                "IsAllDay": False,
                "MediaId": e.event_media_id,
                "Type": e.event_type.value if e.event_type else None,  # enum -> string
                "Icon": get_icon_for_type(e.event_type.value if e.event_type else None),
                # Recurrence metadata
                "RecurrenceRule": e.recurrence_rule,
                "RecurrenceEnd": e.recurrence_end.isoformat() if e.recurrence_end else None,
                "RecurrenceException": recurrence_exception,
                "SkipHolidays": bool(getattr(e, 'skip_holidays', False)),
            })

        if deactivated_any:
            session.commit()

        # No synthetic override events are emitted: detached occurrences are
        # real Event rows already returned by the main query.
        return jsonify(result)
    finally:
        # Guarantee the session is released even when a query raises.
        session.close()
|
|
|
|
|
|
@events_bp.route("/<event_id>", methods=["GET"])  # get single event
def get_event(event_id):
    """Return one event as a Syncfusion-style dictionary, or 404."""
    session = Session()
    try:
        ev = session.query(Event).filter_by(id=event_id).first()
        if not ev:
            return jsonify({"error": "Termin nicht gefunden"}), 404

        # Fall back to "presentation" when no type is stored.
        type_value = ev.event_type.value if ev.event_type else "presentation"
        media = ev.event_media
        website_url = media.url if media and hasattr(media, 'url') else None

        payload = {
            "Id": str(ev.id),
            "Subject": ev.title,
            "StartTime": ev.start.isoformat() if ev.start else None,
            "EndTime": ev.end.isoformat() if ev.end else None,
            "Description": ev.description,
            "Type": type_value,
            "IsAllDay": False,  # events are never all-day here
            "MediaId": str(ev.event_media_id) if ev.event_media_id else None,
            "SlideshowInterval": ev.slideshow_interval,
            "WebsiteUrl": website_url,
            "RecurrenceRule": ev.recurrence_rule,
            "RecurrenceEnd": ev.recurrence_end.isoformat() if ev.recurrence_end else None,
            "SkipHolidays": ev.skip_holidays,
            "Icon": get_icon_for_type(type_value),
        }
        return jsonify(payload)
    except Exception as e:
        return jsonify({"error": f"Fehler beim Laden des Termins: {str(e)}"}), 500
    finally:
        session.close()
|
|
|
|
|
|
@events_bp.route("/<event_id>", methods=["DELETE"])  # delete series or single event
@editor_or_higher
def delete_event(event_id):
    """Delete an event.

    A recurring master (has a recurrence_rule) is only deleted when the
    caller confirms with ``?force=1`` — this guards against accidentally
    wiping a whole series from a single-occurrence UI action.
    """
    session = Session()
    try:
        event = session.query(Event).filter_by(id=event_id).first()
        if not event:
            return jsonify({"error": "Termin nicht gefunden"}), 404

        force = request.args.get('force') == '1'
        if event.recurrence_rule and not force:
            return jsonify({
                "error": "Löschen der Terminserie erfordert Bestätigung",
                "hint": "Fügen Sie ?force=1 zur Anfrage hinzu, um die Serie zu löschen.",
                "event_id": event_id
            }), 400

        session.delete(event)
        session.commit()
        return jsonify({"success": True})
    finally:
        # Release the session on every path, including raised exceptions
        # (the original leaked the session when delete/commit failed).
        session.close()
|
|
|
|
|
|
@events_bp.route("/<event_id>/occurrences/<occurrence_date>", methods=["DELETE"])  # skip single occurrence
@editor_or_higher
def delete_event_occurrence(event_id, occurrence_date):
    """Delete a single occurrence of a recurring event by creating an EventException.

    The occurrence is skipped (not physically deleted): an EventException row
    with is_skipped=True is created — or an existing one for that date is
    flipped to skipped.
    """
    session = Session()
    try:
        # Validate event exists.
        event = session.query(Event).filter_by(id=event_id).first()
        if not event:
            return jsonify({"error": "Termin nicht gefunden"}), 404

        # Only recurring events have occurrences to skip.
        if not event.recurrence_rule:
            return jsonify({"error": "Termin ist keine Wiederholungsserie"}), 400

        # Parse the occurrence date (ISO format expected).
        try:
            occ_date = datetime.fromisoformat(occurrence_date).date()
        except ValueError:
            return jsonify({"error": "Ungültiges Datumsformat"}), 400

        # Reuse an existing exception row for this date if present.
        existing_exception = session.query(EventException).filter_by(
            event_id=event.id,
            exception_date=occ_date
        ).first()

        if existing_exception:
            existing_exception.is_skipped = True
            # NOTE(review): naive datetime.now() while the rest of the module
            # uses timezone-aware UTC — confirm the column expects naive time.
            existing_exception.updated_at = datetime.now()
        else:
            session.add(EventException(
                event_id=event.id,
                exception_date=occ_date,
                is_skipped=True
            ))

        session.commit()
        return jsonify({"success": True, "message": "Einzeltermin wurde gelöscht"})

    except Exception as e:
        session.rollback()
        return jsonify({"error": f"Fehler beim Löschen des Einzeltermins: {str(e)}"}), 500
    finally:
        # Single close point instead of one close per return path.
        session.close()
|
|
|
|
|
|
|
|
@events_bp.route("/<event_id>/occurrences/<occurrence_date>/detach", methods=["POST"])  # detach single occurrence into standalone event
@editor_or_higher
def detach_event_occurrence(event_id, occurrence_date):
    """BULLETPROOF: Detach single occurrence without touching master event.

    Turns one occurrence of a recurring series into a standalone Event row:
    (1) a skip-exception is recorded against the master for that date, and
    (2) a new non-recurring Event is created, copying the master's fields
    unless the request body overrides title/description/start/end.
    Both changes are committed atomically; the master row is never modified.
    """
    session = Session()
    try:
        # Optional per-occurrence overrides from the request body.
        data = request.json or {}

        # Step 0: Get master event and NEVER modify it
        master = session.query(Event).filter_by(id=event_id).first()
        if not master:
            session.close()
            return jsonify({"error": "Termin nicht gefunden"}), 404
        if not master.recurrence_rule:
            session.close()
            return jsonify({"error": "Termin ist keine Wiederholungsserie"}), 400

        # Store master data (read-only copy) — all later steps work off this
        # snapshot so the ORM-tracked master object stays untouched.
        master_data = {
            'id': master.id,
            'group_id': master.group_id,
            'title': master.title,
            'description': master.description,
            'start': master.start,
            'end': master.end,
            'event_type': master.event_type,
            'event_media_id': master.event_media_id,
            'slideshow_interval': getattr(master, 'slideshow_interval', None),
            'created_by': master.created_by,
        }

        # Parse the target occurrence date (ISO format expected).
        try:
            occ_date = datetime.fromisoformat(occurrence_date).date()
        except ValueError:
            session.close()
            return jsonify({"error": "Ungültiges Datumsformat"}), 400

        # Step 1: Create exception entry (using master ID, not master object)
        existing_exception = session.query(EventException).filter_by(
            event_id=master_data['id'],
            exception_date=occ_date
        ).first()

        if not existing_exception:
            exception = EventException(
                event_id=master_data['id'],
                exception_date=occ_date,
                is_skipped=True
            )
            session.add(exception)
        else:
            # Reuse the existing row; just make sure the occurrence is skipped.
            existing_exception.is_skipped = True

        # Step 2: Create new standalone event (using copied data, not master object)
        new_title = data.get("title", master_data['title'])
        new_description = data.get("description", master_data['description'])

        if data.get("start"):
            new_start = datetime.fromisoformat(data["start"])
        else:
            # No override: occurrence keeps the master's time-of-day (in UTC)
            # on the requested date.
            base_start_utc = master_data['start'].astimezone(UTC) if master_data['start'].tzinfo else master_data['start'].replace(tzinfo=UTC)
            new_start = datetime(occ_date.year, occ_date.month, occ_date.day,
                                 base_start_utc.hour, base_start_utc.minute, base_start_utc.second, tzinfo=UTC)

        if data.get("end"):
            new_end = datetime.fromisoformat(data["end"])
        else:
            # Preserve the master's duration; fall back to 30 minutes when
            # the master lacks start or end.
            duration = (master_data['end'] - master_data['start']) if (master_data['end'] and master_data['start']) else timedelta(minutes=30)
            new_end = new_start + duration

        # The clone is explicitly non-recurring (rule/end/skip_holidays reset).
        new_event = Event(
            group_id=master_data['group_id'],
            title=new_title,
            description=new_description,
            start=new_start,
            end=new_end,
            event_type=master_data['event_type'],
            event_media_id=master_data['event_media_id'],
            slideshow_interval=master_data['slideshow_interval'],
            recurrence_rule=None,
            recurrence_end=None,
            skip_holidays=False,
            created_by=master_data['created_by'],
            updated_by=master_data['created_by'],
            is_active=True,
        )
        session.add(new_event)

        # Commit both changes at once — exception + clone succeed or fail together.
        session.commit()

        # Read the generated id before closing the session.
        new_event_id = new_event.id
        session.close()
        return jsonify({
            "success": True,
            "new_event_id": new_event_id,
            "master_event_id": master_data['id'],
            "message": f"Einzeltermin erstellt, Master-Event {master_data['id']} unberührt"
        })

    except Exception as e:
        session.rollback()
        session.close()
        return jsonify({"error": f"Fehler beim Erstellen des Einzeltermins: {str(e)}"}), 500
|
|
|
|
|
|
@events_bp.route("", methods=["POST"])
@editor_or_higher
def create_event():
    """Create a new event (optionally recurring).

    Required JSON fields: group_id, title, description, start, end,
    event_type, created_by. "presentation" events must reference existing
    media via event_media_id; "website" events get a fresh EventMedia row
    for their URL. After creation, holiday-skip exceptions are generated
    when skip_holidays and a recurrence rule are set.
    """
    data = request.json
    session = Session()
    try:
        # Validate required payload fields.
        required = ["group_id", "title", "description",
                    "start", "end", "event_type", "created_by"]
        for field in required:
            if field not in data:
                return jsonify({"error": f"Missing field: {field}"}), 400

        event_type = data["event_type"]
        event_media_id = None
        slideshow_interval = None

        # Presentation: take over event_media_id and slideshow_interval.
        if event_type == "presentation":
            event_media_id = data.get("event_media_id")
            slideshow_interval = data.get("slideshow_interval")
            if not event_media_id:
                return jsonify({"error": "event_media_id required for presentation"}), 400

        # Website: create an EventMedia row for the URL and use its id.
        if event_type == "website":
            website_url = data.get("website_url")
            if not website_url:
                return jsonify({"error": "website_url required for website"}), 400
            media = EventMedia(
                media_type=MediaType.website,
                url=website_url,
                file_path=website_url
            )
            session.add(media)
            session.commit()
            event_media_id = media.id

        created_by = data.get("created_by")

        start = datetime.fromisoformat(data["start"])
        end = datetime.fromisoformat(data["end"])
        # NOTE(review): astimezone() on a *naive* datetime interprets it in the
        # server's local zone before converting to UTC. If clients send naive
        # UTC timestamps, replace(tzinfo=timezone.utc) would be correct instead
        # — confirm against the dashboard's payload format.
        if start.tzinfo is None:
            start = start.astimezone(timezone.utc)
        if end.tzinfo is None:
            end = end.astimezone(timezone.utc)

        # Accept both camelCase and snake_case spellings of the flag.
        skip_holidays_val = bool(data.get("skipHolidays")) or bool(data.get("skip_holidays"))

        event = Event(
            group_id=data["group_id"],
            title=data["title"],
            description=data["description"],
            start=start,
            end=end,
            event_type=event_type,
            is_active=True,
            event_media_id=event_media_id,
            slideshow_interval=slideshow_interval,
            created_by=created_by,
            # Recurrence
            recurrence_rule=data.get("recurrence_rule"),
            skip_holidays=skip_holidays_val,
            recurrence_end=(datetime.fromisoformat(data["recurrence_end"]) if data.get("recurrence_end") else None),
        )
        session.add(event)
        session.commit()

        # --- Holiday exception creation (backend) ---
        def regenerate_event_exceptions(ev: Event):
            """Recreate auto-generated holiday-skip exceptions for *ev*."""
            from models.models import SchoolHoliday, EventException
            from dateutil.rrule import rrulestr
            from dateutil.tz import UTC
            # Clear only auto-generated holiday skips; rows carrying any
            # override_* data are user edits and must survive.
            session.query(EventException).filter(
                EventException.event_id == ev.id,
                EventException.is_skipped == True,
                EventException.override_title.is_(None),
                EventException.override_description.is_(None),
                EventException.override_start.is_(None),
                EventException.override_end.is_(None),
            ).delete(synchronize_session=False)
            session.commit()
            if not (ev.skip_holidays and ev.recurrence_rule):
                return
            holidays = session.query(SchoolHoliday).all()
            dtstart = ev.start.astimezone(UTC)
            r = rrulestr(ev.recurrence_rule, dtstart=dtstart)
            window_start = dtstart
            # Default window: one year after the first occurrence when no
            # explicit recurrence end is set.
            window_end = ev.recurrence_end.astimezone(UTC) if ev.recurrence_end else dtstart.replace(year=dtstart.year + 1)
            # Expand all holiday ranges into a set of individual dates (inclusive).
            holiday_dates = set()
            for h in holidays:
                d = h.start_date
                while d <= h.end_date:
                    holiday_dates.add(d)
                    d = d + timedelta(days=1)
            # Skip every occurrence that falls on a holiday date.
            for occ_start in r.between(window_start, window_end, inc=True):
                occ_date = occ_start.date()
                if occ_date in holiday_dates:
                    session.add(EventException(event_id=ev.id, exception_date=occ_date, is_skipped=True))
            session.commit()

        regenerate_event_exceptions(event)

        # Read the generated id while the session is still open.
        new_event_id = event.id
        return jsonify({"success": True, "event_id": new_event_id})
    finally:
        # The original leaked the session on every request (and on each early
        # validation return); close it unconditionally.
        session.close()
|
|
|
|
|
|
@events_bp.route("/<event_id>", methods=["PUT"])  # update series or single event
@editor_or_higher
def update_event(event_id):
    """Update an event in place; absent fields keep their current values.

    When the recurrence rule, recurrence end, or skip_holidays flag changes,
    the auto-generated holiday-skip exceptions are regenerated (user override
    exceptions are preserved).
    """
    data = request.json
    session = Session()
    event = session.query(Event).filter_by(id=event_id).first()
    if not event:
        session.close()
        return jsonify({"error": "Termin nicht gefunden"}), 404

    # Partial update: each field falls back to its existing value.
    event.title = data.get("title", event.title)
    event.description = data.get("description", event.description)
    event.start = datetime.fromisoformat(
        data["start"]) if "start" in data else event.start
    event.end = datetime.fromisoformat(
        data["end"]) if "end" in data else event.end
    event.event_type = data.get("event_type", event.event_type)
    event.event_media_id = data.get("event_media_id", event.event_media_id)
    event.slideshow_interval = data.get("slideshow_interval", event.slideshow_interval)
    event.created_by = data.get("created_by", event.created_by)
    # Track previous values to decide on exception regeneration
    # (captured before the recurrence fields below are overwritten).
    prev_rule = event.recurrence_rule
    prev_end = event.recurrence_end
    prev_skip = bool(getattr(event, 'skip_holidays', False))

    # Recurrence updates
    if "recurrence_rule" in data:
        event.recurrence_rule = data.get("recurrence_rule")
    if "recurrence_end" in data:
        rec_end_val = data.get("recurrence_end")
        event.recurrence_end = datetime.fromisoformat(rec_end_val) if rec_end_val else None
    # Skip holidays can be updated independently
    if "skipHolidays" in data or "skip_holidays" in data:
        event.skip_holidays = bool(data.get("skipHolidays") or data.get("skip_holidays"))

    session.commit()

    # Regenerate exceptions if any relevant field changed
    # (attributes are re-read after commit; SQLAlchemy refreshes expired state).
    need_regen = (
        prev_rule != event.recurrence_rule or
        prev_end != event.recurrence_end or
        prev_skip != bool(getattr(event, 'skip_holidays', False))
    )
    if need_regen:
        # Re-use helper from create route (preserve user overrides)
        def regenerate_event_exceptions(ev: Event):
            """Recreate auto-generated holiday-skip exceptions for *ev*."""
            from models.models import SchoolHoliday, EventException
            from dateutil.rrule import rrulestr
            from dateutil.tz import UTC
            # Clear only auto-generated holiday skip exceptions, keep user overrides
            # (rows with any override_* column set are user edits).
            session.query(EventException).filter(
                EventException.event_id == ev.id,
                EventException.is_skipped == True,
                EventException.override_title.is_(None),
                EventException.override_description.is_(None),
                EventException.override_start.is_(None),
                EventException.override_end.is_(None),
            ).delete(synchronize_session=False)
            session.commit()
            if not (ev.skip_holidays and ev.recurrence_rule):
                return
            # Get holidays
            holidays = session.query(SchoolHoliday).all()
            dtstart = ev.start.astimezone(UTC)
            r = rrulestr(ev.recurrence_rule, dtstart=dtstart)
            window_start = dtstart
            # Default window: one year after dtstart when no explicit end is set.
            window_end = ev.recurrence_end.astimezone(UTC) if ev.recurrence_end else dtstart.replace(year=dtstart.year + 1)
            # Build set of all holiday dates (inclusive)
            holiday_dates = set()
            for h in holidays:
                hs = h.start_date
                he = h.end_date
                d = hs
                while d <= he:
                    holiday_dates.add(d)
                    d = d + timedelta(days=1)
            # Create exceptions for occurrences on holiday dates
            for occ_start in r.between(window_start, window_end, inc=True):
                occ_date = occ_start.date()
                if occ_date in holiday_dates:
                    session.add(EventException(event_id=ev.id, exception_date=occ_date, is_skipped=True))
            session.commit()

        regenerate_event_exceptions(event)

    # Return success with event id (read before closing the session).
    event_id_return = event.id
    session.close()
    return jsonify({"success": True, "event_id": event_id_return})
|