# Changelog (from commit history):
# - Add superadmin monitoring dashboard with protected route, menu entry, and monitoring data client.
# - Add monitoring overview API endpoint; improve log serialization/aggregation for dashboard use.
# - Extend listener health/log handling with robust status/event/timestamp normalization and
#   screenshot payload extraction.
# - Improve screenshot persistence and retrieval (timestamp-aware uploads, latest-screenshot
#   endpoint fallback).
# - Fix page_progress and auto_progress persistence/serialization across create, update, and
#   detached occurrence flows.
# - Align technical and project docs to reflect implemented monitoring and no-version-bump
#   backend changes; add documentation sync log entry and minor compose env indentation cleanup.
# (File stats: 618 lines, 24 KiB, Python.)
from flask import Blueprint, request, jsonify
|
|
from server.permissions import editor_or_higher
|
|
from server.database import Session
|
|
from server.serializers import dict_to_camel_case, dict_to_snake_case
|
|
from models.models import Event, EventMedia, MediaType, EventException, SystemSetting
|
|
from datetime import datetime, timezone, timedelta
|
|
from sqlalchemy import and_
|
|
from dateutil.rrule import rrulestr
|
|
from dateutil.tz import UTC
|
|
import sys
|
|
sys.path.append('/workspace')
|
|
|
|
# Blueprint bundling all event CRUD and recurrence endpoints, mounted at /api/events.
events_bp = Blueprint("events", __name__, url_prefix="/api/events")
|
|
|
|
|
|
def get_icon_for_type(event_type):
    """Return the Lucide icon name for an event type string ("" for unknown types)."""
    icon_by_type = {
        "presentation": "Presentation",
        "website": "Globe",
        "video": "Video",
        "message": "MessageSquare",
        "webuntis": "School",
    }
    if event_type in icon_by_type:
        return icon_by_type[event_type]
    return ""
|
|
|
|
|
|
@events_bp.route("", methods=["GET"])
def get_events():
    """List events serialized for the Syncfusion scheduler (camelCase keys).

    Query parameters:
        group_id: optional int — restrict the result to one group.
        show_inactive: "1" keeps deactivated events in the response.
        start / end: read but currently unused; recurrence expansion happens
            entirely on the client (see `expand = False` below).

    Side effects: lazily deactivates events whose end (or recurrence UNTIL)
    lies in the past, committing per affected event.
    """
    session = Session()
    start = request.args.get("start")
    end = request.args.get("end")
    group_id = request.args.get("group_id")
    show_inactive = request.args.get(
        "show_inactive", "0") == "1"  # checkbox semantics: "1" means include inactive events
    # Always let Syncfusion handle recurrence; do not expand on backend
    expand = False

    now = datetime.now(timezone.utc)
    events_query = session.query(Event)
    if group_id:
        events_query = events_query.filter(Event.group_id == int(group_id))
    events = events_query.all()

    result = []
    for e in events:
        # Timezone normalization for e.end: naive DB values are interpreted as UTC
        if e.end and e.end.tzinfo is None:
            end_dt = e.end.replace(tzinfo=timezone.utc)
        else:
            end_dt = e.end

        # Auto-deactivate only non-recurring events past their end.
        # Recurring events remain active until their RecurrenceEnd (UNTIL) has passed.
        if e.is_active:
            if e.recurrence_rule:
                # For recurring, deactivate only when recurrence_end exists and is in the past
                rec_end = e.recurrence_end
                if rec_end and rec_end.tzinfo is None:
                    rec_end = rec_end.replace(tzinfo=timezone.utc)
                if rec_end and rec_end < now:
                    e.is_active = False
                    session.commit()
            else:
                # Non-recurring: deactivate when end is in the past
                if end_dt and end_dt < now:
                    e.is_active = False
                    session.commit()
        if not (show_inactive or e.is_active):
            continue

        # Gather exceptions for this event
        all_exceptions = session.query(EventException).filter(
            EventException.event_id == e.id
        ).all()

        # Build RecurrenceException (EXDATE) tokens for skipped occurrences only
        # (detached occurrences are now real Event rows, not synthetic)
        recurrence_exception = None
        if all_exceptions:
            base_start = e.start.astimezone(UTC) if e.start.tzinfo else e.start.replace(tzinfo=UTC)
            tokens = []
            for ex in all_exceptions:
                if ex.is_skipped:
                    exd = ex.exception_date
                    # Create the EXDATE timestamp in Syncfusion's expected format.
                    # Use the exact time of the occurrence that would have happened
                    # (date from the exception, time-of-day from the series start).
                    occ_dt = datetime(
                        exd.year, exd.month, exd.day,
                        base_start.hour, base_start.minute, base_start.second,
                        tzinfo=UTC
                    )
                    # Format as compact ISO without separators (yyyyMMddThhmmssZ) - RFC 5545 format
                    token = occ_dt.strftime('%Y%m%dT%H%M%SZ')
                    tokens.append(token)
            if tokens:
                recurrence_exception = ','.join(tokens)

        base_payload = {
            "id": str(e.id),
            "group_id": e.group_id,
            "subject": e.title,
            "description": getattr(e, 'description', None),
            "start_time": e.start.isoformat() if e.start else None,
            "end_time": e.end.isoformat() if e.end else None,
            "is_all_day": False,
            "media_id": e.event_media_id,
            "slideshow_interval": e.slideshow_interval,
            "page_progress": e.page_progress,
            "auto_progress": e.auto_progress,
            "type": e.event_type.value if e.event_type else None,
            "icon": get_icon_for_type(e.event_type.value if e.event_type else None),
            # Recurrence metadata
            "recurrence_rule": e.recurrence_rule,
            "recurrence_end": e.recurrence_end.isoformat() if e.recurrence_end else None,
            "recurrence_exception": recurrence_exception,
            "skip_holidays": bool(getattr(e, 'skip_holidays', False)),
        }
        result.append(base_payload)

    # No need to emit synthetic override events anymore since detached occurrences
    # are now real Event rows that will be returned in the main query
    session.close()
    # Convert all keys to camelCase for frontend
    return jsonify(dict_to_camel_case(result))
|
|
|
|
|
|
@events_bp.route("/<event_id>", methods=["GET"])  # get single event
def get_event(event_id):
    """Return one event as a camelCase JSON payload.

    Responses:
        200 with the serialized event,
        404 when no event matches ``event_id``,
        500 with the exception message on any other failure.
    """
    session = Session()
    try:
        event = session.query(Event).filter_by(id=event_id).first()
        if not event:
            return jsonify({"error": "Termin nicht gefunden"}), 404
        # Serialize every field the frontend editor needs.
        event_dict = {
            "id": str(event.id),
            "subject": event.title,
            "start_time": event.start.isoformat() if event.start else None,
            "end_time": event.end.isoformat() if event.end else None,
            "description": event.description,
            "type": event.event_type.value if event.event_type else "presentation",
            "is_all_day": False,  # Assuming events are not all-day by default
            "media_id": str(event.event_media_id) if event.event_media_id else None,
            "slideshow_interval": event.slideshow_interval,
            "page_progress": event.page_progress,
            "auto_progress": event.auto_progress,
            "website_url": event.event_media.url if event.event_media and hasattr(event.event_media, 'url') else None,
            # Video-specific fields
            "autoplay": event.autoplay,
            "loop": event.loop,
            "volume": event.volume,
            "muted": event.muted,
            "recurrence_rule": event.recurrence_rule,
            "recurrence_end": event.recurrence_end.isoformat() if event.recurrence_end else None,
            "skip_holidays": event.skip_holidays,
            "icon": get_icon_for_type(event.event_type.value if event.event_type else "presentation"),
        }

        # Fix: the original had a second, unreachable `return jsonify(event_dict)`
        # after this return; it has been removed.
        return jsonify(dict_to_camel_case(event_dict))
    except Exception as e:
        return jsonify({"error": f"Fehler beim Laden des Termins: {str(e)}"}), 500
    finally:
        session.close()
|
|
|
|
|
|
@events_bp.route("/<event_id>", methods=["DELETE"])  # delete series or single event
@editor_or_higher
def delete_event(event_id):
    """Delete an event (or a whole recurring series with explicit confirmation).

    Deleting a recurring master requires ``?force=1``; otherwise a 400 with a
    hint is returned so the client can ask the user to confirm.

    Fix: session cleanup now happens in ``finally`` so the session can no
    longer leak when the query/delete raises (the original closed it manually
    on each return path and not at all on unexpected errors).
    """
    session = Session()
    try:
        event = session.query(Event).filter_by(id=event_id).first()
        if not event:
            return jsonify({"error": "Termin nicht gefunden"}), 404
        # Safety: do not allow accidental deletion of a recurring master without explicit force flag
        force = request.args.get('force') == '1'
        if event.recurrence_rule and not force:
            return jsonify({
                "error": "Löschen der Terminserie erfordert Bestätigung",
                "hint": "Fügen Sie ?force=1 zur Anfrage hinzu, um die Serie zu löschen.",
                "event_id": event_id
            }), 400
        session.delete(event)
        session.commit()
        return jsonify({"success": True})
    finally:
        session.close()
|
|
|
|
|
|
@events_bp.route("/<event_id>/occurrences/<occurrence_date>", methods=["DELETE"])  # skip single occurrence
@editor_or_higher
def delete_event_occurrence(event_id, occurrence_date):
    """Delete a single occurrence of a recurring event by creating an EventException.

    Validates that the event exists and is recurring, parses ``occurrence_date``
    (ISO format, date part only), then marks — or creates — a skipped
    EventException for that date.

    Fix: the four duplicated ``session.close()`` call sites were consolidated
    into a single ``finally`` so the session is released on every path,
    including errors raised outside the original close calls.
    """
    session = Session()
    try:
        # Validate event exists
        event = session.query(Event).filter_by(id=event_id).first()
        if not event:
            return jsonify({"error": "Termin nicht gefunden"}), 404

        # Validate that this is a recurring event
        if not event.recurrence_rule:
            return jsonify({"error": "Termin ist keine Wiederholungsserie"}), 400

        # Parse the occurrence date (time portion, if any, is discarded)
        try:
            occ_date = datetime.fromisoformat(occurrence_date).date()
        except ValueError:
            return jsonify({"error": "Ungültiges Datumsformat"}), 400

        # Check if an exception for this date already exists
        existing_exception = session.query(EventException).filter_by(
            event_id=event.id,
            exception_date=occ_date
        ).first()

        if existing_exception:
            # Re-mark the existing exception as skipped instead of inserting a duplicate.
            existing_exception.is_skipped = True
            # NOTE(review): naive local timestamp while the rest of the module uses
            # UTC-aware datetimes — confirm the updated_at column's expected semantics.
            existing_exception.updated_at = datetime.now()
        else:
            # Create new exception to skip this occurrence
            exception = EventException(
                event_id=event.id,
                exception_date=occ_date,
                is_skipped=True
            )
            session.add(exception)

        session.commit()
        return jsonify({"success": True, "message": "Einzeltermin wurde gelöscht"})

    except Exception as e:
        session.rollback()
        return jsonify({"error": f"Fehler beim Löschen des Einzeltermins: {str(e)}"}), 500
    finally:
        session.close()
|
|
|
|
|
|
|
|
@events_bp.route("/<event_id>/occurrences/<occurrence_date>/detach", methods=["POST"])  # detach single occurrence into standalone event
@editor_or_higher
def detach_event_occurrence(event_id, occurrence_date):
    """BULLETPROOF: Detach single occurrence without touching master event.

    Turns one occurrence of a recurring series into a standalone Event row:
    (1) a skipped EventException hides the occurrence in the series, and
    (2) a new non-recurring Event is created from the master's data, with any
    overrides from the JSON body (title, description, start, end,
    page_progress, auto_progress) applied. Both changes are committed together.
    """
    session = Session()
    try:
        data = request.json or {}

        # Step 0: Get master event and NEVER modify it
        master = session.query(Event).filter_by(id=event_id).first()
        if not master:
            session.close()
            return jsonify({"error": "Termin nicht gefunden"}), 404
        if not master.recurrence_rule:
            session.close()
            return jsonify({"error": "Termin ist keine Wiederholungsserie"}), 400

        # Store master data (read-only copy) so later steps never touch the ORM object
        master_data = {
            'id': master.id,
            'group_id': master.group_id,
            'title': master.title,
            'description': master.description,
            'start': master.start,
            'end': master.end,
            'event_type': master.event_type,
            'event_media_id': master.event_media_id,
            'slideshow_interval': getattr(master, 'slideshow_interval', None),
            'page_progress': getattr(master, 'page_progress', None),
            'auto_progress': getattr(master, 'auto_progress', None),
            'created_by': master.created_by,
        }

        try:
            occ_date = datetime.fromisoformat(occurrence_date).date()
        except ValueError:
            session.close()
            return jsonify({"error": "Ungültiges Datumsformat"}), 400

        # Step 1: Create exception entry (using master ID, not master object)
        existing_exception = session.query(EventException).filter_by(
            event_id=master_data['id'],
            exception_date=occ_date
        ).first()

        if not existing_exception:
            exception = EventException(
                event_id=master_data['id'],
                exception_date=occ_date,
                is_skipped=True
            )
            session.add(exception)
        else:
            existing_exception.is_skipped = True

        # Step 2: Create new standalone event (using copied data, not master object)
        new_title = data.get("title", master_data['title'])
        new_description = data.get("description", master_data['description'])

        if data.get("start"):
            new_start = datetime.fromisoformat(data["start"])
        else:
            # Default start: occurrence date with the master's UTC time-of-day
            base_start_utc = master_data['start'].astimezone(UTC) if master_data['start'].tzinfo else master_data['start'].replace(tzinfo=UTC)
            new_start = datetime(occ_date.year, occ_date.month, occ_date.day,
                                 base_start_utc.hour, base_start_utc.minute, base_start_utc.second, tzinfo=UTC)

        if data.get("end"):
            new_end = datetime.fromisoformat(data["end"])
        else:
            # Default end: preserve the master's duration (30 min if it has none)
            duration = (master_data['end'] - master_data['start']) if (master_data['end'] and master_data['start']) else timedelta(minutes=30)
            new_end = new_start + duration

        new_event = Event(
            group_id=master_data['group_id'],
            title=new_title,
            description=new_description,
            start=new_start,
            end=new_end,
            event_type=master_data['event_type'],
            event_media_id=master_data['event_media_id'],
            slideshow_interval=master_data['slideshow_interval'],
            page_progress=data.get("page_progress", master_data['page_progress']),
            auto_progress=data.get("auto_progress", master_data['auto_progress']),
            recurrence_rule=None,
            recurrence_end=None,
            skip_holidays=False,
            created_by=master_data['created_by'],
            updated_by=master_data['created_by'],
            is_active=True,
        )
        session.add(new_event)

        # Commit both changes at once
        session.commit()

        # Read the generated id before closing the session (avoids detached-instance access)
        new_event_id = new_event.id
        session.close()
        return jsonify({
            "success": True,
            "new_event_id": new_event_id,
            "master_event_id": master_data['id'],
            "message": f"Einzeltermin erstellt, Master-Event {master_data['id']} unberührt"
        })

    except Exception as e:
        session.rollback()
        session.close()
        return jsonify({"error": f"Fehler beim Erstellen des Einzeltermins: {str(e)}"}), 500
|
|
|
|
|
|
@events_bp.route("", methods=["POST"])
@editor_or_higher
def create_event():
    """Create an event from a JSON payload and persist it.

    Required fields: group_id, title, description, start, end, event_type,
    created_by. Type-specific handling:
      - presentation: takes event_media_id (required), slideshow_interval,
        page_progress, auto_progress.
      - website: creates an EventMedia row from website_url (required).
      - webuntis: creates an EventMedia row from the 'supplement_table_url'
        system setting.
      - video: takes event_media_id (required) plus playback settings.
    After the insert, holiday skip-exceptions are (re)generated when
    skip_holidays and a recurrence rule are set.

    NOTE(review): the session opened here is never explicitly closed on the
    early-return validation paths or at the end — confirm whether the Session
    factory is request-scoped or this leaks connections.
    """
    data = request.json
    session = Session()

    # Check required fields
    required = ["group_id", "title", "description",
                "start", "end", "event_type", "created_by"]
    for field in required:
        if field not in data:
            return jsonify({"error": f"Missing field: {field}"}), 400

    event_type = data["event_type"]
    event_media_id = None
    slideshow_interval = None
    page_progress = None
    auto_progress = None

    # Presentation: take over event_media_id and slideshow_interval
    if event_type == "presentation":
        event_media_id = data.get("event_media_id")
        slideshow_interval = data.get("slideshow_interval")
        page_progress = data.get("page_progress")
        auto_progress = data.get("auto_progress")
        if not event_media_id:
            return jsonify({"error": "event_media_id required for presentation"}), 400

    # Website: create an EventMedia row for the URL and use its id
    if event_type == "website":
        website_url = data.get("website_url")
        if not website_url:
            return jsonify({"error": "website_url required for website"}), 400
        # Create EventMedia for the website
        media = EventMedia(
            media_type=MediaType.website,
            url=website_url,
            file_path=website_url
        )
        session.add(media)
        session.commit()
        event_media_id = media.id

    # WebUntis: fetch the URL from system settings and create an EventMedia row
    if event_type == "webuntis":
        # Fetch WebUntis URL from system settings (uses supplement_table_url)
        webuntis_setting = session.query(SystemSetting).filter_by(key='supplement_table_url').first()
        webuntis_url = webuntis_setting.value if webuntis_setting else ''

        if not webuntis_url:
            return jsonify({"error": "WebUntis / Supplement table URL not configured in system settings"}), 400

        # Create EventMedia for WebUntis
        media = EventMedia(
            media_type=MediaType.website,
            url=webuntis_url,
            file_path=webuntis_url
        )
        session.add(media)
        session.commit()
        event_media_id = media.id

    # Video: take over event_media_id and video playback settings
    autoplay = None
    loop = None
    volume = None
    muted = None
    if event_type == "video":
        event_media_id = data.get("event_media_id")
        if not event_media_id:
            return jsonify({"error": "event_media_id required for video"}), 400
        # Get video-specific settings with defaults
        autoplay = data.get("autoplay", True)
        loop = data.get("loop", False)
        volume = data.get("volume", 0.8)
        muted = data.get("muted", False)

    # Take created_by from the payload, default: None
    created_by = data.get("created_by")

    # Convert start/end to UTC when no Zulu (timezone-aware) timestamp was sent.
    # NOTE(review): astimezone() on a NAIVE datetime interprets it as server-local
    # time before converting — confirm that is the intended behavior here.
    start = datetime.fromisoformat(data["start"])
    end = datetime.fromisoformat(data["end"])
    if start.tzinfo is None:
        start = start.astimezone(timezone.utc)
    if end.tzinfo is None:
        end = end.astimezone(timezone.utc)

    # Determine skip_holidays from either camelCase or snake_case
    skip_holidays_val = bool(data.get("skipHolidays")) or bool(data.get("skip_holidays"))

    # Create the event row
    event = Event(
        group_id=data["group_id"],
        title=data["title"],
        description=data["description"],
        start=start,
        end=end,
        event_type=event_type,
        is_active=True,
        event_media_id=event_media_id,
        slideshow_interval=slideshow_interval,
        page_progress=page_progress,
        auto_progress=auto_progress,
        autoplay=autoplay,
        loop=loop,
        volume=volume,
        muted=muted,
        created_by=created_by,
        # Recurrence
        recurrence_rule=data.get("recurrence_rule"),
        skip_holidays=skip_holidays_val,
        recurrence_end=(datetime.fromisoformat(data["recurrence_end"]) if data.get("recurrence_end") else None),
    )
    session.add(event)
    session.commit()

    # --- Holiday exception creation (backend) ---
    def regenerate_event_exceptions(ev: Event):
        """Rebuild auto-generated holiday skip-exceptions for ev, preserving user overrides."""
        from models.models import SchoolHoliday, EventException
        from dateutil.rrule import rrulestr
        from dateutil.tz import UTC
        # Clear only auto-generated holiday skip exceptions, keep user overrides
        # (a row with any override_* column set is user-made and survives).
        session.query(EventException).filter(
            EventException.event_id == ev.id,
            EventException.is_skipped == True,
            EventException.override_title.is_(None),
            EventException.override_description.is_(None),
            EventException.override_start.is_(None),
            EventException.override_end.is_(None),
        ).delete(synchronize_session=False)
        session.commit()
        if not (ev.skip_holidays and ev.recurrence_rule):
            return
        # Get holidays
        holidays = session.query(SchoolHoliday).all()
        dtstart = ev.start.astimezone(UTC)
        r = rrulestr(ev.recurrence_rule, dtstart=dtstart)
        window_start = dtstart
        # Without an explicit recurrence end, scan one year ahead
        window_end = ev.recurrence_end.astimezone(UTC) if ev.recurrence_end else dtstart.replace(year=dtstart.year + 1)
        # Build set of all holiday dates (inclusive)
        holiday_dates = set()
        for h in holidays:
            hs = h.start_date
            he = h.end_date
            d = hs
            while d <= he:
                holiday_dates.add(d)
                d = d + timedelta(days=1)
        # Create exceptions for occurrences on holiday dates
        for occ_start in r.between(window_start, window_end, inc=True):
            occ_date = occ_start.date()
            if occ_date in holiday_dates:
                session.add(EventException(event_id=ev.id, exception_date=occ_date, is_skipped=True))
        session.commit()

    regenerate_event_exceptions(event)

    return jsonify({"success": True, "event_id": event.id})
|
|
|
|
|
|
@events_bp.route("/<event_id>", methods=["PUT"])  # update series or single event
@editor_or_higher
def update_event(event_id):
    """Partially update an event from a JSON payload.

    Only keys present in the body are applied; page_progress, auto_progress and
    the video fields are set via explicit membership checks so that explicit
    nulls can clear them. When the recurrence rule, recurrence end, or
    skip_holidays changed, holiday skip-exceptions are regenerated (the helper
    duplicates the one in create_event on purpose — keeping the two routes
    independent).

    NOTE(review): start/end from the payload are stored as parsed (possibly
    naive) datetimes without the UTC normalization done in create_event —
    confirm whether that asymmetry is intended.
    """
    data = request.json
    session = Session()
    event = session.query(Event).filter_by(id=event_id).first()
    if not event:
        session.close()
        return jsonify({"error": "Termin nicht gefunden"}), 404

    event.title = data.get("title", event.title)
    event.description = data.get("description", event.description)
    event.start = datetime.fromisoformat(
        data["start"]) if "start" in data else event.start
    event.end = datetime.fromisoformat(
        data["end"]) if "end" in data else event.end
    event.event_type = data.get("event_type", event.event_type)
    event.event_media_id = data.get("event_media_id", event.event_media_id)
    event.slideshow_interval = data.get("slideshow_interval", event.slideshow_interval)
    if "page_progress" in data:
        event.page_progress = data.get("page_progress")
    if "auto_progress" in data:
        event.auto_progress = data.get("auto_progress")
    # Video-specific fields
    if "autoplay" in data:
        event.autoplay = data.get("autoplay")
    if "loop" in data:
        event.loop = data.get("loop")
    if "volume" in data:
        event.volume = data.get("volume")
    if "muted" in data:
        event.muted = data.get("muted")
    event.created_by = data.get("created_by", event.created_by)
    # Track previous values to decide on exception regeneration
    prev_rule = event.recurrence_rule
    prev_end = event.recurrence_end
    prev_skip = bool(getattr(event, 'skip_holidays', False))

    # Recurrence updates
    if "recurrence_rule" in data:
        event.recurrence_rule = data.get("recurrence_rule")
    if "recurrence_end" in data:
        rec_end_val = data.get("recurrence_end")
        event.recurrence_end = datetime.fromisoformat(rec_end_val) if rec_end_val else None
    # Skip holidays can be updated independently
    if "skipHolidays" in data or "skip_holidays" in data:
        event.skip_holidays = bool(data.get("skipHolidays") or data.get("skip_holidays"))

    session.commit()

    # Regenerate exceptions if any relevant field changed
    need_regen = (
        prev_rule != event.recurrence_rule or
        prev_end != event.recurrence_end or
        prev_skip != bool(getattr(event, 'skip_holidays', False))
    )
    if need_regen:
        # Re-use helper from create route (preserve user overrides)
        def regenerate_event_exceptions(ev: Event):
            """Rebuild auto-generated holiday skip-exceptions for ev, preserving user overrides."""
            from models.models import SchoolHoliday, EventException
            from dateutil.rrule import rrulestr
            from dateutil.tz import UTC
            # Clear only auto-generated holiday skip exceptions, keep user overrides
            # (a row with any override_* column set is user-made and survives).
            session.query(EventException).filter(
                EventException.event_id == ev.id,
                EventException.is_skipped == True,
                EventException.override_title.is_(None),
                EventException.override_description.is_(None),
                EventException.override_start.is_(None),
                EventException.override_end.is_(None),
            ).delete(synchronize_session=False)
            session.commit()
            if not (ev.skip_holidays and ev.recurrence_rule):
                return
            # Get holidays
            holidays = session.query(SchoolHoliday).all()
            dtstart = ev.start.astimezone(UTC)
            r = rrulestr(ev.recurrence_rule, dtstart=dtstart)
            window_start = dtstart
            # Without an explicit recurrence end, scan one year ahead
            window_end = ev.recurrence_end.astimezone(UTC) if ev.recurrence_end else dtstart.replace(year=dtstart.year + 1)
            # Build set of all holiday dates (inclusive)
            holiday_dates = set()
            for h in holidays:
                hs = h.start_date
                he = h.end_date
                d = hs
                while d <= he:
                    holiday_dates.add(d)
                    d = d + timedelta(days=1)
            # Create exceptions for occurrences on holiday dates
            for occ_start in r.between(window_start, window_end, inc=True):
                occ_date = occ_start.date()
                if occ_date in holiday_dates:
                    session.add(EventException(event_id=ev.id, exception_date=occ_date, is_skipped=True))
            session.commit()

        regenerate_event_exceptions(event)

    # Return success with event id (read before close to avoid detached access)
    event_id_return = event.id
    session.close()
    return jsonify({"success": True, "event_id": event_id_return})
|