feat(events): reliable holiday skipping for recurrences + UI badge; clean logs
Backend: generate EventException records on create/update when skip_holidays or the recurrence changes; emit RecurrenceException (EXDATE) entries with the exact occurrence start time (UTC).
API: return master events with RecurrenceRule and RecurrenceException.
Frontend: map RecurrenceException to recurrenceException; ensure SkipHolidays instances never render on holidays; place the TentTree icon (black) next to the main event icon via template.
Docs: update README and Copilot instructions for recurrence/holiday behavior.
Cleanup: remove dataSource and debug console logs.
This commit is contained in:
@@ -0,0 +1,26 @@
|
||||
"""add skip_holidays to events
|
||||
|
||||
Revision ID: 12ab34cd56ef
|
||||
Revises: 2b627d0885c3
|
||||
Create Date: 2025-10-12
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '12ab34cd56ef'
|
||||
down_revision = '2b627d0885c3'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
def upgrade():
    """Add the ``skip_holidays`` flag to the ``events`` table.

    The column is NOT NULL with a server default of 0 (false) so that
    existing rows migrate cleanly without a data backfill.
    """
    skip_holidays = sa.Column(
        'skip_holidays',
        sa.Boolean(),
        nullable=False,
        server_default=sa.text('0'),
    )
    op.add_column('events', skip_holidays)
    # Optional: create index if queries need it
    # op.create_index('ix_events_skip_holidays', 'events', ['skip_holidays'])
|
||||
|
||||
|
||||
def downgrade():
    """Remove the ``skip_holidays`` column from ``events``.

    Mirrors :func:`upgrade`; the optional index drop is kept commented
    out to match the optional index creation above.
    """
    # Optional: drop index
    # op.drop_index('ix_events_skip_holidays', table_name='events')
    op.drop_column('events', 'skip_holidays')
|
||||
@@ -0,0 +1,61 @@
|
||||
"""Add recurrence fields to events and event_exceptions table
|
||||
|
||||
Revision ID: 15c357c0cf31
|
||||
Revises: b5a6c3d4e7f8
|
||||
Create Date: 2025-10-12 05:24:43.936743
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = '15c357c0cf31'
|
||||
down_revision: Union[str, None] = 'b5a6c3d4e7f8'
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Upgrade schema: add recurrence support.

    Creates the ``event_exceptions`` table (per-occurrence skip/override
    records for recurring events), renames the conversions source-media
    index to its full name, and adds indexed ``recurrence_rule`` /
    ``recurrence_end`` columns to ``events``.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'event_exceptions',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('event_id', sa.Integer(), nullable=False),
        sa.Column('exception_date', sa.Date(), nullable=False),
        sa.Column('is_skipped', sa.Boolean(), nullable=False),
        sa.Column('override_title', sa.String(length=100), nullable=True),
        sa.Column('override_description', sa.Text(), nullable=True),
        sa.Column('override_start', sa.TIMESTAMP(timezone=True), nullable=True),
        sa.Column('override_end', sa.TIMESTAMP(timezone=True), nullable=True),
        sa.Column('created_at', sa.TIMESTAMP(timezone=True),
                  server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
        sa.Column('updated_at', sa.TIMESTAMP(timezone=True),
                  server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
        sa.ForeignKeyConstraint(['event_id'], ['events.id'], ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_index(op.f('ix_event_exceptions_event_id'),
                    'event_exceptions', ['event_id'], unique=False)
    op.create_index(op.f('ix_event_exceptions_exception_date'),
                    'event_exceptions', ['exception_date'], unique=False)
    # Rename the conversions index: drop the abbreviated name, recreate
    # under the conventional full name.
    op.drop_index(op.f('ix_conv_source_event_media_id'),
                  table_name='conversions')
    op.create_index(op.f('ix_conversions_source_event_media_id'),
                    'conversions', ['source_event_media_id'], unique=False)
    # Recurrence metadata on events (both nullable: non-recurring events
    # leave them unset).
    op.add_column('events',
                  sa.Column('recurrence_rule', sa.String(length=255), nullable=True))
    op.add_column('events',
                  sa.Column('recurrence_end', sa.TIMESTAMP(timezone=True), nullable=True))
    op.create_index(op.f('ix_events_recurrence_end'),
                    'events', ['recurrence_end'], unique=False)
    op.create_index(op.f('ix_events_recurrence_rule'),
                    'events', ['recurrence_rule'], unique=False)
    # ### end Alembic commands ###
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Downgrade schema: undo recurrence support.

    Reverses :func:`upgrade` in the opposite order: drops the recurrence
    columns/indexes from ``events``, restores the abbreviated conversions
    index name, and drops the ``event_exceptions`` table.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_events_recurrence_rule'), table_name='events')
    op.drop_index(op.f('ix_events_recurrence_end'), table_name='events')
    op.drop_column('events', 'recurrence_end')
    op.drop_column('events', 'recurrence_rule')
    # Restore the original (abbreviated) conversions index name.
    op.drop_index(op.f('ix_conversions_source_event_media_id'),
                  table_name='conversions')
    op.create_index(op.f('ix_conv_source_event_media_id'),
                    'conversions', ['source_event_media_id'], unique=False)
    # Indexes must go before the table itself.
    op.drop_index(op.f('ix_event_exceptions_exception_date'),
                  table_name='event_exceptions')
    op.drop_index(op.f('ix_event_exceptions_event_id'),
                  table_name='event_exceptions')
    op.drop_table('event_exceptions')
    # ### end Alembic commands ###
|
||||
@@ -9,3 +9,4 @@ gunicorn
|
||||
redis>=5.0.1
|
||||
rq>=1.16.2
|
||||
requests>=2.32.3
|
||||
python-dateutil>=2.9.0.post0
|
||||
|
||||
113
server/routes/event_exceptions.py
Normal file
113
server/routes/event_exceptions.py
Normal file
@@ -0,0 +1,113 @@
|
||||
from flask import Blueprint, request, jsonify
|
||||
from server.database import Session
|
||||
from models.models import EventException, Event
|
||||
from datetime import datetime, date
|
||||
|
||||
event_exceptions_bp = Blueprint("event_exceptions", __name__, url_prefix="/api/event_exceptions")
|
||||
|
||||
|
||||
@event_exceptions_bp.route("", methods=["POST"])
def create_exception():
    """Create (or upsert) an exception for a recurring event occurrence.

    Expects JSON with required fields ``event_id`` and ``exception_date``
    (ISO-8601 date string, or a date object). If an exception already
    exists for the same event/date, it is updated in place rather than
    duplicated ("updated": True in the response).

    Returns 400 on missing fields or unparsable dates, 404 for an unknown
    event, otherwise {"success": True, "id": <exception id>}.
    """
    data = request.json
    session = Session()
    try:
        # Validate required fields before touching the database.
        for field in ("event_id", "exception_date"):
            if field not in data:
                return jsonify({"error": f"Missing field: {field}"}), 400

        # Validate event exists
        event = session.query(Event).filter_by(id=data["event_id"]).first()
        if not event:
            return jsonify({"error": "Event not found"}), 404

        # Bug fix: fromisoformat on malformed input previously escaped as an
        # unhandled ValueError (HTTP 500); reject it explicitly with 400.
        try:
            exc_date = (
                datetime.fromisoformat(data["exception_date"]).date()
                if isinstance(data["exception_date"], str)
                else data["exception_date"]
            )
            override_start = (
                datetime.fromisoformat(data["override_start"])
                if data.get("override_start") else None
            )
            override_end = (
                datetime.fromisoformat(data["override_end"])
                if data.get("override_end") else None
            )
        except ValueError:
            return jsonify({"error": "Invalid date format"}), 400

        # Check if an exception for this event and date already exists.
        existing_exc = session.query(EventException).filter_by(
            event_id=event.id, exception_date=exc_date).first()
        if existing_exc:
            # Update the existing exception; absent override dates keep
            # their current values (same semantics as the falsy check).
            existing_exc.is_skipped = bool(
                data.get("is_skipped", existing_exc.is_skipped))
            existing_exc.override_title = data.get(
                "override_title", existing_exc.override_title)
            existing_exc.override_description = data.get(
                "override_description", existing_exc.override_description)
            if override_start is not None:
                existing_exc.override_start = override_start
            if override_end is not None:
                existing_exc.override_end = override_end
            session.commit()
            return jsonify({"success": True, "id": existing_exc.id,
                            "updated": True})

        # Otherwise, create a new exception.
        exc = EventException(
            event_id=event.id,
            exception_date=exc_date,
            is_skipped=bool(data.get("is_skipped", False)),
            override_title=data.get("override_title"),
            override_description=data.get("override_description"),
            override_start=override_start,
            override_end=override_end,
        )
        session.add(exc)
        session.commit()
        return jsonify({"success": True, "id": exc.id})
    finally:
        # Bug fix: the session was previously leaked on the 400 path and on
        # every success path; always release it.
        session.close()
|
||||
|
||||
|
||||
@event_exceptions_bp.route("/<exc_id>", methods=["PUT"])
def update_exception(exc_id):
    """Partially update an EventException; only fields present in the JSON
    body are changed.

    ``exception_date`` / ``override_start`` / ``override_end`` are parsed
    from ISO-8601 strings; a null/empty override clears the field.
    Returns 400 on an unparsable date, 404 for an unknown id.
    """
    data = request.json
    session = Session()
    # Bug fix: the session was leaked if parsing or commit raised; a
    # try/finally guarantees it is released on every path.
    try:
        exc = session.query(EventException).filter_by(id=exc_id).first()
        if not exc:
            return jsonify({"error": "Exception not found"}), 404

        # Bug fix: malformed ISO strings previously surfaced as unhandled
        # ValueError (HTTP 500); reject them with an explicit 400.
        try:
            if "exception_date" in data:
                exc.exception_date = datetime.fromisoformat(
                    data["exception_date"]).date()
            if "override_start" in data:
                exc.override_start = (
                    datetime.fromisoformat(data["override_start"])
                    if data.get("override_start") else None)
            if "override_end" in data:
                exc.override_end = (
                    datetime.fromisoformat(data["override_end"])
                    if data.get("override_end") else None)
        except ValueError:
            return jsonify({"error": "Invalid date format"}), 400

        if "is_skipped" in data:
            exc.is_skipped = bool(data["is_skipped"])
        if "override_title" in data:
            exc.override_title = data.get("override_title")
        if "override_description" in data:
            exc.override_description = data.get("override_description")

        session.commit()
        return jsonify({"success": True})
    finally:
        session.close()
|
||||
|
||||
|
||||
@event_exceptions_bp.route("/<exc_id>", methods=["DELETE"])
def delete_exception(exc_id):
    """Delete a single EventException by its primary key.

    Returns 404 if no exception with the given id exists, otherwise
    {"success": True}.
    """
    session = Session()
    exception = session.query(EventException).filter_by(id=exc_id).first()
    if exception is None:
        session.close()
        return jsonify({"error": "Exception not found"}), 404
    session.delete(exception)
    session.commit()
    session.close()
    return jsonify({"success": True})
|
||||
|
||||
|
||||
@event_exceptions_bp.route("", methods=["GET"])
def list_exceptions():
    """List event exceptions, optionally filtered by ``?event_id=<int>``.

    Returns a JSON array of serialized exceptions; override datetimes are
    ISO-8601 strings or null. Returns 400 if ``event_id`` is present but
    not an integer.
    """
    event_id = request.args.get("event_id")
    if event_id:
        # Bug fix: int() on a non-numeric query param previously raised an
        # unhandled ValueError (HTTP 500); validate explicitly.
        try:
            event_id = int(event_id)
        except ValueError:
            return jsonify({"error": "event_id must be an integer"}), 400

    session = Session()
    try:
        q = session.query(EventException)
        if event_id:
            q = q.filter(EventException.event_id == event_id)
        out = [
            {
                "id": r.id,
                "event_id": r.event_id,
                "exception_date": r.exception_date.isoformat(),
                "is_skipped": r.is_skipped,
                "override_title": r.override_title,
                "override_description": r.override_description,
                "override_start": (r.override_start.isoformat()
                                   if r.override_start else None),
                "override_end": (r.override_end.isoformat()
                                 if r.override_end else None),
            }
            for r in q.all()
        ]
        return jsonify(out)
    finally:
        # Ensure the session is released even if the query raises.
        session.close()
|
||||
@@ -1,8 +1,10 @@
|
||||
from flask import Blueprint, request, jsonify
|
||||
from server.database import Session
|
||||
from models.models import Event, EventMedia, MediaType
|
||||
from datetime import datetime, timezone
|
||||
from models.models import Event, EventMedia, MediaType, EventException
|
||||
from datetime import datetime, timezone, timedelta
|
||||
from sqlalchemy import and_
|
||||
from dateutil.rrule import rrulestr
|
||||
from dateutil.tz import UTC
|
||||
import sys
|
||||
sys.path.append('/workspace')
|
||||
|
||||
@@ -28,6 +30,8 @@ def get_events():
|
||||
group_id = request.args.get("group_id")
|
||||
show_inactive = request.args.get(
|
||||
"show_inactive", "0") == "1" # Checkbox-Logik
|
||||
# Always let Syncfusion handle recurrence; do not expand on backend
|
||||
expand = False
|
||||
|
||||
now = datetime.now(timezone.utc)
|
||||
events_query = session.query(Event)
|
||||
@@ -47,18 +51,48 @@ def get_events():
|
||||
if end_dt and end_dt < now and e.is_active:
|
||||
e.is_active = False
|
||||
session.commit()
|
||||
if show_inactive or e.is_active:
|
||||
result.append({
|
||||
"Id": str(e.id),
|
||||
"GroupId": e.group_id,
|
||||
"Subject": e.title,
|
||||
"StartTime": e.start.isoformat() if e.start else None,
|
||||
"EndTime": e.end.isoformat() if e.end else None,
|
||||
"IsAllDay": False,
|
||||
"MediaId": e.event_media_id,
|
||||
"Type": e.event_type.value if e.event_type else None, # <-- Enum zu String!
|
||||
"Icon": get_icon_for_type(e.event_type.value if e.event_type else None),
|
||||
})
|
||||
if not (show_inactive or e.is_active):
|
||||
continue
|
||||
|
||||
# Gather exception dates for this event (for recurrenceException/EXDATE)
|
||||
exception_dates = session.query(EventException).filter(
|
||||
EventException.event_id == e.id,
|
||||
EventException.is_skipped == True
|
||||
).all()
|
||||
# Syncfusion expects recurrenceException as comma-separated ISO strings (yyyy-MM-ddTHH:mm:ssZ)
|
||||
# IMPORTANT: The time must match the event's occurrence start time. Use the event's start time-of-day.
|
||||
recurrence_exception = None
|
||||
if exception_dates:
|
||||
# Use event start time in UTC as baseline
|
||||
base_start = e.start.astimezone(UTC) if e.start.tzinfo else e.start.replace(tzinfo=UTC)
|
||||
tokens = []
|
||||
for d in exception_dates:
|
||||
exd = d.exception_date # date
|
||||
occ_dt = datetime(
|
||||
exd.year, exd.month, exd.day,
|
||||
base_start.hour, base_start.minute, base_start.second,
|
||||
tzinfo=UTC
|
||||
)
|
||||
tokens.append(occ_dt.strftime('%Y-%m-%dT%H:%M:%SZ'))
|
||||
recurrence_exception = ','.join(tokens)
|
||||
|
||||
base_payload = {
|
||||
"Id": str(e.id),
|
||||
"GroupId": e.group_id,
|
||||
"Subject": e.title,
|
||||
"StartTime": e.start.isoformat() if e.start else None,
|
||||
"EndTime": e.end.isoformat() if e.end else None,
|
||||
"IsAllDay": False,
|
||||
"MediaId": e.event_media_id,
|
||||
"Type": e.event_type.value if e.event_type else None, # <-- Enum zu String!
|
||||
"Icon": get_icon_for_type(e.event_type.value if e.event_type else None),
|
||||
# Recurrence metadata
|
||||
"RecurrenceRule": e.recurrence_rule,
|
||||
"RecurrenceEnd": e.recurrence_end.isoformat() if e.recurrence_end else None,
|
||||
"RecurrenceException": recurrence_exception,
|
||||
"SkipHolidays": bool(getattr(e, 'skip_holidays', False)),
|
||||
}
|
||||
result.append(base_payload)
|
||||
session.close()
|
||||
return jsonify(result)
|
||||
|
||||
@@ -125,6 +159,9 @@ def create_event():
|
||||
if end.tzinfo is None:
|
||||
end = end.astimezone(timezone.utc)
|
||||
|
||||
# Determine skip_holidays from either camelCase or snake_case
|
||||
skip_holidays_val = bool(data.get("skipHolidays")) or bool(data.get("skip_holidays"))
|
||||
|
||||
# Event anlegen
|
||||
event = Event(
|
||||
group_id=data["group_id"],
|
||||
@@ -134,12 +171,51 @@ def create_event():
|
||||
end=end,
|
||||
event_type=event_type,
|
||||
is_active=True,
|
||||
event_media_id=event_media_id,
|
||||
slideshow_interval=slideshow_interval,
|
||||
created_by=created_by # <--- HIER hinzugefügt
|
||||
event_media_id=event_media_id,
|
||||
slideshow_interval=slideshow_interval,
|
||||
created_by=created_by,
|
||||
# Recurrence
|
||||
recurrence_rule=data.get("recurrence_rule"),
|
||||
skip_holidays=skip_holidays_val,
|
||||
recurrence_end=(datetime.fromisoformat(data["recurrence_end"]) if data.get("recurrence_end") else None),
|
||||
)
|
||||
session.add(event)
|
||||
session.commit()
|
||||
|
||||
# --- Holiday exception creation (backend) ---
|
||||
def regenerate_event_exceptions(ev: Event):
|
||||
from models.models import SchoolHoliday, EventException
|
||||
from dateutil.rrule import rrulestr
|
||||
from dateutil.tz import UTC
|
||||
# Clear existing exceptions for this event
|
||||
session.query(EventException).filter_by(event_id=ev.id).delete()
|
||||
session.commit()
|
||||
if not (ev.skip_holidays and ev.recurrence_rule):
|
||||
return
|
||||
# Get holidays
|
||||
holidays = session.query(SchoolHoliday).all()
|
||||
dtstart = ev.start.astimezone(UTC)
|
||||
r = rrulestr(ev.recurrence_rule, dtstart=dtstart)
|
||||
window_start = dtstart
|
||||
window_end = ev.recurrence_end.astimezone(UTC) if ev.recurrence_end else dtstart.replace(year=dtstart.year + 1)
|
||||
# Build set of all holiday dates (inclusive)
|
||||
holiday_dates = set()
|
||||
for h in holidays:
|
||||
hs = h.start_date
|
||||
he = h.end_date
|
||||
d = hs
|
||||
while d <= he:
|
||||
holiday_dates.add(d)
|
||||
d = d + timedelta(days=1)
|
||||
# Create exceptions for occurrences on holiday dates
|
||||
for occ_start in r.between(window_start, window_end, inc=True):
|
||||
occ_date = occ_start.date()
|
||||
if occ_date in holiday_dates:
|
||||
session.add(EventException(event_id=ev.id, exception_date=occ_date, is_skipped=True))
|
||||
session.commit()
|
||||
|
||||
regenerate_event_exceptions(event)
|
||||
|
||||
return jsonify({"success": True, "event_id": event.id})
|
||||
|
||||
|
||||
@@ -160,10 +236,63 @@ def update_event(event_id):
|
||||
data["end"]) if "end" in data else event.end
|
||||
event.event_type = data.get("event_type", event.event_type)
|
||||
event.event_media_id = data.get("event_media_id", event.event_media_id)
|
||||
event.slideshow_interval = data.get(
|
||||
"slideshow_interval", event.slideshow_interval)
|
||||
event.slideshow_interval = data.get("slideshow_interval", event.slideshow_interval)
|
||||
event.created_by = data.get("created_by", event.created_by)
|
||||
# Track previous values to decide on exception regeneration
|
||||
prev_rule = event.recurrence_rule
|
||||
prev_end = event.recurrence_end
|
||||
prev_skip = bool(getattr(event, 'skip_holidays', False))
|
||||
|
||||
# Recurrence updates
|
||||
if "recurrence_rule" in data:
|
||||
event.recurrence_rule = data.get("recurrence_rule")
|
||||
if "recurrence_end" in data:
|
||||
rec_end_val = data.get("recurrence_end")
|
||||
event.recurrence_end = datetime.fromisoformat(rec_end_val) if rec_end_val else None
|
||||
# Skip holidays can be updated independently
|
||||
if "skipHolidays" in data or "skip_holidays" in data:
|
||||
event.skip_holidays = bool(data.get("skipHolidays") or data.get("skip_holidays"))
|
||||
|
||||
session.commit()
|
||||
|
||||
# Regenerate exceptions if any relevant field changed
|
||||
need_regen = (
|
||||
prev_rule != event.recurrence_rule or
|
||||
prev_end != event.recurrence_end or
|
||||
prev_skip != bool(getattr(event, 'skip_holidays', False))
|
||||
)
|
||||
if need_regen:
|
||||
# Re-use helper from create route
|
||||
def regenerate_event_exceptions(ev: Event):
|
||||
from models.models import SchoolHoliday, EventException
|
||||
from dateutil.rrule import rrulestr
|
||||
from dateutil.tz import UTC
|
||||
# Clear existing exceptions
|
||||
session.query(EventException).filter_by(event_id=ev.id).delete()
|
||||
session.commit()
|
||||
if not (ev.skip_holidays and ev.recurrence_rule):
|
||||
return
|
||||
holidays = session.query(SchoolHoliday).all()
|
||||
dtstart = ev.start.astimezone(UTC)
|
||||
r = rrulestr(ev.recurrence_rule, dtstart=dtstart)
|
||||
window_start = dtstart
|
||||
window_end = ev.recurrence_end.astimezone(UTC) if ev.recurrence_end else dtstart.replace(year=dtstart.year + 1)
|
||||
holiday_dates = set()
|
||||
for h in holidays:
|
||||
hs = h.start_date
|
||||
he = h.end_date
|
||||
d = hs
|
||||
while d <= he:
|
||||
holiday_dates.add(d)
|
||||
d = d + timedelta(days=1)
|
||||
for occ_start in r.between(window_start, window_end, inc=True):
|
||||
occ_date = occ_start.date()
|
||||
if occ_date in holiday_dates:
|
||||
session.add(EventException(event_id=ev.id, exception_date=occ_date, is_skipped=True))
|
||||
session.commit()
|
||||
|
||||
regenerate_event_exceptions(event)
|
||||
|
||||
event_id_return = event.id # <-- ID vor session.close() speichern!
|
||||
session.close()
|
||||
return jsonify({"success": True, "event_id": event_id_return})
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
from server.routes.eventmedia import eventmedia_bp
|
||||
from server.routes.files import files_bp
|
||||
from server.routes.events import events_bp
|
||||
from server.routes.event_exceptions import event_exceptions_bp
|
||||
from server.routes.conversions import conversions_bp
|
||||
from server.routes.holidays import holidays_bp
|
||||
from server.routes.academic_periods import academic_periods_bp
|
||||
@@ -21,6 +22,7 @@ app = Flask(__name__)
|
||||
app.register_blueprint(clients_bp)
|
||||
app.register_blueprint(groups_bp)
|
||||
app.register_blueprint(events_bp)
|
||||
app.register_blueprint(event_exceptions_bp)
|
||||
app.register_blueprint(eventmedia_bp)
|
||||
app.register_blueprint(files_bp)
|
||||
app.register_blueprint(holidays_bp)
|
||||
|
||||
Reference in New Issue
Block a user