feat: period-scoped holiday management, archive lifecycle, and docs/release sync
- add period-scoped holiday architecture end-to-end - model: scope `SchoolHoliday` to `academic_period_id` - migrations: add holiday-period scoping, academic-period archive lifecycle, and merge migration head - API: extend holidays with manual CRUD, period validation, duplicate prevention, and overlap merge/conflict handling - recurrence: regenerate holiday exceptions using period-scoped holiday sets - improve frontend settings and holiday workflows - bind holiday import/list/manual CRUD to selected academic period - show detailed import outcomes (inserted/updated/merged/skipped/conflicts) - fix file-picker UX (visible selected filename) - align settings controls/dialogs with defined frontend design rules - scope appointments/dashboard holiday loading to active period - add shared date formatting utility - strengthen academic period lifecycle handling - add archive/restore/delete flow and backend validations/blocker checks - extend API client support for lifecycle operations - release/docs updates and cleanup - bump user-facing version to `2026.1.0-alpha.15` with new changelog entry - add tech changelog entry for alpha.15 backend changes - refactor README to concise index and archive historical implementation docs - fix Copilot instruction link diagnostics via local `.github` design-rules reference
This commit is contained in:
@@ -1,43 +1,87 @@
|
||||
from flask import Blueprint, jsonify, request
|
||||
"""
|
||||
Academic periods management routes.
|
||||
|
||||
Endpoints for full CRUD lifecycle including archive, restore, and hard delete.
|
||||
All write operations require admin+ role.
|
||||
"""
|
||||
|
||||
from flask import Blueprint, jsonify, request, session
|
||||
from server.permissions import admin_or_higher
|
||||
from server.database import Session
|
||||
from models.models import AcademicPeriod
|
||||
from datetime import datetime
|
||||
from server.serializers import dict_to_camel_case
|
||||
from models.models import AcademicPeriod, Event
|
||||
from datetime import datetime, timezone
|
||||
from sqlalchemy import and_
|
||||
from dateutil.rrule import rrulestr
|
||||
from dateutil.tz import UTC
|
||||
import sys
|
||||
|
||||
sys.path.append('/workspace')
|
||||
|
||||
academic_periods_bp = Blueprint(
|
||||
'academic_periods', __name__, url_prefix='/api/academic_periods')
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# GET ENDPOINTS
|
||||
# ============================================================================
|
||||
|
||||
@academic_periods_bp.route('', methods=['GET'])
def list_academic_periods():
    """List academic periods ordered by start_date.

    Query params:
        includeArchived: '1' to include archived periods alongside live ones.
        archivedOnly: '1' to return only archived periods (takes precedence).

    Returns:
        200 with {'periods': [camelCase period dicts]}.
    """
    # Fix: the pre-refactor body (unfiltered query, early return, close of a
    # stale `session` name) was still interleaved here; only the filtered
    # implementation below is kept.
    include_archived = request.args.get('includeArchived', '0') == '1'
    archived_only = request.args.get('archivedOnly', '0') == '1'

    db_session = Session()
    try:
        query = db_session.query(AcademicPeriod)

        if archived_only:
            query = query.filter(AcademicPeriod.is_archived == True)  # noqa: E712
        elif not include_archived:
            query = query.filter(AcademicPeriod.is_archived == False)  # noqa: E712

        periods = query.order_by(AcademicPeriod.start_date.asc()).all()

        result = [dict_to_camel_case(p.to_dict()) for p in periods]
        return jsonify({'periods': result}), 200
    finally:
        db_session.close()
|
||||
|
||||
|
||||
@academic_periods_bp.route('/<int:period_id>', methods=['GET'])
def get_academic_period(period_id):
    """Fetch one academic period by primary key; archived periods are returned too.

    Returns 200 with {'period': camelCase dict} or 404 when the id is unknown.
    """
    db = Session()
    try:
        record = db.query(AcademicPeriod).get(period_id)
        if not record:
            return jsonify({'error': 'AcademicPeriod not found'}), 404
        payload = dict_to_camel_case(record.to_dict())
        return jsonify({'period': payload}), 200
    finally:
        db.close()
|
||||
|
||||
|
||||
@academic_periods_bp.route('/active', methods=['GET'])
def get_active_academic_period():
    """Get the currently active academic period.

    Returns 200 with {'period': camelCase dict} or {'period': None} when no
    period is marked active.
    """
    # Fix: removed the duplicated pre-refactor query/return (stale `session`
    # name, snake_case serialization) that was mashed into this body.
    db_session = Session()
    try:
        period = db_session.query(AcademicPeriod).filter(
            AcademicPeriod.is_active == True  # noqa: E712
        ).first()
        if not period:
            return jsonify({'period': None}), 200
        return jsonify({'period': dict_to_camel_case(period.to_dict())}), 200
    finally:
        db_session.close()
|
||||
|
||||
|
||||
@academic_periods_bp.route('/for_date', methods=['GET'])
def get_period_for_date():
    """
    Return the non-archived academic period that covers the provided date (YYYY-MM-DD).
    If multiple match, prefer the one with the latest start_date.

    Query params:
        date: required, ISO date string YYYY-MM-DD.
    """
    date_str = request.args.get('date')
    # NOTE(review): the original parse block is hidden by a diff hunk boundary;
    # reconstructed from the visible `except ValueError` branch — confirm
    # against the full file.
    if not date_str:
        return jsonify({'error': 'Missing required query parameter: date'}), 400
    try:
        target = datetime.strptime(date_str, '%Y-%m-%d').date()
    except ValueError:
        return jsonify({'error': 'Invalid date format. Expected YYYY-MM-DD'}), 400

    db_session = Session()
    try:
        period = (
            db_session.query(AcademicPeriod)
            .filter(
                AcademicPeriod.start_date <= target,
                AcademicPeriod.end_date >= target,
                AcademicPeriod.is_archived == False  # noqa: E712
            )
            .order_by(AcademicPeriod.start_date.desc())
            .first()
        )
        return jsonify({'period': dict_to_camel_case(period.to_dict()) if period else None}), 200
    finally:
        db_session.close()
|
||||
|
||||
|
||||
@academic_periods_bp.route('/<int:period_id>/usage', methods=['GET'])
def get_period_usage(period_id):
    """
    Check what events are linked to this period.
    Used for pre-flight checks before delete/archive.

    Returns:
        {
            "usage": {
                "linked_events": count,
                "has_active_recurrence": boolean,
                "blockers": ["list of reasons why delete/archive would fail"]
            }
        }
    """
    # Note: the old POST /active endpoint body that was interleaved here was
    # superseded by POST /<id>/activate and has been removed. The docstring
    # no longer advertises "linked_media" — the response never included it.
    db_session = Session()
    try:
        period = db_session.query(AcademicPeriod).get(period_id)
        if not period:
            return jsonify({'error': 'AcademicPeriod not found'}), 404

        # Count linked events
        linked_events = db_session.query(Event).filter(
            Event.academic_period_id == period_id
        ).count()

        # Detect recurring events that still produce occurrences at/after now.
        has_active_recurrence = False
        blockers = []

        now = datetime.now(timezone.utc)
        recurring_events = db_session.query(Event).filter(
            Event.academic_period_id == period_id,
            Event.recurrence_rule != None  # noqa: E711
        ).all()

        for evt in recurring_events:
            try:
                rrule_obj = rrulestr(evt.recurrence_rule, dtstart=evt.start)
                # Any occurrence at or after "now" counts as active.
                next_occurrence = rrule_obj.after(now, inc=True)
                if next_occurrence:
                    has_active_recurrence = True
                    blockers.append(f"Recurring event '{evt.title}' has active occurrences")
                    break
            except Exception:
                pass  # unparsable rule — treated as no active recurrence

        # An active period can never be archived or deleted.
        if period.is_active:
            blockers.append("Cannot archive or delete an active period")

        return jsonify({
            'usage': {
                'linked_events': linked_events,
                'has_active_recurrence': has_active_recurrence,
                'blockers': blockers
            }
        }), 200
    finally:
        db_session.close()
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# CREATE ENDPOINT
|
||||
# ============================================================================
|
||||
|
||||
@academic_periods_bp.route('', methods=['POST'])
@admin_or_higher
def create_academic_period():
    """
    Create a new academic period.

    Request body:
        {
            "name": "Schuljahr 2026/27",
            "displayName": "SJ 26/27",
            "startDate": "2026-09-01",
            "endDate": "2027-08-31",
            "periodType": "schuljahr"
        }

    Returns 201 with the created period, 400 on validation errors, 409 on
    name or date-range collisions with other non-archived periods.
    """
    payload = request.get_json(silent=True) or {}

    # --- validation (no DB needed) ---
    name = payload.get('name', '').strip()
    if not name:
        return jsonify({'error': 'Name is required and cannot be empty'}), 400

    display_name = payload.get('displayName', '').strip() or None
    period_type = payload.get('periodType', 'schuljahr')

    try:
        start_date = datetime.strptime(payload.get('startDate'), '%Y-%m-%d').date()
        end_date = datetime.strptime(payload.get('endDate'), '%Y-%m-%d').date()
    except (ValueError, TypeError):
        return jsonify({'error': 'Invalid date format. Expected YYYY-MM-DD'}), 400

    if start_date > end_date:
        return jsonify({'error': 'Start date must be less than or equal to end date'}), 400

    valid_types = ['schuljahr', 'semester', 'trimester']
    if period_type not in valid_types:
        return jsonify({'error': f'Invalid periodType. Must be one of: {", ".join(valid_types)}'}), 400

    # --- collision checks + insert ---
    db = Session()
    try:
        name_clash = db.query(AcademicPeriod).filter(
            AcademicPeriod.name == name,
            AcademicPeriod.is_archived == False  # noqa: E712
        ).first()
        if name_clash is not None:
            return jsonify({'error': 'A non-archived period with this name already exists'}), 409

        # Date ranges may not overlap within the same period type.
        range_clash = db.query(AcademicPeriod).filter(
            AcademicPeriod.period_type == period_type,
            AcademicPeriod.is_archived == False,  # noqa: E712
            AcademicPeriod.start_date <= end_date,
            AcademicPeriod.end_date >= start_date
        ).first()
        if range_clash is not None:
            return jsonify({'error': f'Overlapping {period_type} period already exists'}), 409

        new_period = AcademicPeriod(
            name=name,
            display_name=display_name,
            start_date=start_date,
            end_date=end_date,
            period_type=period_type,
            is_active=False,
            is_archived=False
        )
        db.add(new_period)
        db.commit()
        db.refresh(new_period)

        return jsonify({'period': dict_to_camel_case(new_period.to_dict())}), 201
    finally:
        db.close()
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# UPDATE ENDPOINT
|
||||
# ============================================================================
|
||||
|
||||
@academic_periods_bp.route('/<int:period_id>', methods=['PUT'])
@admin_or_higher
def update_academic_period(period_id):
    """
    Update an academic period (cannot be archived).

    Request body (all fields optional):
        {
            "name": "...",
            "displayName": "...",
            "startDate": "YYYY-MM-DD",
            "endDate": "YYYY-MM-DD",
            "periodType": "schuljahr|semester|trimester"
        }

    Returns 200 with the updated period, 400 on validation errors,
    404 when unknown, 409 on archive/name/overlap conflicts.
    """
    db_session = Session()
    try:
        period = db_session.query(AcademicPeriod).get(period_id)
        if not period:
            return jsonify({'error': 'AcademicPeriod not found'}), 404

        if period.is_archived:
            return jsonify({'error': 'Cannot update an archived period'}), 409

        data = request.get_json(silent=True) or {}

        # Update fields if provided
        if 'name' in data:
            name = data['name'].strip()
            if not name:
                return jsonify({'error': 'Name cannot be empty'}), 400

            # Check uniqueness among non-archived (excluding self)
            existing = db_session.query(AcademicPeriod).filter(
                AcademicPeriod.name == name,
                AcademicPeriod.is_archived == False,  # noqa: E712
                AcademicPeriod.id != period_id
            ).first()
            if existing:
                return jsonify({'error': 'A non-archived period with this name already exists'}), 409

            period.name = name

        if 'displayName' in data:
            period.display_name = data['displayName'].strip() or None

        if 'periodType' in data:
            period_type = data['periodType']
            valid_types = ['schuljahr', 'semester', 'trimester']
            if period_type not in valid_types:
                return jsonify({'error': f'Invalid periodType. Must be one of: {", ".join(valid_types)}'}), 400
            period.period_type = period_type

        # Handle date updates with overlap checking
        if 'startDate' in data or 'endDate' in data:
            start_date = period.start_date
            end_date = period.end_date

            if 'startDate' in data:
                try:
                    start_date = datetime.strptime(data['startDate'], '%Y-%m-%d').date()
                except (ValueError, TypeError):
                    return jsonify({'error': 'Invalid startDate format. Expected YYYY-MM-DD'}), 400

            if 'endDate' in data:
                try:
                    end_date = datetime.strptime(data['endDate'], '%Y-%m-%d').date()
                except (ValueError, TypeError):
                    return jsonify({'error': 'Invalid endDate format. Expected YYYY-MM-DD'}), 400

            if start_date > end_date:
                return jsonify({'error': 'Start date must be less than or equal to end date'}), 400

            # Check for overlaps within same period type (excluding self)
            overlapping = db_session.query(AcademicPeriod).filter(
                AcademicPeriod.period_type == period.period_type,
                AcademicPeriod.is_archived == False,  # noqa: E712
                AcademicPeriod.id != period_id,
                AcademicPeriod.start_date <= end_date,
                AcademicPeriod.end_date >= start_date
            ).first()
            if overlapping:
                # Fix: was `period.period_type.value`, which raises
                # AttributeError for the plain-string values assigned by the
                # create endpoint; use the value directly (same format as the
                # create endpoint's overlap message).
                return jsonify({'error': f'Overlapping {period.period_type} period already exists'}), 409

            period.start_date = start_date
            period.end_date = end_date

        db_session.commit()
        db_session.refresh(period)

        return jsonify({'period': dict_to_camel_case(period.to_dict())}), 200
    finally:
        db_session.close()
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# ACTIVATE ENDPOINT
|
||||
# ============================================================================
|
||||
|
||||
@academic_periods_bp.route('/<int:period_id>/activate', methods=['POST'])
@admin_or_higher
def activate_academic_period(period_id):
    """Activate one academic period, deactivating every other one.

    Archived periods cannot be activated. Returns 200 with the activated
    period, 404 when unknown, 409 when archived.
    """
    db = Session()
    try:
        target = db.query(AcademicPeriod).get(period_id)
        if not target:
            return jsonify({'error': 'AcademicPeriod not found'}), 404
        if target.is_archived:
            return jsonify({'error': 'Cannot activate an archived period'}), 409

        # At most one period may be active: clear the flag everywhere first.
        db.query(AcademicPeriod).update({AcademicPeriod.is_active: False})
        target.is_active = True
        db.commit()
        db.refresh(target)

        return jsonify({'period': dict_to_camel_case(target.to_dict())}), 200
    finally:
        db.close()
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# ARCHIVE/RESTORE ENDPOINTS
|
||||
# ============================================================================
|
||||
|
||||
@academic_periods_bp.route('/<int:period_id>/archive', methods=['POST'])
@admin_or_higher
def archive_academic_period(period_id):
    """Soft-delete a period by marking it archived.

    Refused (409) for already-archived periods, the active period, and
    periods whose recurring events still have future occurrences.
    """
    db = Session()
    try:
        target = db.query(AcademicPeriod).get(period_id)
        if not target:
            return jsonify({'error': 'AcademicPeriod not found'}), 404
        if target.is_archived:
            return jsonify({'error': 'Period already archived'}), 409
        if target.is_active:
            return jsonify({'error': 'Cannot archive an active period'}), 409

        # Block archiving while any recurring event in this period still
        # produces an occurrence at or after "now".
        now = datetime.now(timezone.utc)
        recurring = db.query(Event).filter(
            Event.academic_period_id == period_id,
            Event.recurrence_rule != None  # noqa: E711
        ).all()
        for evt in recurring:
            try:
                rule = rrulestr(evt.recurrence_rule, dtstart=evt.start)
                if rule.after(now, inc=True):
                    return jsonify({'error': f'Cannot archive: recurring event "{evt.title}" has active occurrences'}), 409
            except Exception:
                pass  # unparsable rule — treated as no future occurrences

        # Record who archived it and when (flask session holds the actor).
        target.is_archived = True
        target.archived_at = datetime.now(timezone.utc)
        target.archived_by = session.get('user_id')
        db.commit()
        db.refresh(target)

        return jsonify({'period': dict_to_camel_case(target.to_dict())}), 200
    finally:
        db.close()
|
||||
|
||||
|
||||
@academic_periods_bp.route('/<int:period_id>/restore', methods=['POST'])
@admin_or_higher
def restore_academic_period(period_id):
    """Un-archive a period, returning it to the inactive state.

    Returns 200 with the restored period, 404 when unknown, 409 when the
    period is not archived.
    """
    db = Session()
    try:
        target = db.query(AcademicPeriod).get(period_id)
        if not target:
            return jsonify({'error': 'AcademicPeriod not found'}), 404
        if not target.is_archived:
            return jsonify({'error': 'Period is not archived'}), 409

        # Clear the archive marker and its audit fields.
        target.is_archived = False
        target.archived_at = None
        target.archived_by = None
        db.commit()
        db.refresh(target)

        return jsonify({'period': dict_to_camel_case(target.to_dict())}), 200
    finally:
        db.close()
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# DELETE ENDPOINT
|
||||
# ============================================================================
|
||||
|
||||
@academic_periods_bp.route('/<int:period_id>', methods=['DELETE'])
@admin_or_higher
def delete_academic_period(period_id):
    """Hard-delete an academic period.

    Only archived, inactive periods with zero linked events may be deleted;
    every other state returns 409.
    """
    db = Session()
    try:
        target = db.query(AcademicPeriod).get(period_id)
        if not target:
            return jsonify({'error': 'AcademicPeriod not found'}), 404
        if not target.is_archived:
            return jsonify({'error': 'Cannot hard-delete a non-archived period'}), 409
        if target.is_active:
            return jsonify({'error': 'Cannot hard-delete an active period'}), 409

        # Refuse while any event still references this period.
        linked_events = db.query(Event).filter(
            Event.academic_period_id == period_id
        ).count()
        if linked_events > 0:
            return jsonify({'error': f'Cannot delete: {linked_events} event(s) linked to this period'}), 409

        db.delete(target)
        db.commit()

        return jsonify({'message': 'Period deleted successfully'}), 200
    finally:
        db.close()
|
||||
|
||||
@@ -487,7 +487,16 @@ def create_event():
|
||||
if not (ev.skip_holidays and ev.recurrence_rule):
|
||||
return
|
||||
# Get holidays
|
||||
holidays = session.query(SchoolHoliday).all()
|
||||
holidays_query = session.query(SchoolHoliday)
|
||||
if ev.academic_period_id is not None:
|
||||
holidays_query = holidays_query.filter(
|
||||
SchoolHoliday.academic_period_id == ev.academic_period_id
|
||||
)
|
||||
else:
|
||||
holidays_query = holidays_query.filter(
|
||||
SchoolHoliday.academic_period_id.is_(None)
|
||||
)
|
||||
holidays = holidays_query.all()
|
||||
dtstart = ev.start.astimezone(UTC)
|
||||
r = rrulestr(ev.recurrence_rule, dtstart=dtstart)
|
||||
window_start = dtstart
|
||||
@@ -588,7 +597,16 @@ def update_event(event_id):
|
||||
if not (ev.skip_holidays and ev.recurrence_rule):
|
||||
return
|
||||
# Get holidays
|
||||
holidays = session.query(SchoolHoliday).all()
|
||||
holidays_query = session.query(SchoolHoliday)
|
||||
if ev.academic_period_id is not None:
|
||||
holidays_query = holidays_query.filter(
|
||||
SchoolHoliday.academic_period_id == ev.academic_period_id
|
||||
)
|
||||
else:
|
||||
holidays_query = holidays_query.filter(
|
||||
SchoolHoliday.academic_period_id.is_(None)
|
||||
)
|
||||
holidays = holidays_query.all()
|
||||
dtstart = ev.start.astimezone(UTC)
|
||||
r = rrulestr(ev.recurrence_rule, dtstart=dtstart)
|
||||
window_start = dtstart
|
||||
|
||||
@@ -1,25 +1,203 @@
|
||||
from flask import Blueprint, request, jsonify
|
||||
from server.permissions import admin_or_higher
|
||||
from server.database import Session
|
||||
from models.models import SchoolHoliday
|
||||
from datetime import datetime
|
||||
from models.models import AcademicPeriod, SchoolHoliday, Event, EventException
|
||||
from datetime import datetime, date, timedelta
|
||||
from sqlalchemy import func
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
import csv
|
||||
import io
|
||||
|
||||
holidays_bp = Blueprint("holidays", __name__, url_prefix="/api/holidays")
|
||||
|
||||
|
||||
def _regenerate_for_period(session, academic_period_id) -> int:
    """Rebuild holiday skip-exceptions for every skip_holidays recurring event in a period.

    Pure skip exceptions (no override fields set) are deleted and re-created
    from the period's current holiday set; user-edited overrides survive.
    A None academic_period_id selects the unscoped bucket. Returns the number
    of events processed.
    """
    from dateutil.rrule import rrulestr
    from dateutil.tz import UTC

    def scoped(query, column):
        # Match the exact period, treating None as the "unscoped" bucket.
        if academic_period_id is None:
            return query.filter(column.is_(None))
        return query.filter(column == academic_period_id)

    events = scoped(
        session.query(Event).filter(
            Event.skip_holidays == True,  # noqa: E712
            Event.recurrence_rule.isnot(None),
        ),
        Event.academic_period_id,
    ).all()

    holidays = scoped(
        session.query(SchoolHoliday), SchoolHoliday.academic_period_id
    ).all()

    # Expand each holiday's inclusive date range into individual days.
    holiday_dates = set()
    for holiday in holidays:
        day = holiday.start_date
        while day <= holiday.end_date:
            holiday_dates.add(day)
            day += timedelta(days=1)

    for event in events:
        # Remove previously generated pure-skip exceptions; anything with an
        # override field set was hand-edited and is kept.
        session.query(EventException).filter(
            EventException.event_id == event.id,
            EventException.is_skipped == True,  # noqa: E712
            EventException.override_title.is_(None),
            EventException.override_description.is_(None),
            EventException.override_start.is_(None),
            EventException.override_end.is_(None),
        ).delete(synchronize_session=False)

        if not holiday_dates:
            continue

        try:
            anchor = event.start.astimezone(UTC)
            rule = rrulestr(event.recurrence_rule, dtstart=anchor)
            # Expansion window: recurrence_end when set, else one year out.
            horizon = (
                event.recurrence_end.astimezone(UTC)
                if event.recurrence_end
                else anchor.replace(year=anchor.year + 1)
            )
            for occurrence in rule.between(anchor, horizon, inc=True):
                occurrence_date = occurrence.date()
                if occurrence_date in holiday_dates:
                    session.add(EventException(
                        event_id=event.id,
                        exception_date=occurrence_date,
                        is_skipped=True,
                    ))
        except Exception:
            pass  # malformed recurrence rule — skip silently

    return len(events)
|
||||
|
||||
|
||||
def _parse_academic_period_id(raw_value):
|
||||
if raw_value in (None, ""):
|
||||
return None
|
||||
try:
|
||||
return int(raw_value)
|
||||
except (TypeError, ValueError) as exc:
|
||||
raise ValueError("Invalid academicPeriodId") from exc
|
||||
|
||||
|
||||
def _validate_holiday_dates_within_period(period, start_date, end_date, label="Ferienblock"):
|
||||
if period is None or start_date is None or end_date is None:
|
||||
return
|
||||
if start_date < period.start_date or end_date > period.end_date:
|
||||
period_name = period.display_name or period.name
|
||||
raise ValueError(
|
||||
f"{label} liegt außerhalb der akademischen Periode \"{period_name}\" "
|
||||
f"({period.start_date.isoformat()} bis {period.end_date.isoformat()})"
|
||||
)
|
||||
|
||||
|
||||
def _normalize_optional_text(value):
|
||||
normalized = (value or "").strip()
|
||||
return normalized or None
|
||||
|
||||
|
||||
def _apply_period_filter(query, academic_period_id):
    """Constrain a SchoolHoliday query to one period; None selects unscoped rows."""
    if academic_period_id is not None:
        return query.filter(SchoolHoliday.academic_period_id == academic_period_id)
    return query.filter(SchoolHoliday.academic_period_id.is_(None))
|
||||
|
||||
|
||||
def _identity_key(name, region):
    """Case-insensitive (name, region) tuple used to compare holiday identities."""
    name_part = _normalize_optional_text(name) or ""
    region_part = _normalize_optional_text(region) or ""
    return name_part.casefold(), region_part.casefold()
|
||||
|
||||
|
||||
def _is_same_identity(holiday, name, region):
    """True when the stored holiday matches (name, region) case-insensitively."""
    candidate = _identity_key(name, region)
    return candidate == _identity_key(holiday.name, holiday.region)
|
||||
|
||||
|
||||
def _find_overlapping_holidays(session, academic_period_id, start_date, end_date, exclude_id=None):
    """Return period-scoped holidays overlapping or directly adjacent to a date range.

    The window is widened by one day on each side so blocks that merely touch
    the range are also returned. exclude_id skips one row (for updates).
    Results are ordered by start_date, then id.
    """
    window_end = end_date + timedelta(days=1)
    window_start = start_date - timedelta(days=1)

    query = _apply_period_filter(session.query(SchoolHoliday), academic_period_id)
    query = query.filter(
        SchoolHoliday.start_date <= window_end,
        SchoolHoliday.end_date >= window_start,
    )
    if exclude_id is not None:
        query = query.filter(SchoolHoliday.id != exclude_id)

    return query.order_by(SchoolHoliday.start_date.asc(), SchoolHoliday.id.asc()).all()
|
||||
|
||||
|
||||
def _split_overlap_candidates(overlaps, name, region):
    """Partition overlapping holidays into (same-identity, conflicting) lists."""
    same_identity = []
    conflicts = []
    for holiday in overlaps:
        bucket = same_identity if _is_same_identity(holiday, name, region) else conflicts
        bucket.append(holiday)
    return same_identity, conflicts
|
||||
|
||||
|
||||
def _merge_holiday_group(session, keeper, others, name, start_date, end_date, region, source_file_name=None):
    """Fold the incoming range plus *others* into *keeper*; delete the absorbed rows.

    keeper's date range becomes the union (min start, max end) of the incoming
    range, keeper itself, and all others. Returns the surviving keeper row.
    """
    starts = [start_date, keeper.start_date]
    ends = [end_date, keeper.end_date]
    for other in others:
        starts.append(other.start_date)
        ends.append(other.end_date)

    # Keep the incoming name when it normalizes to something non-empty.
    keeper.name = _normalize_optional_text(name) or keeper.name
    keeper.region = _normalize_optional_text(region)
    keeper.start_date = min(starts)
    keeper.end_date = max(ends)
    if source_file_name is not None:
        keeper.source_file_name = source_file_name

    for other in others:
        session.delete(other)
    return keeper
|
||||
|
||||
|
||||
def _format_overlap_conflict(label, conflicts):
|
||||
conflict_labels = ", ".join(
|
||||
f'{holiday.name} ({holiday.start_date.isoformat()} bis {holiday.end_date.isoformat()})'
|
||||
for holiday in conflicts[:3]
|
||||
)
|
||||
suffix = "" if len(conflicts) <= 3 else f" und {len(conflicts) - 3} weitere"
|
||||
return f"{label} überschneidet sich mit bestehenden Ferienblöcken: {conflict_labels}{suffix}"
|
||||
|
||||
|
||||
def _find_duplicate_holiday(session, academic_period_id, name, start_date, end_date, region, exclude_id=None):
    """Find an exact duplicate holiday (same name/dates/region, case-insensitive) in a period.

    Returns the first matching SchoolHoliday or None. exclude_id skips one
    row (used when updating an existing holiday).
    """
    normalized_name = _normalize_optional_text(name)
    normalized_region = _normalize_optional_text(region)

    # Fix: _normalize_optional_text returns None for blank names, and
    # None.casefold() raised AttributeError; fall back to "" so a blank
    # name simply never matches a named row.
    name_key = (normalized_name or "").casefold()

    query = session.query(SchoolHoliday).filter(
        func.lower(SchoolHoliday.name) == name_key,
        SchoolHoliday.start_date == start_date,
        SchoolHoliday.end_date == end_date,
    )
    query = _apply_period_filter(query, academic_period_id)

    # NULL region only matches NULL; otherwise compare case-insensitively.
    if normalized_region is None:
        query = query.filter(SchoolHoliday.region.is_(None))
    else:
        query = query.filter(func.lower(SchoolHoliday.region) == normalized_region.casefold())

    if exclude_id is not None:
        query = query.filter(SchoolHoliday.id != exclude_id)

    return query.first()
|
||||
|
||||
|
||||
@holidays_bp.route("", methods=["GET"])
def list_holidays():
    """List school holidays, optionally filtered by region and academic period.

    Query params:
        region: exact-match region filter.
        academicPeriodId / academic_period_id: integer period filter.

    Returns 200 with {"holidays": [...]}, or 400 for a malformed period id.
    """
    # Fix: removed the leftover pre-refactor body that ran (and closed the
    # session) before the try block, making the period-scoped path dead code.
    session = Session()
    try:
        region = request.args.get("region")
        academic_period_id = _parse_academic_period_id(
            request.args.get("academicPeriodId") or request.args.get("academic_period_id")
        )

        q = session.query(SchoolHoliday)
        if region:
            q = q.filter(SchoolHoliday.region == region)
        if academic_period_id is not None:
            q = q.filter(SchoolHoliday.academic_period_id == academic_period_id)

        rows = q.order_by(SchoolHoliday.start_date.asc(), SchoolHoliday.end_date.asc()).all()
        data = [r.to_dict() for r in rows]
        return jsonify({"holidays": data})
    except ValueError as exc:
        # Raised by _parse_academic_period_id on a non-numeric period id.
        return jsonify({"error": str(exc)}), 400
    finally:
        session.close()
|
||||
|
||||
|
||||
@holidays_bp.route("/upload", methods=["POST"])
|
||||
@@ -41,6 +219,7 @@ def upload_holidays():
|
||||
if file.filename == "":
|
||||
return jsonify({"error": "No selected file"}), 400
|
||||
|
||||
session = Session()
|
||||
try:
|
||||
raw = file.read()
|
||||
# Try UTF-8 first (strict), then cp1252, then latin-1 as last resort
|
||||
@@ -79,9 +258,35 @@ def upload_holidays():
|
||||
continue
|
||||
raise ValueError(f"Unsupported date format: {s}")
|
||||
|
||||
session = Session()
|
||||
academic_period_id = _parse_academic_period_id(
|
||||
request.form.get("academicPeriodId") or request.form.get("academic_period_id")
|
||||
)
|
||||
|
||||
period = None
|
||||
if academic_period_id is not None:
|
||||
period = session.query(AcademicPeriod).get(academic_period_id)
|
||||
if not period:
|
||||
return jsonify({"error": "Academic period not found"}), 404
|
||||
if period.is_archived:
|
||||
return jsonify({"error": "Cannot import holidays into an archived academic period"}), 409
|
||||
|
||||
inserted = 0
|
||||
updated = 0
|
||||
merged_overlaps = 0
|
||||
skipped_duplicates = 0
|
||||
conflicts = []
|
||||
|
||||
def build_exact_key(name, start_date, end_date, region):
|
||||
normalized_name = _normalize_optional_text(name)
|
||||
normalized_region = _normalize_optional_text(region)
|
||||
return (
|
||||
(normalized_name or "").casefold(),
|
||||
start_date,
|
||||
end_date,
|
||||
(normalized_region or "").casefold(),
|
||||
)
|
||||
|
||||
seen_in_file = set()
|
||||
|
||||
# First, try headered CSV via DictReader
|
||||
dict_reader = csv.DictReader(io.StringIO(
|
||||
@@ -90,34 +295,67 @@ def upload_holidays():
|
||||
has_required_headers = {"name", "start_date",
|
||||
"end_date"}.issubset(set(fieldnames_lower))
|
||||
|
||||
def upsert(name: str, start_date, end_date, region=None):
|
||||
nonlocal inserted, updated
|
||||
def upsert(name: str, start_date, end_date, region=None, source_label="Ferienblock"):
|
||||
nonlocal inserted, updated, merged_overlaps, skipped_duplicates
|
||||
if not name or not start_date or not end_date:
|
||||
return
|
||||
existing = (
|
||||
session.query(SchoolHoliday)
|
||||
.filter(
|
||||
SchoolHoliday.name == name,
|
||||
SchoolHoliday.start_date == start_date,
|
||||
SchoolHoliday.end_date == end_date,
|
||||
SchoolHoliday.region.is_(
|
||||
region) if region is None else SchoolHoliday.region == region,
|
||||
)
|
||||
.first()
|
||||
_validate_holiday_dates_within_period(period, start_date, end_date, source_label)
|
||||
normalized_name = _normalize_optional_text(name)
|
||||
normalized_region = _normalize_optional_text(region)
|
||||
key = build_exact_key(normalized_name, start_date, end_date, normalized_region)
|
||||
|
||||
if key in seen_in_file:
|
||||
skipped_duplicates += 1
|
||||
return
|
||||
seen_in_file.add(key)
|
||||
|
||||
duplicate = _find_duplicate_holiday(
|
||||
session,
|
||||
academic_period_id,
|
||||
normalized_name,
|
||||
start_date,
|
||||
end_date,
|
||||
normalized_region,
|
||||
)
|
||||
if existing:
|
||||
existing.region = region
|
||||
existing.source_file_name = file.filename
|
||||
if duplicate:
|
||||
duplicate.source_file_name = file.filename
|
||||
updated += 1
|
||||
else:
|
||||
session.add(SchoolHoliday(
|
||||
name=name,
|
||||
start_date=start_date,
|
||||
end_date=end_date,
|
||||
region=region,
|
||||
return
|
||||
|
||||
overlaps = _find_overlapping_holidays(
|
||||
session,
|
||||
academic_period_id,
|
||||
start_date,
|
||||
end_date,
|
||||
)
|
||||
same_identity, conflicting = _split_overlap_candidates(overlaps, normalized_name, normalized_region)
|
||||
if conflicting:
|
||||
conflicts.append(_format_overlap_conflict(source_label, conflicting))
|
||||
return
|
||||
if same_identity:
|
||||
keeper = same_identity[0]
|
||||
_merge_holiday_group(
|
||||
session,
|
||||
keeper,
|
||||
same_identity[1:],
|
||||
normalized_name,
|
||||
start_date,
|
||||
end_date,
|
||||
normalized_region,
|
||||
source_file_name=file.filename,
|
||||
))
|
||||
inserted += 1
|
||||
)
|
||||
merged_overlaps += 1
|
||||
return
|
||||
|
||||
session.add(SchoolHoliday(
|
||||
academic_period_id=academic_period_id,
|
||||
name=normalized_name,
|
||||
start_date=start_date,
|
||||
end_date=end_date,
|
||||
region=normalized_region,
|
||||
source_file_name=file.filename,
|
||||
))
|
||||
inserted += 1
|
||||
|
||||
if has_required_headers:
|
||||
for row in dict_reader:
|
||||
@@ -131,12 +369,12 @@ def upload_holidays():
|
||||
continue
|
||||
region = (norm.get("region")
|
||||
or None) if "region" in norm else None
|
||||
upsert(name, start_date, end_date, region)
|
||||
upsert(name, start_date, end_date, region, f"Zeile {dict_reader.line_num}")
|
||||
else:
|
||||
# Fallback: headerless rows -> use columns [1]=name, [2]=start, [3]=end
|
||||
reader = csv.reader(io.StringIO(
|
||||
content), dialect=dialect) if dialect else csv.reader(io.StringIO(content))
|
||||
for row in reader:
|
||||
for row_index, row in enumerate(reader, start=1):
|
||||
if not row:
|
||||
continue
|
||||
# tolerate varying column counts (4 or 5); ignore first and optional last
|
||||
@@ -152,10 +390,214 @@ def upload_holidays():
|
||||
end_date = parse_date(end_raw)
|
||||
except ValueError:
|
||||
continue
|
||||
upsert(name, start_date, end_date, None)
|
||||
upsert(name, start_date, end_date, None, f"Zeile {row_index}")
|
||||
|
||||
session.commit()
|
||||
session.close()
|
||||
return jsonify({"success": True, "inserted": inserted, "updated": updated})
|
||||
except Exception as e:
|
||||
return jsonify({
|
||||
"success": True,
|
||||
"inserted": inserted,
|
||||
"updated": updated,
|
||||
"merged_overlaps": merged_overlaps,
|
||||
"skipped_duplicates": skipped_duplicates,
|
||||
"conflicts": conflicts,
|
||||
"academic_period_id": academic_period_id,
|
||||
})
|
||||
except ValueError as e:
|
||||
session.rollback()
|
||||
return jsonify({"error": str(e)}), 400
|
||||
except Exception as e:
|
||||
session.rollback()
|
||||
return jsonify({"error": str(e)}), 400
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
|
||||
@holidays_bp.route("", methods=["POST"])
|
||||
@admin_or_higher
|
||||
def create_holiday():
|
||||
data = request.json or {}
|
||||
name = _normalize_optional_text(data.get("name")) or ""
|
||||
start_date_str = (data.get("start_date") or "").strip()
|
||||
end_date_str = (data.get("end_date") or "").strip()
|
||||
region = _normalize_optional_text(data.get("region"))
|
||||
|
||||
if not name or not start_date_str or not end_date_str:
|
||||
return jsonify({"error": "name, start_date und end_date sind erforderlich"}), 400
|
||||
try:
|
||||
start_date_val = date.fromisoformat(start_date_str)
|
||||
end_date_val = date.fromisoformat(end_date_str)
|
||||
except ValueError:
|
||||
return jsonify({"error": "Ung\u00fcltiges Datumsformat. Erwartet: YYYY-MM-DD"}), 400
|
||||
if end_date_val < start_date_val:
|
||||
return jsonify({"error": "Enddatum muss nach oder gleich Startdatum sein"}), 400
|
||||
|
||||
academic_period_id = _parse_academic_period_id(data.get("academic_period_id"))
|
||||
session = Session()
|
||||
try:
|
||||
period = None
|
||||
if academic_period_id is not None:
|
||||
period = session.query(AcademicPeriod).get(academic_period_id)
|
||||
if not period:
|
||||
return jsonify({"error": "Akademische Periode nicht gefunden"}), 404
|
||||
if period.is_archived:
|
||||
return jsonify({"error": "Archivierte Perioden k\u00f6nnen nicht bearbeitet werden"}), 409
|
||||
_validate_holiday_dates_within_period(period, start_date_val, end_date_val)
|
||||
duplicate = _find_duplicate_holiday(
|
||||
session,
|
||||
academic_period_id,
|
||||
name,
|
||||
start_date_val,
|
||||
end_date_val,
|
||||
region,
|
||||
)
|
||||
if duplicate:
|
||||
return jsonify({"error": "Ein Ferienblock mit diesem Namen und Zeitraum existiert bereits in dieser Periode"}), 409
|
||||
overlaps = _find_overlapping_holidays(session, academic_period_id, start_date_val, end_date_val)
|
||||
same_identity, conflicting = _split_overlap_candidates(overlaps, name, region)
|
||||
if conflicting:
|
||||
return jsonify({"error": _format_overlap_conflict("Der Ferienblock", conflicting)}), 409
|
||||
merged = False
|
||||
if same_identity:
|
||||
holiday = _merge_holiday_group(
|
||||
session,
|
||||
same_identity[0],
|
||||
same_identity[1:],
|
||||
name,
|
||||
start_date_val,
|
||||
end_date_val,
|
||||
region,
|
||||
source_file_name="manual",
|
||||
)
|
||||
merged = True
|
||||
else:
|
||||
holiday = SchoolHoliday(
|
||||
academic_period_id=academic_period_id,
|
||||
name=name,
|
||||
start_date=start_date_val,
|
||||
end_date=end_date_val,
|
||||
region=region,
|
||||
source_file_name="manual",
|
||||
)
|
||||
session.add(holiday)
|
||||
session.flush()
|
||||
regenerated = _regenerate_for_period(session, academic_period_id)
|
||||
session.commit()
|
||||
return jsonify({"success": True, "holiday": holiday.to_dict(), "regenerated_events": regenerated, "merged": merged}), 201
|
||||
except IntegrityError:
|
||||
session.rollback()
|
||||
return jsonify({"error": "Ein Ferienblock mit diesem Namen und Zeitraum existiert bereits in dieser Periode"}), 409
|
||||
except ValueError as e:
|
||||
session.rollback()
|
||||
return jsonify({"error": str(e)}), 400
|
||||
except Exception as e:
|
||||
session.rollback()
|
||||
return jsonify({"error": str(e)}), 400
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
|
||||
@holidays_bp.route("/<int:holiday_id>", methods=["PUT"])
|
||||
@admin_or_higher
|
||||
def update_holiday(holiday_id):
|
||||
data = request.json or {}
|
||||
session = Session()
|
||||
try:
|
||||
holiday = session.query(SchoolHoliday).get(holiday_id)
|
||||
if not holiday:
|
||||
return jsonify({"error": "Ferienblock nicht gefunden"}), 404
|
||||
period = None
|
||||
if holiday.academic_period_id is not None:
|
||||
period = session.query(AcademicPeriod).get(holiday.academic_period_id)
|
||||
if period and period.is_archived:
|
||||
return jsonify({"error": "Archivierte Perioden k\u00f6nnen nicht bearbeitet werden"}), 409
|
||||
if "name" in data:
|
||||
holiday.name = _normalize_optional_text(data["name"]) or ""
|
||||
if "start_date" in data:
|
||||
try:
|
||||
holiday.start_date = date.fromisoformat((data["start_date"] or "").strip())
|
||||
except ValueError:
|
||||
return jsonify({"error": "Ung\u00fcltiges Startdatum. Erwartet: YYYY-MM-DD"}), 400
|
||||
if "end_date" in data:
|
||||
try:
|
||||
holiday.end_date = date.fromisoformat((data["end_date"] or "").strip())
|
||||
except ValueError:
|
||||
return jsonify({"error": "Ung\u00fcltiges Enddatum. Erwartet: YYYY-MM-DD"}), 400
|
||||
if "region" in data:
|
||||
holiday.region = _normalize_optional_text(data["region"])
|
||||
if not holiday.name:
|
||||
return jsonify({"error": "Name darf nicht leer sein"}), 400
|
||||
if holiday.end_date < holiday.start_date:
|
||||
return jsonify({"error": "Enddatum muss nach oder gleich Startdatum sein"}), 400
|
||||
_validate_holiday_dates_within_period(period, holiday.start_date, holiday.end_date)
|
||||
duplicate = _find_duplicate_holiday(
|
||||
session,
|
||||
holiday.academic_period_id,
|
||||
holiday.name,
|
||||
holiday.start_date,
|
||||
holiday.end_date,
|
||||
holiday.region,
|
||||
exclude_id=holiday.id,
|
||||
)
|
||||
if duplicate:
|
||||
return jsonify({"error": "Ein Ferienblock mit diesem Namen und Zeitraum existiert bereits in dieser Periode"}), 409
|
||||
overlaps = _find_overlapping_holidays(
|
||||
session,
|
||||
holiday.academic_period_id,
|
||||
holiday.start_date,
|
||||
holiday.end_date,
|
||||
exclude_id=holiday.id,
|
||||
)
|
||||
same_identity, conflicting = _split_overlap_candidates(overlaps, holiday.name, holiday.region)
|
||||
if conflicting:
|
||||
return jsonify({"error": _format_overlap_conflict("Der Ferienblock", conflicting)}), 409
|
||||
merged = False
|
||||
if same_identity:
|
||||
_merge_holiday_group(
|
||||
session,
|
||||
holiday,
|
||||
same_identity,
|
||||
holiday.name,
|
||||
holiday.start_date,
|
||||
holiday.end_date,
|
||||
holiday.region,
|
||||
source_file_name="manual",
|
||||
)
|
||||
merged = True
|
||||
session.flush()
|
||||
academic_period_id = holiday.academic_period_id
|
||||
regenerated = _regenerate_for_period(session, academic_period_id)
|
||||
session.commit()
|
||||
return jsonify({"success": True, "holiday": holiday.to_dict(), "regenerated_events": regenerated, "merged": merged})
|
||||
except IntegrityError:
|
||||
session.rollback()
|
||||
return jsonify({"error": "Ein Ferienblock mit diesem Namen und Zeitraum existiert bereits in dieser Periode"}), 409
|
||||
except Exception as e:
|
||||
session.rollback()
|
||||
return jsonify({"error": str(e)}), 400
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
|
||||
@holidays_bp.route("/<int:holiday_id>", methods=["DELETE"])
|
||||
@admin_or_higher
|
||||
def delete_holiday(holiday_id):
|
||||
session = Session()
|
||||
try:
|
||||
holiday = session.query(SchoolHoliday).get(holiday_id)
|
||||
if not holiday:
|
||||
return jsonify({"error": "Ferienblock nicht gefunden"}), 404
|
||||
if holiday.academic_period_id is not None:
|
||||
period = session.query(AcademicPeriod).get(holiday.academic_period_id)
|
||||
if period and period.is_archived:
|
||||
return jsonify({"error": "Archivierte Perioden k\u00f6nnen nicht bearbeitet werden"}), 409
|
||||
academic_period_id = holiday.academic_period_id
|
||||
session.delete(holiday)
|
||||
session.flush()
|
||||
regenerated = _regenerate_for_period(session, academic_period_id)
|
||||
session.commit()
|
||||
return jsonify({"success": True, "regenerated_events": regenerated})
|
||||
except Exception as e:
|
||||
session.rollback()
|
||||
return jsonify({"error": str(e)}), 400
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
Reference in New Issue
Block a user