feat(academic-periods): period selector, active-period API, holiday indicators; UI polish; bump version

Dashboard: add Syncfusion academic-period dropdown next to the group selector; on change, navigate the scheduler to today's month/day within the selected period's year; show an adjacent holiday-plan badge and keep the "holidays in view" counter on the right; compact dropdown widths for a tighter toolbar; block scheduling on holidays by default, with block entries styled like all-day events and black text.

API: add academic-periods routes (list, get active, set active via POST, for_date); register the blueprint in wsgi.

Holidays: support TXT/CSV upload; headerless TXT uses columns 2-4; region remains null.

Docs: update shared Copilot instructions with academic-periods endpoints and dashboard integration details.
This commit is contained in:
84
server/routes/academic_periods.py
Normal file
84
server/routes/academic_periods.py
Normal file
@@ -0,0 +1,84 @@
|
||||
from flask import Blueprint, jsonify, request
|
||||
from server.database import Session
|
||||
from models.models import AcademicPeriod
|
||||
from datetime import datetime
|
||||
|
||||
# Blueprint bundling all /api/academic_periods endpoints; registered in wsgi.
academic_periods_bp = Blueprint(
    'academic_periods', __name__, url_prefix='/api/academic_periods')
|
||||
|
||||
|
||||
@academic_periods_bp.route('', methods=['GET'])
def list_academic_periods():
    """Return every academic period, ordered by ascending start date.

    Response: 200 with {'periods': [<period dict>, ...]}.
    """
    session = Session()
    try:
        ordered = (
            session.query(AcademicPeriod)
            .order_by(AcademicPeriod.start_date.asc())
            .all()
        )
        return jsonify({
            'periods': [period.to_dict() for period in ordered]
        })
    finally:
        # Always release the session, even if serialization fails.
        session.close()
|
||||
|
||||
|
||||
@academic_periods_bp.route('/active', methods=['GET'])
def get_active_academic_period():
    """Return the academic period currently flagged active.

    Response: 200 with {'period': {...}} when one is active,
    or {'period': None} when no period is flagged.
    """
    session = Session()
    try:
        # .is_(True) is the SQLAlchemy-idiomatic boolean filter
        # (equivalent SQL to `== True`, but lint-clean per E712).
        period = session.query(AcademicPeriod).filter(
            AcademicPeriod.is_active.is_(True)).first()
        # Single return path; None serializes to JSON null.
        return jsonify({'period': period.to_dict() if period else None}), 200
    finally:
        session.close()
|
||||
|
||||
|
||||
@academic_periods_bp.route('/for_date', methods=['GET'])
def get_period_for_date():
    """Look up the academic period that contains a given calendar date.

    Requires query param ``date`` in YYYY-MM-DD form. When several
    periods overlap the date, the one with the latest start_date wins.
    Returns 200 with {'period': {...} | None}, or 400 on a missing or
    malformed date.
    """
    raw = request.args.get('date')
    if not raw:
        return jsonify({'error': 'Missing required query param: date (YYYY-MM-DD)'}), 400
    try:
        day = datetime.strptime(raw, '%Y-%m-%d').date()
    except ValueError:
        return jsonify({'error': 'Invalid date format. Expected YYYY-MM-DD'}), 400

    session = Session()
    try:
        covering = session.query(AcademicPeriod).filter(
            AcademicPeriod.start_date <= day,
            AcademicPeriod.end_date >= day,
        ).order_by(AcademicPeriod.start_date.desc()).first()
        return jsonify({'period': covering.to_dict() if covering else None}), 200
    finally:
        session.close()
|
||||
|
||||
|
||||
@academic_periods_bp.route('/active', methods=['POST'])
def set_active_academic_period():
    """Mark one academic period as active and deactivate all others.

    Expects JSON body {'id': <period id>}.
    Responses: 200 with the updated period, 400 when 'id' is missing,
    404 when no period has that id.
    """
    data = request.get_json(silent=True) or {}
    period_id = data.get('id')
    if period_id is None:
        return jsonify({'error': 'Missing required field: id'}), 400
    session = Session()
    try:
        # Session.get() replaces the legacy Query.get(), which is
        # deprecated in SQLAlchemy 1.4 and removed in 2.0.
        target = session.get(AcademicPeriod, period_id)
        if not target:
            return jsonify({'error': 'AcademicPeriod not found'}), 404

        # Deactivate whatever is currently active, then flag the target,
        # so at most one period is ever active at a time.
        session.query(AcademicPeriod).filter(
            AcademicPeriod.is_active.is_(True)
        ).update({AcademicPeriod.is_active: False})
        target.is_active = True
        session.commit()
        # Re-read from the DB so the response reflects committed state.
        session.refresh(target)
        return jsonify({'period': target.to_dict()}), 200
    except Exception:
        # Don't leave a half-applied activation pending if commit fails.
        session.rollback()
        raise
    finally:
        session.close()
|
||||
@@ -24,9 +24,14 @@ def list_holidays():
|
||||
@holidays_bp.route("/upload", methods=["POST"])
|
||||
def upload_holidays():
|
||||
"""
|
||||
Accepts a CSV file upload (multipart/form-data) with columns like:
|
||||
name,start_date,end_date,region
|
||||
Dates can be in ISO (YYYY-MM-DD) or common European format (DD.MM.YYYY).
|
||||
Accepts a CSV/TXT file upload (multipart/form-data).
|
||||
|
||||
Supported formats:
|
||||
1) Headered CSV with columns (case-insensitive): name, start_date, end_date[, region]
|
||||
- Dates: YYYY-MM-DD, DD.MM.YYYY, YYYY/MM/DD, or YYYYMMDD
|
||||
2) Headerless CSV/TXT lines with columns:
|
||||
[internal, name, start_yyyymmdd, end_yyyymmdd, optional_internal]
|
||||
- Only columns 2-4 are used; 1 and 5 are ignored.
|
||||
"""
|
||||
if "file" not in request.files:
|
||||
return jsonify({"error": "No file part"}), 400
|
||||
@@ -35,26 +40,36 @@ def upload_holidays():
|
||||
return jsonify({"error": "No selected file"}), 400
|
||||
|
||||
try:
|
||||
content = file.read().decode("utf-8", errors="ignore")
|
||||
# Try to auto-detect delimiter; default ','
|
||||
raw = file.read()
|
||||
# Try UTF-8 first (strict), then cp1252, then latin-1 as last resort
|
||||
try:
|
||||
content = raw.decode("utf-8")
|
||||
except UnicodeDecodeError:
|
||||
try:
|
||||
content = raw.decode("cp1252")
|
||||
except UnicodeDecodeError:
|
||||
content = raw.decode("latin-1", errors="replace")
|
||||
|
||||
sniffer = csv.Sniffer()
|
||||
dialect = None
|
||||
try:
|
||||
dialect = sniffer.sniff(content[:1024])
|
||||
sample = content[:2048]
|
||||
# Some files may contain a lot of quotes; allow Sniffer to guess delimiter
|
||||
dialect = sniffer.sniff(sample)
|
||||
except Exception:
|
||||
pass
|
||||
reader = csv.DictReader(io.StringIO(
|
||||
content), dialect=dialect) if dialect else csv.DictReader(io.StringIO(content))
|
||||
|
||||
required = {"name", "start_date", "end_date"}
|
||||
if not required.issubset(set(h.lower() for h in reader.fieldnames or [])):
|
||||
return jsonify({"error": "CSV must contain headers: name, start_date, end_date"}), 400
|
||||
|
||||
def parse_date(s: str):
|
||||
s = (s or "").strip()
|
||||
if not s:
|
||||
return None
|
||||
# Try ISO first
|
||||
# Numeric YYYYMMDD
|
||||
if s.isdigit() and len(s) == 8:
|
||||
try:
|
||||
return datetime.strptime(s, "%Y%m%d").date()
|
||||
except ValueError:
|
||||
pass
|
||||
# Common formats
|
||||
for fmt in ("%Y-%m-%d", "%d.%m.%Y", "%Y/%m/%d"):
|
||||
try:
|
||||
return datetime.strptime(s, fmt).date()
|
||||
@@ -65,16 +80,18 @@ def upload_holidays():
|
||||
session = Session()
|
||||
inserted = 0
|
||||
updated = 0
|
||||
for row in reader:
|
||||
# Normalize headers to lower-case keys
|
||||
norm = {k.lower(): (v or "").strip() for k, v in row.items()}
|
||||
name = norm.get("name")
|
||||
start_date = parse_date(norm.get("start_date"))
|
||||
end_date = parse_date(norm.get("end_date"))
|
||||
region = norm.get("region") or None
|
||||
if not name or not start_date or not end_date:
|
||||
continue
|
||||
|
||||
# First, try headered CSV via DictReader
|
||||
dict_reader = csv.DictReader(io.StringIO(
|
||||
content), dialect=dialect) if dialect else csv.DictReader(io.StringIO(content))
|
||||
fieldnames_lower = [h.lower() for h in (dict_reader.fieldnames or [])]
|
||||
has_required_headers = {"name", "start_date",
|
||||
"end_date"}.issubset(set(fieldnames_lower))
|
||||
|
||||
def upsert(name: str, start_date, end_date, region=None):
|
||||
nonlocal inserted, updated
|
||||
if not name or not start_date or not end_date:
|
||||
return
|
||||
existing = (
|
||||
session.query(SchoolHoliday)
|
||||
.filter(
|
||||
@@ -86,9 +103,7 @@ def upload_holidays():
|
||||
)
|
||||
.first()
|
||||
)
|
||||
|
||||
if existing:
|
||||
# Optionally update region or source_file_name
|
||||
existing.region = region
|
||||
existing.source_file_name = file.filename
|
||||
updated += 1
|
||||
@@ -102,6 +117,41 @@ def upload_holidays():
|
||||
))
|
||||
inserted += 1
|
||||
|
||||
if has_required_headers:
|
||||
for row in dict_reader:
|
||||
norm = {k.lower(): (v or "").strip() for k, v in row.items()}
|
||||
name = norm.get("name")
|
||||
try:
|
||||
start_date = parse_date(norm.get("start_date"))
|
||||
end_date = parse_date(norm.get("end_date"))
|
||||
except ValueError:
|
||||
# Skip rows with unparseable dates
|
||||
continue
|
||||
region = (norm.get("region")
|
||||
or None) if "region" in norm else None
|
||||
upsert(name, start_date, end_date, region)
|
||||
else:
|
||||
# Fallback: headerless rows -> use columns [1]=name, [2]=start, [3]=end
|
||||
reader = csv.reader(io.StringIO(
|
||||
content), dialect=dialect) if dialect else csv.reader(io.StringIO(content))
|
||||
for row in reader:
|
||||
if not row:
|
||||
continue
|
||||
# tolerate varying column counts (4 or 5); ignore first and optional last
|
||||
cols = [c.strip() for c in row]
|
||||
if len(cols) < 4:
|
||||
# Not enough data
|
||||
continue
|
||||
name = cols[1].strip().strip('"')
|
||||
start_raw = cols[2]
|
||||
end_raw = cols[3]
|
||||
try:
|
||||
start_date = parse_date(start_raw)
|
||||
end_date = parse_date(end_raw)
|
||||
except ValueError:
|
||||
continue
|
||||
upsert(name, start_date, end_date, None)
|
||||
|
||||
session.commit()
|
||||
session.close()
|
||||
return jsonify({"success": True, "inserted": inserted, "updated": updated})
|
||||
|
||||
@@ -3,6 +3,7 @@ from server.routes.eventmedia import eventmedia_bp
|
||||
from server.routes.files import files_bp
|
||||
from server.routes.events import events_bp
|
||||
from server.routes.holidays import holidays_bp
|
||||
from server.routes.academic_periods import academic_periods_bp
|
||||
from server.routes.groups import groups_bp
|
||||
from server.routes.clients import clients_bp
|
||||
from server.database import Session, engine
|
||||
@@ -22,6 +23,7 @@ app.register_blueprint(events_bp)
|
||||
app.register_blueprint(eventmedia_bp)
|
||||
app.register_blueprint(files_bp)
|
||||
app.register_blueprint(holidays_bp)
|
||||
app.register_blueprint(academic_periods_bp)
|
||||
|
||||
|
||||
@app.route("/health")
|
||||
|
||||
Reference in New Issue
Block a user