Initial commit - copied workspace after database cleanup

This commit is contained in:
RobbStarkAustria
2025-10-10 15:20:14 +00:00
commit 1efe40a03b
142 changed files with 23625 additions and 0 deletions

View File

@@ -0,0 +1,84 @@
from flask import Blueprint, jsonify, request
from server.database import Session
from models.models import AcademicPeriod
from datetime import datetime
# Blueprint exposing the academic-period endpoints under /api/academic_periods.
academic_periods_bp = Blueprint(
    'academic_periods', __name__, url_prefix='/api/academic_periods')
@academic_periods_bp.route('', methods=['GET'])
def list_academic_periods():
    """Return all academic periods, ordered by ascending start date."""
    db = Session()
    try:
        rows = (
            db.query(AcademicPeriod)
            .order_by(AcademicPeriod.start_date.asc())
            .all()
        )
        return jsonify({'periods': [row.to_dict() for row in rows]})
    finally:
        db.close()
@academic_periods_bp.route('/active', methods=['GET'])
def get_active_academic_period():
    """Return the currently active academic period, or None when none is active."""
    db = Session()
    try:
        active = (
            db.query(AcademicPeriod)
            .filter(AcademicPeriod.is_active == True)  # noqa: E712 — SQLAlchemy column comparison
            .first()
        )
        payload = active.to_dict() if active else None
        return jsonify({'period': payload}), 200
    finally:
        db.close()
@academic_periods_bp.route('/for_date', methods=['GET'])
def get_period_for_date():
    """
    Returns the academic period that covers the provided date (YYYY-MM-DD).
    If multiple match, prefer the one with the latest start_date.
    """
    raw = request.args.get('date')
    if not raw:
        return jsonify({'error': 'Missing required query param: date (YYYY-MM-DD)'}), 400
    try:
        wanted = datetime.strptime(raw, '%Y-%m-%d').date()
    except ValueError:
        return jsonify({'error': 'Invalid date format. Expected YYYY-MM-DD'}), 400
    db = Session()
    try:
        match = (
            db.query(AcademicPeriod)
            .filter(
                AcademicPeriod.start_date <= wanted,
                AcademicPeriod.end_date >= wanted,
            )
            .order_by(AcademicPeriod.start_date.desc())
            .first()
        )
        return jsonify({'period': match.to_dict() if match else None}), 200
    finally:
        db.close()
@academic_periods_bp.route('/active', methods=['POST'])
def set_active_academic_period():
    """
    Mark one academic period as the single active one.

    Expects a JSON body {"id": <int>}. Deactivates every other period and
    activates the requested one within one transaction.
    Returns 400 on missing id, 404 when the period does not exist.
    """
    data = request.get_json(silent=True) or {}
    period_id = data.get('id')
    if period_id is None:
        return jsonify({'error': 'Missing required field: id'}), 400
    session = Session()
    try:
        target = session.query(AcademicPeriod).get(period_id)
        if not target:
            return jsonify({'error': 'AcademicPeriod not found'}), 404
        # Deactivate all, then activate target
        session.query(AcademicPeriod).filter(AcademicPeriod.is_active == True).update(
            {AcademicPeriod.is_active: False}
        )
        target.is_active = True
        session.commit()
        session.refresh(target)
        return jsonify({'period': target.to_dict()}), 200
    except Exception:
        # Roll back the half-applied deactivate/activate so the database is
        # not left with zero active periods when the commit fails.
        session.rollback()
        raise
    finally:
        session.close()

289
server/routes/clients.py Normal file
View File

@@ -0,0 +1,289 @@
from server.database import Session
from models.models import Client, ClientGroup
from flask import Blueprint, request, jsonify
from server.mqtt_helper import publish_client_group, delete_client_group_message, publish_multiple_client_groups
import sys
sys.path.append('/workspace')
# Blueprint exposing the client endpoints under /api/clients.
clients_bp = Blueprint("clients", __name__, url_prefix="/api/clients")
@clients_bp.route("/sync-all-groups", methods=["POST"])
def sync_all_client_groups():
    """
    Administrative route: synchronize all existing client/group assignments
    with MQTT. Useful for a one-off migration of pre-existing clients.
    """
    session = Session()
    try:
        # Fetch all active clients
        clients = session.query(Client).filter(Client.is_active == True).all()
        if not clients:
            return jsonify({"message": "Keine aktiven Clients gefunden", "synced": 0})
        # Publish every client's group assignment in one batch
        client_group_mappings = {
            client.uuid: client.group_id for client in clients}
        success_count, failed_count = publish_multiple_client_groups(
            client_group_mappings)
        return jsonify({
            "success": True,
            "message": "Synchronisation abgeschlossen",
            "synced": success_count,
            "failed": failed_count,
            "total": len(clients)
        })
    except Exception as e:
        return jsonify({"error": f"Fehler bei der Synchronisation: {str(e)}"}), 500
    finally:
        # One close point instead of a session.close() on every exit path.
        session.close()
@clients_bp.route("/without_description", methods=["GET"])
def get_clients_without_description():
    """Return all clients whose description is NULL or empty."""
    session = Session()
    try:
        clients = session.query(Client).filter(
            (Client.description == None) | (Client.description == "")
        ).all()
        result = [
            {
                "uuid": c.uuid,
                "hardware_token": c.hardware_token,
                "ip": c.ip,
                "type": c.type,
                "hostname": c.hostname,
                "os_version": c.os_version,
                "software_version": c.software_version,
                "macs": c.macs,
                "model": c.model,
                "registration_time": c.registration_time.isoformat() if c.registration_time else None,
                "last_alive": c.last_alive.isoformat() if c.last_alive else None,
                "is_active": c.is_active,
                "group_id": c.group_id,
            }
            for c in clients
        ]
        return jsonify(result)
    finally:
        # try/finally ensures the session is released even when the query raises.
        session.close()
@clients_bp.route("/<uuid>/description", methods=["PUT"])
def set_client_description(uuid):
    """
    Set a client's description (must be non-empty) and re-publish the
    client's group assignment via MQTT.
    """
    # silent=True + `or {}` guards against a missing/non-JSON body; `or ""`
    # guards against an explicit JSON null for "description" (both crashed before).
    data = request.get_json(silent=True) or {}
    description = (data.get("description") or "").strip()
    if not description:
        return jsonify({"error": "Beschreibung darf nicht leer sein"}), 400
    session = Session()
    try:
        client = session.query(Client).filter_by(uuid=uuid).first()
        if not client:
            return jsonify({"error": "Client nicht gefunden"}), 404
        client.description = description
        session.commit()
        # MQTT: publish group assignment (important for new clients from SetupMode)
        mqtt_success = publish_client_group(client.uuid, client.group_id)
    finally:
        session.close()
    response = {"success": True}
    if not mqtt_success:
        response["warning"] = "Beschreibung gespeichert, aber MQTT-Publishing fehlgeschlagen"
    return jsonify(response)
@clients_bp.route("", methods=["GET"])
def get_clients():
    """Return the full metadata of every registered client."""
    session = Session()
    try:
        clients = session.query(Client).all()
        result = [
            {
                "uuid": c.uuid,
                "hardware_token": c.hardware_token,
                "ip": c.ip,
                "type": c.type,
                "hostname": c.hostname,
                "os_version": c.os_version,
                "software_version": c.software_version,
                "macs": c.macs,
                "model": c.model,
                "description": c.description,
                "registration_time": c.registration_time.isoformat() if c.registration_time else None,
                "last_alive": c.last_alive.isoformat() if c.last_alive else None,
                "is_active": c.is_active,
                "group_id": c.group_id,
            }
            for c in clients
        ]
        return jsonify(result)
    finally:
        # try/finally ensures the session is released even when the query raises.
        session.close()
@clients_bp.route("/group", methods=["PUT"])
def update_clients_group():
    """
    Assign a list of clients (by uuid) to a target group — identified by
    group_id (priority) or group_name — then publish the new assignments
    over MQTT.
    """
    # silent=True + `or {}` guards against a missing/non-JSON body.
    data = request.get_json(silent=True) or {}
    client_ids = data.get("client_ids", [])
    group_id = data.get("group_id")
    group_name = data.get("group_name")
    if not isinstance(client_ids, list) or len(client_ids) == 0:
        return jsonify({"error": "client_ids muss eine nicht-leere Liste sein"}), 400
    session = Session()
    try:
        # Resolve target group: group_id takes priority over group_name
        if group_id is not None:
            group = session.query(ClientGroup).filter_by(id=group_id).first()
            if not group:
                return jsonify({"error": f"Gruppe mit id={group_id} nicht gefunden"}), 404
        elif group_name:
            group = session.query(ClientGroup).filter_by(name=group_name).first()
            if not group:
                return jsonify({"error": f"Gruppe '{group_name}' nicht gefunden"}), 404
        else:
            return jsonify({"error": "Entweder group_id oder group_name ist erforderlich"}), 400
        # IMPORTANT: cache group.id before the session closes (DetachedInstanceError)
        target_group_id = group.id
        session.query(Client).filter(Client.uuid.in_(client_ids)).update(
            {Client.group_id: target_group_id}, synchronize_session=False
        )
        session.commit()
    finally:
        session.close()
    # MQTT: publish assignments for all affected clients (uses cached target_group_id)
    client_group_mappings = {
        client_id: target_group_id for client_id in client_ids}
    success_count, failed_count = publish_multiple_client_groups(
        client_group_mappings)
    response = {"success": True}
    if failed_count > 0:
        response["warning"] = (
            f"Gruppenzuordnung gespeichert, aber {failed_count} MQTT-Publishing(s) fehlgeschlagen"
        )
    return jsonify(response)
@clients_bp.route("/<uuid>", methods=["PATCH"])
def update_client(uuid):
    """
    Partially update a client. Only "description" and "model" may be patched.
    Returns 404 for an unknown client and 400 when no updatable field is given.
    """
    data = request.get_json(silent=True) or {}
    session = Session()
    try:
        client = session.query(Client).filter_by(uuid=uuid).first()
        if not client:
            return jsonify({"error": "Client nicht gefunden"}), 404
        allowed_fields = ["description", "model"]
        updated = False
        for field in allowed_fields:
            if field in data:
                setattr(client, field, data[field])
                updated = True
        if not updated:
            # Previously this error was returned with HTTP 200; 400 signals
            # the unusable request body correctly.
            return jsonify({"error": "Keine gültigen Felder zum Aktualisieren übergeben"}), 400
        session.commit()
        return jsonify({"success": True})
    finally:
        session.close()
# Route: return the current group_id of a single client
@clients_bp.route("/<uuid>/group", methods=["GET"])
def get_client_group(uuid):
    """Look up one client by uuid and return its group_id."""
    db = Session()
    found = db.query(Client).filter_by(uuid=uuid).first()
    if found is None:
        db.close()
        return jsonify({"error": "Client nicht gefunden"}), 404
    gid = found.group_id
    db.close()
    return jsonify({"group_id": gid})
# Route: all clients together with a coarse alive flag
@clients_bp.route("/with_alive_status", methods=["GET"])
def get_clients_with_alive_status():
    """List every client with a simple alive indicator.

    NOTE(review): is_alive here is merely last_alive-present AND is_active;
    unlike groups.py it applies no heartbeat grace period — confirm intended.
    """
    db = Session()
    payload = [
        {
            "uuid": c.uuid,
            "description": c.description,
            "ip": c.ip,
            "last_alive": c.last_alive.isoformat() if c.last_alive else None,
            "is_active": c.is_active,
            "is_alive": bool(c.last_alive and c.is_active),
        }
        for c in db.query(Client).all()
    ]
    db.close()
    return jsonify(payload)
@clients_bp.route("/<uuid>/restart", methods=["POST"])
def restart_client(uuid):
    """
    Route to restart a specific client by UUID.
    Sends an MQTT message to the broker to trigger the restart.
    """
    import json

    import paho.mqtt.client as mqtt

    # MQTT broker configuration
    broker_host = "mqtt"
    broker_port = 1883
    topic = f"clients/{uuid}/restart"
    # Verify the client exists before signalling a restart
    db = Session()
    exists = db.query(Client).filter_by(uuid=uuid).first()
    db.close()
    if exists is None:
        return jsonify({"error": "Client nicht gefunden"}), 404
    # Send MQTT message
    try:
        publisher = mqtt.Client()
        publisher.connect(broker_host, broker_port)
        publisher.publish(topic, json.dumps({"action": "restart"}))
        publisher.disconnect()
    except Exception as e:
        return jsonify({"error": f"Failed to send MQTT message: {str(e)}"}), 500
    return jsonify({"success": True, "message": f"Restart signal sent to client {uuid}"}), 200
@clients_bp.route("/<uuid>", methods=["DELETE"])
def delete_client(uuid):
    """Delete a client row and clear its retained MQTT group message."""
    db = Session()
    victim = db.query(Client).filter_by(uuid=uuid).first()
    if victim is None:
        db.close()
        return jsonify({"error": "Client nicht gefunden"}), 404
    db.delete(victim)
    db.commit()
    db.close()
    # MQTT: drop the retained message for the deleted client
    removed_ok = delete_client_group_message(uuid)
    result = {"success": True}
    if not removed_ok:
        result["warning"] = "Client gelöscht, aber MQTT-Message-Löschung fehlgeschlagen"
    return jsonify(result)

View File

@@ -0,0 +1,94 @@
from flask import Blueprint, jsonify, request
from server.database import Session
from models.models import Conversion, ConversionStatus, EventMedia, MediaType
from server.task_queue import get_queue
from server.worker import convert_event_media_to_pdf
from datetime import datetime, timezone
import hashlib
# Blueprint exposing the conversion endpoints under /api/conversions.
conversions_bp = Blueprint("conversions", __name__,
                           url_prefix="/api/conversions")
def sha256_file(abs_path: str) -> str:
    """Compute the hex SHA-256 digest of the file at *abs_path*.

    Reads in 8 KiB chunks so arbitrarily large files never have to be
    held in memory at once.
    """
    digest = hashlib.sha256()
    with open(abs_path, "rb") as fh:
        while True:
            chunk = fh.read(8192)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
@conversions_bp.route("/<int:media_id>/pdf", methods=["POST"])
def ensure_conversion(media_id: int):
    """Ensure a PDF conversion exists (or is queued) for an office media file.

    Looks up the EventMedia row, hashes the source file, and either reuses an
    existing Conversion row for the (media, format, hash) triple or creates a
    pending one. Pending/failed conversions are (re-)enqueued on the worker
    queue; processing/ready ones are reported as-is.

    Returns:
        404 when the media row or its file path is missing,
        200 when no conversion is needed or one is already processing/ready,
        202 with a job id when a conversion job was enqueued.
    """
    session = Session()
    try:
        media = session.query(EventMedia).get(media_id)
        if not media or not media.file_path:
            return jsonify({"error": "Media not found or no file"}), 404
        # Only enqueue for office presentation formats
        if media.media_type not in {MediaType.ppt, MediaType.pptx, MediaType.odp}:
            return jsonify({"message": "No conversion required for this media_type"}), 200
        # Compute file hash — the hash deduplicates conversions of identical content
        import os
        # Media root convention: <repo>/server/media, one level above routes/
        base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        media_root = os.path.join(base_dir, "media")
        abs_source = os.path.join(media_root, media.file_path)
        file_hash = sha256_file(abs_source)
        # Find or create conversion row
        conv = (
            session.query(Conversion)
            .filter_by(
                source_event_media_id=media.id,
                target_format="pdf",
                file_hash=file_hash,
            )
            .one_or_none()
        )
        if not conv:
            conv = Conversion(
                source_event_media_id=media.id,
                target_format="pdf",
                status=ConversionStatus.pending,
                file_hash=file_hash,
            )
            session.add(conv)
            session.commit()
        # Enqueue if not already processing/ready
        if conv.status in {ConversionStatus.pending, ConversionStatus.failed}:
            q = get_queue()
            job = q.enqueue(convert_event_media_to_pdf, conv.id)
            return jsonify({"id": conv.id, "status": conv.status.value, "job_id": job.get_id()}), 202
        else:
            return jsonify({"id": conv.id, "status": conv.status.value, "target_path": conv.target_path}), 200
    finally:
        session.close()
@conversions_bp.route("/<int:media_id>/status", methods=["GET"])
def conversion_status(media_id: int):
    """Report the most recent PDF-conversion status for a media item."""
    db = Session()
    try:
        latest = (
            db.query(Conversion)
            .filter_by(source_event_media_id=media_id, target_format="pdf")
            .order_by(Conversion.id.desc())
            .first()
        )
        if latest is None:
            return jsonify({"status": "missing"}), 404
        payload = {
            "id": latest.id,
            "status": latest.status.value,
            "target_path": latest.target_path,
            "started_at": latest.started_at.isoformat() if latest.started_at else None,
            "completed_at": latest.completed_at.isoformat() if latest.completed_at else None,
            "error_message": latest.error_message,
        }
        return jsonify(payload)
    finally:
        db.close()

261
server/routes/eventmedia.py Normal file
View File

@@ -0,0 +1,261 @@
from re import A
from flask import Blueprint, request, jsonify, send_from_directory
from server.database import Session
from models.models import EventMedia, MediaType, Conversion, ConversionStatus
from server.task_queue import get_queue
from server.worker import convert_event_media_to_pdf
import hashlib
import os
# Blueprint exposing the event-media / file-manager endpoints under /api/eventmedia.
eventmedia_bp = Blueprint('eventmedia', __name__, url_prefix='/api/eventmedia')

# Media files live under <repo>/server/media (one level above routes/).
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
def get_param(key, default=None):
    """Resolve a request parameter with precedence: form > JSON body > query string."""
    form = request.form
    if form and key in form:
        return form.get(key, default)
    if request.is_json:
        body = request.json
        if body and key in body:
            return body.get(key, default)
    return request.args.get(key, default)
# --- FileManager: List, Create Folder, Rename, Delete, Move ---
@eventmedia_bp.route('/filemanager/operations', methods=['GET', 'POST'])
def filemanager_operations():
    """Dispatch endpoint for FileManager actions on MEDIA_ROOT.

    The 'action' parameter selects one of: read, details, delete, rename,
    move, create. Paths/names arrive from the client and are resolved
    relative to MEDIA_ROOT.

    NOTE(review): user-supplied 'path'/'name' values are joined into
    filesystem paths without normalization — '..' segments could escape
    MEDIA_ROOT; confirm and harden before exposing this unauthenticated.
    """
    action = get_param('action')
    path = get_param('path', '/')
    name = get_param('name')
    new_name = get_param('newName')
    target_path = get_param('targetPath')
    full_path = os.path.join(MEDIA_ROOT, path.lstrip('/'))
    print(action, path, name, new_name, target_path, full_path)  # debug output
    if action == 'read':
        # List files and folders
        items = []
        session = Session()
        for entry in os.scandir(full_path):
            item = {
                'name': entry.name,
                'isFile': entry.is_file(),
                'size': entry.stat().st_size,
                'type': os.path.splitext(entry.name)[1][1:] if entry.is_file() else '',
                'hasChild': entry.is_dir()
            }
            # For files, try to take the upload date from the DB
            if entry.is_file():
                media = session.query(EventMedia).filter_by(
                    url=entry.name).first()
                if media and media.uploaded_at:
                    # FileManager expects a UNIX timestamp (seconds)
                    item['dateModified'] = int(media.uploaded_at.timestamp())
                else:
                    item['dateModified'] = entry.stat().st_mtime
            else:
                item['dateModified'] = entry.stat().st_mtime
            items.append(item)
        session.close()
        return jsonify({'files': items, 'cwd': {'name': os.path.basename(full_path), 'path': path}})
    elif action == 'details':
        # Return details for one or more files
        names = request.form.getlist('names[]') or (request.json.get(
            'names') if request.is_json and request.json else [])
        path = get_param('path', '/')
        details = []
        session = Session()
        for name in names:
            file_path = os.path.join(MEDIA_ROOT, path.lstrip('/'), name)
            media = session.query(EventMedia).filter_by(url=name).first()
            if os.path.isfile(file_path):
                detail = {
                    'name': name,
                    'size': os.path.getsize(file_path),
                    'dateModified': int(media.uploaded_at.timestamp()) if media and media.uploaded_at else int(os.path.getmtime(file_path)),
                    'type': os.path.splitext(name)[1][1:],
                    'hasChild': False,
                    'isFile': True,
                    'description': media.message_content if media else '',
                    # further fields as needed
                }
                details.append(detail)
        session.close()
        return jsonify({'details': details})
    elif action == 'delete':
        # NOTE(review): os.rmdir only removes EMPTY directories, and the DB
        # rows of deleted files are left behind — confirm both are intended.
        for item in request.form.getlist('names[]'):
            item_path = os.path.join(full_path, item)
            if os.path.isdir(item_path):
                os.rmdir(item_path)
            else:
                os.remove(item_path)
        return jsonify({'success': True})
    elif action == 'rename':
        src = os.path.join(full_path, name)
        dst = os.path.join(full_path, new_name)
        os.rename(src, dst)
        return jsonify({'success': True})
    elif action == 'move':
        src = os.path.join(full_path, name)
        dst = os.path.join(MEDIA_ROOT, target_path.lstrip('/'), name)
        os.rename(src, dst)
        return jsonify({'success': True})
    elif action == 'create':
        os.makedirs(os.path.join(full_path, name), exist_ok=True)
        return jsonify({'success': True})
    else:
        return jsonify({'error': 'Unknown action'}), 400
# --- FileManager: Upload ---
@eventmedia_bp.route('/filemanager/upload', methods=['POST'])
def filemanager_upload():
    """Handle FileManager uploads into MEDIA_ROOT/<path>.

    For each uploaded file an EventMedia row is created; office presentation
    formats (ppt/pptx/odp) additionally get a Conversion row and a queued
    PDF-conversion job (deduplicated by the SHA-256 of the file contents).

    NOTE(review): file.filename is joined into a filesystem path without
    sanitization, and the session is never closed — confirm and harden/fix.
    """
    session = Session()
    # Fixed: read the path from request.form first, then request.args
    path = request.form.get('path') or request.args.get('path', '/')
    upload_path = os.path.join(MEDIA_ROOT, path.lstrip('/'))
    os.makedirs(upload_path, exist_ok=True)
    for file in request.files.getlist('uploadFiles'):
        file_path = os.path.join(upload_path, file.filename)
        file.save(file_path)
        ext = os.path.splitext(file.filename)[1][1:].lower()
        try:
            # Map the file extension onto the MediaType enum; unknown -> other
            media_type = MediaType(ext)
        except ValueError:
            media_type = MediaType.other
        from datetime import datetime, timezone
        media = EventMedia(
            media_type=media_type,
            url=file.filename,
            file_path=os.path.relpath(file_path, MEDIA_ROOT),
            uploaded_at=datetime.now(timezone.utc)
        )
        session.add(media)
        session.commit()
        # Enqueue conversion for office presentation types
        if media_type in {MediaType.ppt, MediaType.pptx, MediaType.odp}:
            # compute file hash
            h = hashlib.sha256()
            with open(file_path, 'rb') as f:
                for chunk in iter(lambda: f.read(8192), b""):
                    h.update(chunk)
            file_hash = h.hexdigest()
            # upsert Conversion row
            conv = (
                session.query(Conversion)
                .filter_by(
                    source_event_media_id=media.id,
                    target_format='pdf',
                    file_hash=file_hash,
                )
                .one_or_none()
            )
            if not conv:
                conv = Conversion(
                    source_event_media_id=media.id,
                    target_format='pdf',
                    status=ConversionStatus.pending,
                    file_hash=file_hash,
                )
                session.add(conv)
                session.commit()
            if conv.status in {ConversionStatus.pending, ConversionStatus.failed}:
                q = get_queue()
                q.enqueue(convert_event_media_to_pdf, conv.id)
    session.commit()
    return jsonify({'success': True})
# --- FileManager: Download ---
@eventmedia_bp.route('/filemanager/download', methods=['GET'])
def filemanager_download():
    """Send the first requested file as an attachment (single download only)."""
    rel_dir = request.args.get('path', '/')
    requested = request.args.getlist('names[]')
    if not requested:
        return jsonify({'error': 'No file specified'}), 400
    target = os.path.join(MEDIA_ROOT, rel_dir.lstrip('/'), requested[0])
    return send_from_directory(os.path.dirname(target), os.path.basename(target), as_attachment=True)
# --- FileManager: Get Image (optional, for thumbnails) ---
@eventmedia_bp.route('/filemanager/get-image', methods=['GET'])
def filemanager_get_image():
    """Serve a raw media file (used by the FileManager for image previews)."""
    rel = request.args.get('path', '/')
    target = os.path.join(MEDIA_ROOT, rel.lstrip('/'))
    return send_from_directory(os.path.dirname(target), os.path.basename(target))
# --- EventMedia API: metadata list ---
@eventmedia_bp.route('', methods=['GET'])
def list_media():
    """Return the metadata of every EventMedia row."""
    session = Session()
    try:
        media = session.query(EventMedia).all()
        return jsonify([m.to_dict() for m in media])
    finally:
        # The original never closed the session, leaking a connection per request.
        session.close()
# --- EventMedia API: metadata update ---
@eventmedia_bp.route('/<int:media_id>', methods=['PUT'])
def update_media(media_id):
    """Update the title (url) and description (message_content) of a media row."""
    session = Session()
    try:
        media = session.query(EventMedia).get(media_id)
        if not media:
            return jsonify({'error': 'Not found'}), 404
        data = request.json or {}
        media.url = data.get('title', media.url)
        media.message_content = data.get('description', media.message_content)
        # Event assignment could be added here later
        session.commit()
        return jsonify(media.to_dict())
    finally:
        # The original never closed the session, leaking a connection per request.
        session.close()
@eventmedia_bp.route('/find_by_filename', methods=['GET'])
def find_by_filename():
    """Find a media row whose url equals, or whose file_path ends with, *filename*."""
    filename = request.args.get('filename')
    if not filename:
        return jsonify({'error': 'Missing filename'}), 400
    session = Session()
    try:
        # Match the exact stored name, or a file_path ending in the name.
        # (The LIKE pattern previously interpolated a broken placeholder
        # instead of the requested filename.)
        media = session.query(EventMedia).filter(
            (EventMedia.url == filename) |
            (EventMedia.file_path.like(f"%{filename}"))
        ).first()
        if not media:
            return jsonify({'error': 'Not found'}), 404
        return jsonify({
            'id': media.id,
            'file_path': media.file_path,
            'url': media.url
        })
    finally:
        # The original never closed the session, leaking a connection per request.
        session.close()
@eventmedia_bp.route('/<int:media_id>', methods=['GET'])
def get_media_by_id(media_id):
    """Return basic metadata for one EventMedia row."""
    db = Session()
    row = db.query(EventMedia).get(media_id)
    if row is None:
        db.close()
        return jsonify({'error': 'Not found'}), 404
    payload = {
        'id': row.id,
        'file_path': row.file_path,
        'url': row.url,
        'name': row.url,  # no dedicated name field; the url doubles as the name
        'media_type': row.media_type.name if row.media_type else None
    }
    db.close()
    return jsonify(payload)

169
server/routes/events.py Normal file
View File

@@ -0,0 +1,169 @@
from flask import Blueprint, request, jsonify
from server.database import Session
from models.models import Event, EventMedia, MediaType
from datetime import datetime, timezone
from sqlalchemy import and_
import sys
sys.path.append('/workspace')
# Blueprint exposing the event endpoints under /api/events.
events_bp = Blueprint("events", __name__, url_prefix="/api/events")
def get_icon_for_type(event_type):
    """Map an event type to its Lucide icon name ('' when unknown)."""
    icon_by_type = {
        "presentation": "Presentation",
        "website": "Globe",
        "video": "Video",
        "message": "MessageSquare",
        "webuntis": "School",
    }
    return icon_by_type.get(event_type, "")
@events_bp.route("", methods=["GET"])
def get_events():
    """
    List events in the scheduler-widget shape, optionally filtered by group_id.

    Side effect: events whose end time has passed are flagged inactive.
    Inactive events are omitted unless ?show_inactive=1 is given.
    NOTE(review): the original also read ?start / ?end but never used them —
    confirm whether range filtering was intended.
    """
    session = Session()
    try:
        group_id = request.args.get("group_id")
        show_inactive = request.args.get(
            "show_inactive", "0") == "1"  # checkbox logic
        now = datetime.now(timezone.utc)
        events_query = session.query(Event)
        if group_id:
            events_query = events_query.filter(Event.group_id == int(group_id))
        result = []
        dirty = False
        for e in events_query.all():
            # Treat naive end timestamps as UTC before comparing
            if e.end and e.end.tzinfo is None:
                end_dt = e.end.replace(tzinfo=timezone.utc)
            else:
                end_dt = e.end
            # Auto-deactivate events that are over
            if end_dt and end_dt < now and e.is_active:
                e.is_active = False
                dirty = True
            if show_inactive or e.is_active:
                result.append({
                    "Id": str(e.id),
                    "GroupId": e.group_id,
                    "Subject": e.title,
                    "StartTime": e.start.isoformat() if e.start else None,
                    "EndTime": e.end.isoformat() if e.end else None,
                    "IsAllDay": False,
                    "MediaId": e.event_media_id,
                    "Type": e.event_type.value if e.event_type else None,  # enum -> string
                    "Icon": get_icon_for_type(e.event_type.value if e.event_type else None),
                })
        if dirty:
            # One commit for all auto-deactivated events instead of one per row
            session.commit()
        return jsonify(result)
    finally:
        session.close()
@events_bp.route("/<event_id>", methods=["DELETE"])
def delete_event(event_id):
    """Delete a single event by id; 404 when it does not exist."""
    db = Session()
    target = db.query(Event).filter_by(id=event_id).first()
    if target is None:
        db.close()
        return jsonify({"error": "Termin nicht gefunden"}), 404
    db.delete(target)
    db.commit()
    db.close()
    return jsonify({"success": True})
@events_bp.route("", methods=["POST"])
def create_event():
    """
    Create an event.

    Required JSON fields: group_id, title, description, start, end,
    event_type, created_by. Presentation events additionally require
    event_media_id; website events require website_url (for which an
    EventMedia row is created implicitly).
    """
    # silent=True + `or {}` guards against a missing/non-JSON body, which
    # previously crashed the required-field check with a TypeError.
    data = request.get_json(silent=True) or {}
    # Check required fields before touching the database
    required = ["group_id", "title", "description",
                "start", "end", "event_type", "created_by"]
    for field in required:
        if field not in data:
            return jsonify({"error": f"Missing field: {field}"}), 400
    event_type = data["event_type"]
    session = Session()
    try:
        event_media_id = None
        slideshow_interval = None
        # Presentation: take over event_media_id and slideshow_interval
        if event_type == "presentation":
            event_media_id = data.get("event_media_id")
            slideshow_interval = data.get("slideshow_interval")
            if not event_media_id:
                return jsonify({"error": "event_media_id required for presentation"}), 400
        # Website: create an EventMedia row for the URL and use its id
        if event_type == "website":
            website_url = data.get("website_url")
            if not website_url:
                return jsonify({"error": "website_url required for website"}), 400
            media = EventMedia(
                media_type=MediaType.website,
                url=website_url,
                file_path=website_url
            )
            session.add(media)
            session.commit()
            event_media_id = media.id
        created_by = data.get("created_by")
        # Convert start/end times.
        # NOTE(review): astimezone() on a naive datetime interprets it as
        # server-LOCAL time, not UTC — confirm that is the intended behavior.
        start = datetime.fromisoformat(data["start"])
        end = datetime.fromisoformat(data["end"])
        if start.tzinfo is None:
            start = start.astimezone(timezone.utc)
        if end.tzinfo is None:
            end = end.astimezone(timezone.utc)
        event = Event(
            group_id=data["group_id"],
            title=data["title"],
            description=data["description"],
            start=start,
            end=end,
            event_type=event_type,
            is_active=True,
            event_media_id=event_media_id,
            slideshow_interval=slideshow_interval,
            created_by=created_by
        )
        session.add(event)
        session.commit()
        return jsonify({"success": True, "event_id": event.id})
    except Exception:
        session.rollback()
        raise
    finally:
        # The original never closed the session, leaking a connection per request.
        session.close()
@events_bp.route("/<event_id>", methods=["PUT"])
def update_event(event_id):
    """Update an event's editable fields; unspecified fields keep their value."""
    # silent=True + `or {}` guards against a missing/non-JSON body.
    data = request.get_json(silent=True) or {}
    session = Session()
    try:
        event = session.query(Event).filter_by(id=event_id).first()
        if not event:
            return jsonify({"error": "Termin nicht gefunden"}), 404
        event.title = data.get("title", event.title)
        event.description = data.get("description", event.description)
        if "start" in data:
            event.start = datetime.fromisoformat(data["start"])
        if "end" in data:
            event.end = datetime.fromisoformat(data["end"])
        event.event_type = data.get("event_type", event.event_type)
        event.event_media_id = data.get("event_media_id", event.event_media_id)
        event.slideshow_interval = data.get(
            "slideshow_interval", event.slideshow_interval)
        event.created_by = data.get("created_by", event.created_by)
        session.commit()
        # Read the id before the session closes (avoids DetachedInstanceError)
        return jsonify({"success": True, "event_id": event.id})
    finally:
        session.close()

68
server/routes/files.py Normal file
View File

@@ -0,0 +1,68 @@
from flask import Blueprint, jsonify, send_from_directory
from server.database import Session
from models.models import EventMedia
import os
# Blueprint for direct file downloads by media ID
files_bp = Blueprint("files", __name__, url_prefix="/api/files")

# Reuse the same media root convention as eventmedia.py:
# <repo>/server/media, resolved one directory above routes/.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
MEDIA_ROOT = os.path.join(BASE_DIR, "media")
@files_bp.route("/<int:media_id>/<path:filename>", methods=["GET"])
def download_media_file(media_id: int, filename: str):
    """
    Download the stored media file for a given EventMedia ID.

    URL format example:
        /api/files/26/LPUV4I_Folien_Nowitzki_Bewertungskriterien.pptx

    Behavior:
        - Looks up EventMedia by ID
        - Validates the requested filename against stored metadata (best-effort)
        - Serves the file from server/media using the stored relative file_path
    """
    session = Session()
    try:
        media = session.query(EventMedia).get(media_id)
        if not media:
            return jsonify({"error": "Not found"}), 404
        # Prefer the stored relative file_path; fall back to the URL/filename
        rel_path = media.file_path or media.url
        # Basic filename consistency check to avoid leaking other files.
        # Only enforced when media.url is present.
        if media.url and os.path.basename(filename) != os.path.basename(media.url):
            return jsonify({
                "error": "Filename mismatch",
                "expected": os.path.basename(media.url),
                "got": os.path.basename(filename),
            }), 400
        abs_path = os.path.join(MEDIA_ROOT, rel_path)
        # Ensure the file exists before serving
        if not os.path.isfile(abs_path):
            return jsonify({"error": "File not found on server"}), 404
        # Serve as attachment (download)
        return send_from_directory(
            os.path.dirname(abs_path), os.path.basename(abs_path), as_attachment=True
        )
    finally:
        # Single close point replaces the per-branch session.close() calls.
        session.close()
@files_bp.route("/converted/<path:relpath>", methods=["GET"])
def download_converted(relpath: str):
    """Serve converted files (e.g., PDFs) relative to the media root.

    The requested relative path is normalized before the containment check so
    that '..' segments cannot escape MEDIA_ROOT: a plain startswith() test on
    the unnormalized join was bypassable, because
    os.path.join(MEDIA_ROOT, "../x") still starts with MEDIA_ROOT textually.
    """
    abs_path = os.path.normpath(os.path.join(MEDIA_ROOT, relpath))
    # The os.sep suffix also rejects sibling dirs like MEDIA_ROOT + "_backup"
    if not abs_path.startswith(MEDIA_ROOT + os.sep):
        return jsonify({"error": "Invalid path"}), 400
    if not os.path.isfile(abs_path):
        return jsonify({"error": "File not found"}), 404
    return send_from_directory(os.path.dirname(abs_path), os.path.basename(abs_path), as_attachment=True)

189
server/routes/groups.py Normal file
View File

@@ -0,0 +1,189 @@
from models.models import Client
# Neue Route: Liefert alle Gruppen mit zugehörigen Clients und deren Alive-Status
from server.database import Session
from models.models import ClientGroup
from flask import Blueprint, request, jsonify
from sqlalchemy import func
import sys
import os
from datetime import datetime, timedelta
sys.path.append('/workspace')
# Blueprint exposing the group endpoints under /api/groups.
groups_bp = Blueprint("groups", __name__, url_prefix="/api/groups")
def get_grace_period():
    """Return the heartbeat grace period in seconds, depending on ENV."""
    env = os.environ.get("ENV", "production").lower()
    if env in ("development", "dev"):
        return int(os.environ.get("HEARTBEAT_GRACE_PERIOD_DEV", "15"))
    return int(os.environ.get("HEARTBEAT_GRACE_PERIOD_PROD", "180"))


def is_client_alive(last_alive, is_active):
    """Return True when the client's last heartbeat lies within the grace period.

    Accepts *last_alive* as a datetime (naive or aware) or an ISO-8601 string
    (a trailing 'Z' is tolerated). Unparseable strings count as not alive.
    """
    if not last_alive or not is_active:
        return False
    grace_period = get_grace_period()
    # last_alive can be a string or a datetime
    if isinstance(last_alive, str):
        # Strip a trailing 'Z' — datetime.fromisoformat() rejects it on older Pythons
        raw = last_alive[:-1] if last_alive.endswith('Z') else last_alive
        try:
            last_alive_dt = datetime.fromisoformat(raw)
        except Exception:
            return False
    else:
        last_alive_dt = last_alive
    # Compare with matching "awareness": the original always used utcnow(),
    # which raises TypeError against timezone-aware timestamps.
    if last_alive_dt.tzinfo is not None:
        from datetime import timezone
        now = datetime.now(timezone.utc)
    else:
        now = datetime.utcnow()
    return now - last_alive_dt <= timedelta(seconds=grace_period)
@groups_bp.route("", methods=["POST"])
def create_group():
    """Create a new client group with a unique, non-empty name."""
    # silent=True + `or {}` guards against a missing/non-JSON body.
    data = request.get_json(silent=True) or {}
    name = data.get("name")
    if not name or not name.strip():
        return jsonify({"error": "Gruppenname erforderlich"}), 400
    session = Session()
    try:
        if session.query(ClientGroup).filter_by(name=name).first():
            return jsonify({"error": "Gruppe existiert bereits"}), 409
        group = ClientGroup(name=name, is_active=True)
        session.add(group)
        session.commit()
        result = {
            "id": group.id,
            "name": group.name,
            "created_at": group.created_at.isoformat() if group.created_at else None,
            "is_active": group.is_active,
        }
        return jsonify(result), 201
    finally:
        # try/finally ensures the session is released even on errors.
        session.close()
@groups_bp.route("", methods=["GET"])
def get_groups():
    """List all client groups."""
    db = Session()
    payload = [
        {
            "id": grp.id,
            "name": grp.name,
            "created_at": grp.created_at.isoformat() if grp.created_at else None,
            "is_active": grp.is_active,
        }
        for grp in db.query(ClientGroup).all()
    ]
    db.close()
    return jsonify(payload)
@groups_bp.route("/<int:group_id>", methods=["PUT"])
def update_group(group_id):
    """Update a group's name and/or is_active flag and return the new state."""
    data = request.get_json()
    db = Session()
    grp = db.query(ClientGroup).filter_by(id=group_id).first()
    if grp is None:
        db.close()
        return jsonify({"error": "Gruppe nicht gefunden"}), 404
    if "name" in data:
        grp.name = data["name"]
    if "is_active" in data:
        grp.is_active = bool(data["is_active"])
    db.commit()
    payload = {
        "id": grp.id,
        "name": grp.name,
        "created_at": grp.created_at.isoformat() if grp.created_at else None,
        "is_active": grp.is_active,
    }
    db.close()
    return jsonify(payload)
@groups_bp.route("/<int:group_id>", methods=["DELETE"])
def delete_group(group_id):
    """Delete a group by primary key; 404 when it does not exist."""
    db = Session()
    grp = db.query(ClientGroup).filter_by(id=group_id).first()
    if grp is None:
        db.close()
        return jsonify({"error": "Gruppe nicht gefunden"}), 404
    db.delete(grp)
    db.commit()
    db.close()
    return jsonify({"success": True})
@groups_bp.route("/byname/<string:group_name>", methods=["DELETE"])
def delete_group_by_name(group_name):
    """Delete a group identified by its name; 404 when it does not exist."""
    db = Session()
    grp = db.query(ClientGroup).filter_by(name=group_name).first()
    if grp is None:
        db.close()
        return jsonify({"error": "Gruppe nicht gefunden"}), 404
    db.delete(grp)
    db.commit()
    db.close()
    return jsonify({"success": True})
@groups_bp.route("/byname/<string:old_name>", methods=["PUT"])
def rename_group_by_name(old_name):
data = request.get_json()
new_name = data.get("newName")
if not new_name or not new_name.strip():
return jsonify({"error": "Neuer Name erforderlich"}), 400
session = Session()
group = session.query(ClientGroup).filter_by(name=old_name).first()
if not group:
session.close()
return jsonify({"error": "Gruppe nicht gefunden"}), 404
# Prüfe, ob der neue Name schon existiert
if session.query(ClientGroup).filter(func.binary(ClientGroup.name) == new_name).first():
session.close()
return jsonify({"error": f'Gruppe mit dem Namen "{new_name}" existiert bereits', "duplicate_name": new_name}), 409
group.name = new_name
session.commit()
result = {
"id": group.id,
"name": group.name,
"created_at": group.created_at.isoformat() if group.created_at else None,
"is_active": group.is_active,
}
session.close()
return jsonify(result)
@groups_bp.route("/with_clients", methods=["GET"])
def get_groups_with_clients():
session = Session()
groups = session.query(ClientGroup).all()
result = []
for g in groups:
clients = session.query(Client).filter_by(group_id=g.id).all()
client_list = []
for c in clients:
client_list.append({
"uuid": c.uuid,
"description": c.description,
"ip": c.ip,
"last_alive": c.last_alive.isoformat() if c.last_alive else None,
"is_active": c.is_active,
"is_alive": is_client_alive(c.last_alive, c.is_active),
})
result.append({
"id": g.id,
"name": g.name,
"created_at": g.created_at.isoformat() if g.created_at else None,
"is_active": g.is_active,
"clients": client_list,
})
session.close()
return jsonify(result)

159
server/routes/holidays.py Normal file
View File

@@ -0,0 +1,159 @@
from flask import Blueprint, request, jsonify
from server.database import Session
from models.models import SchoolHoliday
from datetime import datetime
import csv
import io
holidays_bp = Blueprint("holidays", __name__, url_prefix="/api/holidays")
@holidays_bp.route("", methods=["GET"])
def list_holidays():
session = Session()
region = request.args.get("region")
q = session.query(SchoolHoliday)
if region:
q = q.filter(SchoolHoliday.region == region)
rows = q.order_by(SchoolHoliday.start_date.asc()).all()
data = [r.to_dict() for r in rows]
session.close()
return jsonify({"holidays": data})
@holidays_bp.route("/upload", methods=["POST"])
def upload_holidays():
"""
Accepts a CSV/TXT file upload (multipart/form-data).
Supported formats:
1) Headered CSV with columns (case-insensitive): name, start_date, end_date[, region]
- Dates: YYYY-MM-DD, DD.MM.YYYY, YYYY/MM/DD, or YYYYMMDD
2) Headerless CSV/TXT lines with columns:
[internal, name, start_yyyymmdd, end_yyyymmdd, optional_internal]
- Only columns 2-4 are used; 1 and 5 are ignored.
"""
if "file" not in request.files:
return jsonify({"error": "No file part"}), 400
file = request.files["file"]
if file.filename == "":
return jsonify({"error": "No selected file"}), 400
try:
raw = file.read()
# Try UTF-8 first (strict), then cp1252, then latin-1 as last resort
try:
content = raw.decode("utf-8")
except UnicodeDecodeError:
try:
content = raw.decode("cp1252")
except UnicodeDecodeError:
content = raw.decode("latin-1", errors="replace")
sniffer = csv.Sniffer()
dialect = None
try:
sample = content[:2048]
# Some files may contain a lot of quotes; allow Sniffer to guess delimiter
dialect = sniffer.sniff(sample)
except Exception:
pass
def parse_date(s: str):
s = (s or "").strip()
if not s:
return None
# Numeric YYYYMMDD
if s.isdigit() and len(s) == 8:
try:
return datetime.strptime(s, "%Y%m%d").date()
except ValueError:
pass
# Common formats
for fmt in ("%Y-%m-%d", "%d.%m.%Y", "%Y/%m/%d"):
try:
return datetime.strptime(s, fmt).date()
except ValueError:
continue
raise ValueError(f"Unsupported date format: {s}")
session = Session()
inserted = 0
updated = 0
# First, try headered CSV via DictReader
dict_reader = csv.DictReader(io.StringIO(
content), dialect=dialect) if dialect else csv.DictReader(io.StringIO(content))
fieldnames_lower = [h.lower() for h in (dict_reader.fieldnames or [])]
has_required_headers = {"name", "start_date",
"end_date"}.issubset(set(fieldnames_lower))
def upsert(name: str, start_date, end_date, region=None):
nonlocal inserted, updated
if not name or not start_date or not end_date:
return
existing = (
session.query(SchoolHoliday)
.filter(
SchoolHoliday.name == name,
SchoolHoliday.start_date == start_date,
SchoolHoliday.end_date == end_date,
SchoolHoliday.region.is_(
region) if region is None else SchoolHoliday.region == region,
)
.first()
)
if existing:
existing.region = region
existing.source_file_name = file.filename
updated += 1
else:
session.add(SchoolHoliday(
name=name,
start_date=start_date,
end_date=end_date,
region=region,
source_file_name=file.filename,
))
inserted += 1
if has_required_headers:
for row in dict_reader:
norm = {k.lower(): (v or "").strip() for k, v in row.items()}
name = norm.get("name")
try:
start_date = parse_date(norm.get("start_date"))
end_date = parse_date(norm.get("end_date"))
except ValueError:
# Skip rows with unparseable dates
continue
region = (norm.get("region")
or None) if "region" in norm else None
upsert(name, start_date, end_date, region)
else:
# Fallback: headerless rows -> use columns [1]=name, [2]=start, [3]=end
reader = csv.reader(io.StringIO(
content), dialect=dialect) if dialect else csv.reader(io.StringIO(content))
for row in reader:
if not row:
continue
# tolerate varying column counts (4 or 5); ignore first and optional last
cols = [c.strip() for c in row]
if len(cols) < 4:
# Not enough data
continue
name = cols[1].strip().strip('"')
start_raw = cols[2]
end_raw = cols[3]
try:
start_date = parse_date(start_raw)
end_date = parse_date(end_raw)
except ValueError:
continue
upsert(name, start_date, end_date, None)
session.commit()
session.close()
return jsonify({"success": True, "inserted": inserted, "updated": updated})
except Exception as e:
return jsonify({"error": str(e)}), 400

21
server/routes/setup.py Normal file
View File

@@ -0,0 +1,21 @@
from flask import Blueprint, jsonify
from server.database import get_db
from models.models import Client
# Blueprint for first-run setup endpoints; all routes are mounted under /api/setup.
bp = Blueprint('setup', __name__, url_prefix='/api/setup')
@bp.route('/clients_without_description', methods=['GET'])
def clients_without_description():
    """List clients that have no description yet (used by the setup UI)."""
    db = get_db()
    # .is_(None) is the SQLAlchemy idiom for an SQL "IS NULL" comparison.
    clients = db.query(Client).filter(Client.description.is_(None)).all()
    result = [
        {
            'uuid': c.uuid,
            'hostname': c.hostname,
            'ip_address': c.ip_address,
            # Serialize timestamps as ISO 8601 — consistent with the other
            # endpoints; bare datetimes would be jsonify'd as RFC-1123 strings.
            'last_alive': c.last_alive.isoformat() if c.last_alive else None,
            'created_at': c.created_at.isoformat() if c.created_at else None,
            'group': c.group_id,
        }
        for c in clients
    ]
    return jsonify(result)