feat(monitoring): complete monitoring pipeline and fix presentation flag persistence

add superadmin monitoring dashboard with protected route, menu entry, and monitoring data client
add monitoring overview API endpoint and improve log serialization/aggregation for dashboard use
extend listener health/log handling with robust status/event/timestamp normalization and screenshot payload extraction
improve screenshot persistence and retrieval (timestamp-aware uploads, latest screenshot endpoint fallback)
fix page_progress and auto_progress persistence/serialization across create, update, and detached occurrence flows
align technical and project documentation with the implemented monitoring features and the backend changes shipped without a version bump
add documentation sync log entry and include minor compose env indentation cleanup
This commit is contained in:
2026-03-24 11:18:33 +00:00
parent 3107d0f671
commit 9c330f984f
18 changed files with 2095 additions and 104 deletions

View File

@@ -4,6 +4,7 @@ from flask import Blueprint, request, jsonify
from server.permissions import admin_or_higher
from server.mqtt_helper import publish_client_group, delete_client_group_message, publish_multiple_client_groups
import sys
from datetime import datetime, timezone
sys.path.append('/workspace')
clients_bp = Blueprint("clients", __name__, url_prefix="/api/clients")
@@ -284,21 +285,46 @@ def upload_screenshot(uuid):
import os
import base64
import glob
from datetime import datetime
session = Session()
client = session.query(Client).filter_by(uuid=uuid).first()
if not client:
session.close()
return jsonify({"error": "Client nicht gefunden"}), 404
session.close()
try:
screenshot_timestamp = None
# Handle JSON payload with base64-encoded image
if request.is_json:
data = request.get_json()
if "image" not in data:
return jsonify({"error": "Missing 'image' field in JSON payload"}), 400
raw_timestamp = data.get("timestamp")
if raw_timestamp is not None:
try:
if isinstance(raw_timestamp, (int, float)):
ts_value = float(raw_timestamp)
if ts_value > 1e12:
ts_value = ts_value / 1000.0
screenshot_timestamp = datetime.fromtimestamp(ts_value, timezone.utc)
elif isinstance(raw_timestamp, str):
ts = raw_timestamp.strip()
if ts:
if ts.isdigit():
ts_value = float(ts)
if ts_value > 1e12:
ts_value = ts_value / 1000.0
screenshot_timestamp = datetime.fromtimestamp(ts_value, timezone.utc)
else:
ts_normalized = ts.replace("Z", "+00:00") if ts.endswith("Z") else ts
screenshot_timestamp = datetime.fromisoformat(ts_normalized)
if screenshot_timestamp.tzinfo is None:
screenshot_timestamp = screenshot_timestamp.replace(tzinfo=timezone.utc)
else:
screenshot_timestamp = screenshot_timestamp.astimezone(timezone.utc)
except Exception:
screenshot_timestamp = None
# Decode base64 image
image_data = base64.b64decode(data["image"])
@@ -314,7 +340,8 @@ def upload_screenshot(uuid):
os.makedirs(screenshots_dir, exist_ok=True)
# Store screenshot with timestamp to track latest
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
now_utc = screenshot_timestamp or datetime.now(timezone.utc)
timestamp = now_utc.strftime("%Y%m%d_%H%M%S")
filename = f"{uuid}_{timestamp}.jpg"
filepath = os.path.join(screenshots_dir, filename)
@@ -326,6 +353,10 @@ def upload_screenshot(uuid):
with open(latest_filepath, "wb") as f:
f.write(image_data)
# Update screenshot receive timestamp for monitoring dashboard
client.last_screenshot_analyzed = now_utc
session.commit()
# Cleanup: keep only last 20 timestamped screenshots per client
pattern = os.path.join(screenshots_dir, f"{uuid}_*.jpg")
existing_screenshots = sorted(glob.glob(pattern))
@@ -349,7 +380,10 @@ def upload_screenshot(uuid):
}), 200
except Exception as e:
session.rollback()
return jsonify({"error": f"Failed to process screenshot: {str(e)}"}), 500
finally:
session.close()
@clients_bp.route("/<uuid>", methods=["DELETE"])