Files
infoscreen/server/routes/clients.py
Olaf 24cdf07279 feat(monitoring): add priority screenshot pipeline with screenshot_type + docs cleanup
Implement end-to-end support for typed screenshots and priority rendering in monitoring.

Added
- Accept and forward screenshot_type from MQTT screenshot/dashboard payloads
  (periodic, event_start, event_stop)
- Extend screenshot upload handling to persist typed screenshots and metadata
- Add dedicated priority screenshot serving endpoint with fallback behavior
- Extend monitoring overview with priority screenshot fields and summary count
- Add configurable PRIORITY_SCREENSHOT_TTL_SECONDS window for active priority state

Fixed
- Ensure screenshot cache-busting updates reliably via screenshot hash updates
- Preserve normal periodic screenshot flow while introducing event_start/event_stop priority path

Improved
- Monitoring dashboard now displays screenshot type badges
- Adaptive polling: faster refresh while priority screenshots are active
- Priority screenshot presentation is surfaced immediately to operators

Docs
- Update README and copilot-instructions to match new screenshot_type behavior,
  priority endpoint, TTL config, monitoring fields, and retention model
- Remove redundant/duplicate documentation blocks and improve troubleshooting section clarity
2026-03-29 13:13:13 +00:00

462 lines
16 KiB
Python

from server.database import Session
from models.models import Client, ClientGroup
from flask import Blueprint, request, jsonify
from server.permissions import admin_or_higher
from server.mqtt_helper import publish_client_group, delete_client_group_message, publish_multiple_client_groups
import sys
import os
import glob
import base64
import hashlib
import json
from datetime import datetime, timezone
# Make project modules importable regardless of the working directory the
# app was launched from — presumably /workspace is the container root (TODO confirm).
sys.path.append('/workspace')
# Blueprint grouping all client-related REST endpoints under /api/clients.
clients_bp = Blueprint("clients", __name__, url_prefix="/api/clients")
VALID_SCREENSHOT_TYPES = {"periodic", "event_start", "event_stop"}
def _normalize_screenshot_type(raw_type):
if raw_type is None:
return "periodic"
normalized = str(raw_type).strip().lower()
if normalized in VALID_SCREENSHOT_TYPES:
return normalized
return "periodic"
def _parse_screenshot_timestamp(raw_timestamp):
if raw_timestamp is None:
return None
try:
if isinstance(raw_timestamp, (int, float)):
ts_value = float(raw_timestamp)
if ts_value > 1e12:
ts_value = ts_value / 1000.0
return datetime.fromtimestamp(ts_value, timezone.utc)
if isinstance(raw_timestamp, str):
ts = raw_timestamp.strip()
if not ts:
return None
if ts.isdigit():
ts_value = float(ts)
if ts_value > 1e12:
ts_value = ts_value / 1000.0
return datetime.fromtimestamp(ts_value, timezone.utc)
ts_normalized = ts.replace("Z", "+00:00") if ts.endswith("Z") else ts
parsed = datetime.fromisoformat(ts_normalized)
if parsed.tzinfo is None:
return parsed.replace(tzinfo=timezone.utc)
return parsed.astimezone(timezone.utc)
except Exception:
return None
return None
@clients_bp.route("/sync-all-groups", methods=["POST"])
@admin_or_higher
def sync_all_client_groups():
    """Administrative route: re-publish every active client's group mapping to MQTT.

    Intended as a one-off migration for clients that were registered before
    MQTT group publishing existed.
    """
    session = Session()
    try:
        # Fetch all active clients.
        clients = session.query(Client).filter(Client.is_active == True).all()
        if not clients:
            return jsonify({"message": "Keine aktiven Clients gefunden", "synced": 0})
        # Publish every uuid -> group_id mapping in one batch.
        client_group_mappings = {
            client.uuid: client.group_id for client in clients}
        success_count, failed_count = publish_multiple_client_groups(
            client_group_mappings)
        return jsonify({
            "success": True,
            "message": "Synchronisation abgeschlossen",
            "synced": success_count,
            "failed": failed_count,
            "total": len(clients),
        })
    except Exception as e:
        return jsonify({"error": f"Fehler bei der Synchronisation: {str(e)}"}), 500
    finally:
        # Guarantee the session is released exactly once on every code path.
        session.close()
@clients_bp.route("/without_description", methods=["GET"])
def get_clients_without_description():
    """List all clients whose description is NULL or the empty string."""
    session = Session()
    try:
        # `== None` is required by SQLAlchemy to emit "IS NULL" — do not use `is None`.
        clients = session.query(Client).filter(
            (Client.description == None) | (Client.description == "")
        ).all()
        result = [
            {
                "uuid": c.uuid,
                "hardware_token": c.hardware_token,
                "ip": c.ip,
                "type": c.type,
                "hostname": c.hostname,
                "os_version": c.os_version,
                "software_version": c.software_version,
                "macs": c.macs,
                "model": c.model,
                "registration_time": c.registration_time.isoformat() if c.registration_time else None,
                "last_alive": c.last_alive.isoformat() if c.last_alive else None,
                "is_active": c.is_active,
                "group_id": c.group_id,
            }
            for c in clients
        ]
        return jsonify(result)
    finally:
        # Close the session even if the query raises (previously it leaked).
        session.close()
@clients_bp.route("/<uuid>/description", methods=["PUT"])
@admin_or_higher
def set_client_description(uuid):
    """Set a client's description and re-publish its MQTT group mapping.

    The MQTT publish matters for freshly registered clients coming out of
    setup mode; a publish failure is reported as a warning, not an error.
    """
    # silent=True: a missing/invalid JSON body becomes {} instead of a 500.
    data = request.get_json(silent=True) or {}
    description = str(data.get("description") or "").strip()
    if not description:
        return jsonify({"error": "Beschreibung darf nicht leer sein"}), 400
    session = Session()
    try:
        client = session.query(Client).filter_by(uuid=uuid).first()
        if not client:
            return jsonify({"error": "Client nicht gefunden"}), 404
        client.description = description
        session.commit()
        # Publish the group assignment while the instance is still attached.
        mqtt_success = publish_client_group(client.uuid, client.group_id)
    finally:
        session.close()
    response = {"success": True}
    if not mqtt_success:
        response["warning"] = "Beschreibung gespeichert, aber MQTT-Publishing fehlgeschlagen"
    return jsonify(response)
@clients_bp.route("", methods=["GET"])
def get_clients():
    """Return the full client inventory as a JSON list."""
    session = Session()
    try:
        clients = session.query(Client).all()
        result = [
            {
                "uuid": c.uuid,
                "hardware_token": c.hardware_token,
                "ip": c.ip,
                "type": c.type,
                "hostname": c.hostname,
                "os_version": c.os_version,
                "software_version": c.software_version,
                "macs": c.macs,
                "model": c.model,
                "description": c.description,
                "registration_time": c.registration_time.isoformat() if c.registration_time else None,
                "last_alive": c.last_alive.isoformat() if c.last_alive else None,
                "is_active": c.is_active,
                "group_id": c.group_id,
            }
            for c in clients
        ]
        return jsonify(result)
    finally:
        # Close the session even if the query raises (previously it leaked).
        session.close()
@clients_bp.route("/group", methods=["PUT"])
@admin_or_higher
def update_clients_group():
    """Assign a list of clients to a group and publish the mapping via MQTT.

    The target group is resolved by group_id first, then group_name.
    Returns 400 for bad input, 404 for an unknown group; MQTT publish
    failures are reported as a warning on an otherwise successful response.
    """
    # silent=True: a missing/invalid JSON body becomes {} instead of a 500.
    data = request.get_json(silent=True) or {}
    client_ids = data.get("client_ids", [])
    group_id = data.get("group_id")
    group_name = data.get("group_name")
    if not isinstance(client_ids, list) or len(client_ids) == 0:
        return jsonify({"error": "client_ids muss eine nicht-leere Liste sein"}), 400
    session = Session()
    try:
        # Resolve target group: group_id takes precedence over group_name.
        if group_id is not None:
            group = session.query(ClientGroup).filter_by(id=group_id).first()
            if not group:
                return jsonify({"error": f"Gruppe mit id={group_id} nicht gefunden"}), 404
        elif group_name:
            group = session.query(ClientGroup).filter_by(name=group_name).first()
            if not group:
                return jsonify({"error": f"Gruppe '{group_name}' nicht gefunden"}), 404
        else:
            return jsonify({"error": "Entweder group_id oder group_name ist erforderlich"}), 400
        # IMPORTANT: cache group.id before the session closes to avoid
        # DetachedInstanceError when it is used after close().
        target_group_id = group.id
        session.query(Client).filter(Client.uuid.in_(client_ids)).update(
            {Client.group_id: target_group_id}, synchronize_session=False
        )
        session.commit()
    finally:
        session.close()
    # MQTT: publish the group assignment for all affected clients
    # (uses the cached target_group_id, not the detached ORM object).
    client_group_mappings = {
        client_id: target_group_id for client_id in client_ids}
    success_count, failed_count = publish_multiple_client_groups(
        client_group_mappings)
    response = {"success": True}
    if failed_count > 0:
        response[
            "warning"] = f"Gruppenzuordnung gespeichert, aber {failed_count} MQTT-Publishing(s) fehlgeschlagen"
    return jsonify(response)
@clients_bp.route("/<uuid>", methods=["PATCH"])
@admin_or_higher
def update_client(uuid):
    """Partially update a client; only whitelisted fields may be changed."""
    # silent=True: a missing/invalid JSON body becomes {} instead of a 500.
    data = request.get_json(silent=True) or {}
    session = Session()
    try:
        client = session.query(Client).filter_by(uuid=uuid).first()
        if not client:
            return jsonify({"error": "Client nicht gefunden"}), 404
        # Whitelist prevents mass-assignment of arbitrary columns.
        allowed_fields = ("description", "model")
        updated = False
        for field in allowed_fields:
            if field in data:
                setattr(client, field, data[field])
                updated = True
        if updated:
            session.commit()
            return jsonify({"success": True})
        # Note: historically returned with HTTP 200 despite being an error.
        return jsonify({"error": "Keine gültigen Felder zum Aktualisieren übergeben"})
    finally:
        session.close()
# Returns the current group_id for a single client.
@clients_bp.route("/<uuid>/group", methods=["GET"])
def get_client_group(uuid):
    """Return {"group_id": ...} for the given client, or 404 if unknown."""
    session = Session()
    try:
        client = session.query(Client).filter_by(uuid=uuid).first()
        if not client:
            return jsonify({"error": "Client nicht gefunden"}), 404
        return jsonify({"group_id": client.group_id})
    finally:
        # Close the session even if the query raises (previously it leaked).
        session.close()
# Returns all clients together with a derived alive flag.
@clients_bp.route("/with_alive_status", methods=["GET"])
def get_clients_with_alive_status():
    """Return a compact client list with an is_alive flag for monitoring."""
    session = Session()
    try:
        clients = session.query(Client).all()
        result = [
            {
                "uuid": c.uuid,
                "description": c.description,
                "ip": c.ip,
                "last_alive": c.last_alive.isoformat() if c.last_alive else None,
                "is_active": c.is_active,
                # "alive" here means: has reported at least once AND is active;
                # no recency window is applied in this endpoint.
                "is_alive": bool(c.last_alive and c.is_active),
            }
            for c in clients
        ]
        return jsonify(result)
    finally:
        # Close the session even if the query raises (previously it leaked).
        session.close()
@clients_bp.route("/<uuid>/restart", methods=["POST"])
@admin_or_higher
def restart_client(uuid):
    """
    Route to restart a specific client by UUID.
    Sends an MQTT message to the broker to trigger the restart.
    """
    # Local import keeps paho optional for code paths that never restart clients.
    import paho.mqtt.client as mqtt
    # MQTT broker configuration — "mqtt" presumably resolves via the compose
    # network (TODO confirm against deployment config).
    MQTT_BROKER = "mqtt"
    MQTT_PORT = 1883
    MQTT_TOPIC = f"clients/{uuid}/restart"
    # Verify the client exists before touching the broker.
    session = Session()
    try:
        client_exists = session.query(Client).filter_by(uuid=uuid).first() is not None
    finally:
        session.close()
    if not client_exists:
        return jsonify({"error": "Client nicht gefunden"}), 404
    # Send the restart command over MQTT.
    try:
        mqtt_client = mqtt.Client()
        mqtt_client.connect(MQTT_BROKER, MQTT_PORT)
        try:
            payload = {"action": "restart"}
            mqtt_client.publish(MQTT_TOPIC, json.dumps(payload))
        finally:
            # Always tear the connection down, even if publish raises.
            mqtt_client.disconnect()
        return jsonify({"success": True, "message": f"Restart signal sent to client {uuid}"}), 200
    except Exception as e:
        return jsonify({"error": f"Failed to send MQTT message: {str(e)}"}), 500
@clients_bp.route("/<uuid>/screenshot", methods=["POST"])
def upload_screenshot(uuid):
    """
    Route to receive and store a screenshot from a client.
    Expected payload: base64-encoded image data in JSON or binary image data.
    Screenshots are stored as {uuid}.jpg in the screenshots folder.
    Keeps last 20 screenshots per client (auto-cleanup).

    JSON payloads may also carry "timestamp" (epoch or ISO) and
    "screenshot_type" / "screenshotType" (periodic, event_start, event_stop).
    Priority types additionally update {uuid}_priority.jpg and the
    {uuid}_meta.json sidecar used by the monitoring overview.
    """
    session = Session()
    client = session.query(Client).filter_by(uuid=uuid).first()
    if not client:
        session.close()
        return jsonify({"error": "Client nicht gefunden"}), 404
    try:
        screenshot_timestamp = None
        # Default type when the payload does not specify one.
        screenshot_type = "periodic"
        # Handle JSON payload with base64-encoded image
        if request.is_json:
            data = request.get_json()
            if "image" not in data:
                return jsonify({"error": "Missing 'image' field in JSON payload"}), 400
            # Optional client-side capture time; falls back to server time below.
            screenshot_timestamp = _parse_screenshot_timestamp(data.get("timestamp"))
            # Accept both snake_case and camelCase keys from different client versions.
            screenshot_type = _normalize_screenshot_type(data.get("screenshot_type") or data.get("screenshotType"))
            # Decode base64 image
            image_data = base64.b64decode(data["image"])
        else:
            # Handle raw binary image data
            image_data = request.get_data()
        if not image_data:
            return jsonify({"error": "No image data received"}), 400
        # Ensure screenshots directory exists
        screenshots_dir = os.path.join(os.path.dirname(__file__), "..", "screenshots")
        os.makedirs(screenshots_dir, exist_ok=True)
        # Store screenshot with timestamp to track latest
        now_utc = screenshot_timestamp or datetime.now(timezone.utc)
        # Lexicographic filename order equals chronological order with this format.
        timestamp = now_utc.strftime("%Y%m%d_%H%M%S_%f")
        filename = f"{uuid}_{timestamp}_{screenshot_type}.jpg"
        filepath = os.path.join(screenshots_dir, filename)
        with open(filepath, "wb") as f:
            f.write(image_data)
        # Also create/update a symlink or copy to {uuid}.jpg for easy retrieval
        latest_filepath = os.path.join(screenshots_dir, f"{uuid}.jpg")
        with open(latest_filepath, "wb") as f:
            f.write(image_data)
        # Keep a dedicated copy for high-priority event screenshots.
        if screenshot_type in ("event_start", "event_stop"):
            priority_filepath = os.path.join(screenshots_dir, f"{uuid}_priority.jpg")
            with open(priority_filepath, "wb") as f:
                f.write(image_data)
        # Merge screenshot metadata into the {uuid}_meta.json sidecar (best-effort read).
        metadata_path = os.path.join(screenshots_dir, f"{uuid}_meta.json")
        metadata = {}
        if os.path.exists(metadata_path):
            try:
                with open(metadata_path, "r", encoding="utf-8") as meta_file:
                    metadata = json.load(meta_file)
            except Exception:
                # Corrupt metadata is discarded rather than failing the upload.
                metadata = {}
        metadata.update({
            "latest_screenshot_type": screenshot_type,
            "latest_received_at": now_utc.isoformat(),
        })
        if screenshot_type in ("event_start", "event_stop"):
            # Track the latest priority event separately; consumers presumably
            # apply the PRIORITY_SCREENSHOT_TTL window to these fields.
            metadata["last_priority_screenshot_type"] = screenshot_type
            metadata["last_priority_received_at"] = now_utc.isoformat()
        with open(metadata_path, "w", encoding="utf-8") as meta_file:
            json.dump(metadata, meta_file)
        # Update screenshot receive timestamp for monitoring dashboard
        client.last_screenshot_analyzed = now_utc
        # Content hash enables reliable cache-busting on the dashboard.
        client.last_screenshot_hash = hashlib.md5(image_data).hexdigest()
        session.commit()
        # Cleanup: keep only last 20 timestamped screenshots per client
        # (the {uuid}.jpg latest copy has no underscore and never matches;
        # the _priority.jpg copy is excluded explicitly).
        pattern = os.path.join(screenshots_dir, f"{uuid}_*.jpg")
        existing_screenshots = sorted(
            [path for path in glob.glob(pattern) if not path.endswith("_priority.jpg")]
        )
        # Keep last 20, delete older ones
        max_screenshots = 20
        if len(existing_screenshots) > max_screenshots:
            for old_file in existing_screenshots[:-max_screenshots]:
                try:
                    os.remove(old_file)
                except Exception as cleanup_error:
                    # Log but don't fail the request if cleanup fails
                    import logging
                    logging.warning(f"Failed to cleanup old screenshot {old_file}: {cleanup_error}")
        return jsonify({
            "success": True,
            "message": f"Screenshot received for client {uuid}",
            "filename": filename,
            "size": len(image_data),
            "screenshot_type": screenshot_type,
        }), 200
    except Exception as e:
        session.rollback()
        return jsonify({"error": f"Failed to process screenshot: {str(e)}"}), 500
    finally:
        session.close()
@clients_bp.route("/<uuid>", methods=["DELETE"])
@admin_or_higher
def delete_client(uuid):
    """Delete a client and remove its retained MQTT group message.

    MQTT cleanup failure does not roll back the deletion; it is reported
    as a warning on the success response.
    """
    session = Session()
    try:
        client = session.query(Client).filter_by(uuid=uuid).first()
        if not client:
            return jsonify({"error": "Client nicht gefunden"}), 404
        session.delete(client)
        session.commit()
    finally:
        # Close the session even if delete/commit raises (previously it leaked).
        session.close()
    # MQTT: remove the retained message for the deleted client.
    mqtt_success = delete_client_group_message(uuid)
    response = {"success": True}
    if not mqtt_success:
        response["warning"] = "Client gelöscht, aber MQTT-Message-Löschung fehlgeschlagen"
    return jsonify(response)