feat(tv-power): implement server PR1 with tests and documentation

This commit is contained in:
2026-04-01 08:07:18 +00:00
parent b5f5f30005
commit 3fc7d33e43
15 changed files with 1997 additions and 3 deletions

View File

@@ -2,6 +2,8 @@
from dotenv import load_dotenv
import os
from datetime import datetime
import hashlib
import json
import logging
from sqlalchemy.orm import sessionmaker, joinedload
from sqlalchemy import create_engine, or_, and_, text
@@ -184,6 +186,131 @@ def get_system_setting_value(key: str, default: str | None = None) -> str | None
session.close()
def _parse_utc_datetime(value):
"""Parse datetime-like values and normalize to timezone-aware UTC."""
if value is None:
return None
if isinstance(value, datetime):
dt = value
else:
try:
dt = datetime.fromisoformat(str(value))
except Exception:
return None
if dt.tzinfo is None:
return dt.replace(tzinfo=timezone.utc)
return dt.astimezone(timezone.utc)
def _normalize_group_id(group_id):
try:
return int(group_id)
except (TypeError, ValueError):
return None
def _event_range_from_dict(event):
    """Extract a validated (start, end) UTC pair from an event mapping.

    Returns None when either bound is missing/unparseable or the range is
    empty or inverted (end <= start).
    """
    bounds = (
        _parse_utc_datetime(event.get("start")),
        _parse_utc_datetime(event.get("end")),
    )
    if None in bounds:
        return None
    start, end = bounds
    if end <= start:
        return None
    return start, end
def _merge_ranges(ranges, adjacency_seconds=0):
"""Merge overlapping or adjacent [start, end] ranges."""
if not ranges:
return []
ranges_sorted = sorted(ranges, key=lambda r: (r[0], r[1]))
merged = [ranges_sorted[0]]
adjacency_delta = max(0, int(adjacency_seconds))
for current_start, current_end in ranges_sorted[1:]:
last_start, last_end = merged[-1]
if current_start <= last_end or (current_start - last_end).total_seconds() <= adjacency_delta:
if current_end > last_end:
merged[-1] = (last_start, current_end)
else:
merged.append((current_start, current_end))
return merged
def compute_group_power_intent_basis(events, group_id, now_utc=None, adjacency_seconds=0):
    """Return pure, deterministic power intent basis for one group at a point in time.

    The returned mapping intentionally excludes volatile fields such as intent_id,
    issued_at and expires_at.
    """
    gid = _normalize_group_id(group_id)
    now = _parse_utc_datetime(now_utc) or datetime.now(timezone.utc)

    collected_ranges = []
    currently_active_ids = []
    for candidate in events or []:
        # Only consider events belonging to the requested group.
        if _normalize_group_id(candidate.get("group_id")) != gid:
            continue
        window = _event_range_from_dict(candidate)
        if window is None:
            continue
        collected_ranges.append(window)
        win_start, win_end = window
        # Half-open membership: an event is active at its start, not its end.
        if win_start <= now < win_end:
            candidate_id = candidate.get("id")
            if candidate_id is not None:
                currently_active_ids.append(candidate_id)

    # Find the merged window (if any) containing "now".
    active_start = None
    active_end = None
    for merged_start, merged_end in _merge_ranges(collected_ranges, adjacency_seconds=adjacency_seconds):
        if merged_start <= now < merged_end:
            active_start, active_end = merged_start, merged_end
            break

    is_on = active_start is not None

    def _fmt(dt):
        # Render UTC datetimes with a trailing "Z" rather than "+00:00".
        return dt.isoformat().replace("+00:00", "Z") if dt else None

    return {
        "schema_version": "1.0",
        "group_id": gid,
        "desired_state": "on" if is_on else "off",
        "reason": "active_event" if is_on else "no_active_event",
        "poll_interval_sec": None,
        "event_window_start": _fmt(active_start),
        "event_window_end": _fmt(active_end),
        "active_event_ids": sorted(set(currently_active_ids)),
    }
def build_group_power_intent_body(intent_basis, poll_interval_sec):
    """Build deterministic payload body (without intent_id/issued_at/expires_at)."""
    get = intent_basis.get
    return {
        "schema_version": get("schema_version", "1.0"),
        "group_id": get("group_id"),
        "desired_state": get("desired_state", "off"),
        "reason": get("reason", "no_active_event"),
        # Coerced to int so the payload shape is stable regardless of input type.
        "poll_interval_sec": int(poll_interval_sec),
        "event_window_start": get("event_window_start"),
        "event_window_end": get("event_window_end"),
        # Copy to avoid aliasing the caller's list.
        "active_event_ids": list(get("active_event_ids", [])),
    }
def compute_group_power_intent_fingerprint(intent_body):
    """Create a stable hash for semantic transition detection."""
    # Canonical serialization: sorted keys, no whitespace — so equal
    # semantics always produce the same digest.
    serialized = json.dumps(intent_body, sort_keys=True, separators=(",", ":"))
    return hashlib.sha256(serialized.encode("utf-8")).hexdigest()
def format_event_with_media(event):
"""Transform Event + EventMedia into client-expected format"""
event_dict = {

View File

@@ -2,11 +2,200 @@
import os
import logging
from .db_utils import get_active_events, get_system_setting_value
from .db_utils import (
get_active_events,
get_system_setting_value,
compute_group_power_intent_basis,
build_group_power_intent_body,
compute_group_power_intent_fingerprint,
)
import paho.mqtt.client as mqtt
import json
import datetime
import time
import uuid
def _to_utc_z(dt: datetime.datetime) -> str:
if dt.tzinfo is None:
dt = dt.replace(tzinfo=datetime.timezone.utc)
else:
dt = dt.astimezone(datetime.timezone.utc)
return dt.isoformat().replace("+00:00", "Z")
def _republish_cached_power_intents(client, last_power_intents, power_intent_metrics):
if not last_power_intents:
return
logging.info(
"MQTT reconnect power-intent republish count=%s",
len(last_power_intents),
)
for gid, cached in last_power_intents.items():
topic = f"infoscreen/groups/{gid}/power/intent"
client.publish(topic, cached["payload"], qos=1, retain=True)
power_intent_metrics["retained_republish_total"] += 1
def _publish_group_power_intents(
    client,
    events,
    now,
    poll_interval,
    heartbeat_enabled,
    expiry_multiplier,
    min_expiry_seconds,
    last_power_intents,
    power_intent_metrics,
):
    """Publish one retained power intent per candidate group over MQTT.

    Candidate groups are those referenced by ``events`` plus every group
    already present in the ``last_power_intents`` cache, so a group whose
    events have disappeared still receives a fresh (typically "off") intent.
    A publish happens on a semantic transition (fingerprint change) or,
    when ``heartbeat_enabled`` is truthy, as a periodic re-publish that
    reuses the previous intent_id.  Successful publishes update
    ``last_power_intents`` and ``power_intent_metrics`` in place; failed
    publishes are logged and counted but leave the cache untouched, so the
    same intent is retried on the next cycle.
    """
    # Intent lifetime spans several poll cycles, never below the configured floor.
    expiry_seconds = max(
        expiry_multiplier * poll_interval,
        min_expiry_seconds,
    )
    candidate_group_ids = set()
    for event in events:
        group_id = event.get("group_id")
        if group_id is None:
            continue
        try:
            candidate_group_ids.add(int(group_id))
        except (TypeError, ValueError):
            continue
    # Include previously published groups so stale "on" intents get replaced.
    candidate_group_ids.update(last_power_intents.keys())
    for gid in sorted(candidate_group_ids):
        # Guard: validate group_id is a valid positive integer
        if not isinstance(gid, int) or gid <= 0:
            logging.error(
                "event=power_intent_publish_error reason=invalid_group_id group_id=%s",
                gid,
            )
            continue
        # The basis/body are deterministic and exclude volatile fields, so the
        # fingerprint only changes when the semantic content changes.
        intent_basis = compute_group_power_intent_basis(
            events=events,
            group_id=gid,
            now_utc=now,
            adjacency_seconds=0,
        )
        intent_body = build_group_power_intent_body(
            intent_basis=intent_basis,
            poll_interval_sec=poll_interval,
        )
        fingerprint = compute_group_power_intent_fingerprint(intent_body)
        previous = last_power_intents.get(gid)
        is_transition_publish = previous is None or previous["fingerprint"] != fingerprint
        is_heartbeat_publish = bool(heartbeat_enabled and not is_transition_publish)
        if not is_transition_publish and not is_heartbeat_publish:
            # Unchanged intent and heartbeats disabled: nothing to send.
            continue
        # Heartbeats reuse the cached intent_id; transitions mint a new one.
        intent_id = previous["intent_id"] if previous and not is_transition_publish else str(uuid.uuid4())
        # Guard: validate intent_id is not empty
        if not intent_id or not isinstance(intent_id, str) or len(intent_id.strip()) == 0:
            logging.error(
                "event=power_intent_publish_error group_id=%s reason=invalid_intent_id",
                gid,
            )
            continue
        issued_at = now
        expires_at = issued_at + datetime.timedelta(seconds=expiry_seconds)
        # Guard: validate expiry window is positive and issued_at has valid timezone
        if expires_at <= issued_at:
            logging.error(
                "event=power_intent_publish_error group_id=%s reason=invalid_expiry issued_at=%s expires_at=%s",
                gid,
                _to_utc_z(issued_at),
                _to_utc_z(expires_at),
            )
            continue
        issued_at_str = _to_utc_z(issued_at)
        expires_at_str = _to_utc_z(expires_at)
        # Guard: ensure Z suffix on timestamps (format validation)
        if not issued_at_str.endswith("Z") or not expires_at_str.endswith("Z"):
            logging.error(
                "event=power_intent_publish_error group_id=%s reason=invalid_timestamp_format issued_at=%s expires_at=%s",
                gid,
                issued_at_str,
                expires_at_str,
            )
            continue
        # Volatile fields are layered on top of the deterministic body here,
        # after fingerprinting, so they never influence transition detection.
        payload_dict = {
            **intent_body,
            "intent_id": intent_id,
            "issued_at": issued_at_str,
            "expires_at": expires_at_str,
        }
        # Guard: ensure payload serialization succeeds before publishing
        try:
            payload = json.dumps(payload_dict, sort_keys=True, separators=(",", ":"))
        except (TypeError, ValueError) as e:
            logging.error(
                "event=power_intent_publish_error group_id=%s reason=payload_serialization_error error=%s",
                gid,
                str(e),
            )
            continue
        topic = f"infoscreen/groups/{gid}/power/intent"
        result = client.publish(topic, payload, qos=1, retain=True)
        result.wait_for_publish(timeout=5.0)
        # NOTE(review): result.rc appears to be the rc of the initial publish
        # call (paho MQTTMessageInfo); confirm wait_for_publish + rc is the
        # intended delivery check rather than is_published().
        if result.rc != mqtt.MQTT_ERR_SUCCESS:
            power_intent_metrics["publish_error_total"] += 1
            logging.error(
                "event=power_intent_publish_error group_id=%s desired_state=%s intent_id=%s "
                "transition_publish=%s heartbeat_publish=%s topic=%s qos=1 retained=true rc=%s",
                gid,
                payload_dict.get("desired_state"),
                intent_id,
                is_transition_publish,
                is_heartbeat_publish,
                topic,
                result.rc,
            )
            continue
        # Cache only on success, so failures are retried next cycle.
        last_power_intents[gid] = {
            "fingerprint": fingerprint,
            "intent_id": intent_id,
            "payload": payload,
        }
        if is_transition_publish:
            power_intent_metrics["intent_transitions_total"] += 1
        if is_heartbeat_publish:
            power_intent_metrics["heartbeat_republish_total"] += 1
        power_intent_metrics["publish_success_total"] += 1
        logging.info(
            "event=power_intent_publish group_id=%s desired_state=%s reason=%s intent_id=%s "
            "issued_at=%s expires_at=%s transition_publish=%s heartbeat_publish=%s "
            "topic=%s qos=1 retained=true",
            gid,
            payload_dict.get("desired_state"),
            payload_dict.get("reason"),
            intent_id,
            issued_at_str,
            expires_at_str,
            is_transition_publish,
            is_heartbeat_publish,
            topic,
        )
def _env_bool(name: str, default: bool) -> bool:
value = os.getenv(name)
if value is None:
return default
return value.strip().lower() in ("1", "true", "yes", "on")
# Logging configuration
from logging.handlers import RotatingFileHandler
@@ -35,7 +224,7 @@ def main():
client = mqtt.Client(callback_api_version=mqtt.CallbackAPIVersion.VERSION2)
client.reconnect_delay_set(min_delay=1, max_delay=30)
POLL_INTERVAL = 30 # Sekunden, Empfehlung für seltene Änderungen
POLL_INTERVAL = int(os.getenv("POLL_INTERVAL_SECONDS", "30"))
# 0 = aus; z.B. 600 für alle 10 Min
# initial value from DB or fallback to env
try:
@@ -43,10 +232,35 @@ def main():
REFRESH_SECONDS = int(db_val) if db_val is not None else int(os.getenv("REFRESH_SECONDS", "0"))
except Exception:
REFRESH_SECONDS = int(os.getenv("REFRESH_SECONDS", "0"))
# TV power intent (PR-1): group-level publishing is feature-flagged and disabled by default.
POWER_INTENT_PUBLISH_ENABLED = _env_bool("POWER_INTENT_PUBLISH_ENABLED", False)
POWER_INTENT_HEARTBEAT_ENABLED = _env_bool("POWER_INTENT_HEARTBEAT_ENABLED", True)
POWER_INTENT_EXPIRY_MULTIPLIER = int(os.getenv("POWER_INTENT_EXPIRY_MULTIPLIER", "3"))
POWER_INTENT_MIN_EXPIRY_SECONDS = int(os.getenv("POWER_INTENT_MIN_EXPIRY_SECONDS", "90"))
logging.info(
"Scheduler config: poll_interval=%ss refresh_seconds=%s power_intent_enabled=%s "
"power_intent_heartbeat=%s power_intent_expiry_multiplier=%s power_intent_min_expiry=%ss",
POLL_INTERVAL,
REFRESH_SECONDS,
POWER_INTENT_PUBLISH_ENABLED,
POWER_INTENT_HEARTBEAT_ENABLED,
POWER_INTENT_EXPIRY_MULTIPLIER,
POWER_INTENT_MIN_EXPIRY_SECONDS,
)
# Konfigurierbares Zeitfenster in Tagen (Standard: 7)
WINDOW_DAYS = int(os.getenv("EVENTS_WINDOW_DAYS", "7"))
last_payloads = {} # group_id -> payload
last_published_at = {} # group_id -> epoch seconds
last_power_intents = {} # group_id -> {fingerprint, intent_id, payload}
power_intent_metrics = {
"intent_transitions_total": 0,
"publish_success_total": 0,
"publish_error_total": 0,
"heartbeat_republish_total": 0,
"retained_republish_total": 0,
}
# Beim (Re-)Connect alle bekannten retained Payloads erneut senden
def on_connect(client, userdata, flags, reasonCode, properties=None):
@@ -56,6 +270,9 @@ def main():
topic = f"infoscreen/events/{gid}"
client.publish(topic, payload, retain=True)
if POWER_INTENT_PUBLISH_ENABLED:
_republish_cached_power_intents(client, last_power_intents, power_intent_metrics)
client.on_connect = on_connect
client.connect("mqtt", 1883)
@@ -150,6 +367,29 @@ def main():
del last_payloads[gid]
last_published_at.pop(gid, None)
if POWER_INTENT_PUBLISH_ENABLED:
_publish_group_power_intents(
client=client,
events=events,
now=now,
poll_interval=POLL_INTERVAL,
heartbeat_enabled=POWER_INTENT_HEARTBEAT_ENABLED,
expiry_multiplier=POWER_INTENT_EXPIRY_MULTIPLIER,
min_expiry_seconds=POWER_INTENT_MIN_EXPIRY_SECONDS,
last_power_intents=last_power_intents,
power_intent_metrics=power_intent_metrics,
)
logging.debug(
"event=power_intent_metrics intent_transitions_total=%s publish_success_total=%s "
"publish_error_total=%s heartbeat_republish_total=%s retained_republish_total=%s",
power_intent_metrics["intent_transitions_total"],
power_intent_metrics["publish_success_total"],
power_intent_metrics["publish_error_total"],
power_intent_metrics["heartbeat_republish_total"],
power_intent_metrics["retained_republish_total"],
)
time.sleep(POLL_INTERVAL)

View File

@@ -0,0 +1,191 @@
import json
import unittest
from datetime import datetime, timedelta, timezone
from scheduler.scheduler import (
_publish_group_power_intents,
_republish_cached_power_intents,
)
class _FakePublishResult:
def __init__(self, rc=0):
self.rc = rc
self.wait_timeout = None
def wait_for_publish(self, timeout=None):
self.wait_timeout = timeout
class _FakeMqttClient:
    """Minimal MQTT client double that records every publish call."""

    def __init__(self, rc=0):
        self.rc = rc  # rc stamped on every publish result
        self.calls = []  # one dict per publish invocation, in order

    def publish(self, topic, payload, qos=0, retain=False):
        """Record the publish arguments and return a fake result object."""
        outcome = _FakePublishResult(rc=self.rc)
        self.calls.append(
            {
                "topic": topic,
                "payload": payload,
                "qos": qos,
                "retain": retain,
                "result": outcome,
            }
        )
        return outcome
class PowerIntentSchedulerTests(unittest.TestCase):
    """Behavioral tests for the scheduler's MQTT power-intent publishing."""

    def test_transition_then_heartbeat_reuses_intent_id(self):
        """A second cycle with unchanged semantics heartbeats with the same intent_id."""
        client = _FakeMqttClient(rc=0)
        last_power_intents = {}
        metrics = {
            "intent_transitions_total": 0,
            "publish_success_total": 0,
            "publish_error_total": 0,
            "heartbeat_republish_total": 0,
            "retained_republish_total": 0,
        }
        events = [
            {
                "id": 101,
                "group_id": 12,
                "start": "2026-03-31T10:00:00+00:00",
                "end": "2026-03-31T10:30:00+00:00",
            }
        ]
        # First cycle: inside the event window -> transition publish ("on").
        now_first = datetime(2026, 3, 31, 10, 5, 0, tzinfo=timezone.utc)
        _publish_group_power_intents(
            client=client,
            events=events,
            now=now_first,
            poll_interval=15,
            heartbeat_enabled=True,
            expiry_multiplier=3,
            min_expiry_seconds=90,
            last_power_intents=last_power_intents,
            power_intent_metrics=metrics,
        )
        first_payload = json.loads(client.calls[0]["payload"])
        first_intent_id = first_payload["intent_id"]
        # Second cycle: still inside the window -> heartbeat, not transition.
        now_second = now_first + timedelta(seconds=15)
        _publish_group_power_intents(
            client=client,
            events=events,
            now=now_second,
            poll_interval=15,
            heartbeat_enabled=True,
            expiry_multiplier=3,
            min_expiry_seconds=90,
            last_power_intents=last_power_intents,
            power_intent_metrics=metrics,
        )
        self.assertEqual(len(client.calls), 2)
        second_payload = json.loads(client.calls[1]["payload"])
        self.assertEqual(first_payload["desired_state"], "on")
        self.assertEqual(second_payload["desired_state"], "on")
        # Heartbeats must reuse the intent_id minted at the transition.
        self.assertEqual(first_intent_id, second_payload["intent_id"])
        self.assertEqual(client.calls[0]["topic"], "infoscreen/groups/12/power/intent")
        self.assertEqual(client.calls[0]["qos"], 1)
        self.assertTrue(client.calls[0]["retain"])
        self.assertEqual(metrics["intent_transitions_total"], 1)
        self.assertEqual(metrics["heartbeat_republish_total"], 1)
        self.assertEqual(metrics["publish_success_total"], 2)
        self.assertEqual(metrics["publish_error_total"], 0)

    def test_state_change_creates_new_intent_id(self):
        """An on->off semantic transition mints a fresh intent_id."""
        client = _FakeMqttClient(rc=0)
        last_power_intents = {}
        metrics = {
            "intent_transitions_total": 0,
            "publish_success_total": 0,
            "publish_error_total": 0,
            "heartbeat_republish_total": 0,
            "retained_republish_total": 0,
        }
        events_on = [
            {
                "id": 88,
                "group_id": 3,
                "start": "2026-03-31T10:00:00+00:00",
                "end": "2026-03-31T10:30:00+00:00",
            }
        ]
        # Inside the window -> "on".
        now_on = datetime(2026, 3, 31, 10, 5, 0, tzinfo=timezone.utc)
        _publish_group_power_intents(
            client=client,
            events=events_on,
            now=now_on,
            poll_interval=15,
            heartbeat_enabled=True,
            expiry_multiplier=3,
            min_expiry_seconds=90,
            last_power_intents=last_power_intents,
            power_intent_metrics=metrics,
        )
        first_payload = json.loads(client.calls[0]["payload"])
        # Same event list; the later "now" falls past the event end -> "off".
        events_off = [
            {
                "id": 88,
                "group_id": 3,
                "start": "2026-03-31T10:00:00+00:00",
                "end": "2026-03-31T10:30:00+00:00",
            }
        ]
        now_off = datetime(2026, 3, 31, 10, 35, 0, tzinfo=timezone.utc)
        _publish_group_power_intents(
            client=client,
            events=events_off,
            now=now_off,
            poll_interval=15,
            heartbeat_enabled=True,
            expiry_multiplier=3,
            min_expiry_seconds=90,
            last_power_intents=last_power_intents,
            power_intent_metrics=metrics,
        )
        second_payload = json.loads(client.calls[1]["payload"])
        self.assertNotEqual(first_payload["intent_id"], second_payload["intent_id"])
        self.assertEqual(second_payload["desired_state"], "off")
        self.assertEqual(metrics["intent_transitions_total"], 2)

    def test_republish_cached_power_intents(self):
        """Reconnect republish resends the cached retained payload as-is."""
        client = _FakeMqttClient(rc=0)
        metrics = {
            "intent_transitions_total": 0,
            "publish_success_total": 0,
            "publish_error_total": 0,
            "heartbeat_republish_total": 0,
            "retained_republish_total": 0,
        }
        cache = {
            5: {
                "fingerprint": "abc",
                "intent_id": "intent-1",
                "payload": '{"group_id":5,"desired_state":"on"}',
            }
        }
        _republish_cached_power_intents(client, cache, metrics)
        self.assertEqual(len(client.calls), 1)
        self.assertEqual(client.calls[0]["topic"], "infoscreen/groups/5/power/intent")
        self.assertEqual(client.calls[0]["qos"], 1)
        self.assertTrue(client.calls[0]["retain"])
        self.assertEqual(metrics["retained_republish_total"], 1)
# Allow running this test module directly (outside a test runner).
if __name__ == "__main__":
    unittest.main()

View File

@@ -0,0 +1,106 @@
import unittest
from datetime import datetime, timezone
from scheduler.db_utils import (
build_group_power_intent_body,
compute_group_power_intent_basis,
compute_group_power_intent_fingerprint,
)
class PowerIntentUtilsTests(unittest.TestCase):
    """Unit tests for the pure power-intent helpers in scheduler.db_utils."""

    def _basis_at(self, events, gid, now):
        # Thin wrapper to keep individual tests terse.
        return compute_group_power_intent_basis(events=events, group_id=gid, now_utc=now)

    def test_no_events_results_in_off(self):
        moment = datetime(2026, 3, 31, 10, 0, 0, tzinfo=timezone.utc)
        result = self._basis_at([], 7, moment)
        self.assertEqual(result["group_id"], 7)
        self.assertEqual(result["desired_state"], "off")
        self.assertEqual(result["reason"], "no_active_event")
        self.assertIsNone(result["event_window_start"])
        self.assertIsNone(result["event_window_end"])

    def test_active_event_results_in_on(self):
        moment = datetime(2026, 3, 31, 10, 5, 0, tzinfo=timezone.utc)
        schedule = [
            {
                "id": 101,
                "group_id": 2,
                "start": "2026-03-31T10:00:00+00:00",
                "end": "2026-03-31T10:30:00+00:00",
            }
        ]
        result = self._basis_at(schedule, 2, moment)
        self.assertEqual(result["desired_state"], "on")
        self.assertEqual(result["reason"], "active_event")
        self.assertEqual(result["event_window_start"], "2026-03-31T10:00:00Z")
        self.assertEqual(result["event_window_end"], "2026-03-31T10:30:00Z")
        self.assertEqual(result["active_event_ids"], [101])

    def test_adjacent_events_are_merged_without_off_blip(self):
        # "now" sits exactly at the seam between two back-to-back events.
        moment = datetime(2026, 3, 31, 10, 30, 0, tzinfo=timezone.utc)
        schedule = [
            {
                "id": 1,
                "group_id": 3,
                "start": "2026-03-31T10:00:00+00:00",
                "end": "2026-03-31T10:30:00+00:00",
            },
            {
                "id": 2,
                "group_id": 3,
                "start": "2026-03-31T10:30:00+00:00",
                "end": "2026-03-31T11:00:00+00:00",
            },
        ]
        result = self._basis_at(schedule, 3, moment)
        self.assertEqual(result["desired_state"], "on")
        self.assertEqual(result["event_window_start"], "2026-03-31T10:00:00Z")
        self.assertEqual(result["event_window_end"], "2026-03-31T11:00:00Z")

    def test_true_gap_results_in_off(self):
        # "now" falls in a genuine gap between two separate events.
        moment = datetime(2026, 3, 31, 10, 31, 0, tzinfo=timezone.utc)
        schedule = [
            {
                "id": 1,
                "group_id": 4,
                "start": "2026-03-31T10:00:00+00:00",
                "end": "2026-03-31T10:30:00+00:00",
            },
            {
                "id": 2,
                "group_id": 4,
                "start": "2026-03-31T10:35:00+00:00",
                "end": "2026-03-31T11:00:00+00:00",
            },
        ]
        result = self._basis_at(schedule, 4, moment)
        self.assertEqual(result["desired_state"], "off")
        self.assertEqual(result["reason"], "no_active_event")

    def test_fingerprint_is_stable_for_same_semantics(self):
        shared_basis = {
            "schema_version": "1.0",
            "group_id": 9,
            "desired_state": "on",
            "reason": "active_event",
            "event_window_start": "2026-03-31T10:00:00Z",
            "event_window_end": "2026-03-31T10:30:00Z",
            "active_event_ids": [12, 7],
        }
        fingerprints = [
            compute_group_power_intent_fingerprint(
                build_group_power_intent_body(shared_basis, poll_interval_sec=15)
            )
            for _ in range(2)
        ]
        self.assertEqual(fingerprints[0], fingerprints[1])
# Allow running this test module directly (outside a test runner).
if __name__ == "__main__":
    unittest.main()