models.py moved to models/models.py

refactor all imports
This commit is contained in:
2025-07-15 10:45:56 +00:00
parent 661d25d70c
commit f37744b31e
29 changed files with 379 additions and 462 deletions

View File

@@ -1,8 +1,27 @@
networks: networks:
infoscreen-net: infoscreen-net:
driver: bridge driver: bridge
services: services:
listener:
build:
context: ./listener
dockerfile: Dockerfile
image: infoscreen-listener:latest
container_name: infoscreen-listener
restart: unless-stopped
depends_on:
db:
condition: service_healthy
mqtt:
condition: service_healthy
environment:
- DB_URL=mysql+pymysql://${DB_USER}:${DB_PASSWORD}@db/${DB_NAME}
volumes:
- ./listener:/app:rw
networks:
- infoscreen-net
proxy: proxy:
image: nginx:1.27 image: nginx:1.27
container_name: infoscreen-proxy container_name: infoscreen-proxy

4
listener/.dockerignore Normal file
View File

@@ -0,0 +1,4 @@
__pycache__/
*.pyc
*.pyo
*.log

11
listener/Dockerfile Normal file
View File

@@ -0,0 +1,11 @@
# Listener Dockerfile
# Slim Python base keeps the image small. requirements.txt is copied and
# installed BEFORE the source tree so Docker's layer cache survives
# code-only changes (dependencies are re-installed only when the
# requirements file itself changes).
FROM python:3.13-slim
WORKDIR /app
COPY requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt
COPY . .
CMD ["python", "listener.py"]

64
listener/listener.py Normal file
View File

@@ -0,0 +1,64 @@
import os
import json
import logging
import paho.mqtt.client as mqtt
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from models.models import Client
# Logging: timestamped INFO-level messages to stdout (collected by Docker).
logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s [%(levelname)s] %(message)s')
# DB configuration (example: MariaDB/MySQL — override via the DB_URL env
# var; docker-compose injects the real credentials).
DB_URL = os.environ.get(
    "DB_URL", "mysql+pymysql://user:password@db/infoscreen")
engine = create_engine(DB_URL)
# Session factory bound to the engine; one short-lived session per message.
Session = sessionmaker(bind=engine)
# MQTT callback
def on_message(client, userdata, msg):
    """MQTT callback: register a newly discovered client in the database.

    Expects ``msg.payload`` to be a JSON object with at least a
    ``client_id`` key; all other fields are optional. Unknown clients are
    inserted as new ``Client`` rows, already-known clients are only
    logged. All errors are caught and logged so the MQTT network loop
    keeps running.
    """
    try:
        payload = json.loads(msg.payload.decode())
        logging.info(f"Discovery empfangen: {payload}")
        session = Session()
        try:
            # Check whether this client is already registered.
            existing = session.query(Client).filter_by(
                client_id=payload["client_id"]).first()
            if not existing:
                new_client = Client(
                    client_id=payload.get("client_id"),
                    hardware_token=payload.get("hardware_token"),
                    ip=payload.get("ip"),
                    type=payload.get("type"),
                    hostname=payload.get("hostname"),
                    os_version=payload.get("os_version"),
                    software_version=payload.get("software_version"),
                    # MACs arrive as a JSON list; stored as a CSV string.
                    macs=",".join(payload.get("macs", [])),
                    model=payload.get("model"),
                )
                session.add(new_client)
                session.commit()
                logging.info(f"Neuer Client registriert: {payload['client_id']}")
            else:
                logging.info(f"Client bereits bekannt: {payload['client_id']}")
        finally:
            # Fix: the original only closed the session on the success
            # path and leaked it whenever the query/insert raised.
            session.close()
    except Exception as e:
        logging.error(f"Fehler bei Verarbeitung: {e}")
def main():
    """Connect to the broker and process discovery messages forever."""
    # paho-mqtt 2.x (pinned in requirements.txt): the callback API version
    # is the first positional argument and must be a CallbackAPIVersion
    # enum member. Passing the bare int 2 as a keyword raises ValueError
    # ("Unsupported callback API version") at construction time.
    mqtt_client = mqtt.Client(mqtt.CallbackAPIVersion.VERSION2,
                              protocol=mqtt.MQTTv311)
    mqtt_client.on_message = on_message
    # "mqtt" resolves inside the docker-compose network.
    mqtt_client.connect("mqtt", 1883)
    mqtt_client.subscribe("infoscreen/discovery")
    logging.info("Listener gestartet und abonniert auf infoscreen/discovery")
    mqtt_client.loop_forever()


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,3 @@
paho-mqtt>=2.0
SQLAlchemy>=2.0
pymysql

1
models/__init__.py Normal file
View File

@@ -0,0 +1 @@
# models package for shared SQLAlchemy models

View File

@@ -1,11 +1,14 @@
# scheduler/db_utils.py # scheduler/db_utils.py
from models import Event from dotenv import load_dotenv
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from datetime import datetime
import os import os
import sys from datetime import datetime
sys.path.append('/workspace/server') from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from models.models import Event
# import sys
# sys.path.append('/workspace/server')
load_dotenv('/workspace/.env')
# DB-URL aus Umgebungsvariable oder Fallback # DB-URL aus Umgebungsvariable oder Fallback
DB_CONN = os.environ.get("DB_CONN", "mysql+pymysql://user:password@db/dbname") DB_CONN = os.environ.get("DB_CONN", "mysql+pymysql://user:password@db/dbname")

View File

@@ -1,5 +1,5 @@
# scheduler/scheduler.py # scheduler/scheduler.py
from db_utils import get_active_events from scheduler.db_utils import get_active_events
import paho.mqtt.client as mqtt import paho.mqtt.client as mqtt
import json import json
import datetime import datetime

View File

@@ -1,18 +1,29 @@
from logging.config import fileConfig
# isort: skip_file
import os
import sys
sys.path.insert(0, '/workspace')
print("sys.path:", sys.path)
print("models dir exists:", os.path.isdir('/workspace/models'))
print("models/models.py exists:", os.path.isfile('/workspace/models/models.py'))
print("models/__init__.py exists:",
os.path.isfile('/workspace/models/__init__.py'))
from models.models import Base
from dotenv import load_dotenv
from logging.config import fileConfig
from sqlalchemy import engine_from_config from sqlalchemy import engine_from_config
from sqlalchemy import pool from sqlalchemy import pool
from alembic import context from alembic import context
import sys print("sys.path:", sys.path)
sys.path.append('/workspace') print("models dir exists:", os.path.isdir('/workspace/models'))
import os print("models/models.py exists:", os.path.isfile('/workspace/models/models.py'))
from dotenv import load_dotenv print("models/__init__.py exists:",
from server.models import Base os.path.isfile('/workspace/models/__init__.py'))
# .env laden (optional) # .env laden (optional)
env_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../.env')) env_path = os.path.abspath(os.path.join(
os.path.dirname(__file__), '../../.env'))
print(f"Loading environment variables from: {env_path}") print(f"Loading environment variables from: {env_path}")
load_dotenv(env_path) load_dotenv(env_path)

View File

@@ -0,0 +1,109 @@
"""initial
Revision ID: 3d15c3cac7b6
Revises:
Create Date: 2025-07-15 09:43:16.209294
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '3d15c3cac7b6'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('client_groups',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('name', sa.String(length=100), nullable=False),
sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('event_media',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('media_type', sa.Enum('pdf', 'ppt', 'pptx', 'odp', 'mp4', 'avi', 'mkv', 'mov', 'wmv', 'flv', 'webm', 'mpg', 'mpeg', 'ogv', 'jpg', 'jpeg', 'png', 'gif', 'bmp', 'tiff', 'svg', 'html', name='mediatype'), nullable=False),
sa.Column('url', sa.String(length=255), nullable=False),
sa.Column('file_path', sa.String(length=255), nullable=True),
sa.Column('message_content', sa.Text(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('users',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('username', sa.String(length=50), nullable=False),
sa.Column('password_hash', sa.String(length=128), nullable=False),
sa.Column('role', sa.Enum('user', 'admin', 'superadmin', name='userrole'), nullable=False),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.Column('updated_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=True)
op.create_table('clients',
sa.Column('uuid', sa.String(length=36), nullable=False),
sa.Column('hardware_hash', sa.String(length=64), nullable=False),
sa.Column('location', sa.String(length=100), nullable=True),
sa.Column('ip_address', sa.String(length=45), nullable=True),
sa.Column('registration_time', sa.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('last_alive', sa.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('group_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['group_id'], ['client_groups.id'], ),
sa.PrimaryKeyConstraint('uuid')
)
op.create_index(op.f('ix_clients_hardware_hash'), 'clients', ['hardware_hash'], unique=False)
op.create_index(op.f('ix_clients_ip_address'), 'clients', ['ip_address'], unique=False)
op.create_table('events',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('group_id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(length=100), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('start', sa.TIMESTAMP(timezone=True), nullable=False),
sa.Column('end', sa.TIMESTAMP(timezone=True), nullable=False),
sa.Column('event_type', sa.Enum('presentation', 'website', 'video', 'message', 'other', 'webuntis', name='eventtype'), nullable=False),
sa.Column('event_media_id', sa.Integer(), nullable=True),
sa.Column('autoplay', sa.Boolean(), nullable=True),
sa.Column('loop', sa.Boolean(), nullable=True),
sa.Column('volume', sa.Float(), nullable=True),
sa.Column('slideshow_interval', sa.Integer(), nullable=True),
sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.Column('updated_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.Column('created_by', sa.Integer(), nullable=False),
sa.Column('updated_by', sa.Integer(), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(['created_by'], ['users.id'], ),
sa.ForeignKeyConstraint(['event_media_id'], ['event_media.id'], ),
sa.ForeignKeyConstraint(['group_id'], ['client_groups.id'], ),
sa.ForeignKeyConstraint(['updated_by'], ['users.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_events_end'), 'events', ['end'], unique=False)
op.create_index(op.f('ix_events_group_id'), 'events', ['group_id'], unique=False)
op.create_index(op.f('ix_events_start'), 'events', ['start'], unique=False)
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_events_start'), table_name='events')
op.drop_index(op.f('ix_events_group_id'), table_name='events')
op.drop_index(op.f('ix_events_end'), table_name='events')
op.drop_table('events')
op.drop_index(op.f('ix_clients_ip_address'), table_name='clients')
op.drop_index(op.f('ix_clients_hardware_hash'), table_name='clients')
op.drop_table('clients')
op.drop_index(op.f('ix_users_username'), table_name='users')
op.drop_table('users')
op.drop_table('event_media')
op.drop_table('client_groups')
# ### end Alembic commands ###

View File

@@ -1,57 +0,0 @@
"""Add client_groups table and group_id to clients
Revision ID: 8a45ec34f84d
Revises: c1178d5fa549
Create Date: 2025-06-26 18:40:10.988281
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '8a45ec34f84d'
down_revision: Union[str, None] = 'c1178d5fa549'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# 1. Neue Tabelle anlegen
op.create_table(
'client_groups',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('name', sa.String(length=100), nullable=False),
sa.Column('created_at', sa.TIMESTAMP(timezone=True),
server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
# 2. Gruppe "Nicht zugeordnet" mit id=0 anlegen
op.execute(
"INSERT INTO client_groups (id, name, is_active) VALUES (0, 'Nicht zugeordnet', true)")
# 3. Spalte group_id mit Default 0 hinzufügen
op.add_column('clients', sa.Column('group_id', sa.Integer(),
nullable=False, server_default='0'))
# 4. Für alle bestehenden Clients group_id auf 0 setzen (optional, falls Daten vorhanden)
op.execute("UPDATE clients SET group_id = 0 WHERE group_id IS NULL")
# 5. Foreign Key Constraint setzen
op.create_foreign_key(None, 'clients', 'client_groups', [
'group_id'], ['id'])
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, 'clients', type_='foreignkey')
op.drop_column('clients', 'group_id')
op.drop_table('client_groups')
# ### end Alembic commands ###

View File

@@ -1,34 +0,0 @@
"""Update media_type enum for event_media
Revision ID: a0f3f9325e05
Revises: bb29b5524f5c
Create Date: 2025-07-05 07:49:37.696162
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'a0f3f9325e05'
down_revision: Union[str, None] = 'bb29b5524f5c'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade():
op.execute("""
ALTER TABLE event_media MODIFY COLUMN media_type ENUM(
'pdf','ppt','pptx','odp',
'mp4','avi','mkv','mov','wmv','flv','webm','mpg','mpeg','ogv',
'jpg','jpeg','png','gif','bmp','tiff','svg',
'html'
) NOT NULL;
""")
def downgrade() -> None:
"""Downgrade schema."""
pass

View File

@@ -1,46 +0,0 @@
"""Refactor EventMedia: move playback fields to events, add MediaType enum, remove order
Revision ID: bb29b5524f5c
Revises: fadba5bc526c
Create Date: 2025-07-05 05:13:31.837339
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision: str = 'bb29b5524f5c'
down_revision: Union[str, None] = 'fadba5bc526c'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('event_media', 'autoplay')
op.drop_column('event_media', 'order')
op.drop_column('event_media', 'loop')
op.drop_column('event_media', 'volume')
op.add_column('events', sa.Column('autoplay', sa.Boolean(), nullable=True))
op.add_column('events', sa.Column('loop', sa.Boolean(), nullable=True))
op.add_column('events', sa.Column('volume', sa.Float(), nullable=True))
op.add_column('events', sa.Column('slideshow_interval', sa.Integer(), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('events', 'slideshow_interval')
op.drop_column('events', 'volume')
op.drop_column('events', 'loop')
op.drop_column('events', 'autoplay')
op.add_column('event_media', sa.Column('volume', mysql.FLOAT(), nullable=True))
op.add_column('event_media', sa.Column('loop', mysql.TINYINT(display_width=1), autoincrement=False, nullable=True))
op.add_column('event_media', sa.Column('order', mysql.INTEGER(display_width=11), autoincrement=False, nullable=True))
op.add_column('event_media', sa.Column('autoplay', mysql.TINYINT(display_width=1), autoincrement=False, nullable=True))
# ### end Alembic commands ###

View File

@@ -1,88 +0,0 @@
"""event_db
Revision ID: c1178d5fa549
Revises: f7dd3165f238
Create Date: 2025-06-08 12:29:28.366231
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision: str = 'c1178d5fa549'
down_revision: Union[str, None] = 'f7dd3165f238'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('events',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('client_uuid', sa.String(length=36), nullable=False),
sa.Column('title', sa.String(length=100), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('start', sa.TIMESTAMP(timezone=True), nullable=False),
sa.Column('end', sa.TIMESTAMP(timezone=True), nullable=False),
sa.Column('event_type', sa.Enum('presentation', 'website', 'video', 'message', 'other', 'webuntis', name='eventtype'), nullable=False),
sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.Column('updated_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.Column('created_by', sa.Integer(), nullable=False),
sa.Column('updated_by', sa.Integer(), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(['client_uuid'], ['clients.uuid'], ),
sa.ForeignKeyConstraint(['created_by'], ['users.id'], ),
sa.ForeignKeyConstraint(['updated_by'], ['users.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_events_client_uuid'), 'events', ['client_uuid'], unique=False)
op.create_index(op.f('ix_events_end'), 'events', ['end'], unique=False)
op.create_index(op.f('ix_events_start'), 'events', ['start'], unique=False)
op.create_table('event_media',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('event_id', sa.Integer(), nullable=False),
sa.Column('media_type', sa.Enum('presentation', 'website', 'video', 'message', 'other', 'webuntis', name='eventtype'), nullable=False),
sa.Column('url', sa.String(length=255), nullable=False),
sa.Column('order', sa.Integer(), nullable=True),
sa.Column('autoplay', sa.Boolean(), nullable=True),
sa.Column('loop', sa.Boolean(), nullable=True),
sa.Column('volume', sa.Float(), nullable=True),
sa.ForeignKeyConstraint(['event_id'], ['events.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.add_column('clients', sa.Column('is_active', sa.Boolean(), nullable=False))
op.create_index(op.f('ix_clients_hardware_hash'), 'clients', ['hardware_hash'], unique=False)
op.create_index(op.f('ix_clients_ip_address'), 'clients', ['ip_address'], unique=False)
op.add_column('users', sa.Column('is_active', sa.Boolean(), nullable=False))
op.alter_column('users', 'password_hash',
existing_type=mysql.VARCHAR(length=60),
type_=sa.String(length=128),
existing_nullable=False)
op.drop_index(op.f('username'), table_name='users')
op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=True)
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_users_username'), table_name='users')
op.create_index(op.f('username'), 'users', ['username'], unique=True)
op.alter_column('users', 'password_hash',
existing_type=sa.String(length=128),
type_=mysql.VARCHAR(length=60),
existing_nullable=False)
op.drop_column('users', 'is_active')
op.drop_index(op.f('ix_clients_ip_address'), table_name='clients')
op.drop_index(op.f('ix_clients_hardware_hash'), table_name='clients')
op.drop_column('clients', 'is_active')
op.drop_table('event_media')
op.drop_index(op.f('ix_events_start'), table_name='events')
op.drop_index(op.f('ix_events_end'), table_name='events')
op.drop_index(op.f('ix_events_client_uuid'), table_name='events')
op.drop_table('events')
# ### end Alembic commands ###

View File

@@ -1,46 +0,0 @@
"""Refactor Event/EventMedia relation
Revision ID: c571e4214528
Revises: d490cbfdea65
Create Date: 2025-07-04 06:08:57.004474
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision: str = 'c571e4214528'
down_revision: Union[str, None] = 'd490cbfdea65'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.create_foreign_key(None, 'clients', 'client_groups', ['group_id'], ['id'])
op.drop_constraint(op.f('event_media_ibfk_1'), 'event_media', type_='foreignkey')
op.drop_column('event_media', 'event_id')
op.add_column('events', sa.Column('event_media_id', sa.Integer(), nullable=True))
op.alter_column('events', 'group_id',
existing_type=mysql.INTEGER(display_width=11),
nullable=False)
op.create_foreign_key(None, 'events', 'event_media', ['event_media_id'], ['id'])
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, 'events', type_='foreignkey')
op.alter_column('events', 'group_id',
existing_type=mysql.INTEGER(display_width=11),
nullable=True)
op.drop_column('events', 'event_media_id')
op.add_column('event_media', sa.Column('event_id', mysql.INTEGER(display_width=11), autoincrement=False, nullable=False))
op.create_foreign_key(op.f('event_media_ibfk_1'), 'event_media', 'events', ['event_id'], ['id'])
op.drop_constraint(None, 'clients', type_='foreignkey')
# ### end Alembic commands ###

View File

@@ -1,91 +0,0 @@
"""Migrate events to use group_id instead of client_uuid
Revision ID: d490cbfdea65
Revises: 8a45ec34f84d
Create Date: 2025-06-30 19:16:29.138440
ACHTUNG:
Ein Downgrade dieser Migration ist NICHT verlustfrei möglich, wenn mehrere Clients einer Gruppe zugeordnet sind.
Beim Downgrade wird jedem Event willkürlich ein (der erste gefundene) Client der Gruppe zugeordnet.
Die ursprüngliche Zuordnung von Events zu Clients kann dadurch NICHT wiederhergestellt werden!
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'd490cbfdea65'
down_revision: Union[str, None] = '8a45ec34f84d'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# 1. Neue Spalte group_id (vorübergehend nullable)
op.add_column('events', sa.Column('group_id', sa.Integer(), nullable=True))
op.create_foreign_key(
'fk_events_group_id_client_groups',
'events', 'client_groups',
['group_id'], ['id']
)
op.create_index('ix_events_group_id', 'events', ['group_id'])
# 2. group_id für alle Events anhand client_uuid setzen
op.execute("""
UPDATE events
SET group_id = (
SELECT group_id FROM clients WHERE clients.uuid = events.client_uuid
)
""")
# 3. client_uuid entfernen
op.drop_constraint('events_ibfk_1', 'events',
type_='foreignkey') # Name ggf. anpassen!
# Name ggf. anpassen!
op.drop_index('ix_events_client_uuid', table_name='events')
op.drop_column('events', 'client_uuid')
# 4. group_id auf NOT NULL setzen
op.alter_column(
'events',
'group_id',
existing_type=sa.Integer(),
nullable=False
)
def downgrade() -> None:
"""Downgrade schema.
ACHTUNG:
Ein Downgrade ist nicht verlustfrei möglich, wenn mehrere Clients pro Gruppe existieren.
Es wird jeweils ein beliebiger (erster) Client der Gruppe für die Zuordnung gewählt.
"""
# 1. client_uuid wieder hinzufügen
op.add_column('events', sa.Column(
'client_uuid', sa.String(length=36), nullable=True))
op.create_foreign_key(
'fk_events_client_uuid_clients',
'events', 'clients',
['client_uuid'], ['uuid']
)
op.create_index('ix_events_client_uuid', 'events', ['client_uuid'])
# 2. client_uuid anhand group_id zurücksetzen (nur möglich, wenn 1:1-Beziehung!)
# Falls mehrere Clients pro Gruppe: Datenverlust möglich!
# Hier ggf. eine Strategie überlegen oder leerlassen.
op.execute("""
UPDATE events
SET client_uuid = (
SELECT uuid FROM clients WHERE clients.group_id = events.group_id LIMIT 1
)
""")
# 3. group_id entfernen
op.drop_constraint('fk_events_group_id_client_groups',
'events', type_='foreignkey')
op.drop_index('ix_events_group_id', table_name='events')
op.drop_column('events', 'group_id')

View File

@@ -1,32 +0,0 @@
"""initial
Revision ID: f7dd3165f238
Revises:
Create Date: 2025-06-08 12:25:27.174339
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'f7dd3165f238'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###

View File

@@ -1,34 +0,0 @@
"""Add Filepath and Message-Content to EventMedia
Revision ID: fadba5bc526c
Revises: c571e4214528
Create Date: 2025-07-05 04:46:13.542887
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'fadba5bc526c'
down_revision: Union[str, None] = 'c571e4214528'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('event_media', sa.Column('file_path', sa.String(length=255), nullable=True))
op.add_column('event_media', sa.Column('message_content', sa.Text(), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('event_media', 'message_content')
op.drop_column('event_media', 'file_path')
# ### end Alembic commands ###

View File

@@ -1,6 +1,6 @@
from sqlalchemy import create_engine from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker from sqlalchemy.orm import sessionmaker
from models import Client from models.models import Client
from dotenv import load_dotenv from dotenv import load_dotenv
import os import os
from datetime import datetime, timedelta from datetime import datetime, timedelta

View File

@@ -1,6 +1,6 @@
from sqlalchemy import create_engine from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker from sqlalchemy.orm import sessionmaker
from models import Event, EventMedia, EventType, Client from models.models import Event, EventMedia, EventType, Client
from dotenv import load_dotenv from dotenv import load_dotenv
import os import os
from datetime import datetime, timedelta from datetime import datetime, timedelta

View File

@@ -1,6 +1,8 @@
import sys
sys.path.insert(0, '/workspace')
from sqlalchemy import create_engine, text from sqlalchemy import create_engine, text
from sqlalchemy.orm import sessionmaker from sqlalchemy.orm import sessionmaker
from models import Base, User, UserRole from models.models import Base, User, UserRole
import os import os
from dotenv import load_dotenv from dotenv import load_dotenv
import bcrypt import bcrypt

View File

@@ -8,7 +8,7 @@ from datetime import datetime
from paho.mqtt import client as mqtt_client from paho.mqtt import client as mqtt_client
from sqlalchemy import create_engine, func from sqlalchemy import create_engine, func
from sqlalchemy.orm import sessionmaker from sqlalchemy.orm import sessionmaker
from models import Client, Base from models.models import Client, Base
from helpers.check_folder import ensure_folder_exists from helpers.check_folder import ensure_folder_exists
import shutil import shutil

View File

@@ -1,7 +1,7 @@
import sys import sys
sys.path.append('/workspace') sys.path.append('/workspace')
from flask import Blueprint, request, jsonify from flask import Blueprint, request, jsonify
from models import Client, ClientGroup from models.models import Client, ClientGroup
from database import Session from database import Session
clients_bp = Blueprint("clients", __name__, url_prefix="/api/clients") clients_bp = Blueprint("clients", __name__, url_prefix="/api/clients")

View File

@@ -1,7 +1,7 @@
from re import A from re import A
from flask import Blueprint, request, jsonify, send_from_directory from flask import Blueprint, request, jsonify, send_from_directory
from database import Session from database import Session
from models import EventMedia, MediaType from models.models import EventMedia, MediaType
import os import os
eventmedia_bp = Blueprint('eventmedia', __name__, url_prefix='/api/eventmedia') eventmedia_bp = Blueprint('eventmedia', __name__, url_prefix='/api/eventmedia')

View File

@@ -1,6 +1,6 @@
from flask import Blueprint, request, jsonify from flask import Blueprint, request, jsonify
from database import Session from database import Session
from models import Event, EventMedia, MediaType from models.models import Event, EventMedia, MediaType
from datetime import datetime, timezone from datetime import datetime, timezone
from sqlalchemy import and_ from sqlalchemy import and_
import sys import sys

View File

@@ -1,5 +1,5 @@
from database import Session from database import Session
from models import ClientGroup from models.models import ClientGroup
from flask import Blueprint, request, jsonify from flask import Blueprint, request, jsonify
from sqlalchemy import func from sqlalchemy import func
import sys import sys

View File

@@ -1,8 +1,8 @@
# server/wsgi.py # server/wsgi.py
from routes.eventmedia import eventmedia_bp from server.routes.eventmedia import eventmedia_bp
from routes.events import events_bp from server.routes.events import events_bp
from routes.groups import groups_bp from server.routes.groups import groups_bp
from routes.clients import clients_bp from server.routes.clients import clients_bp
from database import Session, engine from database import Session, engine
from flask import Flask, jsonify, send_from_directory, request from flask import Flask, jsonify, send_from_directory, request
import glob import glob

View File

@@ -1,21 +1,139 @@
# simclient/simclient.py # simclient/simclient.py
import time import time
import uuid
import json
import socket
import hashlib
import paho.mqtt.client as mqtt import paho.mqtt.client as mqtt
import os
import re
import platform
import logging
DEBUG_MODE = True  # Set to False for production
# Logging configuration: always log to a file next to this script;
# additionally mirror everything to the console while debugging.
LOG_DIR = os.path.dirname(os.path.abspath(__file__))
LOG_PATH = os.path.join(LOG_DIR, "simclient.log")
log_handlers = [logging.FileHandler(LOG_PATH, encoding="utf-8")]
if DEBUG_MODE:
    log_handlers.append(logging.StreamHandler())
logging.basicConfig(
    level=logging.DEBUG if DEBUG_MODE else logging.INFO,
    format="%(asctime)s [%(levelname)s] %(message)s",
    handlers=log_handlers
)
def on_message(client, userdata, msg): def on_message(client, userdata, msg):
print(f"Empfangen: {msg.topic} {msg.payload.decode()}") logging.info(f"Empfangen: {msg.topic} {msg.payload.decode()}")
def get_mac_addresses():
    """Collect the MAC addresses of all network interfaces (Linux only).

    Reads /sys/class/net/<iface>/address for every interface and returns
    the non-zero addresses as a sorted list. Interfaces that cannot be
    read are skipped; on a non-Linux host the result is an empty list.
    """
    found = set()
    try:
        # First (and only) level of /sys/class/net/ lists the interfaces.
        _, interfaces, _ = next(os.walk('/sys/class/net/'))
        for iface_name in interfaces:
            try:
                with open(f'/sys/class/net/{iface_name}/address') as fh:
                    addr = fh.read().strip()
            except Exception:
                continue
            if addr and addr != '00:00:00:00:00:00':
                found.add(addr)
    except Exception:
        pass
    return sorted(found)
def get_board_serial():
    """Return the board serial number, or "unknown" if unavailable.

    Raspberry Pi exposes it in /proc/cpuinfo; generic PCs in
    /sys/class/dmi/id/product_serial (often root-readable only).
    """
    serial = None
    try:
        with open('/proc/cpuinfo') as cpuinfo:
            serial_values = (line.split(':')[1].strip()
                             for line in cpuinfo
                             if line.lower().startswith('serial'))
            serial = next(serial_values, None)
    except Exception:
        pass
    if not serial:
        try:
            with open('/sys/class/dmi/id/product_serial') as dmi:
                serial = dmi.read().strip()
        except Exception:
            pass
    return serial or "unknown"
def get_ip():
    """Best-effort detection of the primary local IP (not 127.0.0.1).

    Opens a UDP socket "towards" a public address — no packet is actually
    sent; connect() merely makes the OS choose the outgoing interface,
    whose address is then read back. Returns "unknown" when no route is
    available.
    """
    try:
        # Fix: the original leaked the socket whenever connect() raised;
        # the with-block guarantees closure on every path.
        with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
            s.connect(("8.8.8.8", 80))
            return s.getsockname()[0]
    except Exception:
        return "unknown"
def get_hardware_token():
    """Derive a stable, privacy-preserving hardware identifier.

    Combines the board serial with all MAC addresses and returns the
    SHA-256 hex digest, so raw hardware identifiers never leave the
    device.
    """
    serial_part = get_board_serial()
    mac_part = "_".join(get_mac_addresses())
    return hashlib.sha256(f"{serial_part}_{mac_part}".encode()).hexdigest()
def get_model():
    """Return a human-readable hardware model string, or "unknown".

    Checks the device-tree model first (Raspberry Pi / ARM boards), then
    the DMI product name (generic PCs).
    """
    candidate_paths = (
        '/proc/device-tree/model',
        '/sys/class/dmi/id/product_name',
    )
    for path in candidate_paths:
        try:
            if os.path.exists(path):
                with open(path) as fh:
                    return fh.read().strip()
        except Exception:
            break
    return "unknown"
SOFTWARE_VERSION = "1.0.0"  # Optional: bump on new releases


def send_discovery(client, client_id, hardware_token, ip_addr):
    """Publish this client's identity on the discovery topic.

    The listener service consumes these messages and registers the
    client in the database.
    """
    identity = {
        "client_id": client_id,
        "hardware_token": hardware_token,
        "ip": ip_addr,
        "type": "infoscreen",
        "hostname": socket.gethostname(),
        "os_version": platform.platform(),
        "software_version": SOFTWARE_VERSION,
        "macs": get_mac_addresses(),
        "model": get_model(),
    }
    client.publish("infoscreen/discovery", json.dumps(identity))
    logging.info(f"Discovery-Nachricht gesendet: {identity}")
def main(): def main():
client = mqtt.Client() client_id = str(uuid.uuid4())
hardware_token = get_hardware_token()
ip_addr = get_ip()
client = mqtt.Client(protocol=mqtt.MQTTv311, callback_api_version=2)
client.on_message = on_message client.on_message = on_message
# Im Docker-Netzwerk: Hostname des MQTT-Brokers ist "mqtt"
client.connect("mqtt", 1883) client.connect("mqtt", 1883)
client.subscribe("infoscreen/+/now") client.subscribe(f"infoscreen/{client_id}/config")
send_discovery(client, client_id, hardware_token, ip_addr)
while True: while True:
# Heartbeat senden # Heartbeat senden
client.publish("infoscreen/client1/heartbeat", "alive") client.publish(f"infoscreen/{client_id}/heartbeat", "alive")
logging.debug("Heartbeat gesendet.")
client.loop(timeout=1.0) client.loop(timeout=1.0)
time.sleep(5) time.sleep(5)