Initial commit - copied workspace after database cleanup

This commit is contained in:
RobbStarkAustria
2025-10-10 15:20:14 +00:00
commit 1efe40a03b
142 changed files with 23625 additions and 0 deletions

61
server/Dockerfile Normal file
View File

@@ -0,0 +1,61 @@
# syntax=docker/dockerfile:1
# server/Dockerfile
# Multi-stage build for a minimal and secure production image.

# Stage 1: builder - installs Python dependencies (needs gcc + MariaDB headers).
FROM python:3.13-slim AS builder
WORKDIR /app
# Build-time system dependencies only; apt lists removed in the same layer.
RUN apt-get update \
    && apt-get install -y --no-install-recommends \
       gcc libmariadb-dev libmariadb-dev-compat \
    && rm -rf /var/lib/apt/lists/*
# Copy only requirements.txt first to make optimal use of the Docker layer cache.
# Path is relative to the build context (leading slash removed for consistency
# with the COPY lines in the final stage).
COPY server/requirements.txt .
# Install the packages into a separate prefix so the final stage can copy them.
RUN pip install --no-cache-dir --prefix="/install" -r requirements.txt

# Stage 2: final - the actual production image.
FROM python:3.13-slim
WORKDIR /app
# Runtime-only system dependencies (MariaDB client lib, locales, curl).
RUN apt-get update \
    && apt-get install -y --no-install-recommends \
       curl libmariadb-dev-compat locales \
    && rm -rf /var/lib/apt/lists/*
# Generate the German locale used by the application.
RUN sed -i 's/# de_DE.UTF-8 UTF-8/de_DE.UTF-8 UTF-8/' /etc/locale.gen \
    && locale-gen
ENV LANG=de_DE.UTF-8 \
    LC_ALL=de_DE.UTF-8
# Create the non-root user BEFORE copying the application so COPY --chown can
# set ownership directly; a trailing `chown -R` would duplicate every file in
# an extra layer and bloat the image.
ARG USER_ID=1000
ARG GROUP_ID=1000
RUN groupadd -g ${GROUP_ID} infoscreen_taa \
    && useradd -u ${USER_ID} -g ${GROUP_ID} \
       --shell /bin/bash --create-home infoscreen_taa
# Python packages built in the builder stage.
COPY --from=builder /install /usr/local
# Application code, owned by the runtime user.
COPY --chown=${USER_ID}:${GROUP_ID} server/ ./server
COPY --chown=${USER_ID}:${GROUP_ID} models/ ./models
USER infoscreen_taa
# Documentation only: the API listens on 8000 (publish with -p at run time).
EXPOSE 8000
# Exec-form CMD so gunicorn runs as PID 1 and receives SIGTERM directly.
CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:8000", "server.wsgi:app"]

42
server/Dockerfile.dev Normal file
View File

@@ -0,0 +1,42 @@
# File: server/Dockerfile.dev
# Optimized for development inside the dev container.
# ==========================================
FROM python:3.13-slim
# Creating the non-root user and configuring the locale are not strictly
# required for the dev container, since VS Code connects as 'root'
# (per devcontainer.json). They do no harm, though.
ARG USER_ID=1000
ARG GROUP_ID=1000
RUN apt-get update && apt-get install -y --no-install-recommends locales curl git \
    && groupadd -g ${GROUP_ID} infoscreen_taa \
    && useradd -u ${USER_ID} -g ${GROUP_ID} --shell /bin/bash --create-home infoscreen_taa \
    && sed -i 's/# de_DE.UTF-8 UTF-8/de_DE.UTF-8 UTF-8/' /etc/locale.gen \
    && locale-gen \
    && rm -rf /var/lib/apt/lists/*
ENV LANG=de_DE.UTF-8 \
    LC_ALL=de_DE.UTF-8
# Set the working directory to the workspace root, matching the mounts.
WORKDIR /app
# Copy the requirements files into the matching subdirectory.
# NOTE(review): paths below imply the build context is the repository root
# (they start with 'server/'), not the 'server' directory as an earlier
# comment claimed — confirm against the compose build configuration.
COPY server/requirements.txt server/requirements-dev.txt ./server/
# Install the Python dependencies (runtime + dev tooling).
RUN pip install --upgrade pip \
    && pip install --no-cache-dir -r server/requirements.txt \
    && pip install --no-cache-dir -r server/requirements-dev.txt
# Copying the application code is unnecessary: the directory is bind-mounted.
# Expose the ports for the Flask API and the debugger.
EXPOSE 8000 5678
# The actual start command is defined in docker-compose.override.yml;
# this default CMD serves as a fallback (Flask dev server under debugpy).
CMD ["python", "-m", "debugpy", "--listen", "0.0.0.0:5678", "-m", "flask", "run", "--host=0.0.0.0", "--port=8000"]

8
server/__init__.py Normal file
View File

@@ -0,0 +1,8 @@
"""Server package initializer.
Expose submodules required by external importers (e.g., RQ string paths).
"""
# Ensure 'server.worker' is available as an attribute of the 'server' package
# so that RQ can resolve 'server.worker.convert_event_media_to_pdf'.
from . import worker # noqa: F401

141
server/alembic.ini Normal file
View File

@@ -0,0 +1,141 @@
# A generic, single database configuration.
[alembic]
# path to migration scripts.
# this is typically a path given in POSIX (e.g. forward slashes)
# format, relative to the token %(here)s which refers to the location of this
# ini file
script_location = %(here)s/alembic
# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory. for multiple paths, the path separator
# is defined by "path_separator" below.
prepend_sys_path = .
# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
# Any required deps can installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =
# max length of characters to apply to the "slug" field
# truncate_slug_length = 40
# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false
# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false
# version location specification; This defaults
# to <script_location>/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "path_separator"
# below.
# version_locations = %(here)s/bar:%(here)s/bat:%(here)s/alembic/versions
# path_separator; This indicates what character is used to split lists of file
# paths, including version_locations and prepend_sys_path within configparser
# files such as alembic.ini.
# The default rendered in new alembic.ini files is "os", which uses os.pathsep
# to provide os-dependent path splitting.
#
# Note that in order to support legacy alembic.ini files, this default does NOT
# take place if path_separator is not present in alembic.ini. If this
# option is omitted entirely, fallback logic is as follows:
#
# 1. Parsing of the version_locations option falls back to using the legacy
# "version_path_separator" key, which if absent then falls back to the legacy
# behavior of splitting on spaces and/or commas.
# 2. Parsing of the prepend_sys_path option falls back to the legacy
# behavior of splitting on spaces, commas, or colons.
#
# Valid values for path_separator are:
#
# path_separator = :
# path_separator = ;
# path_separator = space
# path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
path_separator = os
# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false
# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8
# database URL. This is consumed by the user-maintained env.py script only.
# other means of configuring database URLs may be customized within the env.py
# file.
# sqlalchemy.url = driver://user:pass@localhost/dbname
[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples
# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME
# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = check --fix REVISION_SCRIPT_FILENAME
# Logging configuration. This is also consumed by the user-maintained
# env.py script only.
[loggers]
keys = root,sqlalchemy,alembic
[handlers]
keys = console
[formatters]
keys = generic
[logger_root]
level = WARNING
handlers = console
qualname =
[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine
[logger_alembic]
level = INFO
handlers =
qualname = alembic
[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic
[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

1
server/alembic/README Normal file
View File

@@ -0,0 +1 @@
Generic single-database configuration.

115
server/alembic/env.py Normal file
View File

@@ -0,0 +1,115 @@
# isort: skip_file
from alembic import context
from sqlalchemy import pool
from sqlalchemy import engine_from_config
from logging.config import fileConfig
from dotenv import load_dotenv
from models.models import Base
import os
import sys

# Make the workspace root importable so 'models' resolves inside the container.
sys.path.insert(0, '/workspace')

# Diagnostic output for debugging import problems in the container.
# (This block was previously emitted twice verbatim; de-duplicated.)
print("sys.path:", sys.path)
print("models dir exists:", os.path.isdir('/workspace/models'))
print("models/models.py exists:", os.path.isfile('/workspace/models/models.py'))
print("models/__init__.py exists:",
      os.path.isfile('/workspace/models/__init__.py'))

# Load environment variables from the repository-level .env file (optional).
env_path = os.path.abspath(os.path.join(
    os.path.dirname(__file__), '../../.env'))
print(f"Loading environment variables from: {env_path}")
load_dotenv(env_path)

# Prefer a complete connection string; otherwise assemble one from parts.
DB_CONN = os.getenv("DB_CONN")
if DB_CONN:
    DATABASE_URL = DB_CONN
else:
    # Database credentials from .env
    DB_USER = os.getenv("DB_USER")
    DB_PASSWORD = os.getenv("DB_PASSWORD")
    DB_HOST = os.getenv("DB_HOST", "db")  # default matches the compose service name
    DB_PORT = os.getenv("DB_PORT", "3306")
    DB_NAME = os.getenv("DB_NAME")
    DATABASE_URL = f"mysql+pymysql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}:{DB_PORT}/{DB_NAME}"

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# NOTE(review): this prints the full URL including the password to stdout —
# consider masking credentials before logging.
print(f"Using DATABASE_URL: {DATABASE_URL}")
config.set_main_option("sqlalchemy.url", DATABASE_URL)

# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the Alembic context with only a database URL (no Engine),
    so no DBAPI needs to be available; calls to context.execute() emit
    the generated SQL to the script output instead of a live connection.
    """
    offline_url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=offline_url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Builds an Engine from the [alembic] config section and binds a live
    connection to the migration context.
    """
    engine = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    with engine.connect() as conn:
        context.configure(connection=conn,
                          target_metadata=target_metadata)
        with context.begin_transaction():
            context.run_migrations()
# Entry point: Alembic determines offline/online mode before loading env.py.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View File

@@ -0,0 +1,28 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
"""Upgrade schema."""
${upgrades if upgrades else "pass"}
def downgrade() -> None:
"""Downgrade schema."""
${downgrades if downgrades else "pass"}

View File

@@ -0,0 +1,36 @@
"""Rename location to description in client_groups, add description to clients
Revision ID: 0c47280d3e2d
Revises: 3a09ef909689
Create Date: 2025-07-16 08:47:00.355445
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision: str = '0c47280d3e2d'
down_revision: Union[str, None] = '3a09ef909689'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('client_groups', sa.Column('description', sa.String(length=255), nullable=True))
op.drop_column('client_groups', 'location')
op.add_column('clients', sa.Column('description', sa.String(length=255), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('clients', 'description')
op.add_column('client_groups', sa.Column('location', mysql.VARCHAR(length=100), nullable=True))
op.drop_column('client_groups', 'description')
# ### end Alembic commands ###

View File

@@ -0,0 +1,56 @@
"""Update clients table for new fields
Revision ID: 207f5b190f93
Revises: 3d15c3cac7b6
Create Date: 2025-07-15 14:12:42.427274
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision: str = '207f5b190f93'
down_revision: Union[str, None] = '3d15c3cac7b6'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('clients', sa.Column('hardware_token', sa.String(length=64), nullable=True))
op.add_column('clients', sa.Column('ip', sa.String(length=45), nullable=True))
op.add_column('clients', sa.Column('type', sa.String(length=50), nullable=True))
op.add_column('clients', sa.Column('hostname', sa.String(length=100), nullable=True))
op.add_column('clients', sa.Column('os_version', sa.String(length=100), nullable=True))
op.add_column('clients', sa.Column('software_version', sa.String(length=100), nullable=True))
op.add_column('clients', sa.Column('macs', sa.String(length=255), nullable=True))
op.add_column('clients', sa.Column('model', sa.String(length=100), nullable=True))
op.drop_index(op.f('ix_clients_hardware_hash'), table_name='clients')
op.drop_index(op.f('ix_clients_ip_address'), table_name='clients')
op.drop_column('clients', 'location')
op.drop_column('clients', 'hardware_hash')
op.drop_column('clients', 'ip_address')
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('clients', sa.Column('ip_address', mysql.VARCHAR(length=45), nullable=True))
op.add_column('clients', sa.Column('hardware_hash', mysql.VARCHAR(length=64), nullable=False))
op.add_column('clients', sa.Column('location', mysql.VARCHAR(length=100), nullable=True))
op.create_index(op.f('ix_clients_ip_address'), 'clients', ['ip_address'], unique=False)
op.create_index(op.f('ix_clients_hardware_hash'), 'clients', ['hardware_hash'], unique=False)
op.drop_column('clients', 'model')
op.drop_column('clients', 'macs')
op.drop_column('clients', 'software_version')
op.drop_column('clients', 'os_version')
op.drop_column('clients', 'hostname')
op.drop_column('clients', 'type')
op.drop_column('clients', 'ip')
op.drop_column('clients', 'hardware_token')
# ### end Alembic commands ###

View File

@@ -0,0 +1,38 @@
"""Change uploaded_at to TIMESTAMP in EventMedia
Revision ID: 216402147826
Revises: b22d339ed2af
Create Date: 2025-09-01 10:22:55.285710
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision: str = '216402147826'
down_revision: Union[str, None] = 'b22d339ed2af'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column('event_media', 'uploaded_at',
existing_type=mysql.DATETIME(),
type_=sa.TIMESTAMP(),
nullable=False)
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column('event_media', 'uploaded_at',
existing_type=sa.TIMESTAMP(),
type_=mysql.DATETIME(),
nullable=True)
# ### end Alembic commands ###

View File

@@ -0,0 +1,28 @@
"""merge heads after conversions
Revision ID: 2b627d0885c3
Revises: 5b3c1a2f8d10, 8d1df7199cb7
Create Date: 2025-10-06 20:27:53.974926
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '2b627d0885c3'
down_revision: Union[str, None] = ('5b3c1a2f8d10', '8d1df7199cb7')
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
pass
def downgrade() -> None:
"""Downgrade schema."""
pass

View File

@@ -0,0 +1,32 @@
"""Add location to client_groups
Revision ID: 3a09ef909689
Revises: 207f5b190f93
Create Date: 2025-07-16 08:36:08.535836
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '3a09ef909689'
down_revision: Union[str, None] = '207f5b190f93'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('client_groups', sa.Column('location', sa.String(length=100), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('client_groups', 'location')
# ### end Alembic commands ###

View File

@@ -0,0 +1,109 @@
"""initial
Revision ID: 3d15c3cac7b6
Revises:
Create Date: 2025-07-15 09:43:16.209294
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '3d15c3cac7b6'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('client_groups',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('name', sa.String(length=100), nullable=False),
sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('event_media',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('media_type', sa.Enum('pdf', 'ppt', 'pptx', 'odp', 'mp4', 'avi', 'mkv', 'mov', 'wmv', 'flv', 'webm', 'mpg', 'mpeg', 'ogv', 'jpg', 'jpeg', 'png', 'gif', 'bmp', 'tiff', 'svg', 'html', name='mediatype'), nullable=False),
sa.Column('url', sa.String(length=255), nullable=False),
sa.Column('file_path', sa.String(length=255), nullable=True),
sa.Column('message_content', sa.Text(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('users',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('username', sa.String(length=50), nullable=False),
sa.Column('password_hash', sa.String(length=128), nullable=False),
sa.Column('role', sa.Enum('user', 'admin', 'superadmin', name='userrole'), nullable=False),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.Column('updated_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=True)
op.create_table('clients',
sa.Column('uuid', sa.String(length=36), nullable=False),
sa.Column('hardware_hash', sa.String(length=64), nullable=False),
sa.Column('location', sa.String(length=100), nullable=True),
sa.Column('ip_address', sa.String(length=45), nullable=True),
sa.Column('registration_time', sa.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('last_alive', sa.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('group_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['group_id'], ['client_groups.id'], ),
sa.PrimaryKeyConstraint('uuid')
)
op.create_index(op.f('ix_clients_hardware_hash'), 'clients', ['hardware_hash'], unique=False)
op.create_index(op.f('ix_clients_ip_address'), 'clients', ['ip_address'], unique=False)
op.create_table('events',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('group_id', sa.Integer(), nullable=False),
sa.Column('title', sa.String(length=100), nullable=False),
sa.Column('description', sa.Text(), nullable=True),
sa.Column('start', sa.TIMESTAMP(timezone=True), nullable=False),
sa.Column('end', sa.TIMESTAMP(timezone=True), nullable=False),
sa.Column('event_type', sa.Enum('presentation', 'website', 'video', 'message', 'other', 'webuntis', name='eventtype'), nullable=False),
sa.Column('event_media_id', sa.Integer(), nullable=True),
sa.Column('autoplay', sa.Boolean(), nullable=True),
sa.Column('loop', sa.Boolean(), nullable=True),
sa.Column('volume', sa.Float(), nullable=True),
sa.Column('slideshow_interval', sa.Integer(), nullable=True),
sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.Column('updated_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.Column('created_by', sa.Integer(), nullable=False),
sa.Column('updated_by', sa.Integer(), nullable=True),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.ForeignKeyConstraint(['created_by'], ['users.id'], ),
sa.ForeignKeyConstraint(['event_media_id'], ['event_media.id'], ),
sa.ForeignKeyConstraint(['group_id'], ['client_groups.id'], ),
sa.ForeignKeyConstraint(['updated_by'], ['users.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_events_end'), 'events', ['end'], unique=False)
op.create_index(op.f('ix_events_group_id'), 'events', ['group_id'], unique=False)
op.create_index(op.f('ix_events_start'), 'events', ['start'], unique=False)
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_events_start'), table_name='events')
op.drop_index(op.f('ix_events_group_id'), table_name='events')
op.drop_index(op.f('ix_events_end'), table_name='events')
op.drop_table('events')
op.drop_index(op.f('ix_clients_ip_address'), table_name='clients')
op.drop_index(op.f('ix_clients_hardware_hash'), table_name='clients')
op.drop_table('clients')
op.drop_index(op.f('ix_users_username'), table_name='users')
op.drop_table('users')
op.drop_table('event_media')
op.drop_table('client_groups')
# ### end Alembic commands ###

View File

@@ -0,0 +1,53 @@
"""Add conversions table
Revision ID: 5b3c1a2f8d10
Revises: e6eaede720aa
Create Date: 2025-10-06 12:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '5b3c1a2f8d10'
down_revision: Union[str, None] = 'e6eaede720aa'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
op.create_table(
'conversions',
sa.Column('id', sa.Integer(), primary_key=True, autoincrement=True),
sa.Column('source_event_media_id', sa.Integer(), nullable=False),
sa.Column('target_format', sa.String(length=10), nullable=False),
sa.Column('target_path', sa.String(length=512), nullable=True),
sa.Column('status', sa.Enum('pending', 'processing', 'ready', 'failed', name='conversionstatus'),
nullable=False, server_default='pending'),
sa.Column('file_hash', sa.String(length=64), nullable=True),
sa.Column('started_at', sa.TIMESTAMP(timezone=True), nullable=True),
sa.Column('completed_at', sa.TIMESTAMP(timezone=True), nullable=True),
sa.Column('error_message', sa.Text(), nullable=True),
sa.ForeignKeyConstraint(['source_event_media_id'], ['event_media.id'],
name='fk_conversions_event_media', ondelete='CASCADE'),
)
op.create_index('ix_conv_source_event_media_id', 'conversions', ['source_event_media_id'])
op.create_index('ix_conversions_target_format', 'conversions', ['target_format'])
op.create_index('ix_conv_status_target', 'conversions', ['status', 'target_format'])
op.create_index('ix_conv_source_target', 'conversions', ['source_event_media_id', 'target_format'])
op.create_unique_constraint('uq_conv_source_target_hash', 'conversions',
['source_event_media_id', 'target_format', 'file_hash'])
def downgrade() -> None:
op.drop_constraint('uq_conv_source_target_hash', 'conversions', type_='unique')
op.drop_index('ix_conv_source_target', table_name='conversions')
op.drop_index('ix_conv_status_target', table_name='conversions')
op.drop_index('ix_conversions_target_format', table_name='conversions')
op.drop_index('ix_conv_source_event_media_id', table_name='conversions')
op.drop_table('conversions')

View File

@@ -0,0 +1,28 @@
"""merge heads after holidays table
Revision ID: 71ba7ab08d84
Revises: 216402147826, 9b7a1f2a4d2b
Create Date: 2025-09-18 19:04:12.755422
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '71ba7ab08d84'
down_revision: Union[str, None] = ('216402147826', '9b7a1f2a4d2b')
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
pass
def downgrade() -> None:
"""Downgrade schema."""
pass

View File

@@ -0,0 +1,62 @@
"""add academic periods system
Revision ID: 8d1df7199cb7
Revises: 71ba7ab08d84
Create Date: 2025-09-20 11:07:08.059374
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '8d1df7199cb7'
down_revision: Union[str, None] = '71ba7ab08d84'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('academic_periods',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('name', sa.String(length=100), nullable=False),
sa.Column('display_name', sa.String(length=50), nullable=True),
sa.Column('start_date', sa.Date(), nullable=False),
sa.Column('end_date', sa.Date(), nullable=False),
sa.Column('period_type', sa.Enum('schuljahr', 'semester', 'trimester', name='academicperiodtype'), nullable=False),
sa.Column('is_active', sa.Boolean(), nullable=False),
sa.Column('created_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.Column('updated_at', sa.TIMESTAMP(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name', name='uq_academic_periods_name')
)
op.create_index('ix_academic_periods_active', 'academic_periods', ['is_active'], unique=False)
op.create_index(op.f('ix_academic_periods_end_date'), 'academic_periods', ['end_date'], unique=False)
op.create_index(op.f('ix_academic_periods_start_date'), 'academic_periods', ['start_date'], unique=False)
op.add_column('event_media', sa.Column('academic_period_id', sa.Integer(), nullable=True))
op.create_index(op.f('ix_event_media_academic_period_id'), 'event_media', ['academic_period_id'], unique=False)
op.create_foreign_key(None, 'event_media', 'academic_periods', ['academic_period_id'], ['id'])
op.add_column('events', sa.Column('academic_period_id', sa.Integer(), nullable=True))
op.create_index(op.f('ix_events_academic_period_id'), 'events', ['academic_period_id'], unique=False)
op.create_foreign_key(None, 'events', 'academic_periods', ['academic_period_id'], ['id'])
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, 'events', type_='foreignkey')
op.drop_index(op.f('ix_events_academic_period_id'), table_name='events')
op.drop_column('events', 'academic_period_id')
op.drop_constraint(None, 'event_media', type_='foreignkey')
op.drop_index(op.f('ix_event_media_academic_period_id'), table_name='event_media')
op.drop_column('event_media', 'academic_period_id')
op.drop_index(op.f('ix_academic_periods_start_date'), table_name='academic_periods')
op.drop_index(op.f('ix_academic_periods_end_date'), table_name='academic_periods')
op.drop_index('ix_academic_periods_active', table_name='academic_periods')
op.drop_table('academic_periods')
# ### end Alembic commands ###

View File

@@ -0,0 +1,47 @@
"""add school holidays table
Revision ID: 9b7a1f2a4d2b
Revises: e6eaede720aa
Create Date: 2025-09-18 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '9b7a1f2a4d2b'
down_revision = 'e6eaede720aa'
branch_labels = None
depends_on = None
def upgrade() -> None:
op.create_table(
'school_holidays',
sa.Column('id', sa.Integer(), primary_key=True, autoincrement=True),
sa.Column('name', sa.String(length=150), nullable=False),
sa.Column('start_date', sa.Date(), nullable=False),
sa.Column('end_date', sa.Date(), nullable=False),
sa.Column('region', sa.String(length=100), nullable=True),
sa.Column('source_file_name', sa.String(length=255), nullable=True),
sa.Column('imported_at', sa.TIMESTAMP(timezone=True),
server_default=sa.text('CURRENT_TIMESTAMP')),
)
op.create_index('ix_school_holidays_start_date',
'school_holidays', ['start_date'])
op.create_index('ix_school_holidays_end_date',
'school_holidays', ['end_date'])
op.create_index('ix_school_holidays_region', 'school_holidays', ['region'])
op.create_unique_constraint('uq_school_holidays_unique', 'school_holidays', [
'name', 'start_date', 'end_date', 'region'])
def downgrade() -> None:
op.drop_constraint('uq_school_holidays_unique',
'school_holidays', type_='unique')
op.drop_index('ix_school_holidays_region', table_name='school_holidays')
op.drop_index('ix_school_holidays_end_date', table_name='school_holidays')
op.drop_index('ix_school_holidays_start_date',
table_name='school_holidays')
op.drop_table('school_holidays')

View File

@@ -0,0 +1,32 @@
"""Add uploaded_at to EventMedia
Revision ID: b22d339ed2af
Revises: e6eaede720aa
Create Date: 2025-09-01 10:07:46.915640
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'b22d339ed2af'
# NOTE(review): 9b7a1f2a4d2b also revises e6eaede720aa — the revision graph
# branches here; confirm a merge revision exists.
down_revision: Union[str, None] = 'e6eaede720aa'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    # nullable=True so existing rows (with no upload timestamp) stay valid.
    op.add_column('event_media', sa.Column('uploaded_at', sa.DateTime(timezone=True), nullable=True))
    # ### end Alembic commands ###
def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('event_media', 'uploaded_at')
    # ### end Alembic commands ###

View File

@@ -0,0 +1,40 @@
"""Make conversions.file_hash NOT NULL
Revision ID: b5a6c3d4e7f8
Revises: 2b627d0885c3
Create Date: 2025-10-06 21:05:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = "b5a6c3d4e7f8"
down_revision: Union[str, None] = "2b627d0885c3"
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# Ensure no NULLs remain before altering nullability
op.execute("UPDATE conversions SET file_hash = '' WHERE file_hash IS NULL")
op.alter_column(
"conversions",
"file_hash",
existing_type=sa.String(length=64),
nullable=False,
existing_nullable=True,
)
def downgrade() -> None:
op.alter_column(
"conversions",
"file_hash",
existing_type=sa.String(length=64),
nullable=True,
existing_nullable=False,
)

View File

@@ -0,0 +1,34 @@
"""Add website to MediaType enum
Revision ID: e6eaede720aa
Revises: 0c47280d3e2d
Create Date: 2025-07-24 13:40:50.553863
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'e6eaede720aa'
down_revision: Union[str, None] = '0c47280d3e2d'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.execute(
"ALTER TABLE event_media MODIFY COLUMN media_type ENUM('pdf','ppt','pptx','odp','mp4','avi','mkv','mov','wmv','flv','webm','mpg','mpeg','ogv','jpg','jpeg','png','gif','bmp','tiff','svg','html','website') NOT NULL;"
)
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###

23
server/database.py Normal file
View File

@@ -0,0 +1,23 @@
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from dotenv import load_dotenv
import os

# Only load .env in dev mode; in prod, compose injects the variables directly.
if os.getenv("ENV", "development") == "development":
    load_dotenv(dotenv_path=os.path.join(
        os.path.dirname(__file__), '..', '.env'))

# Prod: full connection string comes straight from the environment (compose).
DB_URL = os.getenv("DB_CONN")
if not DB_URL:
    # Dev: assemble the URL from individual settings.
    DB_USER = os.getenv("DB_USER", "infoscreen_admin")
    # SECURITY(review): hardcoded fallback password shipped in source —
    # TODO: remove the default and require DB_PASSWORD via env/.env.
    DB_PASSWORD = os.getenv("DB_PASSWORD", "KqtpM7wmNd&mFKs")
    DB_HOST = os.getenv("DB_HOST", "db")  # ALWAYS 'db' as host inside the container!
    DB_NAME = os.getenv("DB_NAME", "infoscreen_by_taa")
    DB_URL = f"mysql+pymysql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}/{DB_NAME}"

engine = create_engine(DB_URL, echo=False)
# Debug output with the password masked — the previous version printed the
# full URL including the clear-text password to stdout/logs.
print(f"Using DB_URL: {engine.url.render_as_string(hide_password=True)}")
Session = sessionmaker(bind=engine)

45
server/dummy_clients.py Normal file
View File

@@ -0,0 +1,45 @@
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from models.models import Client
from dotenv import load_dotenv
import os
from datetime import datetime, timedelta
import random
import uuid
# .env laden
load_dotenv()
DB_USER = os.getenv("DB_USER")
DB_PASSWORD = os.getenv("DB_PASSWORD")
DB_HOST = os.getenv("DB_HOST")
DB_NAME = os.getenv("DB_NAME")
db_conn_str = f"mysql+pymysql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}/{DB_NAME}"
engine = create_engine(db_conn_str)
Session = sessionmaker(bind=engine)
session = Session()
# Dummy-Clients erzeugen
locations = [
"Raum 101",
"Raum 102",
"Lehrerzimmer",
"Aula",
"Bibliothek"
]
for i in range(5):
client = Client(
uuid=str(uuid.uuid4()),
hardware_hash=f"dummyhash{i:02d}",
location=locations[i],
ip_address=f"192.168.0.{100+i}",
registration_time=datetime.now() - timedelta(days=random.randint(1, 30)),
last_alive=datetime.now(),
is_active=True
)
session.add(client)
session.commit()
print("5 Dummy-Clients wurden angelegt.")

63
server/dummy_events.py Normal file
View File

@@ -0,0 +1,63 @@
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
from models.models import Event, EventMedia, EventType, Client
from dotenv import load_dotenv
import os
from datetime import datetime, timedelta
import random
# .env laden
load_dotenv()
DB_USER = os.getenv("DB_USER")
DB_PASSWORD = os.getenv("DB_PASSWORD")
DB_HOST = os.getenv("DB_HOST")
DB_NAME = os.getenv("DB_NAME")
db_conn_str = f"mysql+pymysql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}/{DB_NAME}"
engine = create_engine(db_conn_str)
Session = sessionmaker(bind=engine)
session = Session()
now = datetime.now()
def random_time_on_day(day_offset: int, duration_hours: int = 1):
"""Erzeugt eine zufällige Start- und Endzeit zwischen 8 und 16 Uhr für einen Tag."""
start_hour = random.randint(8, 15 - duration_hours + 1)
start = (now + timedelta(days=day_offset)).replace(hour=start_hour,
minute=0, second=0, microsecond=0)
end = start + timedelta(hours=duration_hours)
return start, end
# Hole alle Clients aus der Datenbank
clients = session.query(Client).all()
created_by = 1 # Passe ggf. an
all_events = []
for client in clients:
for i in range(10):
day_offset = random.randint(0, 13) # Termine in den nächsten 14 Tagen
duration = random.choice([1, 2]) # 1 oder 2 Stunden
start, end = random_time_on_day(day_offset, duration)
event = Event(
client_uuid=client.uuid,
title=f"Termin {i+1} für {client.location or client.uuid[:8]}",
description=f"Automatisch generierter Termin {i+1} für Client {client.uuid}",
start=start,
end=end,
event_type=random.choice(list(EventType)),
created_by=created_by,
updated_by=None,
is_active=True
)
all_events.append(event)
# Events speichern
for event in all_events:
session.add(event)
session.commit()
print(f"{len(all_events)} Termine für {len(clients)} Clients wurden angelegt.")

View File

@@ -0,0 +1,74 @@
#!/usr/bin/env python3
"""
Creates default school years for Austrian schools.
Run this script after the migration to create the default periods.
"""
import sys

# FIX: the path append must run BEFORE the project imports below — previously
# it was placed after them, so 'models'/'server' could not be resolved when
# running the script outside the package root.
sys.path.append('/workspace')

from datetime import date

from models.models import AcademicPeriod, AcademicPeriodType
from server.database import Session


def create_default_academic_periods():
    """Create the default Austrian school-year periods (idempotent)."""
    session = Session()
    try:
        # Skip entirely if any period already exists.
        existing = session.query(AcademicPeriod).first()
        if existing:
            print("Academic periods already exist. Skipping creation.")
            return
        # Default school years to seed; only 2024/25 starts active.
        periods = [
            {
                'name': 'Schuljahr 2024/25',
                'display_name': 'SJ 24/25',
                'start_date': date(2024, 9, 2),
                'end_date': date(2025, 7, 4),
                'period_type': AcademicPeriodType.schuljahr,
                'is_active': True  # current school year
            },
            {
                'name': 'Schuljahr 2025/26',
                'display_name': 'SJ 25/26',
                'start_date': date(2025, 9, 1),
                'end_date': date(2026, 7, 3),
                'period_type': AcademicPeriodType.schuljahr,
                'is_active': False
            },
            {
                'name': 'Schuljahr 2026/27',
                'display_name': 'SJ 26/27',
                'start_date': date(2026, 9, 7),
                'end_date': date(2027, 7, 2),
                'period_type': AcademicPeriodType.schuljahr,
                'is_active': False
            }
        ]
        for period_data in periods:
            period = AcademicPeriod(**period_data)
            session.add(period)
        session.commit()
        print(f"Successfully created {len(periods)} academic periods")
        # Echo the created periods for operator feedback.
        for period in session.query(AcademicPeriod).all():
            status = "AKTIV" if period.is_active else "inaktiv"
            print(
                f"  - {period.name} ({period.start_date} - {period.end_date}) [{status}]")
    except Exception as e:
        session.rollback()
        print(f"Error creating academic periods: {e}")
    finally:
        session.close()


if __name__ == "__main__":
    create_default_academic_periods()

38
server/init_defaults.py Normal file
View File

@@ -0,0 +1,38 @@
from sqlalchemy import create_engine, text
import os
from dotenv import load_dotenv
import bcrypt
# .env laden
load_dotenv()
DB_URL = f"mysql+pymysql://{os.getenv('DB_USER')}:{os.getenv('DB_PASSWORD')}@{os.getenv('DB_HOST')}:3306/{os.getenv('DB_NAME')}"
engine = create_engine(DB_URL, isolation_level="AUTOCOMMIT")
with engine.connect() as conn:
# Default-Gruppe mit id=1 anlegen, falls nicht vorhanden
result = conn.execute(
text("SELECT COUNT(*) FROM client_groups WHERE id=1"))
if result.scalar() == 0:
conn.execute(
text(
"INSERT INTO client_groups (id, name, is_active) VALUES (1, 'Nicht zugeordnet', 1)")
)
print("✅ Default-Gruppe mit id=1 angelegt.")
# Admin-Benutzer anlegen, falls nicht vorhanden
admin_user = os.getenv("DEFAULT_ADMIN_USERNAME", "infoscreen_admin")
admin_pw = os.getenv("DEFAULT_ADMIN_PASSWORD", "Info_screen_admin25!")
# Passwort hashen mit bcrypt
hashed_pw = bcrypt.hashpw(admin_pw.encode(
'utf-8'), bcrypt.gensalt()).decode('utf-8')
# Prüfen, ob User existiert
result = conn.execute(text(
"SELECT COUNT(*) FROM users WHERE username=:username"), {"username": admin_user})
if result.scalar() == 0:
# Rolle: 1 = Admin (ggf. anpassen je nach Modell)
conn.execute(
text("INSERT INTO users (username, password_hash, role, is_active) VALUES (:username, :password_hash, 1, 1)"),
{"username": admin_user, "password_hash": hashed_pw}
)
print(f"✅ Admin-Benutzer '{admin_user}' angelegt.")

144
server/initialize_database.py Executable file
View File

@@ -0,0 +1,144 @@
#!/usr/bin/env python3
"""
Complete database initialization script for the infoscreen application.
This script:
1. Runs all Alembic migrations to create/update database schema
2. Creates default user groups and admin user
3. Initializes academic periods for Austrian schools
Usage:
python initialize_database.py
"""
import os
import sys
import subprocess
from pathlib import Path
# Add workspace to Python path
sys.path.insert(0, '/workspace')
def run_command(cmd, description):
    """Run a shell command, echoing progress; True on success, False on failure."""
    print(f"\n🔄 {description}...")
    try:
        completed = subprocess.run(
            cmd, shell=True, check=True, capture_output=True, text=True)
    except subprocess.CalledProcessError as exc:
        # Non-zero exit: report everything the command produced.
        print(f"{description} failed:")
        print(f"Error: {exc}")
        if exc.stdout:
            print(f"Stdout: {exc.stdout}")
        if exc.stderr:
            print(f"Stderr: {exc.stderr}")
        return False
    if completed.stdout:
        print(completed.stdout)
    print(f"{description} completed successfully")
    return True
def check_database_connection():
    """Check if database is accessible."""
    print("\n🔍 Checking database connection...")
    try:
        from dotenv import load_dotenv
        from sqlalchemy import create_engine, text

        load_dotenv('/workspace/.env')
        url = 'mysql+pymysql://{}:{}@{}/{}'.format(
            os.getenv('DB_USER'),
            os.getenv('DB_PASSWORD'),
            os.getenv('DB_HOST', 'db'),
            os.getenv('DB_NAME'),
        )
        engine = create_engine(url)
        with engine.connect() as conn:
            version = conn.execute(text('SELECT VERSION()')).scalar()
        print(f"✅ Connected to database: {version}")
        return True
    except Exception as e:
        # Covers import errors, bad config and unreachable DB alike.
        print(f"❌ Database connection failed: {e}")
        return False
def check_current_migration():
    """Check current Alembic migration status."""
    print("\n🔍 Checking current migration status...")
    try:
        proc = subprocess.run(
            "cd /workspace/server && alembic current",
            shell=True,
            capture_output=True,
            text=True
        )
        out = proc.stdout
        # `alembic current` prints "(head)" when the DB is fully migrated.
        if "head" in out:
            print("✅ Database is up to date")
            return True
        if not out.strip():
            print("⚠️  No migrations applied yet")
            return False
        print(f"⚠️  Current migration: {out.strip()}")
        return False
    except Exception as e:
        print(f"❌ Migration check failed: {e}")
        return False
def main():
    """Main initialization function."""
    print("🚀 Starting database initialization for infoscreen application")
    print("=" * 60)
    # Guard: the alembic config must be present before anything else runs.
    if not os.path.exists('/workspace/server/alembic.ini'):
        print("❌ Error: alembic.ini not found. Are you in the correct directory?")
        return False
    if not check_database_connection():
        print("\n❌ Cannot connect to database. Please ensure:")
        print("   - Database container is running")
        print("   - Environment variables are set correctly")
        print("   - Network connectivity is available")
        return False
    # Run migrations only when the DB is not already at head.
    if check_current_migration():
        print("⏭️  Skipping migrations (already up to date)")
    elif not run_command(
        "cd /workspace/server && alembic upgrade head",
        "Running Alembic migrations"
    ):
        return False
    # Seed default data; each step aborts the whole run on failure.
    for cmd, desc in (
        ("cd /workspace/server && python init_defaults.py",
         "Creating default groups and admin user"),
        ("cd /workspace/server && python init_academic_periods.py",
         "Setting up academic periods"),
    ):
        if not run_command(cmd, desc):
            return False
    print("\n" + "=" * 60)
    print("🎉 Database initialization completed successfully!")
    print("\nNext steps:")
    print("   1. Start the application services")
    print("   2. Access the dashboard to verify everything works")
    print("   3. Login with admin credentials if needed")
    return True
# Script entry point: exit code 0 on success, 1 when any step failed.
if __name__ == "__main__":
    success = main()
    sys.exit(0 if success else 1)

142
server/mqtt_helper.py Normal file
View File

@@ -0,0 +1,142 @@
"""
Einfache MQTT-Hilfsfunktion für Client-Gruppenzuordnungen
"""
import os
import json
import logging
import paho.mqtt.client as mqtt
logger = logging.getLogger(__name__)
def publish_client_group(client_uuid: str, group_id: int) -> bool:
    """
    Publishes a client's group assignment as a retained MQTT message.

    Args:
        client_uuid: UUID of the client
        group_id: ID of the group
    Returns:
        bool: True on success, False on any error
    """
    try:
        # MQTT configuration from the environment / .env
        broker_host = os.getenv("MQTT_BROKER_HOST", "mqtt")
        broker_port = int(os.getenv("MQTT_BROKER_PORT", 1883))
        username = os.getenv("MQTT_USER")
        password = os.getenv("MQTT_PASSWORD")
        # Topic and payload
        topic = f"infoscreen/{client_uuid}/group_id"
        payload = json.dumps({
            "group_id": group_id,
            "client_uuid": client_uuid
        })
        # paho-mqtt >= 2.0 (pinned in requirements.txt) requires an explicit
        # callback API version; bare mqtt.Client() fails there.
        client = mqtt.Client(mqtt.CallbackAPIVersion.VERSION2)
        if username and password:
            client.username_pw_set(username, password)
        client.connect(broker_host, broker_port, 60)
        # Start the network loop: without it, wait_for_publish() on a QoS-1
        # message blocks until timeout because nothing reads the broker PUBACK.
        client.loop_start()
        try:
            result = client.publish(topic, payload, qos=1, retain=True)
            result.wait_for_publish(timeout=5.0)
        finally:
            client.loop_stop()
            client.disconnect()
        logger.info(
            f"Group assignment published for client {client_uuid}: group_id={group_id}")
        return True
    except Exception as e:
        logger.error(
            f"Error publishing group assignment for client {client_uuid}: {e}")
        return False
def publish_multiple_client_groups(client_group_mappings: dict) -> tuple[int, int]:
    """
    Publishes group assignments for several clients over a single connection.

    Args:
        client_group_mappings: dict of {client_uuid: group_id}
    Returns:
        tuple: (success_count, failed_count)
    """
    try:
        broker_host = os.getenv("MQTT_BROKER_HOST", "mqtt")
        broker_port = int(os.getenv("MQTT_BROKER_PORT", 1883))
        username = os.getenv("MQTT_USER")
        password = os.getenv("MQTT_PASSWORD")
        # paho-mqtt >= 2.0 requires an explicit callback API version.
        client = mqtt.Client(mqtt.CallbackAPIVersion.VERSION2)
        if username and password:
            client.username_pw_set(username, password)
        client.connect(broker_host, broker_port, 60)
        # Network loop needed so QoS-1 wait_for_publish() can complete.
        client.loop_start()
        success_count = 0
        failed_count = 0
        try:
            for client_uuid, group_id in client_group_mappings.items():
                try:
                    topic = f"infoscreen/{client_uuid}/group_id"
                    payload = json.dumps({
                        "group_id": group_id,
                        "client_uuid": client_uuid
                    })
                    result = client.publish(topic, payload, qos=1, retain=True)
                    result.wait_for_publish(timeout=5.0)
                    success_count += 1
                except Exception as e:
                    logger.error(f"Failed to publish for {client_uuid}: {e}")
                    failed_count += 1
        finally:
            client.loop_stop()
            client.disconnect()
        logger.info(
            f"Bulk publish completed: {success_count} success, {failed_count} failed")
        return success_count, failed_count
    except Exception as e:
        # Connection-level failure: report every mapping as failed.
        logger.error(f"Error in bulk publish: {e}")
        return 0, len(client_group_mappings)
def delete_client_group_message(client_uuid: str) -> bool:
    """
    Deletes the retained group message for a client (on client deletion).

    Publishing an empty retained payload clears the previous retained message.
    Returns True on success, False on any error.
    """
    try:
        broker_host = os.getenv("MQTT_BROKER_HOST", "mqtt")
        broker_port = int(os.getenv("MQTT_BROKER_PORT", 1883))
        username = os.getenv("MQTT_USER")
        password = os.getenv("MQTT_PASSWORD")
        topic = f"infoscreen/{client_uuid}/group_id"
        # paho-mqtt >= 2.0 requires an explicit callback API version.
        client = mqtt.Client(mqtt.CallbackAPIVersion.VERSION2)
        if username and password:
            client.username_pw_set(username, password)
        client.connect(broker_host, broker_port, 60)
        # Loop required so the QoS-1 publish can be acknowledged.
        client.loop_start()
        try:
            # Empty retained message deletes the previous one.
            result = client.publish(topic, "", qos=1, retain=True)
            result.wait_for_publish(timeout=5.0)
        finally:
            client.loop_stop()
            client.disconnect()
        logger.info(f"Deleted retained group message for client {client_uuid}")
        return True
    except Exception as e:
        logger.error(
            f"Error deleting group message for client {client_uuid}: {e}")
        return False

View File

@@ -0,0 +1,159 @@
import sys
sys.path.append('/workspace')
import os
import json
import base64
import glob
from datetime import datetime
from paho.mqtt import client as mqtt_client
from sqlalchemy import create_engine, func
from sqlalchemy.orm import sessionmaker
from models.models import Client, Base
from helpers.check_folder import ensure_folder_exists
import shutil
# Base directory relative to this script
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
# Configuration (env-driven)
MQTT_BROKER = os.getenv("MQTT_BROKER_HOST", "localhost")
MQTT_PORT = int(os.getenv("MQTT_BROKER_PORT", 1883))
MQTT_USER = os.getenv("MQTT_USER")
MQTT_PASSWORD = os.getenv("MQTT_PASSWORD")
# FIX: int(os.getenv("MQTT_KEEPALIVE")) crashed with TypeError when the
# variable was unset — all sibling settings have defaults, this one now too.
MQTT_KEEPALIVE = int(os.getenv("MQTT_KEEPALIVE", 60))
DB_USER = os.getenv("DB_USER")
DB_PASSWORD = os.getenv("DB_PASSWORD")
DB_HOST = os.getenv("DB_HOST")
DB_NAME = os.getenv("DB_NAME")
topics = [
    ("infoscreen/screenshot", 0),
    ("infoscreen/heartbeat", 0),
    # ... more topics here
]
# Screenshot directories
RECEIVED_DIR = os.path.join(BASE_DIR, "received_screenshots")
LATEST_DIR = os.path.join(BASE_DIR, "screenshots")
MAX_PER_CLIENT = 20
# Create folders for received screenshots and the latest screenshot
ensure_folder_exists(RECEIVED_DIR)
ensure_folder_exists(LATEST_DIR)
# Configure database (MariaDB)
DB_URL = f"mysql+pymysql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}/{DB_NAME}"
engine = create_engine(DB_URL, echo=False)
Session = sessionmaker(bind=engine)
Base.metadata.create_all(engine)
def prune_old_screenshots(client_id: str):
    """Delete the oldest screenshots of a client once MAX_PER_CLIENT is exceeded."""
    matches = sorted(
        glob.glob(os.path.join(RECEIVED_DIR, f"{client_id}_*.jpg")),
        key=os.path.getmtime,
    )
    # Everything except the newest MAX_PER_CLIENT files is stale.
    for stale in matches[:-MAX_PER_CLIENT]:
        try:
            os.remove(stale)
            print(f"Altes Bild gelöscht: {stale}")
        except OSError as e:
            print(f"Fehler beim Löschen von {stale}: {e}")
def handle_screenshot(msg):
    """Persist an incoming screenshot payload and refresh the client's latest image."""
    try:
        data = json.loads(msg.payload.decode("utf-8"))
        client_id = data.get("client_id", "unknown")
        stamp = datetime.fromtimestamp(
            data.get("timestamp", datetime.now().timestamp())
        )
        image_bytes = base64.b64decode(data["screenshot"])
        # File name combines client id and timestamp.
        target = os.path.join(
            RECEIVED_DIR, stamp.strftime(f"{client_id}_%Y%m%d_%H%M%S.jpg"))
        # Store the image under "received_screenshots".
        with open(target, "wb") as fh:
            fh.write(image_bytes)
        print(f"Bild gespeichert: {target}")
        # Mirror the newest screenshot into the "screenshots" directory.
        newest = os.path.join(LATEST_DIR, f"{client_id}.jpg")
        shutil.copy(target, newest)
        print(f"Neuester Screenshot aktualisiert: {newest}")
        # Trim this client's history to MAX_PER_CLIENT files.
        prune_old_screenshots(client_id)
    except Exception as e:
        print("Fehler beim Verarbeiten der Screenshot-Nachricht:", e)
def handle_heartbeat(msg):
    """Processes a heartbeat message: updates a known client or registers a new one."""
    session = Session()
    try:
        payload = json.loads(msg.payload.decode("utf-8"))
        uuid = payload.get("client_id")
        hardware_hash = payload.get("hardware_hash")
        ip_address = payload.get("ip_address")
        # Look up the client by its UUID.
        client = session.query(Client).filter_by(uuid=uuid).first()
        if client:
            # Known client: refresh last_alive (DB-side NOW()) and IP.
            client.ip_address = ip_address
            client.last_alive = func.now()
            session.commit()
            print(f"Heartbeat aktualisiert für Client {uuid}")
        else:
            # New client: ask for the location interactively.
            # NOTE(review): input() blocks the MQTT loop and raises EOFError in
            # a headless container (caught below, triggering rollback) —
            # confirm this script is only run interactively.
            location = input(f"Neuer Client {uuid} gefunden. Bitte Standort eingeben: ")
            new_client = Client(
                uuid=uuid,
                hardware_hash=hardware_hash,
                location=location,
                ip_address=ip_address
            )
            session.add(new_client)
            session.commit()
            print(f"Neuer Client {uuid} angelegt mit Standort {location}")
    except Exception as e:
        print("Fehler beim Verarbeiten der Heartbeat-Nachricht:", e)
        session.rollback()
    finally:
        session.close()
# Map topics to their handler functions.
handlers = {
    "infoscreen/screenshot": handle_screenshot,
    "infoscreen/heartbeat": handle_heartbeat,
    # ... more mappings here
}
def on_connect(client, userdata, flags, rc, properties):
    # VERSION2 callback signature (5 args incl. properties) — matches the
    # CallbackAPIVersion.VERSION2 client created in __main__.
    print("Verbunden mit Code:", rc)
    client.subscribe(topics)
def on_message(client, userdata, msg):
    # Dispatch to the registered handler for this topic, if any.
    topic = msg.topic
    if topic in handlers:
        handlers[topic](msg)
    else:
        print(f"Unbekanntes Topic '{topic}', keine Verarbeitung definiert.")
if __name__ == "__main__":
    # VERSION2 selects the modern paho callback API (matches the 5-argument
    # on_connect signature above).
    client = mqtt_client.Client(callback_api_version=mqtt_client.CallbackAPIVersion.VERSION2)
    client.username_pw_set(MQTT_USER, MQTT_PASSWORD)  # broker authentication
    client.on_connect = on_connect
    client.on_message = on_message
    client.connect(MQTT_BROKER, MQTT_PORT, keepalive=MQTT_KEEPALIVE)
    client.loop_forever()  # blocking network loop; runs until interrupted

57
server/mqtt_receiver.py Normal file
View File

@@ -0,0 +1,57 @@
import os
import base64
import json
from datetime import datetime
import paho.mqtt.client as mqtt
# MQTT configuration
# SECURITY(review): credentials used to be hardcoded; they are now read from
# the environment, with the previous values as fallback so existing
# deployments keep working. TODO: drop the fallbacks and require env config.
MQTT_BROKER = os.getenv("MQTT_BROKER_HOST", "mqtt_broker")
MQTT_PORT = int(os.getenv("MQTT_BROKER_PORT", 1883))
MQTT_USER = os.getenv("MQTT_USER", "infoscreen_taa_user")
MQTT_PASSWORD = os.getenv("MQTT_PASSWORD", "infoscreen_taa_MQTT25!")
TOPIC_SCREENSHOTS = "infoscreen/screenshot"
SAVE_DIR = "received_screenshots"
# NOTE(review): this list is never subscribed (on_connect only subscribes
# TOPIC_SCREENSHOTS) — confirm whether heartbeat handling belongs here.
topics = [
    ("infoscreen/screenshot", 0),
    ("infoscreen/heartbeat", 0),
    # ... more topics here
]
# Create the folder for received screenshots.
os.makedirs(SAVE_DIR, exist_ok=True)
# Callback invoked for every incoming message.
def on_message(client, userdata, msg):
    """Decode a screenshot payload and write it into SAVE_DIR."""
    try:
        data = json.loads(msg.payload.decode('utf-8'))
        image_bytes = base64.b64decode(data["screenshot"])
        # File name carries the payload timestamp (fallback: now).
        stamp = datetime.fromtimestamp(
            data.get("timestamp", datetime.now().timestamp()))
        filepath = os.path.join(
            SAVE_DIR, stamp.strftime("screenshot_%Y%m%d_%H%M%S.jpg"))
        with open(filepath, "wb") as fh:
            fh.write(image_bytes)
        print(f"Bild gespeichert: {filepath}")
    except Exception as e:
        print("Fehler beim Verarbeiten der Nachricht:", e)
# Callback invoked on (attempted) connection; rc == 0 means success.
def on_connect(client, userdata, flags, rc, properties):
    if rc == 0:
        print("Mit MQTT-Server verbunden.")
        # NOTE(review): only TOPIC_SCREENSHOTS is subscribed; the `topics`
        # list defined above (incl. heartbeat) is unused — confirm intent.
        client.subscribe(TOPIC_SCREENSHOTS, qos=1)
    else:
        print(f"Verbindung fehlgeschlagen (Code {rc})")
if __name__ == "__main__":
    # VERSION2 selects the modern paho callback API (matches the 5-argument
    # on_connect signature above).
    client = mqtt.Client(mqtt.CallbackAPIVersion.VERSION2)
    client.username_pw_set(MQTT_USER, MQTT_PASSWORD)  # broker authentication
    client.on_connect = on_connect
    client.on_message = on_message
    client.connect(MQTT_BROKER, MQTT_PORT, keepalive=60)
    client.loop_forever()  # blocking network loop; runs until interrupted

View File

@@ -0,0 +1,2 @@
python-dotenv>=1.1.0
debugpy

11
server/requirements.txt Normal file
View File

@@ -0,0 +1,11 @@
alembic>=1.16.1
bcrypt>=4.3.0
paho-mqtt>=2.1.0
PyMySQL>=1.1.1
python-dotenv>=1.1.0
SQLAlchemy>=2.0.41
flask
gunicorn
redis>=5.0.1
rq>=1.16.2
requests>=2.32.3

View File

@@ -0,0 +1,84 @@
from flask import Blueprint, jsonify, request
from server.database import Session
from models.models import AcademicPeriod
from datetime import datetime
academic_periods_bp = Blueprint(
    'academic_periods', __name__, url_prefix='/api/academic_periods')


@academic_periods_bp.route('', methods=['GET'])
def list_academic_periods():
    """Return every academic period, ordered by ascending start date."""
    session = Session()
    try:
        rows = (
            session.query(AcademicPeriod)
            .order_by(AcademicPeriod.start_date.asc())
            .all()
        )
        return jsonify({
            'periods': [row.to_dict() for row in rows]
        })
    finally:
        session.close()
@academic_periods_bp.route('/active', methods=['GET'])
def get_active_academic_period():
    """Return the period flagged is_active, or null when none is set."""
    session = Session()
    try:
        active = (
            session.query(AcademicPeriod)
            .filter(AcademicPeriod.is_active == True)
            .first()
        )
        body = {'period': active.to_dict() if active else None}
        return jsonify(body), 200
    finally:
        session.close()
@academic_periods_bp.route('/for_date', methods=['GET'])
def get_period_for_date():
    """
    Returns the academic period that covers the provided date (YYYY-MM-DD).
    If multiple match, prefer the one with the latest start_date.
    """
    date_str = request.args.get('date')
    if not date_str:
        return jsonify({'error': 'Missing required query param: date (YYYY-MM-DD)'}), 400
    try:
        target = datetime.strptime(date_str, '%Y-%m-%d').date()
    except ValueError:
        return jsonify({'error': 'Invalid date format. Expected YYYY-MM-DD'}), 400
    session = Session()
    try:
        match = (
            session.query(AcademicPeriod)
            .filter(AcademicPeriod.start_date <= target)
            .filter(AcademicPeriod.end_date >= target)
            .order_by(AcademicPeriod.start_date.desc())
            .first()
        )
        return jsonify({'period': match.to_dict() if match else None}), 200
    finally:
        session.close()
@academic_periods_bp.route('/active', methods=['POST'])
def set_active_academic_period():
    """Activate the period given by JSON field 'id' and deactivate all others."""
    data = request.get_json(silent=True) or {}
    period_id = data.get('id')
    if period_id is None:
        return jsonify({'error': 'Missing required field: id'}), 400
    session = Session()
    try:
        # Session.get replaces the legacy Query.get (removed in SQLAlchemy 2.0).
        target = session.get(AcademicPeriod, period_id)
        if not target:
            return jsonify({'error': 'AcademicPeriod not found'}), 404
        # Deactivate all, then activate target
        session.query(AcademicPeriod).filter(AcademicPeriod.is_active == True).update(
            {AcademicPeriod.is_active: False}
        )
        target.is_active = True
        session.commit()
        session.refresh(target)
        return jsonify({'period': target.to_dict()}), 200
    except Exception:
        # Don't leave the "everything deactivated" state half-applied.
        session.rollback()
        raise
    finally:
        session.close()

289
server/routes/clients.py Normal file
View File

@@ -0,0 +1,289 @@
import sys

# FIX: the path append must run BEFORE the project imports below — previously
# it was executed after them, so it could never help resolve 'server'/'models'.
sys.path.append('/workspace')

from flask import Blueprint, request, jsonify

from models.models import Client, ClientGroup
from server.database import Session
from server.mqtt_helper import publish_client_group, delete_client_group_message, publish_multiple_client_groups

clients_bp = Blueprint("clients", __name__, url_prefix="/api/clients")
@clients_bp.route("/sync-all-groups", methods=["POST"])
def sync_all_client_groups():
    """
    Administrative route: re-publishes every active client's group assignment
    to MQTT. Useful as a one-off migration for pre-existing clients.
    """
    session = Session()
    try:
        # Fetch all active clients.
        clients = session.query(Client).filter(Client.is_active == True).all()
        if not clients:
            return jsonify({"message": "Keine aktiven Clients gefunden", "synced": 0})
        # Publish every client's assignment over a single broker connection.
        client_group_mappings = {
            client.uuid: client.group_id for client in clients}
        success_count, failed_count = publish_multiple_client_groups(
            client_group_mappings)
        return jsonify({
            "success": True,
            "message": "Synchronisation abgeschlossen",
            "synced": success_count,
            "failed": failed_count,
            "total": len(clients)
        })
    except Exception as e:
        return jsonify({"error": f"Fehler bei der Synchronisation: {str(e)}"}), 500
    finally:
        # finally guarantees cleanup on every path (previously leaked if
        # jsonify or the dict build raised outside the except arm).
        session.close()
@clients_bp.route("/without_description", methods=["GET"])
def get_clients_without_description():
    """List clients whose description is NULL or empty."""
    session = Session()
    try:
        # .is_(None) is the idiomatic SQLAlchemy NULL test.
        clients = session.query(Client).filter(
            (Client.description.is_(None)) | (Client.description == "")
        ).all()
        result = [
            {
                "uuid": c.uuid,
                "hardware_token": c.hardware_token,
                "ip": c.ip,
                "type": c.type,
                "hostname": c.hostname,
                "os_version": c.os_version,
                "software_version": c.software_version,
                "macs": c.macs,
                "model": c.model,
                "registration_time": c.registration_time.isoformat() if c.registration_time else None,
                "last_alive": c.last_alive.isoformat() if c.last_alive else None,
                "is_active": c.is_active,
                "group_id": c.group_id,
            }
            for c in clients
        ]
        return jsonify(result)
    finally:
        # Previously the session leaked if the query raised.
        session.close()
@clients_bp.route("/<uuid>/description", methods=["PUT"])
def set_client_description(uuid):
    """Set a client's description and (re-)publish its group assignment via MQTT."""
    # silent=True + `or {}` avoids crashing on missing/invalid JSON bodies;
    # `or ""` handles an explicit null description.
    data = request.get_json(silent=True) or {}
    description = (data.get("description") or "").strip()
    if not description:
        return jsonify({"error": "Beschreibung darf nicht leer sein"}), 400
    session = Session()
    try:
        client = session.query(Client).filter_by(uuid=uuid).first()
        if not client:
            return jsonify({"error": "Client nicht gefunden"}), 404
        client.description = description
        session.commit()
        # MQTT: publish the group assignment (important for fresh setup-mode
        # clients). Read attributes before the session closes.
        mqtt_success = publish_client_group(client.uuid, client.group_id)
    finally:
        session.close()
    response = {"success": True}
    if not mqtt_success:
        response["warning"] = "Beschreibung gespeichert, aber MQTT-Publishing fehlgeschlagen"
    return jsonify(response)
@clients_bp.route("", methods=["GET"])
def get_clients():
    """Return all clients with their full attribute set."""
    session = Session()
    try:
        clients = session.query(Client).all()
        result = [
            {
                "uuid": c.uuid,
                "hardware_token": c.hardware_token,
                "ip": c.ip,
                "type": c.type,
                "hostname": c.hostname,
                "os_version": c.os_version,
                "software_version": c.software_version,
                "macs": c.macs,
                "model": c.model,
                "description": c.description,
                "registration_time": c.registration_time.isoformat() if c.registration_time else None,
                "last_alive": c.last_alive.isoformat() if c.last_alive else None,
                "is_active": c.is_active,
                "group_id": c.group_id,
            }
            for c in clients
        ]
        return jsonify(result)
    finally:
        # Previously the session leaked if the query raised.
        session.close()
@clients_bp.route("/group", methods=["PUT"])
def update_clients_group():
    """Bulk-assign a list of clients to a group (by id or name), then publish via MQTT."""
    data = request.get_json()
    client_ids = data.get("client_ids", [])
    group_id = data.get("group_id")
    group_name = data.get("group_name")
    if not isinstance(client_ids, list) or len(client_ids) == 0:
        return jsonify({"error": "client_ids muss eine nicht-leere Liste sein"}), 400
    session = Session()
    # Resolve the target group: group_id takes priority over group_name.
    group = None
    if group_id is not None:
        group = session.query(ClientGroup).filter_by(id=group_id).first()
        if not group:
            session.close()
            return jsonify({"error": f"Gruppe mit id={group_id} nicht gefunden"}), 404
    elif group_name:
        group = session.query(ClientGroup).filter_by(name=group_name).first()
        if not group:
            session.close()
            return jsonify({"error": f"Gruppe '{group_name}' nicht gefunden"}), 404
    else:
        session.close()
        return jsonify({"error": "Entweder group_id oder group_name ist erforderlich"}), 400
    # IMPORTANT: cache group.id before closing the session to avoid a
    # DetachedInstanceError when it is used after close.
    target_group_id = group.id
    # Bulk UPDATE; synchronize_session=False skips in-session state syncing.
    session.query(Client).filter(Client.uuid.in_(client_ids)).update(
        {Client.group_id: target_group_id}, synchronize_session=False
    )
    session.commit()
    session.close()
    # MQTT: publish the new assignment for every affected client AFTER the
    # commit (uses the cached target_group_id, no DB access needed).
    client_group_mappings = {
        client_id: target_group_id for client_id in client_ids}
    success_count, failed_count = publish_multiple_client_groups(
        client_group_mappings)
    response = {"success": True}
    if failed_count > 0:
        response[
            "warning"] = f"Gruppenzuordnung gespeichert, aber {failed_count} MQTT-Publishing(s) fehlgeschlagen"
    return jsonify(response)
@clients_bp.route("/<uuid>", methods=["PATCH"])
def update_client(uuid):
    """Partially update a client; only 'description' and 'model' are writable."""
    # silent=True + `or {}` avoids an AttributeError when the body is missing
    # or not valid JSON (get_json() would return None).
    data = request.get_json(silent=True) or {}
    session = Session()
    try:
        client = session.query(Client).filter_by(uuid=uuid).first()
        if not client:
            return jsonify({"error": "Client nicht gefunden"}), 404
        allowed_fields = ["description", "model"]
        updated = False
        for field in allowed_fields:
            if field in data:
                setattr(client, field, data[field])
                updated = True
        if updated:
            session.commit()
            result = {"success": True}
        else:
            result = {"error": "Keine gültigen Felder zum Aktualisieren übergeben"}
        return jsonify(result)
    finally:
        # Guarantees cleanup on every path, including the 404 early return.
        session.close()
# Neue Route: Liefert die aktuelle group_id für einen Client
@clients_bp.route("/<uuid>/group", methods=["GET"])
def get_client_group(uuid):
    """Return the current group_id of the client identified by ``uuid``."""
    session = Session()
    try:
        client = session.query(Client).filter_by(uuid=uuid).first()
        if client is None:
            return jsonify({"error": "Client nicht gefunden"}), 404
        return jsonify({"group_id": client.group_id})
    finally:
        session.close()
# Neue Route: Liefert alle Clients mit Alive-Status
@clients_bp.route("/with_alive_status", methods=["GET"])
def get_clients_with_alive_status():
    """List every client together with a coarse alive flag.

    NOTE(review): ``is_alive`` here only checks that a heartbeat was ever
    recorded and the client is active — no grace period is applied.
    """
    session = Session()
    try:
        payload = [
            {
                "uuid": client.uuid,
                "description": client.description,
                "ip": client.ip,
                "last_alive": client.last_alive.isoformat() if client.last_alive else None,
                "is_active": client.is_active,
                "is_alive": bool(client.last_alive and client.is_active),
            }
            for client in session.query(Client).all()
        ]
    finally:
        session.close()
    return jsonify(payload)
@clients_bp.route("/<uuid>/restart", methods=["POST"])
def restart_client(uuid):
    """
    Route to restart a specific client by UUID.
    Sends an MQTT message to the broker to trigger the restart.
    Returns 404 if the client is unknown, 500 if publishing fails.
    """
    import paho.mqtt.client as mqtt
    import json
    # MQTT broker configuration
    MQTT_BROKER = "mqtt"
    MQTT_PORT = 1883
    MQTT_TOPIC = f"clients/{uuid}/restart"
    # Connect to the database to check if the client exists
    session = Session()
    client = session.query(Client).filter_by(uuid=uuid).first()
    if not client:
        session.close()
        return jsonify({"error": "Client nicht gefunden"}), 404
    session.close()
    # Send MQTT message
    try:
        mqtt_client = mqtt.Client()
        mqtt_client.connect(MQTT_BROKER, MQTT_PORT)
        # BUGFIX: without a running network loop the published packet may
        # still be queued when we disconnect immediately afterwards and gets
        # dropped. Start the loop and block until the message is handed off.
        mqtt_client.loop_start()
        payload = {"action": "restart"}
        info = mqtt_client.publish(MQTT_TOPIC, json.dumps(payload))
        info.wait_for_publish(timeout=5)
        mqtt_client.loop_stop()
        mqtt_client.disconnect()
        return jsonify({"success": True, "message": f"Restart signal sent to client {uuid}"}), 200
    except Exception as e:
        return jsonify({"error": f"Failed to send MQTT message: {str(e)}"}), 500
@clients_bp.route("/<uuid>", methods=["DELETE"])
def delete_client(uuid):
    """Delete a client row and clear its retained MQTT group message."""
    session = Session()
    target = session.query(Client).filter_by(uuid=uuid).first()
    if target is None:
        session.close()
        return jsonify({"error": "Client nicht gefunden"}), 404
    session.delete(target)
    session.commit()
    session.close()
    # MQTT: drop the retained message that belonged to the deleted client.
    response = {"success": True}
    if not delete_client_group_message(uuid):
        response["warning"] = "Client gelöscht, aber MQTT-Message-Löschung fehlgeschlagen"
    return jsonify(response)

View File

@@ -0,0 +1,94 @@
from flask import Blueprint, jsonify, request
from server.database import Session
from models.models import Conversion, ConversionStatus, EventMedia, MediaType
from server.task_queue import get_queue
from server.worker import convert_event_media_to_pdf
from datetime import datetime, timezone
import hashlib
# Blueprint bundling the PDF-conversion endpoints (enqueue + status polling).
conversions_bp = Blueprint("conversions", __name__,
                           url_prefix="/api/conversions")
def sha256_file(abs_path: str) -> str:
    """Return the hex SHA-256 digest of the file at ``abs_path``.

    Reads in 8 KiB chunks so large files never have to fit in memory.
    """
    digest = hashlib.sha256()
    with open(abs_path, "rb") as fh:
        while True:
            chunk = fh.read(8192)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
@conversions_bp.route("/<int:media_id>/pdf", methods=["POST"])
def ensure_conversion(media_id: int):
    """Ensure a PDF conversion exists (or is queued) for an office media file.

    Returns:
        404 if the media row or its source file is missing,
        200 if no conversion is needed or one is already processing/ready,
        202 with a job id when a conversion job was enqueued.
    """
    session = Session()
    try:
        media = session.query(EventMedia).get(media_id)
        if not media or not media.file_path:
            return jsonify({"error": "Media not found or no file"}), 404
        # Only enqueue for office presentation formats
        if media.media_type not in {MediaType.ppt, MediaType.pptx, MediaType.odp}:
            return jsonify({"message": "No conversion required for this media_type"}), 200
        # Compute file hash
        import os
        base_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        media_root = os.path.join(base_dir, "media")
        abs_source = os.path.join(media_root, media.file_path)
        # BUGFIX: a DB row whose file vanished from disk used to crash with
        # FileNotFoundError (HTTP 500); report it as a clean 404 instead.
        if not os.path.isfile(abs_source):
            return jsonify({"error": "Source file missing on server"}), 404
        file_hash = sha256_file(abs_source)
        # Find or create conversion row
        conv = (
            session.query(Conversion)
            .filter_by(
                source_event_media_id=media.id,
                target_format="pdf",
                file_hash=file_hash,
            )
            .one_or_none()
        )
        if not conv:
            conv = Conversion(
                source_event_media_id=media.id,
                target_format="pdf",
                status=ConversionStatus.pending,
                file_hash=file_hash,
            )
            session.add(conv)
            session.commit()
        # Enqueue if not already processing/ready
        if conv.status in {ConversionStatus.pending, ConversionStatus.failed}:
            q = get_queue()
            job = q.enqueue(convert_event_media_to_pdf, conv.id)
            return jsonify({"id": conv.id, "status": conv.status.value, "job_id": job.get_id()}), 202
        else:
            return jsonify({"id": conv.id, "status": conv.status.value, "target_path": conv.target_path}), 200
    finally:
        session.close()
@conversions_bp.route("/<int:media_id>/status", methods=["GET"])
def conversion_status(media_id: int):
    """Report the state of the most recent PDF conversion for a media id."""
    session = Session()
    try:
        latest = (
            session.query(Conversion)
            .filter_by(source_event_media_id=media_id, target_format="pdf")
            .order_by(Conversion.id.desc())
            .first()
        )
        if latest is None:
            return jsonify({"status": "missing"}), 404
        payload = {
            "id": latest.id,
            "status": latest.status.value,
            "target_path": latest.target_path,
            "started_at": latest.started_at.isoformat() if latest.started_at else None,
            "completed_at": latest.completed_at.isoformat() if latest.completed_at else None,
            "error_message": latest.error_message,
        }
        return jsonify(payload)
    finally:
        session.close()

261
server/routes/eventmedia.py Normal file
View File

@@ -0,0 +1,261 @@
from re import A
from flask import Blueprint, request, jsonify, send_from_directory
from server.database import Session
from models.models import EventMedia, MediaType, Conversion, ConversionStatus
from server.task_queue import get_queue
from server.worker import convert_event_media_to_pdf
import hashlib
import os
# Blueprint serving the Syncfusion FileManager backend plus the EventMedia
# metadata API.
eventmedia_bp = Blueprint('eventmedia', __name__, url_prefix='/api/eventmedia')
# All managed files live under server/media; DB rows store paths relative to it.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
def get_param(key, default=None):
    """Look up a request parameter, preferring form > JSON body > query args."""
    form = request.form
    if form and key in form:
        return form.get(key, default)
    if request.is_json:
        body = request.json
        if body and key in body:
            return body.get(key, default)
    return request.args.get(key, default)
# --- FileManager: List, Create Folder, Rename, Delete, Move ---
@eventmedia_bp.route('/filemanager/operations', methods=['GET', 'POST'])
def filemanager_operations():
    """Backend dispatcher for the Syncfusion FileManager component.

    The requested operation arrives as the ``action`` parameter
    (read/details/delete/rename/move/create); all paths are resolved
    relative to MEDIA_ROOT.

    NOTE(review): ``path``/``name``/``targetPath`` come straight from the
    client and are joined into filesystem paths without normalization —
    confirm that ".." segments cannot escape MEDIA_ROOT.
    """
    action = get_param('action')
    path = get_param('path', '/')
    name = get_param('name')
    new_name = get_param('newName')
    target_path = get_param('targetPath')
    full_path = os.path.join(MEDIA_ROOT, path.lstrip('/'))
    # NOTE(review): leftover debug output — consider removing or logging.
    print(action, path, name, new_name, target_path, full_path)  # Debug-Ausgabe
    if action == 'read':
        # List files and folders
        items = []
        session = Session()
        for entry in os.scandir(full_path):
            item = {
                'name': entry.name,
                'isFile': entry.is_file(),
                'size': entry.stat().st_size,
                'type': os.path.splitext(entry.name)[1][1:] if entry.is_file() else '',
                'hasChild': entry.is_dir()
            }
            # For files, try to fetch the upload date from the DB instead of
            # the filesystem mtime.
            if entry.is_file():
                media = session.query(EventMedia).filter_by(
                    url=entry.name).first()
                if media and media.uploaded_at:
                    # FileManager expects a UNIX timestamp (seconds).
                    item['dateModified'] = int(media.uploaded_at.timestamp())
                else:
                    item['dateModified'] = entry.stat().st_mtime
            else:
                item['dateModified'] = entry.stat().st_mtime
            items.append(item)
        session.close()
        return jsonify({'files': items, 'cwd': {'name': os.path.basename(full_path), 'path': path}})
    elif action == 'details':
        # Return details for one or more files.
        names = request.form.getlist('names[]') or (request.json.get(
            'names') if request.is_json and request.json else [])
        path = get_param('path', '/')
        details = []
        session = Session()
        for name in names:
            file_path = os.path.join(MEDIA_ROOT, path.lstrip('/'), name)
            media = session.query(EventMedia).filter_by(url=name).first()
            if os.path.isfile(file_path):
                detail = {
                    'name': name,
                    'size': os.path.getsize(file_path),
                    'dateModified': int(media.uploaded_at.timestamp()) if media and media.uploaded_at else int(os.path.getmtime(file_path)),
                    'type': os.path.splitext(name)[1][1:],
                    'hasChild': False,
                    'isFile': True,
                    'description': media.message_content if media else '',
                    # more fields as needed
                }
                details.append(detail)
        session.close()
        return jsonify({'details': details})
    elif action == 'delete':
        # NOTE(review): os.rmdir only removes *empty* directories; deleting a
        # non-empty folder raises OSError (HTTP 500). DB rows for deleted
        # files are not cleaned up here — confirm intended.
        for item in request.form.getlist('names[]'):
            item_path = os.path.join(full_path, item)
            if os.path.isdir(item_path):
                os.rmdir(item_path)
            else:
                os.remove(item_path)
        return jsonify({'success': True})
    elif action == 'rename':
        src = os.path.join(full_path, name)
        dst = os.path.join(full_path, new_name)
        os.rename(src, dst)
        return jsonify({'success': True})
    elif action == 'move':
        src = os.path.join(full_path, name)
        dst = os.path.join(MEDIA_ROOT, target_path.lstrip('/'), name)
        os.rename(src, dst)
        return jsonify({'success': True})
    elif action == 'create':
        os.makedirs(os.path.join(full_path, name), exist_ok=True)
        return jsonify({'success': True})
    else:
        return jsonify({'error': 'Unknown action'}), 400
# --- FileManager: Upload ---
@eventmedia_bp.route('/filemanager/upload', methods=['POST'])
def filemanager_upload():
    """Store uploaded files under MEDIA_ROOT, create an EventMedia row per
    file, and enqueue a PDF conversion for office presentation formats.

    NOTE(review): ``file.filename`` is used unsanitized in a filesystem path
    (no secure_filename) and the session is never closed on any path —
    verify both.
    """
    session = Session()
    # Fixed: read the target path from request.form first, then request.args.
    path = request.form.get('path') or request.args.get('path', '/')
    upload_path = os.path.join(MEDIA_ROOT, path.lstrip('/'))
    os.makedirs(upload_path, exist_ok=True)
    for file in request.files.getlist('uploadFiles'):
        file_path = os.path.join(upload_path, file.filename)
        file.save(file_path)
        ext = os.path.splitext(file.filename)[1][1:].lower()
        try:
            media_type = MediaType(ext)
        except ValueError:
            # Unknown extensions fall back to the generic 'other' type.
            media_type = MediaType.other
        from datetime import datetime, timezone
        media = EventMedia(
            media_type=media_type,
            url=file.filename,
            file_path=os.path.relpath(file_path, MEDIA_ROOT),
            uploaded_at=datetime.now(timezone.utc)
        )
        session.add(media)
        session.commit()
        # Enqueue conversion for office presentation types
        if media_type in {MediaType.ppt, MediaType.pptx, MediaType.odp}:
            # compute file hash (identifies this exact file content so the
            # same source is never converted twice)
            h = hashlib.sha256()
            with open(file_path, 'rb') as f:
                for chunk in iter(lambda: f.read(8192), b""):
                    h.update(chunk)
            file_hash = h.hexdigest()
            # upsert Conversion row
            conv = (
                session.query(Conversion)
                .filter_by(
                    source_event_media_id=media.id,
                    target_format='pdf',
                    file_hash=file_hash,
                )
                .one_or_none()
            )
            if not conv:
                conv = Conversion(
                    source_event_media_id=media.id,
                    target_format='pdf',
                    status=ConversionStatus.pending,
                    file_hash=file_hash,
                )
                session.add(conv)
                session.commit()
            # Only (re-)enqueue when not already processing or ready.
            if conv.status in {ConversionStatus.pending, ConversionStatus.failed}:
                q = get_queue()
                q.enqueue(convert_event_media_to_pdf, conv.id)
    session.commit()
    return jsonify({'success': True})
# --- FileManager: Download ---
@eventmedia_bp.route('/filemanager/download', methods=['GET'])
def filemanager_download():
    """Download a single file from the media tree (first entry of names[])."""
    path = request.args.get('path', '/')
    names = request.args.getlist('names[]')
    # Nur Einzel-Download für Beispiel
    if names:
        # SECURITY: normalize and confine the requested file to MEDIA_ROOT
        # so crafted "../" segments in path/name cannot reach other files.
        media_root = os.path.realpath(MEDIA_ROOT)
        file_path = os.path.realpath(
            os.path.join(media_root, path.lstrip('/'), names[0]))
        if os.path.commonpath([media_root, file_path]) != media_root:
            return jsonify({'error': 'Invalid path'}), 400
        return send_from_directory(os.path.dirname(file_path), os.path.basename(file_path), as_attachment=True)
    return jsonify({'error': 'No file specified'}), 400
# --- FileManager: Get Image (optional, für Thumbnails) ---
@eventmedia_bp.route('/filemanager/get-image', methods=['GET'])
def filemanager_get_image():
    """Serve an image from the media tree (used for FileManager thumbnails)."""
    path = request.args.get('path', '/')
    # SECURITY: normalize and confine the requested file to MEDIA_ROOT so a
    # crafted "../" path cannot read arbitrary files.
    media_root = os.path.realpath(MEDIA_ROOT)
    file_path = os.path.realpath(os.path.join(media_root, path.lstrip('/')))
    if os.path.commonpath([media_root, file_path]) != media_root:
        return jsonify({'error': 'Invalid path'}), 400
    return send_from_directory(os.path.dirname(file_path), os.path.basename(file_path))
# --- EventMedia-API: Metadaten-Liste (wie gehabt) ---
@eventmedia_bp.route('', methods=['GET'])
def list_media():
    """Return metadata dicts for every EventMedia row."""
    session = Session()
    try:
        # Serialize while the session is open so lazy attributes can load.
        return jsonify([m.to_dict() for m in session.query(EventMedia).all()])
    finally:
        # BUGFIX: the session used to leak (never closed) on every request.
        session.close()
# --- EventMedia-API: Metadaten-Update ---
@eventmedia_bp.route('/<int:media_id>', methods=['PUT'])
def update_media(media_id):
    """Update title/description metadata of one media row; 404 if missing."""
    session = Session()
    try:
        media = session.query(EventMedia).get(media_id)
        if not media:
            return jsonify({'error': 'Not found'}), 404
        data = request.json
        media.url = data.get('title', media.url)
        media.message_content = data.get('description', media.message_content)
        # Event-Zuordnung ggf. ergänzen
        session.commit()
        # Serialize while the session is still open so lazy loads work.
        return jsonify(media.to_dict())
    finally:
        # BUGFIX: the session used to leak on every request (incl. the 404).
        session.close()
@eventmedia_bp.route('/find_by_filename', methods=['GET'])
def find_by_filename():
    """Find a media row whose url equals — or file_path ends with — filename."""
    filename = request.args.get('filename')
    if not filename:
        return jsonify({'error': 'Missing filename'}), 400
    session = Session()
    try:
        # BUGFIX: the LIKE pattern did not interpolate the requested
        # filename, so the file_path fallback could never match; interpolate
        # it as a suffix match.
        media = session.query(EventMedia).filter(
            (EventMedia.url == filename) |
            (EventMedia.file_path.like(f"%{filename}"))
        ).first()
        if not media:
            return jsonify({'error': 'Not found'}), 404
        return jsonify({
            'id': media.id,
            'file_path': media.file_path,
            'url': media.url
        })
    finally:
        # BUGFIX: the session used to leak on every request.
        session.close()
@eventmedia_bp.route('/<int:media_id>', methods=['GET'])
def get_media_by_id(media_id):
    """Return basic metadata for a single media row; 404 if missing."""
    session = Session()
    try:
        media = session.query(EventMedia).get(media_id)
        if media is None:
            return jsonify({'error': 'Not found'}), 404
        return jsonify({
            'id': media.id,
            'file_path': media.file_path,
            'url': media.url,
            'name': media.url,  # alias for the display name
            'media_type': media.media_type.name if media.media_type else None
        })
    finally:
        session.close()

169
server/routes/events.py Normal file
View File

@@ -0,0 +1,169 @@
from flask import Blueprint, request, jsonify
from server.database import Session
from models.models import Event, EventMedia, MediaType
from datetime import datetime, timezone
from sqlalchemy import and_
import sys
sys.path.append('/workspace')
events_bp = Blueprint("events", __name__, url_prefix="/api/events")
def get_icon_for_type(event_type):
    """Map an event type string to its Lucide icon name ('' if unknown)."""
    icon_by_type = {
        "presentation": "Presentation",
        "website": "Globe",
        "video": "Video",
        "message": "MessageSquare",
        "webuntis": "School",
    }
    return icon_by_type.get(event_type, "")
@events_bp.route("", methods=["GET"])
def get_events():
    """List events as Syncfusion-Scheduler dicts, auto-deactivating past ones.

    Query args:
        start/end: read but currently not used for filtering.
        group_id:  restrict to one client group.
        show_inactive=1: include events whose is_active flag is False.
    """
    session = Session()
    start = request.args.get("start")
    end = request.args.get("end")
    group_id = request.args.get("group_id")
    show_inactive = request.args.get(
        "show_inactive", "0") == "1"  # Checkbox-Logik
    now = datetime.now(timezone.utc)
    events_query = session.query(Event)
    if group_id:
        events_query = events_query.filter(Event.group_id == int(group_id))
    events = events_query.all()
    result = []
    dirty = False
    for e in events:
        # Zeitzonen-Korrektur: naive end values are interpreted as UTC.
        if e.end and e.end.tzinfo is None:
            end_dt = e.end.replace(tzinfo=timezone.utc)
        else:
            end_dt = e.end
        # Flag events whose end is in the past as inactive.
        if end_dt and end_dt < now and e.is_active:
            e.is_active = False
            dirty = True
        if show_inactive or e.is_active:
            result.append({
                "Id": str(e.id),
                "GroupId": e.group_id,
                "Subject": e.title,
                "StartTime": e.start.isoformat() if e.start else None,
                "EndTime": e.end.isoformat() if e.end else None,
                "IsAllDay": False,
                "MediaId": e.event_media_id,
                "Type": e.event_type.value if e.event_type else None,  # Enum -> String
                "Icon": get_icon_for_type(e.event_type.value if e.event_type else None),
            })
    # PERF/BUGFIX: commit once after the loop instead of once per expired
    # event — one transaction instead of up to N.
    if dirty:
        session.commit()
    session.close()
    return jsonify(result)
@events_bp.route("/<event_id>", methods=["DELETE"])
def delete_event(event_id):
    """Delete an event by id; 404 if it does not exist."""
    session = Session()
    try:
        event = session.query(Event).filter_by(id=event_id).first()
        if event is None:
            return jsonify({"error": "Termin nicht gefunden"}), 404
        session.delete(event)
        session.commit()
        return jsonify({"success": True})
    finally:
        session.close()
@events_bp.route("", methods=["POST"])
def create_event():
    """Create an event; for 'website' events an EventMedia row is created too.

    Requires group_id, title, description, start, end, event_type and
    created_by in the JSON body; presentation events additionally require
    event_media_id, website events require website_url.
    """
    data = request.json
    session = Session()
    try:
        # Pflichtfelder prüfen
        required = ["group_id", "title", "description",
                    "start", "end", "event_type", "created_by"]
        for field in required:
            if field not in data:
                return jsonify({"error": f"Missing field: {field}"}), 400
        event_type = data["event_type"]
        event_media_id = None
        slideshow_interval = None
        # Präsentation: take over event_media_id and slideshow_interval.
        if event_type == "presentation":
            event_media_id = data.get("event_media_id")
            slideshow_interval = data.get("slideshow_interval")
            if not event_media_id:
                return jsonify({"error": "event_media_id required for presentation"}), 400
        # Website: create an EventMedia row for the URL and use its id.
        if event_type == "website":
            website_url = data.get("website_url")
            if not website_url:
                return jsonify({"error": "website_url required for website"}), 400
            media = EventMedia(
                media_type=MediaType.website,
                url=website_url,
                file_path=website_url
            )
            session.add(media)
            session.commit()
            event_media_id = media.id
        # created_by aus den Daten holen, Default: None
        created_by = data.get("created_by")
        # NOTE(review): astimezone() on a *naive* datetime assumes the
        # server's local timezone before converting to UTC — confirm this
        # matches client expectations (replace(tzinfo=utc) would instead
        # treat the value as already-UTC).
        start = datetime.fromisoformat(data["start"])
        end = datetime.fromisoformat(data["end"])
        if start.tzinfo is None:
            start = start.astimezone(timezone.utc)
        if end.tzinfo is None:
            end = end.astimezone(timezone.utc)
        # Event anlegen
        event = Event(
            group_id=data["group_id"],
            title=data["title"],
            description=data["description"],
            start=start,
            end=end,
            event_type=event_type,
            is_active=True,
            event_media_id=event_media_id,
            slideshow_interval=slideshow_interval,
            created_by=created_by
        )
        session.add(event)
        session.commit()
        # Read event.id while the session is still open.
        return jsonify({"success": True, "event_id": event.id})
    finally:
        # BUGFIX: the session used to leak on every request, including all
        # early validation returns.
        session.close()
@events_bp.route("/<event_id>", methods=["PUT"])
def update_event(event_id):
    """Partially update an event; fields missing from the body keep their value."""
    data = request.json
    session = Session()
    try:
        event = session.query(Event).filter_by(id=event_id).first()
        if event is None:
            return jsonify({"error": "Termin nicht gefunden"}), 404
        event.title = data.get("title", event.title)
        event.description = data.get("description", event.description)
        if "start" in data:
            event.start = datetime.fromisoformat(data["start"])
        if "end" in data:
            event.end = datetime.fromisoformat(data["end"])
        event.event_type = data.get("event_type", event.event_type)
        event.event_media_id = data.get("event_media_id", event.event_media_id)
        event.slideshow_interval = data.get(
            "slideshow_interval", event.slideshow_interval)
        event.created_by = data.get("created_by", event.created_by)
        session.commit()
        # Read the id while the session is open (avoids DetachedInstanceError).
        return jsonify({"success": True, "event_id": event.id})
    finally:
        session.close()

68
server/routes/files.py Normal file
View File

@@ -0,0 +1,68 @@
from flask import Blueprint, jsonify, send_from_directory
from server.database import Session
from models.models import EventMedia
import os
# Blueprint for direct file downloads by media ID
files_bp = Blueprint("files", __name__, url_prefix="/api/files")
# Reuse the same media root convention as eventmedia.py
# (server/media; DB rows store paths relative to this directory).
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
MEDIA_ROOT = os.path.join(BASE_DIR, "media")
@files_bp.route("/<int:media_id>/<path:filename>", methods=["GET"])
def download_media_file(media_id: int, filename: str):
    """
    Download the stored media file for a given EventMedia ID.
    URL format example:
    /api/files/26/LPUV4I_Folien_Nowitzki_Bewertungskriterien.pptx
    Behavior:
    - Looks up EventMedia by ID
    - Validates requested filename against stored metadata (best-effort)
    - Confines the resolved path to the media root
    - Serves the file from server/media using the stored relative file_path
    """
    session = Session()
    media = session.query(EventMedia).get(media_id)
    if not media:
        session.close()
        return jsonify({"error": "Not found"}), 404
    # Prefer the stored relative file_path; fall back to the URL/filename
    rel_path = media.file_path or media.url
    # Basic filename consistency check to avoid leaking other files
    # Only enforce if media.url is present
    if media.url and os.path.basename(filename) != os.path.basename(media.url):
        session.close()
        return jsonify({
            "error": "Filename mismatch",
            "expected": os.path.basename(media.url),
            "got": os.path.basename(filename),
        }), 400
    # SECURITY: normalize and confine to MEDIA_ROOT even if the stored
    # rel_path somehow contains ".." segments.
    media_root = os.path.realpath(MEDIA_ROOT)
    abs_path = os.path.realpath(os.path.join(media_root, rel_path))
    if os.path.commonpath([media_root, abs_path]) != media_root:
        session.close()
        return jsonify({"error": "Invalid path"}), 400
    # Ensure file exists
    if not os.path.isfile(abs_path):
        session.close()
        return jsonify({"error": "File not found on server"}), 404
    # Serve as attachment (download)
    directory = os.path.dirname(abs_path)
    served_name = os.path.basename(abs_path)
    session.close()
    return send_from_directory(directory, served_name, as_attachment=True)
@files_bp.route("/converted/<path:relpath>", methods=["GET"])
def download_converted(relpath: str):
    """Serve converted files (e.g., PDFs) relative to the media root."""
    # SECURITY BUGFIX: the old prefix check ran *before* normalization, so a
    # path like "converted/../x" still started with MEDIA_ROOT and slipped
    # through. Resolve symlinks/".." first, then verify containment.
    media_root = os.path.realpath(MEDIA_ROOT)
    abs_path = os.path.realpath(os.path.join(media_root, relpath))
    if os.path.commonpath([media_root, abs_path]) != media_root:
        return jsonify({"error": "Invalid path"}), 400
    if not os.path.isfile(abs_path):
        return jsonify({"error": "File not found"}), 404
    return send_from_directory(os.path.dirname(abs_path), os.path.basename(abs_path), as_attachment=True)

189
server/routes/groups.py Normal file
View File

@@ -0,0 +1,189 @@
from models.models import Client
# Neue Route: Liefert alle Gruppen mit zugehörigen Clients und deren Alive-Status
from server.database import Session
from models.models import ClientGroup
from flask import Blueprint, request, jsonify
from sqlalchemy import func
import sys
import os
from datetime import datetime, timedelta
sys.path.append('/workspace')
groups_bp = Blueprint("groups", __name__, url_prefix="/api/groups")
def get_grace_period():
    """Heartbeat grace period in seconds, selected by the ENV variable.

    Development ("dev"/"development") uses HEARTBEAT_GRACE_PERIOD_DEV
    (default 15 s); everything else uses HEARTBEAT_GRACE_PERIOD_PROD
    (default 180 s).
    """
    environment = os.environ.get("ENV", "production").lower()
    if environment in ("development", "dev"):
        return int(os.environ.get("HEARTBEAT_GRACE_PERIOD_DEV", "15"))
    return int(os.environ.get("HEARTBEAT_GRACE_PERIOD_PROD", "180"))
def is_client_alive(last_alive, is_active):
    """Return True if the client's last heartbeat is within the grace period.

    ``last_alive`` may be an ISO string (a trailing 'Z' is stripped) or a
    datetime; unparseable strings yield False. Inactive clients are never
    alive.
    """
    if not last_alive or not is_active:
        return False
    grace_period = get_grace_period()
    # last_alive kann ein String oder datetime sein
    if isinstance(last_alive, str):
        last_alive_str = last_alive[:-
                                    1] if last_alive.endswith('Z') else last_alive
        try:
            last_alive_dt = datetime.fromisoformat(last_alive_str)
        except Exception:
            return False
    else:
        last_alive_dt = last_alive
    # BUGFIX: subtracting a tz-aware datetime from the naive utcnow() raises
    # TypeError. Compare aware values against an aware "now" in the same tz,
    # and naive values against naive UTC now (original behavior).
    if last_alive_dt.tzinfo is not None:
        now = datetime.now(last_alive_dt.tzinfo)
    else:
        now = datetime.utcnow()
    return now - last_alive_dt <= timedelta(seconds=grace_period)
@groups_bp.route("", methods=["POST"])
def create_group():
    """Create a new client group; 409 if the name is already taken."""
    data = request.get_json()
    name = data.get("name")
    if not name or not name.strip():
        return jsonify({"error": "Gruppenname erforderlich"}), 400
    session = Session()
    duplicate = session.query(ClientGroup).filter_by(name=name).first()
    if duplicate is not None:
        session.close()
        return jsonify({"error": "Gruppe existiert bereits"}), 409
    group = ClientGroup(name=name, is_active=True)
    session.add(group)
    session.commit()
    # Serialize before closing so nothing touches a detached instance later.
    result = {
        "id": group.id,
        "name": group.name,
        "created_at": group.created_at.isoformat() if group.created_at else None,
        "is_active": group.is_active,
    }
    session.close()
    return jsonify(result), 201
@groups_bp.route("", methods=["GET"])
def get_groups():
    """List all client groups."""
    session = Session()
    try:
        return jsonify([
            {
                "id": group.id,
                "name": group.name,
                "created_at": group.created_at.isoformat() if group.created_at else None,
                "is_active": group.is_active,
            }
            for group in session.query(ClientGroup).all()
        ])
    finally:
        session.close()
@groups_bp.route("/<int:group_id>", methods=["PUT"])
def update_group(group_id):
    """Rename and/or (de)activate a group by id; 404 if it does not exist."""
    payload = request.get_json()
    session = Session()
    try:
        group = session.query(ClientGroup).filter_by(id=group_id).first()
        if group is None:
            return jsonify({"error": "Gruppe nicht gefunden"}), 404
        if "name" in payload:
            group.name = payload["name"]
        if "is_active" in payload:
            group.is_active = bool(payload["is_active"])
        session.commit()
        # Serialize while the session is still open.
        return jsonify({
            "id": group.id,
            "name": group.name,
            "created_at": group.created_at.isoformat() if group.created_at else None,
            "is_active": group.is_active,
        })
    finally:
        session.close()
@groups_bp.route("/<int:group_id>", methods=["DELETE"])
def delete_group(group_id):
    """Delete a group by numeric id; 404 if it does not exist."""
    session = Session()
    try:
        group = session.query(ClientGroup).filter_by(id=group_id).first()
        if group is None:
            return jsonify({"error": "Gruppe nicht gefunden"}), 404
        session.delete(group)
        session.commit()
        return jsonify({"success": True})
    finally:
        session.close()
@groups_bp.route("/byname/<string:group_name>", methods=["DELETE"])
def delete_group_by_name(group_name):
    """Delete a group identified by its exact name; 404 if unknown."""
    session = Session()
    try:
        group = session.query(ClientGroup).filter_by(name=group_name).first()
        if group is None:
            return jsonify({"error": "Gruppe nicht gefunden"}), 404
        session.delete(group)
        session.commit()
        return jsonify({"success": True})
    finally:
        session.close()
@groups_bp.route("/byname/<string:old_name>", methods=["PUT"])
def rename_group_by_name(old_name):
    """Rename a group identified by its current name; 409 on a name clash."""
    data = request.get_json()
    new_name = data.get("newName")
    if not new_name or not new_name.strip():
        return jsonify({"error": "Neuer Name erforderlich"}), 400
    session = Session()
    try:
        group = session.query(ClientGroup).filter_by(name=old_name).first()
        if group is None:
            return jsonify({"error": "Gruppe nicht gefunden"}), 404
        # Duplicate check with BINARY comparison (case-sensitive on MySQL/MariaDB).
        clash = session.query(ClientGroup).filter(
            func.binary(ClientGroup.name) == new_name).first()
        if clash is not None:
            return jsonify({"error": f'Gruppe mit dem Namen "{new_name}" existiert bereits', "duplicate_name": new_name}), 409
        group.name = new_name
        session.commit()
        # Serialize while the session is still open.
        return jsonify({
            "id": group.id,
            "name": group.name,
            "created_at": group.created_at.isoformat() if group.created_at else None,
            "is_active": group.is_active,
        })
    finally:
        session.close()
@groups_bp.route("/with_clients", methods=["GET"])
def get_groups_with_clients():
    """List all groups, each with its clients and their alive status."""
    session = Session()
    try:
        result = []
        for group in session.query(ClientGroup).all():
            members = [
                {
                    "uuid": c.uuid,
                    "description": c.description,
                    "ip": c.ip,
                    "last_alive": c.last_alive.isoformat() if c.last_alive else None,
                    "is_active": c.is_active,
                    "is_alive": is_client_alive(c.last_alive, c.is_active),
                }
                for c in session.query(Client).filter_by(group_id=group.id).all()
            ]
            result.append({
                "id": group.id,
                "name": group.name,
                "created_at": group.created_at.isoformat() if group.created_at else None,
                "is_active": group.is_active,
                "clients": members,
            })
        return jsonify(result)
    finally:
        session.close()

159
server/routes/holidays.py Normal file
View File

@@ -0,0 +1,159 @@
from flask import Blueprint, request, jsonify
from server.database import Session
from models.models import SchoolHoliday
from datetime import datetime
import csv
import io
holidays_bp = Blueprint("holidays", __name__, url_prefix="/api/holidays")
@holidays_bp.route("", methods=["GET"])
def list_holidays():
    """List school holidays, optionally filtered by ?region=, sorted by start date."""
    session = Session()
    try:
        query = session.query(SchoolHoliday)
        region = request.args.get("region")
        if region:
            query = query.filter(SchoolHoliday.region == region)
        holidays = [row.to_dict() for row in
                    query.order_by(SchoolHoliday.start_date.asc()).all()]
        return jsonify({"holidays": holidays})
    finally:
        session.close()
@holidays_bp.route("/upload", methods=["POST"])
def upload_holidays():
    """
    Accepts a CSV/TXT file upload (multipart/form-data).
    Supported formats:
    1) Headered CSV with columns (case-insensitive): name, start_date, end_date[, region]
    - Dates: YYYY-MM-DD, DD.MM.YYYY, YYYY/MM/DD, or YYYYMMDD
    2) Headerless CSV/TXT lines with columns:
    [internal, name, start_yyyymmdd, end_yyyymmdd, optional_internal]
    - Only columns 2-4 are used; 1 and 5 are ignored.

    Rows are upserted (matched on name + dates + region); the response
    reports how many rows were inserted vs. updated.
    """
    if "file" not in request.files:
        return jsonify({"error": "No file part"}), 400
    file = request.files["file"]
    if file.filename == "":
        return jsonify({"error": "No selected file"}), 400
    try:
        raw = file.read()
        # Try UTF-8 first (strict), then cp1252, then latin-1 as last resort
        try:
            content = raw.decode("utf-8")
        except UnicodeDecodeError:
            try:
                content = raw.decode("cp1252")
            except UnicodeDecodeError:
                content = raw.decode("latin-1", errors="replace")
        sniffer = csv.Sniffer()
        dialect = None
        try:
            sample = content[:2048]
            # Some files may contain a lot of quotes; allow Sniffer to guess delimiter
            dialect = sniffer.sniff(sample)
        except Exception:
            # No detectable dialect — readers below fall back to defaults.
            pass

        def parse_date(s: str):
            # Parse one date string in any supported format; returns None for
            # empty input and raises ValueError for unrecognized formats so
            # callers can skip the row.
            s = (s or "").strip()
            if not s:
                return None
            # Numeric YYYYMMDD
            if s.isdigit() and len(s) == 8:
                try:
                    return datetime.strptime(s, "%Y%m%d").date()
                except ValueError:
                    pass
            # Common formats
            for fmt in ("%Y-%m-%d", "%d.%m.%Y", "%Y/%m/%d"):
                try:
                    return datetime.strptime(s, fmt).date()
                except ValueError:
                    continue
            raise ValueError(f"Unsupported date format: {s}")
        # NOTE(review): if anything below raises, the except branch returns
        # without closing this session — confirm acceptable.
        session = Session()
        inserted = 0
        updated = 0
        # First, try headered CSV via DictReader
        dict_reader = csv.DictReader(io.StringIO(
            content), dialect=dialect) if dialect else csv.DictReader(io.StringIO(content))
        fieldnames_lower = [h.lower() for h in (dict_reader.fieldnames or [])]
        has_required_headers = {"name", "start_date",
                                "end_date"}.issubset(set(fieldnames_lower))

        def upsert(name: str, start_date, end_date, region=None):
            # Insert a holiday or update the matching existing row; rows are
            # matched on name + both dates + region (NULL-safe compare).
            nonlocal inserted, updated
            if not name or not start_date or not end_date:
                return
            existing = (
                session.query(SchoolHoliday)
                .filter(
                    SchoolHoliday.name == name,
                    SchoolHoliday.start_date == start_date,
                    SchoolHoliday.end_date == end_date,
                    SchoolHoliday.region.is_(
                        region) if region is None else SchoolHoliday.region == region,
                )
                .first()
            )
            if existing:
                existing.region = region
                existing.source_file_name = file.filename
                updated += 1
            else:
                session.add(SchoolHoliday(
                    name=name,
                    start_date=start_date,
                    end_date=end_date,
                    region=region,
                    source_file_name=file.filename,
                ))
                inserted += 1
        if has_required_headers:
            for row in dict_reader:
                norm = {k.lower(): (v or "").strip() for k, v in row.items()}
                name = norm.get("name")
                try:
                    start_date = parse_date(norm.get("start_date"))
                    end_date = parse_date(norm.get("end_date"))
                except ValueError:
                    # Skip rows with unparseable dates
                    continue
                region = (norm.get("region")
                          or None) if "region" in norm else None
                upsert(name, start_date, end_date, region)
        else:
            # Fallback: headerless rows -> use columns [1]=name, [2]=start, [3]=end
            reader = csv.reader(io.StringIO(
                content), dialect=dialect) if dialect else csv.reader(io.StringIO(content))
            for row in reader:
                if not row:
                    continue
                # tolerate varying column counts (4 or 5); ignore first and optional last
                cols = [c.strip() for c in row]
                if len(cols) < 4:
                    # Not enough data
                    continue
                name = cols[1].strip().strip('"')
                start_raw = cols[2]
                end_raw = cols[3]
                try:
                    start_date = parse_date(start_raw)
                    end_date = parse_date(end_raw)
                except ValueError:
                    continue
                upsert(name, start_date, end_date, None)
        session.commit()
        session.close()
        return jsonify({"success": True, "inserted": inserted, "updated": updated})
    except Exception as e:
        return jsonify({"error": str(e)}), 400

21
server/routes/setup.py Normal file
View File

@@ -0,0 +1,21 @@
from flask import Blueprint, jsonify
from server.database import get_db
from models.models import Client
bp = Blueprint('setup', __name__, url_prefix='/api/setup')
@bp.route('/clients_without_description', methods=['GET'])
def clients_without_description():
    """List clients that have not been given a description yet."""
    db = get_db()
    rows = db.query(Client).filter(Client.description == None).all()
    return jsonify([
        {
            'uuid': c.uuid,
            'hostname': c.hostname,
            'ip_address': c.ip_address,
            'last_alive': c.last_alive,
            'created_at': c.created_at,
            'group': c.group_id,
        }
        for c in rows
    ])

15
server/rq_worker.py Normal file
View File

@@ -0,0 +1,15 @@
import os
from rq import Worker
from server.task_queue import get_queue, get_redis_url
import redis
def main():
    """Run a blocking RQ worker on the single 'conversions' queue."""
    connection = redis.from_url(get_redis_url())
    # Single queue named 'conversions'
    worker = Worker([get_queue().name], connection=connection)
    worker.work(with_scheduler=True)
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,54 @@
#!/usr/bin/env python3
"""
Einmaliges Skript zur Synchronisation aller bestehenden Client-Gruppenzuordnungen
Verwendung: python sync_existing_clients.py
"""
from server.mqtt_helper import publish_multiple_client_groups
from models.models import Client
from server.database import Session
import sys
import os
sys.path.append('/workspace')
def main():
    """Publish the current group assignment of every active client via MQTT."""
    print("Synchronisiere bestehende Client-Gruppenzuordnungen mit MQTT...")
    session = Session()
    try:
        # Fetch every client that is still marked active.
        active_clients = session.query(Client).filter(Client.is_active == True).all()
        if not active_clients:
            print("Keine aktiven Clients gefunden.")
            return
        print(f"Gefunden: {len(active_clients)} aktive Clients")
        # Build the uuid -> group_id mapping and publish it in one batch.
        mappings = {
            client.uuid: client.group_id for client in active_clients}
        success_count, failed_count = publish_multiple_client_groups(
            mappings)
        print(f"Synchronisation abgeschlossen:")
        print(f" Erfolgreich: {success_count}")
        print(f" Fehlgeschlagen: {failed_count}")
        print(f" Gesamt: {len(active_clients)}")
        if failed_count == 0:
            print("✅ Alle Clients erfolgreich synchronisiert!")
        else:
            print(
                f"⚠️ {failed_count} Clients konnten nicht synchronisiert werden.")
    except Exception as e:
        print(f"Fehler: {e}")
    finally:
        session.close()
if __name__ == "__main__":
main()

14
server/task_queue.py Normal file
View File

@@ -0,0 +1,14 @@
import os
import redis
from rq import Queue
def get_redis_url() -> str:
    """Return the Redis connection URL from the environment.

    Falls back to the local Redis service name on the compose network
    when REDIS_URL is unset.
    """
    return os.environ.get("REDIS_URL", "redis://redis:6379/0")
def get_queue(name: str = "conversions") -> Queue:
    """Build an RQ queue bound to a fresh Redis connection.

    Args:
        name: queue name; defaults to the single 'conversions' queue.

    Returns:
        An rq.Queue with a 10-minute default job timeout.
    """
    return Queue(
        name,
        connection=redis.from_url(get_redis_url()),
        default_timeout=600,  # 10 minutes default
    )

94
server/worker.py Normal file
View File

@@ -0,0 +1,94 @@
import os
import traceback
from datetime import datetime, timezone
import requests
from sqlalchemy.orm import Session as SASession
from server.database import Session
from models.models import Conversion, ConversionStatus, EventMedia, MediaType
# Base URL of the Gotenberg HTTP conversion service; "gotenberg" is presumably
# the docker-compose service name — override via the GOTENBERG_URL env var.
GOTENBERG_URL = os.getenv("GOTENBERG_URL", "http://gotenberg:3000")
def _now():
return datetime.now(timezone.utc)
def convert_event_media_to_pdf(conversion_id: int):
    """
    Job entry point: convert a single EventMedia to PDF using Gotenberg.

    Steps:
    - Load conversion + source media
    - Set status=processing, started_at
    - POST to Gotenberg /forms/libreoffice/convert with the source file bytes
    - Save response bytes to target_path
    - Set status=ready, completed_at, target_path
    - On error: set status=failed, error_message

    Args:
        conversion_id: primary key of the Conversion row to process.
    """
    session: SASession = Session()
    try:
        # Legacy Query.get() API; returns None when the row does not exist.
        conv: Conversion = session.query(Conversion).get(conversion_id)
        if not conv:
            # Conversion row disappeared — nothing to do, drop the job.
            return
        media: EventMedia = session.query(
            EventMedia).get(conv.source_event_media_id)
        if not media or not media.file_path:
            # Persist the failure instead of raising so the job never retries
            # on a missing source record.
            conv.status = ConversionStatus.failed
            conv.error_message = "Source media or file_path missing"
            conv.completed_at = _now()
            session.commit()
            return
        # Commit the "processing" state before the (slow) HTTP call so
        # observers can see progress.
        conv.status = ConversionStatus.processing
        conv.started_at = _now()
        session.commit()
        # Get the server directory (where this worker.py file is located);
        # media.file_path is stored relative to <server_dir>/media.
        server_dir = os.path.dirname(os.path.abspath(__file__))
        media_root = os.path.join(server_dir, "media")
        abs_source = os.path.join(media_root, media.file_path)
        # Output target under media/converted
        converted_dir = os.path.join(media_root, "converted")
        os.makedirs(converted_dir, exist_ok=True)
        filename_wo_ext = os.path.splitext(
            os.path.basename(media.file_path))[0]
        pdf_name = f"{filename_wo_ext}.pdf"
        abs_target = os.path.join(converted_dir, pdf_name)
        # Send to Gotenberg — the entire response body is the produced PDF.
        with open(abs_source, "rb") as f:
            files = {"files": (os.path.basename(abs_source), f)}
            resp = requests.post(
                f"{GOTENBERG_URL}/forms/libreoffice/convert",
                files=files,
                timeout=600,
            )
            resp.raise_for_status()
        with open(abs_target, "wb") as out:
            out.write(resp.content)
        conv.status = ConversionStatus.ready
        # Store relative path under media/
        conv.target_path = os.path.relpath(abs_target, media_root)
        conv.completed_at = _now()
        session.commit()
    except requests.exceptions.Timeout:
        # NOTE(review): the session may hold a failed transaction at this
        # point; a rollback before re-querying might be required — confirm
        # against the Session configuration in server.database.
        conv = session.query(Conversion).get(conversion_id)
        if conv:
            conv.status = ConversionStatus.failed
            conv.error_message = "Conversion timeout"
            conv.completed_at = _now()
            session.commit()
    except Exception as e:
        # Catch-all: record the error (with traceback) on the row so the
        # failure is visible outside the worker logs.
        conv = session.query(Conversion).get(conversion_id)
        if conv:
            conv.status = ConversionStatus.failed
            conv.error_message = f"{e}\n{traceback.format_exc()}"
            conv.completed_at = _now()
            session.commit()
    finally:
        session.close()

53
server/wsgi.py Normal file
View File

@@ -0,0 +1,53 @@
# server/wsgi.py
from server.routes.eventmedia import eventmedia_bp
from server.routes.files import files_bp
from server.routes.events import events_bp
from server.routes.conversions import conversions_bp
from server.routes.holidays import holidays_bp
from server.routes.academic_periods import academic_periods_bp
from server.routes.groups import groups_bp
from server.routes.clients import clients_bp
from server.database import Session, engine
from flask import Flask, jsonify, send_from_directory, request
import glob
import os
import sys
# NOTE(review): this append runs after the `from server...` imports above, so
# it cannot help resolve them here — presumably /workspace is already on the
# import path in the container runtime; verify before removing.
sys.path.append('/workspace')

# WSGI application object served by Gunicorn (see Dockerfile CMD: server.wsgi:app).
app = Flask(__name__)

# Register all API blueprints (routes live in server/routes/*).
app.register_blueprint(clients_bp)
app.register_blueprint(groups_bp)
app.register_blueprint(events_bp)
app.register_blueprint(eventmedia_bp)
app.register_blueprint(files_bp)
app.register_blueprint(holidays_bp)
app.register_blueprint(academic_periods_bp)
app.register_blueprint(conversions_bp)
@app.route("/health")
def health():
    """Liveness probe endpoint; always reports a healthy status."""
    return jsonify({"status": "ok"})
@app.route("/")
def index():
    """Root endpoint: plain-text greeting confirming the API is reachable."""
    greeting = "Hello from InfoscreenAPI!"
    return greeting
@app.route("/screenshots/<uuid>")
def get_screenshot(uuid):
    """Serve the latest screenshot JPEG for a client.

    Args:
        uuid: client identifier used as the filename prefix under screenshots/.

    Returns:
        The newest matching JPEG, or a 404 JSON payload carrying a placeholder
        image URL when no screenshot exists or the uuid is malformed.
    """
    not_found = (
        jsonify({"error": "Screenshot not found",
                 "dummy": "https://placehold.co/400x300?text=No+Screenshot"}),
        404,
    )
    # `uuid` is untrusted URL input interpolated into a glob pattern: reject
    # path separators and glob metacharacters outright.
    if any(tok in uuid for tok in ("/", "\\", "..", "*", "?", "[")):
        return not_found
    pattern = os.path.join("screenshots", f"{uuid}*.jpg")
    files = glob.glob(pattern)
    if not files:
        return not_found
    # glob() order is filesystem-dependent; serve the most recent capture
    # instead of an arbitrary match.
    latest = max(files, key=os.path.getmtime)
    return send_from_directory("screenshots", os.path.basename(latest))
# Development entry point only — production serves `app` via Gunicorn
# (see Dockerfile CMD), which never executes this branch.
# NOTE(review): debug=True enables the Werkzeug debugger; keep this out of any
# directly-exposed deployment.
if __name__ == "__main__":
    app.run(host="0.0.0.0", port=8000, debug=True)