introduce nginx-proxy

use host ip if working in wsl
This commit is contained in:
2025-06-08 04:44:42 +00:00
parent 138c5b1e8c
commit 628a3b1fe9
16 changed files with 164 additions and 103 deletions

1
.gitignore vendored
View File

@@ -66,3 +66,4 @@ desktop.ini
received_screenshots/ received_screenshots/
mosquitto/ mosquitto/
alte/ alte/
screenshots/

View File

@@ -1,56 +0,0 @@
```dockerfile
# Use a stable Python base image
FROM python:3.11-slim

# Build arguments for host user mapping
ARG USER_ID=1000
ARG GROUP_ID=1000

# Create non-root user
RUN groupadd -g ${GROUP_ID} infoscreen_taa \
 && useradd -u ${USER_ID} -g ${GROUP_ID} --shell /bin/bash --create-home infoscreen_taa

# Sanity check: fail the build early if the user was not created
RUN getent passwd infoscreen_taa

# Install locale dependencies, generate the UTF-8 locale, and drop apt caches
RUN apt-get update && apt-get install -y locales \
 && sed -i 's/# de_DE.UTF-8 UTF-8/de_DE.UTF-8 UTF-8/' /etc/locale.gen \
 && locale-gen \
 && rm -rf /var/lib/apt/lists/*

# Set environment variables for locale
ENV LANG=de_DE.UTF-8 \
    LANGUAGE=de_DE:de \
    LC_ALL=de_DE.UTF-8

# Enable Dash debug during development
ENV DASH_DEBUG_MODE=True

# Working directory inside the container (matches the mount in devcontainer.json).
# NOTE: Dockerfile instructions do not support trailing inline comments — the
# previous form "WORKDIR /app # ..." made the comment text part of the path.
WORKDIR /app

# Copy only requirements first for efficient layer caching
COPY server/requirements-dev.txt ./

# Install dev dependencies under the non-root user (pip --user install)
USER infoscreen_taa
RUN pip install --upgrade pip \
 && pip install --user -r requirements-dev.txt

# Switch back to root to copy source files and fix permissions
USER root

# Copy the server application code into /app
COPY server/ /app
RUN chown -R infoscreen_taa:infoscreen_taa /app

# Create config directory under the non-root user's home
RUN mkdir -p /home/infoscreen_taa/.config/Infoscreen-Server \
 && chown -R infoscreen_taa:infoscreen_taa /home/infoscreen_taa/.config/Infoscreen-Server

# Expose development ports (API 8000, Dash 8050)
EXPOSE 8000 8050

# Use a long-running process so the container stays alive
CMD ["tail", "-f", "/dev/null"]
```

View File

@@ -10,7 +10,7 @@ WORKDIR /app
# --- Systemabhängigkeiten installieren (falls benötigt) --- # --- Systemabhängigkeiten installieren (falls benötigt) ---
RUN apt-get update \ RUN apt-get update \
&& apt-get install -y --no-install-recommends \ && apt-get install -y --no-install-recommends \
build-essential \ build-essential git \
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
# --- Python-Abhängigkeiten kopieren und installieren --- # --- Python-Abhängigkeiten kopieren und installieren ---

View File

@@ -28,10 +28,12 @@ WORKDIR /app
# Kopiere nur Requirements für schnellen Rebuild # Kopiere nur Requirements für schnellen Rebuild
COPY requirements.txt ./ COPY requirements.txt ./
COPY requirements-dev.txt ./
# Installiere Abhängigkeiten # Installiere Abhängigkeiten
RUN pip install --upgrade pip \ RUN pip install --upgrade pip \
&& pip install --no-cache-dir -r requirements.txt && pip install --no-cache-dir -r requirements.txt \
&& pip install --no-cache-dir -r requirements-dev.txt
# Setze Entwicklungs-Modus # Setze Entwicklungs-Modus
ENV DASH_DEBUG_MODE=True ENV DASH_DEBUG_MODE=True
@@ -39,9 +41,10 @@ ENV API_URL=http://server:8000/api
# Ports für Dash und optional Live-Reload # Ports für Dash und optional Live-Reload
EXPOSE 8050 EXPOSE 8050
EXPOSE 5678
# Wechsle zum non-root User # Wechsle zum non-root User
USER infoscreen_taa USER infoscreen_taa
# Dev-Start: Dash mit Hot-Reload # Dev-Start: Dash mit Hot-Reload
CMD ["tail", "-f", "/dev/null"] CMD ["python", "-m", "debugpy", "--listen", "0.0.0.0:5678", "app.py"]

View File

@@ -1,4 +1,6 @@
# dashboard/app.py # dashboard/app.py
import sys
sys.path.append('/workspace')
from dash import Dash, html, dcc, page_container from dash import Dash, html, dcc, page_container
from flask import Flask from flask import Flask

View File

@@ -1,8 +1,9 @@
# dashboard/callbacks/overview_callbacks.py # dashboard/callbacks/overview_callbacks.py
import sys import sys
print(sys.path) sys.path.append('/workspace')
import threading import threading
import dash import dash
import requests
from dash import Input, Output, State, MATCH, html, dcc from dash import Input, Output, State, MATCH, html, dcc
from flask import session from flask import session
from utils.db import get_session # Diese Funktion muss eine SQLAlchemy-Session liefern! from utils.db import get_session # Diese Funktion muss eine SQLAlchemy-Session liefern!
@@ -10,7 +11,8 @@ from utils.mqtt_client import publish, start_loop
from config import ENV from config import ENV
import dash_bootstrap_components as dbc import dash_bootstrap_components as dbc
import os import os
from server.models import Client
API_BASE_URL = os.getenv("API_BASE_URL", "http://infoscreen-api:8000")
mqtt_thread_started = False mqtt_thread_started = False
SCREENSHOT_DIR = "received-screenshots" SCREENSHOT_DIR = "received-screenshots"
@@ -23,46 +25,48 @@ def ensure_mqtt_running():
mqtt_thread_started = True mqtt_thread_started = True
def get_latest_screenshot(client_uuid): def get_latest_screenshot(client_uuid):
prefix = f"{client_uuid}_" # TODO: Hier genau im Produktiv-Modus die IPs testen!
# Wenn API_BASE_URL auf "http" beginnt, absolute URL verwenden (z.B. im lokalen Dev)
if API_BASE_URL.startswith("http"):
return f"{API_BASE_URL}/screenshots/{client_uuid}"
# Sonst relative URL (nginx-Proxy übernimmt das Routing)
return f"/screenshots/{client_uuid}"
def fetch_clients():
try: try:
files = [f for f in os.listdir('..', SCREENSHOT_DIR) if f.startswith(prefix)] resp = requests.get(f"{API_BASE_URL}/api/clients")
if not files: resp.raise_for_status()
return "/assets/placeholder.png" return resp.json()
latest = max(files, key=lambda x: os.path.getmtime(os.path.join('.', SCREENSHOT_DIR, x))) except Exception as e:
return f"/received-screenshots/{latest}" print("Fehler beim Abrufen der Clients:", e)
except Exception: return []
return "/assets/placeholder.png"
@dash.callback( @dash.callback(
Output("clients-cards-container", "children"), Output("clients-cards-container", "children"),
Input("interval-update", "n_intervals") Input("interval-update", "n_intervals")
) )
def update_clients(n): def update_clients(n):
# Auto-Login im Development-Modus # ... Session-Handling wie gehabt ...
if "role" not in session:
if ENV == "development":
session["role"] = "admin"
else:
return dcc.Location(id="redirect-login", href="/login")
ensure_mqtt_running() ensure_mqtt_running()
session_db = get_session() clients = fetch_clients()
clients = session_db.query(Client).all()
session_db.close()
cards = [] cards = []
for client in clients: for client in clients:
uuid = client.uuid uuid = client["uuid"]
# screenshot = get_latest_screenshot(uuid)
screenshot = get_latest_screenshot(uuid) screenshot = get_latest_screenshot(uuid)
# if screenshot[-3] != "jpg":
# screenshot += ".jpg"
print(f"UUID: {uuid}, Screenshot: {screenshot}")
card = dbc.Card( card = dbc.Card(
[ [
dbc.CardHeader(client.location or client.hardware_hash), dbc.CardHeader(client["location"]),
dbc.CardBody([ dbc.CardBody([
html.Img( html.Img(
src=screenshot, src=screenshot,
style={"width": "160px", "height": "90px", "object-fit": "cover"}, style={"width": "160px", "height": "90px", "object-fit": "cover"},
), ),
html.P(f"IP: {client.ip_address or '-'}", className="card-text"), html.P(f"IP: {client['ip_address'] or '-'}", className="card-text"),
html.P(f"Letzte Aktivität: {client.last_alive or '-'}", className="card-text"), html.P(f"Letzte Aktivität: {client['last_alive'] or '-'}", className="card-text"),
dbc.ButtonGroup([ dbc.ButtonGroup([
dbc.Button("Reload Page", color="primary", id={"type": "btn-reload", "index": uuid}, n_clicks=0), dbc.Button("Reload Page", color="primary", id={"type": "btn-reload", "index": uuid}, n_clicks=0),
dbc.Button("Restart Client", color="danger", id={"type": "btn-restart", "index": uuid}, n_clicks=0), dbc.Button("Restart Client", color="danger", id={"type": "btn-restart", "index": uuid}, n_clicks=0),

View File

@@ -0,0 +1 @@
debugpy

View File

@@ -8,3 +8,5 @@ full-calendar-component>=0.0.4
pandas>=2.2.3 pandas>=2.2.3
paho-mqtt>=2.1.0 paho-mqtt>=2.1.0
python-dotenv>=1.1.0 python-dotenv>=1.1.0
PyMySQL>=1.1.1
SQLAlchemy>=2.0.41

View File

@@ -5,6 +5,7 @@ import threading
import time import time
from dotenv import load_dotenv from dotenv import load_dotenv
import paho.mqtt.client as mqtt import paho.mqtt.client as mqtt
import random
# 1. Laden der Umgebungsvariablen aus .env # 1. Laden der Umgebungsvariablen aus .env
load_dotenv(dotenv_path=os.path.join(os.path.dirname(__file__), "..", ".env")) load_dotenv(dotenv_path=os.path.join(os.path.dirname(__file__), "..", ".env"))
@@ -15,7 +16,9 @@ MQTT_BROKER_PORT = int(os.getenv("MQTT_BROKER_PORT", "1883"))
MQTT_USERNAME = os.getenv("MQTT_USERNAME", None) MQTT_USERNAME = os.getenv("MQTT_USERNAME", None)
MQTT_PASSWORD = os.getenv("MQTT_PASSWORD", None) MQTT_PASSWORD = os.getenv("MQTT_PASSWORD", None)
MQTT_KEEPALIVE = int(os.getenv("MQTT_KEEPALIVE", "60")) MQTT_KEEPALIVE = int(os.getenv("MQTT_KEEPALIVE", "60"))
MQTT_CLIENT_ID = os.getenv("MQTT_CLIENT_ID", f"dash-{int(time.time())}") base_id = os.getenv("MQTT_CLIENT_ID", "dash")
unique_part = f"{os.getpid()}_{random.randint(1000,9999)}"
MQTT_CLIENT_ID = f"{base_id}-{unique_part}"
# 3. Erstelle eine globale ClientInstanz # 3. Erstelle eine globale ClientInstanz
client = mqtt.Client(client_id=MQTT_CLIENT_ID) client = mqtt.Client(client_id=MQTT_CLIENT_ID)

View File

@@ -3,6 +3,19 @@ networks:
driver: bridge driver: bridge
services: services:
proxy:
image: nginx:1.25
container_name: infoscreen-proxy
ports:
- "80:80"
- "443:443"
volumes:
- ./nginx.conf:/etc/nginx/nginx.conf:ro
depends_on:
- server
- dashboard
networks:
- infoscreen-net
db: db:
image: mariadb:11.4.7 image: mariadb:11.4.7
container_name: infoscreen-db container_name: infoscreen-db

24
nginx.conf Normal file
View File

@@ -0,0 +1,24 @@
events {}

http {
    # Upstream group for the Dash dashboard container.
    upstream dashboard {
        server infoscreen-dashboard:8050;
    }

    server {
        listen 80;
        server_name _;

        # REST API — proxied to the Flask backend container.
        location /api/ {
            proxy_pass http://infoscreen-api:8000/api/;
        }

        # Screenshot images are served by the API container as well.
        location /screenshots/ {
            proxy_pass http://infoscreen-api:8000/screenshots/;
        }

        # Everything else goes to the dashboard; forward the usual
        # client-identity headers so the app sees the real origin.
        location / {
            proxy_pass http://dashboard;
            proxy_set_header Host $host;
            proxy_set_header X-Real-IP $remote_addr;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
            proxy_set_header X-Forwarded-Proto $scheme;
        }
    }
}

View File

@@ -10,7 +10,7 @@ WORKDIR /app
# --- Systemabhängigkeiten für MariaDB-Client & Locale --- # --- Systemabhängigkeiten für MariaDB-Client & Locale ---
RUN apt-get update \ RUN apt-get update \
&& apt-get install -y --no-install-recommends \ && apt-get install -y --no-install-recommends \
libmariadb-dev-compat libmariadb-dev locales \ libmariadb-dev-compat libmariadb-dev locales git\
&& rm -rf /var/lib/apt/lists/* && rm -rf /var/lib/apt/lists/*
# --- Locale konfigurieren --- # --- Locale konfigurieren ---

View File

@@ -34,6 +34,7 @@ RUN pip install --upgrade pip \
# Expose Ports für Flask API # Expose Ports für Flask API
EXPOSE 8000 EXPOSE 8000
EXPOSE 5678
# Setze Env für Dev # Setze Env für Dev
ENV FLASK_ENV=development ENV FLASK_ENV=development
@@ -43,4 +44,4 @@ ENV ENV_FILE=.env
USER infoscreen_taa USER infoscreen_taa
# Default Command: Flask Development Server # Default Command: Flask Development Server
CMD ["flask", "run", "--host=0.0.0.0", "--port=8000"] CMD ["python", "-m", "debugpy", "--listen", "0.0.0.0:5678", "wsgi.py"]

View File

@@ -1,15 +1,19 @@
import sys
sys.path.append('/workspace')
import os import os
import json import json
import base64 import base64
import glob import glob
from datetime import datetime, timezone from datetime import datetime
# import paho.mqtt.client as mqtt
from paho.mqtt import client as mqtt_client from paho.mqtt import client as mqtt_client
import pytz
from sqlalchemy import create_engine, func from sqlalchemy import create_engine, func
from sqlalchemy.orm import sessionmaker from sqlalchemy.orm import sessionmaker
from models import Client, Base from models import Client, Base
from helpers.check_folder import ensure_folder_exists from helpers.check_folder import ensure_folder_exists
import shutil
# Basisverzeichnis relativ zum aktuellen Skript
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
# Konfiguration # Konfiguration
MQTT_BROKER = os.getenv("MQTT_BROKER_HOST", "localhost") MQTT_BROKER = os.getenv("MQTT_BROKER_HOST", "localhost")
@@ -27,24 +31,26 @@ topics = [
("infoscreen/heartbeat", 0), ("infoscreen/heartbeat", 0),
# ... weitere Topics hier # ... weitere Topics hier
] ]
SAVE_DIR = "received_screenshots"
# Verzeichnisse für Screenshots
RECEIVED_DIR = os.path.join(BASE_DIR, "received_screenshots")
LATEST_DIR = os.path.join(BASE_DIR, "screenshots")
MAX_PER_CLIENT = 20 MAX_PER_CLIENT = 20
# Ordner für empfangene Screenshots anlegen # Ordner für empfangene Screenshots und den neuesten Screenshot anlegen
ensure_folder_exists(SAVE_DIR) ensure_folder_exists(RECEIVED_DIR)
ensure_folder_exists(LATEST_DIR)
# Datenbank konfigurieren (MariaDB) # Datenbank konfigurieren (MariaDB)
# Ersetze user, password, host und datenbankname entsprechend.
DB_URL = f"mysql+pymysql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}/{DB_NAME}" DB_URL = f"mysql+pymysql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}/{DB_NAME}"
engine = create_engine(DB_URL, echo=False) engine = create_engine(DB_URL, echo=False)
Session = sessionmaker(bind=engine) Session = sessionmaker(bind=engine)
# Falls Tabellen noch nicht existieren
Base.metadata.create_all(engine) Base.metadata.create_all(engine)
def prune_old_screenshots(client_id: str): def prune_old_screenshots(client_id: str):
"""Löscht alte Screenshots, wenn mehr als MAX_PER_CLIENT vorhanden sind.""" """Löscht alte Screenshots, wenn mehr als MAX_PER_CLIENT vorhanden sind."""
pattern = os.path.join(SAVE_DIR, f"{client_id}_*.jpg") pattern = os.path.join(RECEIVED_DIR, f"{client_id}_*.jpg")
files = sorted(glob.glob(pattern), key=os.path.getmtime) files = sorted(glob.glob(pattern), key=os.path.getmtime)
while len(files) > MAX_PER_CLIENT: while len(files) > MAX_PER_CLIENT:
oldest = files.pop(0) oldest = files.pop(0)
@@ -68,12 +74,17 @@ def handle_screenshot(msg):
# Dateiname mit Client-ID und Zeitstempel # Dateiname mit Client-ID und Zeitstempel
filename = ts.strftime(f"{client_id}_%Y%m%d_%H%M%S.jpg") filename = ts.strftime(f"{client_id}_%Y%m%d_%H%M%S.jpg")
filepath = os.path.join(SAVE_DIR, filename) received_path = os.path.join(RECEIVED_DIR, filename)
# Bild speichern # Bild im Verzeichnis "received_screenshots" speichern
with open(filepath, "wb") as f: with open(received_path, "wb") as f:
f.write(img_data) f.write(img_data)
print(f"Bild gespeichert: {filepath}") print(f"Bild gespeichert: {received_path}")
# Kopiere den neuesten Screenshot in das Verzeichnis "screenshots"
latest_path = os.path.join(LATEST_DIR, f"{client_id}.jpg")
shutil.copy(received_path, latest_path)
print(f"Neuester Screenshot aktualisiert: {latest_path}")
# Alte Screenshots beschneiden # Alte Screenshots beschneiden
prune_old_screenshots(client_id) prune_old_screenshots(client_id)
@@ -81,6 +92,7 @@ def handle_screenshot(msg):
except Exception as e: except Exception as e:
print("Fehler beim Verarbeiten der Screenshot-Nachricht:", e) print("Fehler beim Verarbeiten der Screenshot-Nachricht:", e)
def handle_heartbeat(msg): def handle_heartbeat(msg):
"""Verarbeitet Heartbeat und aktualisiert oder legt Clients an.""" """Verarbeitet Heartbeat und aktualisiert oder legt Clients an."""
session = Session() session = Session()
@@ -100,7 +112,6 @@ def handle_heartbeat(msg):
else: else:
# Neuer Client: Location per input abfragen # Neuer Client: Location per input abfragen
location = input(f"Neuer Client {uuid} gefunden. Bitte Standort eingeben: ") location = input(f"Neuer Client {uuid} gefunden. Bitte Standort eingeben: ")
# ip_address = msg._sock.getpeername()[0]
new_client = Client( new_client = Client(
uuid=uuid, uuid=uuid,
hardware_hash=hardware_hash, hardware_hash=hardware_hash,

View File

@@ -1 +1,2 @@
python-dotenv>=1.1.0 python-dotenv>=1.1.0
debugpy

View File

@@ -1,6 +1,20 @@
# server/wsgi.py # server/wsgi.py
import glob
import os
from flask import Flask, jsonify, send_from_directory
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from models import Client, Base
from flask import Flask, jsonify DB_USER = os.getenv("DB_USER")
DB_PASSWORD = os.getenv("DB_PASSWORD")
DB_HOST = os.getenv("DB_HOST")
DB_NAME = os.getenv("DB_NAME")
# Datenbank-Engine und Session anlegen (passe ggf. die DB-URL an)
DB_URL = f"mysql+pymysql://{DB_USER}:{DB_PASSWORD}@{DB_HOST}/{DB_NAME}"
engine = create_engine(DB_URL, echo=False)
Session = sessionmaker(bind=engine)
app = Flask(__name__) app = Flask(__name__)
@@ -14,3 +28,40 @@ def index():
return "Hello from InfoscreenAPI!" return "Hello from InfoscreenAPI!"
# (Weitere Endpunkte, Blueprints, Datenbank-Initialisierung usw. kommen hierher) # (Weitere Endpunkte, Blueprints, Datenbank-Initialisierung usw. kommen hierher)
@app.route("/screenshots/<uuid>")
def get_screenshot(uuid):
    """Serve the current screenshot image for the given client UUID.

    Matches ``screenshots/<uuid>*.jpg`` relative to the process working
    directory (the MQTT receiver keeps one up-to-date file per client).
    Returns a 404 JSON payload when no screenshot exists yet.
    """
    pattern = os.path.join("screenshots", f"{uuid}*.jpg")
    files = glob.glob(pattern)
    if not files:
        return jsonify({"error": "Screenshot not found"}), 404
    # Pick the newest match: normally there is exactly one file per UUID,
    # but selecting by mtime is robust if more than one prefix match exists.
    filename = os.path.basename(max(files, key=os.path.getmtime))
    return send_from_directory("screenshots", filename)
@app.route("/api/clients")
def get_clients():
    """Return all known clients as a JSON array of plain dicts.

    Opens a short-lived SQLAlchemy session and always closes it, even if
    the query raises, so pooled connections are not leaked.
    """
    session = Session()
    try:
        clients = session.query(Client).all()
        result = [
            {
                "uuid": c.uuid,
                "location": c.location,
                "hardware_hash": c.hardware_hash,
                "ip_address": c.ip_address,
                # ISO-8601 string (or None) keeps the payload JSON-serializable
                "last_alive": c.last_alive.isoformat() if c.last_alive else None,
            }
            for c in clients
        ]
    finally:
        session.close()
    return jsonify(result)
if __name__ == "__main__":
app.run(host="0.0.0.0", port=8000, debug=True)