Preparation for first deployment-test
This commit is contained in:
1
.gitignore
vendored
1
.gitignore
vendored
@@ -75,3 +75,4 @@ dashboard/sidebar_test.py
|
||||
dashboard/assets/responsive-sidebar.css
|
||||
certs/
|
||||
sync.ffs_db
|
||||
.pnpm-store/
|
||||
|
||||
3105
dashboard/pnpm-lock.yaml
generated
3105
dashboard/pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
@@ -1,25 +1,56 @@
|
||||
import React from 'react';

/**
 * Props for the media info panel.
 *
 * `name`/`size`/`type`/`dateModified` describe the file itself and are always
 * required. The remaining fields belong to the (not yet implemented) edit
 * form; they are optional so the panel can also be rendered from bare file
 * details (e.g. `<CustomMediaInfoPanel {...fileDetails} />`).
 */
interface CustomMediaInfoPanelProps {
  mediaId?: string;
  title?: string;
  eventId?: string;
  onSave?: (data: { title: string; description: string; eventId?: string }) => void;
  name: string;
  size: number;
  type: string;
  /** Unix timestamp in seconds (converted to milliseconds for Date below). */
  dateModified: number;
  /**
   * FIX: this property was declared twice in the original interface
   * (`description: string` and `description?: string | null`), which is a
   * duplicate-identifier compile error — merged into one optional,
   * nullable field.
   */
  description?: string | null;
}

const CustomMediaInfoPanel: React.FC<CustomMediaInfoPanelProps> = ({
  mediaId,
  title,
  name,
  size,
  type,
  dateModified,
  description,
  eventId,
  onSave,
}) => {
  // Placeholder: form fields and save logic for editing can be added here
  // (mediaId/title/eventId/onSave are currently unused pending that form).

  /**
   * Formats a Unix timestamp (seconds) as a German-locale date string;
   * returns '-' when the value is missing or NaN.
   */
  function formatLocalDate(timestamp: number | undefined | null) {
    if (!timestamp || isNaN(timestamp)) return '-';
    const date = new Date(timestamp * 1000);
    return date.toLocaleString('de-DE');
  }

  return (
    <div>
      <h3>Medien-Informationen bearbeiten</h3>
      {/* Form fields for title, description, event assignment (TODO) */}
      <div
        style={{
          padding: 16,
          border: '1px solid #eee',
          borderRadius: 8,
          background: '#fafafa',
          maxWidth: 400,
        }}
      >
        <h3 style={{ marginBottom: 12 }}>Datei-Eigenschaften</h3>
        <div>
          <b>Name:</b> {name || '-'}
        </div>
        <div>
          <b>Typ:</b> {type || '-'}
        </div>
        <div>
          <b>Größe:</b> {typeof size === 'number' && !isNaN(size) ? size + ' Bytes' : '-'}
        </div>
        <div>
          <b>Geändert:</b> {formatLocalDate(dateModified)}
        </div>
        <div>
          <b>Beschreibung:</b>{' '}
          {/* The literal string 'null' is treated as "no description" because the
              backend may store it verbatim — NOTE(review): confirm upstream. */}
          {description && description !== 'null' ? (
            description
          ) : (
            <span style={{ color: '#888' }}>Keine Beschreibung</span>
          )}
        </div>
      </div>
      {/* FIX: this closing tag was missing in the original, leaving the outer
          <div> unclosed — a JSX syntax error. */}
    </div>
  );
};
|
||||
|
||||
@@ -19,33 +19,43 @@ interface MediaItem {
|
||||
}
|
||||
|
||||
const Media: React.FC = () => {
|
||||
const [mediaList, setMediaList] = useState<MediaItem[]>([]);
|
||||
const [selectedMedia, setSelectedMedia] = useState<MediaItem | null>(null);
|
||||
// State für die angezeigten Dateidetails
|
||||
const [fileDetails, setFileDetails] = useState<null | {
|
||||
name: string;
|
||||
size: number;
|
||||
type: string;
|
||||
dateModified: number;
|
||||
description?: string | null;
|
||||
}>(null);
|
||||
// Ansicht: 'LargeIcons', 'Details'
|
||||
const [viewMode, setViewMode] = useState<'LargeIcons' | 'Details'>('LargeIcons');
|
||||
|
||||
// Medien vom Server laden
|
||||
useEffect(() => {
|
||||
fetch('/api/eventmedia')
|
||||
.then(res => res.json())
|
||||
.then(setMediaList);
|
||||
}, []);
|
||||
|
||||
|
||||
// Speichern von Metadaten/Event-Zuordnung
|
||||
const handleSave = async (data: { title: string; description: string; eventId?: string }) => {
|
||||
if (!selectedMedia) return;
|
||||
await fetch(`/api/eventmedia/${selectedMedia.id}`, {
|
||||
method: 'PUT',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify(data),
|
||||
});
|
||||
// Nach dem Speichern neu laden
|
||||
const res = await fetch('/api/eventmedia');
|
||||
setMediaList(await res.json());
|
||||
};
|
||||
// Hilfsfunktion für Datum in Browser-Zeitzone
|
||||
function formatLocalDate(timestamp: number) {
|
||||
if (!timestamp) return '';
|
||||
const date = new Date(timestamp * 1000);
|
||||
return date.toLocaleString('de-DE'); // Zeigt lokale Zeit des Browsers
|
||||
}
|
||||
|
||||
return (
|
||||
<div>
|
||||
<h2 className="text-xl font-bold mb-4">Medien</h2>
|
||||
{/* Ansicht-Umschalter */}
|
||||
<div style={{ marginBottom: 12 }}>
|
||||
<button
|
||||
className={viewMode === 'LargeIcons' ? 'e-btn e-active' : 'e-btn'}
|
||||
onClick={() => setViewMode('LargeIcons')}
|
||||
style={{ marginRight: 8 }}
|
||||
>
|
||||
Icons
|
||||
</button>
|
||||
<button
|
||||
className={viewMode === 'Details' ? 'e-btn e-active' : 'e-btn'}
|
||||
onClick={() => setViewMode('Details')}
|
||||
>
|
||||
Details
|
||||
</button>
|
||||
</div>
|
||||
<FileManagerComponent
|
||||
ajaxSettings={{
|
||||
url: hostUrl + 'operations',
|
||||
@@ -71,18 +81,29 @@ const Media: React.FC = () => {
|
||||
layout: ['SortBy', 'Refresh', '|', 'View', 'Details'],
|
||||
}}
|
||||
allowMultiSelection={false}
|
||||
view={viewMode}
|
||||
detailsViewSettings={{
|
||||
columns: [
|
||||
{ field: 'name', headerText: 'Name', minWidth: '120', width: '200' },
|
||||
{ field: 'size', headerText: 'Größe', minWidth: '80', width: '100' },
|
||||
{
|
||||
field: 'dateModified',
|
||||
headerText: 'Upload-Datum',
|
||||
minWidth: '120',
|
||||
width: '180',
|
||||
template: (data: { dateModified: number }) => formatLocalDate(data.dateModified),
|
||||
},
|
||||
{ field: 'type', headerText: 'Typ', minWidth: '80', width: '100' },
|
||||
],
|
||||
}}
|
||||
menuClick={(args: any) => {
|
||||
console.log('FileManager popupOpen:', args);
|
||||
}}
|
||||
>
|
||||
<Inject services={[NavigationPane, DetailsView, Toolbar]} />
|
||||
</FileManagerComponent>
|
||||
{selectedMedia && (
|
||||
<CustomMediaInfoPanel
|
||||
mediaId={selectedMedia.id}
|
||||
title={selectedMedia.url}
|
||||
description={selectedMedia.description}
|
||||
eventId={selectedMedia.eventId}
|
||||
onSave={handleSave}
|
||||
/>
|
||||
)}
|
||||
{/* Details-Panel anzeigen, wenn Details verfügbar sind */}
|
||||
{fileDetails && <CustomMediaInfoPanel {...fileDetails} />}
|
||||
</div>
|
||||
);
|
||||
};
|
||||
|
||||
131
docker-compose.prod.yml
Normal file
131
docker-compose.prod.yml
Normal file
@@ -0,0 +1,131 @@
|
||||
# Production deployment: nginx reverse proxy in front of the dashboard and
# API, backed by MariaDB and a Mosquitto MQTT broker. All services share one
# bridge network; application services are pulled as prebuilt images.
version: '3.8'

networks:
  infoscreen-net:
    driver: bridge

services:
  # Reverse proxy / TLS termination; routes HTTP(S) to dashboard and API.
  proxy:
    image: nginx:1.25
    container_name: infoscreen-proxy
    restart: unless-stopped
    ports:
      - "80:80"
      - "443:443"
    volumes:
      - ./nginx.conf:/etc/nginx/nginx.conf:ro
      - ./certs:/etc/nginx/certs:ro
    depends_on:
      - server
      - dashboard
    networks:
      - infoscreen-net

  # MariaDB database; credentials come from the deployment .env file.
  db:
    image: mariadb:11.2
    container_name: infoscreen-db
    restart: unless-stopped
    environment:
      MYSQL_ROOT_PASSWORD: ${DB_ROOT_PASSWORD}
      MYSQL_DATABASE: ${DB_NAME}
      MYSQL_USER: ${DB_USER}
      MYSQL_PASSWORD: ${DB_PASSWORD}
    volumes:
      - db-data:/var/lib/mysql
    networks:
      - infoscreen-net
    # MariaDB ships healthcheck.sh in the official image.
    healthcheck:
      test: ["CMD", "healthcheck.sh", "--connect", "--innodb_initialized"]
      interval: 30s
      timeout: 5s
      retries: 3
      start_period: 30s

  # MQTT broker (1883 plain MQTT, 9001 websockets — see mosquitto.conf).
  mqtt:
    image: eclipse-mosquitto:2.0.21
    container_name: infoscreen-mqtt
    restart: unless-stopped
    volumes:
      - ./mosquitto.conf:/mosquitto/config/mosquitto.conf:ro
    ports:
      - "1883:1883"
      - "9001:9001"
    networks:
      - infoscreen-net
    healthcheck:
      test: ["CMD-SHELL", "mosquitto_pub -h localhost -t test -m 'health' || exit 1"]
      interval: 30s
      timeout: 5s
      retries: 3
      start_period: 10s

  # Use prebuilt images instead of building locally.
  server:
    image: ghcr.io/robbstarkaustria/infoscreen-api:latest # or wherever your images are hosted
    container_name: infoscreen-api
    restart: unless-stopped
    # Wait until db/mqtt healthchecks pass before starting the API.
    depends_on:
      db:
        condition: service_healthy
      mqtt:
        condition: service_healthy
    environment:
      DB_CONN: "mysql+pymysql://${DB_USER}:${DB_PASSWORD}@db/${DB_NAME}"
      FLASK_ENV: production
      MQTT_BROKER_URL: mqtt://mqtt:1883
      MQTT_USER: ${MQTT_USER}
      MQTT_PASSWORD: ${MQTT_PASSWORD}
    networks:
      - infoscreen-net
    # NOTE(review): requires curl and a /health endpoint inside the image — confirm.
    healthcheck:
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 5s
      retries: 3
      start_period: 40s

  dashboard:
    image: ghcr.io/robbstarkaustria/infoscreen-dashboard:latest # or wherever your images are hosted
    container_name: infoscreen-dashboard
    restart: unless-stopped
    depends_on:
      server:
        condition: service_healthy
    environment:
      NODE_ENV: production
      # NOTE(review): VITE_* variables are normally baked in at build time;
      # verify the image actually reads this at runtime.
      VITE_API_URL: ${API_URL}
    networks:
      - infoscreen-net

  # Consumes MQTT messages and writes them to the database.
  listener:
    image: ghcr.io/robbstarkaustria/infoscreen-listener:latest # or wherever your images are hosted
    container_name: infoscreen-listener
    restart: unless-stopped
    depends_on:
      db:
        condition: service_healthy
      mqtt:
        condition: service_healthy
    environment:
      DB_URL: mysql+pymysql://${DB_USER}:${DB_PASSWORD}@db/${DB_NAME}
    networks:
      - infoscreen-net

  scheduler:
    image: ghcr.io/robbstarkaustria/infoscreen-scheduler:latest # or wherever your images are hosted
    container_name: infoscreen-scheduler
    restart: unless-stopped
    depends_on:
      db:
        condition: service_healthy
      mqtt:
        condition: service_healthy
    environment:
      DB_CONN: "mysql+pymysql://${DB_USER}:${DB_PASSWORD}@db/${DB_NAME}"
      # Scheduler takes host and port separately (other services use a URL).
      MQTT_BROKER_URL: mqtt
      MQTT_PORT: 1883
    networks:
      - infoscreen-net

# Named volume so database contents survive container recreation.
volumes:
  db-data:
|
||||
100
early-validation.sh
Normal file
100
early-validation.sh
Normal file
@@ -0,0 +1,100 @@
|
||||
#!/bin/bash
# Early hardware validation at ~25% development progress.
# Goal: surface architecture problems early — this is NOT a full test run.
# The script only prints the test plan and writes a markdown checklist;
# it performs no actual tests itself.

echo "🧪 Infoscreen Early Hardware Validation"
echo "======================================"
echo "Entwicklungsstand: ~25-30%"
echo "Ziel: Basis-Deployment + Performance-Baseline"
echo ""

# Phase 1: quick setup (≈30 min)
echo "📦 Phase 1: Container-Setup-Test"
echo "- Docker-Compose startet alle Services?"
echo "- Health-Checks werden grün?"
echo "- Ports sind erreichbar?"
echo ""

# Phase 2: connectivity test (≈1 hour)
echo "🌐 Phase 2: Service-Kommunikation"
echo "- Database-Connection vom Server?"
echo "- MQTT-Broker empfängt Messages?"
echo "- Nginx routet zu Services?"
echo "- API-Grundendpoints antworten?"
echo ""

# Phase 3: performance baseline (≈2 hours)
echo "📊 Phase 3: Performance-Snapshot"
echo "- Memory-Verbrauch pro Container"
echo "- CPU-Usage im Idle"
echo "- Startup-Zeiten messen"
echo "- Network-Latency zwischen Services"
echo ""

# Phase 4: basic load test (≈4 hours)
echo "🔥 Phase 4: Basis-Belastungstest"
echo "- 10 parallele API-Requests"
echo "- 1000 MQTT-Messages senden"
echo "- Database-Insert-Performance"
echo "- Memory-Leak-Check (1h Laufzeit)"
echo ""

# Write the checklist to the current directory. The delimiter is quoted
# ('EOF') so nothing inside the heredoc is expanded by the shell.
cat > early-validation-checklist.md << 'EOF'
# Early Hardware Validation Checklist

## ✅ Container-Setup
- [ ] `docker compose up -d` erfolgreich
- [ ] Alle Services zeigen "healthy" Status
- [ ] Keine Error-Logs in den ersten 5 Minuten
- [ ] Ports 80, 8000, 3306, 1883 erreichbar

## ✅ Service-Kommunikation
- [ ] Server kann zu Database verbinden
- [ ] MQTT-Test-Message wird empfangen
- [ ] Nginx zeigt Service-Status-Page
- [ ] API-Health-Endpoint antwortet (200 OK)

## ✅ Performance-Baseline
- [ ] Total Memory < 4GB bei Idle
- [ ] CPU-Usage < 10% bei Idle
- [ ] Container-Startup < 60s
- [ ] API-Response-Time < 500ms

## ✅ Basic-Load-Test
- [ ] 10 parallele Requests ohne Errors
- [ ] 1000 MQTT-Messages ohne Message-Loss
- [ ] Memory-Usage stabil über 1h
- [ ] Keine Container-Restarts

## 📊 Baseline-Metriken (dokumentieren)
- Memory pro Container: ___MB
- CPU-Usage bei Load: ___%
- API-Response-Time: ___ms
- Database-Query-Time: ___ms
- Container-Startup-Zeit: ___s

## 🚨 Gefundene Probleme
- [ ] Performance-Bottlenecks: ____________
- [ ] Memory-Issues: ____________________
- [ ] Network-Probleme: _________________
- [ ] Container-Probleme: _______________

## ✅ Architektur-Validierung
- [ ] Container-Orchestrierung funktioniert
- [ ] Service-Discovery läuft
- [ ] Volume-Mounting korrekt
- [ ] Environment-Variables werden geladen
- [ ] Health-Checks sind aussagekräftig
EOF

echo "✅ Early Validation Checklist erstellt: early-validation-checklist.md"
echo ""
echo "🎯 Erwartetes Ergebnis:"
echo "- Architektur-Probleme identifiziert"
echo "- Performance-Baseline dokumentiert"
echo "- Deployment-Prozess validiert"
echo "- Basis für spätere Tests gelegt"
echo ""
echo "⏰ Geschätzter Aufwand: 8-12 Stunden über 2-3 Tage"
echo "💰 ROI: Verhindert teure Architektur-Änderungen später"
||||
140
hardware-test-setup.sh
Normal file
140
hardware-test-setup.sh
Normal file
@@ -0,0 +1,140 @@
|
||||
#!/bin/bash
# Infoscreen hardware test setup for a quad-core / 16 GB system.
# Installs Docker + monitoring tools and generates helper scripts plus a
# resource-limited docker-compose test stack under ~/infoscreen-hardware-test.

echo "🖥️ Infoscreen Hardware Test Setup"
echo "=================================="
echo "System: Quad-Core, 16GB RAM, SSD"
echo ""

# Print basic system information.
echo "📊 System-Information:"
echo "CPU Cores: $(nproc)"
echo "RAM Total: $(free -h | grep Mem | awk '{print $2}')"
echo "Disk Free: $(df -h / | tail -1 | awk '{print $4}')"
echo ""

# Docker setup.
# NOTE(review): 'docker-compose-plugin' is the package name from Docker's own
# apt repository; on stock Ubuntu this may need 'docker-compose-v2' — verify.
echo "🐳 Docker-Setup..."
sudo apt update -y
sudo apt install -y docker.io docker-compose-plugin
sudo systemctl enable docker
sudo systemctl start docker
# Group change takes effect only after the user logs out and back in.
sudo usermod -aG docker $USER

# Create the test directory layout.
echo "📁 Test-Umgebung erstellen..."
mkdir -p ~/infoscreen-hardware-test/{prod,dev,monitoring,scripts,backups}

# Performance-monitoring tools.
echo "📊 Monitoring-Tools installieren..."
sudo apt install -y htop iotop nethogs ncdu stress-ng

# Generate the system-monitor helper script. The heredoc delimiter is quoted
# ('EOF') so $(...) inside is written literally and expanded only when the
# generated script itself runs.
cat > ~/infoscreen-hardware-test/scripts/system-monitor.sh << 'EOF'
#!/bin/bash
# System-Monitoring während Tests

echo "=== Infoscreen System Monitor ==="
echo "Zeit: $(date)"
echo ""

echo "🖥️ CPU-Info:"
echo "Load: $(uptime | awk -F'load average:' '{print $2}')"
echo "Cores: $(nproc) | Usage: $(top -bn1 | grep "Cpu(s)" | awk '{print $2}' | cut -d'%' -f1)%"

echo ""
echo "💾 Memory-Info:"
free -h

echo ""
echo "💿 Disk-Info:"
df -h /

echo ""
echo "🐳 Docker-Info:"
docker ps --format "table {{.Names}}\t{{.Status}}\t{{.Ports}}"

echo ""
echo "🌡️ System-Temperature (falls verfügbar):"
sensors 2>/dev/null || echo "lm-sensors nicht installiert"

echo ""
echo "🌐 Network-Connections:"
ss -tuln | grep :80\\\|:443\\\|:8000\\\|:3306\\\|:1883
EOF

chmod +x ~/infoscreen-hardware-test/scripts/system-monitor.sh

# Generate the load-test helper script (CPU, memory and disk stress in
# parallel; 30s each via stress-ng).
cat > ~/infoscreen-hardware-test/scripts/load-test.sh << 'EOF'
#!/bin/bash
# Load-Test für Infoscreen-System

echo "🔥 Infoscreen Load-Test startet..."

# CPU-Load erzeugen (für Thermal-Tests)
echo "CPU-Stress-Test (30s)..."
stress-ng --cpu $(nproc) --timeout 30s &

# Memory-Test
echo "Memory-Stress-Test..."
stress-ng --vm 2 --vm-bytes 2G --timeout 30s &

# Disk-I/O-Test
echo "Disk-I/O-Test..."
stress-ng --hdd 1 --hdd-bytes 1G --timeout 30s &

# Warten auf Tests
wait

echo "✅ Load-Test abgeschlossen"
EOF

chmod +x ~/infoscreen-hardware-test/scripts/load-test.sh

# Generate a small docker-compose stack with memory limits for testing
# container behavior under constrained resources.
echo "🧪 Docker-Test-Setup..."
cat > ~/infoscreen-hardware-test/docker-compose.test.yml << 'EOF'
version: '3.8'

services:
  test-web:
    image: nginx:alpine
    ports: ["8080:80"]
    deploy:
      resources:
        limits:
          memory: 256M
        reservations:
          memory: 128M

  test-db:
    image: mariadb:11.2
    environment:
      MYSQL_ROOT_PASSWORD: test123
      MYSQL_DATABASE: testdb
    deploy:
      resources:
        limits:
          memory: 512M
        reservations:
          memory: 256M

  test-load:
    image: alpine
    command: sh -c "while true; do wget -q -O- http://test-web/ > /dev/null; sleep 0.1; done"
    depends_on: [test-web]
EOF

echo ""
echo "✅ Setup abgeschlossen!"
echo ""
echo "🚀 Nächste Schritte:"
echo "1. Logout/Login für Docker-Gruppe"
echo "2. Test: docker run hello-world"
echo "3. System-Monitor: ~/infoscreen-hardware-test/scripts/system-monitor.sh"
echo "4. Load-Test: ~/infoscreen-hardware-test/scripts/load-test.sh"
echo "5. Docker-Test: cd ~/infoscreen-hardware-test && docker compose -f docker-compose.test.yml up"
echo ""
echo "📁 Test-Verzeichnis: ~/infoscreen-hardware-test/"
echo "📊 Monitoring: Führen Sie system-monitor.sh parallel zu Tests aus"
|
||||
@@ -1,8 +1,9 @@
|
||||
from sqlalchemy import (
|
||||
Column, Integer, String, Enum, TIMESTAMP, func, Boolean, ForeignKey, Float, Text, Index
|
||||
Column, Integer, String, Enum, TIMESTAMP, func, Boolean, ForeignKey, Float, Text, Index, DateTime
|
||||
)
|
||||
from sqlalchemy.orm import declarative_base
|
||||
import enum
|
||||
from datetime import datetime, timezone
|
||||
|
||||
Base = declarative_base()
|
||||
|
||||
@@ -129,6 +130,7 @@ class EventMedia(Base):
|
||||
url = Column(String(255), nullable=False)
|
||||
file_path = Column(String(255), nullable=True)
|
||||
message_content = Column(Text, nullable=True)
|
||||
uploaded_at = Column(TIMESTAMP, nullable=False, default=lambda: datetime.now(timezone.utc))
|
||||
|
||||
def to_dict(self):
|
||||
return {
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
events {}
|
||||
http {
|
||||
upstream dashboard {
|
||||
server 127.0.0.1:3000;
|
||||
server infoscreen-dashboard:80;
|
||||
}
|
||||
upstream infoscreen_api {
|
||||
server infoscreen-api:8000;
|
||||
|
||||
@@ -0,0 +1,38 @@
|
||||
"""Change uploaded_at to TIMESTAMP in EventMedia

Revision ID: 216402147826
Revises: b22d339ed2af
Create Date: 2025-09-01 10:22:55.285710

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql

# Revision identifiers used by Alembic to order migrations.
revision: str = '216402147826'
down_revision: Union[str, None] = 'b22d339ed2af'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Convert event_media.uploaded_at from MySQL DATETIME to a
    non-nullable TIMESTAMP."""
    op.alter_column(
        'event_media',
        'uploaded_at',
        existing_type=mysql.DATETIME(),
        type_=sa.TIMESTAMP(),
        nullable=False,
    )


def downgrade() -> None:
    """Revert event_media.uploaded_at back to a nullable MySQL DATETIME."""
    op.alter_column(
        'event_media',
        'uploaded_at',
        existing_type=sa.TIMESTAMP(),
        type_=mysql.DATETIME(),
        nullable=True,
    )
|
||||
@@ -0,0 +1,32 @@
|
||||
"""Add uploaded_at to EventMedia

Revision ID: b22d339ed2af
Revises: e6eaede720aa
Create Date: 2025-09-01 10:07:46.915640

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# Revision identifiers used by Alembic to order migrations.
revision: str = 'b22d339ed2af'
down_revision: Union[str, None] = 'e6eaede720aa'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Add a nullable, timezone-aware uploaded_at column to event_media."""
    uploaded_at = sa.Column('uploaded_at', sa.DateTime(timezone=True), nullable=True)
    op.add_column('event_media', uploaded_at)


def downgrade() -> None:
    """Drop the uploaded_at column from event_media."""
    op.drop_column('event_media', 'uploaded_at')
|
||||
@@ -36,16 +36,54 @@ def filemanager_operations():
|
||||
if action == 'read':
|
||||
# List files and folders
|
||||
items = []
|
||||
session = Session()
|
||||
for entry in os.scandir(full_path):
|
||||
items.append({
|
||||
item = {
|
||||
'name': entry.name,
|
||||
'isFile': entry.is_file(),
|
||||
'size': entry.stat().st_size,
|
||||
'dateModified': entry.stat().st_mtime,
|
||||
'type': os.path.splitext(entry.name)[1][1:] if entry.is_file() else '',
|
||||
'hasChild': entry.is_dir()
|
||||
})
|
||||
}
|
||||
# Wenn Datei, versuche Upload-Datum aus DB zu holen
|
||||
if entry.is_file():
|
||||
media = session.query(EventMedia).filter_by(
|
||||
url=entry.name).first()
|
||||
if media and media.uploaded_at:
|
||||
# FileManager erwartet UNIX-Timestamp (Sekunden)
|
||||
item['dateModified'] = int(media.uploaded_at.timestamp())
|
||||
else:
|
||||
item['dateModified'] = entry.stat().st_mtime
|
||||
else:
|
||||
item['dateModified'] = entry.stat().st_mtime
|
||||
items.append(item)
|
||||
session.close()
|
||||
return jsonify({'files': items, 'cwd': {'name': os.path.basename(full_path), 'path': path}})
|
||||
|
||||
elif action == 'details':
|
||||
# Details für eine oder mehrere Dateien zurückgeben
|
||||
names = request.form.getlist('names[]') or (request.json.get(
|
||||
'names') if request.is_json and request.json else [])
|
||||
path = get_param('path', '/')
|
||||
details = []
|
||||
session = Session()
|
||||
for name in names:
|
||||
file_path = os.path.join(MEDIA_ROOT, path.lstrip('/'), name)
|
||||
media = session.query(EventMedia).filter_by(url=name).first()
|
||||
if os.path.isfile(file_path):
|
||||
detail = {
|
||||
'name': name,
|
||||
'size': os.path.getsize(file_path),
|
||||
'dateModified': int(media.uploaded_at.timestamp()) if media and media.uploaded_at else int(os.path.getmtime(file_path)),
|
||||
'type': os.path.splitext(name)[1][1:],
|
||||
'hasChild': False,
|
||||
'isFile': True,
|
||||
'description': media.message_content if media else '',
|
||||
# weitere Felder nach Bedarf
|
||||
}
|
||||
details.append(detail)
|
||||
session.close()
|
||||
return jsonify({'details': details})
|
||||
elif action == 'delete':
|
||||
for item in request.form.getlist('names[]'):
|
||||
item_path = os.path.join(full_path, item)
|
||||
@@ -88,10 +126,12 @@ def filemanager_upload():
|
||||
media_type = MediaType(ext)
|
||||
except ValueError:
|
||||
media_type = MediaType.other
|
||||
from datetime import datetime, timezone
|
||||
media = EventMedia(
|
||||
media_type=media_type,
|
||||
url=file.filename,
|
||||
file_path=os.path.relpath(file_path, MEDIA_ROOT)
|
||||
file_path=os.path.relpath(file_path, MEDIA_ROOT),
|
||||
uploaded_at=datetime.now(timezone.utc)
|
||||
)
|
||||
session.add(media)
|
||||
session.commit()
|
||||
|
||||
38
setup-deployment.sh
Normal file
38
setup-deployment.sh
Normal file
@@ -0,0 +1,38 @@
|
||||
#!/bin/bash

# Minimal setup for a production deployment.
# This script only assembles the files needed for container deployment
# into ./deployment/ — it does not build or start anything.

echo "🚀 Infoscreen Production Deployment Setup"

# 1. Create the deployment folder layout.
mkdir -p deployment/{certs,config}

# 2. Copy the production compose file and its configuration.
# NOTE(review): assumes docker-compose.prod.yml, .env and nginx.conf exist in
# the current directory — cp will fail loudly otherwise.
cp docker-compose.prod.yml deployment/
cp .env deployment/
cp nginx.conf deployment/

# 3. Generate the Mosquitto broker configuration (plain MQTT on 1883,
# websockets on 9001; anonymous access is allowed).
cat > deployment/mosquitto.conf << 'EOF'
listener 1883
allow_anonymous true
listener 9001
protocol websockets
EOF

# 4. Copy SSL certificates if they are present locally; otherwise they must
# be created on the target machine.
if [ -f "certs/dev.crt" ] && [ -f "certs/dev.key" ]; then
    cp certs/* deployment/certs/
    echo "✅ SSL-Zertifikate kopiert"
else
    echo "⚠️ SSL-Zertifikate fehlen - werden auf Zielmaschine erstellt"
fi

echo ""
echo "📦 Deployment-Paket erstellt in ./deployment/"
echo ""
echo "Nächste Schritte:"
echo "1. Kopieren Sie den 'deployment'-Ordner auf den Zielserver"
echo "2. Images bereitstellen (Registry oder TAR-Export)"
echo "3. docker compose -f docker-compose.prod.yml up -d"
|
||||
78
test-vm-setup.sh
Normal file
78
test-vm-setup.sh
Normal file
@@ -0,0 +1,78 @@
|
||||
#!/bin/bash
# Quick VM setup script for an Infoscreen deployment test: installs Docker,
# opens the firewall, and generates a tiny nginx-based smoke-test stack.

echo "🧪 Infoscreen VM Test Setup"
echo "=========================="

# System update.
echo "📦 System aktualisieren..."
sudo apt update -y
sudo apt upgrade -y

# Docker installation.
# NOTE(review): 'docker-compose-plugin' comes from Docker's apt repo; stock
# Ubuntu may need 'docker-compose-v2' instead — verify on the target.
echo "🐳 Docker installieren..."
sudo apt install -y docker.io docker-compose-plugin curl wget htop

# Enable and start Docker.
sudo systemctl enable docker
sudo systemctl start docker

# Add the current user to the docker group (takes effect after re-login).
sudo usermod -aG docker $USER

# Configure the firewall: allow SSH, HTTP and HTTPS only.
echo "🔥 Firewall konfigurieren..."
sudo ufw --force enable
sudo ufw allow 22/tcp # SSH
sudo ufw allow 80/tcp # HTTP
sudo ufw allow 443/tcp # HTTPS

# Create the test directory and work inside it from here on.
mkdir -p ~/infoscreen-test
cd ~/infoscreen-test

# Write the smoke-test compose file (delimiter quoted: no shell expansion).
cat > docker-compose.test.yml << 'EOF'
version: '3.8'
services:
  nginx:
    image: nginx:alpine
    ports:
      - "80:80"
    volumes:
      - ./nginx-test.conf:/etc/nginx/nginx.conf:ro

  test-api:
    image: httpd:alpine
    environment:
      - TEST=true
EOF

# nginx config that answers every request with a plain-text success message.
cat > nginx-test.conf << 'EOF'
events {}
http {
    server {
        listen 80;
        location / {
            return 200 "✅ VM Test erfolgreich!\n";
            add_header Content-Type text/plain;
        }
    }
}
EOF

echo ""
echo "✅ VM Setup abgeschlossen!"
echo ""
echo "Nächste Schritte:"
echo "1. Logout/Login für Docker-Gruppe"
echo "2. Test: docker run hello-world"
echo "3. Test: docker compose -f docker-compose.test.yml up -d"
echo "4. Test: curl http://localhost"
echo "5. Echtes Deployment: Dateien übertragen und starten"
echo ""
echo "🔍 System-Info:"
echo "Docker: $(docker --version)"
echo "Compose: $(docker compose version)"
echo "RAM: $(free -h | grep Mem | awk '{print $2}')"
echo "Disk: $(df -h / | tail -1 | awk '{print $4}') frei"
|
||||
Reference in New Issue
Block a user