"""add client monitoring tables and columns

Revision ID: c1d2e3f4g5h6
Revises: 4f0b8a3e5c20
Create Date: 2026-03-09 21:08:38.000000

"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = 'c1d2e3f4g5h6'
down_revision = '4f0b8a3e5c20'
branch_labels = None
depends_on = None


def upgrade():
    """Add client health-monitoring schema.

    Adds process/screen health columns to ``clients``, creates the
    ``client_logs`` table, and creates supporting indexes. Every step
    checks the current schema state first, so the migration is safe to
    re-run after a partial failure.
    """
    bind = op.get_bind()
    inspector = sa.inspect(bind)

    # 1. Add health monitoring columns to clients table (safe on rerun)
    existing_client_columns = {c['name'] for c in inspector.get_columns('clients')}
    if 'current_event_id' not in existing_client_columns:
        op.add_column('clients', sa.Column('current_event_id', sa.Integer(), nullable=True))
    if 'current_process' not in existing_client_columns:
        op.add_column('clients', sa.Column('current_process', sa.String(50), nullable=True))
    if 'process_status' not in existing_client_columns:
        op.add_column('clients', sa.Column('process_status',
                                           sa.Enum('running', 'crashed', 'starting', 'stopped',
                                                   name='processstatus'),
                                           nullable=True))
    if 'process_pid' not in existing_client_columns:
        op.add_column('clients', sa.Column('process_pid', sa.Integer(), nullable=True))
    if 'last_screenshot_analyzed' not in existing_client_columns:
        op.add_column('clients', sa.Column('last_screenshot_analyzed',
                                           sa.TIMESTAMP(timezone=True), nullable=True))
    if 'screen_health_status' not in existing_client_columns:
        op.add_column('clients', sa.Column('screen_health_status',
                                           sa.Enum('OK', 'BLACK', 'FROZEN', 'UNKNOWN',
                                                   name='screenhealthstatus'),
                                           nullable=True, server_default='UNKNOWN'))
    if 'last_screenshot_hash' not in existing_client_columns:
        op.add_column('clients', sa.Column('last_screenshot_hash', sa.String(32), nullable=True))

    # 2. Create client_logs table (safe on rerun)
    if not inspector.has_table('client_logs'):
        op.create_table('client_logs',
            sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
            sa.Column('client_uuid', sa.String(36), nullable=False),
            sa.Column('timestamp', sa.TIMESTAMP(timezone=True), nullable=False),
            sa.Column('level', sa.Enum('ERROR', 'WARN', 'INFO', 'DEBUG', name='loglevel'),
                      nullable=False),
            sa.Column('message', sa.Text(), nullable=False),
            sa.Column('context', sa.JSON(), nullable=True),
            sa.Column('created_at', sa.TIMESTAMP(timezone=True),
                      server_default=sa.func.current_timestamp(), nullable=False),
            sa.PrimaryKeyConstraint('id'),
            sa.ForeignKeyConstraint(['client_uuid'], ['clients.uuid'], ondelete='CASCADE'),
            mysql_charset='utf8mb4',
            mysql_collate='utf8mb4_unicode_ci',
            mysql_engine='InnoDB'
        )

    # 3. Create indexes for efficient querying (safe on rerun).
    # Re-create the inspector: the one above was built before the DDL ran,
    # and Inspector instances cache reflection results (info_cache), so a
    # stale view here could defeat the existence checks.
    inspector = sa.inspect(bind)
    if inspector.has_table('client_logs'):
        client_log_indexes = {idx['name'] for idx in inspector.get_indexes('client_logs')}
    else:
        client_log_indexes = set()
    client_indexes = {idx['name'] for idx in inspector.get_indexes('clients')}

    if 'ix_client_logs_client_timestamp' not in client_log_indexes:
        op.create_index('ix_client_logs_client_timestamp', 'client_logs',
                        ['client_uuid', 'timestamp'])
    if 'ix_client_logs_level_timestamp' not in client_log_indexes:
        op.create_index('ix_client_logs_level_timestamp', 'client_logs',
                        ['level', 'timestamp'])
    if 'ix_clients_process_status' not in client_indexes:
        op.create_index('ix_clients_process_status', 'clients', ['process_status'])


def downgrade():
    """Revert the client monitoring schema changes.

    Drops the indexes, the ``client_logs`` table, the added ``clients``
    columns, and finally the ENUM types that upgrade() created
    implicitly. On PostgreSQL those ENUMs are standalone database
    objects: dropping the columns alone would leak them and make a
    subsequent upgrade() fail with "type already exists".
    ``checkfirst=True`` makes the type drops a no-op on MySQL/SQLite,
    where enums are inline column types.
    """
    bind = op.get_bind()

    # Drop indexes
    op.drop_index('ix_clients_process_status', table_name='clients')
    op.drop_index('ix_client_logs_level_timestamp', table_name='client_logs')
    op.drop_index('ix_client_logs_client_timestamp', table_name='client_logs')

    # Drop table
    op.drop_table('client_logs')

    # Drop columns from clients
    op.drop_column('clients', 'last_screenshot_hash')
    op.drop_column('clients', 'screen_health_status')
    op.drop_column('clients', 'last_screenshot_analyzed')
    op.drop_column('clients', 'process_pid')
    op.drop_column('clients', 'process_status')
    op.drop_column('clients', 'current_process')
    op.drop_column('clients', 'current_event_id')

    # Drop the ENUM types created by upgrade() (PostgreSQL; no-op elsewhere)
    sa.Enum(name='loglevel').drop(bind, checkfirst=True)
    sa.Enum(name='screenhealthstatus').drop(bind, checkfirst=True)
    sa.Enum(name='processstatus').drop(bind, checkfirst=True)