"""
Routes for audit and user-activity logs.

Exposed under ``/api/audit`` and registered in ``backend/main.py`` via
``app.include_router(audit.router)``.

All endpoints read/write the ClickHouse table ``mabase_prod.audit_logs``
(created by ``deploy_audit_logs_table.sql``).  Every endpoint degrades
gracefully (empty result plus a warning) while that table does not exist yet.
"""
import json
import logging
from datetime import datetime
from typing import Optional

from fastapi import APIRouter, HTTPException, Query, Request

from ..database import db

logger = logging.getLogger(__name__)

router = APIRouter(prefix="/api/audit", tags=["audit"])


def _table_missing(exc: Exception) -> bool:
    """Best-effort detection of a 'table does not exist' ClickHouse error.

    NOTE(review): this relies on the error message text produced by the
    client behind ``db.query`` — confirm against the actual driver.
    """
    message = str(exc)
    if "UNKNOWN_TABLE" in message:
        return True
    return "Table" in message and "doesn't exist" in message


@router.post("/logs")
async def create_audit_log(request: Request):
    """Create an audit log entry for a user action.

    The payload is read from the JSON request body — the form the frontend
    actually sends — with query parameters accepted as a fallback for
    backward compatibility.  Supported fields: ``action`` (required),
    ``entity_type``, ``entity_id``, ``entity_count``, ``details`` (dict,
    stored as JSON text) and ``user`` (defaults to ``soc_user``).

    Returns a small acknowledgement dict; the insert itself is best-effort
    (a missing ``audit_logs`` table only logs a warning).
    """
    try:
        # The previous version declared these fields as scalar function
        # parameters, which FastAPI maps to *query* parameters — so the
        # frontend's JSON POST body was never read and the call 422'd.
        try:
            payload = await request.json()
        except Exception:
            payload = {}
        if not isinstance(payload, dict):
            payload = {}
        merged = {**dict(request.query_params), **payload}

        action = merged.get("action")
        if not action:
            raise HTTPException(status_code=422, detail="'action' est requis")

        # Store real JSON in `details` (the SQL examples expect JSON),
        # not the Python repr that str() would produce for a dict.
        details = merged.get("details")
        if isinstance(details, (dict, list)):
            details_text = json.dumps(details, ensure_ascii=False)
        else:
            details_text = str(details) if details else ''

        # entity_count lands in a non-nullable UInt32 column — never pass None.
        try:
            entity_count = int(merged.get("entity_count") or 0)
        except (TypeError, ValueError):
            entity_count = 0

        # Client IP, if the ASGI server provides it.
        client_ip = request.client.host if request.client else "unknown"

        insert_query = """
            INSERT INTO mabase_prod.audit_logs
            (timestamp, user_name, action, entity_type, entity_id, entity_count, details, client_ip)
            VALUES
            (%(timestamp)s, %(user)s, %(action)s, %(entity_type)s, %(entity_id)s, %(entity_count)s, %(details)s, %(client_ip)s)
        """
        params = {
            'timestamp': datetime.now(),
            'user': merged.get("user") or "soc_user",
            'action': action,
            'entity_type': merged.get("entity_type") or '',
            'entity_id': merged.get("entity_id") or '',
            'entity_count': entity_count,
            'details': details_text,
            'client_ip': client_ip,
        }

        # Best effort: the audit_logs table may not be deployed yet
        # (see deploy_audit_logs_table.sql) — do not fail the caller.
        try:
            db.query(insert_query, params)
        except Exception as exc:
            logger.warning("Could not insert audit log: %s", exc)

        return {
            "status": "success",
            "message": "Audit log created",
            "action": action,
            "timestamp": params['timestamp'].isoformat(),
        }

    except HTTPException:
        # Re-raise our own 422 untouched instead of wrapping it in a 500.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Erreur: {str(e)}")


@router.get("/logs")
async def get_audit_logs(
    hours: int = Query(24, ge=1, le=720, description="Fenêtre temporelle en heures"),
    user: Optional[str] = Query(None, description="Filtrer par utilisateur"),
    action: Optional[str] = Query(None, description="Filtrer par action"),
    entity_type: Optional[str] = Query(None, description="Filtrer par type d'entité"),
    limit: int = Query(100, ge=1, le=1000, description="Nombre maximum de résultats"),
):
    """Return audit log entries, newest first, with optional filters.

    Returns ``{"items": [...], "total": n, "period_hours": hours}``; when the
    audit table has not been created yet, an empty result with a warning.
    """
    try:
        where_clauses = ["timestamp >= now() - INTERVAL %(hours)s HOUR"]
        params = {"hours": hours, "limit": limit}

        # Filters are always parameterized — never interpolated into the
        # SQL text — so user-supplied values cannot inject SQL.
        if user:
            where_clauses.append("user_name = %(user)s")
            params["user"] = user
        if action:
            where_clauses.append("action = %(action)s")
            params["action"] = action
        if entity_type:
            where_clauses.append("entity_type = %(entity_type)s")
            params["entity_type"] = entity_type

        query = f"""
            SELECT
                timestamp,
                user_name,
                action,
                entity_type,
                entity_id,
                entity_count,
                details,
                client_ip
            FROM mabase_prod.audit_logs
            WHERE {" AND ".join(where_clauses)}
            ORDER BY timestamp DESC
            LIMIT %(limit)s
        """
        result = db.query(query, params)

        logs = [
            {
                "timestamp": row[0].isoformat() if row[0] else "",
                "user_name": row[1] or "",
                "action": row[2] or "",
                "entity_type": row[3] or "",
                "entity_id": row[4] or "",
                "entity_count": row[5] or 0,
                "details": row[6] or "",
                "client_ip": row[7] or "",
            }
            for row in result.result_rows
        ]

        return {
            "items": logs,
            "total": len(logs),
            "period_hours": hours,
        }

    except Exception as e:
        if _table_missing(e):
            return {
                "items": [],
                "total": 0,
                "period_hours": hours,
                "warning": "Audit logs table not created yet",
            }
        raise HTTPException(status_code=500, detail=f"Erreur: {str(e)}")


@router.get("/stats")
async def get_audit_stats(
    hours: int = Query(24, ge=1, le=720),
):
    """Aggregate audit statistics per action over the requested window."""
    try:
        query = """
            SELECT
                action,
                count() AS count,
                uniq(user_name) AS unique_users,
                sum(entity_count) AS total_entities
            FROM mabase_prod.audit_logs
            WHERE timestamp >= now() - INTERVAL %(hours)s HOUR
            GROUP BY action
            ORDER BY count DESC
        """
        result = db.query(query, {"hours": hours})

        stats = [
            {
                "action": row[0] or "",
                "count": row[1] or 0,
                "unique_users": row[2] or 0,
                "total_entities": row[3] or 0,
            }
            for row in result.result_rows
        ]

        return {
            "items": stats,
            "period_hours": hours,
        }

    except Exception as e:
        if _table_missing(e):
            return {
                "items": [],
                "period_hours": hours,
                "warning": "Audit logs table not created yet",
            }
        raise HTTPException(status_code=500, detail=f"Erreur: {str(e)}")


@router.get("/users/activity")
async def get_user_activity(
    hours: int = Query(24, ge=1, le=720),
):
    """Per-user activity summary (action count, first/last action) over the window."""
    try:
        query = """
            SELECT
                user_name,
                count() AS actions,
                uniq(action) AS action_types,
                min(timestamp) AS first_action,
                max(timestamp) AS last_action
            FROM mabase_prod.audit_logs
            WHERE timestamp >= now() - INTERVAL %(hours)s HOUR
            GROUP BY user_name
            ORDER BY actions DESC
        """
        result = db.query(query, {"hours": hours})

        users = [
            {
                "user_name": row[0] or "",
                "actions": row[1] or 0,
                "action_types": row[2] or 0,
                "first_action": row[3].isoformat() if row[3] else "",
                "last_action": row[4].isoformat() if row[4] else "",
            }
            for row in result.result_rows
        ]

        return {
            "items": users,
            "period_hours": hours,
        }

    except Exception as e:
        if _table_missing(e):
            return {
                "items": [],
                "period_hours": hours,
                "warning": "Audit logs table not created yet",
            }
        raise HTTPException(status_code=500, detail=f"Erreur: {str(e)}")
-- < deploy_audit_logs_table.sql
--
-- NOTE(review): never commit real credentials in the usage example above —
-- pass the password via an environment variable or a client config file.
-- =============================================================================

USE mabase_prod;

-- =============================================================================
-- Table storing user audit logs
-- =============================================================================

CREATE TABLE IF NOT EXISTS mabase_prod.audit_logs
(
    -- Identification
    timestamp    DateTime DEFAULT now(),
    user_name    String,                     -- user who performed the action
    action       LowCardinality(String),     -- standardized action code (see list below)

    -- Target entity
    entity_type  LowCardinality(String),     -- ip, ja4, incident, classification
    entity_id    String,                     -- entity identifier
    entity_count UInt32 DEFAULT 0,           -- number of entities (bulk operations)

    -- Details
    details      String,                     -- JSON payload describing the action
    client_ip    String,                     -- client IP address

    -- Metadata
    session_id   String DEFAULT '',          -- session identifier
    user_agent   String DEFAULT '',          -- browser User-Agent

    -- Data-skipping indexes, declared inline (the canonical ClickHouse form;
    -- standalone CREATE INDEX ... ON TABLE is not valid for skipping indexes).
    -- bloom_filter replaces minmax: minmax is ineffective for point lookups
    -- on unordered String columns.
    INDEX idx_audit_logs_user   user_name                TYPE bloom_filter GRANULARITY 1,
    INDEX idx_audit_logs_action action                   TYPE bloom_filter GRANULARITY 1,
    INDEX idx_audit_logs_entity (entity_type, entity_id) TYPE bloom_filter GRANULARITY 1
    -- No index on timestamp: it is the leading ORDER BY column, so the
    -- primary key already prunes by time range.
)
ENGINE = MergeTree()
PARTITION BY toYYYYMMDD(timestamp)
ORDER BY (timestamp, user_name, action)
TTL timestamp + INTERVAL 90 DAY             -- keep 90 days of logs
SETTINGS index_granularity = 8192;

-- =============================================================================
-- Audit statistics view
-- =============================================================================

CREATE VIEW IF NOT EXISTS mabase_prod.view_audit_stats AS
SELECT
    toDate(timestamp) AS log_date,
    user_name,
    action,
    count() AS total_actions,
    uniq(entity_id) AS unique_entities,
    sum(entity_count) AS total_entity_count
FROM mabase_prod.audit_logs
GROUP BY log_date, user_name, action;

-- =============================================================================
-- Per-user activity view
-- =============================================================================

CREATE VIEW IF NOT EXISTS mabase_prod.view_user_activity AS
SELECT
    user_name,
    toDate(timestamp) AS activity_date,
    count() AS actions,
    uniq(action) AS action_types,
    min(timestamp) AS first_action,
    max(timestamp) AS last_action,
    dateDiff('hour', min(timestamp), max(timestamp)) AS session_duration_hours
FROM mabase_prod.audit_logs
GROUP BY user_name, activity_date;

-- =============================================================================
-- Standardized audit actions
-- =============================================================================
--
-- CLASSIFICATION:
--   - CLASSIFICATION_CREATE
--   - CLASSIFICATION_UPDATE
--   - CLASSIFICATION_DELETE
--   - BULK_CLASSIFICATION
--
-- INVESTIGATION:
--   - INVESTIGATION_START
--   - INVESTIGATION_COMPLETE
--   - CORRELATION_GRAPH_VIEW
--   - TIMELINE_VIEW
--
-- EXPORT:
--   - EXPORT_CSV
--   - EXPORT_JSON
--   - EXPORT_STIX
--   - EXPORT_MISP
--
-- INCIDENT:
--   - INCIDENT_CREATE
--   - INCIDENT_UPDATE
--   - INCIDENT_CLOSE
--
-- ADMIN:
--   - USER_LOGIN
--   - USER_LOGOUT
--   - PERMISSION_CHANGE
--   - CONFIG_UPDATE
--
-- =============================================================================

-- =============================================================================
-- Example inserts
-- 
============================================================================= + +-- Classification simple +-- INSERT INTO mabase_prod.audit_logs +-- (user_name, action, entity_type, entity_id, details) +-- VALUES +-- ('analyst1', 'CLASSIFICATION_CREATE', 'ip', '192.168.1.100', +-- '{"label": "malicious", "tags": ["scraping", "bot-network"], "confidence": 0.95}'); + +-- Classification en masse +-- INSERT INTO mabase_prod.audit_logs +-- (user_name, action, entity_type, entity_count, details) +-- VALUES +-- ('analyst1', 'BULK_CLASSIFICATION', 'ip', 50, +-- '{"label": "suspicious", "tags": ["scanner"], "confidence": 0.7}'); + +-- Export STIX +-- INSERT INTO mabase_prod.audit_logs +-- (user_name, action, entity_type, entity_count, details) +-- VALUES +-- ('analyst2', 'EXPORT_STIX', 'incident', 1, +-- '{"incident_id": "INC-20240314-001", "format": "stix-2.1"}'); + +-- ============================================================================= +-- FIN +-- ============================================================================= +-- +-- Vérifier que la table est créée : +-- SELECT count() FROM mabase_prod.audit_logs; +-- +-- Voir les dernières actions : +-- SELECT * FROM mabase_prod.audit_logs ORDER BY timestamp DESC LIMIT 10; +-- +-- Statistiques par utilisateur : +-- SELECT user_name, count() AS actions FROM mabase_prod.audit_logs +-- WHERE timestamp >= now() - INTERVAL 24 HOUR GROUP BY user_name; +-- +-- ============================================================================= diff --git a/frontend/src/components/BulkClassification.tsx b/frontend/src/components/BulkClassification.tsx new file mode 100644 index 0000000..60a215e --- /dev/null +++ b/frontend/src/components/BulkClassification.tsx @@ -0,0 +1,314 @@ +import { useState } from 'react'; + +interface BulkClassificationProps { + selectedIPs: string[]; + onClose: () => void; + onSuccess: () => void; +} + +const PREDEFINED_TAGS = [ + 'scraping', + 'bot-network', + 'scanner', + 'bruteforce', + 'data-exfil', 
+ 'ddos', + 'spam', + 'proxy', + 'tor', + 'vpn', + 'hosting-asn', + 'distributed', + 'ja4-rotation', + 'ua-rotation', + 'country-cn', + 'country-us', + 'country-ru', +]; + +export function BulkClassification({ selectedIPs, onClose, onSuccess }: BulkClassificationProps) { + const [selectedLabel, setSelectedLabel] = useState('suspicious'); + const [selectedTags, setSelectedTags] = useState([]); + const [comment, setComment] = useState(''); + const [confidence, setConfidence] = useState(0.7); + const [processing, setProcessing] = useState(false); + const [progress, setProgress] = useState({ current: 0, total: selectedIPs.length }); + + const toggleTag = (tag: string) => { + setSelectedTags(prev => + prev.includes(tag) ? prev.filter(t => t !== tag) : [...prev, tag] + ); + }; + + const handleBulkClassify = async () => { + setProcessing(true); + try { + // Process in batches of 10 + const batchSize = 10; + for (let i = 0; i < selectedIPs.length; i += batchSize) { + const batch = selectedIPs.slice(i, i + batchSize); + + await Promise.all( + batch.map(ip => + fetch('/api/analysis/classifications', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + ip, + label: selectedLabel, + tags: selectedTags, + comment: `${comment} (Classification en masse - ${selectedIPs.length} IPs)`, + confidence, + analyst: 'soc_user', + bulk_operation: true, + bulk_id: `bulk-${Date.now()}` + }) + }) + ) + ); + + setProgress({ current: Math.min(i + batchSize, selectedIPs.length), total: selectedIPs.length }); + } + + // Log the bulk operation + await fetch('/api/audit/logs', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ + action: 'BULK_CLASSIFICATION', + entity_type: 'ip', + entity_count: selectedIPs.length, + details: { + label: selectedLabel, + tags: selectedTags, + confidence + } + }) + }); + + onSuccess(); + } catch (error) { + console.error('Bulk classification error:', error); + alert('Erreur lors 
de la classification en masse'); + } finally { + setProcessing(false); + } + }; + + const handleExportCSV = () => { + const csv = selectedIPs.map(ip => + `${ip},${selectedLabel},"${selectedTags.join(';')}",${confidence},"${comment}"` + ).join('\n'); + + const header = 'ip,label,tags,confidence,comment\n'; + const blob = new Blob([header + csv], { type: 'text/csv' }); + const url = URL.createObjectURL(blob); + const a = document.createElement('a'); + a.href = url; + a.download = `bulk_classification_${Date.now()}.csv`; + document.body.appendChild(a); + a.click(); + document.body.removeChild(a); + }; + + return ( +
+
+ {/* Header */} +
+
+

+ 🏷️ Classification en Masse +

+

+ {selectedIPs.length} IPs sélectionnées +

+
+ +
+ + {/* Progress Bar */} + {processing && ( +
+
+ Progression + + {progress.current} / {progress.total} + +
+
+
+
+
+ )} + + {/* Classification Label */} +
+ +
+ + + +
+
+ + {/* Tags */} +
+ +
+ {PREDEFINED_TAGS.map(tag => ( + + ))} +
+ {selectedTags.length > 0 && ( +
+ {selectedTags.length} tag(s) sélectionné(s) +
+ )} +
+ + {/* Confidence Slider */} +
+ + setConfidence(parseFloat(e.target.value))} + disabled={processing} + className="w-full h-2 bg-background-card rounded-lg appearance-none cursor-pointer" + /> +
+ 0% + 50% + 100% +
+
+ + {/* Comment */} +
+ +