feat: ajout de 7 nouveaux dashboards d'analyse avancée

- 🔥 Brute Force & Credential Stuffing (view_form_bruteforce_detected)
- 🧬 TCP/OS Spoofing (view_tcp_spoofing_detected, 86K détections)
- 📡 Header Fingerprint Clustering (agg_header_fingerprint_1h, 1374 clusters)
- ⏱️ Heatmap Temporelle (agg_host_ip_ja4_1h, pic à 20h)
- 🌍 Botnets Distribués / JA4 spread (view_host_ja4_anomalies)
- 🔄 Rotation JA4 & Persistance (view_host_ip_ja4_rotation + view_ip_recurrence)
- 🤖 Features ML / Radar (view_ai_features_1h, radar SVG + scatter plot)

Backend: 7 nouveaux routers FastAPI avec requêtes ClickHouse optimisées
Frontend: 7 nouveaux composants React + navigation 'Analyse Avancée' dans la sidebar
Fixes: alias fuzzing_index → max_fuzzing (ORDER BY ClickHouse), normalisation IPs ::ffff:

Co-authored-by: Copilot <223556219+Copilot@users.noreply.github.com>
This commit is contained in:
SOC Analyst
2026-03-15 23:57:27 +01:00
parent 1455e04303
commit e2bc4a47cd
16 changed files with 3499 additions and 1 deletions

105
backend/routes/botnets.py Normal file
View File

@ -0,0 +1,105 @@
"""
Endpoints pour l'analyse des botnets via la propagation des fingerprints JA4
"""
from fastapi import APIRouter, HTTPException, Query
from ..database import db
router = APIRouter(prefix="/api/botnets", tags=["botnets"])
def _botnet_class(unique_countries: int) -> str:
if unique_countries > 100:
return "global_botnet"
if unique_countries > 20:
return "regional_botnet"
return "concentrated"
@router.get("/ja4-spread")
async def get_ja4_spread():
    """Spread of JA4 fingerprints across countries and source IPs."""
    try:
        sql = """
            SELECT
                ja4,
                unique_ips,
                unique_countries,
                targeted_hosts
            FROM mabase_prod.view_host_ja4_anomalies
            ORDER BY unique_countries DESC
        """
        rows = db.query(sql).result_rows
        items = []
        for ja4_raw, ips_raw, countries_raw, hosts_raw in rows:
            unique_ips = int(ips_raw)
            unique_countries = int(countries_raw)
            # Distribution score: countries normalised by sqrt(IP count);
            # the max(..., 0.001) guards against division by zero.
            score = round(unique_countries / max(unique_ips ** 0.5, 0.001), 2)
            items.append({
                "ja4": str(ja4_raw),
                "unique_ips": unique_ips,
                "unique_countries": unique_countries,
                "targeted_hosts": int(hosts_raw),
                "distribution_score": score,
                "botnet_class": _botnet_class(unique_countries),
            })
        return {"items": items, "total": len(items)}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@router.get("/ja4/{ja4}/countries")
async def get_ja4_countries(ja4: str, limit: int = Query(30, ge=1, le=200)):
    """Top countries for a given JA4 fingerprint, from agg_host_ip_ja4_1h."""
    try:
        sql = """
            SELECT
                src_country_code AS country_code,
                uniq(replaceRegexpAll(toString(src_ip), '^::ffff:', '')) AS unique_ips,
                sum(hits) AS hits
            FROM mabase_prod.agg_host_ip_ja4_1h
            WHERE ja4 = %(ja4)s
            GROUP BY src_country_code
            ORDER BY unique_ips DESC
            LIMIT %(limit)s
        """
        result = db.query(sql, {"ja4": ja4, "limit": limit})
        items = []
        for country, ips, hit_count in result.result_rows:
            items.append({
                "country_code": str(country),
                "unique_ips": int(ips),
                "hits": int(hit_count),
            })
        return {"items": items, "total": len(items)}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@router.get("/summary")
async def get_botnets_summary():
    """Global statistics on detected botnets."""
    try:
        sql = """
            SELECT
                countIf(unique_countries > 100) AS total_global_botnets,
                sumIf(unique_ips, unique_countries > 50) AS total_ips_in_botnets,
                argMax(ja4, unique_countries) AS most_spread_ja4,
                argMax(ja4, unique_ips) AS most_ips_ja4
            FROM mabase_prod.view_host_ja4_anomalies
        """
        # Aggregate query without GROUP BY: exactly one result row.
        global_cnt, ips_cnt, spread_ja4, ips_ja4 = db.query(sql).result_rows[0]
        return {
            "total_global_botnets": int(global_cnt),
            "total_ips_in_botnets": int(ips_cnt),
            "most_spread_ja4": str(spread_ja4),
            "most_ips_ja4": str(ips_ja4),
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

View File

@ -0,0 +1,107 @@
"""
Endpoints pour l'analyse des attaques par force brute sur les formulaires
"""
from fastapi import APIRouter, HTTPException, Query
from ..database import db
router = APIRouter(prefix="/api/bruteforce", tags=["bruteforce"])
@router.get("/targets")
async def get_bruteforce_targets():
    """Hosts targeted by form brute-force, sorted by total hits (desc)."""
    try:
        sql = """
            SELECT
                host,
                uniq(src_ip) AS unique_ips,
                sum(hits) AS total_hits,
                sum(query_params_count) AS total_params,
                groupArray(3)(ja4) AS top_ja4s
            FROM mabase_prod.view_form_bruteforce_detected
            GROUP BY host
            ORDER BY total_hits DESC
        """
        items = []
        for host, ips, hits, params, ja4s in db.query(sql).result_rows:
            total_hits = int(hits)
            total_params = int(params)
            # Many query params per hit suggests credential stuffing
            # (payload-heavy requests) rather than plain enumeration.
            stuffing = total_hits > 0 and total_params / total_hits > 0.5
            items.append({
                "host": str(host),
                "unique_ips": int(ips),
                "total_hits": total_hits,
                "total_params": total_params,
                "attack_type": "credential_stuffing" if stuffing else "enumeration",
                "top_ja4s": [str(j) for j in (ja4s or [])],
            })
        return {"items": items, "total": len(items)}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@router.get("/attackers")
async def get_bruteforce_attackers(limit: int = Query(50, ge=1, le=500)):
    """Top attacking IPs, sorted by total hits (desc)."""
    try:
        sql = """
            SELECT
                src_ip AS ip,
                uniq(host) AS distinct_hosts,
                sum(hits) AS total_hits,
                sum(query_params_count) AS total_params,
                argMax(ja4, hits) AS ja4
            FROM mabase_prod.view_form_bruteforce_detected
            GROUP BY src_ip
            ORDER BY total_hits DESC
            LIMIT %(limit)s
        """
        rows = db.query(sql, {"limit": limit}).result_rows
        items = [
            {
                "ip": str(ip),
                "distinct_hosts": int(hosts),
                "total_hits": int(hits),
                "total_params": int(params),
                "ja4": str(ja4),
            }
            for ip, hosts, hits, params, ja4 in rows
        ]
        return {"items": items, "total": len(items)}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@router.get("/timeline")
async def get_bruteforce_timeline():
    """Hits per hour-of-day over the last 72h, from agg_host_ip_ja4_1h."""
    try:
        sql = """
            SELECT
                toHour(window_start) AS hour,
                sum(hits) AS hits,
                uniq(replaceRegexpAll(toString(src_ip), '^::ffff:', '')) AS ips
            FROM mabase_prod.agg_host_ip_ja4_1h
            WHERE window_start >= now() - INTERVAL 72 HOUR
            GROUP BY hour
            ORDER BY hour ASC
        """
        # Note: toHour folds the 72h window onto a 24-slot hour-of-day axis.
        hours = [
            {"hour": int(hour), "hits": int(hits), "ips": int(ips)}
            for hour, hits, ips in db.query(sql).result_rows
        ]
        return {"hours": hours}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

View File

@ -0,0 +1,101 @@
"""
Endpoints pour l'analyse des empreintes d'en-têtes HTTP
"""
from fastapi import APIRouter, HTTPException, Query
from ..database import db
router = APIRouter(prefix="/api/headers", tags=["header_fingerprint"])
@router.get("/clusters")
async def get_header_clusters(limit: int = Query(50, ge=1, le=200)):
    """Header-fingerprint clusters grouped by header_order_hash."""
    try:
        sql = """
            SELECT
                header_order_hash AS hash,
                uniq(replaceRegexpAll(toString(src_ip), '^::ffff:', '')) AS unique_ips,
                avg(modern_browser_score) AS avg_browser_score,
                sum(ua_ch_mismatch) AS ua_ch_mismatch_count,
                round(sum(ua_ch_mismatch) * 100.0 / count(), 2) AS ua_ch_mismatch_pct,
                groupArray(5)(sec_fetch_mode) AS top_sec_fetch_modes,
                round(sum(has_cookie) * 100.0 / count(), 2) AS has_cookie_pct,
                round(sum(has_referer) * 100.0 / count(), 2) AS has_referer_pct
            FROM mabase_prod.agg_header_fingerprint_1h
            GROUP BY header_order_hash
            ORDER BY unique_ips DESC
            LIMIT %(limit)s
        """
        result = db.query(sql, {"limit": limit})
        total_sql = """
            SELECT uniq(header_order_hash)
            FROM mabase_prod.agg_header_fingerprint_1h
        """
        total_clusters = int(db.query(total_sql).result_rows[0][0])

        def classify(score: float, mismatch_pct: float) -> str:
            # High browser score with almost no UA/Client-Hints mismatch
            # looks like real browsers; heavy mismatch looks like bots.
            if score >= 90 and mismatch_pct < 5:
                return "legitimate"
            return "bot_suspicious" if mismatch_pct > 50 else "mixed"

        clusters = []
        for (hash_value, ips, score, mm_count, mm_pct,
             modes, cookie_pct, referer_pct) in result.result_rows:
            avg_score = float(score or 0)
            mismatch_pct = float(mm_pct or 0)
            clusters.append({
                "hash": str(hash_value),
                "unique_ips": int(ips),
                "avg_browser_score": round(avg_score, 2),
                "ua_ch_mismatch_count": int(mm_count),
                "ua_ch_mismatch_pct": mismatch_pct,
                # Deduplicate the sampled sec-fetch modes.
                "top_sec_fetch_modes": list(set(str(m) for m in (modes or []))),
                "has_cookie_pct": float(cookie_pct or 0),
                "has_referer_pct": float(referer_pct or 0),
                "classification": classify(avg_score, mismatch_pct),
            })
        return {"clusters": clusters, "total_clusters": total_clusters}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@router.get("/cluster/{cluster_hash}/ips")
async def get_cluster_ips(cluster_hash: str, limit: int = Query(50, ge=1, le=500)):
    """IPs belonging to a given header-fingerprint cluster.

    Note: the path parameter was renamed from ``hash`` to ``cluster_hash``
    to stop shadowing the built-in ``hash``; the URL shape served to
    clients ("/cluster/<value>/ips") is unchanged.
    """
    try:
        sql = """
            SELECT
                replaceRegexpAll(toString(src_ip), '^::ffff:', '') AS ip,
                any(modern_browser_score) AS browser_score,
                any(ua_ch_mismatch) AS ua_ch_mismatch,
                any(sec_fetch_mode) AS sec_fetch_mode,
                any(sec_fetch_dest) AS sec_fetch_dest
            FROM mabase_prod.agg_header_fingerprint_1h
            WHERE header_order_hash = %(hash)s
            GROUP BY src_ip
            ORDER BY browser_score DESC
            LIMIT %(limit)s
        """
        result = db.query(sql, {"hash": cluster_hash, "limit": limit})
        items = []
        for ip, score, mismatch, mode, dest in result.result_rows:
            items.append({
                "ip": str(ip),
                "browser_score": int(score or 0),
                "ua_ch_mismatch": int(mismatch or 0),
                "sec_fetch_mode": str(mode or ""),
                "sec_fetch_dest": str(dest or ""),
            })
        return {"items": items, "total": len(items)}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

145
backend/routes/heatmap.py Normal file
View File

@ -0,0 +1,145 @@
"""
Endpoints pour la heatmap temporelle (hits par heure / hôte)
"""
from collections import defaultdict
from fastapi import APIRouter, HTTPException, Query
from ..database import db
router = APIRouter(prefix="/api/heatmap", tags=["heatmap"])
@router.get("/hourly")
async def get_heatmap_hourly():
    """Hits aggregated per hour-of-day over the last 72 hours."""
    try:
        sql = """
            SELECT
                toHour(window_start) AS hour,
                sum(hits) AS hits,
                uniq(replaceRegexpAll(toString(src_ip), '^::ffff:', '')) AS unique_ips,
                max(max_requests_per_sec) AS max_rps
            FROM mabase_prod.agg_host_ip_ja4_1h
            WHERE window_start >= now() - INTERVAL 72 HOUR
            GROUP BY hour
            ORDER BY hour ASC
        """
        hours = []
        for hour, hits, ips, rps in db.query(sql).result_rows:
            hours.append({
                "hour": int(hour),
                "hits": int(hits),
                "unique_ips": int(ips),
                "max_rps": int(rps),
            })
        return {"hours": hours}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@router.get("/top-hosts")
async def get_heatmap_top_hosts(limit: int = Query(20, ge=1, le=100)):
    """Most targeted hosts with a per-hour-of-day hit breakdown."""
    try:
        # Pass 1: overall stats per host over the last 72h.
        agg_sql = """
            SELECT
                host,
                sum(hits) AS total_hits,
                uniq(replaceRegexpAll(toString(src_ip), '^::ffff:', '')) AS unique_ips,
                uniq(ja4) AS unique_ja4s
            FROM mabase_prod.agg_host_ip_ja4_1h
            WHERE window_start >= now() - INTERVAL 72 HOUR
            GROUP BY host
            ORDER BY total_hits DESC
            LIMIT %(limit)s
        """
        agg_rows = db.query(agg_sql, {"limit": limit}).result_rows
        top_hosts = []
        host_stats = {}
        for host, hits, ips, ja4s in agg_rows:
            name = str(host)
            top_hosts.append(name)
            host_stats[name] = {
                "host": name,
                "total_hits": int(hits),
                "unique_ips": int(ips),
                "unique_ja4s": int(ja4s),
            }
        if not top_hosts:
            return {"items": []}
        # Pass 2: hour-of-day breakdown restricted to those hosts.
        hourly_sql = """
            SELECT
                host,
                toHour(window_start) AS hour,
                sum(hits) AS hits
            FROM mabase_prod.agg_host_ip_ja4_1h
            WHERE window_start >= now() - INTERVAL 72 HOUR
              AND host IN %(hosts)s
            GROUP BY host, hour
        """
        hourly_map: dict = defaultdict(lambda: [0] * 24)
        for host, hour, hits in db.query(hourly_sql, {"hosts": top_hosts}).result_rows:
            hourly_map[str(host)][int(hour)] += int(hits)
        items = []
        for name in top_hosts:
            entry = dict(host_stats[name])
            entry["hourly_hits"] = hourly_map[name]
            items.append(entry)
        return {"items": items}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@router.get("/matrix")
async def get_heatmap_matrix():
    """Top-15 hosts × 24 hour-of-day matrix of summed hits (last 72h)."""
    try:
        top_sql = """
            SELECT host, sum(hits) AS total_hits
            FROM mabase_prod.agg_host_ip_ja4_1h
            WHERE window_start >= now() - INTERVAL 72 HOUR
            GROUP BY host
            ORDER BY total_hits DESC
            LIMIT 15
        """
        top_hosts = [str(r[0]) for r in db.query(top_sql).result_rows]
        if not top_hosts:
            return {"hosts": [], "matrix": []}
        cell_sql = """
            SELECT
                host,
                toHour(window_start) AS hour,
                sum(hits) AS hits
            FROM mabase_prod.agg_host_ip_ja4_1h
            WHERE window_start >= now() - INTERVAL 72 HOUR
              AND host IN %(hosts)s
            GROUP BY host, hour
        """
        matrix_map: dict = defaultdict(lambda: [0] * 24)
        for host, hour, hits in db.query(cell_sql, {"hosts": top_hosts}).result_rows:
            matrix_map[str(host)][int(hour)] += int(hits)
        # One 24-slot row per host, in the same order as `hosts`.
        return {"hosts": top_hosts, "matrix": [matrix_map[h] for h in top_hosts]}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

View File

@ -0,0 +1,157 @@
"""
Endpoints pour les features ML / IA (scores d'anomalies, radar, scatter)
"""
from fastapi import APIRouter, HTTPException, Query
from ..database import db
router = APIRouter(prefix="/api/ml", tags=["ml_features"])
def _attack_type(fuzzing_index: float, hit_velocity: float,
is_fake_nav: int, ua_ch_mismatch: int) -> str:
if fuzzing_index > 50:
return "brute_force"
if hit_velocity > 1.0:
return "flood"
if is_fake_nav:
return "scraper"
if ua_ch_mismatch:
return "spoofing"
return "scanner"
@router.get("/top-anomalies")
async def get_top_anomalies(limit: int = Query(50, ge=1, le=500)):
    """Top anomalous IPs, deduplicated per IP (max fuzzing_index), sorted desc."""
    try:
        # ORDER BY 5 is positional and refers to max_fuzzing (5th column).
        sql = """
            SELECT
                replaceRegexpAll(toString(src_ip), '^::ffff:', '') AS ip,
                any(ja4) AS ja4,
                any(host) AS host,
                max(hits) AS hits,
                max(fuzzing_index) AS max_fuzzing,
                max(hit_velocity) AS hit_velocity,
                max(temporal_entropy) AS temporal_entropy,
                max(is_fake_navigation) AS is_fake_navigation,
                max(ua_ch_mismatch) AS ua_ch_mismatch,
                max(sni_host_mismatch) AS sni_host_mismatch,
                max(is_ua_rotating) AS is_ua_rotating,
                max(path_diversity_ratio) AS path_diversity_ratio,
                max(anomalous_payload_ratio) AS anomalous_payload_ratio,
                any(asn_label) AS asn_label,
                any(bot_name) AS bot_name
            FROM mabase_prod.view_ai_features_1h
            GROUP BY src_ip
            ORDER BY 5 DESC
            LIMIT %(limit)s
        """
        rows = db.query(sql, {"limit": limit}).result_rows
        items = []
        for (ip, ja4, host, hits, fuzz, vel, entropy, fake_nav, ua_mm,
             sni_mm, ua_rot, path_div, payload, asn, bot) in rows:
            fuzzing = float(fuzz or 0)
            velocity = float(vel or 0)
            fake_nav_i = int(fake_nav or 0)
            ua_mm_i = int(ua_mm or 0)
            items.append({
                "ip": str(ip),
                "ja4": str(ja4),
                "host": str(host),
                "hits": int(hits or 0),
                "fuzzing_index": fuzzing,
                "hit_velocity": velocity,
                "temporal_entropy": float(entropy or 0),
                "is_fake_navigation": fake_nav_i,
                "ua_ch_mismatch": ua_mm_i,
                "sni_host_mismatch": int(sni_mm or 0),
                "is_ua_rotating": int(ua_rot or 0),
                "path_diversity_ratio": float(path_div or 0),
                "anomalous_payload_ratio": float(payload or 0),
                "asn_label": str(asn or ""),
                "bot_name": str(bot or ""),
                "attack_type": _attack_type(fuzzing, velocity, fake_nav_i, ua_mm_i),
            })
        return {"items": items}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@router.get("/ip/{ip}/radar")
async def get_ip_radar(ip: str):
    """Radar scores for one IP across 8 anomaly dimensions (0-100 scale)."""
    try:
        sql = """
            SELECT
                avg(fuzzing_index) AS fuzzing_index,
                avg(hit_velocity) AS hit_velocity,
                avg(is_fake_navigation) AS is_fake_navigation,
                avg(ua_ch_mismatch) AS ua_ch_mismatch,
                avg(sni_host_mismatch) AS sni_host_mismatch,
                avg(orphan_ratio) AS orphan_ratio,
                avg(path_diversity_ratio) AS path_diversity_ratio,
                avg(anomalous_payload_ratio) AS anomalous_payload_ratio
            FROM mabase_prod.view_ai_features_1h
            WHERE replaceRegexpAll(toString(src_ip), '^::ffff:', '') = %(ip)s
        """
        rows = db.query(sql, {"ip": ip}).result_rows
        if not rows:
            raise HTTPException(status_code=404, detail="IP not found")
        # NULL averages become 0.0 before scaling.
        fuzz, vel, fake_nav, ua_mm, sni_mm, orphan, path_div, payload = (
            float(v or 0) for v in rows[0]
        )
        return {
            "ip": ip,
            "fuzzing_score": min(100.0, fuzz),  # not scaled x100, unlike ratio fields
            "velocity_score": min(100.0, vel * 100),
            "fake_nav_score": fake_nav * 100,
            "ua_mismatch_score": ua_mm * 100,
            "sni_mismatch_score": sni_mm * 100,
            "orphan_score": min(100.0, orphan * 100),
            # Low path diversity = high repetition, hence the inversion.
            "path_repetition_score": max(0.0, 100 - path_div * 100),
            "payload_anomaly_score": min(100.0, payload * 100),
        }
    except HTTPException:
        # Let the 404 above pass through untouched.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@router.get("/scatter")
async def get_ml_scatter(limit: int = Query(200, ge=1, le=1000)):
    """Scatter-plot points (fuzzing_index × hit_velocity), deduplicated per IP."""
    try:
        # ORDER BY 3 is positional and refers to max_fuzzing (3rd column).
        sql = """
            SELECT
                replaceRegexpAll(toString(src_ip), '^::ffff:', '') AS ip,
                any(ja4) AS ja4,
                max(fuzzing_index) AS max_fuzzing,
                max(hit_velocity) AS hit_velocity,
                max(hits) AS hits,
                max(is_fake_navigation) AS is_fake_navigation,
                max(ua_ch_mismatch) AS ua_ch_mismatch
            FROM mabase_prod.view_ai_features_1h
            GROUP BY src_ip
            ORDER BY 3 DESC
            LIMIT %(limit)s
        """
        rows = db.query(sql, {"limit": limit}).result_rows
        points = []
        for ip, ja4, fuzz, vel, hits, fake_nav, ua_mm in rows:
            fuzzing = float(fuzz or 0)
            velocity = float(vel or 0)
            points.append({
                "ip": str(ip),
                "ja4": str(ja4),
                "fuzzing_index": fuzzing,
                "hit_velocity": velocity,
                "hits": int(hits or 0),
                "attack_type": _attack_type(
                    fuzzing, velocity, int(fake_nav or 0), int(ua_mm or 0)),
            })
        return {"points": points}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

101
backend/routes/rotation.py Normal file
View File

@ -0,0 +1,101 @@
"""
Endpoints pour la détection de la rotation de fingerprints JA4 et des menaces persistantes
"""
from fastapi import APIRouter, HTTPException, Query
from ..database import db
router = APIRouter(prefix="/api/rotation", tags=["rotation"])
@router.get("/ja4-rotators")
async def get_ja4_rotators(limit: int = Query(50, ge=1, le=500)):
    """IPs rotating through the most distinct JA4 fingerprints."""
    try:
        sql = """
            SELECT
                src_ip AS ip,
                distinct_ja4_count,
                total_hits
            FROM mabase_prod.view_host_ip_ja4_rotation
            ORDER BY distinct_ja4_count DESC
            LIMIT %(limit)s
        """
        items = []
        for ip, distinct_count, hits in db.query(sql, {"limit": limit}).result_rows:
            distinct = int(distinct_count)
            items.append({
                "ip": str(ip),
                "distinct_ja4_count": distinct,
                "total_hits": int(hits),
                # 15 points per distinct fingerprint, capped at 100.
                "evasion_score": min(100, distinct * 15),
            })
        return {"items": items, "total": len(items)}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@router.get("/persistent-threats")
async def get_persistent_threats(limit: int = Query(100, ge=1, le=1000)):
    """Persistent threats sorted by persistence score (recurrence + severity)."""
    try:
        sql = """
            SELECT
                src_ip AS ip,
                recurrence,
                worst_score,
                worst_threat_level,
                first_seen,
                last_seen
            FROM mabase_prod.view_ip_recurrence
            ORDER BY (least(100, recurrence * 20 + worst_score * 50)) DESC
            LIMIT %(limit)s
        """
        rows = db.query(sql, {"limit": limit}).result_rows
        items = []
        for ip, rec, score, level, first_seen, last_seen in rows:
            recurrence = int(rec)
            worst_score = float(score or 0)
            items.append({
                "ip": str(ip),
                "recurrence": recurrence,
                "worst_score": worst_score,
                "worst_threat_level": str(level or ""),
                "first_seen": str(first_seen),
                "last_seen": str(last_seen),
                # Mirrors the SQL ORDER BY expression so the API exposes
                # the exact value used for ranking.
                "persistence_score": min(100, recurrence * 20 + worst_score * 50),
            })
        return {"items": items, "total": len(items)}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@router.get("/ip/{ip}/ja4-history")
async def get_ip_ja4_history(ip: str):
    """History of JA4 fingerprints used by a given IP."""
    try:
        sql = """
            SELECT
                ja4,
                sum(hits) AS hits,
                min(window_start) AS first_seen,
                max(window_start) AS last_seen
            FROM mabase_prod.agg_host_ip_ja4_1h
            WHERE replaceRegexpAll(toString(src_ip), '^::ffff:', '') = %(ip)s
            GROUP BY ja4
            ORDER BY hits DESC
        """
        items = []
        for ja4, hits, first_seen, last_seen in db.query(sql, {"ip": ip}).result_rows:
            items.append({
                "ja4": str(ja4),
                "hits": int(hits),
                "first_seen": str(first_seen),
                "last_seen": str(last_seen),
            })
        return {"ip": ip, "ja4_history": items, "total": len(items)}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

View File

@ -0,0 +1,163 @@
"""
Endpoints pour la détection du TCP spoofing (TTL / window size anormaux)
"""
from fastapi import APIRouter, HTTPException, Query
from ..database import db
router = APIRouter(prefix="/api/tcp-spoofing", tags=["tcp_spoofing"])
def _suspected_os(ttl: int) -> str:
if 55 <= ttl <= 65:
return "Linux/Mac"
if 120 <= ttl <= 135:
return "Windows"
if ttl < 50:
return "Behind proxy (depleted)"
return "Unknown"
def _declared_os(ua: str) -> str:
ua = ua or ""
if "Windows" in ua:
return "Windows"
if "Mac OS X" in ua:
return "macOS"
if "Linux" in ua or "Android" in ua:
return "Linux/Android"
return "Unknown"
@router.get("/overview")
async def get_tcp_spoofing_overview():
    """Global statistics for TCP spoofing detections."""
    try:
        sql = """
            SELECT
                count() AS total_detections,
                uniq(src_ip) AS unique_ips,
                countIf(tcp_ttl < 60) AS low_ttl_count,
                countIf(tcp_ttl = 0) AS zero_ttl_count
            FROM mabase_prod.view_tcp_spoofing_detected
        """
        totals = db.query(sql).result_rows[0]
        ttl_sql = """
            SELECT
                tcp_ttl,
                count() AS cnt,
                uniq(src_ip) AS ips
            FROM mabase_prod.view_tcp_spoofing_detected
            GROUP BY tcp_ttl
            ORDER BY cnt DESC
            LIMIT 15
        """
        ttl_distribution = [
            {"ttl": int(ttl), "count": int(cnt), "ips": int(ips)}
            for ttl, cnt, ips in db.query(ttl_sql).result_rows
        ]
        win_sql = """
            SELECT
                tcp_window_size,
                count() AS cnt
            FROM mabase_prod.view_tcp_spoofing_detected
            GROUP BY tcp_window_size
            ORDER BY cnt DESC
            LIMIT 10
        """
        window_size_distribution = [
            {"window_size": int(ws), "count": int(cnt)}
            for ws, cnt in db.query(win_sql).result_rows
        ]
        return {
            "total_detections": int(totals[0]),
            "unique_ips": int(totals[1]),
            "low_ttl_count": int(totals[2]),
            "zero_ttl_count": int(totals[3]),
            "ttl_distribution": ttl_distribution,
            "window_size_distribution": window_size_distribution,
        }
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@router.get("/list")
async def get_tcp_spoofing_list(
    limit: int = Query(100, ge=1, le=1000),
    offset: int = Query(0, ge=0),
):
    """Paginated list of detections, sorted by tcp_ttl ASC.

    Fix: the previous spoof check compared raw label strings, but
    _suspected_os returns "Linux/Mac" while _declared_os returns "macOS" or
    "Linux/Android" — those strings can never be equal, so every Unix-family
    client was flagged as spoofing. We now treat "Linux/Mac" as compatible
    with both "macOS" and "Linux/Android".
    """
    try:
        count_sql = "SELECT count() FROM mabase_prod.view_tcp_spoofing_detected"
        total = int(db.query(count_sql).result_rows[0][0])
        sql = """
            SELECT
                replaceRegexpAll(toString(src_ip), '^::ffff:', '') AS src_ip,
                ja4, tcp_ttl, tcp_window_size, first_ua
            FROM mabase_prod.view_tcp_spoofing_detected
            ORDER BY tcp_ttl ASC
            LIMIT %(limit)s OFFSET %(offset)s
        """
        result = db.query(sql, {"limit": limit, "offset": offset})

        def _os_match(sus: str, dec: str) -> bool:
            # TTL-based "Linux/Mac" covers both declared Unix-family labels.
            if sus == dec:
                return True
            return sus == "Linux/Mac" and dec in ("macOS", "Linux/Android")

        items = []
        for ip, ja4, ttl_raw, win_size, ua_raw in result.result_rows:
            ttl = int(ttl_raw)
            ua = str(ua_raw or "")
            sus_os = _suspected_os(ttl)
            dec_os = _declared_os(ua)
            # Only flag when both sides are confidently identified and disagree.
            spoof_flag = (
                sus_os != "Unknown"
                and dec_os != "Unknown"
                and not _os_match(sus_os, dec_os)
            )
            items.append({
                "ip": str(ip),
                "ja4": str(ja4),
                "tcp_ttl": ttl,
                "tcp_window_size": int(win_size),
                "first_ua": ua,
                "suspected_os": sus_os,
                "declared_os": dec_os,
                "spoof_flag": spoof_flag,
            })
        return {"items": items, "total": total}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
@router.get("/matrix")
async def get_tcp_spoofing_matrix():
    """Cross-tab suspected_os × declared_os with counts, sorted by count desc.

    Improvements: the query no longer fetches the unused src_ip column, and
    the manual dict.get() accumulation is replaced by collections.Counter,
    whose most_common() also provides the count-descending ordering.
    """
    from collections import Counter  # stdlib; local import keeps the route self-contained
    try:
        sql = """
            SELECT tcp_ttl, first_ua
            FROM mabase_prod.view_tcp_spoofing_detected
        """
        result = db.query(sql)
        counts = Counter(
            (_suspected_os(int(ttl)), _declared_os(str(ua or "")))
            for ttl, ua in result.result_rows
        )
        matrix = [
            {"suspected_os": sus, "declared_os": dec, "count": n}
            for (sus, dec), n in counts.most_common()
        ]
        return {"matrix": matrix}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))