Add LLM configuration and MCP server management UI and backend functionality

- Implemented a new SPA page for LLM Bridge and MCP Server settings in `llm-config.js`.
- Added functionality for managing LLM and MCP configurations, including toggling, saving settings, and testing connections.
- Created HTTP endpoints in `llm_utils.py` for handling LLM chat, status checks, and MCP server configuration.
- Integrated model fetching from LaRuche and Ollama backends.
- Enhanced error handling and logging for better debugging and user feedback.
This commit is contained in:
infinition
2026-03-16 20:33:22 +01:00
parent aac77a3e76
commit b759ab6d4b
41 changed files with 9991 additions and 397 deletions

View File

@@ -156,19 +156,48 @@ class BluetoothUtils:
self.adapter_props.Set("org.bluez.Adapter1", "Discoverable", dbus.Boolean(True))
self.adapter_props.Set("org.bluez.Adapter1", "DiscoverableTimeout", dbus.UInt32(BT_DISCOVERABLE_TIMEOUT))
self.adapter_methods.StartDiscovery()
time.sleep(BT_SCAN_DURATION_S)
# StartDiscovery can fail if already running or adapter is busy
discovery_started = False
try:
self.adapter_methods.StartDiscovery()
discovery_started = True
except dbus.exceptions.DBusException as e:
err_name = e.get_dbus_name() if hasattr(e, 'get_dbus_name') else str(e)
if "InProgress" in str(err_name) or "Busy" in str(err_name):
self.logger.info("Discovery already in progress, continuing with existing scan")
discovery_started = True
else:
# Try stopping and restarting
self.logger.warning(f"StartDiscovery failed ({err_name}), attempting stop+restart")
try:
self.adapter_methods.StopDiscovery()
time.sleep(0.5)
self.adapter_methods.StartDiscovery()
discovery_started = True
except dbus.exceptions.DBusException as e2:
self.logger.warning(f"Retry also failed ({e2}), returning cached devices")
if discovery_started:
time.sleep(BT_SCAN_DURATION_S)
objects = self.manager_interface.GetManagedObjects()
devices = []
for path, ifaces in objects.items():
if "org.bluez.Device1" in ifaces:
dev = ifaces["org.bluez.Device1"]
rssi = dev.get("RSSI", None)
try:
rssi = int(rssi) if rssi is not None else -999
except (ValueError, TypeError):
rssi = -999
devices.append({
"name": str(dev.get("Name", "Unknown")),
"address": str(dev.get("Address", "")),
"paired": bool(dev.get("Paired", False)),
"trusted": bool(dev.get("Trusted", False)),
"connected": bool(dev.get("Connected", False))
"connected": bool(dev.get("Connected", False)),
"rssi": rssi,
"icon": str(dev.get("Icon", "")),
})
try:

335
web_utils/llm_utils.py Normal file
View File

@@ -0,0 +1,335 @@
# web_utils/llm_utils.py
# HTTP endpoints for LLM chat, LLM bridge config, and MCP server config.
# Follows the same pattern as all other web_utils classes in this project.
import json
import uuid  # NOTE(review): uuid appears unused in this file — confirm before removing
from typing import Any, Dict

from logger import Logger

# Module-level logger; level 20 == logging.INFO.
logger = Logger(name="llm_utils.py", level=20)

# Whitelist of MCP tool names the config endpoint will accept;
# anything else submitted by the UI is silently dropped.
_ALLOWED_TOOLS = [
    "get_hosts", "get_vulnerabilities", "get_credentials",
    "get_action_history", "get_status", "run_action", "query_db",
]
def _send_json(handler, data: Any, status: int = 200) -> None:
body = json.dumps(data).encode("utf-8")
handler.send_response(status)
handler.send_header("Content-Type", "application/json")
handler.send_header("Content-Length", str(len(body)))
handler.end_headers()
handler.wfile.write(body)
class LLMUtils:
    """HTTP endpoint handlers for LLM bridge and MCP server settings.

    GET-style methods write a JSON response directly on the request
    *handler*; POST-style methods return a JSON-serializable dict for
    the caller to send.  All backend modules (llm_bridge, mcp_server,
    land_protocol) are imported lazily so the web UI keeps working when
    a backend is missing or broken.
    """

    def __init__(self, shared_data):
        # shared_data carries the live config dict plus persisted settings.
        self.shared_data = shared_data

    # ------------------------------------------------------------------
    # GET /api/llm/status
    # ------------------------------------------------------------------
    def get_llm_status(self, handler) -> None:
        """Return current LLM bridge status."""
        try:
            from llm_bridge import LLMBridge
            status = LLMBridge().status()
        except Exception as e:
            # Degrade gracefully: report the failure instead of a 500.
            status = {"error": str(e), "enabled": False}
        _send_json(handler, status)

    # ------------------------------------------------------------------
    # POST /api/llm/chat {"message": "...", "session_id": "..."}
    # ------------------------------------------------------------------
    def handle_chat(self, data: Dict) -> Dict:
        """Process a chat message and return the LLM response."""
        message = (data.get("message") or "").strip()
        if not message:
            return {"status": "error", "message": "Empty message"}
        session_id = data.get("session_id") or "default"
        try:
            from llm_bridge import LLMBridge
            response = LLMBridge().chat(message, session_id=session_id)
            return {"status": "ok", "response": response or "(no response)", "session_id": session_id}
        except Exception as e:
            logger.error(f"Chat error: {e}")
            return {"status": "error", "message": str(e)}

    # ------------------------------------------------------------------
    # POST /api/llm/clear_history {"session_id": "..."}
    # ------------------------------------------------------------------
    def clear_chat_history(self, data: Dict) -> Dict:
        """Clear the chat history for the given session (default: "default")."""
        session_id = data.get("session_id") or "default"
        try:
            from llm_bridge import LLMBridge
            LLMBridge().clear_history(session_id)
            return {"status": "ok"}
        except Exception as e:
            return {"status": "error", "message": str(e)}

    # ------------------------------------------------------------------
    # GET /api/mcp/status
    # ------------------------------------------------------------------
    def get_mcp_status(self, handler) -> None:
        """Return current MCP server status."""
        try:
            import mcp_server
            status = mcp_server.server_status()
        except Exception as e:
            status = {"error": str(e), "enabled": False, "running": False}
        _send_json(handler, status)

    # ------------------------------------------------------------------
    # POST /api/mcp/toggle {"enabled": true/false}
    # ------------------------------------------------------------------
    def toggle_mcp(self, data: Dict) -> Dict:
        """Enable or disable the MCP server and persist the flag."""
        enabled = bool(data.get("enabled", False))
        try:
            # Persist the flag in the config dict AND as an attribute,
            # mirroring how the rest of the app reads shared_data.
            self.shared_data.config["mcp_enabled"] = enabled
            setattr(self.shared_data, "mcp_enabled", enabled)
            self.shared_data.save_config()
            import mcp_server
            if enabled and not mcp_server.is_running():
                started = mcp_server.start()
                return {"status": "ok", "enabled": True, "started": started}
            elif not enabled:
                mcp_server.stop()
                return {"status": "ok", "enabled": False}
            # Enabled and already running: nothing to start.
            return {"status": "ok", "enabled": enabled}
        except Exception as e:
            logger.error(f"MCP toggle error: {e}")
            return {"status": "error", "message": str(e)}

    # ------------------------------------------------------------------
    # POST /api/mcp/config {"allowed_tools": [...], "port": 8765, ...}
    # ------------------------------------------------------------------
    def save_mcp_config(self, data: Dict) -> Dict:
        """Save MCP server configuration (tools whitelist, port, transport)."""
        try:
            cfg = self.shared_data.config
            if "allowed_tools" in data:
                # Drop any tool name not in the module whitelist.
                tools = [t for t in data["allowed_tools"] if t in _ALLOWED_TOOLS]
                cfg["mcp_allowed_tools"] = tools
            if "port" in data:
                port = int(data["port"])
                # Only non-privileged ports; out-of-range values are ignored.
                if 1024 <= port <= 65535:
                    cfg["mcp_port"] = port
            if "transport" in data and data["transport"] in ("http", "stdio"):
                cfg["mcp_transport"] = data["transport"]
            self.shared_data.save_config()
            # Echo the effective config so the UI can refresh its state.
            return {"status": "ok", "config": {
                "mcp_enabled": cfg.get("mcp_enabled", False),
                "mcp_port": cfg.get("mcp_port", 8765),
                "mcp_transport": cfg.get("mcp_transport", "http"),
                "mcp_allowed_tools": cfg.get("mcp_allowed_tools", []),
            }}
        except Exception as e:
            logger.error(f"MCP config save error: {e}")
            return {"status": "error", "message": str(e)}

    # ------------------------------------------------------------------
    # POST /api/llm/config {all llm_* keys}
    # ------------------------------------------------------------------
    def save_llm_config(self, data: Dict) -> Dict:
        """Save LLM bridge configuration.

        Only keys in the whitelist below are accepted; int/bool keys are
        coerced before storage.  Side effects: may restart LaRuche mDNS
        discovery and notify the orchestrator of a mode change.
        """
        # Full set of accepted config keys; anything else in *data* is ignored.
        _llm_keys = {
            "llm_enabled", "llm_comments_enabled", "llm_comments_log", "llm_chat_enabled",
            "llm_backend", "llm_laruche_discovery", "llm_laruche_url", "llm_laruche_model",
            "llm_ollama_url", "llm_ollama_model",
            "llm_api_provider", "llm_api_key", "llm_api_model", "llm_api_base_url",
            "llm_timeout_s", "llm_max_tokens", "llm_comment_max_tokens",
            "llm_chat_history_size", "llm_chat_tools_enabled",
            # Orchestrator keys
            "llm_orchestrator_mode", "llm_orchestrator_interval_s",
            "llm_orchestrator_max_actions", "llm_orchestrator_allowed_actions",
            "llm_orchestrator_skip_if_no_change", "llm_orchestrator_log_reasoning",
            # Personality & prompt keys
            "llm_system_prompt_chat", "llm_system_prompt_comment",
            "llm_user_name", "llm_user_bio",
        }
        # Keys coerced to int before storage (int() may raise -> caught below).
        _int_keys = {
            "llm_timeout_s", "llm_max_tokens", "llm_comment_max_tokens",
            "llm_chat_history_size", "llm_orchestrator_interval_s",
            "llm_orchestrator_max_actions",
        }
        # Keys coerced to bool before storage.
        _bool_keys = {
            "llm_enabled", "llm_comments_enabled", "llm_comments_log", "llm_chat_enabled",
            "llm_laruche_discovery", "llm_chat_tools_enabled",
            "llm_orchestrator_skip_if_no_change", "llm_orchestrator_log_reasoning",
        }
        try:
            cfg = self.shared_data.config
            for key in _llm_keys:
                if key in data:
                    value = data[key]
                    if key in _int_keys:
                        value = int(value)
                    elif key in _bool_keys:
                        value = bool(value)
                    cfg[key] = value
                    # Mirror onto shared_data attributes for direct access elsewhere.
                    setattr(self.shared_data, key, value)
            self.shared_data.save_config()
            self.shared_data.invalidate_config_cache()
            # Restart discovery if URL/toggle changed
            if "llm_laruche_url" in data or "llm_laruche_discovery" in data:
                try:
                    from llm_bridge import LLMBridge
                    bridge = LLMBridge()
                    bridge._laruche_url = cfg.get("llm_laruche_url") or None
                    if cfg.get("llm_laruche_discovery", True) and not bridge._discovery_active:
                        bridge._start_laruche_discovery()
                except Exception:
                    # Best-effort: discovery restart failures are non-fatal.
                    pass
            # Notify orchestrator of mode change
            if "llm_orchestrator_mode" in data:
                try:
                    from orchestrator import Orchestrator
                    orch = getattr(self.shared_data, '_orchestrator_ref', None)
                    if orch and hasattr(orch, 'llm_orchestrator'):
                        orch.llm_orchestrator.restart_if_mode_changed()
                except Exception:
                    # Best-effort notification only.
                    pass
            return {"status": "ok"}
        except Exception as e:
            logger.error(f"LLM config save error: {e}")
            return {"status": "error", "message": str(e)}

    # ------------------------------------------------------------------
    # GET /api/llm/models?backend=laruche|ollama
    # Returns available models from the specified backend.
    # ------------------------------------------------------------------
    def get_llm_models(self, handler, params: Dict = None) -> None:
        """Return available models from LaRuche or Ollama.

        Responds with {"status": "ok", "backend": ..., "models": [...]}
        plus "default_model" when LaRuche reports one.
        """
        backend = (params or {}).get("backend", "laruche")
        models = []
        laruche_default = None
        try:
            if backend == "laruche":
                import land_protocol
                # Get LaRuche URL from bridge discovery or config
                url = self.shared_data.config.get("llm_laruche_url", "")
                if not url:
                    try:
                        from llm_bridge import LLMBridge
                        bridge = LLMBridge()
                        # Read the mDNS-discovered URL under the bridge lock.
                        with bridge._laruche_lock:
                            url = bridge._laruche_url or ""
                    except Exception:
                        pass
                if url:
                    result_data = land_protocol.list_models(url, timeout=10)
                    # Accept either {"models": [...]} or a bare list.
                    raw = result_data.get("models", []) if isinstance(result_data, dict) else result_data
                    for m in raw:
                        if isinstance(m, dict):
                            models.append({
                                "name": m.get("name", m.get("model", "?")),
                                "size": m.get("size", 0),
                                "modified": m.get("modified_at", ""),
                            })
                        elif isinstance(m, str):
                            models.append({"name": m, "size": 0})
                    # Extract default model from the same /models response
                    if isinstance(result_data, dict):
                        laruche_default = result_data.get("default_model")
            elif backend == "ollama":
                base = self.shared_data.config.get("llm_ollama_url", "http://127.0.0.1:11434").rstrip("/")
                import urllib.request
                # Ollama lists local models at /api/tags.
                req = urllib.request.Request(f"{base}/api/tags", method="GET")
                with urllib.request.urlopen(req, timeout=10) as resp:
                    body = json.loads(resp.read().decode())
                for m in body.get("models", []):
                    models.append({
                        "name": m.get("name", "?"),
                        "size": m.get("size", 0),
                        "modified": m.get("modified_at", ""),
                    })
        except Exception as e:
            _send_json(handler, {"status": "error", "message": str(e), "models": []})
            return
        result = {"status": "ok", "backend": backend, "models": models}
        if laruche_default:
            result["default_model"] = laruche_default
        _send_json(handler, result)

    # ------------------------------------------------------------------
    # GET /api/llm/reasoning
    # Returns the llm_orchestrator chat session (reasoning log).
    # ------------------------------------------------------------------
    def get_llm_reasoning(self, handler) -> None:
        """Return the LLM orchestrator reasoning session history."""
        try:
            from llm_bridge import LLMBridge
            bridge = LLMBridge()
            # Copy under the history lock so iteration is safe.
            with bridge._hist_lock:
                hist = list(bridge._chat_histories.get("llm_orchestrator", []))
            _send_json(handler, {"status": "ok", "messages": hist, "count": len(hist)})
        except Exception as e:
            _send_json(handler, {"status": "error", "message": str(e), "messages": [], "count": 0})

    # ------------------------------------------------------------------
    # GET /api/llm/config
    # ------------------------------------------------------------------
    def get_llm_config(self, handler) -> None:
        """Return current LLM config (api_key redacted) + live discovery state."""
        cfg = self.shared_data.config
        result = {k: cfg.get(k) for k in (
            "llm_enabled", "llm_comments_enabled", "llm_comments_log", "llm_chat_enabled",
            "llm_backend", "llm_laruche_discovery", "llm_laruche_url", "llm_laruche_model",
            "llm_ollama_url", "llm_ollama_model",
            "llm_api_provider", "llm_api_model", "llm_api_base_url",
            "llm_timeout_s", "llm_max_tokens", "llm_comment_max_tokens",
            "llm_chat_history_size", "llm_chat_tools_enabled",
            # Orchestrator
            "llm_orchestrator_mode", "llm_orchestrator_interval_s",
            "llm_orchestrator_max_actions", "llm_orchestrator_skip_if_no_change",
            "llm_orchestrator_log_reasoning",
            # Personality & prompts
            "llm_system_prompt_chat", "llm_system_prompt_comment",
            "llm_user_name", "llm_user_bio",
        )}
        # Never expose the raw API key — only whether one is set.
        result["llm_api_key_set"] = bool(cfg.get("llm_api_key", ""))
        # Default prompts for placeholder display in the UI
        result["llm_default_prompt_chat"] = (
            "You are Bjorn, an autonomous network security AI assistant running on a Raspberry Pi. "
            "Current state: {hosts} hosts discovered, {vulns} vulnerabilities, {creds} credentials captured. "
            "Operation mode: {mode}. Current action: {status}. "
            "Answer security questions concisely and technically. "
            "You can discuss network topology, vulnerabilities, and suggest next steps. "
            "Use brief Norse references occasionally. Never break character."
        )
        result["llm_default_prompt_comment"] = (
            "You are Bjorn, a terse Norse-themed autonomous security AI. "
            "Reply with ONE sentence of at most 12 words as a status comment. "
            "Be cryptic, dark, and technical. No punctuation at the end."
        )
        # Inject live mDNS discovery state so the UI can show it
        try:
            from llm_bridge import LLMBridge
            bridge = LLMBridge()
            with bridge._laruche_lock:
                result["laruche_discovered_url"] = bridge._laruche_url or ""
                result["laruche_discovery_active"] = bridge._discovery_active
        except Exception:
            result["laruche_discovered_url"] = ""
            result["laruche_discovery_active"] = False
        _send_json(handler, result)

View File

@@ -2,18 +2,21 @@
"""
Network utilities for WiFi/network operations.
Handles WiFi scanning, connection, known networks management.
Compatible with both legacy NM keyfiles and Trixie netplan.
"""
from __future__ import annotations
import json
import subprocess
import logging
import re
import os
import glob
import re
from typing import Any, Dict, Optional, List
import logging
from logger import Logger
logger = Logger(name="network_utils.py", level=logging.DEBUG)
class NetworkUtils:
"""Utilities for network and WiFi management."""
@@ -21,94 +24,107 @@ class NetworkUtils:
self.logger = logger
self.shared_data = shared_data
def get_known_wifi(self, handler):
"""List known WiFi networks with priorities."""
try:
result = subprocess.run(
['nmcli', '-t', '-f', 'NAME,TYPE,AUTOCONNECT-PRIORITY', 'connection', 'show'],
check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
)
stdout = result.stdout
self.logger.debug(f"nmcli connection show output:\n{stdout}")
# ── helpers ───────────────────────────────────────────────────────
known_networks = []
lines = stdout.strip().split('\n')
for line in lines:
@staticmethod
def _run(cmd: list[str], **kw) -> subprocess.CompletedProcess:
    """Run *cmd* with captured text output.

    check=True means a non-zero exit raises subprocess.CalledProcessError.
    Extra keyword arguments are forwarded to subprocess.run.
    """
    return subprocess.run(
        cmd, check=True, stdout=subprocess.PIPE,
        stderr=subprocess.PIPE, text=True, **kw,
    )
@staticmethod
def _json_response(handler, code: int, payload: dict):
handler.send_response(code)
handler.send_header("Content-type", "application/json")
handler.end_headers()
handler.wfile.write(json.dumps(payload).encode('utf-8'))
# ── known networks ───────────────────────────────────────────────
def get_known_wifi(self, handler):
"""List known WiFi networks with priorities.
Uses nmcli terse output. On Trixie, netplan-generated profiles
(named ``netplan-wlan0-*``) appear alongside user-created NM
profiles — both are returned.
"""
try:
result = self._run(
['nmcli', '-t', '-f', 'NAME,TYPE,AUTOCONNECT-PRIORITY', 'connection', 'show']
)
self.logger.debug(f"nmcli connection show output:\n{result.stdout}")
known_networks: list[dict] = []
for line in result.stdout.strip().splitlines():
if not line.strip():
continue
parts = line.split(':')
# nmcli -t uses ':' as delimiter — SSIDs with ':' are
# escaped by nmcli (backslash-colon), so split from
# the right to be safe: last field = priority,
# second-to-last = type, rest = name.
parts = line.rsplit(':', 2)
if len(parts) == 3:
name, conn_type, priority = parts
name, conn_type, priority_str = parts
elif len(parts) == 2:
name, conn_type = parts
priority = '0'
self.logger.warning(f"Missing priority for connection {name}. Assigning priority 0.")
priority_str = '0'
else:
self.logger.warning(f"Unexpected line format: {line}")
continue
if conn_type.lower() in ['802-11-wireless', 'wireless', 'wifi']:
try:
priority_int = int(priority) if priority.isdigit() else 0
except ValueError:
priority_int = 0
self.logger.warning(f"Non-numeric priority for {name}. Assigning priority 0.")
known_networks.append({
'ssid': name,
'priority': priority_int
})
# Unescape nmcli backslash-colon
name = name.replace('\\:', ':')
if conn_type.strip().lower() not in (
'802-11-wireless', 'wireless', 'wifi',
):
continue
try:
priority_int = int(priority_str.strip())
except (ValueError, AttributeError):
priority_int = 0
known_networks.append({
'ssid': name.strip(),
'priority': priority_int,
})
self.logger.debug(f"Extracted known networks: {known_networks}")
known_networks.sort(key=lambda x: x['priority'], reverse=True)
self._json_response(handler, 200, {"known_networks": known_networks})
handler.send_response(200)
handler.send_header("Content-type", "application/json")
handler.end_headers()
handler.wfile.write(json.dumps({"known_networks": known_networks}).encode('utf-8'))
except subprocess.CalledProcessError as e:
self.logger.error(f"Error getting known Wi-Fi networks: {e.stderr.strip()}")
handler.send_response(500)
handler.send_header("Content-type", "application/json")
handler.end_headers()
handler.wfile.write(json.dumps({"error": e.stderr.strip()}).encode('utf-8'))
self._json_response(handler, 500, {"error": e.stderr.strip()})
except Exception as e:
self.logger.error(f"Error getting known Wi-Fi networks: {e}")
handler.send_response(500)
handler.send_header("Content-type", "application/json")
handler.end_headers()
handler.wfile.write(json.dumps({"error": str(e)}).encode('utf-8'))
self._json_response(handler, 500, {"error": str(e)})
def delete_known_wifi(self, data):
"""Delete a known WiFi connection."""
ssid = None
ssid = data.get('ssid')
try:
ssid = data['ssid']
result = subprocess.run(
['sudo', 'nmcli', 'connection', 'delete', ssid],
check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
)
if not ssid:
return {"status": "error", "message": "Missing SSID"}
self._run(['sudo', 'nmcli', 'connection', 'delete', ssid])
self.logger.info(f"Deleted Wi-Fi connection: {ssid}")
return {"status": "success", "message": f"Network {ssid} deleted"}
except subprocess.CalledProcessError as e:
error_message = f"Error deleting Wi-Fi connection {ssid if ssid else 'unknown'}: {e.stderr.strip()}"
self.logger.error(error_message)
self.logger.error(f"Error deleting Wi-Fi connection {ssid}: {e.stderr.strip()}")
return {"status": "error", "message": e.stderr.strip()}
except Exception as e:
error_message = f"Unexpected error deleting Wi-Fi connection {ssid if ssid else 'unknown'}: {e}"
self.logger.error(error_message)
self.logger.error(f"Unexpected error deleting Wi-Fi connection {ssid}: {e}")
return {"status": "error", "message": str(e)}
def connect_known_wifi(self, data):
"""Connect to a known WiFi network."""
ssid = data.get('ssid', '')
try:
ssid = data['ssid']
if not self.validate_network_configuration(ssid):
raise Exception(f"Invalid or non-existent configuration for network '{ssid}'.")
result = subprocess.run(
['sudo', 'nmcli', 'connection', 'up', ssid],
check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
)
if not self.check_connection_exists(ssid):
return {"status": "error", "message": f"Network '{ssid}' not found in saved connections."}
self._run(['sudo', 'nmcli', 'connection', 'up', ssid])
self.logger.info(f"Connected to known Wi-Fi network: {ssid}")
return {"status": "success", "message": f"Connected to {ssid}"}
except subprocess.CalledProcessError as e:
@@ -119,14 +135,20 @@ class NetworkUtils:
return {"status": "error", "message": str(e)}
def update_wifi_priority(self, data):
"""Update WiFi connection priority."""
"""Update WiFi connection priority.
Works for both NM-native and netplan-generated profiles.
For netplan profiles (prefixed ``netplan-``), nmcli modify
writes a persistent override into
/etc/NetworkManager/system-connections/.
"""
ssid = data.get('ssid', '')
try:
ssid = data['ssid']
priority = int(data['priority'])
result = subprocess.run(
['sudo', 'nmcli', 'connection', 'modify', ssid, 'connection.autoconnect-priority', str(priority)],
check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
)
self._run([
'sudo', 'nmcli', 'connection', 'modify', ssid,
'connection.autoconnect-priority', str(priority),
])
self.logger.info(f"Priority updated for {ssid} to {priority}")
return {"status": "success", "message": "Priority updated"}
except subprocess.CalledProcessError as e:
@@ -136,95 +158,122 @@ class NetworkUtils:
self.logger.error(f"Unexpected error updating Wi-Fi priority: {e}")
return {"status": "error", "message": str(e)}
# ── scanning ─────────────────────────────────────────────────────
def scan_wifi(self, handler):
"""Scan for available WiFi networks."""
"""Scan for available WiFi networks.
Uses ``nmcli -t`` (terse) output for reliable parsing.
Signal is returned as a percentage 0-100.
"""
try:
result = subprocess.run(
['sudo', 'nmcli', 'device', 'wifi', 'list'],
check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
# Trigger a rescan first (best-effort)
subprocess.run(
['sudo', 'nmcli', 'device', 'wifi', 'rescan'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True,
)
stdout = result.stdout
networks = self.parse_scan_result(stdout)
self.logger.info(f"Found {len(networks)} networks")
result = self._run([
'sudo', 'nmcli', '-t', '-f',
'SSID,SIGNAL,SECURITY,IN-USE',
'device', 'wifi', 'list',
])
networks = self._parse_terse_scan(result.stdout)
current_ssid = self.get_current_ssid()
self.logger.info(f"Current SSID: {current_ssid}")
self.logger.info(f"Found {len(networks)} networks, current={current_ssid}")
handler.send_response(200)
handler.send_header("Content-type", "application/json")
handler.end_headers()
handler.wfile.write(json.dumps({"networks": networks, "current_ssid": current_ssid}).encode('utf-8'))
self._json_response(handler, 200, {
"networks": networks,
"current_ssid": current_ssid,
})
except subprocess.CalledProcessError as e:
self.logger.error(f"Error scanning Wi-Fi networks: {e.stderr.strip()}")
handler.send_response(500)
handler.send_header("Content-type", "application/json")
handler.end_headers()
handler.wfile.write(json.dumps({"error": e.stderr.strip()}).encode('utf-8'))
self._json_response(handler, 500, {"error": e.stderr.strip()})
def parse_scan_result(self, scan_output):
"""Parse nmcli scan output."""
networks = []
lines = scan_output.split('\n')
headers = []
for idx, line in enumerate(lines):
if line.startswith("IN-USE"):
headers = re.split(r'\s{2,}', line)
@staticmethod
def _parse_terse_scan(output: str) -> list[dict]:
"""Parse ``nmcli -t -f SSID,SIGNAL,SECURITY,IN-USE device wifi list``.
Terse output uses ':' as separator. SSIDs containing ':'
are escaped by nmcli as ``\\:``.
Returns a deduplicated list sorted by signal descending.
"""
seen: dict[str, dict] = {}
for line in output.strip().splitlines():
if not line.strip():
continue
if headers and line.strip():
fields = re.split(r'\s{2,}', line)
if len(fields) >= len(headers):
network = dict(zip(headers, fields))
ssid = network.get('SSID', '')
signal_level = int(network.get('SIGNAL', '0'))
security = network.get('SECURITY', '')
networks.append({
'ssid': ssid,
'signal_level': signal_level,
'security': security
})
return networks
def get_current_ssid(self):
# Split from the right: IN-USE (last), SECURITY, SIGNAL, rest=SSID
# IN-USE is '*' or '' — always one char field at the end
parts = line.rsplit(':', 3)
if len(parts) < 4:
continue
raw_ssid, signal_str, security, in_use = parts
# Unescape nmcli backslash-colon in SSID
ssid = raw_ssid.replace('\\:', ':').strip()
if not ssid:
continue
try:
signal = int(signal_str.strip())
except (ValueError, AttributeError):
signal = 0
# Normalize security string
security = security.strip()
if not security or security == '--':
security = 'Open'
# Keep the strongest signal per SSID
if ssid not in seen or signal > seen[ssid]['signal']:
seen[ssid] = {
'ssid': ssid,
'signal': signal,
'security': security,
'in_use': in_use.strip() == '*',
}
result = sorted(seen.values(), key=lambda n: n['signal'], reverse=True)
return result
def get_current_ssid(self) -> Optional[str]:
"""Get currently connected SSID."""
try:
result = subprocess.run(
['nmcli', '-t', '-f', 'active,ssid', 'dev', 'wifi'],
check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
)
lines = result.stdout.strip().split('\n')
for line in lines:
active, ssid = line.split(':', 1)
if active == 'yes':
return ssid
result = self._run(['nmcli', '-t', '-f', 'active,ssid', 'dev', 'wifi'])
for line in result.stdout.strip().splitlines():
parts = line.split(':', 1)
if len(parts) == 2 and parts[0] == 'yes':
return parts[1]
return None
except subprocess.CalledProcessError as e:
self.logger.error(f"Error getting current SSID: {e.stderr.strip()}")
return None
def connect_wifi(self, data):
"""Connect to WiFi network (new or existing)."""
try:
ssid = data['ssid']
password = data.get('password', '')
# ── connect ──────────────────────────────────────────────────────
def connect_wifi(self, data):
"""Connect to WiFi network (new or existing).
On Trixie, ``nmcli device wifi connect`` creates a persistent
NM keyfile in /etc/NetworkManager/system-connections/,
which survives reboots even when netplan manages the initial
Wi-Fi profile.
"""
ssid = data.get('ssid', '')
password = data.get('password', '')
try:
if self.check_connection_exists(ssid):
result = subprocess.run(
['sudo', 'nmcli', 'connection', 'up', ssid],
check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
)
return {"status": "success", "message": f"Connected to {ssid}"}
else:
if password:
result = subprocess.run(
['sudo', 'nmcli', 'device', 'wifi', 'connect', ssid, 'password', password],
check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
)
else:
result = subprocess.run(
['sudo', 'nmcli', 'device', 'wifi', 'connect', ssid],
check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
)
self._run(['sudo', 'nmcli', 'connection', 'up', ssid])
return {"status": "success", "message": f"Connected to {ssid}"}
cmd = ['sudo', 'nmcli', 'device', 'wifi', 'connect', ssid]
if password:
cmd += ['password', password]
self._run(cmd)
return {"status": "success", "message": f"Connected to {ssid}"}
except subprocess.CalledProcessError as e:
self.logger.error(f"Error connecting to network {ssid}: {e.stderr.strip()}")
return {"status": "error", "message": e.stderr.strip()}
@@ -232,127 +281,216 @@ class NetworkUtils:
self.logger.error(f"Error in connect_wifi: {e}")
return {"status": "error", "message": str(e)}
def check_connection_exists(self, ssid):
"""Check if a WiFi connection already exists."""
def check_connection_exists(self, ssid: str) -> bool:
"""Check if a WiFi connection profile exists (exact match)."""
try:
result = subprocess.run(
['nmcli', '-t', '-f', 'NAME', 'connection', 'show'],
check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
)
connections = result.stdout.strip().split('\n')
return ssid in connections
except subprocess.CalledProcessError as e:
self.logger.error(f"Error checking existing connections: {e.stderr.strip()}")
result = self._run(['nmcli', '-t', '-f', 'NAME', 'connection', 'show'])
for name in result.stdout.strip().splitlines():
# nmcli escapes ':' in names with backslash
if name.replace('\\:', ':').strip() == ssid:
return True
return False
except subprocess.CalledProcessError:
return False
def validate_network_configuration(self, ssid):
"""Validate network configuration in NetworkManager."""
try:
result = subprocess.run(
['nmcli', '-t', '-f', 'NAME,UUID,TYPE,AUTOCONNECT', 'connection', 'show'],
check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
)
connections = result.stdout.strip().split('\n')
for conn in connections:
if ssid in conn:
self.logger.info(f"Network {ssid} validated in NetworkManager.")
return True
self.logger.warning(f"Network {ssid} not found in NetworkManager.")
return False
except Exception as e:
self.logger.error(f"Error validating network {ssid}: {e}")
return False
def validate_network_configuration(self, ssid: str) -> bool:
"""Validate that a WiFi connection profile exists (exact match)."""
return self.check_connection_exists(ssid)
# ── potfile import ───────────────────────────────────────────────
def import_potfiles(self, data=None):
"""Import WiFi credentials from .pot/.potfile files."""
"""Import WiFi credentials from .pot/.potfile files.
Creates NM connection profiles via nmcli — these are stored
in /etc/NetworkManager/system-connections/ and persist across
reboots on both legacy and Trixie builds.
"""
try:
potfiles_folder = self.shared_data.potfiles_dir
import glob
potfile_paths = glob.glob(f"{potfiles_folder}/*.pot") + glob.glob(f"{potfiles_folder}/*.potfile")
potfile_paths = (
glob.glob(f"{potfiles_folder}/*.pot")
+ glob.glob(f"{potfiles_folder}/*.potfile")
)
networks_added = []
networks_added: list[str] = []
networks_skipped: list[str] = []
networks_failed: list[str] = []
DEFAULT_PRIORITY = 5
for potfile_path in potfile_paths:
with open(potfile_path, 'r') as potfile:
for line in potfile:
line = line.strip()
if ':' not in line:
self.logger.warning(f"Ignoring malformed line in {potfile_path}: {line}")
continue
if line.startswith('$WPAPSK$') and '#' in line:
try:
ssid_hash_part, password = line.split(':', 1)
ssid = ssid_hash_part.split('#')[0].replace('$WPAPSK$', '')
except ValueError:
self.logger.warning(f"Failed to parse WPAPSK line in {potfile_path}: {line}")
try:
with open(potfile_path, 'r', errors='replace') as potfile:
for line in potfile:
line = line.strip()
if not line or ':' not in line:
continue
elif len(line.split(':')) == 4:
try:
_, _, ssid, password = line.split(':')
except ValueError:
self.logger.warning(f"Failed to parse custom line in {potfile_path}: {line}")
ssid, password = self._parse_potfile_line(line)
if not ssid or not password:
continue
else:
self.logger.warning(f"Unknown format in {potfile_path}: {line}")
continue
if ssid and password:
if not self.check_connection_exists(ssid):
try:
subprocess.run(
['sudo', 'nmcli', 'connection', 'add', 'type', 'wifi',
'con-name', ssid, 'ifname', '*', 'ssid', ssid,
'wifi-sec.key-mgmt', 'wpa-psk', 'wifi-sec.psk', password,
'connection.autoconnect', 'yes',
'connection.autoconnect-priority', str(DEFAULT_PRIORITY)],
check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
)
networks_added.append(ssid)
self.logger.info(f"Imported network {ssid} from {potfile_path}")
except subprocess.CalledProcessError as e:
self.logger.error(f"Failed to add network {ssid}: {e.stderr.strip()}")
else:
self.logger.info(f"Network {ssid} already exists. Skipping.")
else:
self.logger.warning(f"Incomplete data in {potfile_path}: {line}")
if self.check_connection_exists(ssid):
networks_skipped.append(ssid)
continue
return {"status": "success", "networks_added": networks_added}
try:
self._run([
'sudo', 'nmcli', 'connection', 'add',
'type', 'wifi',
'con-name', ssid,
'ifname', '*',
'ssid', ssid,
'wifi-sec.key-mgmt', 'wpa-psk',
'wifi-sec.psk', password,
'connection.autoconnect', 'yes',
'connection.autoconnect-priority', str(DEFAULT_PRIORITY),
])
networks_added.append(ssid)
self.logger.info(f"Imported network {ssid} from {potfile_path}")
except subprocess.CalledProcessError as e:
networks_failed.append(ssid)
self.logger.error(f"Failed to add network {ssid}: {e.stderr.strip()}")
except OSError as e:
self.logger.error(f"Failed to read potfile {potfile_path}: {e}")
return {
"status": "success",
"networks_added": networks_added,
"imported": len(networks_added),
"skipped": len(networks_skipped),
"failed": len(networks_failed),
}
except Exception as e:
self.logger.error(f"Unexpected error importing potfiles: {e}")
return {"status": "error", "message": str(e)}
@staticmethod
def _parse_potfile_line(line: str) -> tuple[str, str]:
"""Parse a single potfile line, returning (ssid, password) or ('', '')."""
# Format 1: $WPAPSK$SSID#hash:password
if line.startswith('$WPAPSK$') and '#' in line:
try:
ssid_hash_part, password = line.split(':', 1)
ssid = ssid_hash_part.split('#')[0].replace('$WPAPSK$', '')
return ssid.strip(), password.strip()
except ValueError:
return '', ''
# Format 2: MAC:MAC:SSID:password (4 colon-separated fields)
parts = line.split(':')
if len(parts) == 4:
return parts[2].strip(), parts[3].strip()
# Format 3: SSID:password (2 colon-separated fields)
if len(parts) == 2:
return parts[0].strip(), parts[1].strip()
return '', ''
# ── preconfigured file management (legacy compat) ────────────────
def delete_preconfigured_file(self, handler):
"""Delete the legacy preconfigured.nmconnection file.
On Trixie this file typically does not exist (Wi-Fi is managed
by netplan). The endpoint returns 200/success even if the file
is missing to avoid breaking the frontend.
"""
path = '/etc/NetworkManager/system-connections/preconfigured.nmconnection'
try:
os.remove('/etc/NetworkManager/system-connections/preconfigured.nmconnection')
handler.send_response(200)
handler.send_header("Content-type", "application/json")
handler.end_headers()
handler.wfile.write(json.dumps({"status": "success"}).encode('utf-8'))
except FileNotFoundError:
handler.send_response(404)
handler.send_header("Content-type", "application/json")
handler.end_headers()
handler.wfile.write(json.dumps({"status": "error", "message": "Fichier introuvable"}).encode('utf-8'))
if os.path.exists(path):
os.remove(path)
self.logger.info("Deleted preconfigured.nmconnection")
else:
self.logger.info("preconfigured.nmconnection not found (Trixie/netplan — this is normal)")
self._json_response(handler, 200, {"status": "success"})
except Exception as e:
handler.send_response(500)
handler.send_header("Content-type", "application/json")
handler.end_headers()
handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode('utf-8'))
self.logger.error(f"Error deleting preconfigured file: {e}")
self._json_response(handler, 500, {"status": "error", "message": str(e)})
def create_preconfigured_file(self, handler):
"""Create a preconfigured.nmconnection file (legacy compat).
On Trixie this is a no-op: Wi-Fi is managed by netplan.
Returns success regardless to avoid breaking the frontend.
"""
self.logger.warning("create_preconfigured_file called — no-op on Trixie/netplan builds")
self._json_response(handler, 200, {
"status": "success",
"message": "No action needed on netplan-managed builds",
})
# ── potfile upload ────────────────────────────────────────────────
def upload_potfile(self, handler):
"""Upload a .pot/.potfile file to the potfiles directory.
Accepts multipart/form-data with a 'potfile' field.
Saves to shared_data.potfiles_dir.
Manual multipart parsing — no cgi module (removed in Python 3.13).
"""
try:
with open('/etc/NetworkManager/system-connections/preconfigured.nmconnection', 'w') as f:
f.write('Exemple de contenu') # Ajoutez le contenu par défaut
handler.send_response(200)
handler.send_header("Content-type", "application/json")
handler.end_headers()
handler.wfile.write(json.dumps({"status": "success"}).encode('utf-8'))
content_type = handler.headers.get("Content-Type", "")
if "multipart/form-data" not in content_type:
self._json_response(handler, 400, {
"status": "error",
"message": "Content-Type must be multipart/form-data",
})
return
boundary = content_type.split("=")[1].encode()
content_length = int(handler.headers.get("Content-Length", 0))
body = handler.rfile.read(content_length)
parts = body.split(b"--" + boundary)
filename = None
file_data = None
for part in parts:
if b"Content-Disposition" not in part:
continue
if b'name="potfile"' not in part:
continue
if b"filename=" not in part:
continue
headers_raw, data = part.split(b"\r\n\r\n", 1)
headers_str = headers_raw.decode(errors="replace")
match = re.search(r'filename="(.+?)"', headers_str)
if match:
filename = os.path.basename(match.group(1))
# Strip trailing boundary markers
file_data = data.rstrip(b"\r\n--").rstrip(b"\r\n")
break
if not filename or file_data is None:
self._json_response(handler, 400, {
"status": "error",
"message": "No potfile provided",
})
return
# Sanitise filename
safe_name = "".join(
c for c in filename if c.isalnum() or c in ".-_"
) or "uploaded.potfile"
dest_dir = self.shared_data.potfiles_dir
os.makedirs(dest_dir, exist_ok=True)
dest_path = os.path.join(dest_dir, safe_name)
with open(dest_path, "wb") as f:
f.write(file_data)
self.logger.info(f"Uploaded potfile: {safe_name} ({len(file_data)} bytes)")
self._json_response(handler, 200, {
"status": "success",
"filename": safe_name,
})
except Exception as e:
handler.send_response(500)
handler.send_header("Content-type", "application/json")
handler.end_headers()
handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode('utf-8'))
self.logger.error(f"Error uploading potfile: {e}")
self._json_response(handler, 500, {
"status": "error",
"message": str(e),
})

View File

@@ -257,6 +257,170 @@ class SentinelUtils:
use_tls=config.get("email_tls", True),
))
# ── LLM-powered endpoints ────────────────────────────────────────────
def analyze_events(self, data: Dict) -> Dict:
"""POST /api/sentinel/analyze — AI analysis of selected events."""
try:
event_ids = data.get("event_ids", [])
if not event_ids:
return {"status": "error", "message": "event_ids required"}
# Fetch events
placeholders = ",".join("?" for _ in event_ids)
rows = self.shared_data.db.query(
f"SELECT * FROM sentinel_events WHERE id IN ({placeholders})",
[int(i) for i in event_ids],
) or []
if not rows:
return {"status": "error", "message": "No events found"}
# Gather device info for context
macs = set()
ips = set()
for ev in rows:
meta = {}
try:
meta = json.loads(ev.get("metadata", "{}") or "{}")
except Exception:
pass
if meta.get("mac"):
macs.add(meta["mac"])
if meta.get("ip"):
ips.add(meta["ip"])
devices = []
if macs:
mac_ph = ",".join("?" for _ in macs)
devices = self.shared_data.db.query(
f"SELECT * FROM sentinel_devices WHERE mac_address IN ({mac_ph})",
list(macs),
) or []
from llm_bridge import LLMBridge
bridge = LLMBridge()
system = (
"You are a cybersecurity analyst reviewing sentinel alerts from Bjorn, "
"a network security AI. Analyze the events below and provide: "
"1) A severity assessment (critical/high/medium/low/info), "
"2) A concise analysis of what happened, "
"3) Concrete recommendations. "
"Be technical and actionable. Respond in plain text, keep it under 300 words."
)
prompt = (
f"Events:\n{json.dumps(rows, indent=2, default=str)}\n\n"
f"Known devices:\n{json.dumps(devices, indent=2, default=str)}\n\n"
"Analyze these security events."
)
response = bridge.complete(
[{"role": "user", "content": prompt}],
max_tokens=600,
system=system,
timeout=30,
)
return {"status": "ok", "analysis": response or "(no response)"}
except Exception as e:
logger.error("analyze_events error: %s", e)
return {"status": "error", "message": str(e)}
def summarize_events(self, data: Dict) -> Dict:
"""POST /api/sentinel/summarize — AI summary of recent unread events."""
try:
limit = min(int(data.get("limit", 50)), 100)
rows = self.shared_data.db.query(
"SELECT * FROM sentinel_events WHERE acknowledged = 0 "
"ORDER BY timestamp DESC LIMIT ?",
[limit],
) or []
if not rows:
return {"status": "ok", "summary": "No unread events to summarize."}
from llm_bridge import LLMBridge
bridge = LLMBridge()
system = (
"You are a cybersecurity analyst. Summarize the security events below. "
"Group by type, identify patterns, flag critical items. "
"Be concise — max 200 words. Use bullet points."
)
prompt = (
f"{len(rows)} unread sentinel events:\n"
f"{json.dumps(rows, indent=2, default=str)}\n\n"
"Summarize these events and identify patterns."
)
response = bridge.complete(
[{"role": "user", "content": prompt}],
max_tokens=500,
system=system,
timeout=30,
)
return {"status": "ok", "summary": response or "(no response)"}
except Exception as e:
logger.error("summarize_events error: %s", e)
return {"status": "error", "message": str(e)}
def suggest_rule(self, data: Dict) -> Dict:
"""POST /api/sentinel/suggest-rule — AI generates a rule from description."""
try:
description = (data.get("description") or "").strip()
if not description:
return {"status": "error", "message": "description required"}
from llm_bridge import LLMBridge
bridge = LLMBridge()
system = (
"You are a security rule generator. Given a user description, generate a Bjorn sentinel rule "
"as JSON. The rule schema is:\n"
'{"name": "string", "trigger_type": "new_device|arp_spoof|port_change|service_change|'
'dhcp_server|rogue_ap|high_traffic|vulnerability", "conditions": {"key": "value"}, '
'"logic": "AND|OR", "actions": ["notify_web","notify_discord","notify_email","notify_webhook"], '
'"cooldown_s": 60, "enabled": 1}\n'
"Respond with ONLY the JSON object, no markdown fences, no explanation."
)
prompt = f"Generate a sentinel rule for: {description}"
response = bridge.complete(
[{"role": "user", "content": prompt}],
max_tokens=400,
system=system,
timeout=20,
)
if not response:
return {"status": "error", "message": "No LLM response"}
# Try to parse the JSON
try:
# Strip markdown fences if present
clean = response.strip()
if clean.startswith("```"):
clean = clean.split("\n", 1)[1] if "\n" in clean else clean[3:]
if clean.endswith("```"):
clean = clean[:-3]
clean = clean.strip()
if clean.startswith("json"):
clean = clean[4:].strip()
rule = json.loads(clean)
return {"status": "ok", "rule": rule}
except json.JSONDecodeError:
return {"status": "ok", "rule": None, "raw": response,
"message": "LLM response was not valid JSON"}
except Exception as e:
logger.error("suggest_rule error: %s", e)
return {"status": "error", "message": str(e)}
# ── Helpers ─────────────────────────────────────────────────────────
def _send_json(self, handler, data, status=200):