mirror of
https://github.com/infinition/Bjorn.git
synced 2026-03-19 02:00:24 +00:00
feat: Add login page with dynamic RGB effects and password toggle functionality
feat: Implement package management utilities with JSON endpoints for listing and uninstalling packages feat: Create plugin management utilities with endpoints for listing, configuring, and installing plugins feat: Develop schedule and trigger management utilities with CRUD operations for schedules and triggers
This commit is contained in:
@@ -1,17 +1,6 @@
|
||||
# action_utils.py
|
||||
"""
|
||||
Unified web utilities: Actions (scripts+images+comments), Images, Characters,
|
||||
Comments, and Attacks — consolidated into a single module.
|
||||
"""action_utils.py - Unified web utilities for actions, images, characters, comments, and attacks.
|
||||
|
||||
Key image rules:
|
||||
- Status icon: always 28x28 BMP (<status_images>/<Action>/<Action>.bmp)
|
||||
- Character image: always 78x78 BMP (<status_images>/<Action>/<Action>N.bmp)
|
||||
- Missing status icon auto-generates a placeholder (similar intent to makePlaceholderIconBlob).
|
||||
|
||||
This file merges previous modules:
|
||||
- Action/Image/Character utils (now in ActionUtils)
|
||||
- Comment utils (CommentUtils)
|
||||
- Attack utils (AttackUtils)
|
||||
Consolidates ActionUtils, CommentUtils, and AttackUtils into a single module.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
@@ -471,11 +460,11 @@ class ActionUtils:
|
||||
"""
|
||||
Rebuild DB 'actions' + 'actions_studio' from filesystem .py files.
|
||||
- 'actions' : info runtime (b_class, b_module, etc.)
|
||||
- 'actions_studio': payload studio (on garde meta complet en JSON)
|
||||
- 'actions_studio': studio payload (full meta as JSON)
|
||||
"""
|
||||
actions_dir = self.shared_data.actions_dir
|
||||
|
||||
# Schéma minimum (au cas où la migration n'est pas faite)
|
||||
# Minimum schema (in case migration hasn't run)
|
||||
self.shared_data.db.execute("""
|
||||
CREATE TABLE IF NOT EXISTS actions (
|
||||
name TEXT PRIMARY KEY,
|
||||
@@ -491,7 +480,7 @@ class ActionUtils:
|
||||
)
|
||||
""")
|
||||
|
||||
# On reconstruit à partir du disque
|
||||
# Rebuild from disk
|
||||
self.shared_data.db.execute("DELETE FROM actions")
|
||||
self.shared_data.db.execute("DELETE FROM actions_studio")
|
||||
|
||||
@@ -510,10 +499,10 @@ class ActionUtils:
|
||||
module_name = os.path.splitext(filename)[0]
|
||||
meta.setdefault("b_module", module_name)
|
||||
|
||||
# Nom logique de l'action (prend 'name' si présent, sinon b_class)
|
||||
# Logical action name: use 'name' if present, fall back to b_class
|
||||
action_name = (meta.get("name") or meta["b_class"]).strip()
|
||||
|
||||
# -- UPSERT dans actions
|
||||
# Upsert into actions
|
||||
self.shared_data.db.execute(
|
||||
"""
|
||||
INSERT INTO actions (name, b_class, b_module, meta_json)
|
||||
@@ -526,7 +515,7 @@ class ActionUtils:
|
||||
(action_name, meta["b_class"], meta["b_module"], json.dumps(meta, ensure_ascii=False))
|
||||
)
|
||||
|
||||
# -- UPSERT dans actions_studio (on stocke le même meta ou seulement ce qui est utile studio)
|
||||
# Upsert into actions_studio (store full meta or studio-relevant subset)
|
||||
self.shared_data.db.execute(
|
||||
"""
|
||||
INSERT INTO actions_studio (action_name, studio_meta_json)
|
||||
@@ -853,7 +842,7 @@ class ActionUtils:
|
||||
image_name = self._safe(form.getvalue('image_name') or '')
|
||||
file_item = form['new_image'] if 'new_image' in form else None
|
||||
|
||||
# ⚠️ NE PAS faire "not file_item" (FieldStorage n'est pas booléable)
|
||||
# Don't use "not file_item" (FieldStorage is not bool-safe)
|
||||
if not tp or not image_name or file_item is None or not getattr(file_item, 'filename', ''):
|
||||
raise ValueError('type, image_name and new_image are required')
|
||||
|
||||
@@ -869,13 +858,13 @@ class ActionUtils:
|
||||
|
||||
raw = file_item.file.read()
|
||||
|
||||
# Si c'est le status icon <action>.bmp => BMP 28x28 imposé
|
||||
# Status icon <action>.bmp => forced BMP 28x28
|
||||
if image_name.lower() == f"{action.lower()}.bmp":
|
||||
out = self._to_bmp_resized(raw, self.STATUS_W, self.STATUS_H)
|
||||
with open(target, 'wb') as f:
|
||||
f.write(out)
|
||||
else:
|
||||
# Déléguer aux character utils pour une image perso numérotée
|
||||
# Delegate to character utils for numbered character image
|
||||
if not self.character_utils:
|
||||
raise RuntimeError("CharacterUtils not wired into ImageUtils")
|
||||
return self.character_utils.replace_character_image(h, form, action, image_name)
|
||||
@@ -1088,10 +1077,7 @@ class ActionUtils:
|
||||
f.write(char_from_status)
|
||||
|
||||
def get_status_icon(self, handler):
|
||||
"""
|
||||
Serve <action>/<action>.bmp s'il existe.
|
||||
NE PAS créer de placeholder ici (laisser le front gérer le fallback).
|
||||
"""
|
||||
"""Serve <action>/<action>.bmp if it exists. No placeholder - let the frontend handle fallback."""
|
||||
try:
|
||||
q = parse_qs(urlparse(handler.path).query)
|
||||
action = (q.get("action", [None])[0] or "").strip()
|
||||
@@ -1615,7 +1601,7 @@ class ActionUtils:
|
||||
def get_attacks(self, handler):
|
||||
"""List all attack cards from DB (name + enabled)."""
|
||||
try:
|
||||
cards = self.shared_data.db.list_action_cards() # déjà mappe b_enabled -> enabled
|
||||
cards = self.shared_data.db.list_action_cards() # maps b_enabled -> enabled
|
||||
attacks = []
|
||||
for c in cards:
|
||||
name = c.get("name") or c.get("b_class")
|
||||
@@ -1648,7 +1634,7 @@ class ActionUtils:
|
||||
if not action_name:
|
||||
raise ValueError("action_name is required")
|
||||
|
||||
# Met à jour la colonne correcte avec l'API DB existante
|
||||
# Update the correct column using existing DB API
|
||||
rowcount = self.shared_data.db.execute(
|
||||
"UPDATE actions SET b_enabled = ? WHERE b_class = ?;",
|
||||
(enabled, action_name)
|
||||
@@ -1915,11 +1901,11 @@ class ActionUtils:
|
||||
|
||||
try:
|
||||
ctype, pdict = _parse_header(h.headers.get('Content-Type'))
|
||||
if ctype != 'multipart/form-data': raise ValueError('Content-Type doit être multipart/form-data')
|
||||
if ctype != 'multipart/form-data': raise ValueError('Content-Type must be multipart/form-data')
|
||||
pdict['boundary']=bytes(pdict['boundary'],'utf-8'); pdict['CONTENT-LENGTH']=int(h.headers.get('Content-Length'))
|
||||
form = _MultipartForm(fp=BytesIO(h.rfile.read(pdict['CONTENT-LENGTH'])),
|
||||
headers=h.headers, environ={'REQUEST_METHOD':'POST'}, keep_blank_values=True)
|
||||
if 'web_image' not in form or not getattr(form['web_image'],'filename',''): raise ValueError('Aucun fichier web_image fourni')
|
||||
if 'web_image' not in form or not getattr(form['web_image'],'filename',''): raise ValueError('No web_image file provided')
|
||||
file_item = form['web_image']; filename = self._safe(file_item.filename)
|
||||
base, ext = os.path.splitext(filename);
|
||||
if ext.lower() not in ALLOWED_IMAGE_EXTS: filename = base + '.png'
|
||||
@@ -1961,11 +1947,11 @@ class ActionUtils:
|
||||
|
||||
try:
|
||||
ctype, pdict = _parse_header(h.headers.get('Content-Type'))
|
||||
if ctype != 'multipart/form-data': raise ValueError('Content-Type doit être multipart/form-data')
|
||||
if ctype != 'multipart/form-data': raise ValueError('Content-Type must be multipart/form-data')
|
||||
pdict['boundary']=bytes(pdict['boundary'],'utf-8'); pdict['CONTENT-LENGTH']=int(h.headers.get('Content-Length'))
|
||||
form = _MultipartForm(fp=BytesIO(h.rfile.read(pdict['CONTENT-LENGTH'])),
|
||||
headers=h.headers, environ={'REQUEST_METHOD':'POST'}, keep_blank_values=True)
|
||||
if 'icon_image' not in form or not getattr(form['icon_image'],'filename',''): raise ValueError('Aucun fichier icon_image fourni')
|
||||
if 'icon_image' not in form or not getattr(form['icon_image'],'filename',''): raise ValueError('No icon_image file provided')
|
||||
file_item = form['icon_image']; filename = self._safe(file_item.filename)
|
||||
base, ext = os.path.splitext(filename);
|
||||
if ext.lower() not in ALLOWED_IMAGE_EXTS: filename = base + '.png'
|
||||
|
||||
@@ -1,8 +1,4 @@
|
||||
# web_utils/attack_utils.py
|
||||
"""
|
||||
Attack and action management utilities.
|
||||
Handles attack listing, import/export, and action metadata management.
|
||||
"""
|
||||
"""attack_utils.py - Attack listing, import/export, and action metadata management."""
|
||||
from __future__ import annotations
|
||||
import json
|
||||
import os
|
||||
@@ -322,12 +318,14 @@ class AttackUtils:
|
||||
try:
|
||||
rel = handler.path[len('/actions_icons/'):]
|
||||
rel = os.path.normpath(rel).replace("\\", "/")
|
||||
if rel.startswith("../"):
|
||||
|
||||
# Robust path traversal prevention: resolve to absolute and verify containment
|
||||
image_path = os.path.realpath(os.path.join(self.shared_data.actions_icons_dir, rel))
|
||||
base_dir = os.path.realpath(self.shared_data.actions_icons_dir)
|
||||
if not image_path.startswith(base_dir + os.sep) and image_path != base_dir:
|
||||
handler.send_error(400, "Invalid path")
|
||||
return
|
||||
|
||||
image_path = os.path.join(self.shared_data.actions_icons_dir, rel)
|
||||
|
||||
if not os.path.exists(image_path):
|
||||
handler.send_error(404, "Image not found")
|
||||
return
|
||||
|
||||
@@ -1,8 +1,4 @@
|
||||
# web_utils/backup_utils.py
|
||||
"""
|
||||
Backup and restore utilities.
|
||||
Handles system backups, GitHub updates, and restore operations.
|
||||
"""
|
||||
"""backup_utils.py - System backups, GitHub updates, and restore operations."""
|
||||
from __future__ import annotations
|
||||
import os
|
||||
import json
|
||||
@@ -59,7 +55,7 @@ class BackupUtils:
|
||||
return {"status": "success", "message": "Backup created successfully in ZIP format."}
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to create ZIP backup: {e}")
|
||||
return {"status": "error", "message": str(e)}
|
||||
return {"status": "error", "message": "Internal server error"}
|
||||
|
||||
elif backup_format == 'tar.gz':
|
||||
backup_filename = f"backup_{timestamp}.tar.gz"
|
||||
@@ -83,7 +79,7 @@ class BackupUtils:
|
||||
return {"status": "success", "message": "Backup created successfully in tar.gz format."}
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to create tar.gz backup: {e}")
|
||||
return {"status": "error", "message": str(e)}
|
||||
return {"status": "error", "message": "Internal server error"}
|
||||
else:
|
||||
self.logger.error(f"Unsupported backup format: {backup_format}")
|
||||
return {"status": "error", "message": "Unsupported backup format."}
|
||||
@@ -96,7 +92,7 @@ class BackupUtils:
|
||||
return {"status": "success", "backups": backups}
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to list backups: {e}")
|
||||
return {"status": "error", "message": str(e)}
|
||||
return {"status": "error", "message": "Internal server error"}
|
||||
|
||||
def remove_named_pipes(self, directory):
|
||||
"""Recursively remove named pipes in the specified directory."""
|
||||
@@ -213,12 +209,12 @@ class BackupUtils:
|
||||
self.logger.error(f"Failed to extract backup: {e}")
|
||||
if os.path.exists(temp_dir):
|
||||
os.rename(temp_dir, original_dir)
|
||||
return {"status": "error", "message": f"Failed to extract backup: {e}"}
|
||||
return {"status": "error", "message": "Failed to extract backup"}
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to restore backup: {e}")
|
||||
if os.path.exists(temp_dir):
|
||||
os.rename(temp_dir, original_dir)
|
||||
return {"status": "error", "message": str(e)}
|
||||
return {"status": "error", "message": "Internal server error"}
|
||||
|
||||
def set_default_backup(self, data):
|
||||
"""Set a backup as default."""
|
||||
@@ -231,7 +227,7 @@ class BackupUtils:
|
||||
return {"status": "success"}
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error setting default backup: {e}")
|
||||
return {"status": "error", "message": str(e)}
|
||||
return {"status": "error", "message": "Internal server error"}
|
||||
|
||||
def delete_backup(self, data):
|
||||
"""Delete a backup file and its DB metadata."""
|
||||
@@ -250,7 +246,7 @@ class BackupUtils:
|
||||
return {"status": "success", "message": "Backup deleted successfully."}
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to delete backup: {e}")
|
||||
return {"status": "error", "message": str(e)}
|
||||
return {"status": "error", "message": "Internal server error"}
|
||||
|
||||
def update_application(self, data):
|
||||
"""Update application from GitHub with options to keep certain folders."""
|
||||
@@ -367,12 +363,12 @@ class BackupUtils:
|
||||
self.logger.error(f"Failed to download update: {e}")
|
||||
if os.path.exists(temp_dir):
|
||||
os.rename(temp_dir, original_dir)
|
||||
return {"status": "error", "message": f"Failed to download update: {e}"}
|
||||
return {"status": "error", "message": "Failed to download update"}
|
||||
except Exception as e:
|
||||
self.logger.error(f"Update failed: {e}")
|
||||
if os.path.exists(temp_dir):
|
||||
os.rename(temp_dir, original_dir)
|
||||
return {"status": "error", "message": str(e)}
|
||||
return {"status": "error", "message": "Internal server error"}
|
||||
finally:
|
||||
for path in [downloaded_zip, extract_dir]:
|
||||
if os.path.exists(path):
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
"""
|
||||
Bifrost web API endpoints.
|
||||
"""
|
||||
"""bifrost_utils.py - Bifrost web API endpoints."""
|
||||
import json
|
||||
import logging
|
||||
from typing import Dict
|
||||
@@ -22,7 +20,7 @@ class BifrostUtils:
|
||||
# ── GET endpoints (handler signature) ─────────────────────
|
||||
|
||||
def get_status(self, handler):
|
||||
"""GET /api/bifrost/status — full engine state."""
|
||||
"""GET /api/bifrost/status - full engine state."""
|
||||
engine = self._engine
|
||||
if engine:
|
||||
data = engine.get_status()
|
||||
@@ -37,7 +35,7 @@ class BifrostUtils:
|
||||
self._send_json(handler, data)
|
||||
|
||||
def get_networks(self, handler):
|
||||
"""GET /api/bifrost/networks — discovered WiFi networks."""
|
||||
"""GET /api/bifrost/networks - discovered WiFi networks."""
|
||||
try:
|
||||
rows = self.shared_data.db.query(
|
||||
"SELECT * FROM bifrost_networks ORDER BY rssi DESC LIMIT 200"
|
||||
@@ -48,7 +46,7 @@ class BifrostUtils:
|
||||
self._send_json(handler, {'networks': []})
|
||||
|
||||
def get_handshakes(self, handler):
|
||||
"""GET /api/bifrost/handshakes — captured handshakes."""
|
||||
"""GET /api/bifrost/handshakes - captured handshakes."""
|
||||
try:
|
||||
rows = self.shared_data.db.query(
|
||||
"SELECT * FROM bifrost_handshakes ORDER BY captured_at DESC LIMIT 200"
|
||||
@@ -59,7 +57,7 @@ class BifrostUtils:
|
||||
self._send_json(handler, {'handshakes': []})
|
||||
|
||||
def get_activity(self, handler):
|
||||
"""GET /api/bifrost/activity — recent activity feed."""
|
||||
"""GET /api/bifrost/activity - recent activity feed."""
|
||||
try:
|
||||
qs = parse_qs(urlparse(handler.path).query)
|
||||
limit = int(qs.get('limit', [50])[0])
|
||||
@@ -73,7 +71,7 @@ class BifrostUtils:
|
||||
self._send_json(handler, {'activity': []})
|
||||
|
||||
def get_epochs(self, handler):
|
||||
"""GET /api/bifrost/epochs — epoch history."""
|
||||
"""GET /api/bifrost/epochs - epoch history."""
|
||||
try:
|
||||
rows = self.shared_data.db.query(
|
||||
"SELECT * FROM bifrost_epochs ORDER BY id DESC LIMIT 100"
|
||||
@@ -84,7 +82,7 @@ class BifrostUtils:
|
||||
self._send_json(handler, {'epochs': []})
|
||||
|
||||
def get_stats(self, handler):
|
||||
"""GET /api/bifrost/stats — aggregate statistics."""
|
||||
"""GET /api/bifrost/stats - aggregate statistics."""
|
||||
try:
|
||||
db = self.shared_data.db
|
||||
nets = db.query_one("SELECT COUNT(*) AS c FROM bifrost_networks") or {}
|
||||
@@ -114,7 +112,7 @@ class BifrostUtils:
|
||||
})
|
||||
|
||||
def get_plugins(self, handler):
|
||||
"""GET /api/bifrost/plugins — loaded plugin list."""
|
||||
"""GET /api/bifrost/plugins - loaded plugin list."""
|
||||
try:
|
||||
from bifrost.plugins import get_loaded_info
|
||||
self._send_json(handler, {'plugins': get_loaded_info()})
|
||||
@@ -125,7 +123,7 @@ class BifrostUtils:
|
||||
# ── POST endpoints (JSON data signature) ──────────────────
|
||||
|
||||
def toggle_bifrost(self, data: Dict) -> Dict:
|
||||
"""POST /api/bifrost/toggle — switch to/from BIFROST mode.
|
||||
"""POST /api/bifrost/toggle - switch to/from BIFROST mode.
|
||||
|
||||
BIFROST is a 4th exclusive operation mode. Enabling it stops the
|
||||
orchestrator (Manual/Auto/AI) because WiFi goes into monitor mode.
|
||||
@@ -141,7 +139,7 @@ class BifrostUtils:
|
||||
return {'status': 'ok', 'enabled': enabled}
|
||||
|
||||
def set_mode(self, data: Dict) -> Dict:
|
||||
"""POST /api/bifrost/mode — set auto/manual."""
|
||||
"""POST /api/bifrost/mode - set auto/manual."""
|
||||
mode = data.get('mode', 'auto')
|
||||
engine = self._engine
|
||||
if engine and engine.agent:
|
||||
@@ -149,7 +147,7 @@ class BifrostUtils:
|
||||
return {'status': 'ok', 'mode': mode}
|
||||
|
||||
def toggle_plugin(self, data: Dict) -> Dict:
|
||||
"""POST /api/bifrost/plugin/toggle — enable/disable a plugin."""
|
||||
"""POST /api/bifrost/plugin/toggle - enable/disable a plugin."""
|
||||
try:
|
||||
from bifrost.plugins import toggle_plugin
|
||||
name = data.get('name', '')
|
||||
@@ -160,7 +158,7 @@ class BifrostUtils:
|
||||
return {'status': 'error', 'message': str(e)}
|
||||
|
||||
def clear_activity(self, data: Dict) -> Dict:
|
||||
"""POST /api/bifrost/activity/clear — clear activity log."""
|
||||
"""POST /api/bifrost/activity/clear - clear activity log."""
|
||||
try:
|
||||
self.shared_data.db.execute("DELETE FROM bifrost_activity")
|
||||
return {'status': 'ok'}
|
||||
@@ -168,7 +166,7 @@ class BifrostUtils:
|
||||
return {'status': 'error', 'message': str(e)}
|
||||
|
||||
def update_whitelist(self, data: Dict) -> Dict:
|
||||
"""POST /api/bifrost/whitelist — update AP whitelist."""
|
||||
"""POST /api/bifrost/whitelist - update AP whitelist."""
|
||||
try:
|
||||
whitelist = data.get('whitelist', '')
|
||||
self.shared_data.config['bifrost_whitelist'] = whitelist
|
||||
|
||||
@@ -1,8 +1,4 @@
|
||||
# web_utils/bluetooth_utils.py
|
||||
"""
|
||||
Bluetooth device management utilities.
|
||||
Handles Bluetooth scanning, pairing, connection, and device management.
|
||||
"""
|
||||
"""bluetooth_utils.py - Bluetooth scanning, pairing, connection, and device management."""
|
||||
from __future__ import annotations
|
||||
import json
|
||||
import subprocess
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
# webutils/c2_utils.py
|
||||
"""c2_utils.py - Command and control agent management endpoints."""
|
||||
from c2_manager import c2_manager
|
||||
import base64
|
||||
import time
|
||||
from pathlib import Path
|
||||
import json
|
||||
from datetime import datetime
|
||||
# to import logging from the previous path you can use:
|
||||
import logging
|
||||
from logger import Logger
|
||||
logger = Logger(name="c2_utils.py", level=logging.DEBUG)
|
||||
@@ -15,12 +14,12 @@ class C2Utils:
|
||||
def __init__(self, shared_data):
|
||||
self.logger = logger
|
||||
self.shared_data = shared_data
|
||||
# --- Anti-yoyo: cache du dernier snapshot "sain" d'agents ---
|
||||
self._last_agents = [] # liste d'agents normalisés
|
||||
self._last_agents_ts = 0.0 # epoch seconds du snapshot
|
||||
self._snapshot_ttl = 10.0 # tolérance (s) si /c2/agents flanche
|
||||
# Anti-flap: cache last healthy agent snapshot
|
||||
self._last_agents = []
|
||||
self._last_agents_ts = 0.0
|
||||
self._snapshot_ttl = 10.0 # grace period (s) if /c2/agents fails
|
||||
|
||||
# ---------------------- Helpers JSON ----------------------
|
||||
# ---------------------- JSON helpers ----------------------
|
||||
|
||||
def _to_jsonable(self, obj):
|
||||
if obj is None or isinstance(obj, (bool, int, float, str)):
|
||||
@@ -49,29 +48,27 @@ class C2Utils:
|
||||
except BrokenPipeError:
|
||||
pass
|
||||
|
||||
# ---------------------- Normalisation Agents ----------------------
|
||||
# ---------------------- Agent normalization ----------------------
|
||||
|
||||
def _normalize_agent(self, a):
|
||||
"""
|
||||
Uniformise l'agent (id, last_seen en ISO) sans casser les autres champs.
|
||||
"""
|
||||
"""Normalize agent fields (id, last_seen as ISO) without breaking other fields."""
|
||||
a = dict(a) if isinstance(a, dict) else {}
|
||||
a["id"] = a.get("id") or a.get("agent_id") or a.get("client_id")
|
||||
|
||||
ls = a.get("last_seen")
|
||||
if isinstance(ls, (int, float)):
|
||||
# epoch seconds -> ISO
|
||||
# epoch seconds to ISO
|
||||
try:
|
||||
a["last_seen"] = datetime.fromtimestamp(ls).isoformat()
|
||||
except Exception:
|
||||
a["last_seen"] = None
|
||||
elif isinstance(ls, str):
|
||||
# ISO (avec ou sans Z)
|
||||
# ISO (with or without Z)
|
||||
try:
|
||||
dt = datetime.fromisoformat(ls.replace("Z", "+00:00"))
|
||||
a["last_seen"] = dt.isoformat()
|
||||
except Exception:
|
||||
# format inconnu -> laisser tel quel
|
||||
# unknown format, leave as-is
|
||||
pass
|
||||
elif isinstance(ls, datetime):
|
||||
a["last_seen"] = ls.isoformat()
|
||||
@@ -80,7 +77,7 @@ class C2Utils:
|
||||
|
||||
return a
|
||||
|
||||
# ---------------------- Handlers REST ----------------------
|
||||
# ---------------------- REST handlers ----------------------
|
||||
|
||||
def c2_start(self, handler, data):
|
||||
port = int(data.get("port", 5555))
|
||||
@@ -95,10 +92,8 @@ class C2Utils:
|
||||
return self._json(handler, 200, c2_manager.status())
|
||||
|
||||
def c2_agents(self, handler):
|
||||
"""
|
||||
Renvoie la liste des agents (tableau JSON).
|
||||
Anti-yoyo : si c2_manager.list_agents() renvoie [] mais que
|
||||
nous avons un snapshot récent (< TTL), renvoyer ce snapshot.
|
||||
"""Return agent list as JSON array.
|
||||
Anti-flap: if list_agents() returns [] but we have a recent snapshot (< TTL), serve that instead.
|
||||
"""
|
||||
try:
|
||||
raw = c2_manager.list_agents() or []
|
||||
@@ -106,16 +101,16 @@ class C2Utils:
|
||||
|
||||
now = time.time()
|
||||
if len(agents) == 0 and len(self._last_agents) > 0 and (now - self._last_agents_ts) <= self._snapshot_ttl:
|
||||
# Fallback rapide : on sert le dernier snapshot non-vide
|
||||
# Quick fallback: serve last non-empty snapshot
|
||||
return self._json(handler, 200, self._last_agents)
|
||||
|
||||
# Snapshot frais (même si vide réel)
|
||||
# Fresh snapshot (even if actually empty)
|
||||
self._last_agents = agents
|
||||
self._last_agents_ts = now
|
||||
return self._json(handler, 200, agents)
|
||||
|
||||
except Exception as e:
|
||||
# En cas d'erreur, si snapshot récent dispo, on le sert
|
||||
# On error, serve recent snapshot if available
|
||||
now = time.time()
|
||||
if len(self._last_agents) > 0 and (now - self._last_agents_ts) <= self._snapshot_ttl:
|
||||
self.logger.warning(f"/c2/agents fallback to snapshot after error: {e}")
|
||||
@@ -167,17 +162,17 @@ class C2Utils:
|
||||
except Exception as e:
|
||||
return self._json(handler, 500, {"status": "error", "message": str(e)})
|
||||
|
||||
# ---------------------- SSE: stream d'événements ----------------------
|
||||
# ---------------------- SSE: event stream ----------------------
|
||||
|
||||
def c2_events_sse(self, handler):
|
||||
handler.send_response(200)
|
||||
handler.send_header("Content-Type", "text/event-stream")
|
||||
handler.send_header("Cache-Control", "no-cache")
|
||||
handler.send_header("Connection", "keep-alive")
|
||||
handler.send_header("X-Accel-Buffering", "no") # utile derrière Nginx/Traefik
|
||||
handler.send_header("X-Accel-Buffering", "no") # needed behind Nginx/Traefik
|
||||
handler.end_headers()
|
||||
|
||||
# Indiquer au client un backoff de reconnexion (évite les tempêtes)
|
||||
# Tell client to back off on reconnect (avoids thundering herd)
|
||||
try:
|
||||
handler.wfile.write(b"retry: 5000\n\n") # 5s
|
||||
handler.wfile.flush()
|
||||
@@ -194,7 +189,7 @@ class C2Utils:
|
||||
handler.wfile.write(payload.encode("utf-8"))
|
||||
handler.wfile.flush()
|
||||
except Exception:
|
||||
# Connexion rompue : on se désabonne proprement
|
||||
# Connection broken: unsubscribe cleanly
|
||||
try:
|
||||
c2_manager.bus.unsubscribe(push)
|
||||
except Exception:
|
||||
@@ -202,11 +197,11 @@ class C2Utils:
|
||||
|
||||
c2_manager.bus.subscribe(push)
|
||||
try:
|
||||
# Keep-alive périodique pour maintenir le flux ouvert
|
||||
# Periodic keep-alive to maintain the stream
|
||||
while True:
|
||||
time.sleep(15)
|
||||
try:
|
||||
handler.wfile.write(b": keep-alive\n\n") # commentaire SSE
|
||||
handler.wfile.write(b": keep-alive\n\n") # SSE comment
|
||||
handler.wfile.flush()
|
||||
except Exception:
|
||||
break
|
||||
@@ -216,7 +211,7 @@ class C2Utils:
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# ---------------------- Gestion des fichiers client ----------------------
|
||||
# ---------------------- Client file management ----------------------
|
||||
|
||||
def c2_download_client(self, handler, filename):
|
||||
"""Serve generated client file for download"""
|
||||
|
||||
@@ -1,7 +1,4 @@
|
||||
"""
|
||||
Character and persona management utilities.
|
||||
Handles character switching, creation, and image management.
|
||||
"""
|
||||
"""character_utils.py - Character switching, creation, and image management."""
|
||||
from __future__ import annotations
|
||||
import os
|
||||
import re
|
||||
@@ -131,10 +128,7 @@ class CharacterUtils:
|
||||
return out.getvalue()
|
||||
|
||||
def get_existing_character_numbers(self, action_dir: str | Path, action_name: str) -> set[int]:
|
||||
"""
|
||||
Retourne l'ensemble des numéros déjà utilisés pour les images characters
|
||||
(p. ex. <action>1.bmp, <action>2.bmp, ...).
|
||||
"""
|
||||
"""Return the set of numbers already used for character images (e.g. <action>1.bmp, <action>2.bmp)."""
|
||||
d = Path(action_dir)
|
||||
if not d.exists():
|
||||
return set()
|
||||
@@ -152,7 +146,7 @@ class CharacterUtils:
|
||||
# --------- endpoints ---------
|
||||
|
||||
def get_current_character(self):
|
||||
"""Lit le personnage courant depuis la config (DB)."""
|
||||
"""Read current character from config (DB)."""
|
||||
try:
|
||||
return self.shared_data.config.get('current_character', 'BJORN') or 'BJORN'
|
||||
except Exception:
|
||||
@@ -220,7 +214,7 @@ class CharacterUtils:
|
||||
|
||||
current_character = self.get_current_character()
|
||||
if character == current_character:
|
||||
# Quand le perso est actif, ses images sont dans status_images_dir/IDLE/IDLE1.bmp
|
||||
# Active character images live in status_images_dir/IDLE/IDLE1.bmp
|
||||
idle_image_path = os.path.join(self.shared_data.status_images_dir, 'IDLE', 'IDLE1.bmp')
|
||||
else:
|
||||
idle_image_path = os.path.join(self.shared_data.settings_dir, character, 'status', 'IDLE', 'IDLE1.bmp')
|
||||
@@ -398,11 +392,11 @@ class CharacterUtils:
|
||||
self.logger.error(f"Error in copy_character_images: {e}")
|
||||
|
||||
def upload_character_images(self, handler):
|
||||
"""Ajoute des images de characters pour une action existante (toujours BMP + numérotation)."""
|
||||
"""Add character images for an existing action (always BMP, auto-numbered)."""
|
||||
try:
|
||||
ctype, pdict = _parse_header(handler.headers.get('Content-Type'))
|
||||
if ctype != 'multipart/form-data':
|
||||
raise ValueError('Content-Type doit être multipart/form-data')
|
||||
raise ValueError('Content-Type must be multipart/form-data')
|
||||
|
||||
pdict['boundary'] = bytes(pdict['boundary'], "utf-8")
|
||||
pdict['CONTENT-LENGTH'] = int(handler.headers.get('Content-Length'))
|
||||
@@ -415,18 +409,18 @@ class CharacterUtils:
|
||||
)
|
||||
|
||||
if 'action_name' not in form:
|
||||
raise ValueError("Le nom de l'action est requis")
|
||||
raise ValueError("Action name is required")
|
||||
|
||||
action_name = (form.getvalue('action_name') or '').strip()
|
||||
if not action_name:
|
||||
raise ValueError("Le nom de l'action est requis")
|
||||
raise ValueError("Action name is required")
|
||||
|
||||
if 'character_images' not in form:
|
||||
raise ValueError('Aucun fichier image fourni')
|
||||
raise ValueError('No image file provided')
|
||||
|
||||
action_dir = os.path.join(self.shared_data.status_images_dir, action_name)
|
||||
if not os.path.exists(action_dir):
|
||||
raise FileNotFoundError(f"L'action '{action_name}' n'existe pas")
|
||||
raise FileNotFoundError(f"Action '{action_name}' does not exist")
|
||||
|
||||
existing_numbers = self.get_existing_character_numbers(action_dir, action_name)
|
||||
next_number = max(existing_numbers, default=0) + 1
|
||||
@@ -448,16 +442,16 @@ class CharacterUtils:
|
||||
handler.send_response(200)
|
||||
handler.send_header('Content-Type', 'application/json')
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({'status': 'success', 'message': 'Images de characters ajoutées avec succès'}).encode('utf-8'))
|
||||
handler.wfile.write(json.dumps({'status': 'success', 'message': 'Character images added successfully'}).encode('utf-8'))
|
||||
except Exception as e:
|
||||
self.logger.error(f"Erreur dans upload_character_images: {e}")
|
||||
self.logger.error(f"Error in upload_character_images: {e}")
|
||||
import traceback
|
||||
self.logger.error(traceback.format_exc())
|
||||
self._send_error_response(handler, str(e))
|
||||
|
||||
|
||||
def reload_fonts(self, handler):
|
||||
"""Recharge les fonts en exécutant load_fonts."""
|
||||
"""Reload fonts via load_fonts."""
|
||||
try:
|
||||
self.shared_data.load_fonts()
|
||||
handler.send_response(200)
|
||||
@@ -472,13 +466,13 @@ class CharacterUtils:
|
||||
handler.wfile.write(json.dumps({'status': 'error', 'message': str(e)}).encode('utf-8'))
|
||||
|
||||
def reload_images(self, handler):
|
||||
"""Recharge les images en exécutant load_images."""
|
||||
"""Reload images via load_images."""
|
||||
try:
|
||||
self.shared_data.load_images()
|
||||
handler.send_response(200)
|
||||
handler.send_header('Content-Type', 'application/json')
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({'status': 'success', 'message': 'Images rechargées avec succès.'}).encode('utf-8'))
|
||||
handler.wfile.write(json.dumps({'status': 'success', 'message': 'Images reloaded successfully.'}).encode('utf-8'))
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error in reload_images: {e}")
|
||||
handler.send_response(500)
|
||||
|
||||
@@ -1,8 +1,4 @@
|
||||
# web_utils/comment_utils.py
|
||||
"""
|
||||
Comment and status message management utilities.
|
||||
Handles status comments/messages displayed in the UI.
|
||||
"""
|
||||
"""comment_utils.py - Status comments and messages displayed in the UI."""
|
||||
from __future__ import annotations
|
||||
import json
|
||||
import re
|
||||
|
||||
@@ -1,8 +1,4 @@
|
||||
# web_utils/db_utils.py
|
||||
"""
|
||||
Database manager utilities.
|
||||
Handles database table operations, CRUD, schema management, and exports.
|
||||
"""
|
||||
"""db_utils.py - Database table operations, CRUD, schema management, and exports."""
|
||||
from __future__ import annotations
|
||||
import json
|
||||
import re
|
||||
@@ -122,7 +118,8 @@ class DBUtils:
|
||||
data = {"tables": self._db_list_tables(), "views": self._db_list_views()}
|
||||
self._write_json(handler, data)
|
||||
except Exception as e:
|
||||
self._write_json(handler, {"status": "error", "message": str(e)}, 500)
|
||||
logger.error(f"Error fetching database catalog: {e}")
|
||||
self._write_json(handler, {"status": "error", "message": "Internal server error"}, 500)
|
||||
|
||||
def db_schema_endpoint(self, handler, name: str):
|
||||
"""Get schema for a table or view."""
|
||||
@@ -133,8 +130,11 @@ class DBUtils:
|
||||
)
|
||||
cols = self.shared_data.db.query(f"PRAGMA table_info({name});")
|
||||
self._write_json(handler, {"meta": row, "columns": cols})
|
||||
except ValueError:
|
||||
self._write_json(handler, {"status": "error", "message": "Invalid table or view name"}, 400)
|
||||
except Exception as e:
|
||||
self._write_json(handler, {"status": "error", "message": str(e)}, 400)
|
||||
logger.error(f"Error fetching schema for '{name}': {e}")
|
||||
self._write_json(handler, {"status": "error", "message": "Internal server error"}, 500)
|
||||
|
||||
def db_get_table_endpoint(self, handler, table_name: str):
|
||||
"""Get table data with pagination and filtering."""
|
||||
@@ -179,8 +179,11 @@ class DBUtils:
|
||||
"pk": pk,
|
||||
"total": total
|
||||
})
|
||||
except ValueError:
|
||||
self._write_json(handler, {"status": "error", "message": "Invalid table or query parameters"}, 400)
|
||||
except Exception as e:
|
||||
self._write_json(handler, {"status": "error", "message": str(e)}, 500)
|
||||
logger.error(f"Error fetching table data: {e}")
|
||||
self._write_json(handler, {"status": "error", "message": "Internal server error"}, 500)
|
||||
|
||||
def db_update_cells_endpoint(self, handler, payload: dict):
|
||||
"""Update table cells."""
|
||||
@@ -210,7 +213,8 @@ class DBUtils:
|
||||
|
||||
self._write_json(handler, {"status": "ok"})
|
||||
except Exception as e:
|
||||
self._write_json(handler, {"status": "error", "message": str(e)}, 400)
|
||||
logger.error(f"Error updating cells: {e}")
|
||||
self._write_json(handler, {"status": "error", "message": "Internal server error"}, 400)
|
||||
|
||||
def db_delete_rows_endpoint(self, handler, payload: dict):
|
||||
"""Delete table rows."""
|
||||
@@ -226,8 +230,11 @@ class DBUtils:
|
||||
tuple(pks)
|
||||
)
|
||||
self._write_json(handler, {"status": "ok", "deleted": len(pks)})
|
||||
except Exception as e:
|
||||
except ValueError as e:
|
||||
self._write_json(handler, {"status": "error", "message": str(e)}, 400)
|
||||
except Exception as e:
|
||||
logger.error(f"Error deleting rows: {e}")
|
||||
self._write_json(handler, {"status": "error", "message": "Internal server error"}, 400)
|
||||
|
||||
def db_insert_row_endpoint(self, handler, payload: dict):
|
||||
"""Insert a new row."""
|
||||
@@ -259,7 +266,8 @@ class DBUtils:
|
||||
new_pk = row["lid"]
|
||||
self._write_json(handler, {"status": "ok", "pk": new_pk})
|
||||
except Exception as e:
|
||||
self._write_json(handler, {"status": "error", "message": str(e)}, 400)
|
||||
logger.error(f"Error inserting row: {e}")
|
||||
self._write_json(handler, {"status": "error", "message": "Internal server error"}, 400)
|
||||
|
||||
def db_export_table_endpoint(self, handler, table_name: str):
|
||||
"""Export table as CSV or JSON."""
|
||||
@@ -287,7 +295,8 @@ class DBUtils:
|
||||
handler.end_headers()
|
||||
handler.wfile.write(buf.getvalue().encode("utf-8"))
|
||||
except Exception as e:
|
||||
self._write_json(handler, {"status": "error", "message": str(e)}, 400)
|
||||
logger.error(f"Error exporting table: {e}")
|
||||
self._write_json(handler, {"status": "error", "message": "Internal server error"}, 400)
|
||||
|
||||
def db_vacuum_endpoint(self, handler):
|
||||
"""Vacuum and optimize database."""
|
||||
@@ -296,7 +305,8 @@ class DBUtils:
|
||||
self.shared_data.db.optimize()
|
||||
self._write_json(handler, {"status": "ok"})
|
||||
except Exception as e:
|
||||
self._write_json(handler, {"status": "error", "message": str(e)}, 500)
|
||||
logger.error(f"Error during database vacuum: {e}")
|
||||
self._write_json(handler, {"status": "error", "message": "Internal server error"}, 500)
|
||||
|
||||
def db_drop_table_endpoint(self, handler, table_name: str):
|
||||
"""Drop a table."""
|
||||
@@ -305,7 +315,8 @@ class DBUtils:
|
||||
self.shared_data.db.execute(f"DROP TABLE IF EXISTS {table};")
|
||||
self._write_json(handler, {"status": "ok"})
|
||||
except Exception as e:
|
||||
self._write_json(handler, {"status": "error", "message": str(e)}, 400)
|
||||
logger.error(f"Error dropping table: {e}")
|
||||
self._write_json(handler, {"status": "error", "message": "Internal server error"}, 400)
|
||||
|
||||
def db_truncate_table_endpoint(self, handler, table_name: str):
|
||||
"""Truncate a table."""
|
||||
@@ -318,7 +329,8 @@ class DBUtils:
|
||||
pass
|
||||
self._write_json(handler, {"status": "ok"})
|
||||
except Exception as e:
|
||||
self._write_json(handler, {"status": "error", "message": str(e)}, 400)
|
||||
logger.error(f"Error truncating table: {e}")
|
||||
self._write_json(handler, {"status": "error", "message": "Internal server error"}, 400)
|
||||
|
||||
|
||||
def db_create_table_endpoint(self, handler, payload: dict):
|
||||
@@ -345,14 +357,14 @@ class DBUtils:
|
||||
seg += " DEFAULT " + str(c["default"])
|
||||
if c.get("pk"):
|
||||
pk_inline = cname
|
||||
# AUTOINCREMENT en SQLite que sur INTEGER PRIMARY KEY
|
||||
# AUTOINCREMENT only valid on INTEGER PRIMARY KEY in SQLite
|
||||
if ctype.upper().startswith("INTEGER"):
|
||||
seg += " PRIMARY KEY AUTOINCREMENT"
|
||||
else:
|
||||
seg += " PRIMARY KEY"
|
||||
parts.append(seg)
|
||||
if pk_inline is None:
|
||||
# rien, PK implicite ou aucune
|
||||
# no explicit PK, implicit rowid or none
|
||||
pass
|
||||
ine = "IF NOT EXISTS " if payload.get("if_not_exists") else ""
|
||||
sql = f"CREATE TABLE {ine}{name} ({', '.join(parts)});"
|
||||
@@ -360,8 +372,9 @@ class DBUtils:
|
||||
handler.send_response(200); handler.send_header("Content-Type","application/json"); handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status":"ok"}).encode("utf-8"))
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating table: {e}")
|
||||
handler.send_response(400); handler.send_header("Content-Type","application/json"); handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status":"error","message":str(e)}).encode("utf-8"))
|
||||
handler.wfile.write(json.dumps({"status":"error","message":"Internal server error"}).encode("utf-8"))
|
||||
|
||||
def db_rename_table_endpoint(self, handler, payload: dict):
|
||||
try:
|
||||
@@ -371,8 +384,9 @@ class DBUtils:
|
||||
handler.send_response(200); handler.send_header("Content-Type","application/json"); handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status":"ok"}).encode("utf-8"))
|
||||
except Exception as e:
|
||||
logger.error(f"Error renaming table: {e}")
|
||||
handler.send_response(400); handler.send_header("Content-Type","application/json"); handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status":"error","message":str(e)}).encode("utf-8"))
|
||||
handler.wfile.write(json.dumps({"status":"error","message":"Internal server error"}).encode("utf-8"))
|
||||
|
||||
def db_add_column_endpoint(self, handler, payload: dict):
|
||||
"""
|
||||
@@ -391,11 +405,12 @@ class DBUtils:
|
||||
handler.send_response(200); handler.send_header("Content-Type","application/json"); handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status":"ok"}).encode("utf-8"))
|
||||
except Exception as e:
|
||||
logger.error(f"Error adding column: {e}")
|
||||
handler.send_response(400); handler.send_header("Content-Type","application/json"); handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status":"error","message":str(e)}).encode("utf-8"))
|
||||
handler.wfile.write(json.dumps({"status":"error","message":"Internal server error"}).encode("utf-8"))
|
||||
|
||||
|
||||
# --- drop/truncate (vue/table) ---
|
||||
# --- drop/truncate (view/table) ---
|
||||
def db_drop_view_endpoint(self, handler, view_name: str):
|
||||
try:
|
||||
view = self._db_safe_ident(view_name)
|
||||
@@ -403,8 +418,9 @@ class DBUtils:
|
||||
handler.send_response(200); handler.send_header("Content-Type","application/json"); handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status":"ok"}).encode("utf-8"))
|
||||
except Exception as e:
|
||||
logger.error(f"Error dropping view: {e}")
|
||||
handler.send_response(400); handler.send_header("Content-Type","application/json"); handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status":"error","message":str(e)}).encode("utf-8"))
|
||||
handler.wfile.write(json.dumps({"status":"error","message":"Internal server error"}).encode("utf-8"))
|
||||
|
||||
# --- export all (zip CSV/JSON) ---
|
||||
def db_export_all_endpoint(self, handler):
|
||||
@@ -425,7 +441,7 @@ class DBUtils:
|
||||
w.writeheader()
|
||||
for r in rows: w.writerow({c: r.get(c) for c in cols})
|
||||
z.writestr(f"tables/{name}.csv", sio.getvalue())
|
||||
# views (lecture seule)
|
||||
# views (read-only)
|
||||
for v in self._db_list_views():
|
||||
name = v["name"]
|
||||
try:
|
||||
@@ -451,8 +467,9 @@ class DBUtils:
|
||||
handler.end_headers()
|
||||
handler.wfile.write(payload)
|
||||
except Exception as e:
|
||||
logger.error(f"Error exporting database: {e}")
|
||||
handler.send_response(500); handler.send_header("Content-Type","application/json"); handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status":"error","message":str(e)}).encode("utf-8"))
|
||||
handler.wfile.write(json.dumps({"status":"error","message":"Internal server error"}).encode("utf-8"))
|
||||
|
||||
def db_list_tables_endpoint(self, handler):
|
||||
try:
|
||||
@@ -466,7 +483,7 @@ class DBUtils:
|
||||
handler.send_response(500)
|
||||
handler.send_header("Content-Type", "application/json")
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status":"error","message":str(e)}).encode("utf-8"))
|
||||
handler.wfile.write(json.dumps({"status":"error","message":"Internal server error"}).encode("utf-8"))
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
"""
|
||||
Debug / Profiling utilities for the Bjorn Debug page.
|
||||
Exposes process-level and per-thread metrics via /proc (no external deps).
|
||||
Designed for Pi Zero 2: lightweight reads, no subprocess spawning.
|
||||
OPTIMIZED: minimal allocations, cached tracemalloc, /proc/self/smaps for C memory.
|
||||
"""debug_utils.py - Debug/profiling for the Bjorn Debug page.
|
||||
|
||||
Exposes process and per-thread metrics via /proc. Optimized for Pi Zero 2.
|
||||
"""
|
||||
|
||||
import json
|
||||
@@ -58,7 +56,7 @@ def _fd_count():
|
||||
|
||||
|
||||
def _read_open_files():
|
||||
"""Read open FDs — reuses a single dict to minimize allocations."""
|
||||
"""Read open FDs - reuses a single dict to minimize allocations."""
|
||||
fd_dir = "/proc/self/fd"
|
||||
fd_map = {}
|
||||
try:
|
||||
@@ -141,7 +139,7 @@ def _get_python_threads_rich():
|
||||
if target is not None:
|
||||
tf = getattr(target, "__qualname__", getattr(target, "__name__", "?"))
|
||||
tm = getattr(target, "__module__", "")
|
||||
# Source file — use __code__ directly (avoids importing inspect)
|
||||
# Source file - use __code__ directly (avoids importing inspect)
|
||||
tfile = ""
|
||||
code = getattr(target, "__code__", None)
|
||||
if code:
|
||||
@@ -151,7 +149,7 @@ def _get_python_threads_rich():
|
||||
tm = ""
|
||||
tfile = ""
|
||||
|
||||
# Current stack — top 5 frames, build compact strings directly
|
||||
# Current stack - top 5 frames, build compact strings directly
|
||||
stack = []
|
||||
frame = frames.get(ident)
|
||||
depth = 0
|
||||
@@ -236,7 +234,7 @@ def _read_smaps_rollup():
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Cached tracemalloc — take snapshot at most every 5s to reduce overhead
|
||||
# Cached tracemalloc - take snapshot at most every 5s to reduce overhead
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
_tm_cache_lock = threading.Lock()
|
||||
@@ -261,7 +259,7 @@ def _get_tracemalloc_cached():
|
||||
current, peak = tracemalloc.get_traced_memory()
|
||||
snap = tracemalloc.take_snapshot()
|
||||
|
||||
# Single statistics call — use lineno (more useful), derive file-level client-side
|
||||
# Single statistics call - use lineno (more useful), derive file-level client-side
|
||||
stats_line = snap.statistics("lineno")[:30]
|
||||
top_by_line = []
|
||||
file_agg = {}
|
||||
|
||||
@@ -1,8 +1,4 @@
|
||||
# web_utils/file_utils.py
|
||||
"""
|
||||
File management utilities.
|
||||
Handles file operations, uploads, downloads, directory management.
|
||||
"""
|
||||
"""file_utils.py - File operations, uploads, downloads, and directory management."""
|
||||
from __future__ import annotations
|
||||
import os
|
||||
import json
|
||||
@@ -76,7 +72,8 @@ class FileUtils:
|
||||
except (BrokenPipeError, ConnectionResetError):
|
||||
return
|
||||
except Exception as e:
|
||||
error_payload = json.dumps({"status": "error", "message": str(e)}).encode("utf-8")
|
||||
self.logger.error(f"Error listing files: {e}")
|
||||
error_payload = json.dumps({"status": "error", "message": "Internal server error"}).encode("utf-8")
|
||||
handler.send_response(500)
|
||||
handler.send_header("Content-Type", "application/json")
|
||||
handler.send_header("Content-Length", str(len(error_payload)))
|
||||
@@ -126,7 +123,7 @@ class FileUtils:
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({
|
||||
"status": "error",
|
||||
"message": str(e)
|
||||
"message": "Internal server error"
|
||||
}).encode('utf-8'))
|
||||
|
||||
def loot_download(self, handler):
|
||||
@@ -158,8 +155,8 @@ class FileUtils:
|
||||
handler.send_header("Content-type", "application/json")
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({
|
||||
"status": "error",
|
||||
"message": str(e)
|
||||
"status": "error",
|
||||
"message": "Internal server error"
|
||||
}).encode('utf-8'))
|
||||
|
||||
def download_file(self, handler):
|
||||
@@ -178,10 +175,11 @@ class FileUtils:
|
||||
handler.send_response(404)
|
||||
handler.end_headers()
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error downloading file: {e}")
|
||||
handler.send_response(500)
|
||||
handler.send_header("Content-type", "application/json")
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode('utf-8'))
|
||||
handler.wfile.write(json.dumps({"status": "error", "message": "Internal server error"}).encode('utf-8'))
|
||||
|
||||
def create_folder(self, data):
|
||||
"""Create a new folder."""
|
||||
@@ -213,7 +211,15 @@ class FileUtils:
|
||||
current_path = json.loads(data.decode().strip())
|
||||
break
|
||||
|
||||
# Validate currentPath segments to prevent path traversal
|
||||
for seg in current_path:
|
||||
if not isinstance(seg, str) or '..' in seg or seg.startswith('/') or seg.startswith('\\'):
|
||||
raise PermissionError(f"Invalid path segment: {seg}")
|
||||
target_dir = os.path.join(self.shared_data.current_dir, *current_path)
|
||||
abs_target = os.path.realpath(target_dir)
|
||||
abs_base = os.path.realpath(self.shared_data.current_dir)
|
||||
if not abs_target.startswith(abs_base + os.sep) and abs_target != abs_base:
|
||||
raise PermissionError("Path traversal detected in currentPath")
|
||||
os.makedirs(target_dir, exist_ok=True)
|
||||
|
||||
uploaded_files = []
|
||||
@@ -260,7 +266,7 @@ class FileUtils:
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({
|
||||
"status": "error",
|
||||
"message": str(e)
|
||||
"message": "Internal server error"
|
||||
}).encode('utf-8'))
|
||||
|
||||
def delete_file(self, data):
|
||||
@@ -290,7 +296,7 @@ class FileUtils:
|
||||
}
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error deleting file: {str(e)}")
|
||||
return {"status": "error", "message": str(e)}
|
||||
return {"status": "error", "message": "Internal server error"}
|
||||
|
||||
def rename_file(self, data):
|
||||
"""Rename file or directory."""
|
||||
@@ -306,10 +312,10 @@ class FileUtils:
|
||||
"message": f"Successfully renamed {old_path} to {new_path}"
|
||||
}
|
||||
except ValueError as e:
|
||||
return {"status": "error", "message": str(e)}
|
||||
return {"status": "error", "message": "Access denied"}
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error renaming file: {str(e)}")
|
||||
return {"status": "error", "message": str(e)}
|
||||
return {"status": "error", "message": "Internal server error"}
|
||||
|
||||
def duplicate_file(self, data):
|
||||
"""Duplicate file or directory."""
|
||||
@@ -330,7 +336,7 @@ class FileUtils:
|
||||
}
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error duplicating file: {str(e)}")
|
||||
return {"status": "error", "message": str(e)}
|
||||
return {"status": "error", "message": "Internal server error"}
|
||||
|
||||
def move_file(self, data):
|
||||
"""Move file or directory."""
|
||||
@@ -355,7 +361,7 @@ class FileUtils:
|
||||
return {"status": "success", "message": "Item moved successfully"}
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error moving file: {str(e)}")
|
||||
return {"status": "error", "message": str(e)}
|
||||
return {"status": "error", "message": "Internal server error"}
|
||||
|
||||
def list_directories(self, handler):
|
||||
"""List directory structure."""
|
||||
@@ -379,12 +385,13 @@ class FileUtils:
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps(directory_structure).encode())
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error listing directories: {e}")
|
||||
handler.send_response(500)
|
||||
handler.send_header('Content-Type', 'application/json')
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({
|
||||
"status": "error",
|
||||
"message": str(e)
|
||||
"message": "Internal server error"
|
||||
}).encode())
|
||||
|
||||
def clear_output_folder(self, data=None):
|
||||
@@ -454,4 +461,4 @@ class FileUtils:
|
||||
}
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error clearing output folder: {str(e)}")
|
||||
return {"status": "error", "message": f"Error clearing output folder: {str(e)}"}
|
||||
return {"status": "error", "message": "Internal server error"}
|
||||
@@ -1,4 +1,4 @@
|
||||
# image_utils.py
|
||||
"""image_utils.py - Image upload, processing, and gallery management."""
|
||||
from __future__ import annotations
|
||||
import os, json, re, shutil, io, logging
|
||||
from io import BytesIO
|
||||
@@ -231,7 +231,7 @@ class ImageUtils:
|
||||
|
||||
try:
|
||||
ctype, pdict = _parse_header(h.headers.get('Content-Type'))
|
||||
if ctype != 'multipart/form-data': raise ValueError('Content-Type doit être multipart/form-data')
|
||||
if ctype != 'multipart/form-data': raise ValueError('Content-Type must be multipart/form-data')
|
||||
pdict['boundary']=bytes(pdict['boundary'],'utf-8'); pdict['CONTENT-LENGTH']=int(h.headers.get('Content-Length'))
|
||||
form = _MultipartForm(fp=BytesIO(h.rfile.read(pdict['CONTENT-LENGTH'])),
|
||||
headers=h.headers, environ={'REQUEST_METHOD':'POST'}, keep_blank_values=True)
|
||||
@@ -298,11 +298,11 @@ class ImageUtils:
|
||||
|
||||
try:
|
||||
ctype, pdict = _parse_header(h.headers.get('Content-Type'))
|
||||
if ctype != 'multipart/form-data': raise ValueError('Content-Type doit être multipart/form-data')
|
||||
if ctype != 'multipart/form-data': raise ValueError('Content-Type must be multipart/form-data')
|
||||
pdict['boundary']=bytes(pdict['boundary'],'utf-8'); pdict['CONTENT-LENGTH']=int(h.headers.get('Content-Length'))
|
||||
form = _MultipartForm(fp=BytesIO(h.rfile.read(pdict['CONTENT-LENGTH'])),
|
||||
headers=h.headers, environ={'REQUEST_METHOD':'POST'}, keep_blank_values=True)
|
||||
if 'web_image' not in form or not getattr(form['web_image'],'filename',''): raise ValueError('Aucun fichier web_image fourni')
|
||||
if 'web_image' not in form or not getattr(form['web_image'],'filename',''): raise ValueError('No web_image file provided')
|
||||
file_item = form['web_image']; filename = self._safe(file_item.filename)
|
||||
base, ext = os.path.splitext(filename);
|
||||
if ext.lower() not in ALLOWED_IMAGE_EXTS: filename = base + '.png'
|
||||
@@ -332,11 +332,11 @@ class ImageUtils:
|
||||
|
||||
try:
|
||||
ctype, pdict = _parse_header(h.headers.get('Content-Type'))
|
||||
if ctype != 'multipart/form-data': raise ValueError('Content-Type doit être multipart/form-data')
|
||||
if ctype != 'multipart/form-data': raise ValueError('Content-Type must be multipart/form-data')
|
||||
pdict['boundary']=bytes(pdict['boundary'],'utf-8'); pdict['CONTENT-LENGTH']=int(h.headers.get('Content-Length'))
|
||||
form = _MultipartForm(fp=BytesIO(h.rfile.read(pdict['CONTENT-LENGTH'])),
|
||||
headers=h.headers, environ={'REQUEST_METHOD':'POST'}, keep_blank_values=True)
|
||||
if 'icon_image' not in form or not getattr(form['icon_image'],'filename',''): raise ValueError('Aucun fichier icon_image fourni')
|
||||
if 'icon_image' not in form or not getattr(form['icon_image'],'filename',''): raise ValueError('No icon_image file provided')
|
||||
file_item = form['icon_image']; filename = self._safe(file_item.filename)
|
||||
base, ext = os.path.splitext(filename);
|
||||
if ext.lower() not in ALLOWED_IMAGE_EXTS: filename = base + '.png'
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# webutils/index_utils.py
|
||||
"""index_utils.py - Dashboard index page data and system status endpoints."""
|
||||
from __future__ import annotations
|
||||
import os
|
||||
import json
|
||||
@@ -16,7 +16,7 @@ from datetime import datetime
|
||||
from typing import Any, Dict, Tuple
|
||||
|
||||
|
||||
# --------- Singleton module (évite recréation à chaque requête) ----------
|
||||
# Singleton module (avoids re-creation on every request)
|
||||
logger = Logger(name="index_utils.py", level=logging.DEBUG)
|
||||
|
||||
|
||||
@@ -27,17 +27,17 @@ class IndexUtils:
|
||||
|
||||
self.db = shared_data.db
|
||||
|
||||
# Cache pour l'assemblage de stats (champs dynamiques)
|
||||
# Stats assembly cache (dynamic fields)
|
||||
self._last_stats: Dict[str, Any] = {}
|
||||
self._last_stats_ts: float = 0.0
|
||||
self._cache_ttl: float = 5.0 # 5s
|
||||
|
||||
# Cache pour l'info système (rarement changeant)
|
||||
# System info cache (rarely changes)
|
||||
self._system_info_cache: Dict[str, Any] = {}
|
||||
self._system_info_ts: float = 0.0
|
||||
self._system_cache_ttl: float = 300.0 # 5 min
|
||||
|
||||
# Cache wardrive (compte Wi-Fi connus)
|
||||
# Wardrive cache (known WiFi count)
|
||||
self._wardrive_cache_mem: Optional[int] = None
|
||||
self._wardrive_ts_mem: float = 0.0
|
||||
self._wardrive_ttl: float = 600.0 # 10 min
|
||||
@@ -55,14 +55,14 @@ class IndexUtils:
|
||||
return 0, 0
|
||||
|
||||
def _open_fds_count(self) -> int:
|
||||
"""Compte le nombre de file descriptors ouverts (proc global)."""
|
||||
"""Count total open file descriptors (global /proc)."""
|
||||
try:
|
||||
return len(glob.glob("/proc/*/fd/*"))
|
||||
except Exception as e:
|
||||
# self.logger.debug(f"FD probe error: {e}")
|
||||
return 0
|
||||
|
||||
# ---------------------- Helpers JSON ----------------------
|
||||
# ---------------------- JSON helpers ----------------------
|
||||
def _to_jsonable(self, obj):
|
||||
if obj is None or isinstance(obj, (bool, int, float, str)):
|
||||
return obj
|
||||
@@ -80,7 +80,7 @@ class IndexUtils:
|
||||
def _json(self, handler, code: int, obj):
|
||||
payload = json.dumps(self._to_jsonable(obj), ensure_ascii=False).encode("utf-8")
|
||||
handler.send_response(code)
|
||||
handler.send_header("Content-Type", "application/json")
|
||||
handler.send_header("Content-Type", "application/json; charset=utf-8")
|
||||
handler.send_header("Content-Length", str(len(payload)))
|
||||
handler.end_headers()
|
||||
try:
|
||||
@@ -96,7 +96,7 @@ class IndexUtils:
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
# ---------------------- Config store ----------------------
|
||||
# ---------------------- Config store -------------------------
|
||||
def _cfg_get(self, key: str, default=None):
|
||||
try:
|
||||
row = self.db.query_one("SELECT value FROM config WHERE key=? LIMIT 1;", (key,))
|
||||
@@ -123,7 +123,7 @@ class IndexUtils:
|
||||
(key, s),
|
||||
)
|
||||
|
||||
# ---------------------- Info système ----------------------
|
||||
# ---------------------- System info ----------------------
|
||||
def _get_system_info(self) -> Dict[str, Any]:
|
||||
now = time.time()
|
||||
if self._system_info_cache and (now - self._system_info_ts) < self._system_cache_ttl:
|
||||
@@ -171,7 +171,7 @@ class IndexUtils:
|
||||
return platform.machine()
|
||||
|
||||
def _check_epd_connected(self) -> bool:
|
||||
# I2C puis fallback SPI
|
||||
# I2C first, fallback to SPI
|
||||
try:
|
||||
result = subprocess.run(["i2cdetect", "-y", "1"], capture_output=True, text=True, timeout=1)
|
||||
if result.returncode == 0:
|
||||
@@ -266,7 +266,7 @@ class IndexUtils:
|
||||
# self.logger.debug(f"Battery probe error: {e}")
|
||||
return {"present": False}
|
||||
|
||||
# ---------------------- Réseau ----------------------
|
||||
# ---------------------- Network ----------------------
|
||||
def _quick_internet(self, timeout: float = 1.0) -> bool:
|
||||
try:
|
||||
for server in ["1.1.1.1", "8.8.8.8"]:
|
||||
@@ -499,7 +499,7 @@ class IndexUtils:
|
||||
except Exception:
|
||||
return 0
|
||||
|
||||
# ---------------------- Wi-Fi connus (profils NM) ----------------------
|
||||
# ---------------------- Known WiFi (NM profiles) ----------------------
|
||||
def _run_nmcli(self, args: list[str], timeout: float = 4.0) -> Optional[str]:
|
||||
import shutil, os as _os
|
||||
nmcli_path = shutil.which("nmcli") or "/usr/bin/nmcli"
|
||||
@@ -520,14 +520,14 @@ class IndexUtils:
|
||||
# self.logger.debug(f"nmcli rc={out.returncode} args={args} stderr={(out.stderr or '').strip()}")
|
||||
return None
|
||||
except FileNotFoundError:
|
||||
# self.logger.debug("nmcli introuvable")
|
||||
# self.logger.debug("nmcli not found")
|
||||
return None
|
||||
except Exception as e:
|
||||
# self.logger.debug(f"nmcli exception {args}: {e}")
|
||||
return None
|
||||
|
||||
def _known_wifi_count_nmcli(self) -> int:
|
||||
# Try 1: simple (une valeur par ligne)
|
||||
# Try 1: simple (one value per line)
|
||||
out = self._run_nmcli(["-t", "-g", "TYPE", "connection", "show"])
|
||||
if out:
|
||||
cnt = sum(1 for line in out.splitlines()
|
||||
@@ -573,20 +573,20 @@ class IndexUtils:
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Dernier recours: config persistée
|
||||
# Last resort: persisted config value
|
||||
v = self._cfg_get("wardrive_known", 0)
|
||||
# self.logger.debug(f"known wifi via cfg fallback = {v}")
|
||||
return int(v) if isinstance(v, (int, float)) else 0
|
||||
|
||||
# Cache wardrive: mémoire (par process) + DB (partagé multi-workers)
|
||||
# Wardrive cache: in-memory (per-process) + DB (shared across workers)
|
||||
def _wardrive_known_cached(self) -> int:
|
||||
now = time.time()
|
||||
|
||||
# 1) cache mémoire
|
||||
# 1) in-memory cache
|
||||
if self._wardrive_cache_mem is not None and (now - self._wardrive_ts_mem) < self._wardrive_ttl:
|
||||
return int(self._wardrive_cache_mem)
|
||||
|
||||
# 2) cache partagé en DB
|
||||
# 2) shared DB cache
|
||||
try:
|
||||
row = self.db.query_one("SELECT value FROM config WHERE key='wardrive_cache' LIMIT 1;")
|
||||
if row and row.get("value"):
|
||||
@@ -600,17 +600,17 @@ class IndexUtils:
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# 3) refresh si nécessaire
|
||||
# 3) refresh if needed
|
||||
val = int(self._known_wifi_count_nmcli())
|
||||
|
||||
# maj caches
|
||||
# update caches
|
||||
self._wardrive_cache_mem = val
|
||||
self._wardrive_ts_mem = now
|
||||
self._cfg_set("wardrive_cache", {"val": val, "ts": now})
|
||||
|
||||
return val
|
||||
|
||||
# ---------------------- Accès direct shared_data ----------------------
|
||||
# ---------------------- Direct shared_data access ----------------------
|
||||
def _count_open_ports_total(self) -> int:
|
||||
try:
|
||||
val = int(getattr(self.shared_data, "port_count", -1))
|
||||
@@ -677,7 +677,7 @@ class IndexUtils:
|
||||
except Exception:
|
||||
return str(self._cfg_get("bjorn_mode", "AUTO")).upper()
|
||||
|
||||
# ---------------------- Delta vuln depuis dernier scan ----------------------
|
||||
# ---------------------- Vuln delta since last scan ----------------------
|
||||
def _vulns_delta(self) -> int:
|
||||
last_scan_ts = self._cfg_get("vuln_last_scan_ts")
|
||||
if not last_scan_ts:
|
||||
@@ -707,7 +707,7 @@ class IndexUtils:
|
||||
except Exception:
|
||||
return 0
|
||||
|
||||
# ---------------------- Assemblage principal ----------------------
|
||||
# ---------------------- Main stats assembly ----------------------
|
||||
def _assemble_stats(self) -> Dict[str, Any]:
|
||||
now = time.time()
|
||||
if self._last_stats and (now - self._last_stats_ts) < self._cache_ttl:
|
||||
@@ -725,7 +725,7 @@ class IndexUtils:
|
||||
scripts_count = self._scripts_count()
|
||||
wardrive = self._wardrive_known_cached()
|
||||
|
||||
# Système
|
||||
# System
|
||||
sys_info = self._get_system_info()
|
||||
uptime = self._uptime_str()
|
||||
first_init = self._first_init_ts()
|
||||
@@ -741,7 +741,7 @@ class IndexUtils:
|
||||
# Batterie
|
||||
batt = self._battery_probe()
|
||||
|
||||
# Réseau
|
||||
# Network
|
||||
internet_ok = self._quick_internet()
|
||||
gw, dns = self._gw_dns()
|
||||
wifi_ip = self._ip_for("wlan0")
|
||||
@@ -775,12 +775,12 @@ class IndexUtils:
|
||||
"bjorn_level": bjorn_level,
|
||||
"internet_access": bool(internet_ok),
|
||||
|
||||
# Hôtes & ports
|
||||
# Hosts & ports
|
||||
"known_hosts_total": int(total),
|
||||
"alive_hosts": int(alive),
|
||||
"open_ports_alive_total": int(open_ports_total),
|
||||
|
||||
# Comptes sécurité
|
||||
# Security counters
|
||||
"wardrive_known": int(wardrive),
|
||||
"vulnerabilities": int(vulns_total),
|
||||
"vulns_delta": int(vulns_delta),
|
||||
@@ -918,35 +918,35 @@ class IndexUtils:
|
||||
|
||||
|
||||
def reload_generate_actions_json(self, handler):
|
||||
"""Recharge le fichier actions.json en exécutant generate_actions_json."""
|
||||
"""Reload actions.json by running generate_actions_json."""
|
||||
try:
|
||||
self.shared_data.generate_actions_json()
|
||||
handler.send_response(200)
|
||||
handler.send_header('Content-Type', 'application/json')
|
||||
handler.send_header('Content-Type', 'application/json; charset=utf-8')
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({'status': 'success', 'message': 'actions.json reloaded successfully.'}).encode('utf-8'))
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error in reload_generate_actions_json: {e}")
|
||||
handler.send_response(500)
|
||||
handler.send_header('Content-Type', 'application/json')
|
||||
handler.send_header('Content-Type', 'application/json; charset=utf-8')
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({'status': 'error', 'message': str(e)}).encode('utf-8'))
|
||||
|
||||
|
||||
def clear_shared_config_json(self, handler, restart=True):
|
||||
"""Reset config à partir des defaults, en DB."""
|
||||
"""Reset config to defaults in DB."""
|
||||
try:
|
||||
self.shared_data.config = self.shared_data.get_default_config()
|
||||
self.shared_data.save_config() # -> DB
|
||||
if restart:
|
||||
self.restart_bjorn_service(handler)
|
||||
handler.send_response(200)
|
||||
handler.send_header("Content-type","application/json")
|
||||
handler.send_header("Content-Type", "application/json; charset=utf-8")
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status":"success","message":"Configuration reset to defaults"}).encode("utf-8"))
|
||||
except Exception as e:
|
||||
handler.send_response(500)
|
||||
handler.send_header("Content-type","application/json")
|
||||
handler.send_header("Content-Type", "application/json; charset=utf-8")
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status":"error","message":str(e)}).encode("utf-8"))
|
||||
|
||||
@@ -968,7 +968,7 @@ class IndexUtils:
|
||||
|
||||
def serve_manifest(self, handler):
|
||||
handler.send_response(200)
|
||||
handler.send_header("Content-type", "application/json")
|
||||
handler.send_header("Content-Type", "application/json; charset=utf-8")
|
||||
handler.end_headers()
|
||||
manifest_path = os.path.join(self.shared_data.web_dir, 'manifest.json')
|
||||
try:
|
||||
@@ -992,7 +992,7 @@ class IndexUtils:
|
||||
|
||||
|
||||
|
||||
# --- Nouveaux probes "radio / link" ---
|
||||
# --- Radio / link probes ---
|
||||
def _wifi_radio_on(self) -> bool:
|
||||
# nmcli (NetworkManager)
|
||||
try:
|
||||
@@ -1019,7 +1019,7 @@ class IndexUtils:
|
||||
btctl = shutil.which("bluetoothctl") or "/usr/bin/bluetoothctl"
|
||||
env = _os.environ.copy()
|
||||
env.setdefault("PATH", "/usr/sbin:/usr/bin:/sbin:/bin")
|
||||
# important quand on tourne en service systemd
|
||||
# needed when running as a systemd service
|
||||
env.setdefault("DBUS_SYSTEM_BUS_ADDRESS", "unix:path=/run/dbus/system_bus_socket")
|
||||
|
||||
try:
|
||||
@@ -1027,7 +1027,7 @@ class IndexUtils:
|
||||
if out.returncode == 0:
|
||||
txt = (out.stdout or "").lower()
|
||||
if "no default controller available" in txt:
|
||||
# Essayer de lister et cibler le premier contrôleur
|
||||
# Try listing and targeting the first controller
|
||||
ls = subprocess.run([btctl, "list"], capture_output=True, text=True, timeout=1.2, env=env)
|
||||
if ls.returncode == 0:
|
||||
for line in (ls.stdout or "").splitlines():
|
||||
@@ -1038,7 +1038,7 @@ class IndexUtils:
|
||||
if sh.returncode == 0 and "powered: yes" in (sh.stdout or "").lower():
|
||||
return True
|
||||
return False
|
||||
# cas normal
|
||||
# normal case
|
||||
if "powered: yes" in txt:
|
||||
return True
|
||||
except Exception:
|
||||
@@ -1068,7 +1068,7 @@ class IndexUtils:
|
||||
return ("STATE UP" in t) or ("LOWER_UP" in t)
|
||||
except Exception:
|
||||
pass
|
||||
# ethtool (si dispo)
|
||||
# ethtool fallback
|
||||
try:
|
||||
out = subprocess.run(["ethtool", ifname], capture_output=True, text=True, timeout=1)
|
||||
if out.returncode == 0:
|
||||
@@ -1080,7 +1080,7 @@ class IndexUtils:
|
||||
return False
|
||||
|
||||
def _usb_gadget_active(self) -> bool:
|
||||
# actif si un UDC est attaché
|
||||
# active if a UDC is attached
|
||||
try:
|
||||
udc = self._read_text("/sys/kernel/config/usb_gadget/g1/UDC")
|
||||
return bool(udc and udc.strip())
|
||||
|
||||
@@ -1,7 +1,4 @@
|
||||
# web_utils/llm_utils.py
|
||||
# HTTP endpoints for LLM chat, LLM bridge config, and MCP server config.
|
||||
# Follows the same pattern as all other web_utils classes in this project.
|
||||
|
||||
"""llm_utils.py - HTTP endpoints for LLM chat, bridge config, and MCP server config."""
|
||||
import json
|
||||
import uuid
|
||||
from typing import Any, Dict
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
"""
|
||||
Loki web API endpoints.
|
||||
"""
|
||||
"""loki_utils.py - Loki web API endpoints."""
|
||||
import os
|
||||
import json
|
||||
import logging
|
||||
@@ -23,7 +21,7 @@ class LokiUtils:
|
||||
# ── GET endpoints (handler signature) ─────────────────────
|
||||
|
||||
def get_status(self, handler):
|
||||
"""GET /api/loki/status — engine state."""
|
||||
"""GET /api/loki/status - engine state."""
|
||||
engine = self._engine
|
||||
if engine:
|
||||
data = engine.get_status()
|
||||
@@ -36,7 +34,7 @@ class LokiUtils:
|
||||
self._send_json(handler, data)
|
||||
|
||||
def get_scripts(self, handler):
|
||||
"""GET /api/loki/scripts — user-saved scripts."""
|
||||
"""GET /api/loki/scripts - user-saved scripts."""
|
||||
try:
|
||||
rows = self.shared_data.db.query(
|
||||
"SELECT id, name, description, category, target_os, "
|
||||
@@ -48,7 +46,7 @@ class LokiUtils:
|
||||
self._send_json(handler, {'scripts': []})
|
||||
|
||||
def get_script(self, handler):
|
||||
"""GET /api/loki/script?id=N — single script with content."""
|
||||
"""GET /api/loki/script?id=N - single script with content."""
|
||||
try:
|
||||
qs = parse_qs(urlparse(handler.path).query)
|
||||
script_id = int(qs.get('id', [0])[0])
|
||||
@@ -64,7 +62,7 @@ class LokiUtils:
|
||||
self._send_json(handler, {'error': str(e)}, 500)
|
||||
|
||||
def get_jobs(self, handler):
|
||||
"""GET /api/loki/jobs — job list."""
|
||||
"""GET /api/loki/jobs - job list."""
|
||||
engine = self._engine
|
||||
if engine:
|
||||
jobs = engine.get_jobs()
|
||||
@@ -73,7 +71,7 @@ class LokiUtils:
|
||||
self._send_json(handler, {'jobs': jobs})
|
||||
|
||||
def get_payloads(self, handler):
|
||||
"""GET /api/loki/payloads — built-in payload list."""
|
||||
"""GET /api/loki/payloads - built-in payload list."""
|
||||
payloads = []
|
||||
payload_dir = os.path.join(
|
||||
os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
|
||||
@@ -104,7 +102,7 @@ class LokiUtils:
|
||||
self._send_json(handler, {'payloads': payloads})
|
||||
|
||||
def get_layouts(self, handler):
|
||||
"""GET /api/loki/layouts — available keyboard layouts."""
|
||||
"""GET /api/loki/layouts - available keyboard layouts."""
|
||||
try:
|
||||
from loki.layouts import available
|
||||
layouts = available()
|
||||
@@ -115,7 +113,7 @@ class LokiUtils:
|
||||
# ── POST endpoints (JSON data signature) ──────────────────
|
||||
|
||||
def toggle_loki(self, data: Dict) -> Dict:
|
||||
"""POST /api/loki/toggle — switch to/from LOKI mode."""
|
||||
"""POST /api/loki/toggle - switch to/from LOKI mode."""
|
||||
enabled = bool(data.get('enabled', False))
|
||||
if enabled:
|
||||
self.shared_data.operation_mode = "LOKI"
|
||||
@@ -124,7 +122,7 @@ class LokiUtils:
|
||||
return {'status': 'ok', 'enabled': enabled}
|
||||
|
||||
def save_script(self, data: Dict) -> Dict:
|
||||
"""POST /api/loki/script/save — save/update a script."""
|
||||
"""POST /api/loki/script/save - save/update a script."""
|
||||
try:
|
||||
script_id = data.get('id')
|
||||
name = data.get('name', '').strip()
|
||||
@@ -154,7 +152,7 @@ class LokiUtils:
|
||||
return {'status': 'error', 'message': str(e)}
|
||||
|
||||
def delete_script(self, data: Dict) -> Dict:
|
||||
"""POST /api/loki/script/delete — delete a script."""
|
||||
"""POST /api/loki/script/delete - delete a script."""
|
||||
try:
|
||||
script_id = data.get('id')
|
||||
if script_id:
|
||||
@@ -166,7 +164,7 @@ class LokiUtils:
|
||||
return {'status': 'error', 'message': str(e)}
|
||||
|
||||
def run_script(self, data: Dict) -> Dict:
|
||||
"""POST /api/loki/script/run — execute a HIDScript."""
|
||||
"""POST /api/loki/script/run - execute a HIDScript."""
|
||||
engine = self._engine
|
||||
if not engine:
|
||||
return {'status': 'error', 'message': 'Loki engine not available'}
|
||||
@@ -185,7 +183,7 @@ class LokiUtils:
|
||||
return {'status': 'error', 'message': str(e)}
|
||||
|
||||
def cancel_job(self, data: Dict) -> Dict:
|
||||
"""POST /api/loki/job/cancel — cancel a running job."""
|
||||
"""POST /api/loki/job/cancel - cancel a running job."""
|
||||
engine = self._engine
|
||||
if not engine:
|
||||
return {'status': 'error', 'message': 'Loki engine not available'}
|
||||
@@ -195,20 +193,20 @@ class LokiUtils:
|
||||
return {'status': 'error', 'message': 'Job not found'}
|
||||
|
||||
def clear_jobs(self, data: Dict) -> Dict:
|
||||
"""POST /api/loki/jobs/clear — clear completed jobs."""
|
||||
"""POST /api/loki/jobs/clear - clear completed jobs."""
|
||||
engine = self._engine
|
||||
if engine and engine._jobs:
|
||||
engine.job_manager.clear_completed()
|
||||
return {'status': 'ok'}
|
||||
|
||||
def install_gadget(self, data: Dict) -> Dict:
|
||||
"""POST /api/loki/install — install HID gadget boot script."""
|
||||
"""POST /api/loki/install - install HID gadget boot script."""
|
||||
from loki import LokiEngine
|
||||
result = LokiEngine.install_hid_gadget()
|
||||
return result
|
||||
|
||||
def reboot(self, data: Dict) -> Dict:
|
||||
"""POST /api/loki/reboot — reboot the Pi to activate HID gadget."""
|
||||
"""POST /api/loki/reboot - reboot the Pi to activate HID gadget."""
|
||||
import subprocess
|
||||
try:
|
||||
logger.info("Reboot requested by Loki setup")
|
||||
@@ -218,7 +216,7 @@ class LokiUtils:
|
||||
return {'status': 'error', 'message': str(e)}
|
||||
|
||||
def quick_type(self, data: Dict) -> Dict:
|
||||
"""POST /api/loki/quick — quick-type text without a full script."""
|
||||
"""POST /api/loki/quick - quick-type text without a full script."""
|
||||
engine = self._engine
|
||||
if not engine or not engine._running:
|
||||
return {'status': 'error', 'message': 'Loki not running'}
|
||||
|
||||
@@ -1,8 +1,4 @@
|
||||
# web_utils/netkb_utils.py
|
||||
"""
|
||||
Network Knowledge Base utilities.
|
||||
Handles network discovery data, host information, and action queue management.
|
||||
"""
|
||||
"""netkb_utils.py - Network discovery data, host info, and action queue management."""
|
||||
from __future__ import annotations
|
||||
import json
|
||||
from typing import Any, Dict, Optional
|
||||
@@ -23,13 +19,20 @@ class NetKBUtils:
|
||||
try:
|
||||
hosts = self.shared_data.db.get_all_hosts()
|
||||
actions_meta = self.shared_data.db.list_actions()
|
||||
action_names = [a["b_class"] for a in actions_meta]
|
||||
builtin_actions = []
|
||||
custom_actions = []
|
||||
for a in actions_meta:
|
||||
if a.get("b_action") == "custom" or (a.get("b_module") or "").startswith("custom/"):
|
||||
custom_actions.append(a["b_class"])
|
||||
else:
|
||||
builtin_actions.append(a["b_class"])
|
||||
|
||||
alive = [h for h in hosts if int(h.get("alive") or 0) == 1]
|
||||
response_data = {
|
||||
"ips": [h.get("ips", "") for h in alive],
|
||||
"ports": {h.get("ips", ""): (h.get("ports", "") or "").split(';') for h in alive},
|
||||
"actions": action_names
|
||||
"actions": builtin_actions,
|
||||
"custom_actions": custom_actions,
|
||||
}
|
||||
handler.send_response(200)
|
||||
handler.send_header("Content-type", "application/json")
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
# web_utils/network_utils.py
|
||||
"""
|
||||
Network utilities for WiFi/network operations.
|
||||
Handles WiFi scanning, connection, known networks management.
|
||||
"""network_utils.py - WiFi scanning, connection, and known networks management.
|
||||
|
||||
Compatible with both legacy NM keyfiles and Trixie netplan.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
@@ -48,7 +46,7 @@ class NetworkUtils:
|
||||
|
||||
Uses nmcli terse output. On Trixie, netplan-generated profiles
|
||||
(named ``netplan-wlan0-*``) appear alongside user-created NM
|
||||
profiles — both are returned.
|
||||
profiles - both are returned.
|
||||
"""
|
||||
try:
|
||||
result = self._run(
|
||||
@@ -60,7 +58,7 @@ class NetworkUtils:
|
||||
for line in result.stdout.strip().splitlines():
|
||||
if not line.strip():
|
||||
continue
|
||||
# nmcli -t uses ':' as delimiter — SSIDs with ':' are
|
||||
# nmcli -t uses ':' as delimiter - SSIDs with ':' are
|
||||
# escaped by nmcli (backslash-colon), so split from
|
||||
# the right to be safe: last field = priority,
|
||||
# second-to-last = type, rest = name.
|
||||
@@ -205,7 +203,7 @@ class NetworkUtils:
|
||||
continue
|
||||
|
||||
# Split from the right: IN-USE (last), SECURITY, SIGNAL, rest=SSID
|
||||
# IN-USE is '*' or '' — always one char field at the end
|
||||
# IN-USE is '*' or '' - always one char field at the end
|
||||
parts = line.rsplit(':', 3)
|
||||
if len(parts) < 4:
|
||||
continue
|
||||
@@ -302,7 +300,7 @@ class NetworkUtils:
|
||||
def import_potfiles(self, data=None):
|
||||
"""Import WiFi credentials from .pot/.potfile files.
|
||||
|
||||
Creates NM connection profiles via nmcli — these are stored
|
||||
Creates NM connection profiles via nmcli - these are stored
|
||||
in /etc/NetworkManager/system-connections/ and persist across
|
||||
reboots on both legacy and Trixie builds.
|
||||
"""
|
||||
@@ -403,7 +401,7 @@ class NetworkUtils:
|
||||
os.remove(path)
|
||||
self.logger.info("Deleted preconfigured.nmconnection")
|
||||
else:
|
||||
self.logger.info("preconfigured.nmconnection not found (Trixie/netplan — this is normal)")
|
||||
self.logger.info("preconfigured.nmconnection not found (Trixie/netplan - this is normal)")
|
||||
self._json_response(handler, 200, {"status": "success"})
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error deleting preconfigured file: {e}")
|
||||
@@ -415,7 +413,7 @@ class NetworkUtils:
|
||||
On Trixie this is a no-op: Wi-Fi is managed by netplan.
|
||||
Returns success regardless to avoid breaking the frontend.
|
||||
"""
|
||||
self.logger.warning("create_preconfigured_file called — no-op on Trixie/netplan builds")
|
||||
self.logger.warning("create_preconfigured_file called - no-op on Trixie/netplan builds")
|
||||
self._json_response(handler, 200, {
|
||||
"status": "success",
|
||||
"message": "No action needed on netplan-managed builds",
|
||||
@@ -428,7 +426,7 @@ class NetworkUtils:
|
||||
|
||||
Accepts multipart/form-data with a 'potfile' field.
|
||||
Saves to shared_data.potfiles_dir.
|
||||
Manual multipart parsing — no cgi module (removed in Python 3.13).
|
||||
Manual multipart parsing - no cgi module (removed in Python 3.13).
|
||||
"""
|
||||
try:
|
||||
content_type = handler.headers.get("Content-Type", "")
|
||||
|
||||
@@ -1,8 +1,4 @@
|
||||
# web_utils/orchestrator_utils.py
|
||||
"""
|
||||
Orchestrator management utilities.
|
||||
Handles attack execution, scanning, and credential management.
|
||||
"""
|
||||
"""orchestrator_utils.py - Attack execution, scanning, and credential management."""
|
||||
from __future__ import annotations
|
||||
import json
|
||||
import html
|
||||
|
||||
171
web_utils/package_utils.py
Normal file
171
web_utils/package_utils.py
Normal file
@@ -0,0 +1,171 @@
|
||||
"""package_utils.py - Package installation, listing, and removal endpoints."""
|
||||
from __future__ import annotations
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
import time
|
||||
from typing import Any, Dict
|
||||
|
||||
from logger import Logger
|
||||
|
||||
logger = Logger(name="package_utils.py", level=logging.DEBUG)
|
||||
|
||||
# Regex: alphanumeric, hyphens, underscores, dots, brackets (for extras like pkg[extra])
|
||||
_VALID_PACKAGE_NAME = re.compile(r'^[a-zA-Z0-9_\-\.]+(\[[a-zA-Z0-9_\-\.,]+\])?$')
|
||||
|
||||
|
||||
class PackageUtils:
|
||||
"""Utilities for pip package management."""
|
||||
|
||||
def __init__(self, shared_data):
|
||||
self.logger = logger
|
||||
self.shared_data = shared_data
|
||||
|
||||
# =========================================================================
|
||||
# JSON ENDPOINTS
|
||||
# =========================================================================
|
||||
|
||||
def list_packages_json(self, data: Dict) -> Dict:
|
||||
"""Return all tracked packages."""
|
||||
try:
|
||||
packages = self.shared_data.db.list_packages()
|
||||
return {"status": "success", "data": packages}
|
||||
except Exception as e:
|
||||
self.logger.error(f"list_packages error: {e}")
|
||||
return {"status": "error", "message": str(e)}
|
||||
|
||||
def uninstall_package(self, data: Dict) -> Dict:
|
||||
"""Uninstall a pip package and remove from DB."""
|
||||
try:
|
||||
name = data.get("name")
|
||||
if not name:
|
||||
return {"status": "error", "message": "name is required"}
|
||||
if not _VALID_PACKAGE_NAME.match(name):
|
||||
return {"status": "error", "message": "Invalid package name"}
|
||||
|
||||
result = subprocess.run(
|
||||
["pip", "uninstall", "-y", name],
|
||||
capture_output=True, text=True, timeout=120,
|
||||
)
|
||||
if result.returncode != 0:
|
||||
return {"status": "error", "message": result.stderr.strip() or "Uninstall failed"}
|
||||
|
||||
self.shared_data.db.remove_package(name)
|
||||
return {"status": "success", "message": f"Package '{name}' uninstalled"}
|
||||
except subprocess.TimeoutExpired:
|
||||
return {"status": "error", "message": "Uninstall timed out"}
|
||||
except Exception as e:
|
||||
self.logger.error(f"uninstall_package error: {e}")
|
||||
return {"status": "error", "message": str(e)}
|
||||
|
||||
# =========================================================================
|
||||
# SSE ENDPOINT
|
||||
# =========================================================================
|
||||
|
||||
def install_package(self, handler):
|
||||
"""Stream pip install output as SSE events (GET endpoint)."""
|
||||
from urllib.parse import parse_qs, urlparse
|
||||
query = parse_qs(urlparse(handler.path).query)
|
||||
name = query.get("name", [""])[0].strip()
|
||||
|
||||
# Validate
|
||||
if not name:
|
||||
handler.send_response(400)
|
||||
handler.send_header("Content-Type", "application/json")
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status": "error", "message": "name is required"}).encode("utf-8"))
|
||||
return
|
||||
if not _VALID_PACKAGE_NAME.match(name):
|
||||
handler.send_response(400)
|
||||
handler.send_header("Content-Type", "application/json")
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status": "error", "message": "Invalid package name"}).encode("utf-8"))
|
||||
return
|
||||
|
||||
max_lifetime = 300 # 5 minutes maximum
|
||||
start_time = time.time()
|
||||
process = None
|
||||
try:
|
||||
handler.send_response(200)
|
||||
handler.send_header("Content-Type", "text/event-stream")
|
||||
handler.send_header("Cache-Control", "no-cache")
|
||||
handler.send_header("Connection", "keep-alive")
|
||||
handler.send_header("Access-Control-Allow-Origin", "*")
|
||||
handler.end_headers()
|
||||
|
||||
process = subprocess.Popen(
|
||||
["pip", "install", "--break-system-packages", name],
|
||||
stdout=subprocess.PIPE,
|
||||
stderr=subprocess.STDOUT,
|
||||
text=True,
|
||||
bufsize=1,
|
||||
)
|
||||
|
||||
for line in process.stdout:
|
||||
if time.time() - start_time > max_lifetime:
|
||||
self.logger.warning("install_package SSE stream reached max lifetime")
|
||||
break
|
||||
|
||||
payload = json.dumps({"line": line.rstrip(), "done": False})
|
||||
try:
|
||||
handler.wfile.write(f"data: {payload}\n\n".encode("utf-8"))
|
||||
handler.wfile.flush()
|
||||
except (ConnectionResetError, ConnectionAbortedError, BrokenPipeError, OSError):
|
||||
self.logger.info("Client disconnected during package install")
|
||||
break
|
||||
|
||||
process.wait(timeout=30)
|
||||
success = process.returncode == 0
|
||||
|
||||
# Get version on success
|
||||
version = ""
|
||||
if success:
|
||||
try:
|
||||
show = subprocess.run(
|
||||
["pip", "show", name],
|
||||
capture_output=True, text=True, timeout=15,
|
||||
)
|
||||
for show_line in show.stdout.splitlines():
|
||||
if show_line.startswith("Version:"):
|
||||
version = show_line.split(":", 1)[1].strip()
|
||||
break
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Record in DB
|
||||
self.shared_data.db.add_package(name, version)
|
||||
|
||||
payload = json.dumps({"line": "", "done": True, "success": success, "version": version})
|
||||
try:
|
||||
handler.wfile.write(f"data: {payload}\n\n".encode("utf-8"))
|
||||
handler.wfile.flush()
|
||||
except (ConnectionResetError, ConnectionAbortedError, BrokenPipeError, OSError):
|
||||
pass
|
||||
|
||||
except (ConnectionResetError, ConnectionAbortedError, BrokenPipeError):
|
||||
self.logger.info("Client disconnected from package install SSE stream")
|
||||
except Exception as e:
|
||||
self.logger.error(f"install_package SSE error: {e}")
|
||||
try:
|
||||
payload = json.dumps({"line": f"Error: {e}", "done": True, "success": False, "version": ""})
|
||||
handler.wfile.write(f"data: {payload}\n\n".encode("utf-8"))
|
||||
handler.wfile.flush()
|
||||
except Exception:
|
||||
pass
|
||||
finally:
|
||||
if process:
|
||||
try:
|
||||
if process.stdout and not process.stdout.closed:
|
||||
process.stdout.close()
|
||||
if process.poll() is None:
|
||||
process.terminate()
|
||||
try:
|
||||
process.wait(timeout=5)
|
||||
except subprocess.TimeoutExpired:
|
||||
process.kill()
|
||||
process.wait()
|
||||
except Exception:
|
||||
pass
|
||||
self.logger.info("Package install SSE stream closed")
|
||||
226
web_utils/plugin_utils.py
Normal file
226
web_utils/plugin_utils.py
Normal file
@@ -0,0 +1,226 @@
|
||||
"""plugin_utils.py - Plugin management web API endpoints."""
|
||||
|
||||
import json
|
||||
import logging
|
||||
from urllib.parse import parse_qs, urlparse
|
||||
|
||||
from logger import Logger
|
||||
|
||||
logger = Logger(name="plugin_utils", level=logging.DEBUG)
|
||||
|
||||
|
||||
class PluginUtils:
    """Web API handlers for plugin management (list, config, toggle, install)."""

    def __init__(self, shared_data):
        self.shared_data = shared_data

    @property
    def _mgr(self):
        # Plugin manager may be absent during early startup; callers must
        # handle None.
        return getattr(self.shared_data, 'plugin_manager', None)

    def _write_json(self, handler, data, status=200):
        """Serialize *data* and send it as a JSON response with *status*."""
        payload = json.dumps(data, ensure_ascii=False).encode("utf-8")
        handler.send_response(status)
        handler.send_header("Content-Type", "application/json; charset=utf-8")
        handler.send_header("Content-Length", str(len(payload)))
        handler.end_headers()
        try:
            handler.wfile.write(payload)
        except (BrokenPipeError, ConnectionResetError):
            pass  # client went away; nothing to do

    # ── GET endpoints ────────────────────────────────────────────────

    def list_plugins(self, handler):
        """GET /api/plugins/list - All plugins with status."""
        try:
            mgr = self._mgr
            if not mgr:
                # No manager yet: report an empty plugin list, not an error.
                self._write_json(handler, {"status": "ok", "data": []})
                return

            plugins = mgr.get_all_status()
            self._write_json(handler, {"status": "ok", "data": plugins})
        except Exception as e:
            logger.error(f"list_plugins failed: {e}")
            self._write_json(handler, {"status": "error", "message": "Internal server error"}, 500)

    def get_plugin_config(self, handler):
        """GET /api/plugins/config?id=<plugin_id> - Config schema + current values."""
        try:
            query = urlparse(handler.path).query
            params = parse_qs(query)
            plugin_id = params.get("id", [None])[0]

            if not plugin_id:
                self._write_json(handler, {"status": "error", "message": "Missing 'id' parameter"}, 400)
                return

            mgr = self._mgr
            if not mgr:
                self._write_json(handler, {"status": "error", "message": "Plugin manager not available"}, 503)
                return

            # Metadata provides the schema; falls back to the DB record.
            # NOTE: reaches into mgr._meta (private) — mirrors existing usage.
            meta = mgr._meta.get(plugin_id)
            if not meta:
                db_rec = self.shared_data.db.get_plugin_config(plugin_id)
                if db_rec:
                    meta = db_rec.get("meta", {})
                else:
                    self._write_json(handler, {"status": "error", "message": "Plugin not found"}, 404)
                    return

            schema = meta.get("config_schema", {})
            current_values = mgr.get_config(plugin_id)

            self._write_json(handler, {
                "status": "ok",
                "plugin_id": plugin_id,
                "schema": schema,
                "values": current_values,
            })
        except Exception as e:
            logger.error(f"get_plugin_config failed: {e}")
            self._write_json(handler, {"status": "error", "message": "Internal server error"}, 500)

    def get_plugin_logs(self, handler):
        """GET /api/plugins/logs?id=<plugin_id> - Recent log lines (placeholder)."""
        try:
            query = urlparse(handler.path).query
            params = parse_qs(query)
            plugin_id = params.get("id", [None])[0]

            if not plugin_id:
                self._write_json(handler, {"status": "error", "message": "Missing 'id' parameter"}, 400)
                return

            # Placeholder: log filtering (grep main log for [plugin.<id>])
            # can be added later; for now return an empty list.
            self._write_json(handler, {
                "status": "ok",
                "plugin_id": plugin_id,
                "logs": [],
                "message": "Log filtering available via console SSE with [plugin.{id}] prefix"
            })
        except Exception as e:
            logger.error(f"get_plugin_logs failed: {e}")
            self._write_json(handler, {"status": "error", "message": "Internal server error"}, 500)

    # ── POST endpoints (JSON body) ───────────────────────────────────

    def toggle_plugin(self, data: dict) -> dict:
        """POST /api/plugins/toggle - {id, enabled}"""
        try:
            plugin_id = data.get("id")
            enabled = data.get("enabled")

            if not plugin_id:
                return {"status": "error", "message": "Missing 'id' parameter"}
            if enabled is None:
                return {"status": "error", "message": "Missing 'enabled' parameter"}

            mgr = self._mgr
            if not mgr:
                return {"status": "error", "message": "Plugin manager not available"}

            # int() first so "0"/"1" strings coerce correctly before bool().
            mgr.toggle_plugin(plugin_id, bool(int(enabled)))

            return {
                "status": "ok",
                "plugin_id": plugin_id,
                "enabled": bool(int(enabled)),
            }
        except Exception as e:
            logger.error(f"toggle_plugin failed: {e}")
            return {"status": "error", "message": "Internal server error"}

    def save_config(self, data: dict) -> dict:
        """POST /api/plugins/config - {id, config: {...}}"""
        try:
            plugin_id = data.get("id")
            config = data.get("config")

            if not plugin_id:
                return {"status": "error", "message": "Missing 'id' parameter"}
            if config is None or not isinstance(config, dict):
                return {"status": "error", "message": "Missing or invalid 'config' parameter"}

            mgr = self._mgr
            if not mgr:
                return {"status": "error", "message": "Plugin manager not available"}

            mgr.save_config(plugin_id, config)

            return {"status": "ok", "plugin_id": plugin_id}
        except ValueError as e:
            # Validation errors from the manager are surfaced verbatim.
            return {"status": "error", "message": str(e)}
        except Exception as e:
            logger.error(f"save_config failed: {e}")
            return {"status": "error", "message": "Internal server error"}

    def uninstall_plugin(self, data: dict) -> dict:
        """POST /api/plugins/uninstall - {id}"""
        try:
            plugin_id = data.get("id")
            if not plugin_id:
                return {"status": "error", "message": "Missing 'id' parameter"}

            mgr = self._mgr
            if not mgr:
                return {"status": "error", "message": "Plugin manager not available"}

            return mgr.uninstall(plugin_id)
        except Exception as e:
            logger.error(f"uninstall_plugin failed: {e}")
            return {"status": "error", "message": "Internal server error"}

    # ── MULTIPART endpoints ──────────────────────────────────────────

    def install_plugin(self, handler):
        """POST /api/plugins/install - multipart upload of .zip

        BUGFIX: the previous parser stripped the payload tail with
        rstrip(b'\\r\\n--'), which removes ANY trailing run of the bytes
        '\\r', '\\n', '-' and corrupts zip files ending in them. Now only
        the single CRLF preceding the next boundary marker is removed.
        """
        try:
            mgr = self._mgr
            if not mgr:
                self._write_json(handler, {"status": "error", "message": "Plugin manager not available"}, 503)
                return

            content_type = handler.headers.get('Content-Type', '')
            content_length = int(handler.headers.get('Content-Length', 0))

            if content_length <= 0 or content_length > 10 * 1024 * 1024:  # 10MB max
                self._write_json(handler, {"status": "error", "message": "Invalid file size (max 10MB)"}, 400)
                return

            body = handler.rfile.read(content_length)

            # Extract zip bytes from multipart form data.
            zip_bytes = None
            if 'multipart' in content_type:
                boundary = None
                if 'boundary=' in content_type:
                    # Tolerate quoted boundaries and trailing parameters
                    # (RFC 2046 allows boundary="..."; charset=... etc.).
                    token = content_type.split('boundary=', 1)[1]
                    boundary = token.split(';')[0].strip().strip('"').encode()
                if boundary:
                    for part in body.split(b'--' + boundary):
                        if b'filename=' in part and b'.zip' in part.lower():
                            if b'\r\n\r\n' in part:
                                file_data = part.split(b'\r\n\r\n', 1)[1]
                                # Drop exactly the CRLF that precedes the
                                # next boundary; never touch payload bytes.
                                if file_data.endswith(b'\r\n'):
                                    file_data = file_data[:-2]
                                zip_bytes = file_data
                            break

            if not zip_bytes:
                # Maybe raw zip upload (no multipart) — check the magic bytes.
                if body[:4] == b'PK\x03\x04':
                    zip_bytes = body
                else:
                    self._write_json(handler, {"status": "error", "message": "No .zip file found in upload"}, 400)
                    return

            result = mgr.install_from_zip(zip_bytes)
            status_code = 200 if result.get("status") == "ok" else 400
            self._write_json(handler, result, status_code)

        except Exception as e:
            logger.error(f"install_plugin failed: {e}")
            self._write_json(handler, {"status": "error", "message": "Internal server error"}, 500)
|
||||
@@ -1,3 +1,4 @@
|
||||
"""rl_utils.py - Backend utilities for RL/AI dashboard endpoints."""
|
||||
import json
|
||||
from typing import Any, Dict, List
|
||||
|
||||
|
||||
222
web_utils/schedule_utils.py
Normal file
222
web_utils/schedule_utils.py
Normal file
@@ -0,0 +1,222 @@
|
||||
"""schedule_utils.py - Schedule and trigger management endpoints."""
|
||||
from __future__ import annotations
|
||||
import json
|
||||
import logging
|
||||
from typing import Any, Dict
|
||||
|
||||
from logger import Logger
|
||||
|
||||
logger = Logger(name="schedule_utils.py", level=logging.DEBUG)
|
||||
|
||||
|
||||
class ScheduleUtils:
|
||||
"""Utilities for schedule and trigger CRUD operations."""
|
||||
|
||||
def __init__(self, shared_data):
|
||||
self.logger = logger
|
||||
self.shared_data = shared_data
|
||||
|
||||
# =========================================================================
|
||||
# SCHEDULE ENDPOINTS
|
||||
# =========================================================================
|
||||
|
||||
def list_schedules(self, data: Dict) -> Dict:
|
||||
"""Return all schedules."""
|
||||
try:
|
||||
schedules = self.shared_data.db.list_schedules()
|
||||
return {"status": "success", "data": schedules}
|
||||
except Exception as e:
|
||||
self.logger.error(f"list_schedules error: {e}")
|
||||
return {"status": "error", "message": str(e)}
|
||||
|
||||
def create_schedule(self, data: Dict) -> Dict:
|
||||
"""Create a new schedule entry."""
|
||||
try:
|
||||
script_name = data.get("script_name")
|
||||
schedule_type = data.get("schedule_type")
|
||||
|
||||
if not script_name:
|
||||
return {"status": "error", "message": "script_name is required"}
|
||||
if schedule_type not in ("recurring", "oneshot"):
|
||||
return {"status": "error", "message": "schedule_type must be 'recurring' or 'oneshot'"}
|
||||
|
||||
interval_seconds = None
|
||||
run_at = None
|
||||
|
||||
if schedule_type == "recurring":
|
||||
interval_seconds = data.get("interval_seconds")
|
||||
if interval_seconds is None:
|
||||
return {"status": "error", "message": "interval_seconds is required for recurring schedules"}
|
||||
interval_seconds = int(interval_seconds)
|
||||
if interval_seconds < 30:
|
||||
return {"status": "error", "message": "interval_seconds must be at least 30"}
|
||||
else:
|
||||
run_at = data.get("run_at")
|
||||
if not run_at:
|
||||
return {"status": "error", "message": "run_at is required for oneshot schedules"}
|
||||
|
||||
args = data.get("args", "")
|
||||
conditions = data.get("conditions")
|
||||
if conditions and isinstance(conditions, dict):
|
||||
conditions = json.dumps(conditions)
|
||||
|
||||
new_id = self.shared_data.db.add_schedule(
|
||||
script_name=script_name,
|
||||
schedule_type=schedule_type,
|
||||
interval_seconds=interval_seconds,
|
||||
run_at=run_at,
|
||||
args=args,
|
||||
conditions=conditions,
|
||||
)
|
||||
return {"status": "success", "data": {"id": new_id}, "message": "Schedule created"}
|
||||
except Exception as e:
|
||||
self.logger.error(f"create_schedule error: {e}")
|
||||
return {"status": "error", "message": str(e)}
|
||||
|
||||
def update_schedule(self, data: Dict) -> Dict:
    """Apply partial updates to a schedule row; data['id'] selects the row.

    Every key other than 'id' is forwarded to the DB layer as a column update.
    """
    try:
        row_id = data.get("id")
        if row_id is None:
            return {"status": "error", "message": "id is required"}

        updates = {key: value for key, value in data.items() if key != "id"}
        # Serialize a condition object before it reaches the DB.
        if isinstance(updates.get("conditions"), dict):
            updates["conditions"] = json.dumps(updates["conditions"])

        self.shared_data.db.update_schedule(int(row_id), **updates)
        return {"status": "success", "message": "Schedule updated"}
    except Exception as e:
        self.logger.error(f"update_schedule error: {e}")
        return {"status": "error", "message": str(e)}
|
||||
|
||||
def delete_schedule(self, data: Dict) -> Dict:
    """Remove the schedule row whose primary key is data['id']."""
    try:
        row_id = data.get("id")
        if row_id is None:
            return {"status": "error", "message": "id is required"}

        self.shared_data.db.delete_schedule(int(row_id))
        return {"status": "success", "message": "Schedule deleted"}
    except Exception as e:
        self.logger.error(f"delete_schedule error: {e}")
        return {"status": "error", "message": str(e)}
|
||||
|
||||
def toggle_schedule(self, data: Dict) -> Dict:
    """Flip a schedule's enabled state; requires 'id' and 'enabled' in the payload."""
    try:
        row_id = data.get("id")
        flag = data.get("enabled")
        if row_id is None:
            return {"status": "error", "message": "id is required"}
        if flag is None:
            return {"status": "error", "message": "enabled is required"}

        self.shared_data.db.toggle_schedule(int(row_id), bool(flag))
        state = "enabled" if flag else "disabled"
        return {"status": "success", "message": f"Schedule {state}"}
    except Exception as e:
        self.logger.error(f"toggle_schedule error: {e}")
        return {"status": "error", "message": str(e)}
|
||||
|
||||
# =========================================================================
|
||||
# TRIGGER ENDPOINTS
|
||||
# =========================================================================
|
||||
|
||||
def list_triggers(self, data: Dict) -> Dict:
    """Return every trigger row known to the database."""
    try:
        rows = self.shared_data.db.list_triggers()
        return {"status": "success", "data": rows}
    except Exception as e:
        self.logger.error(f"list_triggers error: {e}")
        return {"status": "error", "message": str(e)}
|
||||
|
||||
def create_trigger(self, data: Dict) -> Dict:
    """Create a trigger: requires 'script_name', 'trigger_name' and a dict
    'conditions'; 'args' and 'cooldown_seconds' (default 60) are optional.
    """
    try:
        script = data.get("script_name")
        name = data.get("trigger_name")
        cond = data.get("conditions")

        if not script:
            return {"status": "error", "message": "script_name is required"}
        if not name:
            return {"status": "error", "message": "trigger_name is required"}
        if not cond or not isinstance(cond, dict):
            return {"status": "error", "message": "conditions must be a JSON object"}

        new_id = self.shared_data.db.add_trigger(
            script_name=script,
            trigger_name=name,
            conditions=json.dumps(cond),
            args=data.get("args", ""),
            cooldown_seconds=int(data.get("cooldown_seconds", 60)),
        )
        return {"status": "success", "data": {"id": new_id}, "message": "Trigger created"}
    except Exception as e:
        self.logger.error(f"create_trigger error: {e}")
        return {"status": "error", "message": str(e)}
|
||||
|
||||
def update_trigger(self, data: Dict) -> Dict:
    """Apply partial updates to a trigger row; data['id'] selects the row."""
    try:
        row_id = data.get("id")
        if row_id is None:
            return {"status": "error", "message": "id is required"}

        updates = {key: value for key, value in data.items() if key != "id"}
        # Serialize a condition object before it reaches the DB.
        if isinstance(updates.get("conditions"), dict):
            updates["conditions"] = json.dumps(updates["conditions"])

        self.shared_data.db.update_trigger(int(row_id), **updates)
        return {"status": "success", "message": "Trigger updated"}
    except Exception as e:
        self.logger.error(f"update_trigger error: {e}")
        return {"status": "error", "message": str(e)}
|
||||
|
||||
def delete_trigger(self, data: Dict) -> Dict:
    """Remove the trigger row whose primary key is data['id']."""
    try:
        row_id = data.get("id")
        if row_id is None:
            return {"status": "error", "message": "id is required"}

        self.shared_data.db.delete_trigger(int(row_id))
        return {"status": "success", "message": "Trigger deleted"}
    except Exception as e:
        self.logger.error(f"delete_trigger error: {e}")
        return {"status": "error", "message": str(e)}
|
||||
|
||||
def toggle_trigger(self, data: Dict) -> Dict:
    """Flip a trigger's enabled state; the DB column stores 1/0."""
    try:
        row_id = data.get("id")
        flag = data.get("enabled")
        if row_id is None:
            return {"status": "error", "message": "id is required"}
        if flag is None:
            return {"status": "error", "message": "enabled is required"}

        self.shared_data.db.update_trigger(int(row_id), enabled=1 if flag else 0)
        state = "enabled" if flag else "disabled"
        return {"status": "success", "message": f"Trigger {state}"}
    except Exception as e:
        self.logger.error(f"toggle_trigger error: {e}")
        return {"status": "error", "message": str(e)}
|
||||
|
||||
def test_trigger(self, data: Dict) -> Dict:
|
||||
"""Evaluate trigger conditions and return the result."""
|
||||
try:
|
||||
conditions = data.get("conditions")
|
||||
if not conditions or not isinstance(conditions, dict):
|
||||
return {"status": "error", "message": "conditions must be a JSON object"}
|
||||
|
||||
from script_scheduler import evaluate_conditions
|
||||
result = evaluate_conditions(conditions, self.shared_data.db)
|
||||
return {"status": "success", "data": {"result": result}}
|
||||
except Exception as e:
|
||||
self.logger.error(f"test_trigger error: {e}")
|
||||
return {"status": "error", "message": str(e)}
|
||||
@@ -1,8 +1,4 @@
|
||||
# web_utils/script_utils.py
|
||||
"""
|
||||
Script launcher and execution utilities.
|
||||
Handles script management, execution, monitoring, and output capture.
|
||||
"""
|
||||
"""script_utils.py - Script management, execution, monitoring, and output capture."""
|
||||
from __future__ import annotations
|
||||
import json
|
||||
import subprocess
|
||||
@@ -97,12 +93,82 @@ import logging
|
||||
from logger import Logger
|
||||
logger = Logger(name="script_utils.py", level=logging.DEBUG)
|
||||
|
||||
# AST parse cache: {path: (mtime, format)} - avoids re-parsing on every list_scripts call
|
||||
_format_cache: dict = {}
|
||||
_vars_cache: dict = {}
|
||||
_MAX_CACHE_ENTRIES = 200
|
||||
|
||||
|
||||
def _detect_script_format(script_path: str) -> str:
|
||||
"""Check if a script uses Bjorn action format (has b_class) or is a free script. Cached by mtime."""
|
||||
try:
|
||||
mtime = os.path.getmtime(script_path)
|
||||
cached = _format_cache.get(script_path)
|
||||
if cached and cached[0] == mtime:
|
||||
return cached[1]
|
||||
except OSError:
|
||||
return "free"
|
||||
|
||||
fmt = "free"
|
||||
try:
|
||||
with open(script_path, "r", encoding="utf-8") as f:
|
||||
tree = ast.parse(f.read(), filename=script_path)
|
||||
for node in ast.iter_child_nodes(tree):
|
||||
if isinstance(node, ast.Assign):
|
||||
for target in node.targets:
|
||||
if isinstance(target, ast.Name) and target.id == "b_class":
|
||||
fmt = "bjorn"
|
||||
break
|
||||
if fmt == "bjorn":
|
||||
break
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
if len(_format_cache) >= _MAX_CACHE_ENTRIES:
|
||||
_format_cache.clear()
|
||||
_format_cache[script_path] = (mtime, fmt)
|
||||
return fmt
|
||||
|
||||
|
||||
def _extract_module_vars(script_path: str, *var_names: str) -> dict:
    """Read selected top-level constants from a Python file without executing it.

    Only single-target `name = <literal>` assignments are captured, via
    ast.literal_eval. Results are cached on (path, requested names) and
    invalidated when the file's mtime changes. Unreadable paths yield {}.
    """
    try:
        stamp = os.path.getmtime(script_path)
    except OSError:
        return {}

    key = (script_path, var_names)
    hit = _vars_cache.get(key)
    if hit and hit[0] == stamp:
        return hit[1]

    found: dict = {}
    try:
        with open(script_path, "r", encoding="utf-8") as fh:
            module = ast.parse(fh.read(), filename=script_path)
        for node in ast.iter_child_nodes(module):
            if not (isinstance(node, ast.Assign) and len(node.targets) == 1):
                continue
            target = node.targets[0]
            if not (isinstance(target, ast.Name) and target.id in var_names):
                continue
            try:
                found[target.id] = ast.literal_eval(node.value)
            except Exception:
                pass  # Non-literal RHS: skip this variable.
    except Exception:
        pass  # Unparseable file: return whatever was collected (nothing).

    if len(_vars_cache) >= _MAX_CACHE_ENTRIES:
        _vars_cache.clear()
    _vars_cache[key] = (stamp, found)
    return found
|
||||
|
||||
|
||||
class ScriptUtils:
|
||||
"""Utilities for script management and execution."""
|
||||
|
||||
def __init__(self, shared_data):
|
||||
self.logger = logger
|
||||
self.shared_data = shared_data
|
||||
self._last_custom_scan = 0.0
|
||||
|
||||
def get_script_description(self, script_path: Path) -> str:
|
||||
"""Extract description from script comments."""
|
||||
@@ -126,16 +192,74 @@ class ScriptUtils:
|
||||
self.logger.error(f"Error reading script description: {e}")
|
||||
return "Error reading description"
|
||||
|
||||
def _resolve_action_path(self, b_module: str) -> str:
|
||||
"""Resolve filesystem path for an action module (handles custom/ prefix)."""
|
||||
return os.path.join(self.shared_data.actions_dir, f"{b_module}.py")
|
||||
|
||||
def _auto_register_custom_scripts(self, known_modules: set):
    """Register any .py file in custom_scripts_dir that the DB does not know yet.

    Metadata is extracted statically (never executed). Throttled to at most
    one directory scan every 30 seconds.
    """
    now = time.time()
    if now - self._last_custom_scan < 30:
        return
    self._last_custom_scan = now

    root = self.shared_data.custom_scripts_dir
    if not os.path.isdir(root):
        return

    for entry in os.listdir(root):
        if not entry.endswith(".py") or entry == "__init__.py":
            continue
        stem = entry[:-3]
        module_key = f"custom/{stem}"
        if module_key in known_modules:
            continue

        # New file on disk: derive metadata and insert a DB row for it.
        path = os.path.join(root, entry)
        fmt = _detect_script_format(path)
        meta = _extract_module_vars(
            path,
            "b_class", "b_name", "b_description", "b_author",
            "b_version", "b_args", "b_tags", "b_examples", "b_icon",
        )
        try:
            self.shared_data.db.upsert_simple_action(
                b_class=meta.get("b_class", f"Custom_{stem}"),
                b_module=module_key,
                b_action="custom",
                b_name=meta.get("b_name", stem),
                b_description=meta.get("b_description", "Custom script"),
                b_author=meta.get("b_author"),
                b_version=meta.get("b_version"),
                b_icon=meta.get("b_icon"),
                b_args=json.dumps(meta["b_args"]) if "b_args" in meta else None,
                b_tags=json.dumps(meta["b_tags"]) if "b_tags" in meta else None,
                b_examples=json.dumps(meta["b_examples"]) if "b_examples" in meta else None,
                b_enabled=1,
                b_priority=50,
            )
            self.logger.info(f"Auto-registered custom script: {module_key} ({fmt})")
        except Exception as e:
            self.logger.warning(f"Failed to auto-register {module_key}: {e}")
|
||||
|
||||
def list_scripts(self) -> Dict:
|
||||
"""List all actions with metadata for the launcher."""
|
||||
try:
|
||||
actions_out: list[dict] = []
|
||||
db_actions = self.shared_data.db.list_actions()
|
||||
|
||||
# Auto-register untracked custom scripts
|
||||
known_modules = {(r.get("b_module") or "").strip() for r in db_actions}
|
||||
self._auto_register_custom_scripts(known_modules)
|
||||
# Re-query if new scripts were registered
|
||||
new_known = {(r.get("b_module") or "").strip() for r in self.shared_data.db.list_actions()}
|
||||
if new_known != known_modules:
|
||||
db_actions = self.shared_data.db.list_actions()
|
||||
|
||||
for row in db_actions:
|
||||
b_class = (row.get("b_class") or "").strip()
|
||||
b_module = (row.get("b_module") or "").strip()
|
||||
action_path = os.path.join(self.shared_data.actions_dir, f"{b_module}.py")
|
||||
action_path = self._resolve_action_path(b_module)
|
||||
|
||||
# Load b_args from DB (priority)
|
||||
db_args_raw = row.get("b_args")
|
||||
@@ -172,31 +296,48 @@ class ScriptUtils:
|
||||
except Exception:
|
||||
b_examples = None
|
||||
|
||||
# Enrich from module if available
|
||||
# Enrich metadata from module file (AST for static fields, exec only for dynamic b_args)
|
||||
try:
|
||||
if os.path.exists(action_path):
|
||||
spec = importlib.util.spec_from_file_location(b_module, action_path)
|
||||
module = importlib.util.module_from_spec(spec)
|
||||
spec.loader.exec_module(module)
|
||||
# Static metadata via AST (no exec, no sys.modules pollution)
|
||||
static_vars = _extract_module_vars(
|
||||
action_path,
|
||||
"b_name", "b_description", "b_author", "b_version",
|
||||
"b_icon", "b_docs_url", "b_examples", "b_args"
|
||||
)
|
||||
if static_vars.get("b_name"): b_name = static_vars["b_name"]
|
||||
if static_vars.get("b_description"): b_description = static_vars["b_description"]
|
||||
if static_vars.get("b_author"): b_author = static_vars["b_author"]
|
||||
if static_vars.get("b_version"): b_version = static_vars["b_version"]
|
||||
if static_vars.get("b_icon"): b_icon = static_vars["b_icon"]
|
||||
if static_vars.get("b_docs_url"): b_docs_url = static_vars["b_docs_url"]
|
||||
if static_vars.get("b_examples"): b_examples = static_vars["b_examples"]
|
||||
if static_vars.get("b_args") and not b_args:
|
||||
b_args = static_vars["b_args"]
|
||||
|
||||
# Dynamic b_args
|
||||
if hasattr(module, "compute_dynamic_b_args"):
|
||||
try:
|
||||
b_args = module.compute_dynamic_b_args(b_args or {})
|
||||
except Exception as e:
|
||||
self.logger.warning(f"compute_dynamic_b_args failed for {b_module}: {e}")
|
||||
# Only exec module if it has compute_dynamic_b_args (rare)
|
||||
# Check via simple text search first to avoid unnecessary imports
|
||||
try:
|
||||
with open(action_path, "r", encoding="utf-8") as _f:
|
||||
has_dynamic = "compute_dynamic_b_args" in _f.read()
|
||||
except Exception:
|
||||
has_dynamic = False
|
||||
|
||||
# Enrich fields
|
||||
if getattr(module, "b_name", None): b_name = module.b_name
|
||||
if getattr(module, "b_description", None): b_description = module.b_description
|
||||
if getattr(module, "b_author", None): b_author = module.b_author
|
||||
if getattr(module, "b_version", None): b_version = module.b_version
|
||||
if getattr(module, "b_icon", None): b_icon = module.b_icon
|
||||
if getattr(module, "b_docs_url", None): b_docs_url = module.b_docs_url
|
||||
if getattr(module, "b_examples", None): b_examples = module.b_examples
|
||||
if has_dynamic:
|
||||
import sys as _sys
|
||||
spec = importlib.util.spec_from_file_location(f"_tmp_{b_module}", action_path)
|
||||
module = importlib.util.module_from_spec(spec)
|
||||
spec.loader.exec_module(module)
|
||||
if hasattr(module, "compute_dynamic_b_args"):
|
||||
try:
|
||||
b_args = module.compute_dynamic_b_args(b_args or {})
|
||||
except Exception as e:
|
||||
self.logger.warning(f"compute_dynamic_b_args failed for {b_module}: {e}")
|
||||
# Remove from sys.modules to prevent accumulation
|
||||
_sys.modules.pop(f"_tmp_{b_module}", None)
|
||||
|
||||
except Exception as e:
|
||||
self.logger.warning(f"Could not import {b_module} for dynamic/meta: {e}")
|
||||
self.logger.warning(f"Could not enrich {b_module}: {e}")
|
||||
|
||||
# Parse tags
|
||||
tags_raw = row.get("b_tags")
|
||||
@@ -218,6 +359,12 @@ class ScriptUtils:
|
||||
# Icon URL
|
||||
icon_url = self._normalize_icon_url(b_icon, b_class)
|
||||
|
||||
# Custom script detection
|
||||
is_custom = b_module.startswith("custom/")
|
||||
script_format = ""
|
||||
if is_custom and os.path.exists(action_path):
|
||||
script_format = _detect_script_format(action_path)
|
||||
|
||||
# Build action info
|
||||
action_info = {
|
||||
"name": display_name,
|
||||
@@ -236,6 +383,8 @@ class ScriptUtils:
|
||||
"b_icon": icon_url,
|
||||
"b_docs_url": b_docs_url,
|
||||
"b_examples": b_examples,
|
||||
"is_custom": is_custom,
|
||||
"script_format": script_format,
|
||||
"is_running": False,
|
||||
"output": []
|
||||
}
|
||||
@@ -302,27 +451,36 @@ class ScriptUtils:
|
||||
return {"status": "error", "message": f"Action {script_key} not found"}
|
||||
|
||||
module_name = action["b_module"]
|
||||
script_path = os.path.join(self.shared_data.actions_dir, f"{module_name}.py")
|
||||
|
||||
script_path = self._resolve_action_path(module_name)
|
||||
|
||||
if not os.path.exists(script_path):
|
||||
return {"status": "error", "message": f"Script file {script_path} not found"}
|
||||
|
||||
|
||||
is_custom = module_name.startswith("custom/")
|
||||
script_format = _detect_script_format(script_path) if is_custom else "bjorn"
|
||||
|
||||
# Check if already running
|
||||
with self.shared_data.scripts_lock:
|
||||
if script_path in self.shared_data.running_scripts and \
|
||||
self.shared_data.running_scripts[script_path].get("is_running", False):
|
||||
return {"status": "error", "message": f"Script {module_name} is already running"}
|
||||
|
||||
|
||||
# Prepare environment
|
||||
env = dict(os.environ)
|
||||
env["PYTHONUNBUFFERED"] = "1"
|
||||
env["BJORN_EMBEDDED"] = "1"
|
||||
|
||||
# Start process
|
||||
cmd = ["sudo", "python3", "-u", script_path]
|
||||
|
||||
# Build command based on script format
|
||||
if script_format == "free":
|
||||
# Free scripts run directly as standalone Python
|
||||
cmd = ["sudo", "python3", "-u", script_path]
|
||||
else:
|
||||
# Bjorn-format actions go through action_runner (bootstraps shared_data)
|
||||
runner_path = os.path.join(self.shared_data.current_dir, "action_runner.py")
|
||||
cmd = ["sudo", "python3", "-u", runner_path, module_name, action["b_class"]]
|
||||
if args:
|
||||
cmd.extend(args.split())
|
||||
|
||||
|
||||
process = subprocess.Popen(
|
||||
cmd,
|
||||
stdout=subprocess.PIPE,
|
||||
@@ -330,7 +488,7 @@ class ScriptUtils:
|
||||
bufsize=1,
|
||||
universal_newlines=True,
|
||||
env=env,
|
||||
cwd=self.shared_data.actions_dir
|
||||
cwd=self.shared_data.current_dir
|
||||
)
|
||||
|
||||
# Store process info
|
||||
@@ -469,40 +627,51 @@ class ScriptUtils:
|
||||
self.logger.error(f"Error getting script output: {e}")
|
||||
return {"status": "error", "message": str(e)}
|
||||
|
||||
MAX_OUTPUT_LINES = 2000
|
||||
|
||||
def monitor_script_output(self, script_path: str, process: subprocess.Popen):
|
||||
"""Monitor script output in real-time."""
|
||||
"""Monitor script output in real-time with bounded buffer."""
|
||||
try:
|
||||
self.logger.debug(f"Starting output monitoring for: {script_path}")
|
||||
|
||||
|
||||
while True:
|
||||
line = process.stdout.readline()
|
||||
|
||||
|
||||
if not line and process.poll() is not None:
|
||||
break
|
||||
|
||||
|
||||
if line:
|
||||
line = line.rstrip()
|
||||
with self.shared_data.scripts_lock:
|
||||
if script_path in self.shared_data.running_scripts:
|
||||
self.shared_data.running_scripts[script_path]["output"].append(line)
|
||||
self.logger.debug(f"[{os.path.basename(script_path)}] {line}")
|
||||
|
||||
# Process ended
|
||||
output = self.shared_data.running_scripts[script_path]["output"]
|
||||
output.append(line)
|
||||
# Cap output to prevent unbounded memory growth
|
||||
if len(output) > self.MAX_OUTPUT_LINES:
|
||||
del output[:len(output) - self.MAX_OUTPUT_LINES]
|
||||
|
||||
# Process ended - close stdout FD explicitly
|
||||
if process.stdout:
|
||||
process.stdout.close()
|
||||
|
||||
return_code = process.poll()
|
||||
with self.shared_data.scripts_lock:
|
||||
if script_path in self.shared_data.running_scripts:
|
||||
info = self.shared_data.running_scripts[script_path]
|
||||
info["process"] = None
|
||||
info["is_running"] = False
|
||||
|
||||
|
||||
if return_code == 0:
|
||||
info["output"].append("Script completed successfully")
|
||||
else:
|
||||
info["output"].append(f"Script exited with code {return_code}")
|
||||
info["last_error"] = f"Exit code: {return_code}"
|
||||
|
||||
|
||||
# Prune old finished entries (keep max 20 historical)
|
||||
self._prune_finished_scripts()
|
||||
|
||||
self.logger.info(f"Script {script_path} finished with code {return_code}")
|
||||
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error monitoring output for {script_path}: {e}")
|
||||
with self.shared_data.scripts_lock:
|
||||
@@ -512,6 +681,29 @@ class ScriptUtils:
|
||||
info["last_error"] = str(e)
|
||||
info["process"] = None
|
||||
info["is_running"] = False
|
||||
finally:
|
||||
# Ensure process resources are released
|
||||
try:
|
||||
if process.stdout and not process.stdout.closed:
|
||||
process.stdout.close()
|
||||
if process.poll() is None:
|
||||
process.kill()
|
||||
process.wait()
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def _prune_finished_scripts(self):
|
||||
"""Remove oldest finished script entries to bound memory. Caller must hold scripts_lock."""
|
||||
MAX_FINISHED = 20
|
||||
finished = [
|
||||
(k, v.get("start_time", 0))
|
||||
for k, v in self.shared_data.running_scripts.items()
|
||||
if not v.get("is_running", False) and v.get("process") is None
|
||||
]
|
||||
if len(finished) > MAX_FINISHED:
|
||||
finished.sort(key=lambda x: x[1])
|
||||
for k, _ in finished[:len(finished) - MAX_FINISHED]:
|
||||
del self.shared_data.running_scripts[k]
|
||||
|
||||
def upload_script(self, handler) -> None:
|
||||
"""Upload a new script file."""
|
||||
@@ -567,7 +759,7 @@ class ScriptUtils:
|
||||
script_name = data.get('script_name')
|
||||
if not script_name:
|
||||
return {"status": "error", "message": "Missing script_name"}
|
||||
|
||||
|
||||
rows = self.shared_data.db.query("SELECT * FROM scripts WHERE name=?", (script_name,))
|
||||
if not rows:
|
||||
return {"status": "error", "message": f"Script '{script_name}' not found in DB"}
|
||||
@@ -593,6 +785,116 @@ class ScriptUtils:
|
||||
self.logger.error(f"Error deleting script: {e}")
|
||||
return {"status": "error", "message": str(e)}
|
||||
|
||||
# --- Custom scripts management ---
|
||||
|
||||
def upload_custom_script(self, handler) -> None:
    """HTTP handler: accept a multipart .py upload into custom_scripts_dir
    and register it in the actions DB.

    Writes a JSON response (200 on success, 400 on validation failure,
    500 on unexpected errors) directly to the handler.
    """
    try:
        form = _MultipartForm(
            fp=handler.rfile,
            headers=handler.headers,
            environ={'REQUEST_METHOD': 'POST'}
        )

        payload = None
        status_code = 400

        if 'script_file' not in form:
            payload = {"status": "error", "message": "Missing 'script_file'"}
        else:
            item = form['script_file']
            if not item.filename.endswith('.py'):
                payload = {"status": "error", "message": "Only .py files allowed"}
            else:
                # basename() strips any client-supplied directory components.
                script_name = os.path.basename(item.filename)
                stem = script_name[:-3]
                dest = Path(self.shared_data.custom_scripts_dir) / script_name

                if dest.exists():
                    payload = {"status": "error", "message": f"Script '{script_name}' already exists. Delete it first."}
                else:
                    with open(dest, 'wb') as out:
                        out.write(item.file.read())

                    # Static (AST-based) metadata extraction — never executes the upload.
                    fmt = _detect_script_format(str(dest))
                    meta = _extract_module_vars(
                        str(dest),
                        "b_class", "b_name", "b_description", "b_author",
                        "b_version", "b_args", "b_tags", "b_examples", "b_icon"
                    )

                    b_class = meta.get("b_class", f"Custom_{stem}")
                    module_key = f"custom/{stem}"

                    self.shared_data.db.upsert_simple_action(
                        b_class=b_class,
                        b_module=module_key,
                        b_action="custom",
                        b_name=meta.get("b_name", stem),
                        b_description=meta.get("b_description", "Custom script"),
                        b_author=meta.get("b_author"),
                        b_version=meta.get("b_version"),
                        b_icon=meta.get("b_icon"),
                        b_args=json.dumps(meta["b_args"]) if "b_args" in meta else None,
                        b_tags=json.dumps(meta["b_tags"]) if "b_tags" in meta else None,
                        b_examples=json.dumps(meta["b_examples"]) if "b_examples" in meta else None,
                        b_enabled=1,
                        b_priority=50,
                    )

                    payload = {
                        "status": "success",
                        "message": f"Custom script '{script_name}' uploaded ({fmt} format).",
                        "data": {"b_class": b_class, "b_module": module_key, "format": fmt}
                    }
                    status_code = 200

        handler.send_response(status_code)
        handler.send_header('Content-Type', 'application/json')
        handler.end_headers()
        handler.wfile.write(json.dumps(payload).encode('utf-8'))
    except Exception as e:
        self.logger.error(f"Error uploading custom script: {e}")
        handler.send_response(500)
        handler.send_header('Content-Type', 'application/json')
        handler.end_headers()
        handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode('utf-8'))
|
||||
|
||||
def delete_custom_script(self, data: Dict) -> Dict:
    """Delete a custom script's file and its DB row.

    Built-in actions are protected; a script with a live process must be
    stopped before deletion.
    """
    try:
        class_name = data.get("script_name") or data.get("b_class")
        if not class_name:
            return {"status": "error", "message": "Missing script_name"}

        row = self.shared_data.db.get_action_by_class(class_name)
        if not row:
            return {"status": "error", "message": f"Action '{class_name}' not found"}

        module_key = row.get("b_module", "")
        # Only rows flagged 'custom' (or living under custom/) may be removed.
        if row.get("b_action") != "custom" and not module_key.startswith("custom/"):
            return {"status": "error", "message": "Cannot delete built-in actions"}

        path = self._resolve_action_path(module_key)

        # Refuse while the script still has a live process attached.
        with self.shared_data.scripts_lock:
            entry = self.shared_data.running_scripts.get(path)
            if entry and entry.get("is_running", False):
                return {"status": "error", "message": f"Script '{class_name}' is currently running. Stop it first."}

        if os.path.exists(path):
            os.remove(path)
        self.shared_data.db.delete_action(class_name)

        return {"status": "success", "message": f"Custom script '{class_name}' deleted."}
    except Exception as e:
        self.logger.error(f"Error deleting custom script: {e}")
        return {"status": "error", "message": str(e)}
|
||||
|
||||
def upload_project(self, handler) -> None:
|
||||
"""Upload a project with multiple files."""
|
||||
try:
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
"""
|
||||
Sentinel web API endpoints.
|
||||
"""
|
||||
"""sentinel_utils.py - Sentinel web API endpoints."""
|
||||
import json
|
||||
import logging
|
||||
from typing import Dict
|
||||
@@ -21,7 +19,7 @@ class SentinelUtils:
|
||||
# ── GET endpoints (handler signature) ───────────────────────────────
|
||||
|
||||
def get_status(self, handler):
|
||||
"""GET /api/sentinel/status — overall sentinel state + unread count."""
|
||||
"""GET /api/sentinel/status - overall sentinel state + unread count."""
|
||||
engine = self._engine
|
||||
if engine:
|
||||
data = engine.get_status()
|
||||
@@ -30,7 +28,7 @@ class SentinelUtils:
|
||||
self._send_json(handler, data)
|
||||
|
||||
def get_events(self, handler):
|
||||
"""GET /api/sentinel/events — recent events with optional filters."""
|
||||
"""GET /api/sentinel/events - recent events with optional filters."""
|
||||
try:
|
||||
from urllib.parse import urlparse, parse_qs
|
||||
qs = parse_qs(urlparse(handler.path).query)
|
||||
@@ -68,7 +66,7 @@ class SentinelUtils:
|
||||
return sql, params
|
||||
|
||||
def get_rules(self, handler):
|
||||
"""GET /api/sentinel/rules — all rules."""
|
||||
"""GET /api/sentinel/rules - all rules."""
|
||||
try:
|
||||
rows = self.shared_data.db.query(
|
||||
"SELECT * FROM sentinel_rules ORDER BY id"
|
||||
@@ -79,7 +77,7 @@ class SentinelUtils:
|
||||
self._send_json(handler, {"rules": []})
|
||||
|
||||
def get_devices(self, handler):
|
||||
"""GET /api/sentinel/devices — known device baselines."""
|
||||
"""GET /api/sentinel/devices - known device baselines."""
|
||||
try:
|
||||
rows = self.shared_data.db.query(
|
||||
"SELECT * FROM sentinel_devices ORDER BY last_seen DESC"
|
||||
@@ -90,7 +88,7 @@ class SentinelUtils:
|
||||
self._send_json(handler, {"devices": []})
|
||||
|
||||
def get_arp_table(self, handler):
|
||||
"""GET /api/sentinel/arp — ARP cache for spoof analysis."""
|
||||
"""GET /api/sentinel/arp - ARP cache for spoof analysis."""
|
||||
try:
|
||||
rows = self.shared_data.db.query(
|
||||
"SELECT * FROM sentinel_arp_cache ORDER BY last_seen DESC LIMIT 200"
|
||||
@@ -103,7 +101,7 @@ class SentinelUtils:
|
||||
# ── POST endpoints (JSON data signature) ────────────────────────────
|
||||
|
||||
def toggle_sentinel(self, data: Dict) -> Dict:
|
||||
"""POST /api/sentinel/toggle — enable/disable sentinel."""
|
||||
"""POST /api/sentinel/toggle - enable/disable sentinel."""
|
||||
enabled = bool(data.get("enabled", False))
|
||||
self.shared_data.sentinel_enabled = enabled
|
||||
engine = self._engine
|
||||
@@ -115,7 +113,7 @@ class SentinelUtils:
|
||||
return {"status": "ok", "enabled": enabled}
|
||||
|
||||
def acknowledge_event(self, data: Dict) -> Dict:
|
||||
"""POST /api/sentinel/ack — acknowledge single or all events."""
|
||||
"""POST /api/sentinel/ack - acknowledge single or all events."""
|
||||
try:
|
||||
event_id = data.get("id")
|
||||
if data.get("all"):
|
||||
@@ -134,7 +132,7 @@ class SentinelUtils:
|
||||
return {"status": "error", "message": str(e)}
|
||||
|
||||
def clear_events(self, data: Dict) -> Dict:
|
||||
"""POST /api/sentinel/clear — clear all events."""
|
||||
"""POST /api/sentinel/clear - clear all events."""
|
||||
try:
|
||||
self.shared_data.db.execute("DELETE FROM sentinel_events")
|
||||
return {"status": "ok", "message": "Events cleared"}
|
||||
@@ -142,7 +140,7 @@ class SentinelUtils:
|
||||
return {"status": "error", "message": str(e)}
|
||||
|
||||
def upsert_rule(self, data: Dict) -> Dict:
|
||||
"""POST /api/sentinel/rule — create or update a rule."""
|
||||
"""POST /api/sentinel/rule - create or update a rule."""
|
||||
try:
|
||||
rule = data.get("rule", data)
|
||||
if not rule.get("name") or not rule.get("trigger_type"):
|
||||
@@ -182,7 +180,7 @@ class SentinelUtils:
|
||||
return {"status": "error", "message": str(e)}
|
||||
|
||||
def delete_rule(self, data: Dict) -> Dict:
|
||||
"""POST /api/sentinel/rule/delete — delete a rule."""
|
||||
"""POST /api/sentinel/rule/delete - delete a rule."""
|
||||
try:
|
||||
rule_id = data.get("id")
|
||||
if not rule_id:
|
||||
@@ -195,7 +193,7 @@ class SentinelUtils:
|
||||
return {"status": "error", "message": str(e)}
|
||||
|
||||
def update_device(self, data: Dict) -> Dict:
|
||||
"""POST /api/sentinel/device — update device baseline."""
|
||||
"""POST /api/sentinel/device - update device baseline."""
|
||||
try:
|
||||
mac = data.get("mac_address", "").lower()
|
||||
if not mac:
|
||||
@@ -232,7 +230,7 @@ class SentinelUtils:
|
||||
}
|
||||
|
||||
def get_notifier_config(self, handler) -> None:
|
||||
"""GET /api/sentinel/notifiers — return current notifier config."""
|
||||
"""GET /api/sentinel/notifiers - return current notifier config."""
|
||||
cfg = self.shared_data.config
|
||||
notifiers = {}
|
||||
for frontend_key, cfg_key in self._NOTIFIER_KEY_MAP.items():
|
||||
@@ -242,7 +240,7 @@ class SentinelUtils:
|
||||
self._send_json(handler, {"status": "ok", "notifiers": notifiers})
|
||||
|
||||
def save_notifier_config(self, data: Dict) -> Dict:
|
||||
"""POST /api/sentinel/notifiers — save notification channel config."""
|
||||
"""POST /api/sentinel/notifiers - save notification channel config."""
|
||||
try:
|
||||
notifiers = data.get("notifiers", {})
|
||||
cfg = self.shared_data.config
|
||||
@@ -288,7 +286,7 @@ class SentinelUtils:
|
||||
# ── LLM-powered endpoints ────────────────────────────────────────────
|
||||
|
||||
def analyze_events(self, data: Dict) -> Dict:
|
||||
"""POST /api/sentinel/analyze — AI analysis of selected events."""
|
||||
"""POST /api/sentinel/analyze - AI analysis of selected events."""
|
||||
try:
|
||||
event_ids = data.get("event_ids", [])
|
||||
if not event_ids:
|
||||
@@ -356,7 +354,7 @@ class SentinelUtils:
|
||||
return {"status": "error", "message": str(e)}
|
||||
|
||||
def summarize_events(self, data: Dict) -> Dict:
|
||||
"""POST /api/sentinel/summarize — AI summary of recent unread events."""
|
||||
"""POST /api/sentinel/summarize - AI summary of recent unread events."""
|
||||
try:
|
||||
limit = min(int(data.get("limit", 50)), 100)
|
||||
rows = self.shared_data.db.query(
|
||||
@@ -374,7 +372,7 @@ class SentinelUtils:
|
||||
system = (
|
||||
"You are a cybersecurity analyst. Summarize the security events below. "
|
||||
"Group by type, identify patterns, flag critical items. "
|
||||
"Be concise — max 200 words. Use bullet points."
|
||||
"Be concise - max 200 words. Use bullet points."
|
||||
)
|
||||
|
||||
prompt = (
|
||||
@@ -396,7 +394,7 @@ class SentinelUtils:
|
||||
return {"status": "error", "message": str(e)}
|
||||
|
||||
def suggest_rule(self, data: Dict) -> Dict:
|
||||
"""POST /api/sentinel/suggest-rule — AI generates a rule from description."""
|
||||
"""POST /api/sentinel/suggest-rule - AI generates a rule from description."""
|
||||
try:
|
||||
description = (data.get("description") or "").strip()
|
||||
if not description:
|
||||
|
||||
@@ -1,8 +1,4 @@
|
||||
# web_utils/studio_utils.py
|
||||
"""
|
||||
Studio visual editor utilities.
|
||||
Handles action/edge/host management for the visual workflow editor.
|
||||
"""
|
||||
"""studio_utils.py - Action/edge/host management for the visual workflow editor."""
|
||||
from __future__ import annotations
|
||||
import json
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
@@ -1,8 +1,4 @@
|
||||
# web_utils/system_utils.py
|
||||
"""
|
||||
System utilities for management operations.
|
||||
Handles system commands, service management, configuration.
|
||||
"""
|
||||
"""system_utils.py - System commands, service management, and configuration."""
|
||||
from __future__ import annotations
|
||||
import json
|
||||
import subprocess
|
||||
@@ -73,7 +69,8 @@ class SystemUtils:
|
||||
self.logger.warning(f"Failed to remove {entry.path}: {e}")
|
||||
self._send_json(handler, {"status": "success", "message": "Logs cleared successfully"})
|
||||
except Exception as e:
|
||||
self._send_json(handler, {"status": "error", "message": str(e)}, 500)
|
||||
self.logger.error(f"Error clearing logs: {e}")
|
||||
self._send_json(handler, {"status": "error", "message": "Internal server error"}, 500)
|
||||
|
||||
def initialize_db(self, handler):
|
||||
"""Initialize or prepare database schema."""
|
||||
@@ -83,7 +80,8 @@ class SystemUtils:
|
||||
self.shared_data.initialize_statistics()
|
||||
self._send_json(handler, {"status": "success", "message": "Database initialized successfully"})
|
||||
except Exception as e:
|
||||
self._send_json(handler, {"status": "error", "message": str(e)}, 500)
|
||||
self.logger.error(f"Error initializing database: {e}")
|
||||
self._send_json(handler, {"status": "error", "message": "Internal server error"}, 500)
|
||||
|
||||
def erase_bjorn_memories(self, handler):
|
||||
"""Erase all Bjorn-related memories and restart service."""
|
||||
@@ -120,7 +118,7 @@ class SystemUtils:
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({
|
||||
"status": "error",
|
||||
"message": f"Error erasing Bjorn memories: {str(e)}"
|
||||
"message": "Internal server error"
|
||||
}).encode('utf-8'))
|
||||
|
||||
def clear_netkb(self, handler, restart=True):
|
||||
@@ -134,7 +132,8 @@ class SystemUtils:
|
||||
self.restart_bjorn_service(handler)
|
||||
self._send_json(handler, {"status": "success", "message": "NetKB cleared in database"})
|
||||
except Exception as e:
|
||||
self._send_json(handler, {"status": "error", "message": str(e)}, 500)
|
||||
self.logger.error(f"Error clearing NetKB: {e}")
|
||||
self._send_json(handler, {"status": "error", "message": "Internal server error"}, 500)
|
||||
|
||||
def clear_livestatus(self, handler, restart=True):
|
||||
"""Clear live status counters."""
|
||||
@@ -195,7 +194,7 @@ class SystemUtils:
|
||||
return {"status": "success", "message": "Configuration saved"}
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error saving configuration: {e}")
|
||||
return {"status": "error", "message": str(e)}
|
||||
return {"status": "error", "message": "Internal server error"}
|
||||
|
||||
def serve_current_config(self, handler):
|
||||
"""Serve current configuration as JSON (Optimized via SharedData cache)."""
|
||||
@@ -243,10 +242,13 @@ class SystemUtils:
|
||||
handler.send_response(500)
|
||||
handler.send_header("Content-type", "application/json")
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode('utf-8'))
|
||||
handler.wfile.write(json.dumps({"status": "error", "message": "Internal server error"}).encode('utf-8'))
|
||||
|
||||
def sse_log_stream(self, handler):
|
||||
"""Stream logs using Server-Sent Events (SSE)."""
|
||||
log_file_handle = None
|
||||
max_lifetime = 1800 # 30 minutes maximum connection lifetime
|
||||
start_time = time.time()
|
||||
try:
|
||||
handler.send_response(200)
|
||||
handler.send_header("Content-Type", "text/event-stream")
|
||||
@@ -260,24 +262,45 @@ class SystemUtils:
|
||||
handler.wfile.write(b"data: Connected\n\n")
|
||||
handler.wfile.flush()
|
||||
|
||||
with open(log_file_path, 'r') as log_file:
|
||||
log_file.seek(0, os.SEEK_END)
|
||||
while True:
|
||||
line = log_file.readline()
|
||||
if line:
|
||||
message = f"data: {line.strip()}\n\n"
|
||||
handler.wfile.write(message.encode('utf-8'))
|
||||
log_file_handle = open(log_file_path, 'r')
|
||||
log_file_handle.seek(0, os.SEEK_END)
|
||||
while True:
|
||||
# Check maximum connection lifetime
|
||||
if time.time() - start_time > max_lifetime:
|
||||
self.logger.info("SSE stream reached maximum lifetime, closing")
|
||||
try:
|
||||
handler.wfile.write(b"data: Stream timeout, please reconnect\n\n")
|
||||
handler.wfile.flush()
|
||||
else:
|
||||
handler.wfile.write(b": heartbeat\n\n")
|
||||
handler.wfile.flush()
|
||||
time.sleep(1)
|
||||
except Exception:
|
||||
pass
|
||||
break
|
||||
|
||||
except (ConnectionResetError, ConnectionAbortedError, BrokenPipeError) as e:
|
||||
line = log_file_handle.readline()
|
||||
if line:
|
||||
message = f"data: {line.strip()}\n\n"
|
||||
else:
|
||||
message = ": heartbeat\n\n"
|
||||
|
||||
try:
|
||||
handler.wfile.write(message.encode('utf-8') if isinstance(message, str) else message)
|
||||
handler.wfile.flush()
|
||||
except (ConnectionResetError, ConnectionAbortedError, BrokenPipeError, OSError):
|
||||
self.logger.info("Client disconnected from SSE stream (write failed)")
|
||||
break
|
||||
|
||||
if not line:
|
||||
time.sleep(1)
|
||||
|
||||
except (ConnectionResetError, ConnectionAbortedError, BrokenPipeError):
|
||||
self.logger.info("Client disconnected from SSE stream")
|
||||
except Exception as e:
|
||||
self.logger.error(f"SSE Error: {e}")
|
||||
finally:
|
||||
if log_file_handle is not None:
|
||||
try:
|
||||
log_file_handle.close()
|
||||
except Exception:
|
||||
pass
|
||||
self.logger.info("SSE stream closed")
|
||||
|
||||
def _parse_progress(self):
|
||||
@@ -301,7 +324,7 @@ class SystemUtils:
|
||||
"status": self.shared_data.bjorn_orch_status,
|
||||
"status2": self.shared_data.bjorn_status_text2,
|
||||
|
||||
# 🟢 PROGRESS — parse "42%" / "" / 0 safely
|
||||
# 🟢 PROGRESS - parse "42%" / "" / 0 safely
|
||||
"progress": self._parse_progress(),
|
||||
|
||||
"image_path": "/bjorn_status_image?t=" + str(int(time.time())),
|
||||
@@ -358,7 +381,7 @@ class SystemUtils:
|
||||
# ----------------------------------------------------------------
|
||||
|
||||
def epd_get_layout(self, handler):
|
||||
"""GET /api/epd/layout — return current layout JSON.
|
||||
"""GET /api/epd/layout - return current layout JSON.
|
||||
|
||||
Optional query param: ?epd_type=epd2in7
|
||||
If provided, returns the layout for that EPD type (custom or built-in)
|
||||
@@ -397,7 +420,7 @@ class SystemUtils:
|
||||
self._send_json(handler, {"status": "error", "message": str(e)}, 500)
|
||||
|
||||
def epd_save_layout(self, handler, data):
|
||||
"""POST /api/epd/layout — save a custom layout."""
|
||||
"""POST /api/epd/layout - save a custom layout."""
|
||||
try:
|
||||
layout = getattr(self.shared_data, 'display_layout', None)
|
||||
if layout is None:
|
||||
@@ -413,7 +436,7 @@ class SystemUtils:
|
||||
self._send_json(handler, {"status": "error", "message": str(e)}, 500)
|
||||
|
||||
def epd_reset_layout(self, handler, data):
|
||||
"""POST /api/epd/layout/reset — reset to built-in default."""
|
||||
"""POST /api/epd/layout/reset - reset to built-in default."""
|
||||
try:
|
||||
layout = getattr(self.shared_data, 'display_layout', None)
|
||||
if layout is None:
|
||||
@@ -426,7 +449,7 @@ class SystemUtils:
|
||||
self._send_json(handler, {"status": "error", "message": str(e)}, 500)
|
||||
|
||||
def epd_list_layouts(self, handler):
|
||||
"""GET /api/epd/layouts — list available EPD types and their layouts."""
|
||||
"""GET /api/epd/layouts - list available EPD types and their layouts."""
|
||||
try:
|
||||
from display_layout import BUILTIN_LAYOUTS
|
||||
result = {}
|
||||
|
||||
@@ -1,9 +1,4 @@
|
||||
# web_utils/vuln_utils.py
|
||||
"""
|
||||
Vulnerability management and CVE enrichment utilities.
|
||||
Handles vulnerability data, CVE metadata, and enrichment from external sources.
|
||||
Optimized for low-power devices like Raspberry Pi Zero.
|
||||
"""
|
||||
"""vuln_utils.py - Vulnerability data, CVE metadata, and enrichment from external sources."""
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
@@ -545,7 +540,7 @@ class VulnUtils:
|
||||
|
||||
def _fetch_exploits_for_cve(self, cve_id: str) -> List[Dict[str, Any]]:
|
||||
"""Look up exploit data from the local exploit_feeds table.
|
||||
No external API calls — populated by serve_feed_sync().
|
||||
No external API calls - populated by serve_feed_sync().
|
||||
"""
|
||||
try:
|
||||
rows = self.shared_data.db.query(
|
||||
@@ -576,7 +571,7 @@ class VulnUtils:
|
||||
return []
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Feed sync — called by POST /api/feeds/sync
|
||||
# Feed sync - called by POST /api/feeds/sync
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
# Schema created lazily on first sync
|
||||
@@ -630,7 +625,7 @@ class VulnUtils:
|
||||
logger.debug("Failed to update feed_sync_state for %s", feed, exc_info=True)
|
||||
|
||||
def serve_feed_sync(self, handler) -> None:
|
||||
"""POST /api/feeds/sync — download CISA KEV + Exploit-DB + EPSS into local DB."""
|
||||
"""POST /api/feeds/sync - download CISA KEV + Exploit-DB + EPSS into local DB."""
|
||||
self._ensure_feed_schema()
|
||||
results: Dict[str, Any] = {}
|
||||
|
||||
@@ -639,7 +634,7 @@ class VulnUtils:
|
||||
kev_count = self._sync_cisa_kev()
|
||||
self._set_sync_state("cisa_kev", kev_count, "ok")
|
||||
results["cisa_kev"] = {"status": "ok", "count": kev_count}
|
||||
logger.info("CISA KEV synced — %d records", kev_count)
|
||||
logger.info("CISA KEV synced - %d records", kev_count)
|
||||
except Exception as e:
|
||||
self._set_sync_state("cisa_kev", 0, "error")
|
||||
results["cisa_kev"] = {"status": "error", "message": str(e)}
|
||||
@@ -650,7 +645,7 @@ class VulnUtils:
|
||||
edb_count = self._sync_exploitdb()
|
||||
self._set_sync_state("exploitdb", edb_count, "ok")
|
||||
results["exploitdb"] = {"status": "ok", "count": edb_count}
|
||||
logger.info("Exploit-DB synced — %d records", edb_count)
|
||||
logger.info("Exploit-DB synced - %d records", edb_count)
|
||||
except Exception as e:
|
||||
self._set_sync_state("exploitdb", 0, "error")
|
||||
results["exploitdb"] = {"status": "error", "message": str(e)}
|
||||
@@ -661,7 +656,7 @@ class VulnUtils:
|
||||
epss_count = self._sync_epss()
|
||||
self._set_sync_state("epss", epss_count, "ok")
|
||||
results["epss"] = {"status": "ok", "count": epss_count}
|
||||
logger.info("EPSS synced — %d records", epss_count)
|
||||
logger.info("EPSS synced - %d records", epss_count)
|
||||
except Exception as e:
|
||||
self._set_sync_state("epss", 0, "error")
|
||||
results["epss"] = {"status": "error", "message": str(e)}
|
||||
@@ -675,7 +670,7 @@ class VulnUtils:
|
||||
})
|
||||
|
||||
def serve_feed_status(self, handler) -> None:
|
||||
"""GET /api/feeds/status — return last sync timestamps and counts."""
|
||||
"""GET /api/feeds/status - return last sync timestamps and counts."""
|
||||
try:
|
||||
self._ensure_feed_schema()
|
||||
rows = self.shared_data.db.query(
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
# webutils/webenum_utils.py
|
||||
"""webenum_utils.py - REST utilities for web enumeration data."""
|
||||
from __future__ import annotations
|
||||
import json
|
||||
import base64
|
||||
@@ -208,7 +208,7 @@ class WebEnumUtils:
|
||||
|
||||
where_sql = " AND ".join(where_clauses)
|
||||
|
||||
# Main query — alias columns to match the frontend schema
|
||||
# Main query - alias columns to match the frontend schema
|
||||
results = db.query(f"""
|
||||
SELECT
|
||||
id,
|
||||
|
||||
Reference in New Issue
Block a user