mirror of
https://github.com/infinition/Bjorn.git
synced 2026-03-15 17:01:58 +00:00
Add Loki and Sentinel utility classes for web API endpoints
- Implemented LokiUtils class with GET and POST endpoints for managing scripts, jobs, and payloads.
- Added SentinelUtils class with GET and POST endpoints for managing events, rules, devices, and notifications.
- Both classes include error handling and JSON response formatting.
This commit is contained in:
185
web_utils/bifrost_utils.py
Normal file
185
web_utils/bifrost_utils.py
Normal file
@@ -0,0 +1,185 @@
|
||||
"""
|
||||
Bifrost web API endpoints.
|
||||
"""
|
||||
import json
|
||||
import logging
|
||||
from typing import Dict
|
||||
from urllib.parse import urlparse, parse_qs
|
||||
|
||||
from logger import Logger
|
||||
|
||||
logger = Logger(name="bifrost_utils", level=logging.DEBUG)
|
||||
|
||||
|
||||
class BifrostUtils:
    """Web API endpoints for the Bifrost WiFi engine.

    GET endpoints take the raw HTTP *handler* and write a JSON response
    directly; POST endpoints take a parsed JSON dict and return a
    JSON-serializable dict for the web server to send.
    """

    def __init__(self, shared_data):
        # shared_data exposes the DB handle, the config dict and, when the
        # engine is loaded, a `bifrost_engine` attribute.
        self.shared_data = shared_data

    @property
    def _engine(self):
        """The attached Bifrost engine instance, or None when not loaded."""
        return getattr(self.shared_data, 'bifrost_engine', None)

    # ── GET endpoints (handler signature) ─────────────────────

    def get_status(self, handler):
        """GET /api/bifrost/status — full engine state."""
        engine = self._engine
        if engine:
            data = engine.get_status()
        else:
            # Engine absent: answer with a fully-populated idle state so the
            # UI never has to special-case missing keys.
            data = {
                'enabled': False, 'running': False,
                'mood': 'sleeping', 'face': '(-.-) zzZ', 'voice': '',
                'channel': 0, 'num_aps': 0, 'num_handshakes': 0,
                'uptime': 0, 'epoch': 0, 'mode': 'auto',
                'last_pwnd': '', 'reward': 0,
            }
        self._send_json(handler, data)

    def get_networks(self, handler):
        """GET /api/bifrost/networks — discovered WiFi networks."""
        try:
            rows = self.shared_data.db.query(
                "SELECT * FROM bifrost_networks ORDER BY rssi DESC LIMIT 200"
            ) or []
            self._send_json(handler, {'networks': rows})
        except Exception as e:
            logger.error("get_networks error: %s", e)
            self._send_json(handler, {'networks': []})

    def get_handshakes(self, handler):
        """GET /api/bifrost/handshakes — captured handshakes."""
        try:
            rows = self.shared_data.db.query(
                "SELECT * FROM bifrost_handshakes ORDER BY captured_at DESC LIMIT 200"
            ) or []
            self._send_json(handler, {'handshakes': rows})
        except Exception as e:
            logger.error("get_handshakes error: %s", e)
            self._send_json(handler, {'handshakes': []})

    def get_activity(self, handler):
        """GET /api/bifrost/activity — recent activity feed.

        Accepts an optional ?limit=N query parameter (default 50).
        """
        try:
            qs = parse_qs(urlparse(handler.path).query)
            limit = int(qs.get('limit', [50])[0])
            rows = self.shared_data.db.query(
                "SELECT * FROM bifrost_activity ORDER BY timestamp DESC LIMIT ?",
                (limit,)
            ) or []
            self._send_json(handler, {'activity': rows})
        except Exception as e:
            logger.error("get_activity error: %s", e)
            self._send_json(handler, {'activity': []})

    def get_epochs(self, handler):
        """GET /api/bifrost/epochs — epoch history."""
        try:
            rows = self.shared_data.db.query(
                "SELECT * FROM bifrost_epochs ORDER BY id DESC LIMIT 100"
            ) or []
            self._send_json(handler, {'epochs': rows})
        except Exception as e:
            logger.error("get_epochs error: %s", e)
            self._send_json(handler, {'epochs': []})

    def get_stats(self, handler):
        """GET /api/bifrost/stats — aggregate statistics."""
        try:
            db = self.shared_data.db
            nets = db.query_one("SELECT COUNT(*) AS c FROM bifrost_networks") or {}
            shakes = db.query_one("SELECT COUNT(*) AS c FROM bifrost_handshakes") or {}
            epochs = db.query_one("SELECT COUNT(*) AS c FROM bifrost_epochs") or {}
            deauths = db.query_one(
                "SELECT COALESCE(SUM(num_deauths),0) AS c FROM bifrost_epochs"
            ) or {}
            assocs = db.query_one(
                "SELECT COALESCE(SUM(num_assocs),0) AS c FROM bifrost_epochs"
            ) or {}
            peers = db.query_one("SELECT COUNT(*) AS c FROM bifrost_peers") or {}
            self._send_json(handler, {
                'total_networks': int(nets.get('c', 0)),
                'total_handshakes': int(shakes.get('c', 0)),
                'total_epochs': int(epochs.get('c', 0)),
                'total_deauths': int(deauths.get('c', 0)),
                'total_assocs': int(assocs.get('c', 0)),
                'total_peers': int(peers.get('c', 0)),
            })
        except Exception as e:
            logger.error("get_stats error: %s", e)
            # Zeroed stats keep the dashboard rendering even on DB errors.
            self._send_json(handler, {
                'total_networks': 0, 'total_handshakes': 0,
                'total_epochs': 0, 'total_deauths': 0,
                'total_assocs': 0, 'total_peers': 0,
            })

    def get_plugins(self, handler):
        """GET /api/bifrost/plugins — loaded plugin list."""
        try:
            from bifrost.plugins import get_loaded_info
            self._send_json(handler, {'plugins': get_loaded_info()})
        except Exception as e:
            logger.error("get_plugins error: %s", e)
            self._send_json(handler, {'plugins': []})

    # ── POST endpoints (JSON data signature) ──────────────────

    def toggle_bifrost(self, data: Dict) -> Dict:
        """POST /api/bifrost/toggle — switch to/from BIFROST mode.

        BIFROST is a 4th exclusive operation mode. Enabling it stops the
        orchestrator (Manual/Auto/AI) because WiFi goes into monitor mode.
        Disabling it returns to the previous mode (defaults to AUTO).
        """
        enabled = bool(data.get('enabled', False))
        if enabled:
            # Switch to BIFROST mode (stops orchestrator, starts engine)
            self.shared_data.operation_mode = "BIFROST"
        else:
            # Leave BIFROST → return to AUTO (safest default)
            self.shared_data.operation_mode = "AUTO"
        return {'status': 'ok', 'enabled': enabled}

    def set_mode(self, data: Dict) -> Dict:
        """POST /api/bifrost/mode — set auto/manual."""
        mode = data.get('mode', 'auto')
        engine = self._engine
        if engine and engine.agent:
            engine.agent.mode = mode
        return {'status': 'ok', 'mode': mode}

    def toggle_plugin(self, data: Dict) -> Dict:
        """POST /api/bifrost/plugin/toggle — enable/disable a plugin."""
        try:
            from bifrost.plugins import toggle_plugin
            name = data.get('name', '')
            enable = bool(data.get('enabled', True))
            changed = toggle_plugin(name, enable)
            return {'status': 'ok', 'changed': changed}
        except Exception as e:
            return {'status': 'error', 'message': str(e)}

    def clear_activity(self, data: Dict) -> Dict:
        """POST /api/bifrost/activity/clear — clear activity log."""
        try:
            self.shared_data.db.execute("DELETE FROM bifrost_activity")
            return {'status': 'ok'}
        except Exception as e:
            return {'status': 'error', 'message': str(e)}

    def update_whitelist(self, data: Dict) -> Dict:
        """POST /api/bifrost/whitelist — update AP whitelist."""
        try:
            whitelist = data.get('whitelist', '')
            self.shared_data.config['bifrost_whitelist'] = whitelist
            return {'status': 'ok'}
        except Exception as e:
            return {'status': 'error', 'message': str(e)}

    # ── Helpers ───────────────────────────────────────────────

    def _send_json(self, handler, data, status=200):
        """Serialize *data* as JSON and write a complete HTTP response.

        Fix: send an explicit Content-Length header so HTTP/1.1 keep-alive
        clients can detect end-of-body without waiting for the connection
        to close.
        """
        body = json.dumps(data).encode("utf-8")
        handler.send_response(status)
        handler.send_header("Content-Type", "application/json")
        handler.send_header("Content-Length", str(len(body)))
        handler.end_headers()
        handler.wfile.write(body)
|
||||
@@ -8,14 +8,24 @@ import json
|
||||
import subprocess
|
||||
import time
|
||||
import os
|
||||
import threading
|
||||
import dbus
|
||||
import dbus.mainloop.glib
|
||||
import dbus.exceptions
|
||||
from typing import Any, Dict, Optional
|
||||
import logging
|
||||
from logger import Logger
|
||||
|
||||
logger = Logger(name="bluetooth_utils.py", level=logging.DEBUG)
|
||||
|
||||
# Constants
|
||||
BT_SCAN_DURATION_S = 3
|
||||
BT_PAIR_TIMEOUT_S = 60
|
||||
BT_CONNECT_SETTLE_S = 2
|
||||
BT_CONFIG_PATH = "/home/bjorn/.settings_bjorn/bt.json"
|
||||
BT_DISCOVERABLE_TIMEOUT = 180
|
||||
|
||||
|
||||
class BluetoothUtils:
|
||||
"""Utilities for Bluetooth device management."""
|
||||
|
||||
@@ -29,6 +39,7 @@ class BluetoothUtils:
|
||||
self.adapter = None
|
||||
self.adapter_props = None
|
||||
self.adapter_methods = None
|
||||
self._config_lock = threading.Lock()
|
||||
|
||||
def _ensure_bluetooth_service(self):
|
||||
"""Check if bluetooth service is running, if not start and enable it."""
|
||||
@@ -105,42 +116,78 @@ class BluetoothUtils:
|
||||
return self.bus.get_object("org.bluez", path)
|
||||
return None
|
||||
|
||||
def _save_bt_config(self, address):
    """Atomically save Bluetooth device MAC to config file (thread-safe).

    Reads the currently stored MAC first and rewrites the file only when it
    differs, using a write-to-temp + os.replace so the config file is never
    observed half-written.
    """
    with self._config_lock:
        stored_mac = None
        if os.path.exists(BT_CONFIG_PATH):
            try:
                with open(BT_CONFIG_PATH, "r") as fh:
                    stored_mac = json.load(fh).get("device_mac")
            except (json.JSONDecodeError, IOError) as exc:
                self.logger.warning(f"Could not read bt.json: {exc}")

        if stored_mac == address:
            return  # already up to date — nothing to write

        self.logger.info(f"Updating bt.json with new MAC: {address}")
        os.makedirs(os.path.dirname(BT_CONFIG_PATH), exist_ok=True)
        scratch = BT_CONFIG_PATH + ".tmp"
        with open(scratch, "w") as fh:
            json.dump({"device_mac": address}, fh)
        os.replace(scratch, BT_CONFIG_PATH)
        self.logger.info("Updated bt.json with new device MAC.")
|
||||
|
||||
def _restart_auto_bt_connect(self):
    """Restart auto_bt_connect service (non-fatal if service doesn't exist)."""
    outcome = subprocess.run(
        ["sudo", "systemctl", "restart", "auto_bt_connect"],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True,
    )
    if outcome.returncode == 0:
        self.logger.info("auto_bt_connect service restarted successfully")
    else:
        # Best-effort: the unit may simply not be installed on this system.
        self.logger.warning(f"auto_bt_connect service restart failed (may not exist): {outcome.stderr.strip()}")
|
||||
|
||||
def scan_bluetooth(self, handler):
|
||||
"""Scan for Bluetooth devices."""
|
||||
try:
|
||||
self._init_bluetooth()
|
||||
self.adapter_props.Set("org.bluez.Adapter1", "Powered", dbus.Boolean(True))
|
||||
self.adapter_props.Set("org.bluez.Adapter1", "Discoverable", dbus.Boolean(True))
|
||||
self.adapter_props.Set("org.bluez.Adapter1", "DiscoverableTimeout", dbus.UInt32(180))
|
||||
self.adapter_props.Set("org.bluez.Adapter1", "DiscoverableTimeout", dbus.UInt32(BT_DISCOVERABLE_TIMEOUT))
|
||||
|
||||
self.adapter_methods.StartDiscovery()
|
||||
time.sleep(3)
|
||||
time.sleep(BT_SCAN_DURATION_S)
|
||||
objects = self.manager_interface.GetManagedObjects()
|
||||
devices = []
|
||||
for path, ifaces in objects.items():
|
||||
if "org.bluez.Device1" in ifaces:
|
||||
dev = ifaces["org.bluez.Device1"]
|
||||
addr = dev.get("Address", "")
|
||||
name = dev.get("Name", "Unknown")
|
||||
paired = bool(dev.get("Paired", False))
|
||||
trusted = bool(dev.get("Trusted", False))
|
||||
connected = bool(dev.get("Connected", False))
|
||||
|
||||
devices.append({
|
||||
"name": name,
|
||||
"address": addr,
|
||||
"paired": paired,
|
||||
"trusted": trusted,
|
||||
"connected": connected
|
||||
"name": str(dev.get("Name", "Unknown")),
|
||||
"address": str(dev.get("Address", "")),
|
||||
"paired": bool(dev.get("Paired", False)),
|
||||
"trusted": bool(dev.get("Trusted", False)),
|
||||
"connected": bool(dev.get("Connected", False))
|
||||
})
|
||||
|
||||
self.adapter_methods.StopDiscovery()
|
||||
try:
|
||||
self.adapter_methods.StopDiscovery()
|
||||
except dbus.exceptions.DBusException:
|
||||
pass # Discovery may have already stopped
|
||||
|
||||
response = {"devices": devices}
|
||||
handler.send_response(200)
|
||||
handler.send_header("Content-Type", "application/json")
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps(response).encode('utf-8'))
|
||||
|
||||
except dbus.exceptions.DBusException as e:
|
||||
self.logger.error(f"DBus error scanning Bluetooth: {e}")
|
||||
handler.send_response(500)
|
||||
handler.send_header("Content-Type", "application/json")
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status": "error", "message": f"Bluetooth DBus error: {e}"}).encode('utf-8'))
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error scanning Bluetooth: {e}")
|
||||
handler.send_response(500)
|
||||
@@ -150,13 +197,13 @@ class BluetoothUtils:
|
||||
|
||||
def pair_bluetooth(self, address, pin=None):
|
||||
"""Pair with a Bluetooth device."""
|
||||
bt_process = None
|
||||
try:
|
||||
device = self._get_device_object(address)
|
||||
if device is None:
|
||||
self.logger.error(f"Device {address} not found")
|
||||
return {"status": "error", "message": f"Device {address} not found"}
|
||||
|
||||
device_methods = dbus.Interface(device, "org.bluez.Device1")
|
||||
device_props = dbus.Interface(device, "org.freedesktop.DBus.Properties")
|
||||
|
||||
bt_process = subprocess.Popen(
|
||||
@@ -173,58 +220,35 @@ class BluetoothUtils:
|
||||
self.logger.info(f"Attempting to pair with {address}")
|
||||
bt_process.stdin.write(f"pair {address}\n")
|
||||
bt_process.stdin.flush()
|
||||
|
||||
timeout = 60
|
||||
|
||||
start_time = time.time()
|
||||
|
||||
while (time.time() - start_time) < timeout:
|
||||
|
||||
while (time.time() - start_time) < BT_PAIR_TIMEOUT_S:
|
||||
line = bt_process.stdout.readline()
|
||||
if not line:
|
||||
continue
|
||||
|
||||
|
||||
self.logger.info(f"Bluetoothctl output: {line.strip()}")
|
||||
|
||||
|
||||
if "Confirm passkey" in line or "Request confirmation" in line:
|
||||
self.logger.info("Sending confirmation...")
|
||||
bt_process.stdin.write("yes\n")
|
||||
bt_process.stdin.flush()
|
||||
|
||||
|
||||
try:
|
||||
paired = device_props.Get("org.bluez.Device1", "Paired")
|
||||
if paired:
|
||||
self.logger.info("Device successfully paired!")
|
||||
device_props.Set("org.bluez.Device1", "Trusted", dbus.Boolean(True))
|
||||
|
||||
time.sleep(2)
|
||||
|
||||
config_path = "/home/bjorn/.settings_bjorn/bt.json"
|
||||
current_mac = None
|
||||
if os.path.exists(config_path):
|
||||
try:
|
||||
with open(config_path, "r") as f:
|
||||
data = json.load(f)
|
||||
current_mac = data.get("device_mac")
|
||||
except (json.JSONDecodeError, IOError):
|
||||
pass
|
||||
|
||||
if current_mac != address:
|
||||
self.logger.info(f"Updating config with new MAC: {address}")
|
||||
new_data = {"device_mac": address}
|
||||
os.makedirs(os.path.dirname(config_path), exist_ok=True)
|
||||
with open(config_path, "w") as f:
|
||||
json.dump(new_data, f)
|
||||
self.logger.info("Updated bt.json with new device MAC.")
|
||||
time.sleep(BT_CONNECT_SETTLE_S)
|
||||
self._save_bt_config(address)
|
||||
self._restart_auto_bt_connect()
|
||||
|
||||
try:
|
||||
subprocess.run(["sudo", "systemctl", "restart", "auto_bt_connect"], check=True)
|
||||
self.logger.info("auto_bt_connect service restarted successfully")
|
||||
except subprocess.CalledProcessError as e:
|
||||
self.logger.error(f"Failed to restart auto_bt_connect service: {e}")
|
||||
|
||||
return {"status": "success", "message": "Device successfully paired and trusted"}
|
||||
except:
|
||||
pass
|
||||
|
||||
except dbus.exceptions.DBusException as e:
|
||||
self.logger.debug(f"Pairing check DBus error (may be transient): {e}")
|
||||
|
||||
if "Failed" in line or "Error" in line:
|
||||
self.logger.error(f"Bluetoothctl error: {line}")
|
||||
return {"status": "error", "message": f"Pairing failed: {line.strip()}"}
|
||||
@@ -235,12 +259,17 @@ class BluetoothUtils:
|
||||
self.logger.error(f"Error during pairing process: {str(e)}")
|
||||
return {"status": "error", "message": f"Error during pairing: {str(e)}"}
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error initiating pairing: {str(e)}")
|
||||
return {"status": "error", "message": f"Error initiating pairing: {str(e)}"}
|
||||
finally:
|
||||
if 'bt_process' in locals():
|
||||
bt_process.stdin.write("quit\n")
|
||||
bt_process.stdin.flush()
|
||||
time.sleep(1)
|
||||
bt_process.terminate()
|
||||
if bt_process is not None:
|
||||
try:
|
||||
bt_process.stdin.write("quit\n")
|
||||
bt_process.stdin.flush()
|
||||
bt_process.wait(timeout=3)
|
||||
except Exception:
|
||||
bt_process.kill()
|
||||
|
||||
def forget_bluetooth(self, address):
|
||||
"""Remove/forget a Bluetooth device."""
|
||||
@@ -253,19 +282,17 @@ class BluetoothUtils:
|
||||
adapter_methods = dbus.Interface(self.adapter, "org.bluez.Adapter1")
|
||||
|
||||
try:
|
||||
try:
|
||||
device_methods.Disconnect()
|
||||
except:
|
||||
pass
|
||||
device_methods.Disconnect()
|
||||
except dbus.exceptions.DBusException as e:
|
||||
self.logger.debug(f"Disconnect before forget (non-fatal): {e}")
|
||||
|
||||
adapter_methods.RemoveDevice(device)
|
||||
self.logger.info(f"Successfully removed device {address}")
|
||||
return {"status": "success", "message": "Device forgotten successfully"}
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to forget device: {e}")
|
||||
return {"status": "error", "message": f"Failed to forget device: {str(e)}"}
|
||||
adapter_methods.RemoveDevice(device)
|
||||
self.logger.info(f"Successfully removed device {address}")
|
||||
return {"status": "success", "message": "Device forgotten successfully"}
|
||||
|
||||
except dbus.exceptions.DBusException as e:
|
||||
self.logger.error(f"DBus error forgetting device: {e}")
|
||||
return {"status": "error", "message": f"Failed to forget device: {str(e)}"}
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error in forget_bluetooth: {str(e)}")
|
||||
return {"status": "error", "message": f"Error forgetting device: {str(e)}"}
|
||||
@@ -279,7 +306,7 @@ class BluetoothUtils:
|
||||
try:
|
||||
device_props.Set("org.bluez.Device1", "Trusted", dbus.Boolean(True))
|
||||
return {"status": "success", "message": f"Trusted {address}"}
|
||||
except Exception as e:
|
||||
except dbus.exceptions.DBusException as e:
|
||||
return {"status": "error", "message": f"Failed to trust {address}: {e}"}
|
||||
|
||||
def connect_bluetooth(self, address):
|
||||
@@ -300,7 +327,7 @@ class BluetoothUtils:
|
||||
text=True
|
||||
)
|
||||
|
||||
time.sleep(2)
|
||||
time.sleep(BT_CONNECT_SETTLE_S)
|
||||
|
||||
if bt_net_process.poll() is not None:
|
||||
if bt_net_process.returncode != 0:
|
||||
@@ -321,24 +348,7 @@ class BluetoothUtils:
|
||||
return {"status": "error", "message": f"Connected to {address}, bt-network ok, but dhclient failed: {dhclient_res.stderr}"}
|
||||
|
||||
self.logger.info("Successfully obtained IP via dhclient on bnep0.")
|
||||
|
||||
config_path = "/home/bjorn/.settings_bjorn/bt.json"
|
||||
current_mac = None
|
||||
if os.path.exists(config_path):
|
||||
try:
|
||||
with open(config_path, "r") as f:
|
||||
data = json.load(f)
|
||||
current_mac = data.get("device_mac")
|
||||
except (json.JSONDecodeError, IOError):
|
||||
pass
|
||||
|
||||
if current_mac != address:
|
||||
self.logger.info(f"Updating config with new MAC: {address}")
|
||||
new_data = {"device_mac": address}
|
||||
os.makedirs(os.path.dirname(config_path), exist_ok=True)
|
||||
with open(config_path, "w") as f:
|
||||
json.dump(new_data, f)
|
||||
self.logger.info("Updated bt.json with new device MAC.")
|
||||
self._save_bt_config(address)
|
||||
|
||||
return {"status": "success", "message": f"Connected to {address} and network interface set up."}
|
||||
except dbus.exceptions.DBusException as e:
|
||||
|
||||
@@ -22,6 +22,18 @@ class FileUtils:
|
||||
self.logger = logger
|
||||
self.shared_data = shared_data
|
||||
|
||||
def _validate_path(self, path, base_dir=None):
    """Validate that *path* resolves inside *base_dir* (default: current_dir).

    Uses os.path.realpath to resolve symlinks and ``..`` components, so
    symlink or dot-dot traversal tricks cannot escape the allowed directory.

    Returns the resolved absolute path, or raises ValueError when the
    resolved path falls outside the allowed base directory.
    """
    if base_dir is None:
        base_dir = self.shared_data.current_dir
    resolved_base = os.path.realpath(base_dir)
    resolved_path = os.path.realpath(path)
    # Allow the base itself, or anything strictly below it. Appending os.sep
    # prevents prefix collisions such as /data vs /data-secret.
    if not resolved_path.startswith(resolved_base + os.sep) and resolved_path != resolved_base:
        # Fix: was an f-string with no placeholders (pointless f-prefix).
        # Kept deliberately generic — the message may be returned to clients.
        raise ValueError("Access denied: path is outside the allowed directory")
    return resolved_path
|
||||
|
||||
def list_files(self, directory, depth=0, max_depth=3):
|
||||
"""List files and directories recursively."""
|
||||
files = []
|
||||
@@ -35,10 +47,15 @@ class FileUtils:
|
||||
"children": self.list_files(entry.path, depth+1, max_depth)
|
||||
})
|
||||
else:
|
||||
try:
|
||||
fsize = entry.stat().st_size
|
||||
except OSError:
|
||||
fsize = 0
|
||||
files.append({
|
||||
"name": entry.name,
|
||||
"is_directory": False,
|
||||
"path": entry.path
|
||||
"path": entry.path,
|
||||
"size": fsize
|
||||
})
|
||||
return files
|
||||
|
||||
@@ -118,7 +135,8 @@ class FileUtils:
|
||||
query = handler.path.split('?')[1]
|
||||
file_path = unquote(query.split('=')[1])
|
||||
full_path = os.path.join(self.shared_data.data_stolen_dir, file_path)
|
||||
|
||||
self._validate_path(full_path, self.shared_data.data_stolen_dir)
|
||||
|
||||
if not os.path.isfile(full_path):
|
||||
raise FileNotFoundError(f"File not found: {file_path}")
|
||||
|
||||
@@ -149,6 +167,7 @@ class FileUtils:
|
||||
try:
|
||||
query = unquote(handler.path.split('?path=')[1])
|
||||
file_path = os.path.join(self.shared_data.current_dir, query)
|
||||
self._validate_path(file_path)
|
||||
if os.path.isfile(file_path):
|
||||
handler.send_response(200)
|
||||
handler.send_header("Content-Disposition", f'attachment; filename="{os.path.basename(file_path)}"')
|
||||
@@ -168,12 +187,11 @@ class FileUtils:
|
||||
"""Create a new folder."""
|
||||
try:
|
||||
folder_path = os.path.join(self.shared_data.current_dir, data['folder_path'])
|
||||
|
||||
if not os.path.abspath(folder_path).startswith(self.shared_data.current_dir):
|
||||
return {'status': 'error', 'message': "Invalid path"}
|
||||
|
||||
self._validate_path(folder_path)
|
||||
os.makedirs(folder_path, exist_ok=True)
|
||||
return {'status': 'success', 'message': 'Folder created successfully'}
|
||||
except ValueError as e:
|
||||
return {'status': 'error', 'message': str(e)}
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error creating folder: {e}")
|
||||
return {'status': 'error', 'message': str(e)}
|
||||
@@ -248,19 +266,13 @@ class FileUtils:
|
||||
def delete_file(self, data):
|
||||
"""Delete file or directory."""
|
||||
try:
|
||||
import stat
|
||||
file_path = data.get('file_path')
|
||||
if not file_path:
|
||||
return {"status": "error", "message": "No file path provided"}
|
||||
|
||||
abs_file_path = os.path.abspath(file_path)
|
||||
base_dir = os.path.abspath(self.shared_data.current_dir)
|
||||
|
||||
abs_file_path = self._validate_path(file_path)
|
||||
self.logger.info(f"Deleting: {abs_file_path}")
|
||||
|
||||
if not abs_file_path.startswith(base_dir):
|
||||
return {"status": "error", "message": f"Access denied: {file_path} is outside the allowed directory"}
|
||||
|
||||
if not os.path.exists(abs_file_path):
|
||||
return {"status": "error", "message": f"Path not found: {file_path}"}
|
||||
|
||||
@@ -285,16 +297,16 @@ class FileUtils:
|
||||
try:
|
||||
old_path = os.path.join(self.shared_data.current_dir, data['old_path'])
|
||||
new_path = os.path.join(self.shared_data.current_dir, data['new_path'])
|
||||
|
||||
if not (os.path.abspath(old_path).startswith(self.shared_data.current_dir) and
|
||||
os.path.abspath(new_path).startswith(self.shared_data.current_dir)):
|
||||
return {"status": "error", "message": "Invalid path"}
|
||||
self._validate_path(old_path)
|
||||
self._validate_path(new_path)
|
||||
|
||||
os.rename(old_path, new_path)
|
||||
return {
|
||||
"status": "success",
|
||||
"message": f"Successfully renamed {old_path} to {new_path}"
|
||||
}
|
||||
except ValueError as e:
|
||||
return {"status": "error", "message": str(e)}
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error renaming file: {str(e)}")
|
||||
return {"status": "error", "message": str(e)}
|
||||
@@ -304,10 +316,8 @@ class FileUtils:
|
||||
try:
|
||||
source_path = os.path.join(self.shared_data.current_dir, data['source_path'])
|
||||
target_path = os.path.join(self.shared_data.current_dir, data['target_path'])
|
||||
|
||||
if not (os.path.abspath(source_path).startswith(self.shared_data.current_dir) and
|
||||
os.path.abspath(target_path).startswith(self.shared_data.current_dir)):
|
||||
return {"status": "error", "message": "Invalid path"}
|
||||
self._validate_path(source_path)
|
||||
self._validate_path(target_path)
|
||||
|
||||
if os.path.isdir(source_path):
|
||||
shutil.copytree(source_path, target_path)
|
||||
@@ -327,10 +337,8 @@ class FileUtils:
|
||||
try:
|
||||
source_path = os.path.join(self.shared_data.current_dir, data['source_path'])
|
||||
target_path = os.path.join(self.shared_data.current_dir, data['target_path'])
|
||||
|
||||
if not (os.path.abspath(source_path).startswith(self.shared_data.current_dir) and
|
||||
os.path.abspath(target_path).startswith(self.shared_data.current_dir)):
|
||||
return {"status": "error", "message": "Invalid path"}
|
||||
self._validate_path(source_path)
|
||||
self._validate_path(target_path)
|
||||
|
||||
target_dir = os.path.dirname(target_path)
|
||||
if not os.path.exists(target_dir):
|
||||
|
||||
245
web_utils/loki_utils.py
Normal file
245
web_utils/loki_utils.py
Normal file
@@ -0,0 +1,245 @@
|
||||
"""
|
||||
Loki web API endpoints.
|
||||
"""
|
||||
import os
|
||||
import json
|
||||
import logging
|
||||
from typing import Dict
|
||||
from urllib.parse import urlparse, parse_qs
|
||||
|
||||
from logger import Logger
|
||||
|
||||
logger = Logger(name="loki_utils", level=logging.DEBUG)
|
||||
|
||||
|
||||
class LokiUtils:
|
||||
def __init__(self, shared_data):
    """Keep a reference to the app-wide shared state container."""
    self.shared_data = shared_data
|
||||
|
||||
@property
def _engine(self):
    """The attached Loki engine instance, or None when not loaded."""
    return getattr(self.shared_data, 'loki_engine', None)
|
||||
|
||||
# ── GET endpoints (handler signature) ─────────────────────
|
||||
|
||||
def get_status(self, handler):
    """GET /api/loki/status — engine state."""
    engine = self._engine
    if not engine:
        # No engine attached: report a quiescent default state so the UI
        # always receives a complete set of keys.
        payload = {
            'enabled': False, 'running': False,
            'gadget_ready': False, 'layout': 'us',
            'jobs_running': 0, 'jobs_total': 0,
        }
    else:
        payload = engine.get_status()
    self._send_json(handler, payload)
|
||||
|
||||
def get_scripts(self, handler):
    """GET /api/loki/scripts — user-saved scripts (metadata only, no content)."""
    try:
        records = self.shared_data.db.query(
            "SELECT id, name, description, category, target_os, "
            "created_at, updated_at FROM loki_scripts ORDER BY name"
        ) or []
        self._send_json(handler, {'scripts': records})
    except Exception as exc:
        # DB failure degrades to an empty listing rather than a 500.
        logger.error("get_scripts error: %s", exc)
        self._send_json(handler, {'scripts': []})
|
||||
|
||||
def get_script(self, handler):
    """GET /api/loki/script?id=N — single script with content."""
    try:
        params = parse_qs(urlparse(handler.path).query)
        sid = int(params.get('id', [0])[0])
        record = self.shared_data.db.query_one(
            "SELECT * FROM loki_scripts WHERE id = ?", (sid,)
        )
        if record:
            self._send_json(handler, {'script': record})
        else:
            self._send_json(handler, {'script': None}, 404)
    except Exception as exc:
        logger.error("get_script error: %s", exc)
        self._send_json(handler, {'error': str(exc)}, 500)
|
||||
|
||||
def get_jobs(self, handler):
    """GET /api/loki/jobs — job list (empty when no engine is attached)."""
    engine = self._engine
    jobs = engine.get_jobs() if engine else []
    self._send_json(handler, {'jobs': jobs})
|
||||
|
||||
def get_payloads(self, handler):
    """GET /api/loki/payloads — built-in payload list.

    Scans the loki/payloads/ directory next to this package for *.js files
    and returns each one's name, filename, first-line comment description,
    and full content. Unreadable files are logged and skipped (was a silent
    ``except: pass``), and files are read as UTF-8 explicitly instead of
    the platform-default encoding.
    """
    payloads = []
    payload_dir = os.path.join(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
        "loki", "payloads"
    )
    if os.path.isdir(payload_dir):
        for f in sorted(os.listdir(payload_dir)):
            if not f.endswith('.js'):
                continue
            path = os.path.join(payload_dir, f)
            try:
                with open(path, 'r', encoding='utf-8') as fh:
                    content = fh.read()
                # Description = first '//' comment line, if any.
                desc = ""
                for line in content.split('\n'):
                    line = line.strip()
                    if line.startswith('//'):
                        desc = line[2:].strip()
                        break
                payloads.append({
                    'name': f[:-3],  # without .js
                    'filename': f,
                    'description': desc,
                    'content': content,
                })
            except Exception as exc:
                # Fix: surface the failure instead of silently dropping it.
                logger.error("get_payloads: could not read %s: %s", path, exc)
    self._send_json(handler, {'payloads': payloads})
|
||||
|
||||
def get_layouts(self, handler):
    """GET /api/loki/layouts — available keyboard layouts."""
    layouts = ['us']  # safe default when the loki package is unavailable
    try:
        from loki.layouts import available
        layouts = available()
    except Exception:
        pass
    self._send_json(handler, {'layouts': layouts})
|
||||
|
||||
# ── POST endpoints (JSON data signature) ──────────────────
|
||||
|
||||
def toggle_loki(self, data: Dict) -> Dict:
    """POST /api/loki/toggle — switch to/from LOKI mode."""
    enabled = bool(data.get('enabled', False))
    # LOKI is an exclusive operation mode; leaving it falls back to AUTO.
    self.shared_data.operation_mode = "LOKI" if enabled else "AUTO"
    return {'status': 'ok', 'enabled': enabled}
|
||||
|
||||
def save_script(self, data: Dict) -> Dict:
    """POST /api/loki/script/save — save/update a script.

    With an 'id' the existing row is updated; otherwise a new row is
    inserted. 'name' is mandatory; the other fields default sensibly.
    """
    try:
        sid = data.get('id')
        name = data.get('name', '').strip()
        if not name:
            return {'status': 'error', 'message': 'Name required'}

        fields = (
            name,
            data.get('description', ''),
            data.get('content', ''),
            data.get('category', 'general'),
            data.get('target_os', 'any'),
        )
        db = self.shared_data.db
        if sid:
            db.execute(
                "UPDATE loki_scripts SET name=?, description=?, content=?, "
                "category=?, target_os=?, updated_at=CURRENT_TIMESTAMP WHERE id=?",
                fields + (sid,)
            )
        else:
            db.execute(
                "INSERT INTO loki_scripts (name, description, content, category, target_os) "
                "VALUES (?, ?, ?, ?, ?)",
                fields
            )
        return {'status': 'ok'}
    except Exception as exc:
        return {'status': 'error', 'message': str(exc)}
|
||||
|
||||
def delete_script(self, data: Dict) -> Dict:
    """POST /api/loki/script/delete — delete a script."""
    try:
        sid = data.get('id')
        # A missing/falsy id is treated as a no-op success, not an error.
        if sid:
            self.shared_data.db.execute(
                "DELETE FROM loki_scripts WHERE id = ?", (sid,)
            )
        return {'status': 'ok'}
    except Exception as exc:
        return {'status': 'error', 'message': str(exc)}
|
||||
|
||||
def run_script(self, data: Dict) -> Dict:
    """POST /api/loki/script/run — execute a HIDScript."""
    engine = self._engine
    if not engine:
        return {'status': 'error', 'message': 'Loki engine not available'}
    if not engine._running:
        return {'status': 'error', 'message': 'Loki not running. Enable it first.'}

    content = data.get('content', '')
    if not content:
        return {'status': 'error', 'message': 'No script content'}

    try:
        job_id = engine.submit_job(data.get('name', 'unnamed'), content)
    except Exception as exc:
        return {'status': 'error', 'message': str(exc)}
    return {'status': 'ok', 'job_id': job_id}
|
||||
|
||||
def cancel_job(self, data: Dict) -> Dict:
    """POST /api/loki/job/cancel — cancel a running job by id."""
    engine = self._engine
    if not engine:
        return {'status': 'error', 'message': 'Loki engine not available'}
    target = data.get('job_id', '')
    if engine.cancel_job(target):
        return {'status': 'ok'}
    return {'status': 'error', 'message': 'Job not found'}
|
||||
|
||||
def clear_jobs(self, data: Dict) -> Dict:
    """POST /api/loki/jobs/clear — drop completed jobs from the job list."""
    engine = self._engine
    if not engine or not engine._jobs:
        # Nothing to clear; still report success (idempotent endpoint).
        return {'status': 'ok'}
    engine.job_manager.clear_completed()
    return {'status': 'ok'}
|
||||
|
||||
def install_gadget(self, data: Dict) -> Dict:
    """POST /api/loki/install — install the HID gadget boot script.

    Delegates to LokiEngine.install_hid_gadget(), which returns the
    status dict passed straight back to the client.
    """
    # Imported lazily so the web layer never hard-depends on loki.
    from loki import LokiEngine
    return LokiEngine.install_hid_gadget()
|
||||
|
||||
def reboot(self, data: Dict) -> Dict:
    """POST /api/loki/reboot — reboot the Pi to activate the HID gadget.

    Fire-and-forget: Popen returns immediately and the HTTP response is
    sent before the machine actually goes down.
    """
    import subprocess
    try:
        logger.info("Reboot requested by Loki setup")
        subprocess.Popen(["sudo", "reboot"], close_fds=True)
    except Exception as e:
        return {'status': 'error', 'message': str(e)}
    return {'status': 'ok', 'message': 'Rebooting...'}
|
||||
|
||||
def quick_type(self, data: Dict) -> Dict:
    """POST /api/loki/quick — quick-type text without a full script.

    Wraps the raw text in a one-statement HIDScript and submits it as a
    job named "quick-type".
    """
    engine = self._engine
    if not engine or not engine._running:
        return {'status': 'error', 'message': 'Loki not running'}

    text = data.get('text', '')
    if not text:
        return {'status': 'error', 'message': 'No text provided'}

    # Escape backslashes first, then quotes and newlines, so the text
    # survives being embedded in a double-quoted HIDScript string.
    escaped = (text.replace('\\', '\\\\')
                   .replace('"', '\\"')
                   .replace('\n', '\\n'))
    try:
        job_id = engine.submit_job("quick-type", f'type("{escaped}");')
        return {'status': 'ok', 'job_id': job_id}
    except Exception as e:
        return {'status': 'error', 'message': str(e)}
|
||||
|
||||
# ── Helpers ───────────────────────────────────────────────
|
||||
|
||||
def _send_json(self, handler, data, status=200):
    """Serialize *data* as JSON and write it on *handler* with *status*."""
    payload = json.dumps(data).encode("utf-8")
    handler.send_response(status)
    handler.send_header("Content-Type", "application/json")
    handler.end_headers()
    handler.wfile.write(payload)
|
||||
@@ -170,12 +170,21 @@ class NetKBUtils:
|
||||
return {"status": "error", "message": str(e)}
|
||||
|
||||
def delete_all_actions(self, data=None):
    """Cancel running actions then clear the entire action queue.

    Active (scheduled/pending/running) rows are first marked cancelled so
    workers observe the state change, then all rows are deleted.

    Returns:
        {"status": "success", "message": ...} or {"status": "error", ...}.
    """
    try:
        # First cancel any running/pending/scheduled actions.
        cancelled = self.shared_data.db.execute("""
            UPDATE action_queue
            SET status='cancelled',
                completed_at=CURRENT_TIMESTAMP,
                error_message=COALESCE(error_message,'user_cancelled')
            WHERE status IN ('scheduled','pending','running')
        """)
        # Then delete everything.
        deleted = self.shared_data.db.execute("DELETE FROM action_queue")
        return {
            "status": "success",
            "message": f"Cancelled {cancelled} active, cleared {deleted} total entries"
        }
    except Exception as e:
        self.logger.error(f"delete_all_actions error: {e}")
        # Bug fix: the original except branch fell through returning None.
        return {"status": "error", "message": str(e)}
|
||||
|
||||
@@ -7,6 +7,7 @@ from __future__ import annotations
|
||||
import json
|
||||
import html
|
||||
import importlib
|
||||
import threading
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict, Optional
|
||||
import logging
|
||||
@@ -19,6 +20,9 @@ class OrchestratorUtils:
|
||||
def __init__(self, shared_data):
    """Bind shared state and prepare the MANUAL-mode background scan timer."""
    self.logger = logger
    self.shared_data = shared_data
    # ORCH-03: timer state for periodic scans while in MANUAL mode.
    self._scan_timer = None
    self._scan_stop_event = threading.Event()
|
||||
|
||||
def execute_manual_attack(self, params):
|
||||
"""Execute a manual attack on a specific target."""
|
||||
@@ -96,6 +100,9 @@ class OrchestratorUtils:
|
||||
def start_orchestrator(self):
|
||||
"""Start the orchestrator."""
|
||||
try:
|
||||
# ORCH-03: Stop background scan timer when switching to AUTO/AI
|
||||
self._stop_scan_timer()
|
||||
|
||||
bjorn_instance = self.shared_data.bjorn_instance
|
||||
if getattr(self.shared_data, "ai_mode", False):
|
||||
self.shared_data.operation_mode = "AI"
|
||||
@@ -109,17 +116,66 @@ class OrchestratorUtils:
|
||||
return {"status": "error", "message": str(e)}
|
||||
|
||||
def stop_orchestrator(self):
    """Stop the orchestrator and reset all status fields to IDLE.

    Removes the stale early-return left over from the previous revision so
    the status reset and ORCH-03 timer start are actually reached.

    Returns:
        {"status": "success"|"error", "message": ...}.
    """
    try:
        bjorn_instance = self.shared_data.bjorn_instance
        self.shared_data.operation_mode = "MANUAL"
        bjorn_instance.stop_orchestrator()
        self.shared_data.orchestrator_should_exit = True
        # Explicit reset so the web UI reflects IDLE immediately,
        # even if the orchestrator thread is still finishing up.
        self.shared_data.bjorn_orch_status = "IDLE"
        self.shared_data.bjorn_status_text = "IDLE"
        self.shared_data.bjorn_status_text2 = "Waiting for instructions..."
        self.shared_data.action_target_ip = ""
        self.shared_data.active_action = None
        self.shared_data.update_status("IDLE", "")

        # ORCH-03: Start background scan timer if enabled
        if getattr(self.shared_data, 'manual_mode_auto_scan', True):
            self._start_scan_timer()

        return {"status": "success", "message": "Orchestrator stopped"}
    except Exception as e:
        self.logger.error(f"Error stopping orchestrator: {e}")
        return {"status": "error", "message": str(e)}
|
||||
|
||||
# =========================================================================
|
||||
# ORCH-03: Background scan timer for MANUAL mode
|
||||
# =========================================================================
|
||||
|
||||
def _start_scan_timer(self):
    """Start a daemon thread that periodically scans while in MANUAL mode.

    No-op if a previous timer thread is still alive.
    """
    if self._scan_timer and self._scan_timer.is_alive():
        return  # already running; don't spawn a second timer
    self._scan_stop_event.clear()
    worker = threading.Thread(
        target=self._scan_loop, daemon=True, name="ManualModeScanTimer"
    )
    self._scan_timer = worker
    worker.start()
    self.logger.info("ORCH-03: Background scan timer started for MANUAL mode")
|
||||
|
||||
def _scan_loop(self):
    """Run execute_manual_scan() every *interval* seconds until stopped.

    Exits when the stop event fires or the operation mode leaves MANUAL.
    Scan failures are logged and do not kill the loop.
    """
    interval = int(getattr(self.shared_data, 'manual_mode_scan_interval', 180))
    while not self._scan_stop_event.wait(timeout=interval):
        if self.shared_data.operation_mode != "MANUAL":
            self.logger.info("ORCH-03: Exiting scan timer, no longer in MANUAL mode")
            break
        try:
            self.logger.info("ORCH-03: Manual mode background scan starting")
            self.execute_manual_scan()
        except Exception as e:
            self.logger.error(f"ORCH-03: Background scan error: {e}")
|
||||
|
||||
def _stop_scan_timer(self):
    """Signal the background scan thread to stop and wait briefly for it."""
    if not self._scan_timer:
        return
    self._scan_stop_event.set()
    self._scan_timer.join(timeout=5)
    self._scan_timer = None
    self.logger.debug("ORCH-03: Background scan timer stopped")
|
||||
|
||||
def serve_credentials_data(self, handler):
|
||||
"""Serve credentials data as HTML."""
|
||||
try:
|
||||
|
||||
@@ -174,7 +174,11 @@ class RLUtils:
|
||||
try:
|
||||
return self.shared_data.db.query(sql) or []
|
||||
except Exception as exc:
|
||||
logger.error(f"DB query failed: {exc}")
|
||||
msg = str(exc)
|
||||
if "no such table" in msg:
|
||||
logger.debug(f"Table not yet created (AI not active): {msg}")
|
||||
else:
|
||||
logger.error(f"DB query failed: {exc}")
|
||||
return []
|
||||
|
||||
def _query_scalar(self, sql: str, key: str, default: int = 0) -> int:
|
||||
|
||||
266
web_utils/sentinel_utils.py
Normal file
266
web_utils/sentinel_utils.py
Normal file
@@ -0,0 +1,266 @@
|
||||
"""
|
||||
Sentinel web API endpoints.
|
||||
"""
|
||||
import json
|
||||
import logging
|
||||
from typing import Dict
|
||||
|
||||
from logger import Logger
|
||||
|
||||
logger = Logger(name="sentinel_utils", level=logging.DEBUG)
|
||||
|
||||
|
||||
class SentinelUtils:
    """Web API endpoints for the Sentinel network-monitoring engine."""

    def __init__(self, shared_data):
        # App-wide shared state container; the engine hangs off it.
        self.shared_data = shared_data

    @property
    def _engine(self):
        """The live SentinelEngine instance, or None when not attached."""
        return getattr(self.shared_data, 'sentinel_engine', None)
|
||||
|
||||
# ── GET endpoints (handler signature) ───────────────────────────────
|
||||
|
||||
def get_status(self, handler):
    """GET /api/sentinel/status — overall sentinel state + unread count."""
    engine = self._engine
    if engine:
        payload = engine.get_status()
    else:
        # No engine attached: report a disabled, quiet sentinel.
        payload = {"enabled": False, "running": False, "unread_alerts": 0}
    self._send_json(handler, payload)
|
||||
|
||||
def get_events(self, handler):
    """GET /api/sentinel/events — recent events with optional filters.

    Query params: limit, offset, type (event_type filter), unread=1.
    Responds with the matching events plus the global unread count.
    """
    try:
        from urllib.parse import urlparse, parse_qs
        params = parse_qs(urlparse(handler.path).query)
        limit = int(params.get("limit", [50])[0])
        offset = int(params.get("offset", [0])[0])
        event_type = params.get("type", [""])[0]
        unread_only = params.get("unread", [""])[0] == "1"

        sql, args = self._build_events_query(limit, offset, event_type, unread_only)
        events = self.shared_data.db.query(sql, args)
        count_row = self.shared_data.db.query_one(
            "SELECT COUNT(*) AS c FROM sentinel_events WHERE acknowledged = 0"
        )
        unread_count = int(count_row.get("c", 0)) if count_row else 0

        self._send_json(handler, {
            "events": events or [],
            "unread_count": unread_count,
        })
    except Exception as e:
        logger.error("get_events error: %s", e)
        self._send_json(handler, {"events": [], "unread_count": 0})
|
||||
|
||||
def _build_events_query(self, limit, offset, event_type, unread_only):
    """Compose the filtered SELECT over sentinel_events.

    Returns:
        (sql, params) ready to hand to db.query().
    """
    clauses = ["SELECT * FROM sentinel_events WHERE 1=1"]
    params = []
    if event_type:
        clauses.append("AND event_type = ?")
        params.append(event_type)
    if unread_only:
        clauses.append("AND acknowledged = 0")
    clauses.append("ORDER BY timestamp DESC LIMIT ? OFFSET ?")
    params.extend([limit, offset])
    return " ".join(clauses), params
|
||||
|
||||
def get_rules(self, handler):
    """GET /api/sentinel/rules — return every rule, ordered by id."""
    try:
        rules = self.shared_data.db.query(
            "SELECT * FROM sentinel_rules ORDER BY id"
        ) or []
        self._send_json(handler, {"rules": rules})
    except Exception as e:
        # Degrade to an empty list rather than a 500 for the UI.
        logger.error("get_rules error: %s", e)
        self._send_json(handler, {"rules": []})
|
||||
|
||||
def get_devices(self, handler):
    """GET /api/sentinel/devices — known device baselines, newest first."""
    try:
        devices = self.shared_data.db.query(
            "SELECT * FROM sentinel_devices ORDER BY last_seen DESC"
        ) or []
        self._send_json(handler, {"devices": devices})
    except Exception as e:
        # Degrade to an empty list rather than a 500 for the UI.
        logger.error("get_devices error: %s", e)
        self._send_json(handler, {"devices": []})
|
||||
|
||||
def get_arp_table(self, handler):
    """GET /api/sentinel/arp — ARP cache entries for spoof analysis."""
    try:
        entries = self.shared_data.db.query(
            "SELECT * FROM sentinel_arp_cache ORDER BY last_seen DESC LIMIT 200"
        ) or []
        self._send_json(handler, {"arp": entries})
    except Exception as e:
        # Degrade to an empty list rather than a 500 for the UI.
        logger.error("get_arp error: %s", e)
        self._send_json(handler, {"arp": []})
|
||||
|
||||
# ── POST endpoints (JSON data signature) ────────────────────────────
|
||||
|
||||
def toggle_sentinel(self, data: Dict) -> Dict:
    """POST /api/sentinel/toggle — enable or disable the sentinel."""
    enabled = bool(data.get("enabled", False))
    self.shared_data.sentinel_enabled = enabled  # persist the flag
    engine = self._engine
    if engine:
        # Drive the engine to match the requested state.
        (engine.start if enabled else engine.stop)()
    return {"status": "ok", "enabled": enabled}
|
||||
|
||||
def acknowledge_event(self, data: Dict) -> Dict:
    """POST /api/sentinel/ack — acknowledge one event or all of them.

    Body: {"all": true} or {"id": <event id>}.
    """
    try:
        if data.get("all"):
            self.shared_data.db.execute(
                "UPDATE sentinel_events SET acknowledged = 1"
            )
            return {"status": "ok", "message": "All events acknowledged"}
        event_id = data.get("id")
        if event_id:
            self.shared_data.db.execute(
                "UPDATE sentinel_events SET acknowledged = 1 WHERE id = ?",
                (int(event_id),)
            )
            return {"status": "ok", "id": event_id}
        return {"status": "error", "message": "No id or all flag provided"}
    except Exception as e:
        return {"status": "error", "message": str(e)}
|
||||
|
||||
def clear_events(self, data: Dict) -> Dict:
    """POST /api/sentinel/clear — delete every stored event."""
    try:
        self.shared_data.db.execute("DELETE FROM sentinel_events")
    except Exception as e:
        return {"status": "error", "message": str(e)}
    return {"status": "ok", "message": "Events cleared"}
|
||||
|
||||
def upsert_rule(self, data: Dict) -> Dict:
    """POST /api/sentinel/rule — create (no id) or update (with id) a rule.

    Accepts either {"rule": {...}} or the rule fields at top level.
    Dict/list fields (conditions, actions) are stored as JSON text.
    """
    try:
        rule = data.get("rule", data)
        if not rule.get("name") or not rule.get("trigger_type"):
            return {"status": "error", "message": "name and trigger_type required"}

        # Normalize structured fields to JSON text for storage.
        conditions = rule.get("conditions", {})
        if isinstance(conditions, dict):
            conditions = json.dumps(conditions)
        actions = rule.get("actions", ["notify_web"])
        if isinstance(actions, list):
            actions = json.dumps(actions)

        # Shared column values for both the UPDATE and INSERT arms.
        common = (
            rule["name"],
            int(rule.get("enabled", 1)),
            rule["trigger_type"],
            conditions,
            rule.get("logic", "AND"),
            actions,
            int(rule.get("cooldown_s", 60)),
        )
        rule_id = rule.get("id")
        if rule_id:
            self.shared_data.db.execute(
                """UPDATE sentinel_rules SET
                       name=?, enabled=?, trigger_type=?, conditions=?,
                       logic=?, actions=?, cooldown_s=?
                   WHERE id=?""",
                common + (rule_id,)
            )
        else:
            self.shared_data.db.execute(
                """INSERT INTO sentinel_rules
                   (name, enabled, trigger_type, conditions, logic, actions, cooldown_s)
                   VALUES (?, ?, ?, ?, ?, ?, ?)""",
                common
            )
        return {"status": "ok"}
    except Exception as e:
        return {"status": "error", "message": str(e)}
|
||||
|
||||
def delete_rule(self, data: Dict) -> Dict:
    """POST /api/sentinel/rule/delete — remove a rule by id."""
    rule_id = data.get("id")
    if not rule_id:
        return {"status": "error", "message": "id required"}
    try:
        self.shared_data.db.execute(
            "DELETE FROM sentinel_rules WHERE id = ?", (int(rule_id),)
        )
        return {"status": "ok"}
    except Exception as e:
        return {"status": "error", "message": str(e)}
|
||||
|
||||
def update_device(self, data: Dict) -> Dict:
    """POST /api/sentinel/device — upsert a device baseline keyed by MAC.

    MAC addresses are normalized to lower case; the same field tuple is
    bound to both the INSERT and the ON CONFLICT UPDATE arms.
    """
    try:
        mac = data.get("mac_address", "").lower()
        if not mac:
            return {"status": "error", "message": "mac_address required"}
        fields = (
            data.get("alias", ""),
            int(data.get("trusted", 0)),
            int(data.get("watch", 1)),
            data.get("expected_ips", ""),
            data.get("expected_ports", ""),
            data.get("notes", ""),
        )
        self.shared_data.db.execute(
            """INSERT INTO sentinel_devices
               (mac_address, alias, trusted, watch, expected_ips, expected_ports, notes)
               VALUES (?, ?, ?, ?, ?, ?, ?)
               ON CONFLICT(mac_address)
               DO UPDATE SET alias=?, trusted=?, watch=?,
                             expected_ips=?, expected_ports=?, notes=?,
                             last_seen=CURRENT_TIMESTAMP""",
            (mac,) + fields + fields
        )
        return {"status": "ok"}
    except Exception as e:
        return {"status": "error", "message": str(e)}
|
||||
|
||||
def save_notifier_config(self, data: Dict) -> Dict:
    """POST /api/sentinel/notifiers — save notification channel config."""
    try:
        notifiers = data.get("notifiers", {})
        # Persist on shared_data so the config outlives this request.
        self.shared_data.sentinel_notifiers = notifiers
        engine = self._engine
        if engine:
            # Re-register notifier instances with the new settings.
            self._setup_notifiers(engine, notifiers)
        return {"status": "ok"}
    except Exception as e:
        return {"status": "error", "message": str(e)}
|
||||
|
||||
def _setup_notifiers(self, engine, config: Dict):
    """Register notifier instances on *engine* from a config dict.

    Only channels whose key is present (and truthy) are registered.
    """
    # Imported lazily: the web layer must not hard-depend on sentinel.
    from sentinel import DiscordNotifier, WebhookNotifier, EmailNotifier

    discord_url = config.get("discord_webhook")
    if discord_url:
        engine.register_notifier("notify_discord", DiscordNotifier(discord_url))

    webhook_url = config.get("webhook_url")
    if webhook_url:
        engine.register_notifier(
            "notify_webhook",
            WebhookNotifier(webhook_url, config.get("webhook_headers", {})),
        )

    if config.get("email_smtp_host"):
        engine.register_notifier("notify_email", EmailNotifier(
            smtp_host=config["email_smtp_host"],
            smtp_port=int(config.get("email_smtp_port", 587)),
            username=config.get("email_username", ""),
            password=config.get("email_password", ""),
            from_addr=config.get("email_from", ""),
            to_addrs=config.get("email_to", []),
            use_tls=config.get("email_tls", True),
        ))
|
||||
|
||||
# ── Helpers ─────────────────────────────────────────────────────────
|
||||
|
||||
def _send_json(self, handler, data, status=200):
    """Write *data* as a JSON response on *handler* with *status*."""
    body = json.dumps(data).encode("utf-8")
    handler.send_response(status)
    handler.send_header("Content-Type", "application/json")
    handler.end_headers()
    handler.wfile.write(body)
|
||||
@@ -9,6 +9,7 @@ import subprocess
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, Optional
|
||||
from logger import Logger
|
||||
|
||||
@@ -22,68 +23,57 @@ class SystemUtils:
|
||||
self.logger = logger
|
||||
self.shared_data = shared_data
|
||||
|
||||
def _send_json(self, handler, data, status=200):
    """Send a JSON response (helper to reduce boilerplate)."""
    body = json.dumps(data).encode('utf-8')
    handler.send_response(status)
    handler.send_header("Content-type", "application/json")
    handler.end_headers()
    handler.wfile.write(body)
|
||||
|
||||
def reboot_system(self, handler):
    """Reboot the system via sudo and report the outcome as JSON.

    This view contained both the pre- and post-refactor bodies (diff
    residue); keep only the final list-argv version: shell=False avoids
    quoting/injection issues, and _send_json removes the boilerplate.
    """
    try:
        subprocess.Popen(["sudo", "reboot"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
        self._send_json(handler, {"status": "success", "message": "System is rebooting"})
    except Exception as e:
        self._send_json(handler, {"status": "error", "message": str(e)}, 500)
|
||||
|
||||
def shutdown_system(self, handler):
    """Shut the system down via sudo and report the outcome as JSON.

    Reconstructed to the final revision (list argv, shell=False,
    _send_json helper), dropping the superseded shell-string body.
    """
    try:
        subprocess.Popen(["sudo", "shutdown", "now"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
        self._send_json(handler, {"status": "success", "message": "System is shutting down"})
    except Exception as e:
        self._send_json(handler, {"status": "error", "message": str(e)}, 500)
|
||||
|
||||
def restart_bjorn_service(self, handler):
    """Restart the Bjorn systemd service and report the outcome as JSON.

    Raises:
        TypeError: if *handler* is not an HTTP handler (no send_response).
    """
    if not hasattr(handler, 'send_response'):
        raise TypeError("Invalid handler passed. Expected an HTTP handler.")

    try:
        # List argv (shell=False) avoids shell quoting/injection issues.
        subprocess.Popen(["sudo", "systemctl", "restart", "bjorn.service"], stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
        self._send_json(handler, {"status": "success", "message": "Bjorn service restarted successfully"})
    except Exception as e:
        self._send_json(handler, {"status": "error", "message": str(e)}, 500)
|
||||
|
||||
def clear_logs(self, handler):
    """Clear the contents of data/logs without shelling out.

    Reconstructed to the final revision: per-entry removal via os.scandir
    replaces the old `sudo rm -rf` shell call. Individual removal
    failures are logged and skipped rather than aborting the sweep.
    """
    try:
        logs_dir = os.path.join(self.shared_data.current_dir, "data", "logs")
        if os.path.isdir(logs_dir):
            for entry in os.scandir(logs_dir):
                try:
                    if entry.is_file() or entry.is_symlink():
                        os.remove(entry.path)
                    elif entry.is_dir():
                        import shutil
                        shutil.rmtree(entry.path)
                except OSError as e:
                    self.logger.warning(f"Failed to remove {entry.path}: {e}")
        self._send_json(handler, {"status": "success", "message": "Logs cleared successfully"})
    except Exception as e:
        self._send_json(handler, {"status": "error", "message": str(e)}, 500)
|
||||
|
||||
def initialize_db(self, handler):
|
||||
"""Initialize or prepare database schema."""
|
||||
@@ -91,15 +81,9 @@ class SystemUtils:
|
||||
self.shared_data.sync_actions_to_database()
|
||||
self.shared_data.initialize_database()
|
||||
self.shared_data.initialize_statistics()
|
||||
handler.send_response(200)
|
||||
handler.send_header("Content-type", "application/json")
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status": "success", "message": "Database initialized successfully"}).encode("utf-8"))
|
||||
self._send_json(handler, {"status": "success", "message": "Database initialized successfully"})
|
||||
except Exception as e:
|
||||
handler.send_response(500)
|
||||
handler.send_header("Content-type", "application/json")
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode("utf-8"))
|
||||
self._send_json(handler, {"status": "error", "message": str(e)}, 500)
|
||||
|
||||
def erase_bjorn_memories(self, handler):
|
||||
"""Erase all Bjorn-related memories and restart service."""
|
||||
@@ -148,15 +132,9 @@ class SystemUtils:
|
||||
db.update_livestats(0, 0, 0, 0)
|
||||
if restart:
|
||||
self.restart_bjorn_service(handler)
|
||||
handler.send_response(200)
|
||||
handler.send_header("Content-type", "application/json")
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status": "success", "message": "NetKB cleared in database"}).encode("utf-8"))
|
||||
self._send_json(handler, {"status": "success", "message": "NetKB cleared in database"})
|
||||
except Exception as e:
|
||||
handler.send_response(500)
|
||||
handler.send_header("Content-type", "application/json")
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode("utf-8"))
|
||||
self._send_json(handler, {"status": "error", "message": str(e)}, 500)
|
||||
|
||||
def clear_livestatus(self, handler, restart=True):
|
||||
"""Clear live status counters."""
|
||||
@@ -164,15 +142,9 @@ class SystemUtils:
|
||||
self.shared_data.db.update_livestats(0, 0, 0, 0)
|
||||
if restart:
|
||||
self.restart_bjorn_service(handler)
|
||||
handler.send_response(200)
|
||||
handler.send_header("Content-type", "application/json")
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status": "success", "message": "Livestatus counters reset"}).encode("utf-8"))
|
||||
self._send_json(handler, {"status": "success", "message": "Livestatus counters reset"})
|
||||
except Exception as e:
|
||||
handler.send_response(500)
|
||||
handler.send_header("Content-type", "application/json")
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode("utf-8"))
|
||||
self._send_json(handler, {"status": "error", "message": str(e)}, 500)
|
||||
|
||||
def clear_actions_file(self, handler, restart=True):
|
||||
"""Clear actions table and resynchronize from modules."""
|
||||
@@ -181,15 +153,9 @@ class SystemUtils:
|
||||
self.shared_data.generate_actions_json()
|
||||
if restart:
|
||||
self.restart_bjorn_service(handler)
|
||||
handler.send_response(200)
|
||||
handler.send_header("Content-type", "application/json")
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status": "success", "message": "Actions table refreshed"}).encode("utf-8"))
|
||||
self._send_json(handler, {"status": "success", "message": "Actions table refreshed"})
|
||||
except Exception as e:
|
||||
handler.send_response(500)
|
||||
handler.send_header("Content-type", "application/json")
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode("utf-8"))
|
||||
self._send_json(handler, {"status": "error", "message": str(e)}, 500)
|
||||
|
||||
def clear_shared_config_json(self, handler, restart=True):
|
||||
"""Reset configuration to defaults."""
|
||||
@@ -198,15 +164,9 @@ class SystemUtils:
|
||||
self.shared_data.save_config()
|
||||
if restart:
|
||||
self.restart_bjorn_service(handler)
|
||||
handler.send_response(200)
|
||||
handler.send_header("Content-type", "application/json")
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status": "success", "message": "Configuration reset to defaults"}).encode("utf-8"))
|
||||
self._send_json(handler, {"status": "success", "message": "Configuration reset to defaults"})
|
||||
except Exception as e:
|
||||
handler.send_response(500)
|
||||
handler.send_header("Content-type", "application/json")
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode("utf-8"))
|
||||
self._send_json(handler, {"status": "error", "message": str(e)}, 500)
|
||||
|
||||
def save_configuration(self, data):
|
||||
"""Save configuration to database."""
|
||||
@@ -258,7 +218,9 @@ class SystemUtils:
|
||||
try:
|
||||
log_file_path = self.shared_data.webconsolelog
|
||||
if not os.path.exists(log_file_path):
|
||||
subprocess.Popen(f"sudo tail -f /home/bjorn/Bjorn/data/logs/* > {log_file_path}", shell=True)
|
||||
# Create the log file if it doesn't exist; tail aggregation
|
||||
# is handled by the bjorn service, not by shell piping.
|
||||
Path(log_file_path).touch(exist_ok=True)
|
||||
|
||||
with open(log_file_path, 'r') as log_file:
|
||||
log_lines = log_file.readlines()
|
||||
@@ -390,3 +352,116 @@ class SystemUtils:
|
||||
return
|
||||
except Exception as e:
|
||||
self.logger.error(f"check_console_autostart failed: {e}")
|
||||
|
||||
# ----------------------------------------------------------------
|
||||
# EPD Layout API (EPD-01 / EPD-02)
|
||||
# ----------------------------------------------------------------
|
||||
|
||||
def epd_get_layout(self, handler):
    """GET /api/epd/layout — return current layout JSON.

    Optional query param: ?epd_type=epd2in7
    If provided, returns the layout for that EPD type (custom or built-in)
    without changing the active device layout.
    """
    try:
        from urllib.parse import parse_qs, urlparse
        from display_layout import BUILTIN_LAYOUTS
        query = parse_qs(urlparse(handler.path).query)
        requested_type = query.get('epd_type', [''])[0]

        layout = getattr(self.shared_data, 'display_layout', None)
        if layout is None:
            # 503: layout engine is created at startup; absence means the
            # display subsystem never initialised.
            self._send_json(handler, {"status": "error", "message": "Layout engine not initialised"}, 503)
            return

        if requested_type and requested_type != self.shared_data.config.get('epd_type', ''):
            # Return layout for the requested type without modifying active layout.
            # A custom (user-saved) layout file takes precedence over built-ins.
            custom_path = os.path.join(layout._custom_dir, f'{requested_type}.json')
            if os.path.isfile(custom_path):
                import json as _json
                with open(custom_path, 'r') as f:
                    self._send_json(handler, _json.load(f))
                return
            # Fallback to built-in; variant names like "epd2in13_V4" also try
            # the base family name before the dictionary lookup gives up.
            base = requested_type.split('_')[0] if '_' in requested_type else requested_type
            builtin = BUILTIN_LAYOUTS.get(requested_type) or BUILTIN_LAYOUTS.get(base)
            if builtin:
                self._send_json(handler, builtin)
                return
            self._send_json(handler, {"status": "error", "message": f"Unknown EPD type: {requested_type}"}, 404)
            return

        # Default: serve the active device's layout.
        self._send_json(handler, layout.to_dict())
    except Exception as e:
        self._send_json(handler, {"status": "error", "message": str(e)}, 500)
|
||||
|
||||
def epd_save_layout(self, handler, data):
    """POST /api/epd/layout — validate and persist a custom layout.

    The body must be a dict containing 'meta' and 'elements'; the layout
    is stored per EPD type when meta.epd_type is provided.
    """
    layout = getattr(self.shared_data, 'display_layout', None)
    if layout is None:
        self._send_json(handler, {"status": "error", "message": "Layout engine not initialised"}, 503)
        return
    if not isinstance(data, dict) or 'meta' not in data or 'elements' not in data:
        self._send_json(handler, {"status": "error", "message": "Invalid layout: must contain 'meta' and 'elements'"}, 400)
        return
    try:
        epd_type = data.get('meta', {}).get('epd_type') or None
        layout.save_custom(data, epd_type=epd_type)
        self._send_json(handler, {"status": "success", "message": "Layout saved"})
    except Exception as e:
        self._send_json(handler, {"status": "error", "message": str(e)}, 500)
|
||||
|
||||
def epd_reset_layout(self, handler, data):
    """POST /api/epd/layout/reset — drop the custom layout, restoring the built-in default.

    An optional JSON body key 'epd_type' restricts the reset to one
    display type; otherwise the engine resets its active type.
    Responds 503 when no layout engine is available, 500 on failure.
    """
    try:
        engine = getattr(self.shared_data, 'display_layout', None)
        if engine is None:
            self._send_json(handler, {"status": "error", "message": "Layout engine not initialised"}, 503)
            return
        # Only trust a dict body; any other payload means "reset the active type".
        target_type = data.get('epd_type') if isinstance(data, dict) else None
        engine.reset_to_default(epd_type=target_type)
        self._send_json(handler, {"status": "success", "message": "Layout reset to default"})
    except Exception as e:
        self._send_json(handler, {"status": "error", "message": str(e)}, 500)
|
||||
|
||||
def epd_list_layouts(self, handler):
    """GET /api/epd/layouts — list available EPD types and their layouts.

    Response shape:
        {
          "current_epd_type": "<active type from config>",
          "layouts": {
            "<epd_type>": {
              "meta": {...},        # layout metadata (builtin or custom)
              "builtin": bool,      # True when shipped in BUILTIN_LAYOUTS
              "has_custom": bool,   # True when a custom override file exists
              "custom_meta": {...}  # only present when has_custom is True
            }, ...
          }
        }
    Failures are reported as {"status": "error", ...} with HTTP 500.
    """
    try:
        from display_layout import BUILTIN_LAYOUTS

        # Seed with every built-in layout. "has_custom" defaults to False so
        # all entries share one schema (previously only custom-only entries
        # carried the key, forcing clients to use defensive lookups).
        result = {}
        for epd_type, layout_dict in BUILTIN_LAYOUTS.items():
            result[epd_type] = {
                "meta": layout_dict.get("meta", {}),
                "builtin": True,
                "has_custom": False,
            }

        # Overlay any custom layout files the user has saved.
        layout = getattr(self.shared_data, 'display_layout', None)
        if layout:
            custom_dir = layout._custom_dir
            if os.path.isdir(custom_dir):
                for fname in os.listdir(custom_dir):
                    if not fname.endswith('.json'):
                        continue
                    epd_name = fname[:-5]  # strip trailing ".json"
                    try:
                        with open(os.path.join(custom_dir, fname), 'r') as f:
                            custom_data = json.load(f)
                        if epd_name in result:
                            result[epd_name]["has_custom"] = True
                            result[epd_name]["custom_meta"] = custom_data.get("meta", {})
                        else:
                            result[epd_name] = {
                                "meta": custom_data.get("meta", {}),
                                "builtin": False,
                                "has_custom": True,
                            }
                    except Exception:
                        # Best-effort: an unreadable or malformed custom file
                        # must not break the listing of the other layouts.
                        pass

        # Tell the client which type is currently active.
        current_type = self.shared_data.config.get('epd_type', 'epd2in13_V4')
        self._send_json(handler, {
            "current_epd_type": current_type,
            "layouts": result,
        })
    except Exception as e:
        self._send_json(handler, {"status": "error", "message": str(e)}, 500)
|
||||
|
||||
Reference in New Issue
Block a user