mirror of
https://github.com/infinition/Bjorn.git
synced 2026-03-19 18:20:24 +00:00
feat: Add login page with dynamic RGB effects and password toggle functionality
feat: Implement package management utilities with JSON endpoints for listing and uninstalling packages
feat: Create plugin management utilities with endpoints for listing, configuring, and installing plugins
feat: Develop schedule and trigger management utilities with CRUD operations for schedules and triggers
This commit is contained in:
@@ -1,5 +1,4 @@
|
||||
# db_utils/__init__.py
|
||||
# Database utilities package
|
||||
"""__init__.py - Database utilities package."""
|
||||
|
||||
from .base import DatabaseBase
|
||||
from .config import ConfigOps
|
||||
@@ -17,6 +16,8 @@ from .comments import CommentOps
|
||||
from .agents import AgentOps
|
||||
from .studio import StudioOps
|
||||
from .webenum import WebEnumOps
|
||||
from .schedules import ScheduleOps
|
||||
from .packages import PackageOps
|
||||
|
||||
__all__ = [
|
||||
'DatabaseBase',
|
||||
@@ -35,4 +36,6 @@ __all__ = [
|
||||
'AgentOps',
|
||||
'StudioOps',
|
||||
'WebEnumOps',
|
||||
'ScheduleOps',
|
||||
'PackageOps',
|
||||
]
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
# db_utils/actions.py
|
||||
# Action definition and management operations
|
||||
"""actions.py - Action definition and management operations."""
|
||||
|
||||
import json
|
||||
import sqlite3
|
||||
@@ -256,7 +255,7 @@ class ActionOps:
|
||||
out = []
|
||||
for r in rows:
|
||||
cls = r["b_class"]
|
||||
enabled = int(r["b_enabled"]) # 0 reste 0
|
||||
enabled = int(r["b_enabled"])
|
||||
out.append({
|
||||
"name": cls,
|
||||
"image": f"/actions/actions_icons/{cls}.png",
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
# db_utils/agents.py
|
||||
# C2 (Command & Control) agent management operations
|
||||
"""agents.py - C2 agent management operations."""
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
# db_utils/backups.py
|
||||
# Backup registry and management operations
|
||||
"""backups.py - Backup registry and management operations."""
|
||||
|
||||
from typing import Any, Dict, List
|
||||
import logging
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# db_utils/base.py
|
||||
# Base database connection and transaction management
|
||||
"""base.py - Base database connection and transaction management."""
|
||||
|
||||
import re
|
||||
import sqlite3
|
||||
import time
|
||||
from contextlib import contextmanager
|
||||
@@ -12,6 +12,16 @@ from logger import Logger
|
||||
|
||||
logger = Logger(name="db_utils.base", level=logging.DEBUG)
|
||||
|
||||
# Regex for valid SQLite identifiers: alphanumeric + underscore, must start with letter/underscore
|
||||
_SAFE_IDENT_RE = re.compile(r'^[A-Za-z_][A-Za-z0-9_]*$')
|
||||
|
||||
|
||||
def _validate_identifier(name: str, kind: str = "identifier") -> str:
|
||||
"""Validate that a SQL identifier (table/column name) is safe against injection."""
|
||||
if not name or not _SAFE_IDENT_RE.match(name):
|
||||
raise ValueError(f"Invalid SQL {kind}: {name!r}")
|
||||
return name
|
||||
|
||||
|
||||
class DatabaseBase:
|
||||
"""
|
||||
@@ -120,12 +130,15 @@ class DatabaseBase:
|
||||
|
||||
def _column_names(self, table: str) -> List[str]:
|
||||
"""Return a list of column names for a given table (empty if table missing)"""
|
||||
_validate_identifier(table, "table name")
|
||||
with self._cursor() as c:
|
||||
c.execute(f"PRAGMA table_info({table});")
|
||||
return [r[1] for r in c.fetchall()]
|
||||
|
||||
|
||||
def _ensure_column(self, table: str, column: str, ddl: str) -> None:
|
||||
"""Add a column with the provided DDL if it does not exist yet"""
|
||||
_validate_identifier(table, "table name")
|
||||
_validate_identifier(column, "column name")
|
||||
cols = self._column_names(table) if self._table_exists(table) else []
|
||||
if column not in cols:
|
||||
self.execute(f"ALTER TABLE {table} ADD COLUMN {ddl};")
|
||||
@@ -134,13 +147,15 @@ class DatabaseBase:
|
||||
# MAINTENANCE OPERATIONS
|
||||
# =========================================================================
|
||||
|
||||
_VALID_CHECKPOINT_MODES = {"PASSIVE", "FULL", "RESTART", "TRUNCATE"}
|
||||
|
||||
def checkpoint(self, mode: str = "TRUNCATE") -> Tuple[int, int, int]:
|
||||
"""
|
||||
Force a WAL checkpoint. Returns (busy, log_frames, checkpointed_frames).
|
||||
mode ∈ {PASSIVE, FULL, RESTART, TRUNCATE}
|
||||
"""
|
||||
mode = (mode or "PASSIVE").upper()
|
||||
if mode not in {"PASSIVE", "FULL", "RESTART", "TRUNCATE"}:
|
||||
if mode not in self._VALID_CHECKPOINT_MODES:
|
||||
mode = "PASSIVE"
|
||||
with self._cursor() as c:
|
||||
c.execute(f"PRAGMA wal_checkpoint({mode});")
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
"""
|
||||
Bifrost DB operations — networks, handshakes, epochs, activity, peers, plugin data.
|
||||
"""
|
||||
"""bifrost.py - Networks, handshakes, epochs, activity, peers, plugin data."""
|
||||
import logging
|
||||
|
||||
from logger import Logger
|
||||
@@ -89,7 +87,7 @@ class BifrostOps:
|
||||
"ON bifrost_activity(timestamp DESC)"
|
||||
)
|
||||
|
||||
# Peers (mesh networking — Phase 2)
|
||||
# Peers (mesh networking - Phase 2)
|
||||
self.base.execute("""
|
||||
CREATE TABLE IF NOT EXISTS bifrost_peers (
|
||||
peer_id TEXT PRIMARY KEY,
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
# db_utils/comments.py
|
||||
# Comment and status message operations
|
||||
"""comments.py - Comment and status message operations."""
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
# db_utils/config.py
|
||||
# Configuration management operations
|
||||
"""config.py - Configuration management operations."""
|
||||
|
||||
import json
|
||||
import ast
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
# db_utils/credentials.py
|
||||
# Credential storage and management operations
|
||||
"""credentials.py - Credential storage and management operations."""
|
||||
|
||||
import json
|
||||
import sqlite3
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
# db_utils/hosts.py
|
||||
# Host and network device management operations
|
||||
"""hosts.py - Host and network device management operations."""
|
||||
|
||||
import time
|
||||
import sqlite3
|
||||
from typing import Any, Dict, Iterable, List, Optional
|
||||
from db_utils.base import _validate_identifier
|
||||
import logging
|
||||
|
||||
from logger import Logger
|
||||
@@ -428,6 +428,7 @@ class HostOps:
|
||||
if tname == 'hosts':
|
||||
continue
|
||||
try:
|
||||
_validate_identifier(tname, "table name")
|
||||
cur.execute(f"PRAGMA table_info({tname})")
|
||||
cols = [r[1].lower() for r in cur.fetchall()]
|
||||
if 'mac_address' in cols:
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
"""
|
||||
Loki DB operations — HID scripts and job tracking.
|
||||
"""
|
||||
"""loki.py - HID script and job tracking operations."""
|
||||
import logging
|
||||
|
||||
from logger import Logger
|
||||
|
||||
54
db_utils/packages.py
Normal file
54
db_utils/packages.py
Normal file
@@ -0,0 +1,54 @@
|
||||
"""packages.py - Custom package tracking operations."""
|
||||
|
||||
import logging
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from logger import Logger
|
||||
|
||||
logger = Logger(name="db_utils.packages", level=logging.DEBUG)
|
||||
|
||||
|
||||
class PackageOps:
    """Operations for tracking user-installed (custom) packages."""

    def __init__(self, base):
        # `base` is the shared database wrapper providing execute/query helpers.
        self.base = base

    def create_tables(self):
        """Create the custom_packages table if it does not exist yet."""
        ddl = """
            CREATE TABLE IF NOT EXISTS custom_packages (
                name TEXT PRIMARY KEY,
                version TEXT,
                installed_at TEXT DEFAULT CURRENT_TIMESTAMP,
                installed_by TEXT DEFAULT 'user'
            );
        """
        self.base.execute(ddl)
        logger.debug("Packages table created/verified")

    # =========================================================================
    # PACKAGE OPERATIONS
    # =========================================================================

    def add_package(self, name: str, version: str) -> None:
        """Record (or overwrite) a package entry keyed by name."""
        sql = """
            INSERT OR REPLACE INTO custom_packages (name, version)
            VALUES (?, ?);
        """
        self.base.execute(sql, (name, version))

    def remove_package(self, name: str) -> None:
        """Drop the record for the named package, if present."""
        self.base.execute("DELETE FROM custom_packages WHERE name=?;", (name,))

    def list_packages(self) -> List[Dict[str, Any]]:
        """Return every tracked package, ordered alphabetically by name."""
        return self.base.query("SELECT * FROM custom_packages ORDER BY name;")

    def get_package(self, name: str) -> Optional[Dict[str, Any]]:
        """Fetch a single package row by name, or None when missing."""
        return self.base.query_one(
            "SELECT * FROM custom_packages WHERE name=?;", (name,)
        )
|
||||
137
db_utils/plugins.py
Normal file
137
db_utils/plugins.py
Normal file
@@ -0,0 +1,137 @@
|
||||
"""plugins.py - Plugin configuration and hook tracking operations."""
|
||||
|
||||
import json
|
||||
import logging
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from logger import Logger
|
||||
|
||||
logger = Logger(name="db_utils.plugins", level=logging.DEBUG)
|
||||
|
||||
|
||||
class PluginOps:
    """Plugin configuration and hook registration operations."""

    def __init__(self, base):
        # `base` is the shared database wrapper (execute/query/query_one/transaction).
        self.base = base

    def create_tables(self):
        """Create plugin_configs and plugin_hooks tables."""

        self.base.execute("""
            CREATE TABLE IF NOT EXISTS plugin_configs (
                plugin_id TEXT PRIMARY KEY,
                enabled INTEGER DEFAULT 1,
                config_json TEXT DEFAULT '{}',
                meta_json TEXT DEFAULT '{}',
                installed_at TEXT DEFAULT CURRENT_TIMESTAMP,
                updated_at TEXT DEFAULT CURRENT_TIMESTAMP
            );
        """)

        self.base.execute("""
            CREATE TABLE IF NOT EXISTS plugin_hooks (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                plugin_id TEXT NOT NULL,
                hook_name TEXT NOT NULL,
                UNIQUE(plugin_id, hook_name),
                FOREIGN KEY (plugin_id) REFERENCES plugin_configs(plugin_id)
                    ON DELETE CASCADE
            );
        """)

        self.base.execute(
            "CREATE INDEX IF NOT EXISTS idx_plugin_hooks_hook "
            "ON plugin_hooks(hook_name);"
        )

        logger.debug("Plugin tables created/verified")

    # ── JSON helpers ─────────────────────────────────────────────────

    @staticmethod
    def _decode_json_fields(row: Dict[str, Any]) -> Dict[str, Any]:
        """Attach parsed 'config'/'meta' dicts from the *_json columns.

        Malformed or missing JSON degrades to an empty dict instead of
        raising, so a single corrupt row cannot break listing endpoints.
        Mutates and returns `row`.
        """
        for src, dst in (("config_json", "config"), ("meta_json", "meta")):
            try:
                row[dst] = json.loads(row.get(src) or "{}")
            except Exception:
                row[dst] = {}
        return row

    # ── Config CRUD ──────────────────────────────────────────────────

    def get_plugin_config(self, plugin_id: str) -> Optional[Dict[str, Any]]:
        """Get plugin config row. Returns dict with parsed config_json and meta."""
        row = self.base.query_one(
            "SELECT * FROM plugin_configs WHERE plugin_id=?;", (plugin_id,)
        )
        if row:
            self._decode_json_fields(row)
        return row

    def save_plugin_config(self, plugin_id: str, config: dict) -> None:
        """Update config_json for a plugin."""
        self.base.execute("""
            UPDATE plugin_configs
            SET config_json = ?, updated_at = CURRENT_TIMESTAMP
            WHERE plugin_id = ?;
        """, (json.dumps(config, ensure_ascii=False), plugin_id))

    def upsert_plugin(self, plugin_id: str, enabled: int, config: dict, meta: dict) -> None:
        """Insert or update a plugin record.

        On conflict only enabled/meta_json are refreshed: config_json is
        deliberately left untouched so a reinstall does not clobber the
        user's saved configuration (use save_plugin_config for that).
        """
        self.base.execute("""
            INSERT INTO plugin_configs (plugin_id, enabled, config_json, meta_json)
            VALUES (?, ?, ?, ?)
            ON CONFLICT(plugin_id) DO UPDATE SET
                enabled = excluded.enabled,
                meta_json = excluded.meta_json,
                updated_at = CURRENT_TIMESTAMP;
        """, (plugin_id, enabled, json.dumps(config, ensure_ascii=False),
              json.dumps(meta, ensure_ascii=False)))

    def delete_plugin(self, plugin_id: str) -> None:
        """Delete plugin and its hooks (CASCADE)."""
        self.base.execute("DELETE FROM plugin_configs WHERE plugin_id=?;", (plugin_id,))

    def list_plugins(self) -> List[Dict[str, Any]]:
        """List all registered plugins with parsed 'config'/'meta' attached."""
        rows = self.base.query("SELECT * FROM plugin_configs ORDER BY plugin_id;")
        for r in rows:
            self._decode_json_fields(r)
        return rows

    def set_plugin_enabled(self, plugin_id: str, enabled: bool) -> None:
        """Toggle plugin enabled state."""
        self.base.execute(
            "UPDATE plugin_configs SET enabled=?, updated_at=CURRENT_TIMESTAMP WHERE plugin_id=?;",
            (1 if enabled else 0, plugin_id)
        )

    # ── Hook CRUD ────────────────────────────────────────────────────

    def set_plugin_hooks(self, plugin_id: str, hooks: List[str]) -> None:
        """Replace all hooks for a plugin (delete + reinsert in one transaction)."""
        with self.base.transaction():
            self.base.execute("DELETE FROM plugin_hooks WHERE plugin_id=?;", (plugin_id,))
            for h in hooks:
                self.base.execute(
                    "INSERT OR IGNORE INTO plugin_hooks(plugin_id, hook_name) VALUES(?,?);",
                    (plugin_id, h)
                )

    def get_hooks_for_event(self, hook_name: str) -> List[str]:
        """Get all plugin_ids subscribed to a given hook."""
        rows = self.base.query(
            "SELECT plugin_id FROM plugin_hooks WHERE hook_name=?;", (hook_name,)
        )
        return [r["plugin_id"] for r in rows]

    def get_hooks_for_plugin(self, plugin_id: str) -> List[str]:
        """Get all hooks a plugin subscribes to."""
        rows = self.base.query(
            "SELECT hook_name FROM plugin_hooks WHERE plugin_id=?;", (plugin_id,)
        )
        return [r["hook_name"] for r in rows]
|
||||
@@ -1,5 +1,4 @@
|
||||
# db_utils/queue.py
|
||||
# Action queue management operations
|
||||
"""queue.py - Action queue management operations."""
|
||||
|
||||
import json
|
||||
import sqlite3
|
||||
|
||||
244
db_utils/schedules.py
Normal file
244
db_utils/schedules.py
Normal file
@@ -0,0 +1,244 @@
|
||||
"""schedules.py - Script scheduling and trigger operations."""
|
||||
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from logger import Logger
|
||||
|
||||
logger = Logger(name="db_utils.schedules", level=logging.DEBUG)
|
||||
|
||||
|
||||
class ScheduleOps:
    """Script schedule and trigger management operations"""

    # Columns callers may modify via update_schedule(). Any other key is
    # ignored so caller-supplied names are never interpolated into SQL —
    # same whitelist pattern used by the studio/sentinel update paths.
    _SCHEDULE_UPDATABLE = frozenset({
        'script_name', 'schedule_type', 'interval_seconds', 'run_at', 'args',
        'conditions', 'enabled', 'last_run_at', 'next_run_at', 'run_count',
        'last_status', 'last_error',
    })

    # Columns callers may modify via update_trigger().
    _TRIGGER_UPDATABLE = frozenset({
        'script_name', 'trigger_name', 'conditions', 'args', 'enabled',
        'last_fired_at', 'fire_count', 'cooldown_seconds',
    })

    def __init__(self, base):
        # `base` is the shared database wrapper (execute/query/query_one).
        self.base = base

    @staticmethod
    def _future_ts(seconds: int) -> str:
        """Return a UTC 'YYYY-MM-DD HH:MM:SS' timestamp `seconds` from now."""
        return (datetime.utcnow() + timedelta(seconds=seconds)).strftime('%Y-%m-%d %H:%M:%S')

    def create_tables(self):
        """Create script_schedules and script_triggers tables"""
        self.base.execute("""
            CREATE TABLE IF NOT EXISTS script_schedules (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                script_name TEXT NOT NULL,
                schedule_type TEXT NOT NULL DEFAULT 'recurring',
                interval_seconds INTEGER,
                run_at TEXT,
                args TEXT DEFAULT '',
                conditions TEXT,
                enabled INTEGER DEFAULT 1,
                last_run_at TEXT,
                next_run_at TEXT,
                run_count INTEGER DEFAULT 0,
                last_status TEXT,
                last_error TEXT,
                created_at TEXT DEFAULT CURRENT_TIMESTAMP,
                updated_at TEXT DEFAULT CURRENT_TIMESTAMP
            );
        """)
        self.base.execute("""
            CREATE INDEX IF NOT EXISTS idx_sched_next
            ON script_schedules(next_run_at) WHERE enabled=1;
        """)
        self.base.execute("""
            CREATE TABLE IF NOT EXISTS script_triggers (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                script_name TEXT NOT NULL,
                trigger_name TEXT NOT NULL,
                conditions TEXT NOT NULL,
                args TEXT DEFAULT '',
                enabled INTEGER DEFAULT 1,
                last_fired_at TEXT,
                fire_count INTEGER DEFAULT 0,
                cooldown_seconds INTEGER DEFAULT 60,
                created_at TEXT DEFAULT CURRENT_TIMESTAMP
            );
        """)
        self.base.execute("""
            CREATE INDEX IF NOT EXISTS idx_trig_enabled
            ON script_triggers(enabled) WHERE enabled=1;
        """)
        logger.debug("Schedule and trigger tables created/verified")

    # =========================================================================
    # SCHEDULE OPERATIONS
    # =========================================================================

    def add_schedule(self, script_name: str, schedule_type: str,
                     interval_seconds: Optional[int] = None,
                     run_at: Optional[str] = None, args: str = '',
                     conditions: Optional[str] = None) -> int:
        """Insert a new schedule entry and return its id.

        For 'recurring' schedules with an interval, next_run_at is one
        interval from now; otherwise run_at (if given) is used verbatim.
        """
        next_run_at = None
        if schedule_type == 'recurring' and interval_seconds:
            next_run_at = self._future_ts(interval_seconds)
        elif run_at:
            next_run_at = run_at

        self.base.execute("""
            INSERT INTO script_schedules
            (script_name, schedule_type, interval_seconds, run_at, args, conditions, next_run_at)
            VALUES (?, ?, ?, ?, ?, ?, ?);
        """, (script_name, schedule_type, interval_seconds, run_at, args, conditions, next_run_at))

        rows = self.base.query("SELECT last_insert_rowid() AS id;")
        return rows[0]['id'] if rows else 0

    def update_schedule(self, id: int, **kwargs) -> None:
        """Update whitelisted schedule fields; recompute next_run_at if the
        interval changes. Keys outside _SCHEDULE_UPDATABLE are ignored to
        keep unvetted identifiers out of the SQL text.
        """
        sets = []
        params = []
        for key, value in kwargs.items():
            if key not in self._SCHEDULE_UPDATABLE:
                continue  # silently drop unknown/disallowed column names
            sets.append(f"{key}=?")
            params.append(value)
        if not sets:
            return
        sets.append("updated_at=datetime('now')")
        params.append(id)
        self.base.execute(
            f"UPDATE script_schedules SET {', '.join(sets)} WHERE id=?;",
            tuple(params)
        )
        # Recompute next_run_at if interval changed
        if 'interval_seconds' in kwargs:
            row = self.get_schedule(id)
            if row and row['schedule_type'] == 'recurring' and kwargs['interval_seconds']:
                self.base.execute(
                    "UPDATE script_schedules SET next_run_at=?, updated_at=datetime('now') WHERE id=?;",
                    (self._future_ts(kwargs['interval_seconds']), id)
                )

    def delete_schedule(self, id: int) -> None:
        """Delete a schedule by id"""
        self.base.execute("DELETE FROM script_schedules WHERE id=?;", (id,))

    def list_schedules(self, enabled_only: bool = False) -> List[Dict[str, Any]]:
        """List all schedules, optionally filtered to enabled only"""
        if enabled_only:
            return self.base.query(
                "SELECT * FROM script_schedules WHERE enabled=1 ORDER BY id;"
            )
        return self.base.query("SELECT * FROM script_schedules ORDER BY id;")

    def get_schedule(self, id: int) -> Optional[Dict[str, Any]]:
        """Get a single schedule by id"""
        return self.base.query_one(
            "SELECT * FROM script_schedules WHERE id=?;", (id,)
        )

    def get_due_schedules(self) -> List[Dict[str, Any]]:
        """Get enabled schedules whose next_run_at has passed and that are
        not already marked 'running'."""
        return self.base.query("""
            SELECT * FROM script_schedules
            WHERE enabled=1
              AND next_run_at <= datetime('now')
              AND (last_status IS NULL OR last_status != 'running')
            ORDER BY next_run_at;
        """)

    def mark_schedule_run(self, id: int, status: str, error: Optional[str] = None) -> None:
        """Mark a schedule as run, update counters, recompute next_run_at.

        Recurring schedules get a fresh next_run_at; oneshot schedules are
        disabled after their run.
        """
        row = self.get_schedule(id)
        if not row:
            return

        now = self._future_ts(0)

        if row['schedule_type'] == 'recurring' and row['interval_seconds']:
            next_run = self._future_ts(row['interval_seconds'])
            self.base.execute("""
                UPDATE script_schedules
                SET last_run_at=?, last_status=?, last_error=?,
                    run_count=run_count+1, next_run_at=?, updated_at=datetime('now')
                WHERE id=?;
            """, (now, status, error, next_run, id))
        else:
            # oneshot: disable after run
            self.base.execute("""
                UPDATE script_schedules
                SET last_run_at=?, last_status=?, last_error=?,
                    run_count=run_count+1, enabled=0, updated_at=datetime('now')
                WHERE id=?;
            """, (now, status, error, id))

    def toggle_schedule(self, id: int, enabled: bool) -> None:
        """Enable or disable a schedule"""
        self.base.execute(
            "UPDATE script_schedules SET enabled=?, updated_at=datetime('now') WHERE id=?;",
            (1 if enabled else 0, id)
        )

    # =========================================================================
    # TRIGGER OPERATIONS
    # =========================================================================

    def add_trigger(self, script_name: str, trigger_name: str, conditions: str,
                    args: str = '', cooldown_seconds: int = 60) -> int:
        """Insert a new trigger and return its id"""
        self.base.execute("""
            INSERT INTO script_triggers
            (script_name, trigger_name, conditions, args, cooldown_seconds)
            VALUES (?, ?, ?, ?, ?);
        """, (script_name, trigger_name, conditions, args, cooldown_seconds))

        rows = self.base.query("SELECT last_insert_rowid() AS id;")
        return rows[0]['id'] if rows else 0

    def update_trigger(self, id: int, **kwargs) -> None:
        """Update whitelisted trigger fields. Keys outside _TRIGGER_UPDATABLE
        are ignored to keep unvetted identifiers out of the SQL text."""
        sets = []
        params = []
        for key, value in kwargs.items():
            if key not in self._TRIGGER_UPDATABLE:
                continue  # silently drop unknown/disallowed column names
            sets.append(f"{key}=?")
            params.append(value)
        if not sets:
            return
        params.append(id)
        self.base.execute(
            f"UPDATE script_triggers SET {', '.join(sets)} WHERE id=?;",
            tuple(params)
        )

    def delete_trigger(self, id: int) -> None:
        """Delete a trigger by id"""
        self.base.execute("DELETE FROM script_triggers WHERE id=?;", (id,))

    def list_triggers(self, enabled_only: bool = False) -> List[Dict[str, Any]]:
        """List all triggers, optionally filtered to enabled only"""
        if enabled_only:
            return self.base.query(
                "SELECT * FROM script_triggers WHERE enabled=1 ORDER BY id;"
            )
        return self.base.query("SELECT * FROM script_triggers ORDER BY id;")

    def get_trigger(self, id: int) -> Optional[Dict[str, Any]]:
        """Get a single trigger by id"""
        return self.base.query_one(
            "SELECT * FROM script_triggers WHERE id=?;", (id,)
        )

    def get_active_triggers(self) -> List[Dict[str, Any]]:
        """Get all enabled triggers"""
        return self.base.query(
            "SELECT * FROM script_triggers WHERE enabled=1 ORDER BY id;"
        )

    def mark_trigger_fired(self, id: int) -> None:
        """Record that a trigger has fired (timestamp + fire counter)"""
        self.base.execute("""
            UPDATE script_triggers
            SET last_fired_at=datetime('now'), fire_count=fire_count+1
            WHERE id=?;
        """, (id,))

    def is_trigger_on_cooldown(self, id: int) -> bool:
        """Check if a trigger is still within its cooldown period"""
        row = self.base.query_one("""
            SELECT 1 AS on_cooldown FROM script_triggers
            WHERE id=?
              AND last_fired_at IS NOT NULL
              AND datetime(last_fired_at, '+' || cooldown_seconds || ' seconds') > datetime('now');
        """, (id,))
        return row is not None
|
||||
@@ -1,5 +1,4 @@
|
||||
# db_utils/scripts.py
|
||||
# Script and project metadata operations
|
||||
"""scripts.py - Script and project metadata operations."""
|
||||
|
||||
from typing import Any, Dict, List, Optional
|
||||
import logging
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
"""
|
||||
Sentinel DB operations — events, rules, known devices baseline.
|
||||
"""
|
||||
"""sentinel.py - Events, rules, and known devices baseline."""
|
||||
import json
|
||||
import logging
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from logger import Logger
|
||||
from db_utils.base import _validate_identifier
|
||||
|
||||
logger = Logger(name="db_utils.sentinel", level=logging.DEBUG)
|
||||
|
||||
@@ -17,7 +16,7 @@ class SentinelOps:
|
||||
def create_tables(self):
|
||||
"""Create all Sentinel tables."""
|
||||
|
||||
# Known device baselines — MAC → expected behavior
|
||||
# Known device baselines - MAC → expected behavior
|
||||
self.base.execute("""
|
||||
CREATE TABLE IF NOT EXISTS sentinel_devices (
|
||||
mac_address TEXT PRIMARY KEY,
|
||||
@@ -261,9 +260,11 @@ class SentinelOps:
|
||||
if existing:
|
||||
sets = []
|
||||
params = []
|
||||
_ALLOWED_DEVICE_COLS = {"alias", "trusted", "watch", "expected_ips",
|
||||
"expected_ports", "notes"}
|
||||
for k, v in kwargs.items():
|
||||
if k in ("alias", "trusted", "watch", "expected_ips",
|
||||
"expected_ports", "notes"):
|
||||
if k in _ALLOWED_DEVICE_COLS:
|
||||
_validate_identifier(k, "column name")
|
||||
sets.append(f"{k} = ?")
|
||||
params.append(v)
|
||||
sets.append("last_seen = CURRENT_TIMESTAMP")
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
# db_utils/services.py
|
||||
# Per-port service fingerprinting and tracking operations
|
||||
"""services.py - Per-port service fingerprinting and tracking."""
|
||||
|
||||
from typing import Dict, List, Optional
|
||||
import logging
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
# db_utils/software.py
|
||||
# Detected software (CPE) inventory operations
|
||||
"""software.py - Detected software (CPE) inventory operations."""
|
||||
|
||||
from typing import List, Optional
|
||||
import logging
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
# db_utils/stats.py
|
||||
# Statistics tracking and display operations
|
||||
"""stats.py - Statistics tracking and display operations."""
|
||||
|
||||
import time
|
||||
import sqlite3
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
# db_utils/studio.py
|
||||
# Actions Studio visual editor operations
|
||||
"""studio.py - Actions Studio visual editor operations."""
|
||||
|
||||
import json
|
||||
import re
|
||||
from typing import Dict, List, Optional
|
||||
import logging
|
||||
|
||||
from logger import Logger
|
||||
from db_utils.base import _validate_identifier
|
||||
|
||||
logger = Logger(name="db_utils.studio", level=logging.DEBUG)
|
||||
|
||||
@@ -105,13 +106,27 @@ class StudioOps:
|
||||
ORDER BY b_priority DESC, b_class
|
||||
""")
|
||||
|
||||
# Whitelist of columns that can be updated via the studio API
|
||||
_STUDIO_UPDATABLE = frozenset({
|
||||
'b_priority', 'studio_x', 'studio_y', 'studio_locked', 'studio_color',
|
||||
'studio_metadata', 'b_trigger', 'b_requires', 'b_enabled', 'b_timeout',
|
||||
'b_max_retries', 'b_cooldown', 'b_rate_limit', 'b_service', 'b_port',
|
||||
'b_stealth_level', 'b_risk_level', 'b_tags', 'b_parent', 'b_action',
|
||||
})
|
||||
|
||||
def update_studio_action(self, b_class: str, updates: dict):
|
||||
"""Update a studio action"""
|
||||
sets = []
|
||||
params = []
|
||||
for key, value in updates.items():
|
||||
_validate_identifier(key, "column name")
|
||||
if key not in self._STUDIO_UPDATABLE:
|
||||
logger.warning(f"Ignoring unknown studio column: {key}")
|
||||
continue
|
||||
sets.append(f"{key} = ?")
|
||||
params.append(value)
|
||||
if not sets:
|
||||
return
|
||||
params.append(b_class)
|
||||
|
||||
self.base.execute(f"""
|
||||
@@ -313,7 +328,9 @@ class StudioOps:
|
||||
if col == "b_class":
|
||||
continue
|
||||
if col not in stu_cols:
|
||||
_validate_identifier(col, "column name")
|
||||
col_type = act_col_defs.get(col, "TEXT") or "TEXT"
|
||||
_validate_identifier(col_type.split()[0], "column type")
|
||||
self.base.execute(f"ALTER TABLE actions_studio ADD COLUMN {col} {col_type};")
|
||||
|
||||
# 3) Insert missing b_class entries, non-destructive
|
||||
@@ -326,6 +343,7 @@ class StudioOps:
|
||||
for col in act_cols:
|
||||
if col == "b_class":
|
||||
continue
|
||||
_validate_identifier(col, "column name")
|
||||
# Only update if the studio value is NULL
|
||||
self.base.execute(f"""
|
||||
UPDATE actions_studio
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
# db_utils/vulnerabilities.py
|
||||
# Vulnerability tracking and CVE metadata operations
|
||||
"""vulnerabilities.py - Vulnerability tracking and CVE metadata operations."""
|
||||
|
||||
import json
|
||||
import time
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
# db_utils/webenum.py
|
||||
# Web enumeration (directory/file discovery) operations
|
||||
"""webenum.py - Web enumeration and directory/file discovery operations."""
|
||||
|
||||
from typing import Any, Dict, List, Optional
|
||||
import logging
|
||||
|
||||
Reference in New Issue
Block a user