Mirror of https://github.com/infinition/Bjorn.git (synced 2025-12-13 16:14:57 +00:00)
BREAKING CHANGE: Complete architecture refactor in preparation for the BJORN V2 release: new APIs, assets, UI/webapp, logic, attacks, and many other new features.
0  web_utils/__init__.py  Normal file (empty)
1866  web_utils/action_utils.py  Normal file (diff suppressed: file too large)
292  web_utils/attack_utils.py  Normal file
@@ -0,0 +1,292 @@
# web_utils/attack_utils.py
"""
Attack and action management utilities.
Handles attack listing, import/export, and action metadata management.
"""
from __future__ import annotations
import json
import os
import ast
import cgi
import shutil
from typing import Any, Dict, Optional
from urllib.parse import urlparse, parse_qs
import logging
from logger import Logger

logger = Logger(name="attack_utils.py", level=logging.DEBUG)


class AttackUtils:
    """Utilities for attack/action management."""

    def __init__(self, shared_data):
        self.logger = logger
        self.shared_data = shared_data

    def get_first_class_name(self, filepath: str) -> str:
        """Extract first class name from Python file using AST."""
        try:
            with open(filepath, 'r', encoding='utf-8') as file:
                tree = ast.parse(file.read(), filename=filepath)
            for node in ast.walk(tree):
                if isinstance(node, ast.ClassDef):
                    self.logger.debug(f"Found class: {node.name} in {filepath}")
                    return node.name
        except Exception as e:
            self.logger.error(f"Error parsing file {filepath}: {e}")
        self.logger.warning(f"No class found in {filepath}")
        return ''

    def get_first_class_name_from_content(self, content: str) -> str:
        """Extract first class name from Python content using AST."""
        try:
            tree = ast.parse(content)
            for node in ast.walk(tree):
                if isinstance(node, ast.ClassDef):
                    self.logger.debug(f"Found class in content: {node.name}")
                    return node.name
        except Exception as e:
            self.logger.error(f"Error parsing content: {e}")
        self.logger.warning("No class found in provided content.")
        return ''

    def _extract_action_meta_from_content(self, content: str) -> dict | None:
        """Extract action metadata (b_* variables) from Python content."""
        try:
            tree = ast.parse(content)
            meta = {}
            for node in tree.body:
                if isinstance(node, ast.Assign) and len(node.targets) == 1 and isinstance(node.targets[0], ast.Name):
                    key = node.targets[0].id
                    if key.startswith("b_"):
                        val = ast.literal_eval(node.value) if isinstance(node.value, (ast.Constant, ast.List, ast.Dict, ast.Tuple)) else None
                        meta[key] = val
            return meta if meta else None
        except Exception:
            return None
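
    # A minimal sketch of an action file in the b_* convention this parser
    # expects; field names beyond b_class/b_module are illustrative and not
    # confirmed by this diff:
    #
    #     # actions/my_scan.py (hypothetical module)
    #     b_class = "MyScan"    # required: the class name registered in the DB
    #     b_module = "my_scan"  # the module filename without .py
    #     b_port = 22           # any other b_* literal is captured verbatim
    #
    # Only literal values (constants, lists, dicts, tuples) are evaluated;
    # any other expression is stored as None.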

    def get_attacks(self, handler):
        """List all attack cards from database."""
        try:
            cards = self.shared_data.db.list_action_cards()
            resp = {"attacks": [{"name": c["name"], "image": c["image"]} for c in cards]}
            handler.send_response(200)
            handler.send_header('Content-Type', 'application/json')
            handler.end_headers()
            handler.wfile.write(json.dumps(resp).encode('utf-8'))
        except Exception as e:
            self.logger.error(f"get_attacks error: {e}")
            self._send_error_response(handler, str(e))

    def get_attack_content(self, handler):
        """Get source code content of an attack."""
        try:
            query = handler.path.split('?')[-1]
            from urllib.parse import parse_qs, unquote
            params = dict(parse_qs(query))
            attack_name = unquote(params.get('name', [''])[0])
            if not attack_name:
                raise ValueError("Attack name not provided.")

            row = self.shared_data.db.get_action_by_class(attack_name)
            if not row:
                raise FileNotFoundError(f"Attack '{attack_name}' not found in DB.")

            module_name = row["b_module"]
            script_path = os.path.join(self.shared_data.actions_dir, f"{module_name}.py")
            with open(script_path, 'r', encoding='utf-8') as f:
                content = f.read()
            self._write_json(handler, {"status": "success", "content": content})
        except Exception as e:
            self.logger.error(f"Error retrieving attack content: {e}")
            self._send_error_response(handler, str(e))

    def add_attack(self, handler):
        """Import a new attack from uploaded file."""
        try:
            ctype = handler.headers.get('Content-Type') or ""
            if 'multipart/form-data' not in ctype:
                raise ValueError("Content-Type must be multipart/form-data.")

            form = cgi.FieldStorage(fp=handler.rfile, headers=handler.headers, environ={'REQUEST_METHOD': 'POST'})
            if 'attack_file' not in form:
                raise ValueError("No attack_file field in form.")

            file_item = form['attack_file']
            if not file_item.filename.endswith('.py'):
                raise ValueError("Only .py files are allowed.")

            filename = file_item.filename
            module_name = os.path.splitext(filename)[0]
            content = file_item.file.read().decode('utf-8')

            # Parse metadata without exec
            meta = self._extract_action_meta_from_content(content)
            if not meta or "b_class" not in meta:
                raise ValueError("File must define b_class (and ideally b_module/b_port).")

            # Write file
            dst = os.path.join(self.shared_data.actions_dir, filename)
            with open(dst, "w", encoding="utf-8") as f:
                f.write(content)

            # Upsert DB
            meta.setdefault("b_module", module_name)
            self.shared_data.db.upsert_simple_action(**meta)

            # Optional: copy to default actions
            if handler.headers.get('Import-Default', 'false').lower() == 'true':
                os.makedirs(self.shared_data.default_actions_dir, exist_ok=True)
                shutil.copyfile(dst, os.path.join(self.shared_data.default_actions_dir, filename))

            self._write_json(handler, {"status": "success", "message": "Attack imported successfully."})
        except Exception as e:
            self.logger.error(f"Error importing attack: {e}")
            self._send_error_response(handler, str(e))
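
    # Note: the stdlib cgi module used in add_attack() is deprecated since
    # Python 3.11 and removed in 3.13. A rough stdlib replacement sketch using
    # the email parser (an assumption, not part of this codebase):
    #
    #     from email.parser import BytesParser
    #     from email.policy import default
    #
    #     def read_upload(headers, body: bytes, field: str = "attack_file"):
    #         raw = b"Content-Type: " + headers["Content-Type"].encode() + b"\r\n\r\n" + body
    #         msg = BytesParser(policy=default).parsebytes(raw)
    #         for part in msg.iter_parts():
    #             if part.get_param("name", header="content-disposition") == field:
    #                 return part.get_filename(), part.get_payload(decode=True)
    #         return None, None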

    def remove_attack(self, handler):
        """Remove an attack."""
        try:
            body = handler.rfile.read(int(handler.headers.get('Content-Length', 0)) or 0)
            data = json.loads(body or "{}")
            attack_name = data.get("name") or ""
            if not attack_name:
                raise ValueError("Attack name not provided.")

            row = self.shared_data.db.get_action_by_class(attack_name)
            if not row:
                raise FileNotFoundError(f"Attack '{attack_name}' not found in DB.")

            module_name = row["b_module"]
            path = os.path.join(self.shared_data.actions_dir, f"{module_name}.py")
            if os.path.exists(path):
                os.remove(path)

            self.shared_data.db.delete_action(attack_name)
            self._write_json(handler, {"status": "success", "message": "Attack removed successfully."})
        except Exception as e:
            self.logger.error(f"Error removing attack: {e}")
            self._send_error_response(handler, str(e))

    def save_attack(self, handler):
        """Save/update attack source code."""
        try:
            body = handler.rfile.read(int(handler.headers.get('Content-Length', 0)) or 0)
            data = json.loads(body or "{}")
            attack_name = data.get('name') or ""
            content = data.get('content') or ""
            if not attack_name or not content:
                raise ValueError("Missing name or content.")

            row = self.shared_data.db.get_action_by_class(attack_name)
            if not row:
                raise FileNotFoundError(f"Attack '{attack_name}' not found in DB.")

            module_name = row["b_module"]
            script_path = os.path.join(self.shared_data.actions_dir, f"{module_name}.py")

            with open(script_path, "w", encoding="utf-8") as f:
                f.write(content)

            # If b_class changed, update DB
            meta = self._extract_action_meta_from_content(content) or {}
            new_b_class = meta.get("b_class")
            if new_b_class and new_b_class != attack_name:
                self.shared_data.db.delete_action(attack_name)
                meta.setdefault("b_module", module_name)
                self.shared_data.db.upsert_simple_action(**meta)
            else:
                meta.setdefault("b_class", attack_name)
                meta.setdefault("b_module", module_name)
                self.shared_data.db.upsert_simple_action(**meta)

            self._write_json(handler, {"status": "success", "message": "Attack saved successfully."})
        except Exception as e:
            self.logger.error(f"Error saving attack: {e}")
            self._send_error_response(handler, str(e))

    def restore_attack(self, handler):
        """Restore attack to default version."""
        try:
            body = handler.rfile.read(int(handler.headers.get('Content-Length', 0)) or 0)
            data = json.loads(body or "{}")
            attack_name = data.get('name') or ""
            if not attack_name:
                raise ValueError("Attack name not provided.")

            row = self.shared_data.db.get_action_by_class(attack_name)
            if not row:
                raise FileNotFoundError(f"Attack '{attack_name}' not found in DB.")

            module_name = row["b_module"]
            filename = f"{module_name}.py"

            src = os.path.join(self.shared_data.default_actions_dir, filename)
            dst = os.path.join(self.shared_data.actions_dir, filename)
            if not os.path.exists(src):
                raise FileNotFoundError(f"Default version not found: {src}")

            shutil.copyfile(src, dst)

            # Parse and upsert metadata
            with open(dst, "r", encoding="utf-8") as f:
                meta = self._extract_action_meta_from_content(f.read()) or {}
            meta.setdefault("b_class", attack_name)
            meta.setdefault("b_module", module_name)
            self.shared_data.db.upsert_simple_action(**meta)

            self._write_json(handler, {"status": "success", "message": "Attack restored to default successfully."})
        except Exception as e:
            self.logger.error(f"Error restoring attack: {e}")
            self._send_error_response(handler, str(e))

    def serve_actions_icons(self, handler):
        """Serve action icons from actions_icons_dir."""
        try:
            rel = handler.path[len('/actions_icons/'):]
            rel = os.path.normpath(rel).replace("\\", "/")
            if rel.startswith("../"):
                handler.send_error(400, "Invalid path")
                return

            image_path = os.path.join(self.shared_data.actions_icons_dir, rel)

            if not os.path.exists(image_path):
                handler.send_error(404, "Image not found")
                return

            if image_path.endswith('.bmp'):
                mime = 'image/bmp'
            elif image_path.endswith('.png'):
                mime = 'image/png'
            elif image_path.endswith('.jpg') or image_path.endswith('.jpeg'):
                mime = 'image/jpeg'
            else:
                mime = 'application/octet-stream'

            with open(image_path, 'rb') as f:
                content = f.read()

            handler.send_response(200)
            handler.send_header('Content-Type', mime)
            handler.send_header('Content-Length', str(len(content)))
            handler.end_headers()
            handler.wfile.write(content)
            self.logger.info(f"Served action icon: {image_path}")
        except Exception as e:
            self.logger.error(f"Error serving action icon {handler.path}: {e}")
            handler.send_error(500, "Internal Server Error")

    def _write_json(self, handler, obj: dict, code: int = 200):
        """Write JSON response."""
        handler.send_response(code)
        handler.send_header('Content-Type', 'application/json')
        handler.end_headers()
        handler.wfile.write(json.dumps(obj).encode('utf-8'))

    def _send_error_response(self, handler, message: str, status_code: int = 500):
        """Send error response in JSON format."""
        handler.send_response(status_code)
        handler.send_header('Content-Type', 'application/json')
        handler.end_headers()
        response = {'status': 'error', 'message': message}
        handler.wfile.write(json.dumps(response).encode('utf-8'))
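
A minimal usage sketch for AttackUtils (the route names and the shared_data shape are assumptions inferred from the calls above, not confirmed elsewhere in this diff):

    # Hypothetical wiring inside a BaseHTTPRequestHandler subclass
    attack_utils = AttackUtils(shared_data)  # shared_data must expose .db, .actions_dir, ...

    # GET  /attacks                -> attack_utils.get_attacks(self)
    # GET  /attack_content?name=X  -> attack_utils.get_attack_content(self)
    # POST /save_attack            -> attack_utils.save_attack(self)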
451  web_utils/backup_utils.py  Normal file
@@ -0,0 +1,451 @@
# web_utils/backup_utils.py
"""
Backup and restore utilities.
Handles system backups, GitHub updates, and restore operations.
"""
from __future__ import annotations
import os
import json
import tarfile
import zipfile
import subprocess
import shutil
import stat
from datetime import datetime
from typing import Any, Dict, Optional

import logging
from logger import Logger

logger = Logger(name="backup_utils.py", level=logging.DEBUG)


class BackupUtils:
    """Utilities for backup and restore operations."""

    def __init__(self, shared_data):
        self.logger = logger
        self.shared_data = shared_data

    def create_backup(self, data):
        """Create a backup of the Bjorn directory in tar.gz or zip format."""
        self.logger.debug("Starting backup process...")
        backup_dir = self.shared_data.backup_dir
        os.makedirs(backup_dir, exist_ok=True)

        backup_description = data.get('description', 'No description')
        backup_format = data.get('format', 'tar.gz')
        timestamp = datetime.now().strftime("%Y%m%d%H%M%S")

        if backup_format == 'zip':
            backup_filename = f"backup_{timestamp}.zip"
            backup_path = os.path.join(backup_dir, backup_filename)
            try:
                with zipfile.ZipFile(backup_path, 'w', zipfile.ZIP_DEFLATED) as backup_zip:
                    for foldername, subfolders, filenames in os.walk(self.shared_data.current_dir):
                        for filename in filenames:
                            file_path = os.path.join(foldername, filename)
                            rel_path = os.path.relpath(file_path, self.shared_data.current_dir)
                            backup_zip.write(file_path, rel_path)

                self.shared_data.db.add_backup(
                    filename=backup_filename,
                    description=backup_description,
                    date=datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                    type_="User Backup",
                    is_default=False,
                    is_restore=False,
                    is_github=False
                )
                self.logger.debug(f"Backup created successfully: {backup_path}")
                return {"status": "success", "message": "Backup created successfully in ZIP format."}
            except Exception as e:
                self.logger.error(f"Failed to create ZIP backup: {e}")
                return {"status": "error", "message": str(e)}

        elif backup_format == 'tar.gz':
            backup_filename = f"backup_{timestamp}.tar.gz"
            backup_path = os.path.join(backup_dir, backup_filename)
            try:
                with tarfile.open(backup_path, "w:gz") as backup_tar:
                    for item in os.listdir(self.shared_data.current_dir):
                        item_path = os.path.join(self.shared_data.current_dir, item)
                        backup_tar.add(item_path, arcname=item)

                self.shared_data.db.add_backup(
                    filename=backup_filename,
                    description=backup_description,
                    date=datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                    type_="User Backup",
                    is_default=False,
                    is_restore=False,
                    is_github=False
                )
                self.logger.debug(f"Backup created successfully: {backup_path}")
                return {"status": "success", "message": "Backup created successfully in tar.gz format."}
            except Exception as e:
                self.logger.error(f"Failed to create tar.gz backup: {e}")
                return {"status": "error", "message": str(e)}
        else:
            self.logger.error(f"Unsupported backup format: {backup_format}")
            return {"status": "error", "message": "Unsupported backup format."}

    def list_backups(self, data=None):
        """List all backups from DB."""
        self.logger.debug("Listing backups...")
        try:
            backups = self.shared_data.db.list_backups()
            return {"status": "success", "backups": backups}
        except Exception as e:
            self.logger.error(f"Failed to list backups: {e}")
            return {"status": "error", "message": str(e)}

    def remove_named_pipes(self, directory):
        """Recursively remove named pipes in the specified directory."""
        self.logger.debug(f"Scanning for named pipes in {directory}...")
        for root, dirs, files in os.walk(directory):
            for name in files:
                file_path = os.path.join(root, name)
                try:
                    if stat.S_ISFIFO(os.stat(file_path).st_mode):
                        os.remove(file_path)
                        self.logger.debug(f"Removed named pipe: {file_path}")
                except Exception as e:
                    self.logger.error(f"Failed to remove named pipe {file_path}: {e}")

    def restore_backup(self, data):
        """Restore a backup with options to keep certain folders."""
        backup_filename = data.get('filename')
        mode = data.get('mode')  # 'full_restore' or 'selective_restore'
        keeps = data.get('keeps', [])

        if not backup_filename:
            return {"status": "error", "message": "Filename not provided"}

        backup_path = os.path.join(self.shared_data.backup_dir, backup_filename)
        original_dir = self.shared_data.current_dir
        temp_dir = f"{original_dir}_temp"

        try:
            if not os.path.exists(backup_path):
                self.logger.error(f"Backup file does not exist: {backup_path}")
                return {"status": "error", "message": "Backup file not found"}

            # Clean up old temp_dir if exists
            if os.path.exists(temp_dir):
                self.logger.debug(f"Removing existing temp directory: {temp_dir}")
                self.remove_named_pipes(temp_dir)
                shutil.rmtree(temp_dir)

            # Create backup of current state
            timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
            from_restore_filename = f"FROM_RESTORE_{timestamp}.tar.gz"
            from_restore_path = os.path.join(self.shared_data.backup_dir, from_restore_filename)

            self.logger.debug("Creating backup of current directory before restoring...")
            with tarfile.open(from_restore_path, "w:gz") as backup_tar:
                for item in os.listdir(original_dir):
                    item_path = os.path.join(original_dir, item)
                    backup_tar.add(item_path, arcname=item)
            self.logger.debug(f"Backup of current directory created: {from_restore_path}")

            self.shared_data.db.add_backup(
                filename=from_restore_filename,
                description='AUTO Backup created during restoration',
                date=datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                type_="Restore Backup",
                is_default=False,
                is_restore=True,
                is_github=False
            )

            # Rename current directory to temp
            if os.path.exists(original_dir):
                os.rename(original_dir, temp_dir)
            else:
                self.logger.warning(f"Original directory does not exist: {original_dir}")

            # Recreate target directory
            os.makedirs(original_dir, exist_ok=True)

            # Extract backup
            self.logger.debug(f"Extracting backup into {original_dir}...")
            if backup_filename.endswith('.zip'):
                with zipfile.ZipFile(backup_path, 'r') as backup_zip:
                    backup_zip.extractall(original_dir)
            elif backup_filename.endswith('.tar.gz'):
                with tarfile.open(backup_path, 'r:gz') as backup_tar:
                    backup_tar.extractall(original_dir)
            else:
                if os.path.exists(temp_dir):
                    os.rename(temp_dir, original_dir)
                return {"status": "error", "message": "Unsupported backup file format"}

            # Selective restore
            if mode == 'selective_restore' and keeps:
                self.logger.debug("Selective restore: preserving specified folders...")
                for folder in keeps:
                    src = os.path.join(temp_dir, folder)
                    dest = os.path.join(original_dir, folder)
                    if os.path.exists(src):
                        if os.path.exists(dest):
                            self.remove_named_pipes(dest)
                            shutil.rmtree(dest)
                        shutil.copytree(src, dest)

            # Clean up temp_dir
            if os.path.exists(temp_dir):
                shutil.rmtree(temp_dir)

            # Restart Bjorn service
            self.logger.debug("Restarting Bjorn service after restoration...")
            try:
                subprocess.Popen(
                    ["sudo", "systemctl", "restart", "bjorn.service"],
                    stdout=subprocess.DEVNULL,
                    stderr=subprocess.DEVNULL
                )
            except Exception as e:
                self.logger.error(f"Failed to issue restart command: {e}")
                return {"status": "error", "message": "Failed to restart the service."}

            return {"status": "success", "message": "Backup restored successfully."}

        except (tarfile.TarError, zipfile.BadZipFile) as e:
            self.logger.error(f"Failed to extract backup: {e}")
            if os.path.exists(temp_dir):
                os.rename(temp_dir, original_dir)
            return {"status": "error", "message": f"Failed to extract backup: {e}"}
        except Exception as e:
            self.logger.error(f"Failed to restore backup: {e}")
            if os.path.exists(temp_dir):
                os.rename(temp_dir, original_dir)
            return {"status": "error", "message": str(e)}
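
    # Note on restore_backup() above: extractall() trusts member paths inside
    # the archive. On Python 3.12+ a stricter tar extraction is available
    # (a sketch, assuming Python 3.12+; not in the original code):
    #
    #     with tarfile.open(backup_path, "r:gz") as tar:
    #         tar.extractall(original_dir, filter="data")  # rejects "..", absolute paths, devices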

    def set_default_backup(self, data):
        """Set a backup as default."""
        try:
            filename = data.get('filename')
            if not filename:
                return {"status": "error", "message": "No filename provided"}

            self.shared_data.db.set_default_backup(filename)
            return {"status": "success"}
        except Exception as e:
            self.logger.error(f"Error setting default backup: {e}")
            return {"status": "error", "message": str(e)}

    def delete_backup(self, data):
        """Delete a backup file and its DB metadata."""
        filename = data.get('filename')
        if not filename:
            return {"status": "error", "message": "Filename not provided"}

        backup_path = os.path.join(self.shared_data.backup_dir, filename)

        try:
            if os.path.exists(backup_path):
                os.remove(backup_path)
                self.logger.debug(f"Deleted backup file: {backup_path}")

            self.shared_data.db.delete_backup(filename)
            return {"status": "success", "message": "Backup deleted successfully."}
        except Exception as e:
            self.logger.error(f"Failed to delete backup: {e}")
            return {"status": "error", "message": str(e)}

    def update_application(self, data):
        """Update application from GitHub with options to keep certain folders."""
        mode = data.get('mode')  # 'fresh_start' or 'upgrade'
        keeps = data.get('keeps', [])

        original_dir = self.shared_data.current_dir
        temp_dir = f"{original_dir}_temp"
        github_zip_url = "https://codeload.github.com/infinition/Bjorn/zip/refs/heads/main"
        downloaded_zip = "/tmp/bjorn_update.zip"
        extract_dir = "/tmp/bjorn_extract"

        try:
            # Preliminary cleanup
            for cleanup_dir in [temp_dir, extract_dir]:
                if os.path.exists(cleanup_dir):
                    self.logger.debug(f"Removing existing directory: {cleanup_dir}")
                    self.remove_named_pipes(cleanup_dir)
                    shutil.rmtree(cleanup_dir, ignore_errors=True)

            # Create backup before update
            timestamp = datetime.now().strftime("%Y%m%d%H%M%S")
            from_update_filename = f"FROM_UPDATE_{timestamp}.tar.gz"
            from_update_path = os.path.join(self.shared_data.backup_dir, from_update_filename)
            os.makedirs(self.shared_data.backup_dir, exist_ok=True)

            self.logger.debug("Creating backup before update...")
            with tarfile.open(from_update_path, "w:gz") as backup_tar:
                for item in os.listdir(original_dir):
                    item_path = os.path.join(original_dir, item)
                    backup_tar.add(item_path, arcname=item)

            self.shared_data.db.add_backup(
                filename=from_update_filename,
                description='AUTO Backup created during GitHub update',
                date=datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                type_="GitHub Update Backup",
                is_default=False,
                is_restore=False,
                is_github=True
            )

            # Download ZIP from GitHub
            self.logger.debug("Downloading latest version from GitHub...")
            download_command = [
                'curl', '-L', '-o', downloaded_zip,
                '--connect-timeout', '10',
                '--max-time', '60',
                github_zip_url
            ]
            subprocess.run(download_command, check=True)

            if not os.path.exists(downloaded_zip):
                raise Exception("Failed to download update file")

            # Prepare original directory
            if os.path.exists(original_dir):
                os.rename(original_dir, temp_dir)
            os.makedirs(original_dir, exist_ok=True)

            # Extract new version
            self.logger.debug("Extracting new version...")
            os.makedirs(extract_dir, exist_ok=True)
            with zipfile.ZipFile(downloaded_zip, 'r') as zip_ref:
                contents = zip_ref.namelist()
                if not contents:
                    raise Exception("ZIP file is empty")
                root_dir = contents[0].split('/')[0]
                zip_ref.extractall(extract_dir)
            extracted_dir = os.path.join(extract_dir, root_dir)
            if not os.path.exists(extracted_dir):
                raise Exception(f"Expected directory {extracted_dir} not found after extraction")

            for item in os.listdir(extracted_dir):
                source = os.path.join(extracted_dir, item)
                destination = os.path.join(original_dir, item)
                shutil.move(source, destination)

            # If upgrade: restore kept folders
            if mode == 'upgrade' and keeps:
                self.logger.debug("Restoring kept folders...")
                for folder in keeps:
                    src = os.path.join(temp_dir, folder)
                    dest = os.path.join(original_dir, folder)
                    if os.path.exists(src):
                        if os.path.exists(dest):
                            shutil.rmtree(dest, ignore_errors=True)
                        shutil.copytree(src, dest)
                    else:
                        self.logger.warning(f"Source folder not found: {src}")

            # Cleanup
            for path in [temp_dir, extract_dir, downloaded_zip]:
                if os.path.exists(path):
                    if os.path.isdir(path):
                        shutil.rmtree(path, ignore_errors=True)
                    else:
                        try:
                            os.remove(path)
                        except Exception:
                            pass

            # Restart service
            self.logger.debug("Restarting Bjorn service...")
            subprocess.Popen(
                ["sudo", "systemctl", "restart", "bjorn.service"],
                stdout=subprocess.DEVNULL,
                stderr=subprocess.DEVNULL
            )

            return {"status": "success", "message": "Application updated successfully"}

        except subprocess.CalledProcessError as e:
            self.logger.error(f"Failed to download update: {e}")
            if os.path.exists(temp_dir):
                os.rename(temp_dir, original_dir)
            return {"status": "error", "message": f"Failed to download update: {e}"}
        except Exception as e:
            self.logger.error(f"Update failed: {e}")
            if os.path.exists(temp_dir):
                os.rename(temp_dir, original_dir)
            return {"status": "error", "message": str(e)}
        finally:
            for path in [downloaded_zip, extract_dir]:
                if os.path.exists(path):
                    try:
                        if os.path.isdir(path):
                            shutil.rmtree(path, ignore_errors=True)
                        else:
                            os.remove(path)
                    except Exception as ee:
                        self.logger.error(f"Failed to clean up {path}: {ee}")

    def check_update(self, handler):
        """Check for updates from GitHub."""
        try:
            import requests
            github_raw_url = self.shared_data.github_version_url
            response = requests.get(github_raw_url, timeout=10)
            if response.status_code != 200:
                raise Exception(f"Failed to fetch version from GitHub. Status code: {response.status_code}")

            latest_version_line = response.text.splitlines()[0].strip()
            latest_version = latest_version_line

            with open(self.shared_data.version_file, 'r') as vf:
                current_version_line = vf.readline().strip()
            current_version = current_version_line

            update_available = latest_version != current_version
            self.logger.debug(f"Current version: {current_version}, Latest version: {latest_version}, Update available: {update_available}")

            response_data = {
                'latest_version': latest_version,
                'current_version': current_version,
                'update_available': update_available
            }

            handler.send_response(200)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps(response_data).encode('utf-8'))

        except Exception as e:
            self.logger.error(f"Error checking update: {e}")
            handler.send_response(500)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({
                "status": "error",
                "message": "Failed to check for updates."
            }).encode('utf-8'))

    def download_backup(self, handler, filename):
        """Download a backup file."""
        backup_path = os.path.join(self.shared_data.backup_dir, filename)
        if not os.path.exists(backup_path):
            handler.send_response(404)
            handler.end_headers()
            handler.wfile.write(b"Backup file not found")
            return

        try:
            with open(backup_path, 'rb') as f:
                file_data = f.read()
            handler.send_response(200)
            handler.send_header('Content-Type', 'application/octet-stream')
            handler.send_header('Content-Disposition', f'attachment; filename="{filename}"')
            handler.send_header('Content-Length', str(len(file_data)))
            handler.end_headers()
            handler.wfile.write(file_data)
        except Exception as e:
            self.logger.error(f"Error downloading backup: {e}")
            handler.send_response(500)
            handler.end_headers()
            handler.wfile.write(b"Internal server error")
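
A short usage sketch for BackupUtils (the shared_data attributes are inferred from the calls above):

    backup_utils = BackupUtils(shared_data)
    result = backup_utils.create_backup({"description": "pre-upgrade", "format": "tar.gz"})
    if result["status"] == "success":
        print(backup_utils.list_backups()["backups"])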
357  web_utils/bluetooth_utils.py  Normal file
@@ -0,0 +1,357 @@
# web_utils/bluetooth_utils.py
"""
Bluetooth device management utilities.
Handles Bluetooth scanning, pairing, connection, and device management.
"""
from __future__ import annotations
import json
import subprocess
import time
import os
import dbus
import dbus.mainloop.glib
import dbus.exceptions
from typing import Any, Dict, Optional
import logging
from logger import Logger

logger = Logger(name="bluetooth_utils.py", level=logging.DEBUG)


class BluetoothUtils:
    """Utilities for Bluetooth device management."""

    def __init__(self, shared_data):
        self.logger = logger
        self.shared_data = shared_data
        self.bluetooth_initialized = False
        self.bus = None
        self.manager_interface = None
        self.adapter_path = None
        self.adapter = None
        self.adapter_props = None
        self.adapter_methods = None

    def _ensure_bluetooth_service(self):
        """Check that the bluetooth service is running; if not, start and enable it."""
        try:
            res = subprocess.run(
                ["systemctl", "is-active", "bluetooth"],
                stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
            )
            if "active" not in res.stdout:
                self.logger.info("Bluetooth service not active. Starting and enabling it...")
                start_res = subprocess.run(
                    ["sudo", "systemctl", "start", "bluetooth"],
                    stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
                )
                if start_res.returncode != 0:
                    self.logger.error(f"Failed to start bluetooth service: {start_res.stderr}")
                    raise Exception("Failed to start bluetooth service.")

                enable_res = subprocess.run(
                    ["sudo", "systemctl", "enable", "bluetooth"],
                    stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
                )
                if enable_res.returncode != 0:
                    self.logger.error(f"Failed to enable bluetooth service: {enable_res.stderr}")
                else:
                    self.logger.info("Bluetooth service enabled successfully.")
            else:
                self.logger.info("Bluetooth service is already active.")
        except Exception as e:
            self.logger.error(f"Error ensuring bluetooth service: {e}")
            raise

    def _init_bluetooth(self):
        """Initialize Bluetooth DBus connection."""
        if self.bluetooth_initialized:
            return

        try:
            dbus.mainloop.glib.DBusGMainLoop(set_as_default=True)
            self._ensure_bluetooth_service()

            self.bus = dbus.SystemBus()
            manager = self.bus.get_object("org.bluez", "/")
            self.manager_interface = dbus.Interface(manager, "org.freedesktop.DBus.ObjectManager")

            objects = self.manager_interface.GetManagedObjects()
            self.adapter_path = None
            for path, ifaces in objects.items():
                if "org.bluez.Adapter1" in ifaces:
                    self.adapter_path = path
                    break

            if not self.adapter_path:
                self.logger.error("No Bluetooth adapter found.")
                raise Exception("No Bluetooth adapter found.")

            self.adapter = self.bus.get_object("org.bluez", self.adapter_path)
            self.adapter_props = dbus.Interface(self.adapter, "org.freedesktop.DBus.Properties")
            self.adapter_methods = dbus.Interface(self.adapter, "org.bluez.Adapter1")

            self.bluetooth_initialized = True
        except Exception as e:
            self.logger.error(f"Failed to initialize Bluetooth: {e}")
            raise

    def _get_device_object(self, address):
        """Get DBus device object by MAC address."""
        self._init_bluetooth()
        objects = self.manager_interface.GetManagedObjects()
        for path, ifaces in objects.items():
            if "org.bluez.Device1" in ifaces:
                dev = ifaces["org.bluez.Device1"]
                if dev.get("Address") == address:
                    return self.bus.get_object("org.bluez", path)
        return None

    def scan_bluetooth(self, handler):
        """Scan for Bluetooth devices."""
        try:
            self._init_bluetooth()
            self.adapter_props.Set("org.bluez.Adapter1", "Powered", dbus.Boolean(True))
            self.adapter_props.Set("org.bluez.Adapter1", "Discoverable", dbus.Boolean(True))
            self.adapter_props.Set("org.bluez.Adapter1", "DiscoverableTimeout", dbus.UInt32(180))

            self.adapter_methods.StartDiscovery()
            time.sleep(3)
            objects = self.manager_interface.GetManagedObjects()
            devices = []
            for path, ifaces in objects.items():
                if "org.bluez.Device1" in ifaces:
                    dev = ifaces["org.bluez.Device1"]
                    addr = dev.get("Address", "")
                    name = dev.get("Name", "Unknown")
                    paired = bool(dev.get("Paired", False))
                    trusted = bool(dev.get("Trusted", False))
                    connected = bool(dev.get("Connected", False))

                    devices.append({
                        "name": name,
                        "address": addr,
                        "paired": paired,
                        "trusted": trusted,
                        "connected": connected
                    })

            self.adapter_methods.StopDiscovery()
            response = {"devices": devices}
            handler.send_response(200)
            handler.send_header("Content-Type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps(response).encode('utf-8'))

        except Exception as e:
            self.logger.error(f"Error scanning Bluetooth: {e}")
            handler.send_response(500)
            handler.send_header("Content-Type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode('utf-8'))

    def pair_bluetooth(self, address, pin=None):
        """Pair with a Bluetooth device."""
        try:
            device = self._get_device_object(address)
            if device is None:
                self.logger.error(f"Device {address} not found")
                return {"status": "error", "message": f"Device {address} not found"}

            device_methods = dbus.Interface(device, "org.bluez.Device1")
            device_props = dbus.Interface(device, "org.freedesktop.DBus.Properties")

            bt_process = subprocess.Popen(
                ['bluetoothctl'],
                stdin=subprocess.PIPE,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                text=True,
                bufsize=1,
                universal_newlines=True
            )

            self.logger.info(f"Attempting to pair with {address}")
            bt_process.stdin.write(f"pair {address}\n")
            bt_process.stdin.flush()

            timeout = 60
            start_time = time.time()

            while (time.time() - start_time) < timeout:
                line = bt_process.stdout.readline()
                if not line:
                    continue

                self.logger.info(f"Bluetoothctl output: {line.strip()}")

                if "Confirm passkey" in line or "Request confirmation" in line:
                    self.logger.info("Sending confirmation...")
                    bt_process.stdin.write("yes\n")
                    bt_process.stdin.flush()

                try:
                    paired = device_props.Get("org.bluez.Device1", "Paired")
                    if paired:
                        self.logger.info("Device successfully paired!")
                        device_props.Set("org.bluez.Device1", "Trusted", dbus.Boolean(True))

                        time.sleep(2)

                        config_path = "/home/bjorn/.settings_bjorn/bt.json"
                        current_mac = None
                        if os.path.exists(config_path):
                            try:
                                with open(config_path, "r") as f:
                                    data = json.load(f)
                                current_mac = data.get("device_mac")
                            except (json.JSONDecodeError, IOError):
                                pass

                        if current_mac != address:
                            self.logger.info(f"Updating config with new MAC: {address}")
                            new_data = {"device_mac": address}
                            os.makedirs(os.path.dirname(config_path), exist_ok=True)
                            with open(config_path, "w") as f:
                                json.dump(new_data, f)
                            self.logger.info("Updated bt.json with new device MAC.")

                        try:
                            subprocess.run(["sudo", "systemctl", "restart", "auto_bt_connect"], check=True)
                            self.logger.info("auto_bt_connect service restarted successfully")
                        except subprocess.CalledProcessError as e:
                            self.logger.error(f"Failed to restart auto_bt_connect service: {e}")

                        return {"status": "success", "message": "Device successfully paired and trusted"}
                except Exception:
                    pass

                if "Failed" in line or "Error" in line:
                    self.logger.error(f"Bluetoothctl error: {line}")
                    return {"status": "error", "message": f"Pairing failed: {line.strip()}"}

            return {"status": "error", "message": "Pairing timed out - please try again"}

        except Exception as e:
            self.logger.error(f"Error during pairing process: {str(e)}")
            return {"status": "error", "message": f"Error during pairing: {str(e)}"}

        finally:
            if 'bt_process' in locals():
                bt_process.stdin.write("quit\n")
                bt_process.stdin.flush()
                time.sleep(1)
                bt_process.terminate()

    def forget_bluetooth(self, address):
        """Remove/forget a Bluetooth device."""
        try:
            device = self._get_device_object(address)
            if device is None:
                return {"status": "error", "message": f"Device {address} not found"}

            device_methods = dbus.Interface(device, "org.bluez.Device1")
            adapter_methods = dbus.Interface(self.adapter, "org.bluez.Adapter1")

            try:
                try:
                    device_methods.Disconnect()
                except Exception:
                    pass

                adapter_methods.RemoveDevice(device)
                self.logger.info(f"Successfully removed device {address}")
                return {"status": "success", "message": "Device forgotten successfully"}

            except Exception as e:
                self.logger.error(f"Failed to forget device: {e}")
                return {"status": "error", "message": f"Failed to forget device: {str(e)}"}

        except Exception as e:
            self.logger.error(f"Error in forget_bluetooth: {str(e)}")
            return {"status": "error", "message": f"Error forgetting device: {str(e)}"}

    def trust_bluetooth(self, address):
        """Trust a Bluetooth device."""
        device = self._get_device_object(address)
        if device is None:
            return {"status": "error", "message": f"Device {address} not found"}
        device_props = dbus.Interface(device, "org.freedesktop.DBus.Properties")
        try:
            device_props.Set("org.bluez.Device1", "Trusted", dbus.Boolean(True))
            return {"status": "success", "message": f"Trusted {address}"}
        except Exception as e:
            return {"status": "error", "message": f"Failed to trust {address}: {e}"}

    def connect_bluetooth(self, address):
        """Connect to a Bluetooth device and set up networking."""
        device = self._get_device_object(address)
        if device is None:
            return {"status": "error", "message": f"Device {address} not found"}

        device_methods = dbus.Interface(device, "org.bluez.Device1")
        try:
            device_methods.Connect()
            self.logger.info(f"Device {address} connected. Setting up PAN and obtaining IP...")

            bt_net_process = subprocess.Popen(
                ["sudo", "bt-network", "-c", address, "nap"],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                text=True
            )

            time.sleep(2)

            if bt_net_process.poll() is not None:
                if bt_net_process.returncode != 0:
                    stderr_output = bt_net_process.stderr.read() if bt_net_process.stderr else ""
                    self.logger.error(f"Failed to run bt-network: {stderr_output}")
                    return {"status": "error", "message": f"Connected to {address}, but failed to set up bt-network: {stderr_output}"}
                else:
                    self.logger.warning("bt-network ended. PAN might not remain established.")
            else:
                self.logger.info("bt-network process started successfully and is running in the background.")

            dhclient_res = subprocess.run(
                ["sudo", "dhclient", "-4", "bnep0"],
                stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
            )
            if dhclient_res.returncode != 0:
                self.logger.error(f"Failed to run dhclient: {dhclient_res.stderr}")
                return {"status": "error", "message": f"Connected to {address}, bt-network ok, but dhclient failed: {dhclient_res.stderr}"}

            self.logger.info("Successfully obtained IP via dhclient on bnep0.")

            config_path = "/home/bjorn/.settings_bjorn/bt.json"
            current_mac = None
            if os.path.exists(config_path):
                try:
                    with open(config_path, "r") as f:
                        data = json.load(f)
                    current_mac = data.get("device_mac")
                except (json.JSONDecodeError, IOError):
                    pass

            if current_mac != address:
                self.logger.info(f"Updating config with new MAC: {address}")
                new_data = {"device_mac": address}
                os.makedirs(os.path.dirname(config_path), exist_ok=True)
                with open(config_path, "w") as f:
                    json.dump(new_data, f)
                self.logger.info("Updated bt.json with new device MAC.")

            return {"status": "success", "message": f"Connected to {address} and network interface set up."}
        except dbus.exceptions.DBusException as e:
            return {"status": "error", "message": f"Failed to connect to {address}: {e}"}

    def disconnect_bluetooth(self, address):
        """Disconnect from a Bluetooth device."""
        device = self._get_device_object(address)
        if device is None:
            return {"status": "error", "message": f"Device {address} not found"}
        device_methods = dbus.Interface(device, "org.bluez.Device1")
        try:
            device_methods.Disconnect()
            return {"status": "success", "message": f"Disconnected from {address}"}
        except dbus.exceptions.DBusException as e:
            return {"status": "error", "message": f"Failed to disconnect from {address}: {e}"}
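
A quick usage sketch for BluetoothUtils (requires BlueZ, dbus-python, and sudo rights; the MAC address is illustrative):

    bt = BluetoothUtils(shared_data)
    print(bt.pair_bluetooth("AA:BB:CC:DD:EE:FF"))
    print(bt.trust_bluetooth("AA:BB:CC:DD:EE:FF"))
    print(bt.connect_bluetooth("AA:BB:CC:DD:EE:FF"))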
295  web_utils/c2_utils.py  Normal file
@@ -0,0 +1,295 @@
# web_utils/c2_utils.py
from c2_manager import c2_manager
import base64
import time
from pathlib import Path
import json
from datetime import datetime
import logging
from logger import Logger

logger = Logger(name="c2_utils.py", level=logging.DEBUG)


class C2Utils:
    def __init__(self, shared_data):
        self.logger = logger
        self.shared_data = shared_data
        # --- Anti-flapping: cache of the last known-good agents snapshot ---
        self._last_agents = []       # list of normalized agents
        self._last_agents_ts = 0.0   # snapshot time, epoch seconds
        self._snapshot_ttl = 10.0    # grace period (s) if /c2/agents falters

    # ---------------------- JSON helpers ----------------------

    def _to_jsonable(self, obj):
        if obj is None or isinstance(obj, (bool, int, float, str)):
            return obj
        if isinstance(obj, Path):
            return str(obj)
        if isinstance(obj, bytes):
            return {"_b64": base64.b64encode(obj).decode("ascii")}
        if isinstance(obj, datetime):
            return obj.isoformat()
        if isinstance(obj, dict):
            return {k: self._to_jsonable(v) for k, v in obj.items()}
        if isinstance(obj, (list, tuple, set)):
            return [self._to_jsonable(v) for v in obj]
        return str(obj)

    def _json(self, handler, code: int, obj):
        safe = self._to_jsonable(obj)
        payload = json.dumps(safe, ensure_ascii=False).encode("utf-8")
        handler.send_response(code)
        handler.send_header("Content-Type", "application/json")
        handler.send_header("Content-Length", str(len(payload)))
        handler.end_headers()
        try:
            handler.wfile.write(payload)
        except BrokenPipeError:
            pass

    # ---------------------- Agent normalization ----------------------

    def _normalize_agent(self, a):
        """
        Normalize an agent record (id, last_seen as ISO) without breaking other fields.
        """
        a = dict(a) if isinstance(a, dict) else {}
        a["id"] = a.get("id") or a.get("agent_id") or a.get("client_id")

        ls = a.get("last_seen")
        if isinstance(ls, (int, float)):
            # epoch seconds -> ISO
            try:
                a["last_seen"] = datetime.fromtimestamp(ls).isoformat()
            except Exception:
                a["last_seen"] = None
        elif isinstance(ls, str):
            # ISO (with or without a trailing Z)
            try:
                dt = datetime.fromisoformat(ls.replace("Z", "+00:00"))
                a["last_seen"] = dt.isoformat()
            except Exception:
                # unknown format -> leave as-is
                pass
        elif isinstance(ls, datetime):
            a["last_seen"] = ls.isoformat()
        else:
            a["last_seen"] = None

        return a

    # ---------------------- REST handlers ----------------------

    def c2_start(self, handler, data):
        port = int(data.get("port", 5555))
        res = c2_manager.start(port=port)
        return self._json(handler, 200, res)

    def c2_stop(self, handler):
        res = c2_manager.stop()
        return self._json(handler, 200, res)

    def c2_status(self, handler):
        return self._json(handler, 200, c2_manager.status())

    def c2_agents(self, handler):
        """
        Return the list of agents (JSON array).
        Anti-flapping: if c2_manager.list_agents() returns [] but a recent
        snapshot (< TTL) exists, serve that snapshot instead.
        """
        try:
            raw = c2_manager.list_agents() or []
            agents = [self._normalize_agent(x) for x in raw]

            now = time.time()
            if len(agents) == 0 and len(self._last_agents) > 0 and (now - self._last_agents_ts) <= self._snapshot_ttl:
                # Fast fallback: serve the last non-empty snapshot
                return self._json(handler, 200, self._last_agents)

            # Fresh snapshot (even if genuinely empty)
            self._last_agents = agents
            self._last_agents_ts = now
            return self._json(handler, 200, agents)

        except Exception as e:
            # On error, serve a recent snapshot if one is available
            now = time.time()
            if len(self._last_agents) > 0 and (now - self._last_agents_ts) <= self._snapshot_ttl:
                self.logger.warning(f"/c2/agents fallback to snapshot after error: {e}")
                return self._json(handler, 200, self._last_agents)
            return self._json(handler, 500, {"status": "error", "message": str(e)})

    def c2_command(self, handler, data):
        targets = data.get("targets") or []
        command = (data.get("command") or "").strip()
        if not targets or not command:
            return self._json(handler, 400, {"status": "error", "message": "targets and command required"})
        return self._json(handler, 200, c2_manager.send_command(targets, command))

    def c2_broadcast(self, handler, data):
        command = (data.get("command") or "").strip()
        if not command:
            return self._json(handler, 400, {"status": "error", "message": "command required"})
        return self._json(handler, 200, c2_manager.broadcast(command))

    def c2_deploy(self, handler, data):
        required = ("client_id", "ssh_host", "ssh_user", "ssh_pass")
        if not all(k in data and str(data.get(k)).strip() for k in required):
            return self._json(handler, 400, {"status": "error", "message": "missing fields"})
        payload = {
            "client_id": data.get("client_id").strip(),
            "ssh_host": data.get("ssh_host").strip(),
            "ssh_user": data.get("ssh_user").strip(),
            "ssh_pass": data.get("ssh_pass").strip(),
        }
        if data.get("lab_user"):
            payload["lab_user"] = data.get("lab_user").strip()
        if data.get("lab_password"):
            payload["lab_password"] = data.get("lab_password").strip()
        res = c2_manager.deploy_client(**payload)
        return self._json(handler, 200, res)

    def c2_stale_agents(self, handler, threshold: int = 300):
        try:
            agents = c2_manager.db.get_stale_agents(threshold)
            return self._json(handler, 200, {"status": "ok", "count": len(agents), "agents": agents})
        except Exception as e:
            return self._json(handler, 500, {"status": "error", "message": str(e)})

    def c2_purge_agents(self, handler, data):
        try:
            threshold = int(data.get("threshold", 86400))
            purged = c2_manager.db.purge_stale_agents(threshold)
            return self._json(handler, 200, {"status": "ok", "purged": purged})
        except Exception as e:
            return self._json(handler, 500, {"status": "error", "message": str(e)})

    # ---------------------- SSE: event stream ----------------------

    def c2_events_sse(self, handler):
        handler.send_response(200)
        handler.send_header("Content-Type", "text/event-stream")
        handler.send_header("Cache-Control", "no-cache")
        handler.send_header("Connection", "keep-alive")
        handler.send_header("X-Accel-Buffering", "no")  # useful behind Nginx/Traefik
        handler.end_headers()

        # Tell the client its reconnection backoff (avoids reconnect storms)
        try:
            handler.wfile.write(b"retry: 5000\n\n")  # 5 s
            handler.wfile.flush()
        except Exception:
            return

        def push(event: dict):
            try:
                t = event.get('type')
                if t:
                    handler.wfile.write(f"event: {t}\n".encode("utf-8"))
                safe = self._to_jsonable(event)
                payload = f"data: {json.dumps(safe, ensure_ascii=False)}\n\n"
                handler.wfile.write(payload.encode("utf-8"))
                handler.wfile.flush()
            except Exception:
                # Connection broken: unsubscribe cleanly
                try:
                    c2_manager.bus.unsubscribe(push)
                except Exception:
                    pass

        c2_manager.bus.subscribe(push)
        try:
            # Periodic keep-alive to hold the stream open
            while True:
                time.sleep(15)
                try:
                    handler.wfile.write(b": keep-alive\n\n")  # SSE comment line
                    handler.wfile.flush()
                except Exception:
                    break
        finally:
            try:
                c2_manager.bus.unsubscribe(push)
            except Exception:
                pass
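
    # A minimal SSE consumer sketch for the stream above (the endpoint URL and
    # port are assumptions; they are not defined in this file):
    #
    #     import json, requests
    #     with requests.get("http://localhost:8000/c2/events", stream=True) as r:
    #         for line in r.iter_lines():
    #             if line.startswith(b"data: "):
    #                 print(json.loads(line[len(b"data: "):]))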

    # ---------------------- Client file management ----------------------

    def c2_download_client(self, handler, filename):
        """Serve a generated client file for download."""
        try:
            # Security check - prevent directory traversal
            if '..' in filename or '/' in filename or '\\' in filename:
                handler.send_error(403, "Forbidden")
                return

            clients_dir = Path(__file__).parent / "c2_data" / "clients"
            filepath = clients_dir / filename

            if not filepath.exists() or not filepath.is_file():
                handler.send_error(404, "File not found")
                return

            handler.send_response(200)
            handler.send_header('Content-Type', 'application/octet-stream')
            handler.send_header('Content-Disposition', f'attachment; filename="{filename}"')

            with open(filepath, 'rb') as f:
                content = f.read()

            handler.send_header('Content-Length', str(len(content)))
            handler.end_headers()
            handler.wfile.write(content)

        except Exception as e:
            self.logger.error(f"Error downloading client: {e}")
            handler.send_error(500, str(e))

    def c2_list_clients(self, handler):
        """List all generated client files."""
        try:
            clients_dir = Path(__file__).parent / "c2_data" / "clients"

            clients = []
            if clients_dir.exists():
                for file in clients_dir.glob("*.py"):
                    clients.append({
                        "filename": file.name,
                        "size": file.stat().st_size,
                        "modified": file.stat().st_mtime
                    })

            return self._json(handler, 200, {"status": "ok", "clients": clients})

        except Exception as e:
            return self._json(handler, 500, {"status": "error", "message": str(e)})

    def c2_remove_client(self, handler, data):
        """Remove a client completely."""
        client_id = (data.get("client_id") or "").strip()
        if not client_id:
            return self._json(handler, 400, {"status": "error", "message": "client_id required"})

        res = c2_manager.remove_client(client_id)
        return self._json(handler, 200, res)

    def c2_generate_client(self, handler, data):
        """Enhanced client generation with platform support."""
        cid = (data.get("client_id") or "").strip()
        if not cid:
            cid = f"zombie_{int(time.time())}"

        platform = data.get("platform", "universal")
        lab_user = (data.get("lab_user") or "testuser").strip()
        lab_pass = (data.get("lab_password") or "testpass").strip()

        res = c2_manager.generate_client(
            client_id=cid,
            platform=platform,
            lab_user=lab_user,
            lab_password=lab_pass
        )
        return self._json(handler, 200, res)
410  web_utils/character_utils.py  Normal file
@@ -0,0 +1,410 @@
"""
Character and persona management utilities.
Handles character switching, creation, and image management.
"""
from __future__ import annotations
import os
import re
import json
import shutil
import time
import logging
from pathlib import Path
from io import BytesIO
from typing import Any, Dict, Optional
from urllib.parse import urlparse, parse_qs

import io
import cgi
from PIL import Image

from logger import Logger

logger = Logger(name="character_utils.py", level=logging.DEBUG)


class CharacterUtils:
    """Utilities for character/persona management."""

    def __init__(self, shared_data):
        self.shared_data = shared_data
        self.logger = logger

    # --------- helpers ---------

    def _send_error_response(self, handler, message: str, status_code: int = 500):
        handler.send_response(status_code)
        handler.send_header('Content-Type', 'application/json')
        handler.end_headers()
        handler.wfile.write(json.dumps({'status': 'error', 'message': message}).encode('utf-8'))

    def _to_bmp_bytes(self, raw: bytes, width: int | None = None, height: int | None = None) -> bytes:
        """Convert any image bytes to BMP (optionally resize)."""
        with Image.open(BytesIO(raw)) as im:
            if im.mode != 'RGB':
                im = im.convert('RGB')
            if width and height:
                try:
                    resample = Image.Resampling.LANCZOS
                except AttributeError:
                    resample = Image.LANCZOS
                im = im.resize((width, height), resample)
            out = BytesIO()
            im.save(out, format='BMP')
            return out.getvalue()
def get_existing_character_numbers(self, action_dir: str | Path, action_name: str) -> set[int]:
|
||||
"""
|
||||
Retourne l'ensemble des numéros déjà utilisés pour les images characters
|
||||
(p. ex. <action>1.bmp, <action>2.bmp, ...).
|
||||
"""
|
||||
d = Path(action_dir)
|
||||
if not d.exists():
|
||||
return set()
|
||||
nums: set[int] = set()
|
||||
pat = re.compile(rf"^{re.escape(action_name)}(\d+)\.bmp$", re.IGNORECASE)
|
||||
for p in d.glob("*.bmp"):
|
||||
m = pat.match(p.name)
|
||||
if m:
|
||||
try:
|
||||
nums.add(int(m.group(1)))
|
||||
except ValueError:
|
||||
pass
|
||||
return nums
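
    # Example: with NMAP1.bmp, NMAP2.bmp and NMAP5.bmp on disk this returns
    # {1, 2, 5}, so upload_character_images below numbers the next file 6.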

    # --------- endpoints ---------

    def get_current_character(self):
        """Read the current character from the config (DB)."""
        try:
            return self.shared_data.config.get('current_character', 'BJORN') or 'BJORN'
        except Exception:
            return 'BJORN'

    def serve_bjorn_say(self, handler):
        try:
            bjorn_says_data = {"text": self.shared_data.bjorn_says}
            handler.send_response(200)
            handler.send_header("Content-Type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps(bjorn_says_data).encode('utf-8'))
        except Exception as e:
            handler.send_response(500)
            handler.send_header("Content-Type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode('utf-8'))

    def serve_bjorn_character(self, handler):
        try:
            img_byte_arr = io.BytesIO()
            self.shared_data.bjorn_character.save(img_byte_arr, format='PNG')
            img_byte_arr = img_byte_arr.getvalue()
            handler.send_response(200)
            handler.send_header('Content-Type', 'image/png')
            handler.send_header('Cache-Control', 'no-cache')
            handler.end_headers()
            handler.wfile.write(img_byte_arr)
        except BrokenPipeError:
            pass
        except Exception as e:
            self.logger.error(f"Error serving character image: {e}")

    def list_characters(self, handler):
        """List all available characters with metadata."""
        try:
            characters_dir = self.shared_data.settings_dir
            characters = []

            for entry in os.scandir(characters_dir):
                if entry.is_dir():
                    character_name = entry.name
                    # legacy layout? get_character_icon expects <character>/status/IDLE/IDLE1.bmp
                    idle_image_path = os.path.join(entry.path, 'IDLE', 'IDLE1.bmp')
                    has_idle_image = os.path.exists(idle_image_path)
                    characters.append({'name': character_name, 'has_idle_image': has_idle_image})

            current_character = self.get_current_character()

            handler.send_response(200)
            handler.send_header('Content-Type', 'application/json')
            handler.end_headers()
            resp = {'status': 'success', 'characters': characters, 'current_character': current_character}
            handler.wfile.write(json.dumps(resp).encode('utf-8'))
        except Exception as e:
            self.logger.error(f"Error in list_characters: {e}")
            self._send_error_response(handler, str(e))

    def get_character_icon(self, handler):
        """Serve character icon (IDLE1.bmp)."""
        try:
            query_components = parse_qs(urlparse(handler.path).query)
            character = (query_components.get('character', [None])[0] or '').strip()
            if not character:
                raise ValueError('Character parameter is required')

            current_character = self.get_current_character()
            if character == current_character:
                # When the character is active, its images live in status_images_dir/IDLE/IDLE1.bmp
                idle_image_path = os.path.join(self.shared_data.status_images_dir, 'IDLE', 'IDLE1.bmp')
            else:
                idle_image_path = os.path.join(self.shared_data.settings_dir, character, 'status', 'IDLE', 'IDLE1.bmp')

            if not os.path.exists(idle_image_path):
                raise FileNotFoundError(f"IDLE1.bmp for character '{character}' not found")

            with open(idle_image_path, 'rb') as f:
                image_data = f.read()

            handler.send_response(200)
            handler.send_header('Content-Type', 'image/bmp')
            handler.end_headers()
            handler.wfile.write(image_data)
        except Exception as e:
            self.logger.error(f"Error in get_character_icon: {e}")
            handler.send_error(404)

    def create_character(self, handler):
        """Create a new character by copying the current character's images."""
        try:
            content_length = int(handler.headers['Content-Length'])
            post_data = handler.rfile.read(content_length).decode('utf-8')
            data = json.loads(post_data)
            new_character_name = (data.get('character_name') or '').strip()

            if not new_character_name:
                raise ValueError('Character name is required')

            new_character_dir = os.path.join(self.shared_data.settings_dir, new_character_name)
            if os.path.exists(new_character_dir):
                raise FileExistsError(f"Character '{new_character_name}' already exists")

            self.save_current_character_images(new_character_dir)

            handler.send_response(200)
            handler.send_header('Content-Type', 'application/json')
            handler.end_headers()
            handler.wfile.write(json.dumps({'status': 'success', 'message': 'Character created successfully'}).encode('utf-8'))
        except Exception as e:
            self.logger.error(f"Error in create_character: {e}")
            self._send_error_response(handler, str(e))

    def switch_character(self, handler):
        """Switch to a different character, saving current modifications first."""
        try:
            content_length = int(handler.headers['Content-Length'])
            post_data = handler.rfile.read(content_length).decode('utf-8')
            data = json.loads(post_data)
            selected_character_name = (data.get('character_name') or '').strip()

            if not selected_character_name:
                raise ValueError('Character name is required')

            current_character = self.get_current_character()
            if selected_character_name == current_character:
                handler.send_response(200)
                handler.send_header('Content-Type', 'application/json')
                handler.end_headers()
                handler.wfile.write(json.dumps({'status': 'success', 'message': 'Character already selected'}).encode('utf-8'))
                return

            # Save current character's images
            current_character_dir = os.path.join(self.shared_data.settings_dir, current_character)
            self.save_current_character_images(current_character_dir)

            # Check that the new character exists
            selected_character_dir = os.path.join(self.shared_data.settings_dir, selected_character_name)
            if not os.path.exists(selected_character_dir):
                raise FileNotFoundError(f"Character '{selected_character_name}' does not exist")

            # Activate
            self.copy_character_images(
                selected_character_dir,
                self.shared_data.status_images_dir,
                self.shared_data.static_images_dir
            )

            # Update config
            self.shared_data.config['bjorn_name'] = selected_character_name
            self.shared_data.config['current_character'] = selected_character_name
            self.shared_data.save_config()
            self.shared_data.load_config()

            time.sleep(1)
            self.shared_data.load_images()

            handler.send_response(200)
            handler.send_header('Content-Type', 'application/json')
            handler.end_headers()
            handler.wfile.write(json.dumps({'status': 'success', 'message': 'Character switched successfully'}).encode('utf-8'))
        except Exception as e:
            self.logger.error(f"Error in switch_character: {e}")
            self._send_error_response(handler, str(e))

    def delete_character(self, handler):
        """Delete a character, handling the current-character case."""
        try:
            content_length = int(handler.headers['Content-Length'])
            post_data = handler.rfile.read(content_length).decode('utf-8')
            data = json.loads(post_data)
            character_name = (data.get('character_name') or '').strip()

            if not character_name:
                raise ValueError('Character name is required')

            if character_name == 'BJORN':
                raise ValueError("Cannot delete the default 'BJORN' character")

            character_dir = os.path.join(self.shared_data.settings_dir, character_name)
            if not os.path.exists(character_dir):
                raise FileNotFoundError(f"Character '{character_name}' does not exist")

            current_character = self.get_current_character()
            if character_name == current_character:
                bjorn_dir = os.path.join(self.shared_data.settings_dir, 'BJORN')
                if not os.path.exists(bjorn_dir):
                    raise FileNotFoundError("Default 'BJORN' character does not exist")

                self.copy_character_images(
                    bjorn_dir,
                    self.shared_data.status_images_dir,
                    self.shared_data.static_images_dir
                )

                self.shared_data.config['bjorn_name'] = 'BJORN'
                self.shared_data.config['current_character'] = 'BJORN'
                self.shared_data.save_config()
                self.shared_data.load_config()
                self.shared_data.load_images()

            shutil.rmtree(character_dir)

            handler.send_response(200)
            handler.send_header('Content-Type', 'application/json')
            handler.end_headers()
            handler.wfile.write(json.dumps({'status': 'success', 'message': 'Character deleted successfully'}).encode('utf-8'))
        except Exception as e:
            self.logger.error(f"Error in delete_character: {e}")
            self._send_error_response(handler, str(e))

    def save_current_character_images(self, character_dir):
        """Save current character's status and static images."""
        try:
            if not os.path.exists(character_dir):
                os.makedirs(character_dir)

            dest_status_dir = os.path.join(character_dir, 'status')
            if os.path.exists(dest_status_dir):
                shutil.rmtree(dest_status_dir)
            shutil.copytree(self.shared_data.status_images_dir, dest_status_dir)

            dest_static_dir = os.path.join(character_dir, 'static')
            if os.path.exists(dest_static_dir):
                shutil.rmtree(dest_static_dir)
            shutil.copytree(self.shared_data.static_images_dir, dest_static_dir)
        except Exception as e:
            self.logger.error(f"Error in save_current_character_images: {e}")

    def copy_character_images(self, source_dir, dest_status_dir, dest_static_dir):
        """Copy character images from source to destination directories."""
        try:
            source_status_dir = os.path.join(source_dir, 'status')
            if os.path.exists(source_status_dir):
                if os.path.exists(dest_status_dir):
                    shutil.rmtree(dest_status_dir)
                shutil.copytree(source_status_dir, dest_status_dir)

            source_static_dir = os.path.join(source_dir, 'static')
            if os.path.exists(source_static_dir):
                if os.path.exists(dest_static_dir):
                    shutil.rmtree(dest_static_dir)
                shutil.copytree(source_static_dir, dest_static_dir)
        except Exception as e:
            self.logger.error(f"Error in copy_character_images: {e}")

    def upload_character_images(self, handler):
        """Add character images for an existing action (always converted to BMP and numbered)."""
        try:
            ctype, pdict = cgi.parse_header(handler.headers.get('Content-Type'))
            if ctype != 'multipart/form-data':
                raise ValueError('Content-Type must be multipart/form-data')

            pdict['boundary'] = bytes(pdict['boundary'], "utf-8")
            pdict['CONTENT-LENGTH'] = int(handler.headers.get('Content-Length'))

            form = cgi.FieldStorage(
                fp=io.BytesIO(handler.rfile.read(pdict['CONTENT-LENGTH'])),
                headers=handler.headers,
                environ={'REQUEST_METHOD': 'POST'},
                keep_blank_values=True
            )

            if 'action_name' not in form:
                raise ValueError('Action name is required')

            action_name = (form.getvalue('action_name') or '').strip()
            if not action_name:
                raise ValueError('Action name is required')

            if 'character_images' not in form:
                raise ValueError('No image file provided')

            action_dir = os.path.join(self.shared_data.status_images_dir, action_name)
            if not os.path.exists(action_dir):
                raise FileNotFoundError(f"Action '{action_name}' does not exist")

            existing_numbers = self.get_existing_character_numbers(action_dir, action_name)
            next_number = max(existing_numbers, default=0) + 1

            file_items = form['character_images']
            if not isinstance(file_items, list):
                file_items = [file_items]

            for file_item in file_items:
                if not getattr(file_item, 'filename', ''):
                    continue
                raw = file_item.file.read()
                bmp = self._to_bmp_bytes(raw)
                out_path = os.path.join(action_dir, f"{action_name}{next_number}.bmp")
                with open(out_path, 'wb') as f:
                    f.write(bmp)
                next_number += 1

            handler.send_response(200)
            handler.send_header('Content-Type', 'application/json')
            handler.end_headers()
            handler.wfile.write(json.dumps({'status': 'success', 'message': 'Character images added successfully'}).encode('utf-8'))
        except Exception as e:
            self.logger.error(f"Error in upload_character_images: {e}")
            import traceback
            self.logger.error(traceback.format_exc())
            self._send_error_response(handler, str(e))
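
    # Illustrative client call for upload_character_images (the endpoint path
    # is an assumption; the field names match the parsing above):
    #   curl -F "action_name=NMAP" \
    #        -F "character_images=@face1.png" \
    #        -F "character_images=@face2.jpg" \
    #        http://<bjorn-host>/upload_character_images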

    def reload_fonts(self, handler):
        """Reload fonts by running load_fonts."""
        try:
            self.shared_data.load_fonts()
            handler.send_response(200)
            handler.send_header('Content-Type', 'application/json')
            handler.end_headers()
            handler.wfile.write(json.dumps({'status': 'success', 'message': 'Fonts loaded successfully.'}).encode('utf-8'))
        except Exception as e:
            self.logger.error(f"Error in reload_fonts: {e}")
            handler.send_response(500)
            handler.send_header('Content-Type', 'application/json')
            handler.end_headers()
            handler.wfile.write(json.dumps({'status': 'error', 'message': str(e)}).encode('utf-8'))

    def reload_images(self, handler):
        """Reload images by running load_images."""
        try:
            self.shared_data.load_images()
            handler.send_response(200)
            handler.send_header('Content-Type', 'application/json')
            handler.end_headers()
            handler.wfile.write(json.dumps({'status': 'success', 'message': 'Images reloaded successfully.'}).encode('utf-8'))
        except Exception as e:
            self.logger.error(f"Error in reload_images: {e}")
            handler.send_response(500)
            handler.send_header('Content-Type', 'application/json')
            handler.end_headers()
            handler.wfile.write(json.dumps({'status': 'error', 'message': str(e)}).encode('utf-8'))

141
web_utils/comment_utils.py
Normal file
@@ -0,0 +1,141 @@
# web_utils/comment_utils.py
"""
Comment and status message management utilities.
Handles status comments/messages displayed in the UI.
"""
from __future__ import annotations
import json
import re
import traceback
from typing import Any, Dict, Optional

import logging
from logger import Logger
logger = Logger(name="comment_utils.py", level=logging.DEBUG)


class CommentUtils:
    """Utilities for managing comments and status messages."""

    def __init__(self, shared_data):
        self.logger = logger
        self.shared_data = shared_data

    def get_sections(self, handler):
        """Get list of comment sections (statuses) from DB."""
        try:
            rows = self.shared_data.db.query("SELECT DISTINCT status FROM comments ORDER BY status;")
            sections = [r["status"] for r in rows]

            handler.send_response(200)
            handler.send_header('Content-Type', 'application/json')
            handler.end_headers()
            response = json.dumps({'status': 'success', 'sections': sections})
            handler.wfile.write(response.encode('utf-8'))
        except Exception as e:
            self.logger.error(f"Error in get_sections: {e}")
            handler.send_response(500)
            handler.send_header('Content-Type', 'application/json')
            handler.end_headers()
            error_response = json.dumps({'status': 'error', 'message': str(e)})
            handler.wfile.write(error_response.encode('utf-8'))

    def get_comments(self, handler):
        """Get comments for a specific section from DB."""
        try:
            from urllib.parse import urlparse, parse_qs
            query_components = parse_qs(urlparse(handler.path).query)
            section = query_components.get('section', [None])[0]
            if not section:
                raise ValueError('Section parameter is required')

            rows = self.shared_data.db.query(
                "SELECT text FROM comments WHERE status=? ORDER BY id;",
                (section,)
            )
            comments = [r["text"] for r in rows]

            handler.send_response(200)
            handler.send_header('Content-Type', 'application/json')
            handler.end_headers()
            response = json.dumps({'status': 'success', 'comments': comments})
            handler.wfile.write(response.encode('utf-8'))
        except Exception as e:
            self.logger.error(f"Error in get_comments: {e}")
            handler.send_response(500)
            handler.send_header('Content-Type', 'application/json')
            handler.end_headers()
            error_response = json.dumps({'status': 'error', 'message': str(e)})
            handler.wfile.write(error_response.encode('utf-8'))

    def save_comments(self, data):
        """Save comment list for a section to DB (replaces existing)."""
        try:
            section = data.get('section')
            comments = data.get('comments')
            lang = data.get('lang', 'fr')
            theme = data.get('theme', section or 'general')
            weight = int(data.get('weight', 1))

            if not section or comments is None:
                return {'status': 'error', 'message': 'Section and comments are required'}

            if not isinstance(comments, list):
                return {'status': 'error', 'message': 'Comments must be a list of strings'}

            # Replace section content
            with self.shared_data.db.transaction(immediate=True):
                self.shared_data.db.execute("DELETE FROM comments WHERE status=? AND lang=?", (section, lang))
                rows = []
                for txt in comments:
                    t = str(txt).strip()
                    if not t:
                        continue
                    rows.append((t, section, theme, lang, weight))
                if rows:
                    self.shared_data.db.insert_comments(rows)

            return {'status': 'success', 'message': 'Comments saved successfully'}
        except Exception as e:
            self.logger.error(f"Error in save_comments: {e}")
            return {'status': 'error', 'message': str(e)}
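
    # Illustrative input for save_comments (values hypothetical; defaults as
    # read above, so omitting "theme" falls back to the section name):
    #   {"section": "IDLE", "comments": ["Hmm...", "Time to scan."],
    #    "lang": "fr", "theme": "IDLE", "weight": 1}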

    def restore_default_comments(self, data=None):
        """Restore default comments from JSON file to DB."""
        try:
            inserted = self.shared_data.db.import_comments_from_json(
                self.shared_data.default_comments_file,
                lang=(data.get('lang') if isinstance(data, dict) else None) or 'fr',
                clear_existing=True
            )
            return {
                'status': 'success',
                'message': f'Comments restored ({inserted} entries).'
            }
        except Exception as e:
            self.logger.error(f"Error in restore_default_comments: {e}")
            self.logger.error(traceback.format_exc())
            return {'status': 'error', 'message': str(e)}

    def delete_comment_section(self, data):
        """Delete a comment section and its associated comments from DB."""
        try:
            section_name = data.get('section')
            lang = data.get('lang', 'fr')

            if not section_name:
                return {'status': 'error', 'message': "Section name is required."}

            if not re.match(r'^[\w\-\s]+$', section_name):
                return {'status': 'error', 'message': "Invalid section name."}

            count = self.shared_data.db.execute(
                "DELETE FROM comments WHERE status=? AND lang=?;",
                (section_name, lang)
            )
            if count == 0:
                return {'status': 'error', 'message': f"Section '{section_name}' not found for lang='{lang}'."}

            return {'status': 'success', 'message': 'Section deleted successfully.'}
        except Exception as e:
            self.logger.error(f"Error in delete_comment_section: {e}")
            self.logger.error(traceback.format_exc())
            return {'status': 'error', 'message': str(e)}

483
web_utils/db_utils.py
Normal file
@@ -0,0 +1,483 @@
# web_utils/db_utils.py
"""
Database manager utilities.
Handles database table operations, CRUD, schema management, and exports.
"""
from __future__ import annotations
import json
import re
import io
import csv
import zipfile
from typing import Any, Dict, Optional, List
from urllib.parse import urlparse, parse_qs

import logging
from logger import Logger
logger = Logger(name="db_utils.py", level=logging.DEBUG)


class DBUtils:
    """Utilities for database management operations."""

    def __init__(self, shared_data):
        self.logger = logger
        self.shared_data = shared_data

    def _db_safe_ident(self, name: str) -> str:
        """Validate and sanitize SQL identifiers."""
        if not isinstance(name, str) or not re.fullmatch(r"[A-Za-z_][A-Za-z0-9_]*", name):
            raise ValueError("Invalid identifier")
        return name
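
    # Identifiers cannot be bound as SQL parameters, so every table/column name
    # interpolated into the f-string queries below must pass this whitelist first.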

    def _db_table_info(self, table: str):
        """Get table info (primary key and columns)."""
        table = self._db_safe_ident(table)
        cols = [r["name"] for r in self.shared_data.db.query(f"PRAGMA table_info({table});")]
        if not cols:
            raise ValueError("Table not found")

        pk = None
        for r in self.shared_data.db.query(f"PRAGMA table_info({table});"):
            if int(r["pk"] or 0) == 1:
                pk = r["name"]
                break

        if not pk:
            pk = "id" if "id" in cols else cols[0]
        return pk, cols

    def _db_list_tables(self):
        """List all tables with row counts and primary keys."""
        rows = self.shared_data.db.query(
            "SELECT name FROM sqlite_master WHERE type='table' AND name NOT LIKE 'sqlite_%' ORDER BY name;"
        )
        out = []
        for r in rows:
            name = r["name"]
            try:
                pk, _ = self._db_table_info(name)
            except Exception:
                pk = None
            cnt = self.shared_data.db.query_one(f"SELECT COUNT(*) c FROM {self._db_safe_ident(name)};")["c"]
            out.append({"name": name, "count": cnt, "pk": pk})
        return out

    def _db_list_views(self):
        """List all views with row counts."""
        rows = self.shared_data.db.query(
            "SELECT name, sql FROM sqlite_master WHERE type='view' ORDER BY name;"
        )
        out = []
        for r in rows:
            name = r["name"]
            try:
                cnt = self.shared_data.db.query_one(f"SELECT COUNT(*) c FROM {self._db_safe_ident(name)};")["c"]
            except Exception:
                cnt = None
            out.append({"name": name, "count": cnt})
        return out

    def _db_build_where(self, table: str, cols: list[str], q: str):
        """Build WHERE clause from query string."""
        if not q:
            return "", []

        parts = [p.strip() for p in q.split(",") if p.strip()]
        where_clauses = []
        params = []
        text_cols = set()

        # Determine text columns
        colinfo = {r["name"]: (r["type"] or "").upper()
                   for r in self.shared_data.db.query(f"PRAGMA table_info({self._db_safe_ident(table)});")}
        for c, t in colinfo.items():
            if "CHAR" in t or "TEXT" in t or t == "":
                text_cols.add(c)

        relop = re.compile(r"^([A-Za-z_][A-Za-z0-9_]*)\s*(=|>=|<=|>|<|:)\s*(.+)$")
        for p in parts:
            m = relop.match(p)
            if m:
                col, op, val = m.groups()
                if col not in cols:
                    continue
                if op == ":":
                    where_clauses.append(f"{self._db_safe_ident(col)} LIKE ?")
                    params.append(f"%{val}%")
                elif op in ("=", ">=", "<=", ">", "<"):
                    where_clauses.append(f"{self._db_safe_ident(col)} {op} ?")
                    params.append(val)
            else:
                # Free text search
                ors = []
                for c in text_cols:
                    ors.append(f"{self._db_safe_ident(c)} LIKE ?")
                    params.append(f"%{p}%")
                if ors:
                    where_clauses.append("(" + " OR ".join(ors) + ")")

        if not where_clauses:
            return "", []
        return "WHERE " + " AND ".join(where_clauses), params

    def db_catalog_endpoint(self, handler):
        """Get database catalog (tables and views)."""
        try:
            data = {"tables": self._db_list_tables(), "views": self._db_list_views()}
            self._write_json(handler, data)
        except Exception as e:
            self._write_json(handler, {"status": "error", "message": str(e)}, 500)

    def db_schema_endpoint(self, handler, name: str):
        """Get schema for a table or view."""
        try:
            name = self._db_safe_ident(name)
            row = self.shared_data.db.query_one(
                "SELECT type, name, sql FROM sqlite_master WHERE (type='table' OR type='view') AND name=?;", (name,)
            )
            cols = self.shared_data.db.query(f"PRAGMA table_info({name});")
            self._write_json(handler, {"meta": row, "columns": cols})
        except Exception as e:
            self._write_json(handler, {"status": "error", "message": str(e)}, 400)

    def db_get_table_endpoint(self, handler, table_name: str):
        """Get table data with pagination and filtering."""
        try:
            qd = parse_qs(urlparse(handler.path).query)
            limit = int(qd.get("limit", ["50"])[0])
            offset = int(qd.get("offset", ["0"])[0])
            sort = (qd.get("sort", [""])[0] or "").strip()
            q = (qd.get("q", [""])[0] or "").strip()

            pk, cols = self._db_table_info(table_name)

            # WHERE
            where_sql, params = self._db_build_where(table_name, cols, q)

            # ORDER BY
            order_sql = ""
            if sort:
                if ":" in sort:
                    col, direction = sort.split(":", 1)
                    col = col.strip()
                    direction = direction.strip().lower()
                else:
                    col, direction = sort, "asc"
                if col in cols and direction in ("asc", "desc"):
                    order_sql = f"ORDER BY {self._db_safe_ident(col)} {direction.upper()}"

            # Total
            total = self.shared_data.db.query_one(
                f"SELECT COUNT(*) c FROM {self._db_safe_ident(table_name)} {where_sql};", tuple(params)
            )["c"]

            # Rows
            rows = self.shared_data.db.query(
                f"SELECT * FROM {self._db_safe_ident(table_name)} {where_sql} {order_sql} LIMIT ? OFFSET ?;",
                tuple(params) + (int(limit), int(offset))
            )

            self._write_json(handler, {
                "columns": cols,
                "rows": rows,
                "pk": pk,
                "total": total
            })
        except Exception as e:
            self._write_json(handler, {"status": "error", "message": str(e)}, 500)

    def db_update_cells_endpoint(self, handler, payload: dict):
        """Update table cells."""
        try:
            table = payload["table"]
            pk = payload.get("pk") or self._db_table_info(table)[0]
            _, cols = self._db_table_info(table)

            with self.shared_data.db.transaction():
                for row in payload.get("rows", []):
                    pk_val = row["pk"]
                    changes = row.get("changes", {}) or {}
                    sets = []
                    params = []
                    for c, v in changes.items():
                        if c not in cols or c == pk:
                            continue
                        sets.append(f"{self._db_safe_ident(c)} = ?")
                        params.append(v)
                    if not sets:
                        continue
                    params.append(pk_val)
                    self.shared_data.db.execute(
                        f"UPDATE {self._db_safe_ident(table)} SET {', '.join(sets)} WHERE {self._db_safe_ident(pk)} = ?;",
                        tuple(params)
                    )

            self._write_json(handler, {"status": "ok"})
        except Exception as e:
            self._write_json(handler, {"status": "error", "message": str(e)}, 400)

    def db_delete_rows_endpoint(self, handler, payload: dict):
        """Delete table rows."""
        try:
            table = payload["table"]
            pk = payload.get("pk") or self._db_table_info(table)[0]
            pks = payload.get("pks", []) or []
            if not pks:
                raise ValueError("No primary keys provided")
            qmarks = ",".join("?" for _ in pks)
            self.shared_data.db.execute(
                f"DELETE FROM {self._db_safe_ident(table)} WHERE {self._db_safe_ident(pk)} IN ({qmarks});",
                tuple(pks)
            )
            self._write_json(handler, {"status": "ok", "deleted": len(pks)})
        except Exception as e:
            self._write_json(handler, {"status": "error", "message": str(e)}, 400)

    def db_insert_row_endpoint(self, handler, payload: dict):
        """Insert a new row."""
        try:
            table = payload["table"]
            pk, cols = self._db_table_info(table)
            values = payload.get("values", {}) or {}

            insert_cols = []
            insert_vals = []
            qmarks = []
            for c in cols:
                if c == pk:
                    continue
                if c in values:
                    insert_cols.append(self._db_safe_ident(c))
                    insert_vals.append(values[c])
                    qmarks.append("?")

            if not insert_cols:
                self.shared_data.db.execute(f"INSERT INTO {self._db_safe_ident(table)} DEFAULT VALUES;")
            else:
                self.shared_data.db.execute(
                    f"INSERT INTO {self._db_safe_ident(table)} ({', '.join(insert_cols)}) VALUES ({', '.join(qmarks)});",
                    tuple(insert_vals)
                )

            row = self.shared_data.db.query_one("SELECT last_insert_rowid() AS lid;")
            new_pk = row["lid"]
            self._write_json(handler, {"status": "ok", "pk": new_pk})
        except Exception as e:
            self._write_json(handler, {"status": "error", "message": str(e)}, 400)

    def db_export_table_endpoint(self, handler, table_name: str):
        """Export table as CSV or JSON."""
        try:
            fmt = (parse_qs(urlparse(handler.path).query).get("format", ["csv"])[0] or "csv").lower()
            pk, cols = self._db_table_info(table_name)
            rows = self.shared_data.db.query(f"SELECT * FROM {self._db_safe_ident(table_name)};")

            if fmt == "json":
                payload = json.dumps(rows, ensure_ascii=False, indent=2)
                handler.send_response(200)
                handler.send_header("Content-Type", "application/json; charset=utf-8")
                handler.end_headers()
                handler.wfile.write(payload.encode("utf-8"))
                return

            # CSV
            buf = io.StringIO()
            w = csv.DictWriter(buf, fieldnames=cols, extrasaction='ignore')
            w.writeheader()
            for r in rows:
                w.writerow({c: r.get(c) for c in cols})
            handler.send_response(200)
            handler.send_header("Content-Type", "text/csv; charset=utf-8")
            handler.end_headers()
            handler.wfile.write(buf.getvalue().encode("utf-8"))
        except Exception as e:
            self._write_json(handler, {"status": "error", "message": str(e)}, 400)

    def db_vacuum_endpoint(self, handler):
        """Vacuum and optimize database."""
        try:
            self.shared_data.db.vacuum()
            self.shared_data.db.optimize()
            self._write_json(handler, {"status": "ok"})
        except Exception as e:
            self._write_json(handler, {"status": "error", "message": str(e)}, 500)

    def db_drop_table_endpoint(self, handler, table_name: str):
        """Drop a table."""
        try:
            table = self._db_safe_ident(table_name)
            self.shared_data.db.execute(f"DROP TABLE IF EXISTS {table};")
            self._write_json(handler, {"status": "ok"})
        except Exception as e:
            self._write_json(handler, {"status": "error", "message": str(e)}, 400)

    def db_truncate_table_endpoint(self, handler, table_name: str):
        """Truncate a table."""
        try:
            table = self._db_safe_ident(table_name)
            self.shared_data.db.execute(f"DELETE FROM {table};")
            try:
                self.shared_data.db.execute("DELETE FROM sqlite_sequence WHERE name=?;", (table,))
            except Exception:
                pass
            self._write_json(handler, {"status": "ok"})
        except Exception as e:
            self._write_json(handler, {"status": "error", "message": str(e)}, 400)


    def db_create_table_endpoint(self, handler, payload: dict):
        """
        payload = {
            "name": "my_table",
            "if_not_exists": true,
            "columns": [{"name": "id", "type": "INTEGER", "pk": true, "not_null": true}, ...]
        }
        """
        try:
            name = self._db_safe_ident(payload["name"])
            cols = payload.get("columns") or []
            if not cols:
                raise ValueError("columns required")
            parts = []
            pk_inline = None
            for c in cols:
                cname = self._db_safe_ident(c["name"])
                ctype = (c.get("type") or "").strip()
                seg = f"{cname} {ctype}".strip()
                if c.get("not_null"):
                    seg += " NOT NULL"
                if "default" in c and c["default"] is not None:
                    seg += " DEFAULT " + str(c["default"])
                if c.get("pk"):
                    pk_inline = cname
                    # In SQLite, AUTOINCREMENT is only valid on INTEGER PRIMARY KEY
                    if ctype.upper().startswith("INTEGER"):
                        seg += " PRIMARY KEY AUTOINCREMENT"
                    else:
                        seg += " PRIMARY KEY"
                parts.append(seg)
            if pk_inline is None:
                # No explicit PK: fall back to SQLite's implicit rowid
                pass
            ine = "IF NOT EXISTS " if payload.get("if_not_exists") else ""
            sql = f"CREATE TABLE {ine}{name} ({', '.join(parts)});"
            self.shared_data.db.execute(sql)
            self._write_json(handler, {"status": "ok"})
        except Exception as e:
            self._write_json(handler, {"status": "error", "message": str(e)}, 400)

    def db_rename_table_endpoint(self, handler, payload: dict):
        try:
            old = self._db_safe_ident(payload["from"])
            new = self._db_safe_ident(payload["to"])
            self.shared_data.db.execute(f"ALTER TABLE {old} RENAME TO {new};")
            self._write_json(handler, {"status": "ok"})
        except Exception as e:
            self._write_json(handler, {"status": "error", "message": str(e)}, 400)

    def db_add_column_endpoint(self, handler, payload: dict):
        """
        payload = {table, column: {name, type, not_null?, default?}}
        """
        try:
            table = self._db_safe_ident(payload["table"])
            c = payload["column"]
            cname = self._db_safe_ident(c["name"])
            ctype = (c.get("type") or "").strip()
            seg = f"{cname} {ctype}".strip()
            if c.get("not_null"):
                seg += " NOT NULL"
            if "default" in c and c["default"] is not None:
                seg += " DEFAULT " + str(c["default"])
            self.shared_data.db.execute(f"ALTER TABLE {table} ADD COLUMN {seg};")
            self._write_json(handler, {"status": "ok"})
        except Exception as e:
            self._write_json(handler, {"status": "error", "message": str(e)}, 400)


    # --- drop/truncate (view/table) ---
    def db_drop_view_endpoint(self, handler, view_name: str):
        try:
            view = self._db_safe_ident(view_name)
            self.shared_data.db.execute(f"DROP VIEW IF EXISTS {view};")
            self._write_json(handler, {"status": "ok"})
        except Exception as e:
            self._write_json(handler, {"status": "error", "message": str(e)}, 400)

    # --- export all (zip of CSV/JSON) ---
    def db_export_all_endpoint(self, handler):
        try:
            fmt = (parse_qs(urlparse(handler.path).query).get("format", ["csv"])[0] or "csv").lower()
            mem = io.BytesIO()
            with zipfile.ZipFile(mem, mode="w", compression=zipfile.ZIP_DEFLATED) as z:
                # tables
                for t in self._db_list_tables():
                    name = t["name"]
                    rows = self.shared_data.db.query(f"SELECT * FROM {self._db_safe_ident(name)};")
                    if fmt == "json":
                        z.writestr(f"tables/{name}.json", json.dumps(rows, ensure_ascii=False, indent=2))
                    else:
                        cols = [c["name"] for c in self.shared_data.db.query(f"PRAGMA table_info({self._db_safe_ident(name)});")]
                        sio = io.StringIO()
                        w = csv.DictWriter(sio, fieldnames=cols, extrasaction='ignore')
                        w.writeheader()
                        for r in rows:
                            w.writerow({c: r.get(c) for c in cols})
                        z.writestr(f"tables/{name}.csv", sio.getvalue())
                # views (read-only)
                for v in self._db_list_views():
                    name = v["name"]
                    try:
                        rows = self.shared_data.db.query(f"SELECT * FROM {self._db_safe_ident(name)};")
                    except Exception:
                        rows = []
                    if fmt == "json":
                        z.writestr(f"views/{name}.json", json.dumps(rows, ensure_ascii=False, indent=2))
                    else:
                        if rows:
                            cols = list(rows[0].keys())
                        else:
                            cols = []
                        sio = io.StringIO()
                        w = csv.DictWriter(sio, fieldnames=cols, extrasaction='ignore')
                        if cols:
                            w.writeheader()
                        for r in rows:
                            w.writerow({c: r.get(c) for c in cols})
                        z.writestr(f"views/{name}.csv", sio.getvalue())
            payload = mem.getvalue()
            handler.send_response(200)
            handler.send_header("Content-Type", "application/zip")
            handler.send_header("Content-Disposition", "attachment; filename=database_export.zip")
            handler.end_headers()
            handler.wfile.write(payload)
        except Exception as e:
            self._write_json(handler, {"status": "error", "message": str(e)}, 500)

    def db_list_tables_endpoint(self, handler):
        try:
            data = self._db_list_tables()
            handler.send_response(200)
            handler.send_header("Content-Type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps(data).encode("utf-8"))
        except Exception as e:
            logger.error(f"/api/db/tables error: {e}")
            handler.send_response(500)
            handler.send_header("Content-Type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode("utf-8"))


    def _write_json(self, handler, obj: dict, code: int = 200):
        """Write JSON response."""
        handler.send_response(code)
        handler.send_header('Content-Type', 'application/json')
        handler.end_headers()
        handler.wfile.write(json.dumps(obj).encode('utf-8'))

450
web_utils/file_utils.py
Normal file
@@ -0,0 +1,450 @@
# web_utils/file_utils.py
"""
File management utilities.
Handles file operations, uploads, downloads, and directory management.
"""
from __future__ import annotations
import os
import json
import shutil
import cgi
from pathlib import Path
from io import BytesIO
from typing import Any, Dict, Optional
from urllib.parse import unquote

import logging
from logger import Logger
logger = Logger(name="file_utils.py", level=logging.DEBUG)


class FileUtils:
    """Utilities for file and directory management."""

    def __init__(self, shared_data):
        self.logger = logger
        self.shared_data = shared_data

    def list_files(self, directory, depth=0, max_depth=3):
        """List files and directories recursively."""
        files = []
        if depth > max_depth:
            return files
        for entry in os.scandir(directory):
            if entry.is_dir():
                files.append({
                    "name": entry.name,
                    "is_directory": True,
                    "children": self.list_files(entry.path, depth + 1, max_depth)
                })
            else:
                files.append({
                    "name": entry.name,
                    "is_directory": False,
                    "path": entry.path
                })
        return files
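
    # Illustrative return shape (names and paths hypothetical):
    #   [{"name": "loot", "is_directory": True, "children": [...]},
    #    {"name": "config.json", "is_directory": False, "path": "/home/bjorn/config.json"}]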

    def list_files_endpoint(self, handler):
        """HTTP endpoint to list files."""
        try:
            from http import HTTPStatus
            files = self.list_files(self.shared_data.bjorn_user_dir)
            payload = json.dumps(files).encode("utf-8")

            handler.send_response(HTTPStatus.OK)
            handler.send_header("Content-Type", "application/json")
            handler.send_header("Content-Length", str(len(payload)))
            handler.end_headers()

            try:
                handler.wfile.write(payload)
            except (BrokenPipeError, ConnectionResetError):
                return
        except Exception as e:
            error_payload = json.dumps({"status": "error", "message": str(e)}).encode("utf-8")
            handler.send_response(500)
            handler.send_header("Content-Type", "application/json")
            handler.send_header("Content-Length", str(len(error_payload)))
            handler.end_headers()
            try:
                handler.wfile.write(error_payload)
            except (BrokenPipeError, ConnectionResetError):
                return

    def loot_directories(self, handler):
        """List all loot directories and their contents recursively."""
        try:
            def scan_dir(directory):
                items = []
                for entry in os.scandir(directory):
                    item = {
                        "name": entry.name,
                        "path": entry.path.replace(self.shared_data.data_stolen_dir + '/', '')
                    }

                    if entry.is_dir():
                        item["type"] = "directory"
                        item["children"] = scan_dir(entry.path)
                        item["subdirs"] = len([c for c in item["children"] if c["type"] == "directory"])
                        item["total_files"] = sum(1 for c in item["children"] if c["type"] == "file")
                        for child in item["children"]:
                            if child["type"] == "directory":
                                item["total_files"] += child.get("total_files", 0)
                    else:
                        item["type"] = "file"
                        item["size"] = entry.stat().st_size
                    items.append(item)
                return items

            root_contents = scan_dir(self.shared_data.data_stolen_dir)
            response = {"status": "success", "data": root_contents}

            handler.send_response(200)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps(response).encode('utf-8'))

        except Exception as e:
            self.logger.error(f"Error listing directories: {e}")
            handler.send_response(500)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({
                "status": "error",
                "message": str(e)
            }).encode('utf-8'))

    def loot_download(self, handler):
        """Handle loot file download requests."""
        try:
            query = handler.path.split('?')[1]
            file_path = unquote(query.split('=')[1])
            full_path = os.path.join(self.shared_data.data_stolen_dir, file_path)

            if not os.path.isfile(full_path):
                raise FileNotFoundError(f"File not found: {file_path}")

            file_name = os.path.basename(file_path)
            file_size = os.path.getsize(full_path)

            handler.send_response(200)
            handler.send_header('Content-Type', 'application/octet-stream')
            handler.send_header('Content-Disposition', f'attachment; filename="{file_name}"')
            handler.send_header('Content-Length', str(file_size))
            handler.end_headers()

            with open(full_path, 'rb') as f:
                shutil.copyfileobj(f, handler.wfile)

        except Exception as e:
            self.logger.error(f"Error downloading file: {e}")
            handler.send_response(500)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({
                "status": "error",
                "message": str(e)
            }).encode('utf-8'))

    def download_file(self, handler):
        """Download a file from the current directory."""
        try:
            query = unquote(handler.path.split('?path=')[1])
            file_path = os.path.join(self.shared_data.current_dir, query)
            if os.path.isfile(file_path):
                handler.send_response(200)
                handler.send_header("Content-Disposition", f'attachment; filename="{os.path.basename(file_path)}"')
                handler.end_headers()
                with open(file_path, 'rb') as file:
                    handler.wfile.write(file.read())
            else:
                handler.send_response(404)
                handler.end_headers()
        except Exception as e:
            handler.send_response(500)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode('utf-8'))

    def create_folder(self, data):
        """Create a new folder."""
        try:
            folder_path = os.path.join(self.shared_data.current_dir, data['folder_path'])

            if not os.path.abspath(folder_path).startswith(self.shared_data.current_dir):
                return {'status': 'error', 'message': "Invalid path"}

            os.makedirs(folder_path, exist_ok=True)
            return {'status': 'success', 'message': 'Folder created successfully'}
        except Exception as e:
            self.logger.error(f"Error creating folder: {e}")
            return {'status': 'error', 'message': str(e)}

    def handle_file_upload(self, handler):
        """Handle file upload with directory structure preservation."""
        try:
            import re
            content_type = handler.headers['Content-Type']
            boundary = content_type.split('=')[1].encode()
            content_length = int(handler.headers['Content-Length'])
            body = handler.rfile.read(content_length)
            parts = body.split(b'--' + boundary)

            current_path = []
            for part in parts:
                if b'Content-Disposition' in part and b'name="currentPath"' in part:
                    headers, data = part.split(b'\r\n\r\n', 1)
                    current_path = json.loads(data.decode().strip())
                    break

            target_dir = os.path.join(self.shared_data.current_dir, *current_path)
            os.makedirs(target_dir, exist_ok=True)

            uploaded_files = []
            for part in parts:
                if b'Content-Disposition' in part and b'filename=' in part:
                    try:
                        headers, file_data = part.split(b'\r\n\r\n', 1)
                        headers = headers.decode()
                        match = re.search(r'filename="(.+?)"', headers)
                        if match:
                            relative_path = match.group(1)
                            relative_path = os.path.normpath(relative_path)
                            full_path = os.path.join(target_dir, relative_path)
                            parent_dir = os.path.dirname(full_path)

                            if not os.path.abspath(full_path).startswith(os.path.abspath(self.shared_data.current_dir)):
                                raise PermissionError(f"Access denied: {relative_path} is outside allowed directory")

                            if parent_dir:
                                os.makedirs(parent_dir, exist_ok=True)

                            with open(full_path, 'wb') as f:
                                # Trim the trailing CRLF/boundary dashes left over from the multipart split
                                f.write(file_data.strip(b'\r\n--'))

                            uploaded_files.append(full_path)
                            self.logger.info(f"File uploaded: {full_path}")
                    except Exception as e:
                        self.logger.error(f"Error processing file: {str(e)}")
                        continue

            handler.send_response(200)
            handler.send_header("Content-Type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({
                "status": "success",
                "message": f"Files uploaded successfully to {target_dir}",
                "files": uploaded_files
            }).encode('utf-8'))

        except Exception as e:
            self.logger.error(f"Upload error: {str(e)}")
            handler.send_response(500)
            handler.send_header("Content-Type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({
                "status": "error",
                "message": str(e)
            }).encode('utf-8'))
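
    # Illustrative multipart fields consumed above: one part named "currentPath"
    # carrying a JSON array such as ["sub", "dir"], then one part per file with
    # filename="relative/path/to/file" in its Content-Disposition header.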

    def delete_file(self, data):
        """Delete file or directory."""
        try:
            file_path = data.get('file_path')
            if not file_path:
                return {"status": "error", "message": "No file path provided"}

            abs_file_path = os.path.abspath(file_path)
            base_dir = os.path.abspath(self.shared_data.current_dir)

            self.logger.info(f"Deleting: {abs_file_path}")

            if not abs_file_path.startswith(base_dir):
                return {"status": "error", "message": f"Access denied: {file_path} is outside the allowed directory"}

            if not os.path.exists(abs_file_path):
                return {"status": "error", "message": f"Path not found: {file_path}"}

            # Remember the type before deleting; os.path.isdir is always False afterwards
            was_directory = os.path.isdir(abs_file_path)
            if was_directory:
                shutil.rmtree(abs_file_path)
            else:
                os.remove(abs_file_path)

            if os.path.exists(abs_file_path):
                return {"status": "error", "message": f"Failed to delete {abs_file_path} - file still exists"}

            return {
                "status": "success",
                "message": f"Successfully deleted {'directory' if was_directory else 'file'}: {file_path}"
            }
        except Exception as e:
            self.logger.error(f"Error deleting file: {str(e)}")
            return {"status": "error", "message": str(e)}

    def rename_file(self, data):
        """Rename file or directory."""
        try:
            old_path = os.path.join(self.shared_data.current_dir, data['old_path'])
            new_path = os.path.join(self.shared_data.current_dir, data['new_path'])

            if not (os.path.abspath(old_path).startswith(self.shared_data.current_dir) and
                    os.path.abspath(new_path).startswith(self.shared_data.current_dir)):
                return {"status": "error", "message": "Invalid path"}

            os.rename(old_path, new_path)
            return {
                "status": "success",
                "message": f"Successfully renamed {old_path} to {new_path}"
            }
        except Exception as e:
            self.logger.error(f"Error renaming file: {str(e)}")
            return {"status": "error", "message": str(e)}

    def duplicate_file(self, data):
        """Duplicate file or directory."""
        try:
            source_path = os.path.join(self.shared_data.current_dir, data['source_path'])
            target_path = os.path.join(self.shared_data.current_dir, data['target_path'])

            if not (os.path.abspath(source_path).startswith(self.shared_data.current_dir) and
                    os.path.abspath(target_path).startswith(self.shared_data.current_dir)):
                return {"status": "error", "message": "Invalid path"}

            if os.path.isdir(source_path):
                shutil.copytree(source_path, target_path)
            else:
                shutil.copy2(source_path, target_path)

            return {
                "status": "success",
                "message": f"Successfully duplicated {source_path} to {target_path}"
            }
        except Exception as e:
            self.logger.error(f"Error duplicating file: {str(e)}")
            return {"status": "error", "message": str(e)}

    def move_file(self, data):
        """Move file or directory."""
        try:
            source_path = os.path.join(self.shared_data.current_dir, data['source_path'])
            target_path = os.path.join(self.shared_data.current_dir, data['target_path'])

            if not (os.path.abspath(source_path).startswith(self.shared_data.current_dir) and
                    os.path.abspath(target_path).startswith(self.shared_data.current_dir)):
                return {"status": "error", "message": "Invalid path"}

            target_dir = os.path.dirname(target_path)
            if not os.path.exists(target_dir):
                os.makedirs(target_dir, exist_ok=True)

            if os.path.exists(target_path):
                base, ext = os.path.splitext(target_path)
                counter = 1
                while os.path.exists(f"{base} ({counter}){ext}"):
                    counter += 1
                target_path = f"{base} ({counter}){ext}"

            shutil.move(source_path, target_path)
            return {"status": "success", "message": "Item moved successfully"}
        except Exception as e:
            self.logger.error(f"Error moving file: {str(e)}")
            return {"status": "error", "message": str(e)}
|
||||
|
||||
def list_directories(self, handler):
|
||||
"""List directory structure."""
|
||||
try:
|
||||
def get_directory_structure(path):
|
||||
items = []
|
||||
for entry in os.scandir(path):
|
||||
if entry.is_dir():
|
||||
items.append({
|
||||
"name": entry.name,
|
||||
"path": os.path.relpath(entry.path, self.shared_data.current_dir),
|
||||
"is_directory": True,
|
||||
"children": get_directory_structure(entry.path)
|
||||
})
|
||||
return items
|
||||
|
||||
directory_structure = get_directory_structure(self.shared_data.current_dir)
|
||||
|
||||
handler.send_response(200)
|
||||
handler.send_header('Content-Type', 'application/json')
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps(directory_structure).encode())
|
||||
except Exception as e:
|
||||
handler.send_response(500)
|
||||
handler.send_header('Content-Type', 'application/json')
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({
|
||||
"status": "error",
|
||||
"message": str(e)
|
||||
}).encode())

    def clear_output_folder(self, data=None):
        """Remove all content inside the output directory while keeping the first-level subfolders themselves."""
        try:
            self.logger.info("Starting clear_output_folder operation...")
            base_dir = self.shared_data.output_dir
            self.logger.info(f"Base directory: {base_dir}")

            if not os.path.exists(base_dir):
                self.logger.warning(f"Output directory does not exist: {base_dir}")
                return {"status": "success", "message": "Output directory does not exist"}

            for root, dirs, files in os.walk(base_dir, topdown=True):
                try:
                    current_depth = root.rstrip(os.path.sep).count(os.path.sep) - base_dir.rstrip(os.path.sep).count(os.path.sep)
                    self.logger.debug(f"Processing directory at depth {current_depth}: {root}")

                    if current_depth == 0:
                        for dir_name in dirs:
                            try:
                                dir_path = os.path.join(root, dir_name)
                                self.logger.debug(f"Clearing contents of first-level subfolder: {dir_path}")
                                for sub_root, sub_dirs, sub_files in os.walk(dir_path):
                                    for sub_file in sub_files:
                                        try:
                                            file_path = os.path.join(sub_root, sub_file)
                                            self.logger.debug(f"Removing file: {file_path}")
                                            os.remove(file_path)
                                        except Exception as file_e:
                                            self.logger.warning(f"Failed to remove file {file_path}: {str(file_e)}")
                                            continue

                                    for sub_dir in sub_dirs:
                                        try:
                                            dir_to_remove = os.path.join(sub_root, sub_dir)
                                            self.logger.debug(f"Removing directory: {dir_to_remove}")
                                            shutil.rmtree(dir_to_remove)
                                        except Exception as dir_e:
                                            self.logger.warning(f"Failed to remove directory {dir_to_remove}: {str(dir_e)}")
                                            continue
                            except Exception as e:
                                self.logger.warning(f"Error processing directory {dir_name}: {str(e)}")
                                continue
                        dirs.clear()  # stop os.walk from descending further; the subfolders themselves are kept

                    elif current_depth > 0:
                        for name in dirs + files:
                            try:
                                path = os.path.join(root, name)
                                if os.path.isdir(path):
                                    shutil.rmtree(path)
                                else:
                                    os.remove(path)
                            except Exception as e:
                                self.logger.warning(f"Failed to remove {path}: {str(e)}")
                                continue

                except Exception as level_e:
                    self.logger.warning(f"Error processing depth level {current_depth}: {str(level_e)}")
                    continue

            self.logger.info("Output folder cleared successfully")
            return {
                "status": "success",
                "message": "Output folder cleared, keeping only first-level subfolders"
            }
        except Exception as e:
            self.logger.error(f"Error clearing output folder: {str(e)}")
            return {"status": "error", "message": f"Error clearing output folder: {str(e)}"}

437
web_utils/image_utils.py
Normal file
@@ -0,0 +1,437 @@

# image_utils.py
from __future__ import annotations
import os, json, re, shutil, io, logging
from io import BytesIO
from pathlib import Path
from typing import List, Optional
from urllib.parse import urlparse, parse_qs, unquote
from PIL import Image
from logger import Logger

logger = Logger(name="image_utils.py", level=logging.DEBUG)

ALLOWED_IMAGE_EXTS = {'.bmp', '.png', '.jpg', '.jpeg', '.gif', '.ico', '.webp'}

class ImageUtils:
    """Utilities for image management (NO persona/character logic here)."""

    # Fixed sizes used by the frontend spec for action icons
    STATUS_W, STATUS_H = 28, 28

    def __init__(self, shared_data, character_utils=None):
        self.logger = logger
        self.shared_data = shared_data
        self.character_utils = character_utils  # optional DI for renumber/help

        # batch resize options for manual tools
        self.should_resize_images = False
        self.resize_width = 100
        self.resize_height = 100

        # dirs
        self.status_images_dir = getattr(shared_data, "status_images_dir")
        self.static_images_dir = getattr(shared_data, "static_images_dir")
        self.web_dir = getattr(shared_data, "web_dir")
        self.images_dir = getattr(shared_data, "images_dir", None)

        self.web_images_dir = getattr(shared_data, "web_images_dir", os.path.join(self.web_dir, "images"))
        self.actions_icons_dir = getattr(shared_data, "actions_icons_dir", os.path.join(self.images_dir or self.web_dir, "actions_icons"))

        for d in (self.status_images_dir, self.static_images_dir, self.web_images_dir, self.actions_icons_dir):
            try: os.makedirs(d, exist_ok=True)
            except Exception: pass

    # ---------- helpers ----------
    def _to_bmp(self, raw: bytes, w: Optional[int]=None, h: Optional[int]=None) -> bytes:
        """Convert raw image bytes to an RGB BMP, optionally resized to (w, h)."""
        with Image.open(BytesIO(raw)) as im:
            if im.mode != 'RGB': im = im.convert('RGB')
            if w and h:
                try: res = Image.Resampling.LANCZOS        # Pillow >= 9.1
                except AttributeError: res = Image.LANCZOS  # older Pillow
                im = im.resize((w, h), res)
            out = BytesIO(); im.save(out, format='BMP'); return out.getvalue()
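
    # Usage sketch (file names are hypothetical; assumes a wired-up ImageUtils
    # instance `iu`): convert any Pillow-readable image to the fixed-size
    # status BMP expected by the frontend.
    #
    #   raw = open("icon.png", "rb").read()
    #   bmp = iu._to_bmp(raw, ImageUtils.STATUS_W, ImageUtils.STATUS_H)  # 28x28 RGB BMP bytes
    #   open("some_action.bmp", "wb").write(bmp)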

    def _safe(self, name: str) -> str:
        return os.path.basename((name or '').strip().replace('\x00', ''))

    def _mime(self, path: str) -> str:
        p = path.lower()
        if p.endswith('.bmp'): return 'image/bmp'
        if p.endswith('.png'): return 'image/png'
        if p.endswith('.jpg') or p.endswith('.jpeg'): return 'image/jpeg'
        if p.endswith('.gif'): return 'image/gif'
        if p.endswith('.ico'): return 'image/x-icon'
        if p.endswith('.webp'): return 'image/webp'
        return 'application/octet-stream'

    def _send_json(self, h, payload: dict, status: int=200):
        h.send_response(status); h.send_header('Content-Type','application/json'); h.end_headers()
        h.wfile.write(json.dumps(payload).encode('utf-8'))

    def _err(self, h, msg: str, code: int=500):
        self._send_json(h, {'status':'error','message':msg}, code)

    def _ensure_action_dir(self, action: str) -> str:
        p = os.path.join(self.status_images_dir, action); os.makedirs(p, exist_ok=True); return p

    def _list_images(self, directory: str, with_dims: bool=False):
        if not os.path.isdir(directory): return []
        items = []
        for fname in os.listdir(directory):
            p = os.path.join(directory, fname)
            if not os.path.isfile(p): continue
            ext = os.path.splitext(fname)[1].lower()
            if ext not in ALLOWED_IMAGE_EXTS: continue
            if with_dims:
                try:
                    with Image.open(p) as img: w, h = img.size
                    items.append({'name': fname, 'width': w, 'height': h})
                except Exception:
                    items.append({'name': fname, 'width': None, 'height': None})
            else:
                items.append(fname)
        return items

    # ---------- ACTION (status folder) IMAGES (no characters here) ----------
    def get_actions(self, h):
        try:
            actions = []
            for e in os.scandir(self.status_images_dir):
                if e.is_dir():
                    name = e.name
                    actions.append({'name': name, 'has_status_icon': os.path.exists(os.path.join(e.path, f"{name}.bmp"))})
            self._send_json(h, {'status':'success','actions':actions})
        except Exception as e:
            self.logger.error(e); self._err(h, str(e))

    def get_action_images(self, h):
        try:
            q = parse_qs(urlparse(h.path).query); action = (q.get('action',[None])[0] or '').strip()
            if not action: raise ValueError('Action parameter is required')
            adir = os.path.join(self.status_images_dir, action)
            if not os.path.exists(adir): raise FileNotFoundError(f"Action '{action}' does not exist")
            images = []
            for fn in os.listdir(adir):
                if fn.lower().endswith('.bmp'):
                    p = os.path.join(adir, fn)
                    try:
                        with Image.open(p) as img: w, hh = img.size
                    except Exception: w = hh = None
                    images.append({'name': fn, 'width': w, 'height': hh})
            self._send_json(h, {'status':'success','images':images})
        except Exception as e:
            self.logger.error(e); self._err(h, str(e))

    def get_status_icon(self, h):
        try:
            q = parse_qs(urlparse(h.path).query); action = (q.get('action',[None])[0] or '').strip()
            if not action: raise ValueError('action is required')
            p = os.path.join(self.status_images_dir, action, f"{action}.bmp")
            if not os.path.exists(p): h.send_response(404); h.end_headers(); return
            with open(p, 'rb') as f: data = f.read()
            h.send_response(200); h.send_header('Content-Type','image/bmp'); h.end_headers(); h.wfile.write(data)
        except Exception as e:
            self.logger.error(e); h.send_response(404); h.end_headers()

    def serve_status_image(self, h):
        try:
            url_path = unquote(urlparse(h.path).path); prefix = '/images/status/'
            if not url_path.startswith(prefix): h.send_error(400, "Bad Request"); return
            rel = url_path[len(prefix):]
            base = Path(self.status_images_dir).resolve()
            target = (base / rel).resolve()
            if not str(target).startswith(str(base)): h.send_error(403, "Forbidden"); return
            if not target.exists() or not target.is_file(): h.send_error(404, "Image not found"); return
            with open(target, 'rb') as f: content = f.read()
            h.send_response(200); h.send_header('Content-Type', self._mime(str(target)))
            h.send_header('Content-Length', str(len(content))); h.end_headers(); h.wfile.write(content)
        except Exception as e:
            self.logger.error(e); h.send_error(500, "Internal Server Error")

    def upload_status_image(self, h):
        """Add/replace <action>/<action>.bmp (always 28x28 BMP)."""
        import cgi
        try:
            ctype, pdict = cgi.parse_header(h.headers.get('Content-Type'))
            if ctype != 'multipart/form-data': raise ValueError('Content-Type must be multipart/form-data')
            pdict['boundary'] = bytes(pdict['boundary'], 'utf-8'); pdict['CONTENT-LENGTH'] = int(h.headers.get('Content-Length'))
            form = cgi.FieldStorage(fp=BytesIO(h.rfile.read(pdict['CONTENT-LENGTH'])),
                                    headers=h.headers, environ={'REQUEST_METHOD':'POST'}, keep_blank_values=True)
            for key in ('type', 'action_name', 'status_image'):
                if key not in form: raise ValueError(f'Missing field: {key}')
            if (form.getvalue('type') or '').strip() != 'action': raise ValueError("type must be 'action'")
            action = (form.getvalue('action_name') or '').strip()
            if not action: raise ValueError("action_name is required")
            file_item = form['status_image']
            if not getattr(file_item, 'filename', ''): raise ValueError('No file')

            adir = self._ensure_action_dir(action)
            raw = file_item.file.read()
            bmp = self._to_bmp(raw, self.STATUS_W, self.STATUS_H)
            with open(os.path.join(adir, f"{action}.bmp"), 'wb') as f: f.write(bmp)
            self._send_json(h, {'status':'success','message':'Status image added/updated','path':f"{action}/{action}.bmp"})
        except Exception as e:
            self.logger.error(e); self._err(h, str(e))
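
    # Client-side sketch (the endpoint path is an assumption; adjust it to the
    # actual route that dispatches to upload_status_image). Uses the `requests`
    # library to send the three required multipart fields:
    #
    #   import requests
    #   requests.post(
    #       "http://bjorn.local:8000/upload_status_image",
    #       data={"type": "action", "action_name": "MyAction"},
    #       files={"status_image": open("icon.png", "rb")},
    #   )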

    # ---------- STATIC IMAGES ----------
    def list_static_images_with_dimensions(self, h):
        try:
            self._send_json(h, {'status':'success','images': self._list_images(self.static_images_dir, with_dims=True)})
        except Exception as e:
            self.logger.error(e); self._err(h, str(e))

    def upload_static_image(self, h):
        import cgi
        try:
            ctype, pdict = cgi.parse_header(h.headers.get('Content-Type'))
            if ctype != 'multipart/form-data': raise ValueError('Content-Type must be multipart/form-data')
            pdict['boundary'] = bytes(pdict['boundary'], 'utf-8'); pdict['CONTENT-LENGTH'] = int(h.headers.get('Content-Length'))
            form = cgi.FieldStorage(fp=BytesIO(h.rfile.read(pdict['CONTENT-LENGTH'])),
                                    headers=h.headers, environ={'REQUEST_METHOD':'POST'}, keep_blank_values=True)
            if 'static_image' not in form or not getattr(form['static_image'], 'filename', ''): raise ValueError('No static_image provided')
            filename = self._safe(form['static_image'].filename); base, _ = os.path.splitext(filename); filename = base + '.bmp'
            raw = form['static_image'].file.read()
            if self.should_resize_images:
                out = self._to_bmp(raw, self.resize_width, self.resize_height)
            else:
                # keep the source dimensions; 'hh' avoids shadowing the handler argument 'h'
                with Image.open(BytesIO(raw)) as im: w, hh = im.size
                out = self._to_bmp(raw, w, hh)
            with open(os.path.join(self.static_images_dir, filename), 'wb') as f: f.write(out)
            self._send_json(h, {'status':'success','message':'Static image uploaded successfully'})
        except Exception as e:
            self.logger.error(e); self._err(h, str(e))

    def serve_static_image(self, h):
        try:
            path = unquote(urlparse(h.path).path)
            name = self._safe(os.path.basename(path))
            full = os.path.join(self.static_images_dir, name)
            if not os.path.exists(full): raise FileNotFoundError(name)
            with open(full, 'rb') as f: data = f.read()
            h.send_response(200); h.send_header('Content-Type', self._mime(full)); h.end_headers(); h.wfile.write(data)
        except Exception as e:
            self.logger.error(e); h.send_response(404); h.end_headers()

    # ---------- WEB IMAGES & ACTION ICONS ----------
    def list_web_images_with_dimensions(self, h):
        try: self._send_json(h, {'status':'success','images': self._list_images(self.web_images_dir, with_dims=True)})
        except Exception as e: self.logger.error(e); self._err(h, str(e))

    def upload_web_image(self, h):
        import cgi
        try:
            ctype, pdict = cgi.parse_header(h.headers.get('Content-Type'))
            if ctype != 'multipart/form-data': raise ValueError('Content-Type must be multipart/form-data')
            pdict['boundary'] = bytes(pdict['boundary'], 'utf-8'); pdict['CONTENT-LENGTH'] = int(h.headers.get('Content-Length'))
            form = cgi.FieldStorage(fp=BytesIO(h.rfile.read(pdict['CONTENT-LENGTH'])),
                                    headers=h.headers, environ={'REQUEST_METHOD':'POST'}, keep_blank_values=True)
            if 'web_image' not in form or not getattr(form['web_image'], 'filename', ''): raise ValueError('No web_image file provided')
            file_item = form['web_image']; filename = self._safe(file_item.filename)
            base, ext = os.path.splitext(filename)
            if ext.lower() not in ALLOWED_IMAGE_EXTS: filename = base + '.png'
            data = file_item.file.read()
            with open(os.path.join(self.web_images_dir, filename), 'wb') as f: f.write(data)
            self._send_json(h, {'status':'success','message':'Web image uploaded','file':filename})
        except Exception as e:
            self.logger.error(e); self._err(h, str(e))

    def serve_web_image(self, h):
        try:
            url_path = unquote(urlparse(h.path).path); prefix = '/web/images/'
            if not url_path.startswith(prefix): h.send_error(400, "Bad Request"); return
            rel = self._safe(url_path[len(prefix):]); target = os.path.join(self.web_images_dir, rel)
            if not os.path.isfile(target): h.send_error(404, "Not found"); return
            with open(target, 'rb') as f: content = f.read()
            h.send_response(200); h.send_header('Content-Type', self._mime(target))
            h.send_header('Content-Length', str(len(content))); h.end_headers(); h.wfile.write(content)
        except Exception as e:
            self.logger.error(e); h.send_error(500, "Internal Server Error")

    def list_actions_icons_with_dimensions(self, h):
        try: self._send_json(h, {'status':'success','images': self._list_images(self.actions_icons_dir, with_dims=True)})
        except Exception as e: self.logger.error(e); self._err(h, str(e))

    def upload_actions_icon(self, h):
        import cgi
        try:
            ctype, pdict = cgi.parse_header(h.headers.get('Content-Type'))
            if ctype != 'multipart/form-data': raise ValueError('Content-Type must be multipart/form-data')
            pdict['boundary'] = bytes(pdict['boundary'], 'utf-8'); pdict['CONTENT-LENGTH'] = int(h.headers.get('Content-Length'))
            form = cgi.FieldStorage(fp=BytesIO(h.rfile.read(pdict['CONTENT-LENGTH'])),
                                    headers=h.headers, environ={'REQUEST_METHOD':'POST'}, keep_blank_values=True)
            if 'icon_image' not in form or not getattr(form['icon_image'], 'filename', ''): raise ValueError('No icon_image file provided')
            file_item = form['icon_image']; filename = self._safe(file_item.filename)
            base, ext = os.path.splitext(filename)
            if ext.lower() not in ALLOWED_IMAGE_EXTS: filename = base + '.png'
            data = file_item.file.read()
            with open(os.path.join(self.actions_icons_dir, filename), 'wb') as f: f.write(data)
            self._send_json(h, {'status':'success','message':'Action icon uploaded','file':filename})
        except Exception as e:
            self.logger.error(e); self._err(h, str(e))

    def serve_actions_icon(self, h):
        try:
            rel = h.path[len('/actions_icons/'):].lstrip('/')
            rel = os.path.normpath(rel).replace("\\", "/")
            # also reject a bare ".." which startswith("../") would miss
            if rel == ".." or rel.startswith("../"): h.send_error(400, "Invalid path"); return
            image_path = os.path.join(self.actions_icons_dir, rel)
            if not os.path.exists(image_path): h.send_error(404, "Image not found"); return
            with open(image_path, 'rb') as f: content = f.read()
            h.send_response(200); h.send_header('Content-Type', self._mime(image_path))
            h.send_header('Content-Length', str(len(content))); h.end_headers(); h.wfile.write(content)
        except Exception as e:
            self.logger.error(e); h.send_error(500, "Internal Server Error")

    # ---------- CRUD that might touch action character files ----------
    def delete_images(self, h):
        """Delete images in 'static'|'web'|'icons' or action folder. When type='action', call CharacterUtils to renumber."""
        try:
            data = json.loads(h.rfile.read(int(h.headers['Content-Length'])).decode('utf-8'))
            tp = data.get('type'); action = data.get('action'); names = data.get('image_names', [])
            if not tp or not names: raise ValueError('type and image_names are required')
            if tp == 'action':
                if not action: raise ValueError("action is required for type=action")
                base = os.path.join(self.status_images_dir, self._safe(action))
                for n in names:
                    p = os.path.join(base, self._safe(n))
                    if os.path.exists(p): os.remove(p)
                if self.character_utils:
                    self.character_utils.update_character_image_numbers(action)
            elif tp == 'static':
                for n in names:
                    p = os.path.join(self.static_images_dir, self._safe(n))
                    if os.path.exists(p): os.remove(p)
            elif tp == 'web':
                for n in names:
                    p = os.path.join(self.web_images_dir, self._safe(n))
                    if os.path.exists(p): os.remove(p)
            elif tp == 'icons':
                for n in names:
                    p = os.path.join(self.actions_icons_dir, self._safe(n))
                    if os.path.exists(p): os.remove(p)
            else:
                raise ValueError("type must be 'action','static','web','icons'")
            self._send_json(h, {'status':'success','message':'Images deleted successfully'})
        except Exception as e:
            self.logger.error(e); self._err(h, str(e))

    def replace_image(self, h):
        """Replace image. For type='action': status icon here; character images delegated to CharacterUtils."""
        import cgi
        try:
            ctype, pdict = cgi.parse_header(h.headers.get('Content-Type'))
            if ctype != 'multipart/form-data': raise ValueError('Content-Type must be multipart/form-data')
            pdict['boundary'] = bytes(pdict['boundary'], 'utf-8'); pdict['CONTENT-LENGTH'] = int(h.headers.get('Content-Length'))
            form = cgi.FieldStorage(fp=BytesIO(h.rfile.read(pdict['CONTENT-LENGTH'])),
                                    headers=h.headers, environ={'REQUEST_METHOD':'POST'}, keep_blank_values=True)
            tp = form.getvalue('type'); image_name = self._safe(form.getvalue('image_name') or '')
            file_item = form['new_image'] if 'new_image' in form else None
            if not tp or not image_name or not file_item or not getattr(file_item, 'filename', ''):
                raise ValueError('type, image_name and new_image are required')

            if tp == 'action':
                action = self._safe(form.getvalue('action') or '')
                if not action: raise ValueError("action is required for type=action")
                # status icon = <action>.bmp -> handle here
                if image_name.lower() == f"{action.lower()}.bmp":
                    base = os.path.join(self.status_images_dir, action)
                    if not os.path.exists(os.path.join(base, image_name)):
                        raise FileNotFoundError(f"{image_name} not found")
                    raw = file_item.file.read()
                    out = self._to_bmp(raw, self.STATUS_W, self.STATUS_H)
                    with open(os.path.join(base, image_name), 'wb') as f: f.write(out)
                else:
                    # delegate character image replacement
                    if not self.character_utils:
                        raise RuntimeError("CharacterUtils not wired into ImageUtils")
                    return self.character_utils.replace_character_image(h, form, action, image_name)
            elif tp == 'static':
                path = os.path.join(self.static_images_dir, image_name)
                if not os.path.exists(path): raise FileNotFoundError(image_name)
                raw = file_item.file.read()
                with Image.open(path) as im: w, hh = im.size
                out = self._to_bmp(raw, w, hh)
                with open(path, 'wb') as f: f.write(out)
            elif tp == 'web':
                path = os.path.join(self.web_images_dir, image_name)
                if not os.path.exists(path): raise FileNotFoundError(image_name)
                with open(path, 'wb') as f: f.write(file_item.file.read())
            elif tp == 'icons':
                path = os.path.join(self.actions_icons_dir, image_name)
                if not os.path.exists(path): raise FileNotFoundError(image_name)
                with open(path, 'wb') as f: f.write(file_item.file.read())
            else:
                raise ValueError("type must be 'action'|'static'|'web'|'icons'")
            self._send_json(h, {'status':'success','message':'Image replaced successfully'})
        except Exception as e:
            self.logger.error(e); self._err(h, str(e))

    def resize_images(self, h):
        """Batch-resize statics; when 'action' is requested, delegate to CharacterUtils."""
        try:
            data = json.loads(h.rfile.read(int(h.headers['Content-Length'])).decode('utf-8'))
            tp = data.get('type'); action = data.get('action'); names = data.get('image_names', [])
            w = int(data.get('width', 100)); hh = int(data.get('height', 100))
            if tp == 'static':
                base = self.static_images_dir
                for n in names:
                    p = os.path.join(base, self._safe(n))
                    if not os.path.exists(p): continue
                    with open(p, 'rb') as f: raw = f.read()
                    with Image.open(BytesIO(raw)) as im: _w, _h = im.size
                    out = self._to_bmp(raw, w or _w, hh or _h)
                    with open(p, 'wb') as f: f.write(out)
                self._send_json(h, {'status':'success'})
            elif tp == 'action':
                if not self.character_utils:
                    raise RuntimeError("CharacterUtils not wired into ImageUtils")
                return self.character_utils.resize_action_images(h, data)
            else:
                raise ValueError("Type must be 'static' or 'action'")
        except Exception as e:
            self.logger.error(e); self._err(h, str(e))
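
    # Request-body sketch for resize_images (JSON; field names mirror what the
    # method reads above, values are illustrative):
    #
    #   {"type": "static", "image_names": ["logo.bmp"], "width": 64, "height": 64}
    #
    # type='action' payloads are forwarded untouched to
    # CharacterUtils.resize_action_images.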

    # ---------- misc ----------
    def restore_default_images(self, h):
        try:
            images_dir = getattr(self.shared_data, "images_dir", None)
            default_images_dir = getattr(self.shared_data, "default_images_dir", None)
            if not default_images_dir or not os.path.exists(default_images_dir):
                raise FileNotFoundError(f"Default images directory not found: {default_images_dir}")
            if not images_dir:
                raise ValueError("images_dir is not configured")
            if os.path.exists(images_dir): shutil.rmtree(images_dir)
            shutil.copytree(default_images_dir, images_dir)
            self._send_json(h, {'status':'success','message':'Images restored successfully'})
        except Exception as e:
            self.logger.error(e); self._err(h, str(e))

    def set_resize_option(self, h):
        try:
            data = json.loads(h.rfile.read(int(h.headers['Content-Length'])).decode('utf-8'))
            self.should_resize_images = bool(data.get('resize', False))
            self.resize_width = int(data.get('width', 100))
            self.resize_height = int(data.get('height', 100))
            self._send_json(h, {'status':'success','message':'Resize options updated'})
        except Exception as e:
            self.logger.error(e); self._err(h, str(e))

    def serve_bjorn_status_image(self, h):
        try:
            out = io.BytesIO()
            self.shared_data.bjorn_status_image.save(out, format="PNG")
            data = out.getvalue()
            h.send_response(200); h.send_header('Content-Type','image/png'); h.send_header('Cache-Control','no-cache')
            h.end_headers(); h.wfile.write(data)
        except BrokenPipeError:
            pass
        except Exception as e:
            self.logger.error(e)

    def serve_image(self, h):
        path = os.path.join(self.shared_data.web_dir, 'screen.png')
        try:
            with open(path, 'rb') as f:
                h.send_response(200); h.send_header('Content-type','image/png')
                h.send_header('Cache-Control','max-age=0, must-revalidate')
                h.end_headers(); h.wfile.write(f.read())
        except FileNotFoundError:
            h.send_response(404); h.end_headers()
        except BrokenPipeError: pass
        except Exception as e: self.logger.error(e)

1062
web_utils/index_utils.py
Normal file
File diff suppressed because it is too large
Load Diff
405
web_utils/netkb_utils.py
Normal file
@@ -0,0 +1,405 @@

# web_utils/netkb_utils.py
"""
Network Knowledge Base utilities.
Handles network discovery data, host information, and action queue management.
"""
from __future__ import annotations
import json
from typing import Any, Dict, Optional
from urllib.parse import urlparse, parse_qs
import logging
from logger import Logger
logger = Logger(name="netkb_utils.py", level=logging.DEBUG)

class NetKBUtils:
    """Utilities for network knowledge base management."""

    def __init__(self, shared_data):
        self.logger = logger
        self.shared_data = shared_data

    def serve_netkb_data_json(self, handler):
        """Serve network knowledge base as simple JSON (IPs, ports, actions)."""
        try:
            hosts = self.shared_data.db.get_all_hosts()
            actions_meta = self.shared_data.db.list_actions()
            action_names = [a["b_class"] for a in actions_meta]

            alive = [h for h in hosts if int(h.get("alive") or 0) == 1]
            response_data = {
                "ips": [h.get("ips", "") for h in alive],
                "ports": {h.get("ips", ""): (h.get("ports", "") or "").split(';') for h in alive},
                "actions": action_names
            }
            handler.send_response(200)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps(response_data).encode("utf-8"))
        except Exception as e:
            handler.send_response(500)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode("utf-8"))

    def serve_netkb_data(self, handler):
        """Serve detailed network knowledge base data with action statuses."""
        try:
            db = self.shared_data.db
            hosts = db.get_all_hosts()
            actions = [a["b_class"] for a in db.list_actions()]

            response = []
            for h in hosts:
                mac = h.get("mac_address", "")
                ips_txt = h.get("ips", "") or ""
                ips_list = [p for p in ips_txt.split(';') if p]
                primary_ip = ips_list[0] if ips_list else ""

                row = {
                    "mac": mac,
                    "ip": primary_ip,
                    "ips": ips_list,
                    "hostname": h.get("hostnames", ""),
                    "ports": (h.get("ports", "") or "").split(';') if h.get("ports") else [],
                    "alive": int(h.get("alive") or 0) == 1,
                    "vendor": h.get("vendor", ""),
                    "essid": h.get("essid", ""),
                    "actions": []
                }

                # Get action status from queue (compatible with UI 'raw' format)
                for a in actions:
                    st = db.get_action_status_from_queue(a, mac)
                    if st:
                        ts = st.get("completed_at") or st.get("started_at") or st.get("created_at") or ""
                        ts_compact = ts.replace("-", "").replace(":", "").replace(" ", "_") if ts else ""
                        status_raw = f"{st['status']}_{ts_compact}" if ts_compact else ""
                    else:
                        status_raw = ""
                    row["actions"].append({"name": a, "status": status_raw})

                response.append(row)

            handler.send_response(200)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps(response).encode("utf-8"))

        except Exception as e:
            handler.send_response(500)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode("utf-8"))
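
    # Format sketch: the 'raw' status strings built above look like
    #   "success_20250101_120000"   (status + compacted timestamp)
    # and are empty when the action has never been queued for that host.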

    def serve_network_data(self, handler):
        """Serve network data as HTML table."""
        try:
            html = ['<table><tr><th>ESSID</th><th>IP</th><th>Hostname</th><th>MAC Address</th><th>Vendor</th><th>Ports</th></tr>']
            for h in self.shared_data.db.get_all_hosts():
                if int(h.get("alive") or 0) != 1:
                    continue
                html.append(
                    f"<tr><td>{h.get('essid', '')}</td>"
                    f"<td>{h.get('ips', '')}</td>"
                    f"<td>{h.get('hostnames', '')}</td>"
                    f"<td>{h.get('mac_address', '')}</td>"
                    f"<td>{h.get('vendor', '')}</td>"
                    f"<td>{h.get('ports', '')}</td></tr>"
                )
            html.append("</table>")
            table_html = "\n".join(html)
            handler.send_response(200)
            handler.send_header("Content-type", "text/html")
            handler.end_headers()
            handler.wfile.write(table_html.encode("utf-8"))
        except Exception as e:
            handler.send_response(500)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode("utf-8"))

    def delete_netkb_action(self, data):
        """Clear action history (queue) for a host found by IP."""
        try:
            ip = (data or {}).get("ip")
            action = (data or {}).get("action")
            cancel_active = bool((data or {}).get("cancel") or (data or {}).get("cancel_active"))

            if not ip or not action:
                return {"status": "error", "message": "Missing 'ip' or 'action'"}

            # Find MAC by IP
            rows = self.shared_data.db.query(
                "SELECT mac_address FROM hosts WHERE ips LIKE ? LIMIT 1", (f"%{ip}%",)
            )
            if not rows:
                return {"status": "error", "message": f"No host found for IP {ip}"}
            mac = rows[0]["mac_address"]

            cancelled = 0
            if cancel_active:
                cancelled = self.shared_data.db.execute("""
                    UPDATE action_queue
                    SET status='cancelled',
                        completed_at=CURRENT_TIMESTAMP,
                        error_message=COALESCE(error_message,'user_cancelled')
                    WHERE mac_address=? AND action_name=?
                      AND status IN ('scheduled','pending','running')
                """, (mac, action))

            # Clear finished statuses
            cleared = self.shared_data.db.execute(
                """
                DELETE FROM action_queue
                WHERE mac_address=? AND action_name=?
                  AND status IN ('success','failed','expired','cancelled')
                """,
                (mac, action),
            )

            msg = f"Action '{action}' cleared for IP {ip} (deleted {cleared}"
            if cancel_active:
                msg += f", cancelled {cancelled}"
            msg += ")"

            return {"status": "success", "message": msg}

        except Exception as e:
            self.logger.error(f"delete_netkb_action error: {e}")
            return {"status": "error", "message": str(e)}

    def delete_all_actions(self, data=None):
        """Clear entire action queue."""
        try:
            deleted = self.shared_data.db.execute("DELETE FROM action_queue")
            return {
                "status": "success",
                "message": f"Cleared ALL actions ({deleted} entries)"
            }
        except Exception as e:
            self.logger.error(f"delete_all_actions error: {e}")
            return {"status": "error", "message": str(e)}

    def serve_attempt_history(self, handler):
        """Get action attempt history with superseded detection."""
        try:
            url = urlparse(handler.path or "")
            qs = parse_qs(url.query or "")

            action = (qs.get("action", [""])[0] or "").strip()
            mac = (qs.get("mac", qs.get("mac_address", [""]))[0] or "").strip()
            port = int((qs.get("port", ["0"])[0] or 0))
            limit = int((qs.get("limit", ["200"])[0] or 200))
            include_superseded = (qs.get("include_superseded", ["true"])[0] or "true").lower() in ("1", "true", "yes", "on")

            if not action or not mac:
                raise ValueError("missing required parameters: action, mac")

            db = self.shared_data.db

            rows = db.query("""
                SELECT id, action_name, mac_address, ip, port, hostname, service,
                       status, retry_count, max_retries,
                       priority,
                       created_at, started_at, completed_at, scheduled_for,
                       error_message, result_summary,
                       COALESCE(completed_at, started_at, scheduled_for, created_at) AS ts
                FROM action_queue
                WHERE action_name = ?
                  AND COALESCE(mac_address,'') = ?
                  AND COALESCE(port,0) = ?
                ORDER BY datetime(COALESCE(completed_at, started_at, scheduled_for, created_at)) DESC
                LIMIT ?
            """, (action, mac, port, limit))

            # Compute "superseded" status: rows are ordered newest-first, so the
            # first success found is the most recent one
            last_success_ts = None
            for r in rows:
                st = (r.get("status") or "").lower()
                if st == "success":
                    last_success_ts = r.get("ts")
                    break

            attempts = []
            for r in rows:
                st_raw = (r.get("status") or "").lower()
                is_sup = False
                if st_raw in ("failed", "expired", "cancelled") and last_success_ts:
                    ts = r.get("ts") or ""
                    if ts and ts < last_success_ts:
                        is_sup = True

                st_display = "superseded" if is_sup else st_raw

                attempts.append({
                    "id": r.get("id"),
                    "action_name": r.get("action_name"),
                    "mac_address": r.get("mac_address"),
                    "ip": r.get("ip"),
                    "port": r.get("port"),
                    "hostname": r.get("hostname"),
                    "service": r.get("service"),
                    "status": st_raw,
                    "status_display": st_display,
                    "superseded": bool(is_sup),
                    "retry_count": r.get("retry_count"),
                    "max_retries": r.get("max_retries"),
                    "priority": r.get("priority"),
                    "ts": r.get("ts"),
                    "created_at": r.get("created_at"),
                    "started_at": r.get("started_at"),
                    "completed_at": r.get("completed_at"),
                    "scheduled_for": r.get("scheduled_for"),
                    "error_message": r.get("error_message"),
                    "result_summary": r.get("result_summary"),
                })

            if not include_superseded:
                attempts = [a for a in attempts if not (a["superseded"] and a["status"] in ("failed", "expired", "cancelled"))]

            handler.send_response(200)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps(attempts).encode("utf-8"))

        except Exception as e:
            handler.send_response(400)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode("utf-8"))

    def serve_action_queue(self, handler):
        """Return action queue with effective priority calculation."""
        try:
            db = self.shared_data.db
            rows = db.query("""
                SELECT id, action_name, mac_address, ip, port, hostname, service, priority, status,
                       retry_count, max_retries, created_at, scheduled_for, started_at, completed_at,
                       expires_at, error_message, result_summary, tags, metadata,
                       MIN(100, priority + CAST((strftime('%s','now') - strftime('%s',created_at))/300 AS INTEGER)) AS priority_effective
                FROM action_queue
                ORDER BY
                    CASE status
                        WHEN 'running' THEN 0
                        WHEN 'pending' THEN 1
                        WHEN 'scheduled' THEN 2
                        WHEN 'success' THEN 3
                        WHEN 'failed' THEN 4
                        WHEN 'expired' THEN 5
                        WHEN 'cancelled' THEN 6
                        ELSE 7
                    END,
                    CASE
                        WHEN status = 'pending' THEN priority_effective
                        ELSE priority
                    END DESC,
                    CASE
                        WHEN status = 'pending' THEN datetime(COALESCE(scheduled_for, created_at))
                        ELSE datetime(COALESCE(completed_at, started_at, scheduled_for, created_at))
                    END ASC
                LIMIT 1000
            """)
            out = []
            for r in rows:
                md = {}
                if r.get("metadata"):
                    try:
                        md = json.loads(r["metadata"])
                    except Exception:
                        md = {}
                tg = []
                if r.get("tags"):
                    try:
                        tg = json.loads(r["tags"])
                    except Exception:
                        tg = []
                out.append({
                    "id": r["id"],
                    "action_name": r["action_name"],
                    "mac_address": r["mac_address"],
                    "ip": r["ip"],
                    "port": r["port"],
                    "hostname": r["hostname"],
                    "service": r["service"],
                    "priority": r["priority"],
                    "priority_effective": r["priority_effective"],
                    "status": r["status"],
                    "retry_count": r["retry_count"],
                    "max_retries": r["max_retries"],
                    "created_at": r["created_at"],
                    "scheduled_for": r["scheduled_for"],
                    "started_at": r["started_at"],
                    "completed_at": r["completed_at"],
                    "expires_at": r["expires_at"],
                    "error_message": r["error_message"],
                    "result_summary": r["result_summary"],
                    "tags": tg,
                    "metadata": md,
                    "timeout": int(md.get("timeout", 900))
                })
            handler.send_response(200)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps(out).encode("utf-8"))
        except Exception as e:
            handler.send_response(500)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode("utf-8"))
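
    # Aging sketch: priority_effective mirrors the SQL above -- pending work
    # gains one priority point per 5 minutes of queue age, capped at 100.
    #
    #   def priority_effective(priority: int, age_seconds: int) -> int:
    #       return min(100, priority + age_seconds // 300)
    #
    #   priority_effective(50, 1800)  # -> 56 after 30 minutes in the queue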

    def handle_queue_cmd(self, handler):
        """Handle queue commands: cancel, retry, bump, delete."""
        try:
            ln = int(handler.headers.get("Content-Length", "0") or 0)
            payload = json.loads(handler.rfile.read(ln) or "{}")
            cmd = (payload.get("cmd") or "").strip().lower()
            qid = int(payload.get("id"))
            delta = int(payload.get("delta") or 10)

            db = self.shared_data.db
            rc = 0

            if cmd == "cancel":
                rc = db.execute("""
                    UPDATE action_queue
                    SET status='cancelled', completed_at=CURRENT_TIMESTAMP
                    WHERE id=? AND status IN ('scheduled','pending','running')
                """, (qid,))

            elif cmd == "retry":
                rc = db.execute("""
                    UPDATE action_queue
                    SET status='pending',
                        scheduled_for=datetime('now'),
                        error_message=NULL,
                        result_summary=NULL,
                        started_at=NULL,
                        completed_at=NULL
                    WHERE id=? AND status IN ('failed','expired','cancelled','scheduled')
                """, (qid,))

            elif cmd == "bump":
                rc = db.execute("""
                    UPDATE action_queue
                    SET priority = MIN(100, COALESCE(priority,50) + ?)
                    WHERE id=?
                """, (delta, qid))

            elif cmd == "delete":
                rc = db.execute("""
                    DELETE FROM action_queue
                    WHERE id=? AND status IN ('success','failed','expired','cancelled')
                """, (qid,))
            else:
                raise ValueError("unknown cmd")

            handler.send_response(200)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "success", "rowcount": rc}).encode("utf-8"))
        except Exception as e:
            handler.send_response(400)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode("utf-8"))

358
web_utils/network_utils.py
Normal file
@@ -0,0 +1,358 @@

# web_utils/network_utils.py
"""
Network utilities for WiFi/network operations.
Handles WiFi scanning, connection, and known-network management.
"""
from __future__ import annotations
import json
import subprocess
import logging
import re
import os
from typing import Any, Dict, Optional, List
from logger import Logger
logger = Logger(name="network_utils.py", level=logging.DEBUG)

class NetworkUtils:
    """Utilities for network and WiFi management."""

    def __init__(self, shared_data):
        self.logger = logger
        self.shared_data = shared_data

    def get_known_wifi(self, handler):
        """List known WiFi networks with priorities."""
        try:
            result = subprocess.run(
                ['nmcli', '-t', '-f', 'NAME,TYPE,AUTOCONNECT-PRIORITY', 'connection', 'show'],
                check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
            )
            stdout = result.stdout
            self.logger.debug(f"nmcli connection show output:\n{stdout}")

            known_networks = []
            lines = stdout.strip().split('\n')
            for line in lines:
                if not line.strip():
                    continue
                parts = line.split(':')
                if len(parts) == 3:
                    name, conn_type, priority = parts
                elif len(parts) == 2:
                    name, conn_type = parts
                    priority = '0'
                    self.logger.warning(f"Missing priority for connection {name}. Assigning priority 0.")
                else:
                    self.logger.warning(f"Unexpected line format: {line}")
                    continue

                if conn_type.lower() in ['802-11-wireless', 'wireless', 'wifi']:
                    try:
                        priority_int = int(priority) if priority.isdigit() else 0
                    except ValueError:
                        priority_int = 0
                        self.logger.warning(f"Non-numeric priority for {name}. Assigning priority 0.")
                    known_networks.append({
                        'ssid': name,
                        'priority': priority_int
                    })

            self.logger.debug(f"Extracted known networks: {known_networks}")
            known_networks.sort(key=lambda x: x['priority'], reverse=True)

            handler.send_response(200)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"known_networks": known_networks}).encode('utf-8'))
        except subprocess.CalledProcessError as e:
            self.logger.error(f"Error getting known Wi-Fi networks: {e.stderr.strip()}")
            handler.send_response(500)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"error": e.stderr.strip()}).encode('utf-8'))
        except Exception as e:
            self.logger.error(f"Error getting known Wi-Fi networks: {e}")
            handler.send_response(500)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"error": str(e)}).encode('utf-8'))

    def delete_known_wifi(self, data):
        """Delete a known WiFi connection."""
        ssid = None
        try:
            ssid = data['ssid']
            subprocess.run(
                ['sudo', 'nmcli', 'connection', 'delete', ssid],
                check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
            )
            self.logger.info(f"Deleted Wi-Fi connection: {ssid}")
            return {"status": "success", "message": f"Network {ssid} deleted"}
        except subprocess.CalledProcessError as e:
            error_message = f"Error deleting Wi-Fi connection {ssid if ssid else 'unknown'}: {e.stderr.strip()}"
            self.logger.error(error_message)
            return {"status": "error", "message": e.stderr.strip()}
        except Exception as e:
            error_message = f"Unexpected error deleting Wi-Fi connection {ssid if ssid else 'unknown'}: {e}"
            self.logger.error(error_message)
            return {"status": "error", "message": str(e)}

    def connect_known_wifi(self, data):
        """Connect to a known WiFi network."""
        ssid = None  # defined up front so the except blocks can reference it safely
        try:
            ssid = data['ssid']
            if not self.validate_network_configuration(ssid):
                raise Exception(f"Invalid or non-existent configuration for network '{ssid}'.")

            subprocess.run(
                ['sudo', 'nmcli', 'connection', 'up', ssid],
                check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
            )
            self.logger.info(f"Connected to known Wi-Fi network: {ssid}")
            return {"status": "success", "message": f"Connected to {ssid}"}
        except subprocess.CalledProcessError as e:
            self.logger.error(f"Error connecting to known Wi-Fi network {ssid}: {e.stderr.strip()}")
            return {"status": "error", "message": e.stderr.strip()}
        except Exception as e:
            self.logger.error(f"Unexpected error connecting to known Wi-Fi network {ssid}: {e}")
            return {"status": "error", "message": str(e)}

    def update_wifi_priority(self, data):
        """Update WiFi connection priority."""
        try:
            ssid = data['ssid']
            priority = int(data['priority'])
            subprocess.run(
                ['sudo', 'nmcli', 'connection', 'modify', ssid, 'connection.autoconnect-priority', str(priority)],
                check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
            )
            self.logger.info(f"Priority updated for {ssid} to {priority}")
            return {"status": "success", "message": "Priority updated"}
        except subprocess.CalledProcessError as e:
            self.logger.error(f"Error updating Wi-Fi priority: {e.stderr.strip()}")
            return {"status": "error", "message": e.stderr.strip()}
        except Exception as e:
            self.logger.error(f"Unexpected error updating Wi-Fi priority: {e}")
            return {"status": "error", "message": str(e)}

    def scan_wifi(self, handler):
        """Scan for available WiFi networks."""
        try:
            result = subprocess.run(
                ['sudo', 'nmcli', 'device', 'wifi', 'list'],
                check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
            )
            stdout = result.stdout
            networks = self.parse_scan_result(stdout)
            self.logger.info(f"Found {len(networks)} networks")

            current_ssid = self.get_current_ssid()
            self.logger.info(f"Current SSID: {current_ssid}")

            handler.send_response(200)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"networks": networks, "current_ssid": current_ssid}).encode('utf-8'))
        except subprocess.CalledProcessError as e:
            self.logger.error(f"Error scanning Wi-Fi networks: {e.stderr.strip()}")
            handler.send_response(500)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"error": e.stderr.strip()}).encode('utf-8'))
        except Exception as e:
            # without this fallback an unexpected error would leave the request unanswered
            self.logger.error(f"Error scanning Wi-Fi networks: {e}")
            handler.send_response(500)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"error": str(e)}).encode('utf-8'))

    def parse_scan_result(self, scan_output):
        """Parse tabular `nmcli device wifi list` output into a list of dicts."""
        networks = []
        lines = scan_output.split('\n')
        headers = []
        for line in lines:
            if line.startswith("IN-USE"):
                headers = re.split(r'\s{2,}', line)
                continue
            if headers and line.strip():
                fields = re.split(r'\s{2,}', line)
                if len(fields) >= len(headers):
                    network = dict(zip(headers, fields))
                    ssid = network.get('SSID', '')
                    signal_level = int(network.get('SIGNAL', '0'))
                    security = network.get('SECURITY', '')
                    networks.append({
                        'ssid': ssid,
                        'signal_level': signal_level,
                        'security': security
                    })
        return networks
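
    # Parsing sketch (sample `nmcli device wifi list` output; values invented):
    #
    #   IN-USE  BSSID              SSID     MODE   CHAN  RATE        SIGNAL  BARS  SECURITY
    #   *       AA:BB:CC:DD:EE:FF  HomeNet  Infra  6     130 Mbit/s  82      ....  WPA2
    #
    # yields {'ssid': 'HomeNet', 'signal_level': 82, 'security': 'WPA2'}.
    # Note the two-or-more-space split: SSIDs containing runs of spaces will
    # not parse cleanly with this approach.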

    def get_current_ssid(self):
        """Get the currently connected SSID, or None."""
        try:
            result = subprocess.run(
                ['nmcli', '-t', '-f', 'active,ssid', 'dev', 'wifi'],
                check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
            )
            lines = result.stdout.strip().split('\n')
            for line in lines:
                if ':' not in line:  # skip blank/malformed lines instead of raising
                    continue
                active, ssid = line.split(':', 1)
                if active == 'yes':
                    return ssid
            return None
        except subprocess.CalledProcessError as e:
            self.logger.error(f"Error getting current SSID: {e.stderr.strip()}")
            return None

    def connect_wifi(self, data):
        """Connect to a WiFi network (new or existing)."""
        ssid = None
        try:
            ssid = data['ssid']
            password = data.get('password', '')

            if self.check_connection_exists(ssid):
                subprocess.run(
                    ['sudo', 'nmcli', 'connection', 'up', ssid],
                    check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
                )
            elif password:
                subprocess.run(
                    ['sudo', 'nmcli', 'device', 'wifi', 'connect', ssid, 'password', password],
                    check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
                )
            else:
                subprocess.run(
                    ['sudo', 'nmcli', 'device', 'wifi', 'connect', ssid],
                    check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
                )
            return {"status": "success", "message": f"Connected to {ssid}"}
        except subprocess.CalledProcessError as e:
            self.logger.error(f"Error connecting to network {ssid}: {e.stderr.strip()}")
            return {"status": "error", "message": e.stderr.strip()}
        except Exception as e:
            self.logger.error(f"Error in connect_wifi: {e}")
            return {"status": "error", "message": str(e)}

    def check_connection_exists(self, ssid):
        """Check if a WiFi connection already exists."""
        try:
            result = subprocess.run(
                ['nmcli', '-t', '-f', 'NAME', 'connection', 'show'],
                check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
            )
            connections = result.stdout.strip().split('\n')
            return ssid in connections
        except subprocess.CalledProcessError as e:
            self.logger.error(f"Error checking existing connections: {e.stderr.strip()}")
            return False

    def validate_network_configuration(self, ssid):
        """Validate that a connection profile named `ssid` exists in NetworkManager."""
        try:
            result = subprocess.run(
                ['nmcli', '-t', '-f', 'NAME,UUID,TYPE,AUTOCONNECT', 'connection', 'show'],
                check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
            )
            connections = result.stdout.strip().split('\n')
            for conn in connections:
                # compare against the NAME field only; a plain substring match
                # would also hit UUIDs and partially matching profile names
                if conn.split(':', 1)[0] == ssid:
                    self.logger.info(f"Network {ssid} validated in NetworkManager.")
                    return True
            self.logger.warning(f"Network {ssid} not found in NetworkManager.")
            return False
        except Exception as e:
            self.logger.error(f"Error validating network {ssid}: {e}")
            return False

    def import_potfiles(self, data=None):
        """Import WiFi credentials from .pot/.potfile files."""
        try:
            potfiles_folder = self.shared_data.potfiles_dir
            import glob
            potfile_paths = glob.glob(f"{potfiles_folder}/*.pot") + glob.glob(f"{potfiles_folder}/*.potfile")

            networks_added = []
            DEFAULT_PRIORITY = 5

            for potfile_path in potfile_paths:
                with open(potfile_path, 'r') as potfile:
                    for line in potfile:
                        line = line.strip()
                        if ':' not in line:
                            self.logger.warning(f"Ignoring malformed line in {potfile_path}: {line}")
                            continue

                        if line.startswith('$WPAPSK$') and '#' in line:
                            try:
                                ssid_hash_part, password = line.split(':', 1)
                                ssid = ssid_hash_part.split('#')[0].replace('$WPAPSK$', '')
                            except ValueError:
                                self.logger.warning(f"Failed to parse WPAPSK line in {potfile_path}: {line}")
                                continue
                        elif len(line.split(':')) == 4:
                            try:
                                _, _, ssid, password = line.split(':')
                            except ValueError:
                                self.logger.warning(f"Failed to parse custom line in {potfile_path}: {line}")
                                continue
                        else:
                            self.logger.warning(f"Unknown format in {potfile_path}: {line}")
                            continue

                        if ssid and password:
                            if not self.check_connection_exists(ssid):
                                try:
                                    subprocess.run(
                                        ['sudo', 'nmcli', 'connection', 'add', 'type', 'wifi',
                                         'con-name', ssid, 'ifname', '*', 'ssid', ssid,
                                         'wifi-sec.key-mgmt', 'wpa-psk', 'wifi-sec.psk', password,
                                         'connection.autoconnect', 'yes',
                                         'connection.autoconnect-priority', str(DEFAULT_PRIORITY)],
                                        check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
                                    )
                                    networks_added.append(ssid)
                                    self.logger.info(f"Imported network {ssid} from {potfile_path}")
                                except subprocess.CalledProcessError as e:
                                    self.logger.error(f"Failed to add network {ssid}: {e.stderr.strip()}")
                            else:
                                self.logger.info(f"Network {ssid} already exists. Skipping.")
                        else:
                            self.logger.warning(f"Incomplete data in {potfile_path}: {line}")

            return {"status": "success", "networks_added": networks_added}
        except Exception as e:
            self.logger.error(f"Unexpected error importing potfiles: {e}")
            return {"status": "error", "message": str(e)}

    def delete_preconfigured_file(self, handler):
        """Delete NetworkManager's preconfigured connection file."""
        try:
            os.remove('/etc/NetworkManager/system-connections/preconfigured.nmconnection')
            handler.send_response(200)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "success"}).encode('utf-8'))
        except FileNotFoundError:
            handler.send_response(404)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "error", "message": "File not found"}).encode('utf-8'))
        except Exception as e:
            handler.send_response(500)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode('utf-8'))

    def create_preconfigured_file(self, handler):
        """Create a placeholder preconfigured connection file."""
        try:
            with open('/etc/NetworkManager/system-connections/preconfigured.nmconnection', 'w') as f:
                f.write('Example content')  # placeholder; add the real default content here
            handler.send_response(200)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "success"}).encode('utf-8'))
        except Exception as e:
            handler.send_response(500)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode('utf-8'))

199
web_utils/orchestrator_utils.py
Normal file
@@ -0,0 +1,199 @@

# web_utils/orchestrator_utils.py
"""
Orchestrator management utilities.
Handles attack execution, scanning, and credential management.
"""
from __future__ import annotations
import json
import html
import importlib
from datetime import datetime
from typing import Any, Dict, Optional
import logging
from logger import Logger
logger = Logger(name="orchestrator_utils.py", level=logging.DEBUG)

class OrchestratorUtils:
    """Utilities for orchestrator and attack management."""

    def __init__(self, shared_data):
        self.logger = logger
        self.shared_data = shared_data

    def execute_manual_attack(self, params):
        """Execute a manual attack on a specific target."""
        try:
            ip = params['ip']
            port = params['port']
            action_class = params['action']
            self.shared_data.bjorn_status_text2 = ""

            self.logger.info(f"Received request to execute {action_class} on {ip}:{port}")

            # Load actions
            self._load_actions()
            action_instance = next((action for action in self.shared_data.actions if action.action_name == action_class), None)
            if action_instance is None:
                raise Exception(f"Action class {action_class} not found")

            current_data = self.shared_data.read_data()
            row = next((r for r in current_data if r["IPs"] == ip), None)
            if row is None:
                raise Exception(f"No data found for IP: {ip}")

            action_key = action_instance.action_name
            self.logger.info(f"Executing {action_key} on {ip}:{port}")
            result = action_instance.execute(ip, port, row, action_key)

            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            if result == 'success':
                row[action_key] = f'success_{timestamp}'
                self.logger.info(f"Action {action_key} executed successfully on {ip}:{port}")
            else:
                row[action_key] = f'failed_{timestamp}'
                self.logger.error(f"Action {action_key} failed on {ip}:{port}")
            self.shared_data.write_data(current_data)

            # Update status after completion
            self.shared_data.bjorn_orch_status = "IDLE"
            self.shared_data.bjorn_status_text2 = "Waiting for instructions..."

            return {"status": "success", "message": "Manual attack executed"}

        except Exception as e:
            self.logger.error(f"Error executing manual attack: {e}")
            self.shared_data.bjorn_orch_status = "IDLE"
            self.shared_data.bjorn_status_text2 = "Waiting for instructions..."
            return {"status": "error", "message": str(e)}

    def execute_manual_scan(self):
        """Execute a manual network scan."""
        try:
            # Import the network scanner action; its module exposes the class name in 'b_class'
            module = importlib.import_module('actions.scanning')
            scanner_class = getattr(module, getattr(module, 'b_class'))
            network_scanner = scanner_class(self.shared_data)

            # Update status
            self.shared_data.bjorn_orch_status = "NetworkScanner"
            self.shared_data.bjorn_status_text2 = "Manual scan..."

            # Execute scan
            network_scanner.scan()

            # Reset status
            self.shared_data.bjorn_orch_status = "IDLE"
            self.shared_data.bjorn_status_text2 = "Waiting for instructions..."

            return {"status": "success", "message": "Network scan completed"}

        except Exception as e:
            self.logger.error(f"Error executing manual scan: {e}")
            self.shared_data.bjorn_orch_status = "IDLE"
            self.shared_data.bjorn_status_text2 = "Waiting for instructions..."
            return {"status": "error", "message": str(e)}
|
||||
|
||||
def start_orchestrator(self):
|
||||
"""Start the orchestrator."""
|
||||
try:
|
||||
bjorn_instance = self.shared_data.bjorn_instance
|
||||
self.shared_data.manual_mode = False
|
||||
self.shared_data.orchestrator_should_exit = False
|
||||
bjorn_instance.start_orchestrator()
|
||||
return {"status": "success", "message": "Orchestrator starting..."}
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error starting orchestrator: {e}")
|
||||
return {"status": "error", "message": str(e)}
|
||||
|
||||
def stop_orchestrator(self):
|
||||
"""Stop the orchestrator."""
|
||||
try:
|
||||
bjorn_instance = self.shared_data.bjorn_instance
|
||||
self.shared_data.manual_mode = False
|
||||
bjorn_instance.stop_orchestrator()
|
||||
self.shared_data.orchestrator_should_exit = True
|
||||
return {"status": "success", "message": "Orchestrator stopping..."}
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error stopping orchestrator: {e}")
|
||||
return {"status": "error", "message": str(e)}
|
||||
|
||||
def serve_credentials_data(self, handler):
|
||||
"""Serve credentials data as HTML."""
|
||||
try:
|
||||
creds = self.shared_data.db.list_creds_grouped()
|
||||
html_content = self._html_from_creds(creds)
|
||||
handler.send_response(200)
|
||||
handler.send_header("Content-type", "text/html")
|
||||
handler.end_headers()
|
||||
handler.wfile.write(html_content.encode("utf-8"))
|
||||
except Exception as e:
|
||||
handler.send_response(500)
|
||||
handler.send_header("Content-type", "application/json")
|
||||
handler.end_headers()
|
||||
handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode("utf-8"))
|
||||
|
||||
def _html_from_creds(self, rows):
|
||||
"""Generate HTML table from credentials data."""
|
||||
out = ['<div class="credentials-container">']
|
||||
|
||||
# Group by service
|
||||
by_service = {}
|
||||
for r in rows:
|
||||
by_service.setdefault(r["service"], []).append(r)
|
||||
|
||||
for svc, items in by_service.items():
|
||||
out.append(f"<h2>{html.escape(svc)}.db</h2>")
|
||||
out.append('<table class="styled-table"><thead><tr>')
|
||||
for h in ["MAC", "IP", "Hostname", "User", "Password", "Port", "Database", "Last Seen"]:
|
||||
out.append(f"<th>{h}</th>")
|
||||
out.append("</tr></thead><tbody>")
|
||||
|
||||
for r in items:
|
||||
out.append("<tr>")
|
||||
out.append(f"<td>{html.escape(r.get('mac_address') or '')}</td>")
|
||||
out.append(f"<td>{html.escape(r.get('ip') or '')}</td>")
|
||||
out.append(f"<td>{html.escape(r.get('hostname') or '')}</td>")
|
||||
out.append(f"<td>{html.escape(r.get('user') or '')}</td>")
|
||||
out.append(f"<td>{html.escape(r.get('password') or '')}</td>")
|
||||
out.append(f"<td>{html.escape(str(r.get('port') or ''))}</td>")
|
||||
out.append(f"<td>{html.escape(r.get('database') or '')}</td>")
|
||||
out.append(f"<td>{html.escape(r.get('last_seen') or '')}</td>")
|
||||
out.append("</tr>")
|
||||
|
||||
out.append("</tbody></table>")
|
||||
|
||||
out.append("</div>")
|
||||
return "\n".join(out)
|
||||
|
||||
def _load_actions(self):
|
||||
"""Load actions from database."""
|
||||
if self.shared_data.actions is None or self.shared_data.standalone_actions is None:
|
||||
self.shared_data.actions, self.shared_data.standalone_actions = [], []
|
||||
for action in self.shared_data.db.list_actions():
|
||||
module_name = action["b_module"]
|
||||
if module_name == 'scanning':
|
||||
self._load_scanner(module_name)
|
||||
else:
|
||||
self._load_action(module_name, action)
|
||||
|
||||
def _load_scanner(self, module_name):
|
||||
"""Load the network scanner."""
|
||||
module = importlib.import_module(f'actions.{module_name}')
|
||||
b_class = getattr(module, 'b_class')
|
||||
self.shared_data.network_scanner = getattr(module, b_class)(self.shared_data)
|
||||
|
||||
def _load_action(self, module_name, action):
|
||||
"""Load an action from the actions directory."""
|
||||
module = importlib.import_module(f'actions.{module_name}')
|
||||
try:
|
||||
b_class = action["b_class"]
|
||||
action_instance = getattr(module, b_class)(self.shared_data)
|
||||
action_instance.action_name = b_class
|
||||
action_instance.port = action.get("b_port")
|
||||
action_instance.b_parent_action = action.get("b_parent")
|
||||
if action_instance.port == 0:
|
||||
self.shared_data.standalone_actions.append(action_instance)
|
||||
else:
|
||||
self.shared_data.actions.append(action_instance)
|
||||
except AttributeError as e:
|
||||
self.logger.error(f"Module {module_name} is missing required attributes: {e}")
|
||||
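A usage sketch for the class above, assuming shared_data is the live SharedData instance and that an action whose b_class is "SSHBruteforce" exists in the DB (both are assumptions for illustration, not guarantees of this commit):

from web_utils.orchestrator_utils import OrchestratorUtils

orch = OrchestratorUtils(shared_data)
result = orch.execute_manual_attack({
    "ip": "192.168.1.23",       # must match a row's "IPs" value in read_data()
    "port": 22,
    "action": "SSHBruteforce",  # must match an action's action_name / b_class
})
print(result)  # {"status": "success", ...} or {"status": "error", ...}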
737
web_utils/script_utils.py
Normal file
@@ -0,0 +1,737 @@
# web_utils/script_utils.py
"""
Script launcher and execution utilities.
Handles script management, execution, monitoring, and output capture.
"""
from __future__ import annotations
import json
import subprocess
import os
import time
import threading
import importlib.util
import ast
import html
import cgi
from pathlib import Path
from typing import Any, Dict, Optional, List
from io import BytesIO
import logging
from logger import Logger
logger = Logger(name="script_utils.py", level=logging.DEBUG)


class ScriptUtils:
    """Utilities for script management and execution."""

    def __init__(self, shared_data):
        self.logger = logger
        self.shared_data = shared_data

    def get_script_description(self, script_path: Path) -> str:
        """Extract description from script comments."""
        try:
            with open(script_path, 'r', encoding='utf-8') as f:
                lines = [line.strip() for line in f.readlines()[:10]]

            description = []
            for line in lines:
                if line.startswith('#'):
                    clean_line = html.escape(line[1:].strip())
                    description.append(clean_line)
                elif line.startswith('"""') or line.startswith("'''"):
                    break
                elif line and not description:
                    break

            description_text = '\n'.join(description) if description else "No description available"
            return description_text
        except Exception as e:
            self.logger.error(f"Error reading script description: {e}")
            return "Error reading description"

    def list_scripts(self) -> Dict:
        """List all actions with metadata for the launcher."""
        try:
            actions_out: list[dict] = []
            db_actions = self.shared_data.db.list_actions()

            for row in db_actions:
                b_class = (row.get("b_class") or "").strip()
                b_module = (row.get("b_module") or "").strip()
                action_path = os.path.join(self.shared_data.actions_dir, f"{b_module}.py")

                # Load b_args from DB (priority)
                db_args_raw = row.get("b_args")
                if isinstance(db_args_raw, str):
                    db_args_raw_str = db_args_raw.strip()
                    if (db_args_raw_str.startswith("{") and db_args_raw_str.endswith("}")) or \
                       (db_args_raw_str.startswith("[") and db_args_raw_str.endswith("]")):
                        try:
                            b_args = json.loads(db_args_raw_str)
                        except Exception:
                            b_args = {}
                    else:
                        b_args = {}
                elif db_args_raw is None:
                    b_args = {}
                else:
                    b_args = db_args_raw

                # Basic metadata from DB
                b_name = row.get("b_name")
                b_description = row.get("b_description") or row.get("b_status") or "No description available"
                b_author = row.get("b_author")
                b_version = row.get("b_version")
                b_icon = row.get("b_icon")
                b_docs_url = row.get("b_docs_url")

                b_examples = None
                if row.get("b_examples") is not None:
                    try:
                        if isinstance(row["b_examples"], str):
                            b_examples = json.loads(row["b_examples"])
                        else:
                            b_examples = row["b_examples"]
                    except Exception:
                        b_examples = None

                # Enrich from module if available
                try:
                    if os.path.exists(action_path):
                        spec = importlib.util.spec_from_file_location(b_module, action_path)
                        module = importlib.util.module_from_spec(spec)
                        spec.loader.exec_module(module)

                        # Dynamic b_args
                        if hasattr(module, "compute_dynamic_b_args"):
                            try:
                                b_args = module.compute_dynamic_b_args(b_args or {})
                            except Exception as e:
                                self.logger.warning(f"compute_dynamic_b_args failed for {b_module}: {e}")

                        # Enrich fields
                        if getattr(module, "b_name", None): b_name = module.b_name
                        if getattr(module, "b_description", None): b_description = module.b_description
                        if getattr(module, "b_author", None): b_author = module.b_author
                        if getattr(module, "b_version", None): b_version = module.b_version
                        if getattr(module, "b_icon", None): b_icon = module.b_icon
                        if getattr(module, "b_docs_url", None): b_docs_url = module.b_docs_url
                        if getattr(module, "b_examples", None): b_examples = module.b_examples

                except Exception as e:
                    self.logger.warning(f"Could not import {b_module} for dynamic/meta: {e}")

                # Parse tags
                tags_raw = row.get("b_tags")
                if isinstance(tags_raw, str):
                    t = tags_raw.strip()
                    if (t.startswith("{") and t.endswith("}")) or (t.startswith("[") and t.endswith("]")):
                        try:
                            tags = json.loads(t)
                        except Exception:
                            tags = tags_raw
                    else:
                        tags = tags_raw
                else:
                    tags = tags_raw

                # Display name
                display_name = b_name or (f"{b_module}.py" if b_module else (f"{b_class}.py" if b_class else "Unnamed"))

                # Icon URL
                icon_url = self._normalize_icon_url(b_icon, b_class)

                # Build action info
                action_info = {
                    "name": display_name,
                    "path": action_path,
                    "b_module": b_module,
                    "b_class": b_class,
                    "category": row.get("b_action", "normal") or "normal",
                    "type": "action",
                    "description": b_description or "No description available",
                    "b_args": b_args,
                    "enabled": bool(row.get("b_enabled", 1)),
                    "priority": row.get("b_priority", 50),
                    "tags": tags,
                    "b_author": b_author,
                    "b_version": b_version,
                    "b_icon": icon_url,
                    "b_docs_url": b_docs_url,
                    "b_examples": b_examples,
                    "is_running": False,
                    "output": []
                }

                # Runtime state
                with self.shared_data.scripts_lock:
                    if action_path in self.shared_data.running_scripts:
                        runinfo = self.shared_data.running_scripts[action_path]
                        action_info["is_running"] = runinfo.get("is_running", False)
                        action_info["output"] = runinfo.get("output", [])
                        action_info["last_error"] = runinfo.get("last_error", "")

                actions_out.append(action_info)

            actions_out.sort(key=lambda x: x["name"])
            return {"status": "success", "data": actions_out}

        except Exception as e:
            self.logger.error(f"Error listing actions: {e}")
            return {"status": "error", "message": str(e)}
    def _normalize_icon_url(self, raw_icon: str | None, b_class: str) -> str:
        """Normalize icon URL for frontend consumption."""
        def _default_icon_url(b_class: str) -> str | None:
            if not b_class:
                return None
            fname = f"{b_class}.png"
            icon_fs = os.path.join(self.shared_data.actions_icons_dir, fname)
            return f"/actions_icons/{fname}" if os.path.exists(icon_fs) else None

        if raw_icon:
            s = str(raw_icon).strip()
            if s.startswith("http://") or s.startswith("https://"):
                return s
            if "/" not in s and "\\" not in s:
                return f"/actions_icons/{s}"

        # Fall back to the per-class default icon, then to the generic default
        url = _default_icon_url(b_class)
        if url:
            return url
        return "/actions/actions_icons/default.png"
    def run_script(self, data: Dict) -> Dict:
        """Run an action/script with arguments."""
        try:
            script_key = data.get("script_name")
            args = data.get("args", "")

            if not script_key:
                return {"status": "error", "message": "Script name is required"}

            # Find action in database
            action = None
            for a in self.shared_data.db.list_actions():
                if a["b_class"] == script_key or a["b_module"] == script_key:
                    action = a
                    break

            if not action:
                return {"status": "error", "message": f"Action {script_key} not found"}

            module_name = action["b_module"]
            script_path = os.path.join(self.shared_data.actions_dir, f"{module_name}.py")

            if not os.path.exists(script_path):
                return {"status": "error", "message": f"Script file {script_path} not found"}

            # Check if already running
            with self.shared_data.scripts_lock:
                if script_path in self.shared_data.running_scripts and \
                   self.shared_data.running_scripts[script_path].get("is_running", False):
                    return {"status": "error", "message": f"Script {module_name} is already running"}

            # Prepare environment
            env = dict(os.environ)
            env["PYTHONUNBUFFERED"] = "1"
            env["BJORN_EMBEDDED"] = "1"

            # Start process
            cmd = ["sudo", "python3", "-u", script_path]
            if args:
                cmd.extend(args.split())

            process = subprocess.Popen(
                cmd,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
                bufsize=1,
                universal_newlines=True,
                env=env,
                cwd=self.shared_data.actions_dir
            )

            # Store process info (under the lock: the monitor thread and other
            # requests read this dict concurrently)
            with self.shared_data.scripts_lock:
                self.shared_data.running_scripts[script_path] = {
                    "process": process,
                    "output": [],
                    "start_time": time.time(),
                    "is_running": True,
                    "last_error": "",
                    "b_class": action["b_class"],
                    "b_module": module_name,
                }

            # Start monitoring thread
            threading.Thread(
                target=self.monitor_script_output,
                args=(script_path, process),
                daemon=True
            ).start()

            return {
                "status": "success",
                "message": f"Started {module_name}",
                "data": {
                    "is_running": True,
                    "output": [],
                    "script_path": script_path
                }
            }

        except Exception as e:
            self.logger.error(f"Error running script: {e}")
            return {"status": "error", "message": str(e)}
    def stop_script(self, data: Dict) -> Dict:
        """Stop a running script."""
        try:
            script_name = data.get('script_name')

            if not script_name:
                return {"status": "error", "message": "Script name is required"}

            # Handle both paths and names
            if not script_name.startswith('/'):
                for path, info in self.shared_data.running_scripts.items():
                    if info.get("b_module") == script_name or info.get("b_class") == script_name:
                        script_name = path
                        break

            with self.shared_data.scripts_lock:
                if script_name not in self.shared_data.running_scripts:
                    return {"status": "error", "message": f"Script {script_name} not found or not running"}

                script_info = self.shared_data.running_scripts[script_name]
                if script_info["process"]:
                    script_info["process"].terminate()
                    try:
                        script_info["process"].wait(timeout=5)
                    except subprocess.TimeoutExpired:
                        script_info["process"].kill()
                        script_info["process"].wait()

                script_info["output"].append("Script stopped by user")
                script_info["is_running"] = False
                script_info["process"] = None

            return {"status": "success", "message": f"Script {script_name} stopped"}

        except Exception as e:
            self.logger.error(f"Error stopping script: {e}")
            return {"status": "error", "message": str(e)}

    def get_script_output(self, data: Dict) -> Dict:
        """Get output for a running or completed script."""
        try:
            script_name = data.get('script_name')

            if not script_name:
                return {"status": "error", "message": "Script name is required"}

            self.logger.debug(f"Getting output for: {script_name}")

            with self.shared_data.scripts_lock:
                # Direct path lookup
                if script_name in self.shared_data.running_scripts:
                    script_info = self.shared_data.running_scripts[script_name]
                    return {
                        "status": "success",
                        "data": {
                            "output": script_info["output"],
                            "is_running": script_info.get("is_running", False),
                            "runtime": time.time() - script_info.get("start_time", time.time()),
                            "last_error": script_info.get("last_error", "")
                        }
                    }

                # Try basename lookup
                script_basename = os.path.basename(script_name)
                for key, info in self.shared_data.running_scripts.items():
                    if os.path.basename(key) == script_basename:
                        return {
                            "status": "success",
                            "data": {
                                "output": info["output"],
                                "is_running": info.get("is_running", False),
                                "runtime": time.time() - info.get("start_time", time.time()),
                                "last_error": info.get("last_error", "")
                            }
                        }

                # Try module/class name lookup
                for key, info in self.shared_data.running_scripts.items():
                    if info.get("b_module") == script_name or info.get("b_class") == script_name:
                        return {
                            "status": "success",
                            "data": {
                                "output": info["output"],
                                "is_running": info.get("is_running", False),
                                "runtime": time.time() - info.get("start_time", time.time()),
                                "last_error": info.get("last_error", "")
                            }
                        }

            # Not found - return empty
            return {
                "status": "success",
                "data": {
                    "output": [],
                    "is_running": False,
                    "runtime": 0,
                    "last_error": ""
                }
            }

        except Exception as e:
            self.logger.error(f"Error getting script output: {e}")
            return {"status": "error", "message": str(e)}

    def monitor_script_output(self, script_path: str, process: subprocess.Popen):
        """Monitor script output in real time."""
        try:
            self.logger.debug(f"Starting output monitoring for: {script_path}")

            while True:
                line = process.stdout.readline()

                if not line and process.poll() is not None:
                    break

                if line:
                    line = line.rstrip()
                    with self.shared_data.scripts_lock:
                        if script_path in self.shared_data.running_scripts:
                            self.shared_data.running_scripts[script_path]["output"].append(line)
                    self.logger.debug(f"[{os.path.basename(script_path)}] {line}")

            # Process ended
            return_code = process.poll()
            with self.shared_data.scripts_lock:
                if script_path in self.shared_data.running_scripts:
                    info = self.shared_data.running_scripts[script_path]
                    info["process"] = None
                    info["is_running"] = False

                    if return_code == 0:
                        info["output"].append("Script completed successfully")
                    else:
                        info["output"].append(f"Script exited with code {return_code}")
                        info["last_error"] = f"Exit code: {return_code}"

            self.logger.info(f"Script {script_path} finished with code {return_code}")

        except Exception as e:
            self.logger.error(f"Error monitoring output for {script_path}: {e}")
            with self.shared_data.scripts_lock:
                if script_path in self.shared_data.running_scripts:
                    info = self.shared_data.running_scripts[script_path]
                    info["output"].append(f"Monitoring error: {str(e)}")
                    info["last_error"] = str(e)
                    info["process"] = None
                    info["is_running"] = False

    def upload_script(self, handler) -> None:
        """Upload a new script file."""
        try:
            form = cgi.FieldStorage(
                fp=handler.rfile,
                headers=handler.headers,
                environ={'REQUEST_METHOD': 'POST'}
            )
            if 'script_file' not in form:
                resp = {"status": "error", "message": "Missing 'script_file'"}
                handler.send_response(400)
            else:
                file_item = form['script_file']
                if not file_item.filename.endswith('.py'):
                    resp = {"status": "error", "message": "Only .py allowed"}
                    handler.send_response(400)
                else:
                    script_name = os.path.basename(file_item.filename)
                    script_path = Path(self.shared_data.actions_dir) / script_name
                    if script_path.exists():
                        resp = {"status": "error", "message": f"Script '{script_name}' already exists."}
                        handler.send_response(400)
                    else:
                        with open(script_path, 'wb') as f:
                            f.write(file_item.file.read())

                        description = self.get_script_description(script_path)

                        self.shared_data.db.add_script(
                            name=script_name,
                            type_="script",
                            path=str(script_path),
                            category="general",
                            description=description
                        )

                        resp = {"status": "success", "message": f"Script '{script_name}' uploaded."}
                        handler.send_response(200)
            handler.send_header('Content-Type', 'application/json')
            handler.end_headers()
            handler.wfile.write(json.dumps(resp).encode('utf-8'))
        except Exception as e:
            self.logger.error(f"Error uploading script: {e}")
            handler.send_response(500)
            handler.send_header('Content-Type', 'application/json')
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode('utf-8'))

    def delete_script(self, data: Dict) -> Dict:
        """Delete a script."""
        try:
            script_name = data.get('script_name')
            if not script_name:
                return {"status": "error", "message": "Missing script_name"}

            rows = self.shared_data.db.query("SELECT * FROM scripts WHERE name=?", (script_name,))
            if not rows:
                return {"status": "error", "message": f"Script '{script_name}' not found in DB"}
            row = rows[0]
            is_project = row["type"] == "project"
            path = Path(row["path"])

            if is_project and path.exists():
                import shutil
                shutil.rmtree(path)
            else:
                script_path = Path(self.shared_data.actions_dir) / script_name
                if script_path.exists():
                    with self.shared_data.scripts_lock:
                        if str(script_path) in self.shared_data.running_scripts and \
                           self.shared_data.running_scripts[str(script_path)].get("is_running", False):
                            return {"status": "error", "message": f"Script '{script_name}' is running."}
                    script_path.unlink()

            self.shared_data.db.delete_script(script_name)
            return {"status": "success", "message": f"{'Project' if is_project else 'Script'} '{script_name}' deleted."}
        except Exception as e:
            self.logger.error(f"Error deleting script: {e}")
            return {"status": "error", "message": str(e)}

    def upload_project(self, handler) -> None:
        """Upload a project with multiple files."""
        try:
            form = cgi.FieldStorage(
                fp=handler.rfile,
                headers=handler.headers,
                environ={'REQUEST_METHOD': 'POST'}
            )
            if 'main_file' not in form:
                raise ValueError("Missing main_file")
            main_file_path = form.getvalue('main_file')
            project_name = Path(main_file_path).parts[0]
            project_dir = Path(self.shared_data.actions_dir) / project_name
            project_dir.mkdir(exist_ok=True)

            files = form['project_files[]']
            if not isinstance(files, list):
                files = [files]
            for fileitem in files:
                if fileitem.filename:
                    relative_path = Path(fileitem.filename).relative_to(project_name)
                    file_path = project_dir / relative_path
                    file_path.parent.mkdir(parents=True, exist_ok=True)
                    with open(file_path, 'wb') as f:
                        f.write(fileitem.file.read())

            description = self.get_script_description(project_dir / Path(main_file_path).name)

            self.shared_data.db.add_script(
                name=project_name,
                type_="project",
                path=str(project_dir),
                main_file=main_file_path,
                category="projects",
                description=description
            )

            resp = {"status": "success", "message": f"Project '{project_name}' uploaded."}
            handler.send_response(200)
        except Exception as e:
            self.logger.error(f"Error uploading project: {e}")
            resp = {"status": "error", "message": str(e)}
            handler.send_response(400)
        handler.send_header('Content-Type', 'application/json')
        handler.end_headers()
        handler.wfile.write(json.dumps(resp).encode('utf-8'))

    def get_action_args_schema(self, data: Dict) -> Dict:
        """Get the arguments schema for a specific action."""
        try:
            action_name = data.get("action_name")

            if not action_name:
                return {"status": "error", "message": "Action name is required"}

            action = None
            for a in self.shared_data.db.list_actions():
                if a["b_class"] == action_name or a["b_module"] == action_name:
                    action = a
                    break

            if not action:
                return {"status": "error", "message": f"Action {action_name} not found"}

            module_name = action["b_module"]
            action_path = os.path.join(self.shared_data.actions_dir, f"{module_name}.py")

            b_args = {}

            if os.path.exists(action_path):
                try:
                    spec = importlib.util.spec_from_file_location(module_name, action_path)
                    module = importlib.util.module_from_spec(spec)
                    spec.loader.exec_module(module)

                    if hasattr(module, 'b_args'):
                        b_args = module.b_args

                    if hasattr(module, 'compute_dynamic_b_args'):
                        b_args = module.compute_dynamic_b_args(b_args)

                except Exception as e:
                    self.logger.warning(f"Could not load b_args for {module_name}: {e}")

            return {
                "status": "success",
                "data": {
                    "action_name": action_name,
                    "module": module_name,
                    "args_schema": b_args,
                    "description": action.get("b_description", ""),
                    "enabled": bool(action.get("b_enabled", 1))
                }
            }

        except Exception as e:
            self.logger.error(f"Error getting action args schema: {e}")
            return {"status": "error", "message": str(e)}

    def get_running_scripts(self) -> Dict:
        """Get list of all currently running scripts."""
        try:
            running = []

            with self.shared_data.scripts_lock:
                for path, info in self.shared_data.running_scripts.items():
                    if info.get("is_running", False):
                        running.append({
                            "path": path,
                            "name": os.path.basename(path),
                            "module": info.get("b_module", ""),
                            "class": info.get("b_class", ""),
                            "start_time": info.get("start_time", 0),
                            "runtime": time.time() - info.get("start_time", time.time()),
                            "output_lines": len(info.get("output", []))
                        })

            return {"status": "success", "data": running}

        except Exception as e:
            self.logger.error(f"Error getting running scripts: {e}")
            return {"status": "error", "message": str(e)}

    def clear_script_output(self, data: Dict) -> Dict:
        """Clear output for a specific script."""
        try:
            script_name = data.get('script_name')

            if not script_name:
                return {"status": "error", "message": "Script name is required"}

            cleared = False
            with self.shared_data.scripts_lock:
                if script_name in self.shared_data.running_scripts:
                    self.shared_data.running_scripts[script_name]["output"] = []
                    cleared = True
                else:
                    for key, info in self.shared_data.running_scripts.items():
                        if (os.path.basename(key) == script_name or
                                info.get("b_module") == script_name or
                                info.get("b_class") == script_name):
                            info["output"] = []
                            cleared = True
                            break

            if cleared:
                return {"status": "success", "message": "Output cleared"}
            else:
                return {"status": "error", "message": "Script not found"}

        except Exception as e:
            self.logger.error(f"Error clearing script output: {e}")
            return {"status": "error", "message": str(e)}

    def export_script_logs(self, data: Dict) -> Dict:
        """Export logs for a script to a file."""
        try:
            from datetime import datetime
            import csv

            script_name = data.get('script_name')
            format_type = data.get('format', 'txt')

            if not script_name:
                return {"status": "error", "message": "Script name is required"}

            output = []
            script_info = None

            with self.shared_data.scripts_lock:
                if script_name in self.shared_data.running_scripts:
                    script_info = self.shared_data.running_scripts[script_name]
                else:
                    for key, info in self.shared_data.running_scripts.items():
                        if (os.path.basename(key) == script_name or
                                info.get("b_module") == script_name or
                                info.get("b_class") == script_name):
                            script_info = info
                            break

            if not script_info:
                return {"status": "error", "message": "Script not found"}

            output = script_info.get("output", [])

            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            filename = f"{script_name}_{timestamp}.{format_type}"
            filepath = os.path.join(self.shared_data.output_dir, filename)

            if format_type == 'json':
                with open(filepath, 'w') as f:
                    json.dump({
                        "script": script_name,
                        "timestamp": timestamp,
                        "logs": output
                    }, f, indent=2)
            elif format_type == 'csv':
                with open(filepath, 'w', newline='') as f:
                    writer = csv.writer(f)
                    writer.writerow(['Timestamp', 'Message'])
                    for line in output:
                        writer.writerow([datetime.now().isoformat(), line])
            else:
                with open(filepath, 'w') as f:
                    f.write('\n'.join(output))

            return {
                "status": "success",
                "message": f"Logs exported to {filename}",
                "data": {
                    "filename": filename,
                    "path": filepath,
                    "lines": len(output)
                }
            }

        except Exception as e:
            self.logger.error(f"Error exporting logs: {e}")
            return {"status": "error", "message": str(e)}
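The launcher API above is asynchronous: run_script starts the subprocess and returns immediately, and get_script_output is meant to be polled. A sketch of the expected client loop (the action name "scanning" is illustrative; script_utils is assumed to be a wired-up ScriptUtils instance):

import time

resp = script_utils.run_script({"script_name": "scanning", "args": ""})
if resp["status"] == "success":
    while True:
        out = script_utils.get_script_output({"script_name": "scanning"})["data"]
        print("\n".join(out["output"][-5:]))  # tail of the captured stdout
        if not out["is_running"]:
            break
        time.sleep(1)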
243
web_utils/studio_utils.py
Normal file
@@ -0,0 +1,243 @@
# web_utils/studio_utils.py
"""
Studio visual editor utilities.
Handles action/edge/host management for the visual workflow editor.
"""
from __future__ import annotations
import json
from typing import Any, Dict, Optional
from urllib.parse import urlparse, parse_qs
import logging
from logger import Logger
logger = Logger(name="studio_utils.py", level=logging.DEBUG)


class StudioUtils:
    """Utilities for studio visual editor operations."""

    def __init__(self, shared_data):
        self.logger = logger
        self.shared_data = shared_data

    def studio_get_actions_studio(self, handler):
        """Get all studio actions with positions and metadata."""
        try:
            rows = self.shared_data.db.get_studio_actions()
            return self._write_json(handler, {"status": "ok", "data": rows})
        except Exception as e:
            self.logger.error(f"studio_get_actions error: {e}")
            return self._write_json(handler, {"status": "error", "message": str(e)}, 500)

    def studio_get_actions_db(self, handler):
        """Get all runtime actions from DB."""
        try:
            rows = self.shared_data.db.get_db_actions()
            return self._write_json(handler, {"status": "ok", "data": rows})
        except Exception as e:
            self.logger.error(f"studio_get_actions_db error: {e}")
            return self._write_json(handler, {"status": "error", "message": str(e)}, 500)

    def studio_get_edges(self, handler):
        """Get all studio edges (connections between actions)."""
        try:
            rows = self.shared_data.db.get_studio_edges()
            return self._write_json(handler, {"status": "ok", "data": rows})
        except Exception as e:
            self.logger.error(f"studio_get_edges error: {e}")
            return self._write_json(handler, {"status": "error", "message": str(e)}, 500)

    def studio_get_hosts(self, handler):
        """Get hosts for studio (real + simulated)."""
        try:
            qs = parse_qs(urlparse(handler.path).query)
            include_real = qs.get('include_real', ['1'])[0] not in ('0', 'false', 'False')
            rows = self.shared_data.db.get_studio_hosts(include_real=include_real)
            return self._write_json(handler, {"status": "ok", "data": rows})
        except Exception as e:
            self.logger.error(f"studio_get_hosts error: {e}")
            return self._write_json(handler, {"status": "error", "message": str(e)}, 500)

    def studio_load_layout(self, handler):
        """Load a saved studio layout."""
        try:
            qs = parse_qs(urlparse(handler.path).query)
            name = (qs.get('name', [''])[0] or '').strip()
            if not name:
                return self._write_json(handler, {"status": "error", "message": "Missing layout name"}, 400)

            row = self.shared_data.db.load_studio_layout(name)
            if not row:
                return self._write_json(handler, {"status": "error", "message": "Layout not found"}, 404)
            return self._write_json(handler, {"status": "ok", "data": row})
        except Exception as e:
            self.logger.error(f"studio_load_layout error: {e}")
            return self._write_json(handler, {"status": "error", "message": str(e)}, 500)

    def studio_sync_actions_studio(self):
        """Import values from 'actions' table to 'actions_studio' (non-destructive)."""
        try:
            self.shared_data.db._sync_actions_studio_schema_and_rows()
            return {
                "status": "ok",
                "message": "Import from 'actions' completed (non-destructive). Save manually."
            }
        except Exception as e:
            self.logger.error(f"studio_sync_actions error: {e}")
            return {"status": "error", "message": str(e)}

    def studio_update_action(self, data: dict):
        """Update action studio properties."""
        try:
            b_class = (data.get('b_class') or '').strip()
            updates = data.get('updates') or {}
            if not b_class or not isinstance(updates, dict) or not updates:
                return {"status": "error", "message": "Missing b_class or updates"}
            self.shared_data.db.update_studio_action(b_class, updates)
            return {"status": "ok", "message": "Action updated"}
        except Exception as e:
            self.logger.error(f"studio_update_action error: {e}")
            return {"status": "error", "message": str(e)}

    def studio_upsert_edge(self, data: dict):
        """Create or update an edge between actions."""
        try:
            fa = (data.get('from_action') or '').strip()
            ta = (data.get('to_action') or '').strip()
            et = (data.get('edge_type') or 'requires').strip()
            md = data.get('metadata')
            if not fa or not ta:
                return {"status": "error", "message": "Missing from_action or to_action"}
            self.shared_data.db.upsert_studio_edge(fa, ta, et, md)
            return {"status": "ok", "message": "Edge upserted"}
        except Exception as e:
            self.logger.error(f"studio_upsert_edge error: {e}")
            return {"status": "error", "message": str(e)}

    def studio_delete_edge(self, data: dict):
        """Delete an edge."""
        try:
            edge_id = data.get('edge_id')
            if edge_id is None:
                return {"status": "error", "message": "Missing edge_id"}
            self.shared_data.db.delete_studio_edge(int(edge_id))
            return {"status": "ok", "message": "Edge deleted"}
        except Exception as e:
            self.logger.error(f"studio_delete_edge error: {e}")
            return {"status": "error", "message": str(e)}

    def studio_upsert_host(self, data: dict):
        """Create or update a simulated host."""
        try:
            mac = (data.get('mac_address') or '').strip()
            payload = data.get('data') or {}
            if not mac or not isinstance(payload, dict):
                return {"status": "error", "message": "Missing mac_address or data"}
            self.shared_data.db.upsert_studio_host(mac, payload)
            return {"status": "ok", "message": "Host upserted"}
        except Exception as e:
            self.logger.error(f"studio_upsert_host error: {e}")
            return {"status": "error", "message": str(e)}

    def studio_save_layout(self, data: dict):
        """Save a studio layout."""
        try:
            name = (data.get('name') or '').strip()
            layout_data = data.get('layout_data')
            desc = data.get('description')
            if not name or layout_data is None:
                return {"status": "error", "message": "Missing name or layout_data"}
            self.shared_data.db.save_studio_layout(name, layout_data, desc)
            return {"status": "ok", "message": "Layout saved"}
        except Exception as e:
            self.logger.error(f"studio_save_layout error: {e}")
            return {"status": "error", "message": str(e)}

    def studio_apply_to_runtime(self):
        """Apply studio settings to runtime actions."""
        try:
            self.shared_data.db.apply_studio_to_runtime()
            return {"status": "ok", "message": "Studio configuration applied to runtime actions"}
        except Exception as e:
            self.logger.error(f"studio_apply_to_runtime error: {e}")
            return {"status": "error", "message": str(e)}

    def studio_save_bundle(self, data: dict):
        """Save complete studio state (actions, edges, layout)."""
        try:
            actions = data.get('actions') or []
            edges = data.get('edges') or []
            layout = data.get('layout') or {}

            # Update action positions and properties
            for a in actions:
                b_class = (a.get('b_class') or '').strip()
                if not b_class:
                    continue
                updates = {}
                for k in ('studio_x', 'studio_y', 'b_module', 'b_status', 'b_action', 'b_enabled',
                          'b_priority', 'b_timeout', 'b_max_retries', 'b_cooldown', 'b_rate_limit',
                          'b_port', 'b_service', 'b_tags', 'b_trigger', 'b_requires'):
                    if k in a and a[k] is not None:
                        updates[k] = a[k]
                if updates:
                    self.shared_data.db.update_studio_action(b_class, updates)

            # Upsert edges
            for e in edges:
                fa = (e.get('from_action') or '').strip()
                ta = (e.get('to_action') or '').strip()
                et = (e.get('edge_type') or 'requires').strip()
                if fa and ta:
                    self.shared_data.db.upsert_studio_edge(fa, ta, et, e.get('metadata'))

            # Save layout
            try:
                self.shared_data.db.save_studio_layout('autosave', layout, 'autosave from UI')
            except Exception:
                pass

            return {"status": "ok", "message": "Studio saved"}
        except Exception as e:
            self.logger.error(f"studio_save_bundle error: {e}")
            return {"status": "error", "message": str(e)}

    def studio_upsert_host_flat(self, data: dict):
        """Upsert host with flat data structure."""
        try:
            mac = (data.get('mac_address') or '').strip()
            if not mac:
                return {"status": "error", "message": "Missing mac_address"}

            payload = {
                "hostname": data.get('hostname'),
                "ips": data.get('ips'),
                "ports": data.get('ports'),
                "services": data.get('services'),
                "vulns": data.get('vulns'),
                "creds": data.get('creds'),
                "alive": data.get('alive'),
                "is_simulated": data.get('is_simulated', 1),
            }
            self.shared_data.db.upsert_studio_host(mac, payload)
            return {"status": "ok", "message": "Host upserted"}
        except Exception as e:
            self.logger.error(f"studio_upsert_host_flat error: {e}")
            return {"status": "error", "message": str(e)}

    def studio_delete_host(self, data: dict):
        """Delete a studio host."""
        try:
            mac = (data.get('mac_address') or '').strip()
            if not mac:
                return {"status": "error", "message": "Missing mac_address"}
            self.shared_data.db.delete_studio_host(mac)
            return {"status": "ok", "message": "Host deleted"}
        except Exception as e:
            self.logger.error(f"studio_delete_host error: {e}")
            return {"status": "error", "message": str(e)}

    def _write_json(self, handler, obj: dict, code: int = 200):
        """Write JSON response."""
        handler.send_response(code)
        handler.send_header('Content-Type', 'application/json')
        handler.end_headers()
        handler.wfile.write(json.dumps(obj).encode('utf-8'))
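studio_save_bundle expects the UI to post the whole graph at once. The payload shape implied by the parsing above (all field values illustrative; studio_utils is assumed to be a wired-up StudioUtils instance):

bundle = {
    "actions": [
        {"b_class": "SSHBruteforce", "studio_x": 120, "studio_y": 80,
         "b_enabled": 1, "b_priority": 40},
    ],
    "edges": [
        {"from_action": "NetworkScanner", "to_action": "SSHBruteforce",
         "edge_type": "requires", "metadata": None},
    ],
    "layout": {"zoom": 1.0, "pan": [0, 0]},
}
result = studio_utils.studio_save_bundle(bundle)  # {"status": "ok", ...}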
359
web_utils/system_utils.py
Normal file
@@ -0,0 +1,359 @@
# web_utils/system_utils.py
"""
System utilities for management operations.
Handles system commands, service management, and configuration.
"""
from __future__ import annotations
import json
import subprocess
import logging
import os
import time
from typing import Any, Dict, Optional
from logger import Logger


logger = Logger(name="system_utils.py", level=logging.DEBUG)

class SystemUtils:
    """Utilities for system-level operations."""

    def __init__(self, shared_data):
        self.logger = logger
        self.shared_data = shared_data

    def reboot_system(self, handler):
        """Reboot the system."""
        try:
            command = "sudo reboot"
            subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
            handler.send_response(200)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "success", "message": "System is rebooting"}).encode('utf-8'))
        except Exception as e:
            # Popen never raises CalledProcessError; catch any launch failure instead
            handler.send_response(500)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode('utf-8'))

    def shutdown_system(self, handler):
        """Shutdown the system."""
        try:
            command = "sudo shutdown now"
            subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
            handler.send_response(200)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "success", "message": "System is shutting down"}).encode('utf-8'))
        except Exception as e:
            handler.send_response(500)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode('utf-8'))

    def restart_bjorn_service(self, handler):
        """Restart the Bjorn service."""
        if not hasattr(handler, 'send_response'):
            raise TypeError("Invalid handler passed. Expected an HTTP handler.")

        try:
            command = "sudo systemctl restart bjorn.service"
            subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
            handler.send_response(200)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "success", "message": "Bjorn service restarted successfully"}).encode('utf-8'))
        except Exception as e:
            handler.send_response(500)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode('utf-8'))
    def clear_logs(self, handler):
        """Clear logs directory contents."""
        try:
            command = "sudo rm -rf data/logs/*"
            subprocess.Popen(command, shell=True)
            handler.send_response(200)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "success", "message": "Logs cleared successfully"}).encode('utf-8'))
        except Exception as e:
            handler.send_response(500)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode('utf-8'))

    def initialize_db(self, handler):
        """Initialize or prepare database schema."""
        try:
            self.shared_data.sync_actions_to_database()
            self.shared_data.initialize_database()
            self.shared_data.initialize_statistics()
            handler.send_response(200)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "success", "message": "Database initialized successfully"}).encode("utf-8"))
        except Exception as e:
            handler.send_response(500)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode("utf-8"))

    def erase_bjorn_memories(self, handler):
        """Erase all Bjorn-related memories and restart service."""
        try:
            # Import file_utils for clear operations
            from web_utils.file_utils import FileUtils
            file_utils = FileUtils(self.logger, self.shared_data)

            # Clear various components
            file_utils.clear_output_folder(handler)
            self.clear_netkb(handler, restart=False)
            self.clear_livestatus(handler, restart=False)
            self.clear_actions_file(handler, restart=False)
            self.clear_shared_config_json(handler, restart=False)
            self.clear_logs(handler)

            # Restart service once at the end
            self.logger.debug("Restarting Bjorn service after clearing memories...")
            self.restart_bjorn_service(handler)

            self.logger.info("Bjorn memories erased and service restarted successfully.")
            handler.send_response(200)
            handler.send_header('Content-type', 'application/json')
            handler.end_headers()
            handler.wfile.write(json.dumps({
                "status": "success",
                "message": "Bjorn memories erased and service restarted successfully."
            }).encode('utf-8'))

        except Exception as e:
            self.logger.error(f"Error erasing Bjorn memories: {str(e)}")
            handler.send_response(500)
            handler.send_header('Content-type', 'application/json')
            handler.end_headers()
            handler.wfile.write(json.dumps({
                "status": "error",
                "message": f"Error erasing Bjorn memories: {str(e)}"
            }).encode('utf-8'))

    def clear_netkb(self, handler, restart=True):
        """Clear network knowledge base in database."""
        try:
            db = self.shared_data.db
            db.execute("DELETE FROM action_results;")
            db.execute("DELETE FROM hosts;")
            db.update_livestats(0, 0, 0, 0)
            if restart:
                self.restart_bjorn_service(handler)
            handler.send_response(200)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "success", "message": "NetKB cleared in database"}).encode("utf-8"))
        except Exception as e:
            handler.send_response(500)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode("utf-8"))

    def clear_livestatus(self, handler, restart=True):
        """Clear live status counters."""
        try:
            self.shared_data.db.update_livestats(0, 0, 0, 0)
            if restart:
                self.restart_bjorn_service(handler)
            handler.send_response(200)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "success", "message": "Livestatus counters reset"}).encode("utf-8"))
        except Exception as e:
            handler.send_response(500)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode("utf-8"))

    def clear_actions_file(self, handler, restart=True):
        """Clear actions table and resynchronize from modules."""
        try:
            self.shared_data.db.execute("DELETE FROM actions;")
            self.shared_data.generate_actions_json()
            if restart:
                self.restart_bjorn_service(handler)
            handler.send_response(200)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "success", "message": "Actions table refreshed"}).encode("utf-8"))
        except Exception as e:
            handler.send_response(500)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode("utf-8"))

    def clear_shared_config_json(self, handler, restart=True):
        """Reset configuration to defaults."""
        try:
            self.shared_data.config = self.shared_data.get_default_config()
            self.shared_data.save_config()
            if restart:
                self.restart_bjorn_service(handler)
            handler.send_response(200)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "success", "message": "Configuration reset to defaults"}).encode("utf-8"))
        except Exception as e:
            handler.send_response(500)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode("utf-8"))

    def save_configuration(self, data):
        """Save configuration to database."""
        try:
            if not isinstance(data, dict):
                return {"status": "error", "message": "Invalid data format: expected dictionary"}

            cfg = dict(self.shared_data.config)
            for k, v in data.items():
                if isinstance(v, bool):
                    cfg[k] = v
                elif isinstance(v, str) and v.lower() in ('true', 'false'):
                    cfg[k] = (v.lower() == 'true')
                elif isinstance(v, (int, float)):
                    cfg[k] = v
                elif isinstance(v, list) or v is None:
                    cfg[k] = [] if v is None else [x for x in v if x != ""]
                elif isinstance(v, str):
                    cfg[k] = float(v) if v.replace('.', '', 1).isdigit() and '.' in v else (int(v) if v.isdigit() else v)
                else:
                    cfg[k] = v

            self.shared_data.config = cfg
            self.shared_data.save_config()
            self.shared_data.load_config()
            return {"status": "success", "message": "Configuration saved"}
        except Exception as e:
            self.logger.error(f"Error saving configuration: {e}")
            return {"status": "error", "message": str(e)}
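    # Coercion behaviour of save_configuration, for reference (keys hypothetical,
    # not taken from the actual config schema):
    #   {"epd_enabled": "true"}  -> cfg["epd_enabled"] is True
    #   {"scan_interval": "30"}  -> cfg["scan_interval"] == 30
    #   {"threshold": "0.75"}    -> cfg["threshold"] == 0.75
    #   {"portlist": None}       -> cfg["portlist"] == []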
    def serve_current_config(self, handler):
        """Serve current configuration as JSON."""
        handler.send_response(200)
        handler.send_header("Content-type", "application/json")
        handler.end_headers()
        handler.wfile.write(json.dumps(self.shared_data.config).encode('utf-8'))

    def restore_default_config(self, handler):
        """Restore default configuration."""
        handler.send_response(200)
        handler.send_header("Content-type", "application/json")
        handler.end_headers()
        self.shared_data.config = self.shared_data.default_config.copy()
        self.shared_data.save_config()
        handler.wfile.write(json.dumps(self.shared_data.config).encode('utf-8'))

    def serve_logs(self, handler):
        """Serve logs for web console."""
        try:
            log_file_path = self.shared_data.webconsolelog
            if not os.path.exists(log_file_path):
                subprocess.Popen(f"sudo tail -f /home/bjorn/Bjorn/data/logs/* > {log_file_path}", shell=True)

            with open(log_file_path, 'r') as log_file:
                log_lines = log_file.readlines()

            max_lines = 2000
            if len(log_lines) > max_lines:
                log_lines = log_lines[-max_lines:]
                with open(log_file_path, 'w') as log_file:
                    log_file.writelines(log_lines)

            log_data = ''.join(log_lines)

            handler.send_response(200)
            handler.send_header("Content-type", "text/plain")
            handler.end_headers()
            handler.wfile.write(log_data.encode('utf-8'))
        except BrokenPipeError:
            pass
        except Exception as e:
            handler.send_response(500)
            handler.send_header("Content-type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps({"status": "error", "message": str(e)}).encode('utf-8'))

    def sse_log_stream(self, handler):
        """Stream logs using Server-Sent Events (SSE)."""
        try:
            handler.send_response(200)
            handler.send_header("Content-Type", "text/event-stream")
            handler.send_header("Cache-Control", "no-cache")
            handler.send_header("Connection", "keep-alive")
            handler.send_header("Access-Control-Allow-Origin", "*")
            handler.end_headers()

            log_file_path = self.shared_data.log_file

            handler.wfile.write(b"data: Connected\n\n")
            handler.wfile.flush()

            with open(log_file_path, 'r') as log_file:
                log_file.seek(0, os.SEEK_END)
                while True:
                    line = log_file.readline()
                    if line:
                        message = f"data: {line.strip()}\n\n"
                        handler.wfile.write(message.encode('utf-8'))
                        handler.wfile.flush()
                    else:
                        handler.wfile.write(b": heartbeat\n\n")
                        handler.wfile.flush()
                        time.sleep(1)

        except (ConnectionResetError, ConnectionAbortedError, BrokenPipeError):
            self.logger.info("Client disconnected from SSE stream")
        except Exception as e:
            self.logger.error(f"SSE Error: {e}")
        finally:
            self.logger.info("SSE stream closed")

    def serve_bjorn_status(self, handler):
        """Serve Bjorn status information."""
        try:
            status_data = {
                "status": self.shared_data.bjorn_orch_status,
                "status2": self.shared_data.bjorn_status_text2,
                "image_path": "/bjorn_status_image?t=" + str(int(time.time()))
            }

            handler.send_response(200)
            handler.send_header("Content-Type", "application/json")
            handler.end_headers()
            handler.wfile.write(json.dumps(status_data).encode('utf-8'))
        except BrokenPipeError:
            pass
        except Exception as e:
            self.logger.error(f"Error in serve_bjorn_status: {str(e)}")

    def check_manual_mode(self, handler):
        """Check if manual mode is enabled."""
        try:
            handler.send_response(200)
            handler.send_header("Content-type", "text/plain")
            handler.end_headers()
            handler.wfile.write(str(self.shared_data.manual_mode).encode('utf-8'))
        except Exception:
            handler.send_response(500)
            handler.end_headers()

    def check_console_autostart(self, handler):
        """Check console autostart setting."""
        try:
            handler.send_response(200)
            handler.send_header("Content-type", "text/plain")
            handler.end_headers()
            handler.wfile.write(str(self.shared_data.consoleonwebstart).encode('utf-8'))
        except Exception:
            handler.send_response(500)
            handler.end_headers()
583
web_utils/vuln_utils.py
Normal file
583
web_utils/vuln_utils.py
Normal file
@@ -0,0 +1,583 @@
# web_utils/vuln_utils.py
"""
Vulnerability management and CVE enrichment utilities.
Handles vulnerability data, CVE metadata, and enrichment from external sources.
Optimized for low-power devices like Raspberry Pi Zero.
"""
from __future__ import annotations

import json
import time
import urllib.request
import urllib.parse
from typing import Any, Dict, Optional, List, Union
from urllib.parse import urlparse, parse_qs

import logging
from logger import Logger
logger = Logger(name="vuln_utils.py", level=logging.DEBUG)

class CveEnricherOptimized:
    """Optimized CVE enricher for Raspberry Pi Zero."""

    def __init__(self, shared_data):
        self.shared = shared_data
        self.db = shared_data.db
        self._kev_index = set()
        self._last_kev_refresh = 0
        self._kev_ttl = 24 * 3600        # refresh the KEV index at most daily
        self._nvd_ttl = 48 * 3600        # treat cached NVD rows as fresh for 48h
        self._cache_enabled = True
        self._max_parallel_requests = 1  # Pi Zero: strictly sequential fetches

    def get(self, cve_id: str, use_cache_only: bool = False) -> Dict[str, Any]:
        """Retrieve CVE metadata with aggressive caching."""
        try:
            row = self.db.get_cve_meta(cve_id)
        except Exception:
            row = None

        if row:
            try:
                age = time.time() - int(row.get("updated_at") or 0)
            except Exception:
                age = 0
            # Serve the cached row unless it is well past its TTL.
            if use_cache_only or age < self._nvd_ttl * 2:
                return self._format_cached_row(row)

        if use_cache_only:
            return self._get_minimal_cve_data(cve_id)

        try:
            nvd = self._fetch_nvd_minimal(cve_id)
            if nvd:
                data = {
                    "cve_id": cve_id,
                    "description": nvd.get("description", f"{cve_id} vulnerability"),
                    "cvss": nvd.get("cvss"),
                    "references": nvd.get("references", [])[:3],
                    "lastModified": nvd.get("lastModified"),
                    "affected": [],
                    "exploits": [],
                    "is_kev": False,
                    "epss": None,
                    "epss_percentile": None,
                    "updated_at": time.time(),
                }
                try:
                    self.db.upsert_cve_meta(data)
                except Exception:
                    logger.debug("Failed to upsert cve_meta for %s", cve_id, exc_info=True)
                return data
        except Exception:
            logger.debug("NVD fetch failed for %s", cve_id, exc_info=True)

        return self._get_minimal_cve_data(cve_id)

    def get_bulk(self, cve_ids: List[str], max_fetch: int = 5) -> Dict[str, Dict[str, Any]]:
        """Bulk retrieval optimized for Pi Zero."""
        if not cve_ids:
            return {}

        # dedupe and cap
        cve_ids = list(dict.fromkeys(cve_ids))[:50]
        result: Dict[str, Dict[str, Any]] = {}

        try:
            cached = self.db.get_cve_meta_bulk(cve_ids) or {}
            for cid, row in cached.items():
                result[cid] = self._format_cached_row(row)
        except Exception:
            logger.debug("Bulk DB fetch failed", exc_info=True)
            cached = {}

        missing = [c for c in cve_ids if c not in result]

        to_fetch = missing[:max_fetch]
        for cid in to_fetch:
            try:
                data = self.get(cid, use_cache_only=False)
                if data:
                    result[cid] = data
            except Exception:
                logger.debug("Failed to fetch CVE %s", cid, exc_info=True)

        # For the rest, return minimal stubs
        for cid in missing[max_fetch:]:
            result[cid] = self._get_minimal_cve_data(cid)

        return result

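    # --- Example (not part of the commit): typical bulk enrichment, assuming a
    # wired `shared_data` with a db adapter. Only `max_fetch` CVEs hit the
    # network; the rest come from cache or fall back to minimal stubs.
    #
    #     enricher = CveEnricherOptimized(shared_data)
    #     meta = enricher.get_bulk(["CVE-2021-44228", "CVE-2014-0160"], max_fetch=2)
    #     for cid, info in meta.items():
    #         print(cid, (info.get("cvss") or {}).get("baseScore"))
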
    def _fetch_nvd_minimal(self, cve_id: str) -> Dict[str, Any]:
        """Fetch NVD with short timeout and minimal data."""
        url = f"https://services.nvd.nist.gov/rest/json/cves/2.0?cveId={urllib.parse.quote(cve_id)}"

        try:
            req = urllib.request.Request(url)
            # Short timeout: a slow NVD response must not stall the Pi Zero.
            with urllib.request.urlopen(req, timeout=5) as r:
                data = json.loads(r.read().decode("utf-8"))

            vulns = data.get("vulnerabilities", [])
            if not vulns:
                return {}

            cve = vulns[0].get("cve", {})

            metrics = cve.get("metrics", {})
            cvss = None
            # Prefer CVSS v3.1 metrics, fall back to v2.
            if "cvssMetricV31" in metrics and metrics["cvssMetricV31"]:
                cvss = metrics["cvssMetricV31"][0].get("cvssData")
            elif "cvssMetricV2" in metrics and metrics["cvssMetricV2"]:
                cvss = metrics["cvssMetricV2"][0].get("cvssData")

            desc = ""
            if cve.get("descriptions"):
                desc = cve["descriptions"][0].get("value", "")[:500]

            # references minimal - leave empty for now (can be enriched later)
            return {
                "description": desc,
                "cvss": cvss,
                "references": [],
                "lastModified": cve.get("lastModified"),
            }
        except Exception:
            logger.debug("Error fetching NVD for %s", cve_id, exc_info=True)
            return {}

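    # --- Note (not part of the commit): the lookups above follow the NVD API
    # 2.0 response shape, roughly:
    #
    #     {"vulnerabilities": [{"cve": {
    #         "descriptions": [{"lang": "en", "value": "..."}],
    #         "metrics": {"cvssMetricV31": [{"cvssData": {"baseScore": 9.8,
    #                                                     "baseSeverity": "CRITICAL"}}]},
    #         "lastModified": "..."}}]}
    #
    # NVD rate-limits unauthenticated clients, which is another reason the
    # enricher caches aggressively and fetches sequentially.
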
    def _format_cached_row(self, row: Dict[str, Any]) -> Dict[str, Any]:
        """Format a cached DB row into the API shape."""
        return {
            "cve_id": row.get("cve_id"),
            "description": row.get("description", ""),
            "cvss": row.get("cvss_json"),
            "references": row.get("references_json", []) or [],
            "lastModified": row.get("last_modified"),
            "affected": row.get("affected_json", []) or [],
            "solution": row.get("solution"),
            "exploits": row.get("exploits_json", []) or [],
            "is_kev": bool(row.get("is_kev")),
            "epss": row.get("epss"),
            "epss_percentile": row.get("epss_percentile"),
            "updated_at": row.get("updated_at"),
        }

    def _get_minimal_cve_data(self, cve_id: str) -> Dict[str, Any]:
        """Return minimal data without fetching external sources."""
        year = "2020"
        try:
            # CVE IDs look like CVE-YYYY-NNNN; the year is the second field.
            parts = cve_id.split("-")
            if len(parts) >= 2:
                year = parts[1]
        except Exception:
            year = "2020"

        # simple heuristic: newer CVEs get a higher default severity
        try:
            year_int = int(year)
        except Exception:
            year_int = 2020

        if year_int >= 2024:
            severity = "high"
            score = 7.5
        elif year_int >= 2023:
            severity = "medium"
            score = 5.5
        else:
            severity = "low"
            score = 3.5

        return {
            "cve_id": cve_id,
            "description": f"{cve_id} - Security vulnerability",
            "cvss": {"baseScore": score, "baseSeverity": severity.upper()},
            "references": [],
            "affected": [],
            "exploits": [],
            "is_kev": False,
            "epss": None,
            "updated_at": time.time(),
        }


class VulnUtils:
    """Utilities for vulnerability management."""

    def __init__(self, shared_data):
        self.logger = logger
        self.shared_data = shared_data
        self.cve_enricher = CveEnricherOptimized(shared_data) if shared_data else None

    # Helper to write JSON responses
    @staticmethod
    def _send_json(handler, status: int, payload: Any, cache_max_age: Optional[int] = None) -> None:
        try:
            handler.send_response(status)
            handler.send_header("Content-Type", "application/json")
            if cache_max_age is not None:
                handler.send_header("Cache-Control", f"max-age={int(cache_max_age)}")
            handler.end_headers()
            handler.wfile.write(json.dumps(payload).encode("utf-8"))
        except Exception:
            # If writing response fails, log locally (can't do much else)
            logger.exception("Failed to send JSON response")

    def serve_vulns_data_optimized(self, handler) -> None:
        """Optimized API for vulnerabilities with pagination and caching."""
        try:
            parsed = urlparse(handler.path)
            params = parse_qs(parsed.query)

            page = int(params.get("page", ["1"])[0])
            limit = int(params.get("limit", ["50"])[0])
            offset = max((page - 1) * limit, 0)

            db = self.shared_data.db
            vulns = db.query(
                """
                SELECT
                    v.id,
                    v.mac_address,
                    v.ip,
                    v.hostname,
                    v.port,
                    v.vuln_id,
                    v.is_active,
                    v.first_seen,
                    v.last_seen,
                    h.vendor AS host_vendor,
                    h.ips AS current_ips
                FROM vulnerabilities v
                LEFT JOIN hosts h ON v.mac_address = h.mac_address
                WHERE v.is_active = 1
                ORDER BY v.last_seen DESC
                LIMIT ? OFFSET ?
                """,
                (limit, offset),
            )

            total_row = db.query_one("SELECT COUNT(*) as total FROM vulnerabilities WHERE is_active=1")
            total = total_row["total"] if total_row else 0

            cve_ids = [v["vuln_id"] for v in vulns if (v.get("vuln_id") or "").startswith("CVE-")]

            meta = {}
            if self.cve_enricher and cve_ids:
                # try to use DB bulk first (fast)
                try:
                    meta = db.get_cve_meta_bulk(cve_ids[:20]) or {}
                except Exception:
                    logger.debug("DB bulk meta fetch failed", exc_info=True)
                    meta = {}

            # enrich list
            for vuln in vulns:
                vid = (vuln.get("vuln_id") or "").strip()
                m = meta.get(vid)
                if m:
                    vuln["severity"] = self._get_severity_from_cvss(m.get("cvss_json"))
                    vuln["cvss_score"] = self._extract_cvss_score(m.get("cvss_json"))
                    vuln["description"] = (m.get("description") or "")[:200]
                    vuln["is_kev"] = bool(m.get("is_kev"))
                    vuln["epss"] = m.get("epss")
                else:
                    vuln["severity"] = vuln.get("severity") or "medium"
                    vuln["cvss_score"] = vuln.get("cvss_score")
                    vuln["description"] = vuln.get("description") or f"{vid} vulnerability"
                    vuln["is_kev"] = False
                    vuln["epss"] = None

            response = {
                "vulnerabilities": vulns,
                "pagination": {
                    "page": page,
                    "limit": limit,
                    "total": total,
                    "pages": (total + limit - 1) // limit if limit > 0 else 0,
                },
            }

            self._send_json(handler, 200, response, cache_max_age=10)

        except Exception as e:
            logger.exception("serve_vulns_data_optimized failed")
            self._send_json(handler, 500, {"error": str(e)})

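    # --- Example (not part of the commit): paging through this endpoint. The
    # route and port are assumptions; adjust to the actual server wiring.
    #
    #     curl "http://localhost:8000/api/vulns?page=2&limit=25"
    #
    # Response shape: {"vulnerabilities": [...], "pagination": {"page": 2,
    # "limit": 25, "total": ..., "pages": ...}}
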
    def fix_vulns_data(self, handler) -> None:
        """Fix vulnerability data inconsistencies."""
        try:
            db = self.shared_data.db
            fixed_count = 0

            # Rows whose ip/hostname were lost; recover them from the hosts table.
            vulns_to_fix = db.query(
                """
                SELECT v.id, v.mac_address, h.ips, h.hostnames
                FROM vulnerabilities v
                LEFT JOIN hosts h ON v.mac_address = h.mac_address
                WHERE (v.ip IS NULL OR v.ip = 'NULL' OR v.ip = '')
                   OR (v.hostname IS NULL OR v.hostname = 'NULL' OR v.hostname = '')
                """
            )

            for vuln in vulns_to_fix:
                if vuln.get("ips") or vuln.get("hostnames"):
                    # hosts stores ';'-separated lists; take the first entry.
                    ip = vuln["ips"].split(";")[0] if vuln.get("ips") else None
                    hostname = vuln["hostnames"].split(";")[0] if vuln.get("hostnames") else None

                    db.execute(
                        """
                        UPDATE vulnerabilities
                        SET ip = ?, hostname = ?
                        WHERE id = ?
                        """,
                        (ip, hostname, vuln["id"]),
                    )

                    fixed_count += 1

            db.execute("UPDATE vulnerabilities SET port = 0 WHERE port IS NULL")

            # Drop duplicate (mac, vuln, port) rows, keeping the lowest rowid.
            db.execute(
                """
                DELETE FROM vulnerabilities
                WHERE rowid NOT IN (
                    SELECT MIN(rowid)
                    FROM vulnerabilities
                    GROUP BY mac_address, vuln_id, port
                )
                """
            )

            response = {
                "status": "success",
                "message": f"Fixed {fixed_count} vulnerability entries",
                "fixed_count": fixed_count,
            }

            self._send_json(handler, 200, response)

        except Exception as e:
            logger.exception("fix_vulns_data failed")
            self._send_json(handler, 500, {"status": "error", "message": str(e)})

    def get_vuln_enrichment_status(self, handler) -> None:
        """Check CVE enrichment status."""
        try:
            stats = self.shared_data.db.query_one(
                """
                SELECT
                    COUNT(DISTINCT v.vuln_id) as total_cves,
                    COUNT(DISTINCT c.cve_id) as enriched_cves
                FROM vulnerabilities v
                LEFT JOIN cve_meta c ON v.vuln_id = c.cve_id
                WHERE v.vuln_id LIKE 'CVE-%'
                """
            )

            total = stats["total_cves"] or 0
            enriched = stats["enriched_cves"] or 0

            response = {
                "total_cves": total,
                "enriched_cves": enriched,
                "missing": total - enriched,
                "percentage": round(enriched / total * 100, 2) if total > 0 else 0,
            }

            self._send_json(handler, 200, response, cache_max_age=30)

        except Exception as e:
            logger.exception("get_vuln_enrichment_status failed")
            self._send_json(handler, 500, {"error": str(e)})

    def serve_vuln_history(self, handler) -> None:
        """Get vulnerability history with filters."""
        try:
            db = self.shared_data.db
            qs = parse_qs(urlparse(handler.path).query or "")
            cve = (qs.get("cve") or [None])[0]
            mac = (qs.get("mac") or [None])[0]
            try:
                limit = int((qs.get("limit") or ["500"])[0])
            except Exception:
                limit = 500

            rows = db.list_vulnerability_history(cve_id=cve, mac=mac, limit=limit)
            self._send_json(handler, 200, {"history": rows})
        except Exception as e:
            logger.exception("serve_vuln_history failed")
            self._send_json(handler, 500, {"status": "error", "message": str(e)})

    def serve_cve_details(self, handler, cve_id: str) -> None:
        """Get detailed CVE information."""
        try:
            # prefer explicit cve_id param, fallback to path parsing
            cve = cve_id or handler.path.rsplit("/", 1)[-1]
            data = self.cve_enricher.get(cve, use_cache_only=False) if self.cve_enricher else {}

            self._send_json(handler, 200, data)
        except Exception as e:
            logger.exception("serve_cve_details failed")
            self._send_json(handler, 500, {"error": str(e)})

    def serve_cve_bulk(self, handler, data: Dict[str, Any]) -> None:
        """Bulk CVE enrichment."""
        try:
            cves = data.get("cves") or []
            merged = self.cve_enricher.get_bulk(cves) if self.cve_enricher else {}
            self._send_json(handler, 200, {"cves": merged})
        except Exception as e:
            logger.exception("serve_cve_bulk failed")
            self._send_json(handler, 500, {"status": "error", "message": str(e)})

    def serve_exploitdb_by_cve(self, handler, cve_id: str) -> None:
        """Get Exploit-DB entries for a CVE."""
        try:
            data = self.cve_enricher.get(cve_id) if self.cve_enricher else {}
            exploits = data.get("exploits") or []
            self._send_json(handler, 200, {"exploits": exploits})
        except Exception as e:
            logger.exception("serve_exploitdb_by_cve failed")
            self._send_json(handler, 500, {"status": "error", "message": str(e)})

    def _get_severity_from_cvss(self, cvss_json: Union[str, Dict[str, Any], None]) -> str:
        """Extract severity from CVSS data."""
        if not cvss_json:
            return "medium"

        try:
            if isinstance(cvss_json, str):
                cvss = json.loads(cvss_json)
            else:
                cvss = cvss_json

            if not isinstance(cvss, dict):
                return "medium"

            if "baseSeverity" in cvss and cvss.get("baseSeverity"):
                return (cvss["baseSeverity"] or "medium").lower()

            # Thresholds follow the CVSS v3.x qualitative severity scale.
            if "baseScore" in cvss:
                score = float(cvss.get("baseScore", 0))
                if score >= 9.0:
                    return "critical"
                elif score >= 7.0:
                    return "high"
                elif score >= 4.0:
                    return "medium"
                else:
                    return "low"
        except Exception:
            logger.debug("Failed to parse cvss_json", exc_info=True)

        return "medium"

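    # --- Sanity checks (not part of the commit), given the mapping above:
    #
    #     _get_severity_from_cvss('{"baseScore": 9.8}')       # -> "critical"
    #     _get_severity_from_cvss({"baseSeverity": "HIGH"})   # -> "high"
    #     _get_severity_from_cvss(None)                       # -> "medium" (default)
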
    def _extract_cvss_score(self, cvss_json: Union[str, Dict[str, Any], None]) -> Optional[float]:
        """Extract CVSS score."""
        if not cvss_json:
            return None

        try:
            if isinstance(cvss_json, str):
                cvss = json.loads(cvss_json)
            else:
                cvss = cvss_json

            if isinstance(cvss, dict):
                return float(cvss.get("baseScore", 0) or 0)
        except Exception:
            logger.debug("Failed to extract cvss score", exc_info=True)

        return None

    def serve_vulns_data(self, handler) -> None:
        """Serve vulnerability data as JSON with server-side enrichment."""
        try:
            vulns = self.shared_data.db.get_all_vulns() or []

            cve_ids: List[str] = []
            for v in vulns:
                vid = (v.get("vuln_id") or "").strip()
                if vid.startswith("CVE-"):
                    cve_ids.append(vid)

            meta = {}
            if self.cve_enricher and cve_ids:
                meta = self.cve_enricher.get_bulk(cve_ids)

            for vuln in vulns:
                vid = (vuln.get("vuln_id") or "").strip()
                m = meta.get(vid)
                if m:
                    # cvss may arrive as a dict or a JSON string; normalize it.
                    cvss = m.get("cvss") or {}
                    if isinstance(cvss, str):
                        try:
                            cvss = json.loads(cvss)
                        except Exception:
                            cvss = {}
                    if not isinstance(cvss, dict):
                        cvss = {}
                    base_score = cvss.get("baseScore")
                    base_sev = (cvss.get("baseSeverity") or "").lower()

                    vuln["severity"] = base_sev or vuln.get("severity") or "medium"
                    vuln["cvss_score"] = base_score if base_score is not None else vuln.get("cvss_score") or None
                    vuln["description"] = m.get("description") or vuln.get("description") or f"{vid} vulnerability detected"
                    vuln["affected_product"] = vuln.get("affected_product") or "Unknown"
                    vuln["is_kev"] = bool(m.get("is_kev"))
                    vuln["has_exploit"] = bool(m.get("exploits"))
                    vuln["epss"] = m.get("epss")
                    vuln["epss_percentile"] = m.get("epss_percentile")
                    vuln["references"] = m.get("references") or []
                else:
                    vuln.setdefault("severity", "medium")
                    vuln.setdefault("cvss_score", 5.0)
                    vuln["is_kev"] = False
                    vuln["has_exploit"] = False
                    vuln["epss"] = None
                    vuln["epss_percentile"] = None
                    vuln["references"] = []

            self._send_json(handler, 200, vulns, cache_max_age=10)

        except Exception as e:
            logger.exception("serve_vulns_data failed")
            self._send_json(handler, 500, {"error": str(e)})

    def serve_vulns_stats(self, handler) -> None:
        """Lightweight endpoint for statistics only."""
        try:
            stats = self.shared_data.db.query_one(
                """
                SELECT
                    COUNT(*) as total,
                    COUNT(CASE WHEN is_active = 1 THEN 1 END) as active,
                    COUNT(DISTINCT mac_address) as hosts,
                    COUNT(DISTINCT CASE WHEN is_active = 1 THEN mac_address END) as active_hosts
                FROM vulnerabilities
                """
            )

            # Cheap year-based severity heuristic, mirroring _get_minimal_cve_data.
            severity_counts = self.shared_data.db.query(
                """
                SELECT
                    CASE
                        WHEN vuln_id LIKE 'CVE-2024%' THEN 'high'
                        WHEN vuln_id LIKE 'CVE-2023%' THEN 'medium'
                        WHEN vuln_id LIKE 'CVE-2022%' THEN 'low'
                        ELSE 'medium'
                    END as severity,
                    COUNT(*) as count
                FROM vulnerabilities
                WHERE is_active = 1
                GROUP BY severity
                """
            )

            response = {
                "total": stats.get("total") if stats else 0,
                "active": stats.get("active") if stats else 0,
                "hosts": stats.get("hosts") if stats else 0,
                "active_hosts": stats.get("active_hosts") if stats else 0,
                "by_severity": {row["severity"]: row["count"] for row in severity_counts} if severity_counts else {},
            }

            self._send_json(handler, 200, response, cache_max_age=10)

        except Exception as e:
            logger.exception("serve_vulns_stats failed")
            self._send_json(handler, 500, {"error": str(e)})
330
web_utils/webenum_utils.py
Normal file
330
web_utils/webenum_utils.py
Normal file
@@ -0,0 +1,330 @@
# web_utils/webenum_utils.py
from __future__ import annotations
import json
import base64
import time
from pathlib import Path
from datetime import datetime
from typing import Any, Dict, Optional, List
import logging
from logger import Logger
logger = Logger(name="webenum_utils.py", level=logging.DEBUG)

class WebEnumUtils:
    """
    REST utilities for Web Enumeration (table `webenum`).

    Resilient to missing `shared_data` at construction:
    - If `self.shared_data` is None, handlers try to read `handler.shared_data`.
    Expects a DB adapter at `shared_data.db` exposing: query, query_one, execute.
    """

    def __init__(self, shared_data):
        self.logger = logger
        self.shared_data = shared_data

        # Anti-flapping: serve a recent non-empty payload when DB hiccups
        self._last_payload: Dict[str, Any] = {}
        self._last_ts: float = 0.0
        self._snapshot_ttl: float = 8.0  # seconds

    # ---------------------- Internal helpers ----------------------

    def _resolve_shared(self, handler) -> Any:
        """Resolve SharedData from self or the HTTP handler."""
        sd = self.shared_data or getattr(handler, "shared_data", None)
        if sd is None or getattr(sd, "db", None) is None:
            # Return a clear 503 later if unavailable
            raise RuntimeError("SharedData.db is not available (wire shared_data into WebEnumUtils or handler).")
        return sd

    def _to_jsonable(self, obj):
        """Recursively coerce arbitrary objects into JSON-serializable values."""
        if obj is None or isinstance(obj, (bool, int, float, str)):
            return obj
        if isinstance(obj, Path):
            return str(obj)
        if isinstance(obj, bytes):
            # Raw bytes are wrapped in a base64 envelope.
            return {"_b64": base64.b64encode(obj).decode("ascii")}
        if isinstance(obj, datetime):
            return obj.isoformat()
        if isinstance(obj, dict):
            return {k: self._to_jsonable(v) for k, v in obj.items()}
        if isinstance(obj, (list, tuple, set)):
            return [self._to_jsonable(v) for v in obj]
        return str(obj)

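    # --- Example (not part of the commit): how non-JSON types are coerced.
    #
    #     _to_jsonable({"raw": b"\x00\x01", "when": datetime(2025, 1, 1)})
    #     # -> {"raw": {"_b64": "AAE="}, "when": "2025-01-01T00:00:00"}
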
    def _json(self, handler, code: int, obj):
        safe = self._to_jsonable(obj)
        payload = json.dumps(safe, ensure_ascii=False).encode("utf-8")
        handler.send_response(code)
        handler.send_header("Content-Type", "application/json")
        handler.send_header("Content-Length", str(len(payload)))
        handler.end_headers()
        try:
            handler.wfile.write(payload)
        except BrokenPipeError:
            pass

    # ---------------------- Stats & DB helpers ----------------------

    def _get_webenum_stats(self, db) -> Dict[str, int]:
        """Global stats for filters/summary badges."""
        try:
            stats = db.query_one("""
                SELECT
                    COUNT(*) as total_results,
                    COUNT(DISTINCT hostname) as unique_hosts,
                    COUNT(CASE WHEN status BETWEEN 200 AND 299 THEN 1 END) as success_2xx,
                    COUNT(CASE WHEN status BETWEEN 300 AND 399 THEN 1 END) as redirect_3xx,
                    COUNT(CASE WHEN status BETWEEN 400 AND 499 THEN 1 END) as client_error_4xx,
                    COUNT(CASE WHEN status >= 500 THEN 1 END) as server_error_5xx
                FROM webenum
                WHERE is_active = 1
            """) or {}
            return {
                'total_results': stats.get('total_results', 0) or 0,
                'unique_hosts': stats.get('unique_hosts', 0) or 0,
                'success_2xx': stats.get('success_2xx', 0) or 0,
                'redirect_3xx': stats.get('redirect_3xx', 0) or 0,
                'client_error_4xx': stats.get('client_error_4xx', 0) or 0,
                'server_error_5xx': stats.get('server_error_5xx', 0) or 0
            }
        except Exception as e:
            self.logger.error(f"Error getting webenum stats: {e}")
            return {
                'total_results': 0,
                'unique_hosts': 0,
                'success_2xx': 0,
                'redirect_3xx': 0,
                'client_error_4xx': 0,
                'server_error_5xx': 0
            }

    def add_webenum_result(
        self,
        db,
        mac_address: str,
        ip: str,
        hostname: Optional[str],
        port: int,
        directory: str,
        status: int,
        size: int = 0,
        response_time: int = 0,
        content_type: Optional[str] = None,
        tool: str = 'gobuster'
    ) -> None:
        """Insert/Upsert a single result into `webenum`."""
        try:
            db.execute("""
                INSERT INTO webenum (
                    mac_address, ip, hostname, port, directory, status,
                    size, response_time, content_type, tool, is_active
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, 1)
                ON CONFLICT(mac_address, ip, port, directory) DO UPDATE SET
                    status = excluded.status,
                    size = excluded.size,
                    response_time = excluded.response_time,
                    content_type = excluded.content_type,
                    hostname = COALESCE(excluded.hostname, webenum.hostname),
                    tool = COALESCE(excluded.tool, webenum.tool),
                    last_seen = CURRENT_TIMESTAMP,
                    is_active = 1
            """, (mac_address, ip, hostname, port, directory, status,
                  size, response_time, content_type, tool))
            self.logger.debug(f"Added webenum result: {ip}:{port}{directory} -> {status}")
        except Exception as e:
            self.logger.error(f"Error adding webenum result: {e}")

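    # --- Example (not part of the commit): recording one gobuster hit. The
    # MAC/IP/hostname values below are placeholders.
    #
    #     utils = WebEnumUtils(shared_data)
    #     utils.add_webenum_result(
    #         db=shared_data.db,
    #         mac_address="aa:bb:cc:dd:ee:ff",
    #         ip="192.168.1.10",
    #         hostname="printer.local",
    #         port=80,
    #         directory="/admin",
    #         status=401,
    #     )
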
    # ---------------------- REST handlers ----------------------

    def serve_webenum_data(self, handler):
        """GET /api/webenum/results : list + pagination + filters + stats."""
        try:
            sd = self._resolve_shared(handler)
            db = sd.db

            from urllib.parse import parse_qs, urlparse
            query = parse_qs(urlparse(handler.path).query)

            # Pagination
            page = max(1, int(query.get('page', ['1'])[0]))
            limit = max(1, min(500, int(query.get('limit', ['50'])[0])))
            offset = (page - 1) * limit

            # Filters
            host_filter = (query.get('host', [''])[0]).strip()
            status_filter = (query.get('status', [''])[0]).strip()
            port_filter = (query.get('port', [''])[0]).strip()
            date_filter = (query.get('date', [''])[0]).strip()
            search = (query.get('search', [''])[0]).strip()

            # WHERE construction
            where_clauses = ["is_active = 1"]
            params: List[Any] = []

            if host_filter:
                # Match either hostname or IP when the frontend sends "host"
                where_clauses.append("(hostname = ? OR ip = ?)")
                params.extend([host_filter, host_filter])

            if status_filter:
                if status_filter == '2xx':
                    where_clauses.append("status BETWEEN 200 AND 299")
                elif status_filter == '3xx':
                    where_clauses.append("status BETWEEN 300 AND 399")
                elif status_filter == '4xx':
                    where_clauses.append("status BETWEEN 400 AND 499")
                elif status_filter == '5xx':
                    where_clauses.append("status >= 500")
                else:
                    try:
                        s_val = int(status_filter)
                        where_clauses.append("status = ?")
                        params.append(s_val)
                    except ValueError:
                        pass

            if port_filter:
                try:
                    where_clauses.append("port = ?")
                    params.append(int(port_filter))
                except ValueError:
                    pass

            if date_filter:
                # expected YYYY-MM-DD
                where_clauses.append("DATE(scan_date) = ?")
                params.append(date_filter)

            if search:
                where_clauses.append("""(
                    hostname LIKE ? OR
                    ip LIKE ? OR
                    directory LIKE ? OR
                    CAST(status AS TEXT) LIKE ?
                )""")
                search_term = f"%{search}%"
                params.extend([search_term] * 4)

            where_sql = " AND ".join(where_clauses)

            # Main query — alias columns to match the frontend schema
            results = db.query(f"""
                SELECT
                    id,
                    mac_address AS mac,
                    ip,
                    COALESCE(hostname, ip) AS host,
                    port,
                    directory,
                    status,
                    size,
                    response_time,
                    content_type,
                    scan_date,
                    tool
                FROM webenum
                WHERE {where_sql}
                ORDER BY scan_date DESC, host ASC, port ASC
                LIMIT ? OFFSET ?
            """, params + [limit, offset])

            # Total for pagination
            total_row = db.query_one(f"""
                SELECT COUNT(*) AS total FROM webenum WHERE {where_sql}
            """, params) or {"total": 0}
            total = total_row.get("total", 0) or 0

            # Stats + filter options
            stats = self._get_webenum_stats(db)

            hosts = db.query("""
                SELECT DISTINCT hostname
                FROM webenum
                WHERE hostname IS NOT NULL AND hostname <> '' AND is_active = 1
                ORDER BY hostname
            """)
            ports = db.query("""
                SELECT DISTINCT port
                FROM webenum
                WHERE is_active = 1
                ORDER BY port
            """)

            payload = {
                "results": results,
                "total": total,
                "page": page,
                "limit": limit,
                "stats": stats,
                "filters": {
                    "hosts": [h['hostname'] for h in hosts if 'hostname' in h],
                    "ports": [p['port'] for p in ports if 'port' in p]
                }
            }

            # Anti-flapping: if now empty but a recent snapshot exists, return it
            now = time.time()
            if total == 0 and self._last_payload and (now - self._last_ts) <= self._snapshot_ttl:
                return self._json(handler, 200, self._last_payload)

            # Update snapshot
            self._last_payload = payload
            self._last_ts = now
            return self._json(handler, 200, payload)

        except RuntimeError as e:
            # Clear 503 when shared_data/db is not wired
            self.logger.error(str(e))
            return self._json(handler, 503, {"status": "error", "message": str(e)})
        except Exception as e:
            self.logger.error(f"Error serving webenum data: {e}")
            now = time.time()
            if self._last_payload and (now - self._last_ts) <= self._snapshot_ttl:
                self.logger.warning("/api/webenum/results fallback to snapshot after error")
                return self._json(handler, 200, self._last_payload)
            return self._json(handler, 500, {"status": "error", "message": str(e)})

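    # --- Example (not part of the commit): a filtered query against this route.
    # Host and port are assumptions; adjust to the actual server wiring.
    #
    #     curl "http://localhost:8000/api/webenum/results?status=2xx&port=80&search=admin&page=1&limit=50"
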
    def import_webenum_results(self, handler, data: Dict[str, Any]):
        """POST /api/webenum/import : bulk import {results:[...] }."""
        try:
            sd = self._resolve_shared(handler)
            db = sd.db

            results = data.get('results', []) or []
            imported = 0

            for r in results:
                # Accept both (`hostname`, `mac_address`) and (`host`, `mac`)
                hostname = r.get('hostname') or r.get('host')
                mac_address = r.get('mac_address') or r.get('mac') or ''
                self.add_webenum_result(
                    db=db,
                    mac_address=mac_address,
                    ip=r.get('ip', '') or '',
                    hostname=hostname,
                    port=int(r.get('port', 80) or 80),
                    directory=r.get('directory', '/') or '/',
                    status=int(r.get('status', 0) or 0),
                    size=int(r.get('size', 0) or 0),
                    response_time=int(r.get('response_time', 0) or 0),
                    content_type=r.get('content_type'),
                    tool=r.get('tool', 'import') or 'import'
                )
                imported += 1

            return self._json(handler, 200, {
                "status": "success",
                "message": f"Imported {imported} web enumeration results",
                "imported": imported
            })

        except RuntimeError as e:
            self.logger.error(str(e))
            return self._json(handler, 503, {"status": "error", "message": str(e)})
        except Exception as e:
            self.logger.error(f"Error importing webenum results: {e}")
            return self._json(handler, 500, {
                "status": "error",
                "message": str(e)
            })
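    # --- Example (not part of the commit): the payload shape this route accepts.
    #
    #     {"results": [{"host": "printer.local", "mac": "aa:bb:cc:dd:ee:ff",
    #                   "ip": "192.168.1.10", "port": 80, "directory": "/admin",
    #                   "status": 401, "tool": "gobuster"}]}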