feat: Add login page with dynamic RGB effects and password toggle functionality

feat: Implement package management utilities with JSON endpoints for listing and uninstalling packages

feat: Create plugin management utilities with endpoints for listing, configuring, and installing plugins

feat: Develop schedule and trigger management utilities with CRUD operations for schedules and triggers
This commit is contained in:
infinition
2026-03-19 00:40:04 +01:00
parent 3fa4d5742a
commit b0584a1a8e
176 changed files with 7795 additions and 1781 deletions

View File

@@ -1,13 +1,4 @@
# ARP Spoofer by poisoning the ARP cache of a target and a gateway.
# Saves settings (target, gateway, interface, delay) in `/home/bjorn/.settings_bjorn/arpspoofer_settings.json`.
# Automatically loads saved settings if arguments are not provided.
# -t, --target IP address of the target device (overrides saved value).
# -g, --gateway IP address of the gateway (overrides saved value).
# -i, --interface Network interface (default: primary or saved).
# -d, --delay Delay between ARP packets in seconds (default: 2 or saved).
# - First time: python arpspoofer.py -t TARGET -g GATEWAY -i INTERFACE -d DELAY
# - Subsequent: python arpspoofer.py (uses saved settings).
# - Update: Provide any argument to override saved values.
"""arp_spoofer.py - ARP cache poisoning between target and gateway (scapy)."""
import os
import json
@@ -19,7 +10,7 @@ from scapy.all import ARP, send, sr1, conf
b_class = "ARPSpoof"
b_module = "arp_spoofer"
b_enabled = 0
# Répertoire et fichier de paramètres
# Settings directory and file
SETTINGS_DIR = "/home/bjorn/.settings_bjorn"
SETTINGS_FILE = os.path.join(SETTINGS_DIR, "arpspoofer_settings.json")
@@ -29,7 +20,7 @@ class ARPSpoof:
self.gateway_ip = gateway_ip
self.interface = interface
self.delay = delay
conf.iface = self.interface # Set the interface
conf.iface = self.interface
print(f"ARPSpoof initialized with target IP: {self.target_ip}, gateway IP: {self.gateway_ip}, interface: {self.interface}, delay: {self.delay}s")
def get_mac(self, ip):
@@ -144,7 +135,7 @@ if __name__ == "__main__":
parser.add_argument("-d", "--delay", type=float, default=2, help="Delay between ARP packets in seconds (default: 2 seconds)")
args = parser.parse_args()
# Load saved settings and override with CLI arguments
# Load saved settings, override with CLI args
settings = load_settings()
target_ip = args.target or settings.get("target")
gateway_ip = args.gateway or settings.get("gateway")
@@ -155,9 +146,9 @@ if __name__ == "__main__":
print("Target and Gateway IPs are required. Use -t and -g or save them in the settings file.")
exit(1)
# Save the settings for future use
# Persist settings for future runs
save_settings(target_ip, gateway_ip, interface, delay)
# Execute the attack
# Launch ARP spoof
spoof = ARPSpoof(target_ip=target_ip, gateway_ip=gateway_ip, interface=interface, delay=delay)
spoof.execute()

View File

@@ -1,11 +1,4 @@
# Resource exhaustion testing tool for network and service stress analysis.
# Saves settings in `/home/bjorn/.settings_bjorn/berserker_force_settings.json`.
# Automatically loads saved settings if arguments are not provided.
# -t, --target Target IP or hostname to test.
# -p, --ports Ports to test (comma-separated, default: common ports).
# -m, --mode Test mode (syn, udp, http, mixed, default: mixed).
# -r, --rate Packets per second (default: 100).
# -o, --output Output directory (default: /home/bjorn/Bjorn/data/output/stress).
"""berserker_force.py - Network stress testing via SYN/UDP/HTTP floods (scapy-based)."""
import os
import json

View File

@@ -1,9 +1,4 @@
# demo_action.py
# Demonstration Action: wrapped in a DemoAction class
# ---------------------------------------------------------------------------
# Metadata (compatible with sync_actions / Neo launcher)
# ---------------------------------------------------------------------------
"""demo_action.py - Minimal action template that just prints received arguments."""
b_class = "DemoAction"
b_module = "demo_action"
b_enabled = 1

View File

@@ -1,11 +1,4 @@
# DNS Pillager for reconnaissance and enumeration of DNS infrastructure.
# Saves settings in `/home/bjorn/.settings_bjorn/dns_pillager_settings.json`.
# Automatically loads saved settings if arguments are not provided.
# -d, --domain Target domain for enumeration (overrides saved value).
# -w, --wordlist Path to subdomain wordlist (default: built-in list).
# -o, --output Output directory (default: /home/bjorn/Bjorn/data/output/dns).
# -t, --threads Number of threads for scanning (default: 10).
# -r, --recursive Enable recursive enumeration of discovered subdomains.
"""dns_pillager.py - DNS recon and subdomain enumeration with threaded brute."""
import os
import json

View File

@@ -1,11 +1,4 @@
# Data collection and organization tool to aggregate findings from other modules.
# Saves settings in `/home/bjorn/.settings_bjorn/freya_harvest_settings.json`.
# Automatically loads saved settings if arguments are not provided.
# -i, --input Input directory to monitor (default: /home/bjorn/Bjorn/data/output/).
# -o, --output Output directory for reports (default: /home/bjorn/Bjorn/data/reports).
# -f, --format Output format (json, html, md, default: all).
# -w, --watch Watch for new findings in real-time.
# -c, --clean Clean old data before processing.
"""freya_harvest.py - Aggregates findings from other modules into JSON/HTML/MD reports."""
import os
import json

View File

@@ -1,10 +1,4 @@
"""
ftp_bruteforce.py — FTP bruteforce (DB-backed, no CSV/JSON, no rich)
- Cibles: (ip, port) par l'orchestrateur
- IP -> (MAC, hostname) via DB.hosts
- Succès -> DB.creds (service='ftp')
- Conserve la logique d'origine (queue/threads, sleep éventuels, etc.)
"""
"""ftp_bruteforce.py - FTP bruteforce with DB-backed credential storage."""
import os
import threading
@@ -27,11 +21,11 @@ b_parent = None
b_service = '["ftp"]'
b_trigger = 'on_any:["on_service:ftp","on_new_port:21"]'
b_priority = 70
b_cooldown = 1800, # 30 minutes entre deux runs
b_rate_limit = '3/86400' # 3 fois par jour max
b_cooldown = 1800 # 30 min between runs
b_rate_limit = '3/86400' # max 3 per day
class FTPBruteforce:
"""Wrapper orchestrateur -> FTPConnector."""
"""Orchestrator wrapper -> FTPConnector."""
def __init__(self, shared_data):
self.shared_data = shared_data
@@ -39,13 +33,13 @@ class FTPBruteforce:
logger.info("FTPConnector initialized.")
def bruteforce_ftp(self, ip, port):
"""Lance le bruteforce FTP pour (ip, port)."""
"""Run FTP bruteforce for (ip, port)."""
return self.ftp_bruteforce.run_bruteforce(ip, port)
def execute(self, ip, port, row, status_key):
"""Point d'entrée orchestrateur (retour 'success' / 'failed')."""
"""Orchestrator entry point (returns success / failed)."""
self.shared_data.bjorn_orch_status = "FTPBruteforce"
# comportement original : un petit délai visuel
# Original behavior: small visual delay
time.sleep(5)
logger.info(f"Brute forcing FTP on {ip}:{port}...")
success, results = self.bruteforce_ftp(ip, port)
@@ -53,12 +47,11 @@ class FTPBruteforce:
class FTPConnector:
"""Gère les tentatives FTP, persistance DB, mapping IP→(MAC, Hostname)."""
"""Handles FTP attempts, DB persistence, IP->(MAC, Hostname) mapping."""
def __init__(self, shared_data):
self.shared_data = shared_data
# Wordlists inchangées
self.users = self._read_lines(shared_data.users_file)
self.passwords = self._read_lines(shared_data.passwords_file)
@@ -70,7 +63,7 @@ class FTPConnector:
self.results: List[List[str]] = [] # [mac, ip, hostname, user, password, port]
self.queue = Queue()
# ---------- util fichiers ----------
# ---------- file utils ----------
@staticmethod
def _read_lines(path: str) -> List[str]:
try:
@@ -181,7 +174,7 @@ class FTPConnector:
finally:
self.queue.task_done()
# Pause configurable entre chaque tentative FTP
# Configurable delay between FTP attempts
if getattr(self.shared_data, "timewait_ftp", 0) > 0:
time.sleep(self.shared_data.timewait_ftp)
@@ -190,7 +183,7 @@ class FTPConnector:
mac_address = self.mac_for_ip(adresse_ip)
hostname = self.hostname_for_ip(adresse_ip) or ""
total_tasks = len(self.users) * len(self.passwords) + 1 # (logique d'origine conservée)
total_tasks = len(self.users) * len(self.passwords) + 1 # (original logic preserved)
if len(self.users) * len(self.passwords) == 0:
logger.warning("No users/passwords loaded. Abort.")
return False, []

View File

@@ -1,11 +1,4 @@
# Stealth operations module for IDS/IPS evasion and traffic manipulation.
# Saves settings in `/home/bjorn/.settings_bjorn/heimdall_guard_settings.json`.
# Automatically loads saved settings if arguments are not provided.
# -i, --interface Network interface to use (default: active interface).
# -m, --mode Operating mode (timing, random, fragmented, all).
# -d, --delay Base delay between operations in seconds (default: 1).
# -r, --randomize Randomization factor for timing (default: 0.5).
# -o, --output Output directory (default: /home/bjorn/Bjorn/data/output/stealth).
"""heimdall_guard.py - IDS/IPS evasion via timing jitter, fragmentation, and traffic shaping."""
import os
import json

View File

@@ -1,3 +1,5 @@
"""idle.py - No-op placeholder action for when Bjorn has nothing to do."""
from shared import SharedData
b_class = "IDLE"

View File

@@ -1,11 +1,4 @@
# WiFi deception tool for creating malicious access points and capturing authentications.
# Saves settings in `/home/bjorn/.settings_bjorn/loki_deceiver_settings.json`.
# Automatically loads saved settings if arguments are not provided.
# -i, --interface Wireless interface for AP creation (default: wlan0).
# -s, --ssid SSID for the fake access point (or target to clone).
# -c, --channel WiFi channel (default: 6).
# -p, --password Optional password for WPA2 AP.
# -o, --output Output directory (default: /home/bjorn/Bjorn/data/output/wifi).
"""loki_deceiver.py - Rogue AP creation and WiFi auth capture (scapy/hostapd)."""
import os
import json

View File

@@ -1,9 +1,4 @@
# actions/NmapVulnScanner.py
"""
Vulnerability Scanner Action
Scanne ultra-rapidement CPE (+ CVE via vulners si dispo),
avec fallback "lourd" optionnel.
"""
"""nmap_vuln_scanner.py - CPE + CVE vulnerability scanning via nmap/vulners."""
import nmap
import json
@@ -16,7 +11,7 @@ from logger import Logger
logger = Logger(name="NmapVulnScanner.py", level=logging.DEBUG)
# Paramètres pour le scheduler (inchangés)
# Scheduler parameters
b_class = "NmapVulnScanner"
b_module = "nmap_vuln_scanner"
b_status = "NmapVulnScanner"
@@ -34,7 +29,7 @@ b_rate_limit = None
class NmapVulnScanner:
"""Scanner de vulnérabilités via nmap (mode rapide CPE/CVE)."""
"""Vulnerability scanner via nmap (fast CPE/CVE mode)."""
def __init__(self, shared_data: SharedData):
self.shared_data = shared_data
@@ -48,14 +43,14 @@ class NmapVulnScanner:
logger.info(f"Starting vulnerability scan for {ip}")
self.shared_data.bjorn_orch_status = "NmapVulnScanner"
# 1) metadata depuis la queue
# 1) metadata from the queue
meta = {}
try:
meta = json.loads(row.get('metadata') or '{}')
except Exception:
pass
# 2) récupérer ports (ordre: row -> metadata -> DB par MAC -> DB par IP)
# 2) resolve ports (order: row -> metadata -> DB by MAC -> DB by IP)
ports_str = (
row.get("Ports") or row.get("ports") or
meta.get("ports_snapshot") or ""
@@ -89,19 +84,19 @@ class NmapVulnScanner:
ports = [p.strip() for p in ports_str.split(';') if p.strip()]
mac = mac or row.get("MAC Address") or ""
# NEW: skip ports déjà scannés (sauf si TTL expiré)
# Skip already-scanned ports (unless TTL expired)
ports = self._filter_ports_already_scanned(mac, ports)
if not ports:
logger.info(f"No new/changed ports to scan for {ip}")
# touche quand même les statuts pour désactiver d'éventuelles anciennes entrées
# Still touch statuses to deactivate stale entries
self.save_vulnerabilities(mac, ip, [])
return 'success'
# Scanner (mode rapide par défaut)
# Scan (fast mode by default)
findings = self.scan_vulnerabilities(ip, ports)
# Persistance (split CVE/CPE)
# Persistence (split CVE/CPE)
self.save_vulnerabilities(mac, ip, findings)
logger.success(f"Vuln scan done on {ip}: {len(findings)} entries")
return 'success'
@@ -112,18 +107,18 @@ class NmapVulnScanner:
def _filter_ports_already_scanned(self, mac: str, ports: List[str]) -> List[str]:
"""
Retourne la liste des ports à scanner en excluant ceux déjà scannés récemment.
- Config:
Return ports to scan, excluding recently scanned ones.
Config:
vuln_rescan_on_change_only (bool, default True)
vuln_rescan_ttl_seconds (int, 0 = désactivé)
vuln_rescan_ttl_seconds (int, 0 = disabled)
"""
if not ports:
return []
if not bool(self.shared_data.config.get('vuln_rescan_on_change_only', True)):
return ports # pas de filtrage
return ports # no filtering
# Ports déjà couverts par detected_software (is_active=1)
# Ports already covered by detected_software (is_active=1)
rows = self.shared_data.db.query("""
SELECT port, last_seen
FROM detected_software
@@ -149,21 +144,21 @@ class NmapVulnScanner:
dt = datetime.fromisoformat(ls.replace('Z',''))
return dt >= cutoff
except Exception:
return True # si doute, on considère "frais"
return True # if in doubt, consider it fresh
return [p for p in ports if (p not in seen) or (not fresh(p))]
else:
# Sans TTL: si déjà scanné/présent actif => on skip
# No TTL: if already scanned/active => skip
return [p for p in ports if p not in seen]
# ---------------------------- Scanning ------------------------------ #
def scan_vulnerabilities(self, ip: str, ports: List[str]) -> List[Dict]:
"""
Mode rapide (par défaut) :
- nmap -sV --version-light sur un set réduit de ports
- CPE extraits directement du service detection
- (option) --script=vulners pour extraire CVE (si script installé)
Fallback (si vuln_fast=False) : ancien mode avec scripts 'vuln', etc.
Fast mode (default):
- nmap -sV --version-light on a reduced port set
- CPE extracted directly from service detection
- (optional) --script=vulners to extract CVE (if script installed)
Fallback (vuln_fast=False): legacy mode with 'vuln' scripts, etc.
"""
fast = bool(self.shared_data.config.get('vuln_fast', True))
use_vulners = bool(self.shared_data.config.get('nse_vulners', False))
@@ -182,7 +177,7 @@ class NmapVulnScanner:
return self._scan_heavy(ip, port_list)
def _scan_fast_cpe_cve(self, ip: str, port_list: str, use_vulners: bool) -> List[Dict]:
"""Scan rapide pour récupérer CPE et (option) CVE via vulners."""
"""Fast scan to extract CPE and (optionally) CVE via vulners."""
vulns: List[Dict] = []
args = "-sV --version-light -T4 --max-retries 1 --host-timeout 30s --script-timeout 10s"
@@ -206,7 +201,7 @@ class NmapVulnScanner:
port_info = host[proto][port]
service = port_info.get('name', '') or ''
# 1) CPE depuis -sV
# 1) CPE from -sV
cpe_values = self._extract_cpe_values(port_info)
for cpe in cpe_values:
vulns.append({
@@ -217,7 +212,7 @@ class NmapVulnScanner:
'details': f"CPE detected: {cpe}"[:500]
})
# 2) CVE via script 'vulners' (si actif)
# 2) CVE via 'vulners' script (if enabled)
try:
script_out = (port_info.get('script') or {}).get('vulners')
if script_out:
@@ -235,7 +230,7 @@ class NmapVulnScanner:
return vulns
def _scan_heavy(self, ip: str, port_list: str) -> List[Dict]:
"""Ancienne stratégie (plus lente) avec catégorie vuln, etc."""
"""Legacy strategy (slower) with vuln category scripts, etc."""
vulnerabilities: List[Dict] = []
vuln_scripts = [
'vuln','exploit','http-vuln-*','smb-vuln-*',
@@ -272,7 +267,7 @@ class NmapVulnScanner:
'details': str(output)[:500]
})
if 'vuln' in (script_name or '') and not self.extract_cves(str(output)):
# On ne stocke plus ces 'FINDING' (pas de CVE)
# Skip findings without CVE IDs
pass
if bool(self.shared_data.config.get('scan_cpe', False)):
@@ -285,7 +280,7 @@ class NmapVulnScanner:
# ---------------------------- Helpers -------------------------------- #
def _extract_cpe_values(self, port_info: Dict[str, Any]) -> List[str]:
"""Normalise tous les formats possibles de CPE renvoyés par python-nmap."""
"""Normalize all CPE formats returned by python-nmap."""
cpe = port_info.get('cpe')
if not cpe:
return []
@@ -300,7 +295,7 @@ class NmapVulnScanner:
return []
def extract_cves(self, text: str) -> List[str]:
"""Extrait les identifiants CVE d'un texte."""
"""Extract CVE identifiers from text."""
import re
if not text:
return []
@@ -308,7 +303,7 @@ class NmapVulnScanner:
return re.findall(cve_pattern, str(text), re.IGNORECASE)
def scan_cpe(self, ip: str, ports: List[str]) -> List[Dict]:
"""(Fallback lourd) Scan CPE détaillé si demandé."""
"""(Heavy fallback) Detailed CPE scan if requested."""
cpe_vulns: List[Dict] = []
try:
port_list = ','.join([str(p) for p in ports if str(p).strip()])
@@ -340,9 +335,9 @@ class NmapVulnScanner:
# ---------------------------- Persistence ---------------------------- #
def save_vulnerabilities(self, mac: str, ip: str, findings: List[Dict]):
"""Sépare CPE et CVE, met à jour les statuts + enregistre les nouveautés avec toutes les infos."""
# Récupérer le hostname depuis la DB
"""Split CPE/CVE, update statuses, and persist new findings with full info."""
# Fetch hostname from DB
hostname = None
try:
host_row = self.shared_data.db.query_one(
@@ -354,7 +349,7 @@ class NmapVulnScanner:
except Exception as e:
logger.debug(f"Could not fetch hostname: {e}")
# Grouper par port avec les infos complètes
# Group by port with full info
findings_by_port = {}
for f in findings:
port = int(f.get('port', 0) or 0)
@@ -376,26 +371,26 @@ class NmapVulnScanner:
elif vid.lower().startswith('cpe:'):
findings_by_port[port]['cpes'].add(vid)
# 1) Traiter les CVE par port
# 1) Process CVEs by port
for port, data in findings_by_port.items():
if data['cves']:
for cve in data['cves']:
try:
# Vérifier si existe déjà
# Check if already exists
existing = self.shared_data.db.query_one(
"SELECT id FROM vulnerabilities WHERE mac_address=? AND vuln_id=? AND port=? LIMIT 1",
(mac, cve, port)
)
if existing:
# Mettre à jour avec IP et hostname
# Update with IP and hostname
self.shared_data.db.execute("""
UPDATE vulnerabilities
SET ip=?, hostname=?, last_seen=CURRENT_TIMESTAMP, is_active=1
WHERE mac_address=? AND vuln_id=? AND port=?
""", (ip, hostname, mac, cve, port))
else:
# Nouvelle entrée avec toutes les infos
# New entry with full info
self.shared_data.db.execute("""
INSERT INTO vulnerabilities(mac_address, ip, hostname, port, vuln_id, is_active)
VALUES(?,?,?,?,?,1)
@@ -406,7 +401,7 @@ class NmapVulnScanner:
except Exception as e:
logger.error(f"Failed to save CVE {cve}: {e}")
# 2) Traiter les CPE
# 2) Process CPEs
for port, data in findings_by_port.items():
for cpe in data['cpes']:
try:

View File

@@ -1,5 +1,6 @@
"""odin_eye.py - Dynamic network interface detection and monitoring."""
# --- AJOUTS EN HAUT DU FICHIER ---------------------------------------------
# --- Dynamic interface detection ---
import os
try:
import psutil
@@ -9,13 +10,13 @@ except Exception:
def _list_net_ifaces() -> list[str]:
names = set()
# 1) psutil si dispo
# 1) psutil if available
if psutil:
try:
names.update(ifname for ifname in psutil.net_if_addrs().keys() if ifname != "lo")
except Exception:
pass
# 2) fallback kernel
# 2) kernel fallback
try:
for n in os.listdir("/sys/class/net"):
if n and n != "lo":
@@ -23,7 +24,7 @@ def _list_net_ifaces() -> list[str]:
except Exception:
pass
out = ["auto"] + sorted(names)
# sécurité: pas de doublons
# deduplicate
seen, unique = set(), []
for x in out:
if x not in seen:
@@ -31,7 +32,7 @@ def _list_net_ifaces() -> list[str]:
return unique
# Hook appelée par le backend avant affichage UI / sync DB
# Hook called by the backend before UI display / DB sync
def compute_dynamic_b_args(base: dict) -> dict:
"""
Compute dynamic arguments at runtime.
@@ -54,21 +55,20 @@ def compute_dynamic_b_args(base: dict) -> dict:
return d
# --- MÉTADONNÉES UI SUPPLÉMENTAIRES -----------------------------------------
# Exemples d'arguments (affichage frontend; aussi persisté en DB via sync_actions)
# --- Additional UI metadata ---
# Example arguments (frontend display; also persisted in DB via sync_actions)
b_examples = [
{"interface": "auto", "filter": "http or ftp", "timeout": 120, "max_packets": 5000, "save_credentials": True},
{"interface": "wlan0", "filter": "(http or smtp) and not broadcast", "timeout": 300, "max_packets": 10000},
]
# Lien MD (peut être un chemin local servi par votre frontend, ou un http(s))
# Exemple: un README markdown stocké dans votre repo
# Docs link (local path served by frontend, or http(s))
b_docs_url = "docs/actions/OdinEye.md"
# --- Métadonnées d'action (consommées par shared.generate_actions_json) -----
# --- Action metadata (consumed by shared.generate_actions_json) ---
b_class = "OdinEye"
b_module = "odin_eye" # nom du fichier sans .py
b_module = "odin_eye"
b_enabled = 0
b_action = "normal"
b_category = "recon"
@@ -81,20 +81,20 @@ b_author = "Fabien / Cyberviking"
b_version = "1.0.0"
b_icon = "OdinEye.png"
# Schéma d'arguments pour UI dynamique (clé == nom du flag sans '--')
# UI argument schema (key == flag name without '--')
b_args = {
"interface": {
"type": "select", "label": "Network Interface",
"choices": [], # <- Laisser vide: rempli dynamiquement par compute_dynamic_b_args(...)
"choices": [], # Populated dynamically by compute_dynamic_b_args()
"default": "auto",
"help": "Interface à écouter. 'auto' tente de détecter l'interface par défaut." },
"help": "Interface to listen on. 'auto' tries to detect the default interface." },
"filter": {"type": "text", "label": "BPF Filter", "default": "(http or ftp or smtp or pop3 or imap or telnet) and not broadcast"},
"output": {"type": "text", "label": "Output dir", "default": "/home/bjorn/Bjorn/data/output/packets"},
"timeout": {"type": "number", "label": "Timeout (s)", "min": 10, "max": 36000, "step": 1, "default": 300},
"max_packets": {"type": "number", "label": "Max packets", "min": 100, "max": 2000000, "step": 100, "default": 10000},
}
# ----------------- Code d'analyse (ton code existant) -----------------------
# --- Traffic analysis code ---
import os, json, pyshark, argparse, logging, re, threading, signal
from datetime import datetime
from collections import defaultdict
@@ -249,7 +249,7 @@ class OdinEye:
def execute(self):
try:
# Timeout thread (inchangé) ...
# Timeout thread
if self.timeout and self.timeout > 0:
def _stop_after():
self.stop_capture.wait(self.timeout)
@@ -260,13 +260,13 @@ class OdinEye:
self.capture = pyshark.LiveCapture(interface=self.interface, bpf_filter=self.capture_filter)
# Interruption douce — SKIP si on tourne en mode importlib (thread)
# Graceful interrupt - skip if running in importlib (threaded) mode
if os.environ.get("BJORN_EMBEDDED") != "1":
try:
signal.signal(signal.SIGINT, self.handle_interrupt)
signal.signal(signal.SIGTERM, self.handle_interrupt)
except Exception:
# Ex: ValueError si pas dans le main thread
# e.g. ValueError if not in main thread
pass
for packet in self.capture.sniff_continuously():

View File

@@ -1,11 +1,5 @@
# actions/presence_join.py
# -*- coding: utf-8 -*-
"""
PresenceJoin — Sends a Discord webhook when the targeted host JOINS the network.
- Triggered by the scheduler ONLY on transition OFF->ON (b_trigger="on_join").
- Targeting via b_requires (e.g. {"any":[{"mac_is":"AA:BB:..."}]}).
- The action does not query anything: it only notifies when called.
"""
"""presence_join.py - Discord webhook notification when a target host joins the network."""
import requests
from typing import Optional

View File

@@ -1,11 +1,5 @@
# actions/presence_left.py
# -*- coding: utf-8 -*-
"""
PresenceLeave — Sends a Discord webhook when the targeted host LEAVES the network.
- Triggered by the scheduler ONLY on transition ON->OFF (b_trigger="on_leave").
- Targeting via b_requires (e.g. {"any":[{"mac_is":"AA:BB:..."}]}).
- The action does not query anything: it only notifies when called.
"""
"""presence_left.py - Discord webhook notification when a target host leaves the network."""
import requests
from typing import Optional

View File

@@ -1,11 +1,4 @@
# Advanced password cracker supporting multiple hash formats and attack methods.
# Saves settings in `/home/bjorn/.settings_bjorn/rune_cracker_settings.json`.
# Automatically loads saved settings if arguments are not provided.
# -i, --input Input file containing hashes to crack.
# -w, --wordlist Path to password wordlist (default: built-in list).
# -r, --rules Path to rules file for mutations (default: built-in rules).
# -t, --type Hash type (md5, sha1, sha256, sha512, ntlm).
# -o, --output Output directory (default: /home/bjorn/Bjorn/data/output/hashes).
"""rune_cracker.py - Threaded hash cracker with wordlist + mutation rules (MD5/SHA/NTLM)."""
import os
import json

View File

@@ -1,12 +1,4 @@
# scanning.py Network scanner (DB-first, no stubs)
# - Host discovery (nmap -sn -PR)
# - Resolve MAC/hostname (per-host threads) -> DB (hosts table)
# - Port scan (multi-threads) -> DB (merge ports by MAC)
# - Mark alive=0 for hosts not seen this run
# - Update stats (stats table)
# - Light logging (milestones) without flooding
# - WAL checkpoint(TRUNCATE) + PRAGMA optimize at end of scan
# - NEW: No DB insert without a real MAC. Unresolved IPs are kept in-memory for this run.
"""scanning.py - Network scanner: nmap host discovery, port scan, MAC resolve, all DB-backed."""
import os
import threading

View File

@@ -1,10 +1,4 @@
"""
smb_bruteforce.py — SMB bruteforce (DB-backed, no CSV/JSON, no rich)
- Cibles fournies par l'orchestrateur (ip, port)
- IP -> (MAC, hostname) depuis DB.hosts
- Succès enregistrés dans DB.creds (service='smb'), 1 ligne PAR PARTAGE (database=<share>)
- Conserve la logique de queue/threads et les signatures. Plus de rich/progress.
"""
"""smb_bruteforce.py - SMB bruteforce with per-share credential storage in DB."""
import os
import threading
@@ -28,14 +22,14 @@ b_parent = None
b_service = '["smb"]'
b_trigger = 'on_any:["on_service:smb","on_new_port:445"]'
b_priority = 70
b_cooldown = 1800 # 30 minutes entre deux runs
b_rate_limit = '3/86400' # 3 fois par jour max
b_cooldown = 1800 # 30 min between runs
b_rate_limit = '3/86400' # max 3 per day
IGNORED_SHARES = {'print$', 'ADMIN$', 'IPC$', 'C$', 'D$', 'E$', 'F$'}
class SMBBruteforce:
"""Wrapper orchestrateur -> SMBConnector."""
"""Orchestrator wrapper -> SMBConnector."""
def __init__(self, shared_data):
self.shared_data = shared_data
@@ -43,23 +37,22 @@ class SMBBruteforce:
logger.info("SMBConnector initialized.")
def bruteforce_smb(self, ip, port):
"""Lance le bruteforce SMB pour (ip, port)."""
"""Run SMB bruteforce for (ip, port)."""
return self.smb_bruteforce.run_bruteforce(ip, port)
def execute(self, ip, port, row, status_key):
"""Point d'entrée orchestrateur (retour 'success' / 'failed')."""
"""Orchestrator entry point (returns success / failed)."""
self.shared_data.bjorn_orch_status = "SMBBruteforce"
success, results = self.bruteforce_smb(ip, port)
return 'success' if success else 'failed'
class SMBConnector:
"""Gère les tentatives SMB, la persistance DB et le mapping IP→(MAC, Hostname)."""
"""Handles SMB attempts, DB persistence, and IP->(MAC, Hostname) mapping."""
def __init__(self, shared_data):
self.shared_data = shared_data
# Wordlists inchangées
self.users = self._read_lines(shared_data.users_file)
self.passwords = self._read_lines(shared_data.passwords_file)
@@ -71,7 +64,7 @@ class SMBConnector:
self.results: List[List[str]] = [] # [mac, ip, hostname, share, user, password, port]
self.queue = Queue()
# ---------- util fichiers ----------
# ---------- file utils ----------
@staticmethod
def _read_lines(path: str) -> List[str]:
try:
@@ -267,7 +260,7 @@ class SMBConnector:
for t in threads:
t.join()
# Fallback smbclient -L si rien trouvé
# Fallback smbclient -L if nothing found
if not success_flag[0]:
logger.info(f"No success via SMBConnection. Trying smbclient -L for {adresse_ip}")
for user in self.users:
@@ -290,7 +283,7 @@ class SMBConnector:
# ---------- persistence DB ----------
def save_results(self):
# insère self.results dans creds (service='smb'), database = <share>
# Insert results into creds (service='smb'), database = <share>
for mac, ip, hostname, share, user, password, port in self.results:
try:
self.shared_data.db.insert_cred(
@@ -301,7 +294,7 @@ class SMBConnector:
user=user,
password=password,
port=port,
database=share, # utilise la colonne 'database' pour distinguer les shares
database=share, # uses 'database' column to distinguish shares
extra=None
)
except Exception as e:
@@ -315,12 +308,12 @@ class SMBConnector:
self.results = []
def removeduplicates(self):
# plus nécessaire avec l'index unique; conservé pour compat.
# No longer needed with unique index; kept for compat
pass
if __name__ == "__main__":
# Mode autonome non utilisé en prod; on laisse simple
# Standalone mode not used in prod
try:
sd = SharedData()
smb_bruteforce = SMBBruteforce(sd)

View File

@@ -1,11 +1,4 @@
"""
sql_bruteforce.py — MySQL bruteforce (DB-backed, no CSV/JSON, no rich)
- Cibles: (ip, port) par l'orchestrateur
- IP -> (MAC, hostname) via DB.hosts
- Connexion sans DB puis SHOW DATABASES; une entrée par DB trouvée
- Succès -> DB.creds (service='sql', database=<db>)
- Conserve la logique (pymysql, queue/threads)
"""
"""sql_bruteforce.py - MySQL bruteforce with per-database credential storage (pymysql)."""
import os
import pymysql
@@ -28,11 +21,11 @@ b_parent = None
b_service = '["sql"]'
b_trigger = 'on_any:["on_service:sql","on_new_port:3306"]'
b_priority = 70
b_cooldown = 1800 # 30 minutes entre deux runs
b_rate_limit = '3/86400' # 3 fois par jour max
b_cooldown = 1800 # 30 min between runs
b_rate_limit = '3/86400' # max 3 per day
class SQLBruteforce:
"""Wrapper orchestrateur -> SQLConnector."""
"""Orchestrator wrapper -> SQLConnector."""
def __init__(self, shared_data):
self.shared_data = shared_data
@@ -40,22 +33,21 @@ class SQLBruteforce:
logger.info("SQLConnector initialized.")
def bruteforce_sql(self, ip, port):
"""Lance le bruteforce SQL pour (ip, port)."""
"""Run SQL bruteforce for (ip, port)."""
return self.sql_bruteforce.run_bruteforce(ip, port)
def execute(self, ip, port, row, status_key):
"""Point d'entrée orchestrateur (retour 'success' / 'failed')."""
"""Orchestrator entry point (returns success / failed)."""
success, results = self.bruteforce_sql(ip, port)
return 'success' if success else 'failed'
class SQLConnector:
"""Gère les tentatives SQL (MySQL), persistance DB, mapping IP→(MAC, Hostname)."""
"""Handles SQL (MySQL) attempts, DB persistence, IP->(MAC, Hostname) mapping."""
def __init__(self, shared_data):
self.shared_data = shared_data
# Wordlists inchangées
self.users = self._read_lines(shared_data.users_file)
self.passwords = self._read_lines(shared_data.passwords_file)
@@ -67,7 +59,7 @@ class SQLConnector:
self.results: List[List[str]] = [] # [ip, user, password, port, database, mac, hostname]
self.queue = Queue()
# ---------- util fichiers ----------
# ---------- file utils ----------
@staticmethod
def _read_lines(path: str) -> List[str]:
try:
@@ -111,7 +103,7 @@ class SQLConnector:
# ---------- SQL ----------
def sql_connect(self, adresse_ip: str, user: str, password: str):
"""
Connexion sans DB puis SHOW DATABASES; retourne (True, [dbs]) ou (False, []).
Connect without DB then SHOW DATABASES; returns (True, [dbs]) or (False, []).
"""
try:
conn = pymysql.connect(
@@ -242,7 +234,7 @@ class SQLConnector:
# ---------- persistence DB ----------
def save_results(self):
# pour chaque DB trouvée, créer/mettre à jour une ligne dans creds (service='sql', database=<dbname>)
# For each discovered DB, create/update a row in creds (service='sql', database=<dbname>)
for ip, user, password, port, dbname in self.results:
mac = self.mac_for_ip(ip)
hostname = self.hostname_for_ip(ip) or ""
@@ -269,7 +261,7 @@ class SQLConnector:
self.results = []
def remove_duplicates(self):
# inutile avec l'index unique; conservé pour compat.
# No longer needed with unique index; kept for compat
pass

View File

@@ -1,15 +1,4 @@
"""
ssh_bruteforce.py - This script performs a brute force attack on SSH services (port 22)
to find accessible accounts using various user credentials. It logs the results of
successful connections.
SQL version (minimal changes):
- Targets still provided by the orchestrator (ip + port)
- IP -> (MAC, hostname) mapping read from DB 'hosts'
- Successes saved into DB.creds (service='ssh') with robust fallback upsert
- Action status recorded in DB.action_results (via SSHBruteforce.execute)
- Paramiko noise silenced; ssh.connect avoids agent/keys to reduce hangs
"""
"""ssh_bruteforce.py - SSH bruteforce with DB-backed credential storage (paramiko)."""
import os
import paramiko
@@ -22,7 +11,6 @@ from queue import Queue
from shared import SharedData
from logger import Logger
# Configure the logger
logger = Logger(name="ssh_bruteforce.py", level=logging.DEBUG)
# Silence Paramiko internals
@@ -30,7 +18,7 @@ for _name in ("paramiko", "paramiko.transport", "paramiko.client", "paramiko.hos
"paramiko.kex", "paramiko.auth_handler"):
logging.getLogger(_name).setLevel(logging.CRITICAL)
# Define the necessary global variables
# Module metadata
b_class = "SSHBruteforce"
b_module = "ssh_bruteforce"
b_status = "brute_force_ssh"
@@ -38,9 +26,9 @@ b_port = 22
b_service = '["ssh"]'
b_trigger = 'on_any:["on_service:ssh","on_new_port:22"]'
b_parent = None
b_priority = 70 # tu peux ajuster la priorité si besoin
b_cooldown = 1800 # 30 minutes entre deux runs
b_rate_limit = '3/86400' # 3 fois par jour max
b_priority = 70
b_cooldown = 1800 # 30 min between runs
b_rate_limit = '3/86400' # max 3 per day
class SSHBruteforce:

View File

@@ -1,13 +1,4 @@
"""
steal_data_sql.py — SQL data looter (DB-backed)
SQL mode:
- Orchestrator provides (ip, port) after parent success (SQLBruteforce).
- DB.creds (service='sql') provides (user,password, database?).
- We connect first without DB to enumerate tables (excluding system schemas),
then connect per schema to export CSVs.
- Output under: {data_stolen_dir}/sql/{mac}_{ip}/{schema}/{schema_table}.csv
"""
"""steal_data_sql.py - SQL data exfiltration: enumerate schemas and dump tables to CSV."""
import os
import logging

View File

@@ -1,12 +1,4 @@
"""
steal_files_ftp.py — FTP file looter (DB-backed)
SQL mode:
- Orchestrator provides (ip, port) after parent success (FTPBruteforce).
- FTP credentials are read from DB.creds (service='ftp'); anonymous is also tried.
- IP -> (MAC, hostname) via DB.hosts.
- Loot saved under: {data_stolen_dir}/ftp/{mac}_{ip}/(anonymous|<username>)/...
"""
"""steal_files_ftp.py - FTP file exfiltration using DB creds from FTPBruteforce."""
import os
import logging

View File

@@ -1,12 +1,4 @@
"""
steal_files_smb.py — SMB file looter (DB-backed).
SQL mode:
- Orchestrator provides (ip, port) after parent success (SMBBruteforce).
- DB.creds (service='smb') provides credentials; 'database' column stores share name.
- Also try anonymous (''/'').
- Output under: {data_stolen_dir}/smb/{mac}_{ip}/{share}/...
"""
"""steal_files_smb.py - SMB share exfiltration using DB creds from SMBBruteforce."""
import os
import logging

View File

@@ -1,17 +1,4 @@
"""
steal_files_ssh.py — SSH file looter (DB-backed)
SQL mode:
- Orchestrator provides (ip, port) and ensures parent action success (SSHBruteforce).
- SSH credentials are read from the DB table `creds` (service='ssh').
- IP -> (MAC, hostname) mapping is read from the DB table `hosts`.
- Looted files are saved under: {shared_data.data_stolen_dir}/ssh/{mac}_{ip}/...
- Paramiko logs are silenced to avoid noisy banners/tracebacks.
Parent gate:
- Orchestrator enforces parent success (b_parent='SSHBruteforce').
- This action runs once per eligible target (alive, open port, parent OK).
"""
"""steal_files_ssh.py - SSH file exfiltration using DB creds from SSHBruteforce (paramiko)."""
import os
import time
@@ -203,7 +190,7 @@ class StealFilesSSH:
names = set(self.shared_data.steal_file_names or [])
if not exts and not names:
# If no filters are defined, do nothing (too risky to pull everything).
logger.warning("No steal_file_extensions / steal_file_names configured skipping.")
logger.warning("No steal_file_extensions / steal_file_names configured - skipping.")
return []
matches: List[str] = []

View File

@@ -1,12 +1,4 @@
"""
steal_files_telnet.py — Telnet file looter (DB-backed)
SQL mode:
- Orchestrator provides (ip, port) after parent success (TelnetBruteforce).
- Credentials read from DB.creds (service='telnet'); we try each pair.
- Files found via 'find / -type f', then retrieved with 'cat'.
- Output under: {data_stolen_dir}/telnet/{mac}_{ip}/...
"""
"""steal_files_telnet.py - Telnet file exfiltration using DB creds from TelnetBruteforce."""
import os
import telnetlib
@@ -110,7 +102,7 @@ class StealFilesTelnet:
if password:
tn.read_until(b"Password: ", timeout=5)
tn.write(password.encode('ascii') + b"\n")
# prompt detection (naïf mais identique à l'original)
# Naive prompt detection (same as original)
time.sleep(2)
self.telnet_connected = True
logger.info(f"Connected to {ip} via Telnet as {username}")

View File

@@ -1,10 +1,4 @@
"""
telnet_bruteforce.py — Telnet bruteforce (DB-backed, no CSV/JSON, no rich)
- Cibles: (ip, port) par lorchestrateur
- IP -> (MAC, hostname) via DB.hosts
- Succès -> DB.creds (service='telnet')
- Conserve la logique dorigine (telnetlib, queue/threads)
"""
"""telnet_bruteforce.py - Telnet bruteforce with DB-backed credential storage."""
import os
import telnetlib
@@ -27,11 +21,11 @@ b_parent = None
b_service = '["telnet"]'
b_trigger = 'on_any:["on_service:telnet","on_new_port:23"]'
b_priority = 70
b_cooldown = 1800 # 30 minutes entre deux runs
b_rate_limit = '3/86400' # 3 fois par jour max
b_cooldown = 1800 # 30 min between runs
b_rate_limit = '3/86400' # max 3 per day
class TelnetBruteforce:
"""Wrapper orchestrateur -> TelnetConnector."""
"""Orchestrator wrapper -> TelnetConnector."""
def __init__(self, shared_data):
self.shared_data = shared_data
@@ -39,11 +33,11 @@ class TelnetBruteforce:
logger.info("TelnetConnector initialized.")
def bruteforce_telnet(self, ip, port):
"""Lance le bruteforce Telnet pour (ip, port)."""
"""Run Telnet bruteforce for (ip, port)."""
return self.telnet_bruteforce.run_bruteforce(ip, port)
def execute(self, ip, port, row, status_key):
"""Point dentrée orchestrateur (retour 'success' / 'failed')."""
"""Orchestrator entry point (returns success / failed)."""
logger.info(f"Executing TelnetBruteforce on {ip}:{port}")
self.shared_data.bjorn_orch_status = "TelnetBruteforce"
success, results = self.bruteforce_telnet(ip, port)
@@ -51,12 +45,11 @@ class TelnetBruteforce:
class TelnetConnector:
"""Gère les tentatives Telnet, persistance DB, mapping IP→(MAC, Hostname)."""
"""Handles Telnet attempts, DB persistence, IP->(MAC, Hostname) mapping."""
def __init__(self, shared_data):
self.shared_data = shared_data
# Wordlists inchangées
self.users = self._read_lines(shared_data.users_file)
self.passwords = self._read_lines(shared_data.passwords_file)
@@ -68,7 +61,7 @@ class TelnetConnector:
self.results: List[List[str]] = [] # [mac, ip, hostname, user, password, port]
self.queue = Queue()
# ---------- util fichiers ----------
# ---------- file utils ----------
@staticmethod
def _read_lines(path: str) -> List[str]:
try:

View File

@@ -1,11 +1,4 @@
# Service fingerprinting and version detection tool for vulnerability identification.
# Saves settings in `/home/bjorn/.settings_bjorn/thor_hammer_settings.json`.
# Automatically loads saved settings if arguments are not provided.
# -t, --target Target IP or hostname to scan (overrides saved value).
# -p, --ports Ports to scan (default: common ports, comma-separated).
# -o, --output Output directory (default: /home/bjorn/Bjorn/data/output/services).
# -d, --delay Delay between probes in seconds (default: 1).
# -v, --verbose Enable verbose output for detailed service information.
"""thor_hammer.py - Service fingerprinting and version detection for vuln identification."""
import os
import json

View File

@@ -1,11 +1,4 @@
# Web application scanner for discovering hidden paths and vulnerabilities.
# Saves settings in `/home/bjorn/.settings_bjorn/valkyrie_scout_settings.json`.
# Automatically loads saved settings if arguments are not provided.
# -u, --url Target URL to scan (overrides saved value).
# -w, --wordlist Path to directory wordlist (default: built-in list).
# -o, --output Output directory (default: /home/bjorn/Bjorn/data/output/webscan).
# -t, --threads Number of concurrent threads (default: 10).
# -d, --delay Delay between requests in seconds (default: 0.1).
"""valkyrie_scout.py - Web app scanner for hidden paths and directory enumeration."""
import os
import json

View File

@@ -1,13 +1,6 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
web_enum.py — Gobuster Web Enumeration -> DB writer for table `webenum`.
- Writes each finding into the `webenum` table
- ON CONFLICT(mac_address, ip, port, directory) DO UPDATE
- Respects orchestrator stop flag (shared_data.orchestrator_should_exit)
- No filesystem output: parse Gobuster stdout directly
"""
"""web_enum.py - Gobuster-based web directory enumeration, results written to DB."""
import re
import socket

View File

@@ -1,5 +1,4 @@
# wpasec_potfiles.py
# WPAsec Potfile Manager - Download, clean, import, or erase WiFi credentials
"""wpasec_potfiles.py - Download, clean, import, or erase WiFi creds from WPAsec potfiles."""
import os
import json

View File

@@ -1,11 +1,4 @@
# Network topology mapping tool for discovering and visualizing network segments.
# Saves settings in `/home/bjorn/.settings_bjorn/yggdrasil_mapper_settings.json`.
# Automatically loads saved settings if arguments are not provided.
# -r, --range Network range to scan (CIDR format).
# -i, --interface Network interface to use (default: active interface).
# -d, --depth Maximum trace depth for routing (default: 5).
# -o, --output Output directory (default: /home/bjorn/Bjorn/data/output/topology).
# -t, --timeout Timeout for probes in seconds (default: 2).
"""yggdrasil_mapper.py - Network topology mapper with traceroute and graph visualization."""
import os
import json