Autarch Will Control The Internet
This commit is contained in:
1
core/__init__.py
Normal file
1
core/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# AUTARCH Core Framework
|
||||
438
core/agent.py
Normal file
438
core/agent.py
Normal file
@@ -0,0 +1,438 @@
|
||||
"""
|
||||
AUTARCH Agent System
|
||||
Autonomous agent that uses LLM to accomplish tasks with tools
|
||||
"""
|
||||
|
||||
import re
|
||||
import json
|
||||
from typing import Optional, List, Dict, Any, Callable
|
||||
from dataclasses import dataclass, field
|
||||
from enum import Enum
|
||||
|
||||
from .llm import get_llm, LLM, LLMError
|
||||
from .tools import get_tool_registry, ToolRegistry
|
||||
from .banner import Colors
|
||||
|
||||
|
||||
class AgentState(Enum):
    """Agent execution states."""
    IDLE = "idle"                    # no task assigned yet / between runs
    THINKING = "thinking"            # waiting on an LLM generation
    EXECUTING = "executing"          # a tool is currently running
    WAITING_USER = "waiting_user"    # blocked on an ask_user question
    COMPLETE = "complete"            # last task finished via task_complete
    ERROR = "error"                  # load/generation failure or max steps hit
|
||||
|
||||
|
||||
@dataclass
class AgentStep:
    """Record of a single agent step."""
    # The model's free-text reasoning (the THOUGHT line) for this step.
    thought: str
    # Tool invoked, or a pseudo-action ("task_complete", "ask_user"), if any.
    tool_name: Optional[str] = None
    # Parsed PARAMS dict passed to the tool.
    tool_args: Optional[Dict[str, Any]] = None
    # Stringified tool output, user answer, or completion summary.
    tool_result: Optional[str] = None
    # Error message when the step failed.
    error: Optional[str] = None
|
||||
|
||||
|
||||
@dataclass
class AgentResult:
    """Result of an agent task execution."""
    # True when the run ended via task_complete (or a direct-answer fallback).
    success: bool
    # Human-readable summary of what was (or was not) accomplished.
    summary: str
    # Full trace of the steps taken during the run.
    steps: List[AgentStep] = field(default_factory=list)
    # Error description when success is False.
    error: Optional[str] = None
|
||||
|
||||
|
||||
class Agent:
|
||||
"""Autonomous agent that uses LLM and tools to accomplish tasks."""
|
||||
|
||||
SYSTEM_PROMPT = """You are AUTARCH, an autonomous AI agent created by darkHal and Setec Security Labs.
|
||||
|
||||
Your purpose is to accomplish tasks using the tools available to you. You think step by step, use tools to gather information and take actions, then continue until the task is complete.
|
||||
|
||||
## How to respond
|
||||
|
||||
You MUST respond in the following format for EVERY response:
|
||||
|
||||
THOUGHT: [Your reasoning about what to do next]
|
||||
ACTION: [tool_name]
|
||||
PARAMS: {"param1": "value1", "param2": "value2"}
|
||||
|
||||
OR when the task is complete:
|
||||
|
||||
THOUGHT: [Summary of what was accomplished]
|
||||
ACTION: task_complete
|
||||
PARAMS: {"summary": "Description of completed work"}
|
||||
|
||||
OR when you need user input:
|
||||
|
||||
THOUGHT: [Why you need to ask the user]
|
||||
ACTION: ask_user
|
||||
PARAMS: {"question": "Your question"}
|
||||
|
||||
## Rules
|
||||
1. Always start with THOUGHT to explain your reasoning
|
||||
2. Always specify exactly one ACTION
|
||||
3. Always provide PARAMS as valid JSON (even if empty: {})
|
||||
4. Use tools to verify your work - don't assume success
|
||||
5. If a tool fails, analyze the error and try a different approach
|
||||
6. Only use task_complete when the task is fully done
|
||||
|
||||
{tools_description}
|
||||
"""
|
||||
|
||||
    def __init__(
        self,
        llm: Optional[LLM] = None,
        tools: Optional[ToolRegistry] = None,
        max_steps: int = 20,
        verbose: bool = True
    ):
        """Initialize the agent.

        Args:
            llm: LLM instance to use. Uses global if not provided.
            tools: Tool registry to use. Uses global if not provided.
            max_steps: Maximum steps before stopping.
            verbose: Whether to print progress.
        """
        self.llm = llm or get_llm()
        self.tools = tools or get_tool_registry()
        self.max_steps = max_steps
        self.verbose = verbose

        # Per-run execution state; reset at the start of each run().
        self.state = AgentState.IDLE
        self.current_task: Optional[str] = None
        self.steps: List[AgentStep] = []
        self.conversation: List[Dict[str, str]] = []

        # Callbacks (optional observers; invoked synchronously)
        self.on_step: Optional[Callable[[AgentStep], None]] = None
        self.on_state_change: Optional[Callable[[AgentState], None]] = None
|
||||
|
||||
def _set_state(self, state: AgentState):
|
||||
"""Update agent state and notify callback."""
|
||||
self.state = state
|
||||
if self.on_state_change:
|
||||
self.on_state_change(state)
|
||||
|
||||
def _log(self, message: str, level: str = "info"):
|
||||
"""Log a message if verbose mode is on."""
|
||||
if not self.verbose:
|
||||
return
|
||||
|
||||
colors = {
|
||||
"info": Colors.CYAN,
|
||||
"success": Colors.GREEN,
|
||||
"warning": Colors.YELLOW,
|
||||
"error": Colors.RED,
|
||||
"thought": Colors.MAGENTA,
|
||||
"action": Colors.BLUE,
|
||||
"result": Colors.WHITE,
|
||||
}
|
||||
symbols = {
|
||||
"info": "*",
|
||||
"success": "+",
|
||||
"warning": "!",
|
||||
"error": "X",
|
||||
"thought": "?",
|
||||
"action": ">",
|
||||
"result": "<",
|
||||
}
|
||||
|
||||
color = colors.get(level, Colors.WHITE)
|
||||
symbol = symbols.get(level, "*")
|
||||
print(f"{color}[{symbol}] {message}{Colors.RESET}")
|
||||
|
||||
def _build_system_prompt(self) -> str:
|
||||
"""Build the system prompt with tools description."""
|
||||
tools_desc = self.tools.get_tools_prompt()
|
||||
return self.SYSTEM_PROMPT.format(tools_description=tools_desc)
|
||||
|
||||
def _parse_response(self, response: str) -> tuple[str, str, Dict[str, Any]]:
|
||||
"""Parse LLM response into thought, action, and params.
|
||||
|
||||
Args:
|
||||
response: The raw LLM response.
|
||||
|
||||
Returns:
|
||||
Tuple of (thought, action_name, params_dict)
|
||||
|
||||
Raises:
|
||||
ValueError: If response cannot be parsed.
|
||||
"""
|
||||
# Extract THOUGHT
|
||||
thought_match = re.search(r'THOUGHT:\s*(.+?)(?=ACTION:|$)', response, re.DOTALL)
|
||||
thought = thought_match.group(1).strip() if thought_match else ""
|
||||
|
||||
# Extract ACTION
|
||||
action_match = re.search(r'ACTION:\s*(\w+)', response)
|
||||
if not action_match:
|
||||
raise ValueError("No ACTION found in response")
|
||||
action = action_match.group(1).strip()
|
||||
|
||||
# Extract PARAMS
|
||||
params_match = re.search(r'PARAMS:\s*(\{.*?\})', response, re.DOTALL)
|
||||
if params_match:
|
||||
try:
|
||||
params = json.loads(params_match.group(1))
|
||||
except json.JSONDecodeError:
|
||||
# Try to fix common JSON issues
|
||||
params_str = params_match.group(1)
|
||||
# Replace single quotes with double quotes
|
||||
params_str = params_str.replace("'", '"')
|
||||
try:
|
||||
params = json.loads(params_str)
|
||||
except json.JSONDecodeError:
|
||||
params = {}
|
||||
else:
|
||||
params = {}
|
||||
|
||||
return thought, action, params
|
||||
|
||||
def _execute_tool(self, tool_name: str, params: Dict[str, Any]) -> str:
|
||||
"""Execute a tool and return the result.
|
||||
|
||||
Args:
|
||||
tool_name: Name of the tool to execute.
|
||||
params: Parameters for the tool.
|
||||
|
||||
Returns:
|
||||
Tool result string.
|
||||
"""
|
||||
result = self.tools.execute(tool_name, **params)
|
||||
|
||||
if result["success"]:
|
||||
return str(result["result"])
|
||||
else:
|
||||
return f"[Error]: {result['error']}"
|
||||
|
||||
    def run(self, task: str, user_input_handler: Callable[[str], str] = None,
            step_callback: Optional[Callable[['AgentStep'], None]] = None) -> AgentResult:
        """Run the agent on a task.

        Drives the THOUGHT/ACTION/PARAMS loop: generate a response, parse it,
        execute the chosen tool (or handle task_complete / ask_user), feed the
        observation back, and repeat until completion or max_steps.

        Args:
            task: The task description.
            user_input_handler: Callback for handling ask_user actions.
                If None, uses default input().
            step_callback: Optional per-step callback invoked after each step completes.
                Overrides self.on_step for this run if provided.

        Returns:
            AgentResult with execution details.
        """
        # NOTE(review): step_callback permanently replaces self.on_step, so it
        # persists into later run() calls — confirm that is intended.
        if step_callback is not None:
            self.on_step = step_callback
        self.current_task = task
        self.steps = []
        self.conversation = []

        # Ensure model is loaded before entering the loop
        if not self.llm.is_loaded:
            self._log("Loading model...", "info")
            try:
                self.llm.load_model(verbose=self.verbose)
            except LLMError as e:
                self._set_state(AgentState.ERROR)
                return AgentResult(
                    success=False,
                    summary="Failed to load model",
                    error=str(e)
                )

        self._set_state(AgentState.THINKING)
        self._log(f"Starting task: {task}", "info")

        # Build initial prompt: system prompt (with tool docs) + the task
        system_prompt = self._build_system_prompt()
        self.conversation.append({"role": "system", "content": system_prompt})
        self.conversation.append({"role": "user", "content": f"Task: {task}"})

        step_count = 0
        parse_failures = 0  # Track consecutive format failures

        while step_count < self.max_steps:
            step_count += 1
            self._log(f"Step {step_count}/{self.max_steps}", "info")

            # Generate response; stop sequences prevent the model from
            # hallucinating its own observations or user turns.
            self._set_state(AgentState.THINKING)
            try:
                prompt = self._build_prompt()
                response = self.llm.generate(
                    prompt,
                    stop=["OBSERVATION:", "\nUser:", "\nTask:"],
                    temperature=0.3,  # Lower temperature for more focused responses
                )
            except LLMError as e:
                self._set_state(AgentState.ERROR)
                return AgentResult(
                    success=False,
                    summary="LLM generation failed",
                    steps=self.steps,
                    error=str(e)
                )

            # Parse response into (thought, action, params)
            try:
                thought, action, params = self._parse_response(response)
                parse_failures = 0  # Reset on success
            except ValueError as e:
                parse_failures += 1
                self._log(f"Failed to parse response: {e}", "error")
                self._log(f"Raw response: {response[:200]}...", "warning")

                # After 2 consecutive parse failures, the model can't follow
                # the structured format — treat its response as a direct answer
                if parse_failures >= 2:
                    # Clean up the raw response for display
                    answer = response.strip()
                    # Remove ChatML tokens if present
                    for tag in ['<|im_end|>', '<|im_start|>', '<|endoftext|>']:
                        answer = answer.split(tag)[0]
                    answer = answer.strip()
                    if not answer:
                        answer = "I could not process that request in agent mode. Try switching to Chat mode."

                    self._log("Model cannot follow structured format, returning direct answer", "warning")
                    # Record the fallback as a synthetic task_complete step
                    step = AgentStep(thought="Direct response (model does not support agent format)", tool_name="task_complete", tool_args={"summary": answer})
                    step.tool_result = answer
                    self.steps.append(step)
                    if self.on_step:
                        self.on_step(step)
                    self._set_state(AgentState.COMPLETE)
                    return AgentResult(success=True, summary=answer, steps=self.steps)

                # First failure — give one retry with format correction
                self.conversation.append({
                    "role": "assistant",
                    "content": response
                })
                self.conversation.append({
                    "role": "user",
                    "content": "Error: Could not parse your response. Please use the exact format:\nTHOUGHT: [reasoning]\nACTION: [tool_name]\nPARAMS: {\"param\": \"value\"}"
                })
                continue

            self._log(f"Thought: {thought[:100]}..." if len(thought) > 100 else f"Thought: {thought}", "thought")
            self._log(f"Action: {action}", "action")

            step = AgentStep(thought=thought, tool_name=action, tool_args=params)

            # Handle task_complete — successful termination
            if action == "task_complete":
                summary = params.get("summary", thought)
                step.tool_result = summary
                self.steps.append(step)

                if self.on_step:
                    self.on_step(step)

                self._set_state(AgentState.COMPLETE)
                self._log(f"Task complete: {summary}", "success")

                return AgentResult(
                    success=True,
                    summary=summary,
                    steps=self.steps
                )

            # Handle ask_user — block for human input, then continue the loop
            if action == "ask_user":
                question = params.get("question", "What should I do?")
                self._set_state(AgentState.WAITING_USER)
                self._log(f"Agent asks: {question}", "info")

                if user_input_handler:
                    user_response = user_input_handler(question)
                else:
                    print(f"\n{Colors.YELLOW}Agent question: {question}{Colors.RESET}")
                    user_response = input(f"{Colors.GREEN}Your answer: {Colors.RESET}").strip()

                step.tool_result = f"User response: {user_response}"
                self.steps.append(step)

                if self.on_step:
                    self.on_step(step)

                # Add to conversation (canonical re-serialization of the turn)
                self.conversation.append({
                    "role": "assistant",
                    "content": f"THOUGHT: {thought}\nACTION: {action}\nPARAMS: {json.dumps(params)}"
                })
                self.conversation.append({
                    "role": "user",
                    "content": f"OBSERVATION: User responded: {user_response}"
                })
                continue

            # Any other action: execute it through the tool registry
            self._set_state(AgentState.EXECUTING)
            self._log(f"Executing: {action}({params})", "action")

            result = self._execute_tool(action, params)
            step.tool_result = result
            self.steps.append(step)

            if self.on_step:
                self.on_step(step)

            # Truncate long results for display (full result still goes to
            # the conversation below)
            display_result = result[:200] + "..." if len(result) > 200 else result
            self._log(f"Result: {display_result}", "result")

            # Add to conversation
            self.conversation.append({
                "role": "assistant",
                "content": f"THOUGHT: {thought}\nACTION: {action}\nPARAMS: {json.dumps(params)}"
            })
            self.conversation.append({
                "role": "user",
                "content": f"OBSERVATION: {result}"
            })

        # Max steps reached without task_complete — report failure
        self._set_state(AgentState.ERROR)
        self._log(f"Max steps ({self.max_steps}) reached", "warning")

        return AgentResult(
            success=False,
            summary="Max steps reached without completing task",
            steps=self.steps,
            error=f"Exceeded maximum of {self.max_steps} steps"
        )
|
||||
|
||||
def _build_prompt(self) -> str:
|
||||
"""Build the full prompt from conversation history."""
|
||||
parts = []
|
||||
for msg in self.conversation:
|
||||
role = msg["role"]
|
||||
content = msg["content"]
|
||||
|
||||
if role == "system":
|
||||
parts.append(f"<|im_start|>system\n{content}<|im_end|>")
|
||||
elif role == "user":
|
||||
parts.append(f"<|im_start|>user\n{content}<|im_end|>")
|
||||
elif role == "assistant":
|
||||
parts.append(f"<|im_start|>assistant\n{content}<|im_end|>")
|
||||
|
||||
parts.append("<|im_start|>assistant\n")
|
||||
return "\n".join(parts)
|
||||
|
||||
def get_steps_summary(self) -> str:
|
||||
"""Get a formatted summary of all steps taken."""
|
||||
if not self.steps:
|
||||
return "No steps executed"
|
||||
|
||||
lines = []
|
||||
for i, step in enumerate(self.steps, 1):
|
||||
lines.append(f"Step {i}:")
|
||||
lines.append(f" Thought: {step.thought[:80]}...")
|
||||
if step.tool_name:
|
||||
lines.append(f" Action: {step.tool_name}")
|
||||
if step.tool_result:
|
||||
result_preview = step.tool_result[:80] + "..." if len(step.tool_result) > 80 else step.tool_result
|
||||
lines.append(f" Result: {result_preview}")
|
||||
lines.append("")
|
||||
|
||||
return "\n".join(lines)
|
||||
2804
core/android_exploit.py
Normal file
2804
core/android_exploit.py
Normal file
File diff suppressed because it is too large
Load Diff
1910
core/android_protect.py
Normal file
1910
core/android_protect.py
Normal file
File diff suppressed because it is too large
Load Diff
665
core/autonomy.py
Normal file
665
core/autonomy.py
Normal file
@@ -0,0 +1,665 @@
|
||||
"""
|
||||
AUTARCH Autonomy Daemon
|
||||
Background loop that monitors threats, evaluates rules, and dispatches
|
||||
AI-driven responses across all categories (defense, offense, counter,
|
||||
analyze, OSINT, simulate).
|
||||
|
||||
The daemon ties together:
|
||||
- ThreatMonitor (threat data gathering)
|
||||
- RulesEngine (condition-action evaluation)
|
||||
- ModelRouter (SLM/SAM/LAM model tiers)
|
||||
- Agent (autonomous task execution)
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import threading
|
||||
import time
|
||||
import uuid
|
||||
from collections import deque
|
||||
from dataclasses import dataclass, field, asdict
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import List, Dict, Any, Optional, Deque
|
||||
|
||||
from .config import get_config
|
||||
from .rules import RulesEngine, Rule
|
||||
from .model_router import get_model_router, ModelTier
|
||||
|
||||
_logger = logging.getLogger('autarch.autonomy')
|
||||
|
||||
|
||||
@dataclass
class ActivityEntry:
    """Single entry in the autonomy activity log."""
    # Unique entry id (uuid fragment).
    id: str
    # ISO-format wall-clock time the entry was recorded.
    timestamp: str
    # Rule that triggered the action, when applicable.
    rule_id: Optional[str] = None
    rule_name: Optional[str] = None
    # Model tier used ("slm"/"sam"/"lam"-style value), when an agent ran.
    tier: Optional[str] = None
    # Action type (e.g. 'block_ip', 'system', 'error') and short detail text.
    action_type: str = ''
    action_detail: str = ''
    # Outcome text and success flag.
    result: str = ''
    success: bool = True
    # Wall-clock duration of the action, if measured.
    duration_ms: Optional[int] = None

    def to_dict(self) -> dict:
        """Return a plain-dict view (for JSON persistence / SSE payloads)."""
        return asdict(self)
|
||||
|
||||
|
||||
class AutonomyDaemon:
|
||||
"""Background daemon for autonomous threat response.
|
||||
|
||||
Lifecycle: start() -> pause()/resume() -> stop()
|
||||
"""
|
||||
|
||||
LOG_PATH = Path(__file__).parent.parent / 'data' / 'autonomy_log.json'
|
||||
|
||||
    def __init__(self, config=None):
        """Wire up the rules engine, state flags, agent tracking and activity log.

        Args:
            config: Optional config object; defaults to the global get_config().
        """
        self.config = config or get_config()
        self.rules_engine = RulesEngine()
        self._router = None  # Lazy — get_model_router() on start

        # Daemon thread state
        self._thread: Optional[threading.Thread] = None
        self._running = False
        self._paused = False
        self._stop_event = threading.Event()

        # Agent tracking: id -> worker thread, guarded by _agent_lock
        self._active_agents: Dict[str, threading.Thread] = {}
        self._agent_lock = threading.Lock()

        # Activity log (ring buffer): bounded deque so memory stays flat
        settings = self.config.get_autonomy_settings()
        max_entries = settings.get('log_max_entries', 1000)
        self._activity: Deque[ActivityEntry] = deque(maxlen=max_entries)
        self._activity_lock = threading.Lock()

        # SSE subscribers (live-event listeners), guarded by _sub_lock
        self._subscribers: List = []
        self._sub_lock = threading.Lock()

        # Load persisted log from LOG_PATH, if present
        self._load_log()
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Lifecycle
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
@property
|
||||
def status(self) -> dict:
|
||||
"""Current daemon status."""
|
||||
settings = self.config.get_autonomy_settings()
|
||||
with self._agent_lock:
|
||||
active = len(self._active_agents)
|
||||
return {
|
||||
'running': self._running,
|
||||
'paused': self._paused,
|
||||
'enabled': settings['enabled'],
|
||||
'monitor_interval': settings['monitor_interval'],
|
||||
'rule_eval_interval': settings['rule_eval_interval'],
|
||||
'active_agents': active,
|
||||
'max_agents': settings['max_concurrent_agents'],
|
||||
'rules_count': len(self.rules_engine.get_all_rules()),
|
||||
'activity_count': len(self._activity),
|
||||
}
|
||||
|
||||
def start(self) -> bool:
|
||||
"""Start the autonomy daemon background thread."""
|
||||
if self._running:
|
||||
_logger.warning('[Autonomy] Already running')
|
||||
return False
|
||||
|
||||
self._router = get_model_router()
|
||||
self._running = True
|
||||
self._paused = False
|
||||
self._stop_event.clear()
|
||||
|
||||
self._thread = threading.Thread(
|
||||
target=self._run_loop,
|
||||
name='AutonomyDaemon',
|
||||
daemon=True,
|
||||
)
|
||||
self._thread.start()
|
||||
self._log_activity('system', 'Autonomy daemon started')
|
||||
_logger.info('[Autonomy] Daemon started')
|
||||
return True
|
||||
|
||||
def stop(self):
|
||||
"""Stop the daemon and wait for thread exit."""
|
||||
if not self._running:
|
||||
return
|
||||
self._running = False
|
||||
self._stop_event.set()
|
||||
if self._thread and self._thread.is_alive():
|
||||
self._thread.join(timeout=10)
|
||||
self._log_activity('system', 'Autonomy daemon stopped')
|
||||
_logger.info('[Autonomy] Daemon stopped')
|
||||
|
||||
def pause(self):
|
||||
"""Pause rule evaluation (monitoring continues)."""
|
||||
self._paused = True
|
||||
self._log_activity('system', 'Autonomy paused')
|
||||
_logger.info('[Autonomy] Paused')
|
||||
|
||||
def resume(self):
|
||||
"""Resume rule evaluation."""
|
||||
self._paused = False
|
||||
self._log_activity('system', 'Autonomy resumed')
|
||||
_logger.info('[Autonomy] Resumed')
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Main loop
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
    def _run_loop(self):
        """Background loop: gather context, evaluate rules, dispatch.

        Context is gathered every monitor_interval seconds; rules are
        evaluated at the slower rule_eval_interval cadence and skipped
        entirely while paused. Per-cycle exceptions are logged and the
        loop keeps going so a single bad cycle never kills the daemon.
        """
        settings = self.config.get_autonomy_settings()
        monitor_interval = settings['monitor_interval']
        rule_eval_interval = settings['rule_eval_interval']
        last_rule_eval = 0  # epoch seconds of the last rule evaluation

        while self._running and not self._stop_event.is_set():
            try:
                # Gather threat context every cycle
                context = self._gather_context()

                # Evaluate rules at a slower cadence
                now = time.time()
                if not self._paused and (now - last_rule_eval) >= rule_eval_interval:
                    last_rule_eval = now
                    self._evaluate_and_dispatch(context)

            except Exception as e:
                # Broad catch is deliberate: keep the daemon alive, record it.
                _logger.error(f'[Autonomy] Loop error: {e}')
                self._log_activity('error', f'Loop error: {e}', success=False)

            # Sleep in short increments so stop is responsive
            self._stop_event.wait(timeout=monitor_interval)
|
||||
|
||||
    def _gather_context(self) -> Dict[str, Any]:
        """Gather current threat context from ThreatMonitor.

        Every probe is individually fenced: a failing monitor call degrades
        to a benign default instead of aborting the cycle, so the returned
        dict always carries the full key set (timestamp, connections,
        connection_count, bandwidth, arp_alerts, new_ports, threat_score,
        ddos, scan_indicators) — except when the monitor module itself is
        missing, in which case only 'timestamp' is returned.
        """
        try:
            from modules.defender_monitor import get_threat_monitor
            tm = get_threat_monitor()
        except ImportError:
            _logger.warning('[Autonomy] ThreatMonitor not available')
            return {'timestamp': datetime.now().isoformat()}

        context: Dict[str, Any] = {
            'timestamp': datetime.now().isoformat(),
        }

        try:
            context['connections'] = tm.get_connections()
            context['connection_count'] = len(context['connections'])
        except Exception:
            context['connections'] = []
            context['connection_count'] = 0

        try:
            context['bandwidth'] = {}
            bw = tm.get_bandwidth()
            if bw:
                # Aggregate per-interface byte deltas into Mbps totals.
                # NOTE(review): assumes deltas are bytes over a 1-second
                # window — confirm against ThreatMonitor.get_bandwidth().
                total_rx = sum(iface.get('rx_delta', 0) for iface in bw)
                total_tx = sum(iface.get('tx_delta', 0) for iface in bw)
                context['bandwidth'] = {
                    'rx_mbps': (total_rx * 8) / 1_000_000,
                    'tx_mbps': (total_tx * 8) / 1_000_000,
                    'interfaces': bw,
                }
        except Exception:
            context['bandwidth'] = {'rx_mbps': 0, 'tx_mbps': 0}

        try:
            context['arp_alerts'] = tm.check_arp_spoofing()
        except Exception:
            context['arp_alerts'] = []

        try:
            context['new_ports'] = tm.check_new_listening_ports()
        except Exception:
            context['new_ports'] = []

        try:
            context['threat_score'] = tm.calculate_threat_score()
        except Exception:
            context['threat_score'] = {'score': 0, 'level': 'LOW', 'details': []}

        try:
            context['ddos'] = tm.detect_ddos()
        except Exception:
            context['ddos'] = {'under_attack': False}

        try:
            context['scan_indicators'] = tm.check_port_scan_indicators()
            # Rules compare against a number; collapse a list form to a count.
            if isinstance(context['scan_indicators'], list):
                context['scan_indicators'] = len(context['scan_indicators'])
        except Exception:
            context['scan_indicators'] = 0

        return context
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Rule evaluation and dispatch
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _evaluate_and_dispatch(self, context: Dict[str, Any]):
|
||||
"""Evaluate rules and dispatch matching actions."""
|
||||
matches = self.rules_engine.evaluate(context)
|
||||
|
||||
for rule, resolved_actions in matches:
|
||||
for action in resolved_actions:
|
||||
action_type = action.get('type', '')
|
||||
_logger.info(f'[Autonomy] Rule "{rule.name}" triggered -> {action_type}')
|
||||
|
||||
if self._is_agent_action(action_type):
|
||||
self._dispatch_agent(rule, action, context)
|
||||
else:
|
||||
self._dispatch_direct(rule, action, context)
|
||||
|
||||
def _is_agent_action(self, action_type: str) -> bool:
|
||||
"""Check if an action requires an AI agent."""
|
||||
return action_type in ('run_module', 'counter_scan', 'escalate_to_lam')
|
||||
|
||||
    def _dispatch_direct(self, rule: Rule, action: dict, context: dict):
        """Execute a simple action directly (no LLM needed).

        Dispatches on action['type'] to the matching _action_* helper,
        measures duration, and records the outcome in the activity log.
        Unknown action types and raised exceptions are logged as failures;
        nothing propagates to the caller.
        """
        action_type = action.get('type', '')
        start = time.time()
        success = True
        result = ''

        try:
            if action_type == 'block_ip':
                result = self._action_block_ip(action.get('ip', ''))

            elif action_type == 'unblock_ip':
                result = self._action_unblock_ip(action.get('ip', ''))

            elif action_type == 'rate_limit_ip':
                result = self._action_rate_limit(
                    action.get('ip', ''),
                    action.get('rate', '10/s'),
                )

            elif action_type == 'block_port':
                result = self._action_block_port(
                    action.get('port', ''),
                    action.get('direction', 'inbound'),
                )

            elif action_type == 'kill_process':
                result = self._action_kill_process(action.get('pid', ''))

            elif action_type in ('alert', 'log_event'):
                # Pure logging actions — the configured message is the result.
                result = action.get('message', 'No message')

            elif action_type == 'run_shell':
                result = self._action_run_shell(action.get('command', ''))

            else:
                result = f'Unknown action type: {action_type}'
                success = False

        except Exception as e:
            result = f'Error: {e}'
            success = False

        duration = int((time.time() - start) * 1000)
        # Pick the most informative short detail for the log entry.
        # NOTE(review): assumes 'port' is absent or string-like here; only the
        # message fallback is truncated to 80 chars — confirm rule schema.
        detail = action.get('ip', '') or action.get('port', '') or action.get('message', '')[:80]
        self._log_activity(
            action_type, detail,
            rule_id=rule.id, rule_name=rule.name,
            result=result, success=success, duration_ms=duration,
        )
|
||||
|
||||
    def _dispatch_agent(self, rule: Rule, action: dict, context: dict):
        """Spawn an AI agent thread to handle a complex action.

        Reaps finished agent threads, enforces the max_concurrent_agents
        cap (excess triggers are logged and dropped), picks the model tier
        (LAM for escalate_to_lam, SAM otherwise), and launches _run_agent
        on a daemon thread tracked in _active_agents.
        """
        settings = self.config.get_autonomy_settings()
        max_agents = settings['max_concurrent_agents']

        # Clean finished agents, then check the cap under the same lock
        with self._agent_lock:
            self._active_agents = {
                k: v for k, v in self._active_agents.items()
                if v.is_alive()
            }
            if len(self._active_agents) >= max_agents:
                _logger.warning('[Autonomy] Max agents reached, skipping')
                self._log_activity(
                    action.get('type', 'agent'), 'Skipped: max agents reached',
                    rule_id=rule.id, rule_name=rule.name,
                    success=False,
                )
                return

        # Short id used for log correlation and the thread name
        agent_id = str(uuid.uuid4())[:8]
        action_type = action.get('type', '')

        # Determine tier: explicit escalation gets the large model
        if action_type == 'escalate_to_lam':
            tier = ModelTier.LAM
        else:
            tier = ModelTier.SAM

        t = threading.Thread(
            target=self._run_agent,
            args=(agent_id, tier, rule, action, context),
            name=f'Agent-{agent_id}',
            daemon=True,
        )

        with self._agent_lock:
            self._active_agents[agent_id] = t

        t.start()
        self._log_activity(
            action_type, f'Agent {agent_id} spawned ({tier.value})',
            rule_id=rule.id, rule_name=rule.name, tier=tier.value,
        )
|
||||
|
||||
    def _run_agent(self, agent_id: str, tier: ModelTier, rule: Rule,
                   action: dict, context: dict):
        """Execute an agent task in a background thread.

        Builds a natural-language task from the action, obtains a loaded
        model for the requested tier (falling back across SAM/LAM), runs an
        Agent on it, and records the outcome with timing. Always removes
        itself from _active_agents on exit.
        """
        # Local imports avoid a circular import at module load time
        from .agent import Agent
        from .tools import get_tool_registry

        action_type = action.get('type', '')
        start = time.time()

        # Build task prompt from the action payload
        if action_type == 'run_module':
            module = action.get('module', '')
            args = action.get('args', '')
            task = f'Run the AUTARCH module "{module}" with arguments: {args}'

        elif action_type == 'counter_scan':
            target = action.get('target', '')
            task = f'Perform a counter-scan against {target}. Gather reconnaissance and identify vulnerabilities.'

        elif action_type == 'escalate_to_lam':
            task = action.get('task', 'Analyze the current threat landscape and recommend actions.')

        else:
            task = f'Execute action: {action_type} with params: {json.dumps(action)}'

        # Get LLM instance for the requested tier
        router = self._router or get_model_router()
        llm_inst = router.get_instance(tier)

        if llm_inst is None or not llm_inst.is_loaded:
            # Requested tier unavailable — try the other tiers in order
            for fallback in (ModelTier.SAM, ModelTier.LAM):
                llm_inst = router.get_instance(fallback)
                if llm_inst and llm_inst.is_loaded:
                    tier = fallback
                    break
            else:
                # for/else: no loaded model anywhere — log and give up
                self._log_activity(
                    action_type, f'Agent {agent_id}: no model loaded',
                    rule_id=rule.id, rule_name=rule.name,
                    tier=tier.value, success=False,
                    result='No model available for agent execution',
                )
                return

        try:
            agent = Agent(
                llm=llm_inst,
                tools=get_tool_registry(),
                max_steps=15,
                verbose=False,
            )
            result = agent.run(task)
            duration = int((time.time() - start) * 1000)

            self._log_activity(
                action_type,
                f'Agent {agent_id}: {result.summary[:100]}',
                rule_id=rule.id, rule_name=rule.name,
                tier=tier.value, success=result.success,
                result=result.summary, duration_ms=duration,
            )

        except Exception as e:
            duration = int((time.time() - start) * 1000)
            _logger.error(f'[Autonomy] Agent {agent_id} failed: {e}')
            self._log_activity(
                action_type, f'Agent {agent_id} failed: {e}',
                rule_id=rule.id, rule_name=rule.name,
                tier=tier.value, success=False,
                result=str(e), duration_ms=duration,
            )

        finally:
            # Always drop our tracking entry, even on unexpected errors
            with self._agent_lock:
                self._active_agents.pop(agent_id, None)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Direct action implementations
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _action_block_ip(self, ip: str) -> str:
|
||||
if not ip:
|
||||
return 'No IP specified'
|
||||
try:
|
||||
from modules.defender_monitor import get_threat_monitor
|
||||
tm = get_threat_monitor()
|
||||
tm.auto_block_ip(ip)
|
||||
return f'Blocked {ip}'
|
||||
except Exception as e:
|
||||
return f'Block failed: {e}'
|
||||
|
||||
def _action_unblock_ip(self, ip: str) -> str:
|
||||
if not ip:
|
||||
return 'No IP specified'
|
||||
try:
|
||||
import subprocess, platform
|
||||
if platform.system() == 'Windows':
|
||||
cmd = f'netsh advfirewall firewall delete rule name="AUTARCH Block {ip}"'
|
||||
else:
|
||||
cmd = f'iptables -D INPUT -s {ip} -j DROP 2>/dev/null; iptables -D OUTPUT -d {ip} -j DROP 2>/dev/null'
|
||||
subprocess.run(cmd, shell=True, capture_output=True, timeout=10)
|
||||
return f'Unblocked {ip}'
|
||||
except Exception as e:
|
||||
return f'Unblock failed: {e}'
|
||||
|
||||
def _action_rate_limit(self, ip: str, rate: str) -> str:
|
||||
if not ip:
|
||||
return 'No IP specified'
|
||||
try:
|
||||
from modules.defender_monitor import get_threat_monitor
|
||||
tm = get_threat_monitor()
|
||||
tm.apply_rate_limit(ip)
|
||||
return f'Rate limited {ip} at {rate}'
|
||||
except Exception as e:
|
||||
return f'Rate limit failed: {e}'
|
||||
|
||||
def _action_block_port(self, port: str, direction: str) -> str:
|
||||
if not port:
|
||||
return 'No port specified'
|
||||
try:
|
||||
import subprocess, platform
|
||||
if platform.system() == 'Windows':
|
||||
d = 'in' if direction == 'inbound' else 'out'
|
||||
cmd = f'netsh advfirewall firewall add rule name="AUTARCH Block Port {port}" dir={d} action=block protocol=TCP localport={port}'
|
||||
else:
|
||||
chain = 'INPUT' if direction == 'inbound' else 'OUTPUT'
|
||||
cmd = f'iptables -A {chain} -p tcp --dport {port} -j DROP'
|
||||
subprocess.run(cmd, shell=True, capture_output=True, timeout=10)
|
||||
return f'Blocked port {port} ({direction})'
|
||||
except Exception as e:
|
||||
return f'Block port failed: {e}'
|
||||
|
||||
def _action_kill_process(self, pid: str) -> str:
|
||||
if not pid:
|
||||
return 'No PID specified'
|
||||
try:
|
||||
import subprocess, platform
|
||||
if platform.system() == 'Windows':
|
||||
cmd = f'taskkill /F /PID {pid}'
|
||||
else:
|
||||
cmd = f'kill -9 {pid}'
|
||||
subprocess.run(cmd, shell=True, capture_output=True, timeout=10)
|
||||
return f'Killed process {pid}'
|
||||
except Exception as e:
|
||||
return f'Kill failed: {e}'
|
||||
|
||||
def _action_run_shell(self, command: str) -> str:
|
||||
if not command:
|
||||
return 'No command specified'
|
||||
try:
|
||||
import subprocess
|
||||
result = subprocess.run(
|
||||
command, shell=True, capture_output=True,
|
||||
text=True, timeout=30,
|
||||
)
|
||||
output = result.stdout[:500]
|
||||
if result.returncode != 0:
|
||||
output += f'\n[exit {result.returncode}]'
|
||||
return output.strip() or '[no output]'
|
||||
except Exception as e:
|
||||
return f'Shell failed: {e}'
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Activity log
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _log_activity(self, action_type: str, detail: str, *,
                  rule_id: Optional[str] = None, rule_name: Optional[str] = None,
                  tier: Optional[str] = None, result: str = '',
                  success: bool = True, duration_ms: Optional[int] = None):
    """Add an entry to the activity log and notify SSE subscribers.

    Args:
        action_type: Category of the action performed (e.g. block/kill).
        detail: Human-readable description of what happened.
        rule_id / rule_name / tier: Originating autonomy rule, if any.
        result: Output or error text from the action.
        success: Whether the action completed without error.
        duration_ms: How long the action took, when measured.
    """
    entry = ActivityEntry(
        # Short random id is enough for UI correlation purposes.
        id=str(uuid.uuid4())[:8],
        timestamp=datetime.now().isoformat(),
        rule_id=rule_id,
        rule_name=rule_name,
        tier=tier,
        action_type=action_type,
        action_detail=detail,
        result=result,
        success=success,
        duration_ms=duration_ms,
    )

    with self._activity_lock:
        self._activity.append(entry)

    # Notify SSE subscribers (outside the lock -- queue puts may block briefly)
    self._notify_subscribers(entry)

    # Persist periodically (every 10 entries)
    # NOTE(review): len(self._activity) is read without the lock here; under
    # concurrent appends a multiple-of-10 boundary could be skipped -- confirm
    # this best-effort persistence cadence is acceptable.
    if len(self._activity) % 10 == 0:
        self._save_log()
|
||||
|
||||
def get_activity(self, limit: int = 50, offset: int = 0) -> List[dict]:
    """Return recent activity entries as dicts, newest first.

    Args:
        limit: Maximum number of entries to return.
        offset: Number of newest entries to skip (for paging).
    """
    with self._activity_lock:
        snapshot = list(self._activity)
    newest_first = snapshot[::-1]
    window = newest_first[offset:offset + limit]
    return [item.to_dict() for item in window]
|
||||
|
||||
def get_activity_count(self) -> int:
    """Return the total number of entries in the activity log.

    Takes the activity lock so the count is consistent with concurrent
    appends in _log_activity; the previous bare ``len()`` was the only
    unlocked read of ``_activity`` in the query API.
    """
    with self._activity_lock:
        return len(self._activity)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# SSE streaming
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def subscribe(self):
    """Register and return a new bounded queue for an SSE listener.

    The queue holds at most 100 pending events; a subscriber whose queue
    overflows is dropped by _notify_subscribers.
    """
    import queue
    subscriber_q = queue.Queue(maxsize=100)
    with self._sub_lock:
        self._subscribers.append(subscriber_q)
    return subscriber_q
|
||||
|
||||
def unsubscribe(self, q):
    """Detach an SSE subscriber queue; unknown queues are ignored."""
    with self._sub_lock:
        if q in self._subscribers:
            self._subscribers.remove(q)
|
||||
|
||||
def _notify_subscribers(self, entry: ActivityEntry):
    """Fan out *entry* (JSON-encoded) to every SSE subscriber queue.

    A subscriber whose queue rejects the put (typically queue.Full because
    the client stopped reading) is treated as dead and removed.
    """
    payload = json.dumps(entry.to_dict())

    def _offer(subscriber_q):
        # Non-blocking put; any failure marks the subscriber as dead.
        try:
            subscriber_q.put_nowait(payload)
            return True
        except Exception:
            return False

    with self._sub_lock:
        stale = [q for q in self._subscribers if not _offer(q)]
        for q in stale:
            if q in self._subscribers:
                self._subscribers.remove(q)
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Persistence
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _save_log(self):
    """Persist the activity log to LOG_PATH as JSON (best effort).

    Only the newest 200 entries are written. Any failure is logged and
    swallowed so persistence problems can never take the daemon down.
    """
    try:
        self.LOG_PATH.parent.mkdir(parents=True, exist_ok=True)
        # Snapshot under the lock; serialization and disk I/O happen outside it.
        with self._activity_lock:
            entries = [e.to_dict() for e in self._activity]
        self.LOG_PATH.write_text(
            json.dumps({'entries': entries[-200:]}, indent=2),
            encoding='utf-8',
        )
    except Exception as e:
        _logger.error(f'[Autonomy] Failed to save log: {e}')
|
||||
|
||||
def _load_log(self):
    """Load persisted activity log entries from LOG_PATH, if present.

    Tolerates missing fields (older log formats) by substituting safe
    defaults; any parse/IO error is logged and swallowed.
    """
    if not self.LOG_PATH.exists():
        return
    try:
        data = json.loads(self.LOG_PATH.read_text(encoding='utf-8'))
        for entry_dict in data.get('entries', []):
            entry = ActivityEntry(
                # Regenerate a short id if the stored entry lacks one.
                id=entry_dict.get('id', str(uuid.uuid4())[:8]),
                timestamp=entry_dict.get('timestamp', ''),
                rule_id=entry_dict.get('rule_id'),
                rule_name=entry_dict.get('rule_name'),
                tier=entry_dict.get('tier'),
                action_type=entry_dict.get('action_type', ''),
                action_detail=entry_dict.get('action_detail', ''),
                result=entry_dict.get('result', ''),
                success=entry_dict.get('success', True),
                duration_ms=entry_dict.get('duration_ms'),
            )
            self._activity.append(entry)
        _logger.info(f'[Autonomy] Loaded {len(self._activity)} log entries')
    except Exception as e:
        _logger.error(f'[Autonomy] Failed to load log: {e}')
|
||||
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Singleton
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
# Lazily-created process-wide singleton; see get_autonomy_daemon().
_daemon_instance: Optional[AutonomyDaemon] = None
|
||||
|
||||
|
||||
def get_autonomy_daemon() -> AutonomyDaemon:
    """Return the global AutonomyDaemon, creating it on first use.

    NOTE(review): creation is not guarded by a lock; concurrent first calls
    could construct two daemons -- confirm single-threaded startup.
    """
    global _daemon_instance
    if _daemon_instance is not None:
        return _daemon_instance
    _daemon_instance = AutonomyDaemon()
    return _daemon_instance
|
||||
|
||||
|
||||
def reset_autonomy_daemon():
    """Stop the global daemon (if one exists) and clear the singleton."""
    global _daemon_instance
    daemon = _daemon_instance
    if daemon is not None:
        daemon.stop()
        _daemon_instance = None
|
||||
49
core/banner.py
Normal file
49
core/banner.py
Normal file
@@ -0,0 +1,49 @@
|
||||
"""
|
||||
AUTARCH Banner Module
|
||||
Displays the main ASCII banner for the framework
|
||||
"""
|
||||
|
||||
# ANSI color codes
|
||||
class Colors:
    """ANSI escape sequences used for coloured terminal output.

    Bright (90-series) foreground colours plus text attributes; emit
    RESET after any styled text to restore the terminal defaults.
    """
    RED = '\033[91m'
    GREEN = '\033[92m'
    YELLOW = '\033[93m'
    BLUE = '\033[94m'
    MAGENTA = '\033[95m'
    CYAN = '\033[96m'
    WHITE = '\033[97m'
    BOLD = '\033[1m'     # bold / increased intensity
    DIM = '\033[2m'      # faint / decreased intensity
    RESET = '\033[0m'    # clear all colours and attributes
|
||||
|
||||
|
||||
BANNER = f"""{Colors.RED}{Colors.BOLD}
|
||||
▄▄▄ █ ██ ▄▄▄█████▓ ▄▄▄ ██▀███ ▄████▄ ██░ ██
|
||||
▒████▄ ██ ▓██▒▓ ██▒ ▓▒▒████▄ ▓██ ▒ ██▒▒██▀ ▀█ ▓██░ ██▒
|
||||
▒██ ▀█▄ ▓██ ▒██░▒ ▓██░ ▒░▒██ ▀█▄ ▓██ ░▄█ ▒▒▓█ ▄ ▒██▀▀██░
|
||||
░██▄▄▄▄██ ▓▓█ ░██░░ ▓██▓ ░ ░██▄▄▄▄██ ▒██▀▀█▄ ▒▓▓▄ ▄██▒░▓█ ░██
|
||||
▓█ ▓██▒▒▒█████▓ ▒██▒ ░ ▓█ ▓██▒░██▓ ▒██▒▒ ▓███▀ ░░▓█▒░██▓
|
||||
▒▒ ▓▒█░░▒▓▒ ▒ ▒ ▒ ░░ ▒▒ ▓▒█░░ ▒▓ ░▒▓░░ ░▒ ▒ ░ ▒ ░░▒░▒
|
||||
▒ ▒▒ ░░░▒░ ░ ░ ░ ▒ ▒▒ ░ ░▒ ░ ▒░ ░ ▒ ▒ ░▒░ ░
|
||||
░ ▒ ░░░ ░ ░ ░ ░ ▒ ░░ ░ ░ ░ ░░ ░
|
||||
░ ░ ░ ░ ░ ░ ░ ░ ░ ░ ░
|
||||
░
|
||||
{Colors.RESET}{Colors.CYAN} By darkHal and Setec Security Labs.{Colors.RESET}
|
||||
{Colors.DIM}═══════════════════════════════════════════════════════════════════{Colors.RESET}
|
||||
"""
|
||||
|
||||
|
||||
def display_banner():
    """Print the AUTARCH ASCII-art banner (module-level BANNER) to stdout."""
    print(BANNER)
|
||||
|
||||
|
||||
def clear_screen():
    """Clear the terminal screen (`clear` on POSIX, `cls` elsewhere)."""
    import os
    if os.name == 'posix':
        os.system('clear')
    else:
        os.system('cls')
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Manual smoke test: render the banner on a cleared terminal.
    clear_screen()
    display_banner()
|
||||
586
core/config.py
Normal file
586
core/config.py
Normal file
@@ -0,0 +1,586 @@
|
||||
"""
|
||||
AUTARCH Configuration Handler
|
||||
Manages the autarch_settings.conf file for llama.cpp settings
|
||||
"""
|
||||
|
||||
import os
|
||||
import configparser
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
class Config:
|
||||
"""Configuration manager for AUTARCH settings."""
|
||||
|
||||
DEFAULT_CONFIG = {
|
||||
'llama': {
|
||||
'model_path': '',
|
||||
'n_ctx': '4096',
|
||||
'n_threads': '4',
|
||||
'n_gpu_layers': '0',
|
||||
'gpu_backend': 'cpu',
|
||||
'temperature': '0.7',
|
||||
'top_p': '0.9',
|
||||
'top_k': '40',
|
||||
'repeat_penalty': '1.1',
|
||||
'max_tokens': '2048',
|
||||
'seed': '-1',
|
||||
},
|
||||
'autarch': {
|
||||
'first_run': 'true',
|
||||
'modules_path': 'modules',
|
||||
'verbose': 'false',
|
||||
'quiet': 'false',
|
||||
'no_banner': 'false',
|
||||
'llm_backend': 'local',
|
||||
},
|
||||
'claude': {
|
||||
'api_key': '',
|
||||
'model': 'claude-sonnet-4-20250514',
|
||||
'max_tokens': '4096',
|
||||
'temperature': '0.7',
|
||||
},
|
||||
'osint': {
|
||||
'max_threads': '8',
|
||||
'timeout': '8',
|
||||
'include_nsfw': 'false',
|
||||
},
|
||||
'pentest': {
|
||||
'max_pipeline_steps': '50',
|
||||
'output_chunk_size': '2000',
|
||||
'auto_execute': 'false',
|
||||
'save_raw_output': 'true',
|
||||
},
|
||||
'transformers': {
|
||||
'model_path': '',
|
||||
'device': 'auto',
|
||||
'torch_dtype': 'auto',
|
||||
'load_in_8bit': 'false',
|
||||
'load_in_4bit': 'false',
|
||||
'trust_remote_code': 'false',
|
||||
'max_tokens': '2048',
|
||||
'temperature': '0.7',
|
||||
'top_p': '0.9',
|
||||
'top_k': '40',
|
||||
'repetition_penalty': '1.1',
|
||||
},
|
||||
'rsf': {
|
||||
'install_path': '',
|
||||
'enabled': 'true',
|
||||
'default_target': '',
|
||||
'default_port': '80',
|
||||
'execution_timeout': '120',
|
||||
},
|
||||
'upnp': {
|
||||
'enabled': 'true',
|
||||
'internal_ip': '10.0.0.26',
|
||||
'refresh_hours': '12',
|
||||
'mappings': '443:TCP,51820:UDP,8181:TCP',
|
||||
},
|
||||
'web': {
|
||||
'host': '0.0.0.0',
|
||||
'port': '8181',
|
||||
'secret_key': '',
|
||||
'mcp_port': '8081',
|
||||
},
|
||||
'revshell': {
|
||||
'enabled': 'true',
|
||||
'host': '0.0.0.0',
|
||||
'port': '17322',
|
||||
'auto_start': 'false',
|
||||
},
|
||||
'slm': {
|
||||
'enabled': 'true',
|
||||
'backend': 'local',
|
||||
'model_path': '',
|
||||
'n_ctx': '512',
|
||||
'n_gpu_layers': '-1',
|
||||
'n_threads': '2',
|
||||
},
|
||||
'sam': {
|
||||
'enabled': 'true',
|
||||
'backend': 'local',
|
||||
'model_path': '',
|
||||
'n_ctx': '2048',
|
||||
'n_gpu_layers': '-1',
|
||||
'n_threads': '4',
|
||||
},
|
||||
'lam': {
|
||||
'enabled': 'true',
|
||||
'backend': 'local',
|
||||
'model_path': '',
|
||||
'n_ctx': '4096',
|
||||
'n_gpu_layers': '-1',
|
||||
'n_threads': '4',
|
||||
},
|
||||
'autonomy': {
|
||||
'enabled': 'false',
|
||||
'monitor_interval': '3',
|
||||
'rule_eval_interval': '5',
|
||||
'max_concurrent_agents': '3',
|
||||
'threat_threshold_auto_respond': '40',
|
||||
'log_max_entries': '1000',
|
||||
},
|
||||
}
|
||||
|
||||
def __init__(self, config_path: str = None):
    """Create the manager and load (or create) the settings file.

    Args:
        config_path: Explicit path to the settings file. When omitted,
            the framework default from core.paths.get_config_path() is used.
    """
    if config_path is not None:
        self.config_path = Path(config_path)
    else:
        from core.paths import get_config_path
        self.config_path = get_config_path()

    self.config = configparser.ConfigParser()
    self._load_or_create()
|
||||
|
||||
def _load_or_create(self):
|
||||
"""Load existing config or create with defaults."""
|
||||
if self.config_path.exists():
|
||||
self.config.read(self.config_path)
|
||||
self._apply_missing_defaults()
|
||||
else:
|
||||
self._create_default_config()
|
||||
|
||||
def _apply_missing_defaults(self):
|
||||
"""Add any missing sections/keys from DEFAULT_CONFIG to the loaded config."""
|
||||
changed = False
|
||||
for section, options in self.DEFAULT_CONFIG.items():
|
||||
if section not in self.config:
|
||||
self.config[section] = options
|
||||
changed = True
|
||||
else:
|
||||
for key, value in options.items():
|
||||
if key not in self.config[section]:
|
||||
self.config[section][key] = value
|
||||
changed = True
|
||||
if changed:
|
||||
self.save()
|
||||
|
||||
def _create_default_config(self):
|
||||
"""Create a default configuration file."""
|
||||
for section, options in self.DEFAULT_CONFIG.items():
|
||||
self.config[section] = options
|
||||
self.save()
|
||||
|
||||
def save(self):
    """Write the in-memory configuration back to config_path (INI format)."""
    with open(self.config_path, 'w') as fh:
        self.config.write(fh)
|
||||
|
||||
def get(self, section: str, key: str, fallback=None):
    """Look up a configuration value, stripping surrounding quotes.

    Args:
        section: Configuration section name.
        key: Option name within the section.
        fallback: Value returned when the option is absent.

    Returns:
        The de-quoted string value, or *fallback*.
    """
    raw = self.config.get(section, key, fallback=fallback)
    if not (raw and isinstance(raw, str)):
        return raw
    # Quotes are stripped so paths with spaces that were quoted still work.
    return raw.strip().strip('"').strip("'")
|
||||
|
||||
def get_int(self, section: str, key: str, fallback: int = 0) -> int:
    """Get a configuration value coerced to int (*fallback* when absent)."""
    return self.config.getint(section, key, fallback=fallback)
|
||||
|
||||
def get_float(self, section: str, key: str, fallback: float = 0.0) -> float:
    """Get a configuration value coerced to float (*fallback* when absent)."""
    return self.config.getfloat(section, key, fallback=fallback)
|
||||
|
||||
def get_bool(self, section: str, key: str, fallback: bool = False) -> bool:
    """Get a configuration value coerced to bool (*fallback* when absent)."""
    return self.config.getboolean(section, key, fallback=fallback)
|
||||
|
||||
def set(self, section: str, key: str, value):
    """Store *value* (stringified) under [section] key, creating the
    section if it does not yet exist. Does not persist; call save().

    Args:
        section: Configuration section name.
        key: Option name within the section.
        value: Value to store; converted with str().
    """
    try:
        target = self.config[section]
    except KeyError:
        self.config[section] = {}
        target = self.config[section]
    target[key] = str(value)
|
||||
|
||||
def is_first_run(self) -> bool:
    """Return True until mark_setup_complete() has been called once."""
    return self.get_bool('autarch', 'first_run', fallback=True)
|
||||
|
||||
def mark_setup_complete(self):
    """Flip the first_run flag off and persist the change immediately."""
    self.set('autarch', 'first_run', 'false')
    self.save()
|
||||
|
||||
def get_llama_settings(self) -> dict:
|
||||
"""Get all llama.cpp settings as a dictionary.
|
||||
|
||||
Returns:
|
||||
Dictionary with llama.cpp settings properly typed
|
||||
"""
|
||||
return {
|
||||
'model_path': self.get('llama', 'model_path', ''),
|
||||
'n_ctx': self.get_int('llama', 'n_ctx', 4096),
|
||||
'n_threads': self.get_int('llama', 'n_threads', 4),
|
||||
'n_gpu_layers': self.get_int('llama', 'n_gpu_layers', 0),
|
||||
'gpu_backend': self.get('llama', 'gpu_backend', 'cpu'),
|
||||
'temperature': self.get_float('llama', 'temperature', 0.7),
|
||||
'top_p': self.get_float('llama', 'top_p', 0.9),
|
||||
'top_k': self.get_int('llama', 'top_k', 40),
|
||||
'repeat_penalty': self.get_float('llama', 'repeat_penalty', 1.1),
|
||||
'max_tokens': self.get_int('llama', 'max_tokens', 2048),
|
||||
'seed': self.get_int('llama', 'seed', -1),
|
||||
}
|
||||
|
||||
def get_osint_settings(self) -> dict:
|
||||
"""Get all OSINT settings as a dictionary.
|
||||
|
||||
Returns:
|
||||
Dictionary with OSINT settings properly typed
|
||||
"""
|
||||
return {
|
||||
'max_threads': self.get_int('osint', 'max_threads', 8),
|
||||
'timeout': self.get_int('osint', 'timeout', 8),
|
||||
'include_nsfw': self.get_bool('osint', 'include_nsfw', False),
|
||||
}
|
||||
|
||||
def get_pentest_settings(self) -> dict:
|
||||
"""Get all pentest pipeline settings as a dictionary.
|
||||
|
||||
Returns:
|
||||
Dictionary with pentest settings properly typed
|
||||
"""
|
||||
return {
|
||||
'max_pipeline_steps': self.get_int('pentest', 'max_pipeline_steps', 50),
|
||||
'output_chunk_size': self.get_int('pentest', 'output_chunk_size', 2000),
|
||||
'auto_execute': self.get_bool('pentest', 'auto_execute', False),
|
||||
'save_raw_output': self.get_bool('pentest', 'save_raw_output', True),
|
||||
}
|
||||
|
||||
def get_claude_settings(self) -> dict:
|
||||
"""Get all Claude API settings as a dictionary.
|
||||
|
||||
Returns:
|
||||
Dictionary with Claude API settings properly typed
|
||||
"""
|
||||
return {
|
||||
'api_key': self.get('claude', 'api_key', ''),
|
||||
'model': self.get('claude', 'model', 'claude-sonnet-4-20250514'),
|
||||
'max_tokens': self.get_int('claude', 'max_tokens', 4096),
|
||||
'temperature': self.get_float('claude', 'temperature', 0.7),
|
||||
}
|
||||
|
||||
def get_transformers_settings(self) -> dict:
|
||||
"""Get all transformers/safetensors settings as a dictionary.
|
||||
|
||||
Returns:
|
||||
Dictionary with transformers settings properly typed
|
||||
"""
|
||||
return {
|
||||
'model_path': self.get('transformers', 'model_path', ''),
|
||||
'device': self.get('transformers', 'device', 'auto'),
|
||||
'torch_dtype': self.get('transformers', 'torch_dtype', 'auto'),
|
||||
'load_in_8bit': self.get_bool('transformers', 'load_in_8bit', False),
|
||||
'load_in_4bit': self.get_bool('transformers', 'load_in_4bit', False),
|
||||
'llm_int8_enable_fp32_cpu_offload': self.get_bool('transformers', 'llm_int8_enable_fp32_cpu_offload', False),
|
||||
'device_map': self.get('transformers', 'device_map', 'auto'),
|
||||
'trust_remote_code': self.get_bool('transformers', 'trust_remote_code', False),
|
||||
'max_tokens': self.get_int('transformers', 'max_tokens', 2048),
|
||||
'temperature': self.get_float('transformers', 'temperature', 0.7),
|
||||
'top_p': self.get_float('transformers', 'top_p', 0.9),
|
||||
'top_k': self.get_int('transformers', 'top_k', 40),
|
||||
'repetition_penalty': self.get_float('transformers', 'repetition_penalty', 1.1),
|
||||
}
|
||||
|
||||
def get_huggingface_settings(self) -> dict:
|
||||
"""Get all HuggingFace Inference API settings as a dictionary."""
|
||||
return {
|
||||
'api_key': self.get('huggingface', 'api_key', ''),
|
||||
'model': self.get('huggingface', 'model', 'mistralai/Mistral-7B-Instruct-v0.3'),
|
||||
'endpoint': self.get('huggingface', 'endpoint', ''),
|
||||
'provider': self.get('huggingface', 'provider', 'auto'),
|
||||
'max_tokens': self.get_int('huggingface', 'max_tokens', 1024),
|
||||
'temperature': self.get_float('huggingface', 'temperature', 0.7),
|
||||
'top_p': self.get_float('huggingface', 'top_p', 0.9),
|
||||
'top_k': self.get_int('huggingface', 'top_k', 40),
|
||||
'repetition_penalty': self.get_float('huggingface', 'repetition_penalty', 1.1),
|
||||
'do_sample': self.get_bool('huggingface', 'do_sample', True),
|
||||
'seed': self.get_int('huggingface', 'seed', -1),
|
||||
'stop_sequences': self.get('huggingface', 'stop_sequences', ''),
|
||||
}
|
||||
|
||||
def get_openai_settings(self) -> dict:
|
||||
"""Get all OpenAI API settings as a dictionary."""
|
||||
return {
|
||||
'api_key': self.get('openai', 'api_key', ''),
|
||||
'base_url': self.get('openai', 'base_url', 'https://api.openai.com/v1'),
|
||||
'model': self.get('openai', 'model', 'gpt-4o'),
|
||||
'max_tokens': self.get_int('openai', 'max_tokens', 4096),
|
||||
'temperature': self.get_float('openai', 'temperature', 0.7),
|
||||
'top_p': self.get_float('openai', 'top_p', 1.0),
|
||||
'frequency_penalty': self.get_float('openai', 'frequency_penalty', 0.0),
|
||||
'presence_penalty': self.get_float('openai', 'presence_penalty', 0.0),
|
||||
}
|
||||
|
||||
def get_rsf_settings(self) -> dict:
|
||||
"""Get all RouterSploit settings as a dictionary.
|
||||
|
||||
Returns:
|
||||
Dictionary with RSF settings properly typed
|
||||
"""
|
||||
return {
|
||||
'install_path': self.get('rsf', 'install_path', ''),
|
||||
'enabled': self.get_bool('rsf', 'enabled', True),
|
||||
'default_target': self.get('rsf', 'default_target', ''),
|
||||
'default_port': self.get('rsf', 'default_port', '80'),
|
||||
'execution_timeout': self.get_int('rsf', 'execution_timeout', 120),
|
||||
}
|
||||
|
||||
def get_upnp_settings(self) -> dict:
|
||||
"""Get all UPnP settings as a dictionary."""
|
||||
return {
|
||||
'enabled': self.get_bool('upnp', 'enabled', True),
|
||||
'internal_ip': self.get('upnp', 'internal_ip', '10.0.0.26'),
|
||||
'refresh_hours': self.get_int('upnp', 'refresh_hours', 12),
|
||||
'mappings': self.get('upnp', 'mappings', ''),
|
||||
}
|
||||
|
||||
def get_revshell_settings(self) -> dict:
|
||||
"""Get all reverse shell settings as a dictionary."""
|
||||
return {
|
||||
'enabled': self.get_bool('revshell', 'enabled', True),
|
||||
'host': self.get('revshell', 'host', '0.0.0.0'),
|
||||
'port': self.get_int('revshell', 'port', 17322),
|
||||
'auto_start': self.get_bool('revshell', 'auto_start', False),
|
||||
}
|
||||
|
||||
def get_tier_settings(self, tier: str) -> dict:
|
||||
"""Get settings for a model tier (slm, sam, lam)."""
|
||||
return {
|
||||
'enabled': self.get_bool(tier, 'enabled', True),
|
||||
'backend': self.get(tier, 'backend', 'local'),
|
||||
'model_path': self.get(tier, 'model_path', ''),
|
||||
'n_ctx': self.get_int(tier, 'n_ctx', 2048),
|
||||
'n_gpu_layers': self.get_int(tier, 'n_gpu_layers', -1),
|
||||
'n_threads': self.get_int(tier, 'n_threads', 4),
|
||||
}
|
||||
|
||||
def get_slm_settings(self) -> dict:
|
||||
"""Get Small Language Model tier settings."""
|
||||
return self.get_tier_settings('slm')
|
||||
|
||||
def get_sam_settings(self) -> dict:
|
||||
"""Get Small Action Model tier settings."""
|
||||
return self.get_tier_settings('sam')
|
||||
|
||||
def get_lam_settings(self) -> dict:
|
||||
"""Get Large Action Model tier settings."""
|
||||
return self.get_tier_settings('lam')
|
||||
|
||||
def get_autonomy_settings(self) -> dict:
|
||||
"""Get autonomy daemon settings."""
|
||||
return {
|
||||
'enabled': self.get_bool('autonomy', 'enabled', False),
|
||||
'monitor_interval': self.get_int('autonomy', 'monitor_interval', 3),
|
||||
'rule_eval_interval': self.get_int('autonomy', 'rule_eval_interval', 5),
|
||||
'max_concurrent_agents': self.get_int('autonomy', 'max_concurrent_agents', 3),
|
||||
'threat_threshold_auto_respond': self.get_int('autonomy', 'threat_threshold_auto_respond', 40),
|
||||
'log_max_entries': self.get_int('autonomy', 'log_max_entries', 1000),
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def get_templates_dir() -> Path:
|
||||
"""Get the path to the configuration templates directory."""
|
||||
from core.paths import get_templates_dir
|
||||
return get_templates_dir()
|
||||
|
||||
@staticmethod
|
||||
def get_custom_configs_dir() -> Path:
|
||||
"""Get the path to the custom user configurations directory."""
|
||||
from core.paths import get_custom_configs_dir
|
||||
return get_custom_configs_dir()
|
||||
|
||||
def list_hardware_templates(self) -> list:
|
||||
"""List available hardware configuration templates.
|
||||
|
||||
Returns:
|
||||
List of tuples: (template_id, display_name, description, filename)
|
||||
"""
|
||||
templates = [
|
||||
('nvidia_4070_mobile', 'NVIDIA RTX 4070 Mobile', '8GB VRAM, CUDA, optimal for 7B-13B models', 'nvidia_4070_mobile.conf'),
|
||||
('amd_rx6700xt', 'AMD Radeon RX 6700 XT', '12GB VRAM, ROCm, optimal for 7B-13B models', 'amd_rx6700xt.conf'),
|
||||
('orangepi5plus_cpu', 'Orange Pi 5 Plus (CPU)', 'RK3588 ARM64, CPU-only, for quantized models', 'orangepi5plus_cpu.conf'),
|
||||
('orangepi5plus_mali', 'Orange Pi 5 Plus (Mali GPU)', 'EXPERIMENTAL - Mali-G610 OpenCL acceleration', 'orangepi5plus_mali.conf'),
|
||||
]
|
||||
return templates
|
||||
|
||||
def list_custom_configs(self) -> list:
|
||||
"""List user-saved custom configurations.
|
||||
|
||||
Returns:
|
||||
List of tuples: (name, filepath)
|
||||
"""
|
||||
custom_dir = self.get_custom_configs_dir()
|
||||
configs = []
|
||||
for conf_file in custom_dir.glob('*.conf'):
|
||||
name = conf_file.stem.replace('_', ' ').title()
|
||||
configs.append((name, conf_file))
|
||||
return configs
|
||||
|
||||
def load_template(self, template_id: str) -> bool:
|
||||
"""Load a hardware template into the current configuration.
|
||||
|
||||
Args:
|
||||
template_id: The template identifier (e.g., 'nvidia_4070_mobile')
|
||||
|
||||
Returns:
|
||||
True if loaded successfully, False otherwise
|
||||
"""
|
||||
templates = {t[0]: t[3] for t in self.list_hardware_templates()}
|
||||
if template_id not in templates:
|
||||
return False
|
||||
|
||||
template_path = self.get_templates_dir() / templates[template_id]
|
||||
if not template_path.exists():
|
||||
return False
|
||||
|
||||
return self._load_llm_settings_from_file(template_path)
|
||||
|
||||
def load_custom_config(self, filepath: Path) -> bool:
|
||||
"""Load a custom configuration file.
|
||||
|
||||
Args:
|
||||
filepath: Path to the custom configuration file
|
||||
|
||||
Returns:
|
||||
True if loaded successfully, False otherwise
|
||||
"""
|
||||
if not filepath.exists():
|
||||
return False
|
||||
return self._load_llm_settings_from_file(filepath)
|
||||
|
||||
def _load_llm_settings_from_file(self, filepath: Path) -> bool:
|
||||
"""Load LLM settings (llama and transformers sections) from a file.
|
||||
|
||||
Preserves model_path from current config (doesn't overwrite).
|
||||
|
||||
Args:
|
||||
filepath: Path to the configuration file
|
||||
|
||||
Returns:
|
||||
True if loaded successfully, False otherwise
|
||||
"""
|
||||
try:
|
||||
template_config = configparser.ConfigParser()
|
||||
template_config.read(filepath)
|
||||
|
||||
# Preserve current model paths
|
||||
current_llama_path = self.get('llama', 'model_path', '')
|
||||
current_transformers_path = self.get('transformers', 'model_path', '')
|
||||
|
||||
# Load llama section
|
||||
if 'llama' in template_config:
|
||||
for key, value in template_config['llama'].items():
|
||||
if key != 'model_path': # Preserve current model path
|
||||
self.set('llama', key, value)
|
||||
# Restore model path
|
||||
if current_llama_path:
|
||||
self.set('llama', 'model_path', current_llama_path)
|
||||
|
||||
# Load transformers section
|
||||
if 'transformers' in template_config:
|
||||
for key, value in template_config['transformers'].items():
|
||||
if key != 'model_path': # Preserve current model path
|
||||
self.set('transformers', key, value)
|
||||
# Restore model path
|
||||
if current_transformers_path:
|
||||
self.set('transformers', 'model_path', current_transformers_path)
|
||||
|
||||
self.save()
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def save_custom_config(self, name: str) -> Path:
|
||||
"""Save current LLM settings to a custom configuration file.
|
||||
|
||||
Args:
|
||||
name: Name for the custom configuration (will be sanitized)
|
||||
|
||||
Returns:
|
||||
Path to the saved configuration file
|
||||
"""
|
||||
# Sanitize name for filename
|
||||
safe_name = ''.join(c if c.isalnum() or c in '-_' else '_' for c in name.lower())
|
||||
safe_name = safe_name.strip('_')
|
||||
if not safe_name:
|
||||
safe_name = 'custom_config'
|
||||
|
||||
custom_dir = self.get_custom_configs_dir()
|
||||
filepath = custom_dir / f'{safe_name}.conf'
|
||||
|
||||
# Create config with just LLM settings
|
||||
custom_config = configparser.ConfigParser()
|
||||
|
||||
# Save llama settings
|
||||
custom_config['llama'] = {}
|
||||
for key in self.DEFAULT_CONFIG['llama'].keys():
|
||||
value = self.get('llama', key, '')
|
||||
if value:
|
||||
custom_config['llama'][key] = str(value)
|
||||
|
||||
# Save transformers settings
|
||||
custom_config['transformers'] = {}
|
||||
for key in self.DEFAULT_CONFIG['transformers'].keys():
|
||||
value = self.get('transformers', key, '')
|
||||
if value:
|
||||
custom_config['transformers'][key] = str(value)
|
||||
|
||||
# Add header comment
|
||||
with open(filepath, 'w') as f:
|
||||
f.write(f'# AUTARCH Custom LLM Configuration\n')
|
||||
f.write(f'# Name: {name}\n')
|
||||
f.write(f'# Saved: {Path(self.config_path).name}\n')
|
||||
f.write('#\n\n')
|
||||
custom_config.write(f)
|
||||
|
||||
return filepath
|
||||
|
||||
def delete_custom_config(self, filepath: Path) -> bool:
    """Delete a user-saved custom configuration file.

    Refuses to delete anything outside the custom-configs directory.

    Args:
        filepath: Path to the custom configuration file.

    Returns:
        True if deleted successfully, False otherwise (including OS errors).
    """
    try:
        inside_custom_dir = filepath.parent == self.get_custom_configs_dir()
        if filepath.exists() and inside_custom_dir:
            filepath.unlink()
            return True
    except Exception:
        pass
    return False
|
||||
|
||||
|
||||
# Global Config instance; created lazily by get_config().
_config = None
|
||||
|
||||
|
||||
def get_config() -> Config:
    """Return the process-wide Config, constructing it on first use."""
    global _config
    if _config is not None:
        return _config
    _config = Config()
    return _config
|
||||
869
core/cve.py
Normal file
869
core/cve.py
Normal file
@@ -0,0 +1,869 @@
|
||||
"""
|
||||
AUTARCH CVE Database Module
|
||||
SQLite-based local CVE database with NVD API synchronization
|
||||
https://nvd.nist.gov/developers/vulnerabilities
|
||||
"""
|
||||
|
||||
import os
|
||||
import json
|
||||
import time
|
||||
import sqlite3
|
||||
import platform
|
||||
import subprocess
|
||||
import urllib.request
|
||||
import urllib.parse
|
||||
import threading
|
||||
from pathlib import Path
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Optional, List, Dict, Any, Callable
|
||||
|
||||
from .banner import Colors
|
||||
from .config import get_config
|
||||
|
||||
|
||||
class CVEDatabase:
|
||||
"""SQLite-based CVE Database with NVD API synchronization."""
|
||||
|
||||
NVD_API_BASE = "https://services.nvd.nist.gov/rest/json/cves/2.0"
|
||||
DB_VERSION = 1
|
||||
RESULTS_PER_PAGE = 2000 # NVD max is 2000
|
||||
|
||||
# OS to CPE mapping for common systems
|
||||
OS_CPE_MAP = {
|
||||
'ubuntu': 'cpe:2.3:o:canonical:ubuntu_linux',
|
||||
'debian': 'cpe:2.3:o:debian:debian_linux',
|
||||
'fedora': 'cpe:2.3:o:fedoraproject:fedora',
|
||||
'centos': 'cpe:2.3:o:centos:centos',
|
||||
'rhel': 'cpe:2.3:o:redhat:enterprise_linux',
|
||||
'rocky': 'cpe:2.3:o:rockylinux:rocky_linux',
|
||||
'alma': 'cpe:2.3:o:almalinux:almalinux',
|
||||
'arch': 'cpe:2.3:o:archlinux:arch_linux',
|
||||
'opensuse': 'cpe:2.3:o:opensuse:opensuse',
|
||||
'suse': 'cpe:2.3:o:suse:suse_linux',
|
||||
'kali': 'cpe:2.3:o:kali:kali_linux',
|
||||
'mint': 'cpe:2.3:o:linuxmint:linux_mint',
|
||||
'windows': 'cpe:2.3:o:microsoft:windows',
|
||||
'macos': 'cpe:2.3:o:apple:macos',
|
||||
'darwin': 'cpe:2.3:o:apple:macos',
|
||||
}
|
||||
|
||||
# SQL Schema
|
||||
SCHEMA = """
|
||||
-- CVE main table
|
||||
CREATE TABLE IF NOT EXISTS cves (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
cve_id TEXT UNIQUE NOT NULL,
|
||||
description TEXT,
|
||||
published TEXT,
|
||||
modified TEXT,
|
||||
cvss_v3_score REAL,
|
||||
cvss_v3_severity TEXT,
|
||||
cvss_v3_vector TEXT,
|
||||
cvss_v2_score REAL,
|
||||
cvss_v2_severity TEXT,
|
||||
cvss_v2_vector TEXT
|
||||
);
|
||||
|
||||
-- CPE (affected products) table
|
||||
CREATE TABLE IF NOT EXISTS cve_cpes (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
cve_id TEXT NOT NULL,
|
||||
cpe_criteria TEXT NOT NULL,
|
||||
vulnerable INTEGER DEFAULT 1,
|
||||
version_start TEXT,
|
||||
version_end TEXT,
|
||||
FOREIGN KEY (cve_id) REFERENCES cves(cve_id)
|
||||
);
|
||||
|
||||
-- References table
|
||||
CREATE TABLE IF NOT EXISTS cve_references (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
cve_id TEXT NOT NULL,
|
||||
url TEXT NOT NULL,
|
||||
source TEXT,
|
||||
FOREIGN KEY (cve_id) REFERENCES cves(cve_id)
|
||||
);
|
||||
|
||||
-- Weaknesses (CWE) table
|
||||
CREATE TABLE IF NOT EXISTS cve_weaknesses (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
cve_id TEXT NOT NULL,
|
||||
cwe_id TEXT NOT NULL,
|
||||
FOREIGN KEY (cve_id) REFERENCES cves(cve_id)
|
||||
);
|
||||
|
||||
-- Metadata table
|
||||
CREATE TABLE IF NOT EXISTS metadata (
|
||||
key TEXT PRIMARY KEY,
|
||||
value TEXT
|
||||
);
|
||||
|
||||
-- Indexes for fast queries
|
||||
CREATE INDEX IF NOT EXISTS idx_cve_id ON cves(cve_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_cve_severity ON cves(cvss_v3_severity);
|
||||
CREATE INDEX IF NOT EXISTS idx_cve_score ON cves(cvss_v3_score);
|
||||
CREATE INDEX IF NOT EXISTS idx_cve_published ON cves(published);
|
||||
CREATE INDEX IF NOT EXISTS idx_cpe_cve ON cve_cpes(cve_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_cpe_criteria ON cve_cpes(cpe_criteria);
|
||||
CREATE INDEX IF NOT EXISTS idx_ref_cve ON cve_references(cve_id);
|
||||
CREATE INDEX IF NOT EXISTS idx_weakness_cve ON cve_weaknesses(cve_id);
|
||||
"""
|
||||
|
||||
def __init__(self, db_path: str = None):
    """Create a CVE database handle.

    Args:
        db_path: Path to the SQLite database file. When omitted, the
            default location data/cve/cve.db is used.
    """
    if db_path is not None:
        # Explicit path: derive the data directory from it.
        self.db_path = Path(db_path)
        self.data_dir = self.db_path.parent
    else:
        # Default location under the framework's data directory.
        from core.paths import get_data_dir
        self.data_dir = get_data_dir() / "cve"
        self.db_path = self.data_dir / "cve.db"

    self.data_dir.mkdir(parents=True, exist_ok=True)

    self.system_info = self._detect_system()  # host OS fingerprint for CPE matching
    self._conn = None                         # lazily-opened sqlite3 connection
    self._lock = threading.Lock()             # serializes all DB access
    self._init_database()
||||
def _get_connection(self) -> sqlite3.Connection:
|
||||
"""Get thread-safe database connection."""
|
||||
if self._conn is None:
|
||||
self._conn = sqlite3.connect(str(self.db_path), check_same_thread=False)
|
||||
self._conn.row_factory = sqlite3.Row
|
||||
return self._conn
|
||||
|
||||
def _init_database(self):
|
||||
"""Initialize database schema."""
|
||||
with self._lock:
|
||||
conn = self._get_connection()
|
||||
conn.executescript(self.SCHEMA)
|
||||
conn.commit()
|
||||
|
||||
def _detect_system(self) -> Dict[str, str]:
|
||||
"""Detect the current system information."""
|
||||
info = {
|
||||
'os_type': platform.system().lower(),
|
||||
'os_name': '',
|
||||
'os_version': '',
|
||||
'os_id': '',
|
||||
'kernel': platform.release(),
|
||||
'arch': platform.machine(),
|
||||
'cpe_prefix': '',
|
||||
}
|
||||
|
||||
if info['os_type'] == 'linux':
|
||||
os_release = Path("/etc/os-release")
|
||||
if os_release.exists():
|
||||
content = os_release.read_text()
|
||||
for line in content.split('\n'):
|
||||
if line.startswith('ID='):
|
||||
info['os_id'] = line.split('=')[1].strip('"').lower()
|
||||
elif line.startswith('VERSION_ID='):
|
||||
info['os_version'] = line.split('=')[1].strip('"')
|
||||
elif line.startswith('PRETTY_NAME='):
|
||||
info['os_name'] = line.split('=', 1)[1].strip('"')
|
||||
|
||||
if not info['os_id']:
|
||||
if Path("/etc/debian_version").exists():
|
||||
info['os_id'] = 'debian'
|
||||
elif Path("/etc/redhat-release").exists():
|
||||
info['os_id'] = 'rhel'
|
||||
elif Path("/etc/arch-release").exists():
|
||||
info['os_id'] = 'arch'
|
||||
|
||||
elif info['os_type'] == 'darwin':
|
||||
info['os_id'] = 'macos'
|
||||
try:
|
||||
result = subprocess.run(['sw_vers', '-productVersion'],
|
||||
capture_output=True, text=True, timeout=5)
|
||||
info['os_version'] = result.stdout.strip()
|
||||
except:
|
||||
pass
|
||||
|
||||
elif info['os_type'] == 'windows':
|
||||
info['os_id'] = 'windows'
|
||||
info['os_version'] = platform.version()
|
||||
info['os_name'] = platform.platform()
|
||||
|
||||
for os_key, cpe in self.OS_CPE_MAP.items():
|
||||
if os_key in info['os_id']:
|
||||
info['cpe_prefix'] = cpe
|
||||
break
|
||||
|
||||
return info
|
||||
|
||||
def get_system_info(self) -> Dict[str, str]:
    """Return a shallow copy of the detected host information."""
    # Copy so callers cannot mutate the cached detection result.
    return dict(self.system_info)
||||
def get_db_stats(self) -> Dict[str, Any]:
    """Return statistics about the local CVE database.

    Returns:
        Dict with db_path, db_size_mb, total_cves, total_cpes,
        last_sync / last_modified timestamps, and (when the query
        succeeds) a by_severity breakdown of CVE counts.
    """
    with self._lock:
        conn = self._get_connection()
        cursor = conn.cursor()

        stats = {
            'db_path': str(self.db_path),
            'db_size_mb': round(self.db_path.stat().st_size / 1024 / 1024, 2) if self.db_path.exists() else 0,
            'total_cves': 0,
            'total_cpes': 0,
            'last_sync': None,
            'last_modified': None,
        }

        try:
            cursor.execute("SELECT COUNT(*) FROM cves")
            stats['total_cves'] = cursor.fetchone()[0]

            cursor.execute("SELECT COUNT(*) FROM cve_cpes")
            stats['total_cpes'] = cursor.fetchone()[0]

            cursor.execute("SELECT value FROM metadata WHERE key = 'last_sync'")
            row = cursor.fetchone()
            if row:
                stats['last_sync'] = row[0]

            cursor.execute("SELECT value FROM metadata WHERE key = 'last_modified'")
            row = cursor.fetchone()
            if row:
                stats['last_modified'] = row[0]

            # Count by severity
            cursor.execute("""
                SELECT cvss_v3_severity, COUNT(*)
                FROM cves
                WHERE cvss_v3_severity IS NOT NULL
                GROUP BY cvss_v3_severity
            """)
            stats['by_severity'] = {row[0]: row[1] for row in cursor.fetchall()}

        except sqlite3.Error:
            # Deliberate best-effort: before the first sync the tables may
            # not exist yet, in which case the zero/None defaults are kept.
            pass

        return stats
# =========================================================================
|
||||
# NVD API METHODS
|
||||
# =========================================================================
|
||||
|
||||
def _make_nvd_request(self, params: Dict[str, str], verbose: bool = False) -> Optional[Dict]:
    """Make a request to the NVD API.

    Args:
        params: Query parameters URL-encoded onto NVD_API_BASE.
        verbose: Print the request URL and any error to stdout.

    Returns:
        Decoded JSON response dict, or None on any failure. Errors are
        never raised -- callers treat None as "retry or give up".
    """
    url = f"{self.NVD_API_BASE}?{urllib.parse.urlencode(params)}"

    if verbose:
        print(f"{Colors.DIM} API: {url[:80]}...{Colors.RESET}")

    headers = {
        'User-Agent': 'AUTARCH-Security-Framework/1.0',
        'Accept': 'application/json',
    }

    # An NVD API key (if configured) raises the rate limit substantially.
    config = get_config()
    api_key = config.get('nvd', 'api_key', fallback='')
    if api_key:
        headers['apiKey'] = api_key

    try:
        req = urllib.request.Request(url, headers=headers)
        with urllib.request.urlopen(req, timeout=60) as response:
            return json.loads(response.read().decode('utf-8'))
    except urllib.error.HTTPError as e:
        # Server answered with an error status (404, 403 rate-limit, ...).
        if verbose:
            print(f"{Colors.RED}[X] NVD API error: {e.code} - {e.reason}{Colors.RESET}")
        return None
    except urllib.error.URLError as e:
        # DNS failure, refused connection, timeout at connect stage.
        if verbose:
            print(f"{Colors.RED}[X] Network error: {e.reason}{Colors.RESET}")
        return None
    except Exception as e:
        # Catch-all (e.g. malformed JSON) so a bad response can never
        # crash a long-running sync loop.
        if verbose:
            print(f"{Colors.RED}[X] Request failed: {e}{Colors.RESET}")
        return None
||||
def _parse_cve_data(self, vuln: Dict) -> Dict:
|
||||
"""Parse CVE data from NVD API response."""
|
||||
cve_data = vuln.get('cve', {})
|
||||
cve_id = cve_data.get('id', '')
|
||||
|
||||
# Description
|
||||
descriptions = cve_data.get('descriptions', [])
|
||||
description = ''
|
||||
for desc in descriptions:
|
||||
if desc.get('lang') == 'en':
|
||||
description = desc.get('value', '')
|
||||
break
|
||||
|
||||
# CVSS scores
|
||||
metrics = cve_data.get('metrics', {})
|
||||
cvss_v3 = metrics.get('cvssMetricV31', metrics.get('cvssMetricV30', []))
|
||||
cvss_v2 = metrics.get('cvssMetricV2', [])
|
||||
|
||||
cvss_v3_score = None
|
||||
cvss_v3_severity = None
|
||||
cvss_v3_vector = None
|
||||
cvss_v2_score = None
|
||||
cvss_v2_severity = None
|
||||
cvss_v2_vector = None
|
||||
|
||||
if cvss_v3:
|
||||
cvss_data = cvss_v3[0].get('cvssData', {})
|
||||
cvss_v3_score = cvss_data.get('baseScore')
|
||||
cvss_v3_severity = cvss_data.get('baseSeverity')
|
||||
cvss_v3_vector = cvss_data.get('vectorString')
|
||||
|
||||
if cvss_v2:
|
||||
cvss_data = cvss_v2[0].get('cvssData', {})
|
||||
cvss_v2_score = cvss_data.get('baseScore')
|
||||
cvss_v2_severity = cvss_v2[0].get('baseSeverity')
|
||||
cvss_v2_vector = cvss_data.get('vectorString')
|
||||
|
||||
# CPEs (affected products)
|
||||
cpes = []
|
||||
for config in cve_data.get('configurations', []):
|
||||
for node in config.get('nodes', []):
|
||||
for match in node.get('cpeMatch', []):
|
||||
cpes.append({
|
||||
'criteria': match.get('criteria', ''),
|
||||
'vulnerable': match.get('vulnerable', True),
|
||||
'version_start': match.get('versionStartIncluding') or match.get('versionStartExcluding'),
|
||||
'version_end': match.get('versionEndIncluding') or match.get('versionEndExcluding'),
|
||||
})
|
||||
|
||||
# References
|
||||
references = [
|
||||
{'url': ref.get('url', ''), 'source': ref.get('source', '')}
|
||||
for ref in cve_data.get('references', [])
|
||||
]
|
||||
|
||||
# Weaknesses
|
||||
weaknesses = []
|
||||
for weakness in cve_data.get('weaknesses', []):
|
||||
for desc in weakness.get('description', []):
|
||||
if desc.get('lang') == 'en' and desc.get('value', '').startswith('CWE-'):
|
||||
weaknesses.append(desc.get('value'))
|
||||
|
||||
return {
|
||||
'cve_id': cve_id,
|
||||
'description': description,
|
||||
'published': cve_data.get('published', ''),
|
||||
'modified': cve_data.get('lastModified', ''),
|
||||
'cvss_v3_score': cvss_v3_score,
|
||||
'cvss_v3_severity': cvss_v3_severity,
|
||||
'cvss_v3_vector': cvss_v3_vector,
|
||||
'cvss_v2_score': cvss_v2_score,
|
||||
'cvss_v2_severity': cvss_v2_severity,
|
||||
'cvss_v2_vector': cvss_v2_vector,
|
||||
'cpes': cpes,
|
||||
'references': references,
|
||||
'weaknesses': weaknesses,
|
||||
}
|
||||
|
||||
def _insert_cve(self, conn: sqlite3.Connection, cve_data: Dict):
|
||||
"""Insert or update a CVE in the database."""
|
||||
cursor = conn.cursor()
|
||||
|
||||
# Insert/update main CVE record
|
||||
cursor.execute("""
|
||||
INSERT OR REPLACE INTO cves
|
||||
(cve_id, description, published, modified,
|
||||
cvss_v3_score, cvss_v3_severity, cvss_v3_vector,
|
||||
cvss_v2_score, cvss_v2_severity, cvss_v2_vector)
|
||||
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
""", (
|
||||
cve_data['cve_id'],
|
||||
cve_data['description'],
|
||||
cve_data['published'],
|
||||
cve_data['modified'],
|
||||
cve_data['cvss_v3_score'],
|
||||
cve_data['cvss_v3_severity'],
|
||||
cve_data['cvss_v3_vector'],
|
||||
cve_data['cvss_v2_score'],
|
||||
cve_data['cvss_v2_severity'],
|
||||
cve_data['cvss_v2_vector'],
|
||||
))
|
||||
|
||||
cve_id = cve_data['cve_id']
|
||||
|
||||
# Clear existing related data
|
||||
cursor.execute("DELETE FROM cve_cpes WHERE cve_id = ?", (cve_id,))
|
||||
cursor.execute("DELETE FROM cve_references WHERE cve_id = ?", (cve_id,))
|
||||
cursor.execute("DELETE FROM cve_weaknesses WHERE cve_id = ?", (cve_id,))
|
||||
|
||||
# Insert CPEs
|
||||
for cpe in cve_data['cpes']:
|
||||
cursor.execute("""
|
||||
INSERT INTO cve_cpes (cve_id, cpe_criteria, vulnerable, version_start, version_end)
|
||||
VALUES (?, ?, ?, ?, ?)
|
||||
""", (cve_id, cpe['criteria'], 1 if cpe['vulnerable'] else 0,
|
||||
cpe['version_start'], cpe['version_end']))
|
||||
|
||||
# Insert references (limit to 10)
|
||||
for ref in cve_data['references'][:10]:
|
||||
cursor.execute("""
|
||||
INSERT INTO cve_references (cve_id, url, source)
|
||||
VALUES (?, ?, ?)
|
||||
""", (cve_id, ref['url'], ref['source']))
|
||||
|
||||
# Insert weaknesses
|
||||
for cwe in cve_data['weaknesses']:
|
||||
cursor.execute("""
|
||||
INSERT INTO cve_weaknesses (cve_id, cwe_id)
|
||||
VALUES (?, ?)
|
||||
""", (cve_id, cwe))
|
||||
|
||||
# =========================================================================
|
||||
# DATABASE SYNC
|
||||
# =========================================================================
|
||||
|
||||
def sync_database(
    self,
    days_back: int = 120,
    full_sync: bool = False,
    progress_callback: Callable[[str, int, int], None] = None,
    verbose: bool = True
) -> Dict[str, Any]:
    """Synchronize database with NVD.

    Args:
        days_back: For incremental sync, get CVEs from last N days.
        full_sync: If True, download entire database (WARNING: slow, 200k+ CVEs).
        progress_callback: Callback function(message, current, total).
        verbose: Show progress messages.

    Returns:
        Sync statistics dictionary.
    """
    stats = {
        'started': datetime.now().isoformat(),
        'cves_processed': 0,
        'cves_added': 0,
        'cves_updated': 0,
        'errors': 0,
        'completed': False,
    }

    if verbose:
        print(f"{Colors.CYAN}[*] Starting CVE database sync...{Colors.RESET}")

    # Determine date range
    if full_sync:
        # Start from 1999 (first CVEs)
        # NOTE(review): the NVD 2.0 API restricts pubStartDate/pubEndDate to
        # a limited window per request -- confirm a 1999..today range is
        # accepted, or split full syncs into windows.
        start_date = datetime(1999, 1, 1)
        if verbose:
            print(f"{Colors.YELLOW}[!] Full sync requested - this may take a while...{Colors.RESET}")
    else:
        start_date = datetime.utcnow() - timedelta(days=days_back)

    end_date = datetime.utcnow()

    # Calculate total CVEs to fetch (estimate)
    # First request asks for a single result only to read totalResults.
    params = {
        'pubStartDate': start_date.strftime('%Y-%m-%dT00:00:00.000'),
        'pubEndDate': end_date.strftime('%Y-%m-%dT23:59:59.999'),
        'resultsPerPage': '1',
    }

    response = self._make_nvd_request(params, verbose)
    if not response:
        if verbose:
            print(f"{Colors.RED}[X] Failed to connect to NVD API{Colors.RESET}")
        return stats

    total_results = response.get('totalResults', 0)

    if verbose:
        print(f"{Colors.CYAN}[*] Found {total_results:,} CVEs to process{Colors.RESET}")

    if total_results == 0:
        stats['completed'] = True
        return stats

    # Process in batches
    start_index = 0
    batch_num = 0
    total_batches = (total_results + self.RESULTS_PER_PAGE - 1) // self.RESULTS_PER_PAGE

    with self._lock:
        conn = self._get_connection()

        while start_index < total_results:
            batch_num += 1

            if verbose:
                pct = int((start_index / total_results) * 100)
                print(f"{Colors.CYAN}[*] Batch {batch_num}/{total_batches} ({pct}%) - {start_index:,}/{total_results:,}{Colors.RESET}")

            if progress_callback:
                progress_callback(f"Downloading batch {batch_num}/{total_batches}", start_index, total_results)

            params = {
                'pubStartDate': start_date.strftime('%Y-%m-%dT00:00:00.000'),
                'pubEndDate': end_date.strftime('%Y-%m-%dT23:59:59.999'),
                'resultsPerPage': str(self.RESULTS_PER_PAGE),
                'startIndex': str(start_index),
            }

            response = self._make_nvd_request(params, verbose=False)

            if not response:
                # NOTE(review): this retries the same batch indefinitely on a
                # persistent failure -- consider a bounded retry count.
                stats['errors'] += 1
                if verbose:
                    print(f"{Colors.YELLOW}[!] Batch {batch_num} failed, retrying...{Colors.RESET}")
                time.sleep(6)  # NVD rate limit
                continue

            vulnerabilities = response.get('vulnerabilities', [])

            for vuln in vulnerabilities:
                try:
                    cve_data = self._parse_cve_data(vuln)
                    self._insert_cve(conn, cve_data)
                    stats['cves_processed'] += 1
                    # NOTE(review): every processed CVE is counted as "added";
                    # cves_updated is never incremented.
                    stats['cves_added'] += 1
                except Exception as e:
                    # One malformed record must not abort the whole sync.
                    stats['errors'] += 1
                    if verbose:
                        print(f"{Colors.RED}[X] Error processing CVE: {e}{Colors.RESET}")

            conn.commit()
            start_index += self.RESULTS_PER_PAGE

            # Rate limiting - NVD allows 5 requests per 30 seconds without API key
            config = get_config()
            if not config.get('nvd', 'api_key', fallback=''):
                time.sleep(6)
            else:
                time.sleep(0.6)  # With API key: 50 requests per 30 seconds

        # Update metadata
        conn.execute("""
            INSERT OR REPLACE INTO metadata (key, value) VALUES ('last_sync', ?)
        """, (datetime.now().isoformat(),))
        conn.execute("""
            INSERT OR REPLACE INTO metadata (key, value) VALUES ('last_modified', ?)
        """, (end_date.isoformat(),))
        conn.commit()

    stats['completed'] = True
    stats['finished'] = datetime.now().isoformat()

    if verbose:
        print(f"{Colors.GREEN}[+] Sync complete: {stats['cves_processed']:,} CVEs processed{Colors.RESET}")

    return stats
||||
def sync_recent(self, days: int = 7, verbose: bool = True) -> Dict[str, Any]:
    """Convenience wrapper: incremental sync covering the last *days* days."""
    return self.sync_database(days_back=days, full_sync=False, verbose=verbose)
||||
# =========================================================================
|
||||
# QUERY METHODS
|
||||
# =========================================================================
|
||||
|
||||
def search_cves(
    self,
    keyword: str = None,
    cpe_pattern: str = None,
    severity: str = None,
    min_score: float = None,
    max_results: int = 100,
    days_back: int = None
) -> List[Dict]:
    """Search CVEs in local database.

    Args:
        keyword: Search in CVE ID or description (substring match).
        cpe_pattern: CPE pattern to match (uses LIKE).
        severity: Filter by severity (LOW, MEDIUM, HIGH, CRITICAL).
        min_score: Minimum CVSS v3 score.
        max_results: Maximum results to return.
        days_back: Only return CVEs from last N days.

    Returns:
        List of matching CVE dictionaries, highest v3 score first.
    """
    with self._lock:
        conn = self._get_connection()
        cursor = conn.cursor()

        query = "SELECT DISTINCT c.* FROM cves c"
        conditions = []
        params = []

        if cpe_pattern:
            # Join only when a product filter is requested.
            query += " LEFT JOIN cve_cpes cp ON c.cve_id = cp.cve_id"
            conditions.append("cp.cpe_criteria LIKE ?")
            params.append(f"%{cpe_pattern}%")

        if keyword:
            conditions.append("(c.cve_id LIKE ? OR c.description LIKE ?)")
            params.extend([f"%{keyword}%", f"%{keyword}%"])

        if severity:
            conditions.append("c.cvss_v3_severity = ?")
            params.append(severity.upper())

        if min_score is not None:
            conditions.append("c.cvss_v3_score >= ?")
            params.append(min_score)

        if days_back:
            cutoff = (datetime.utcnow() - timedelta(days=days_back)).strftime('%Y-%m-%d')
            conditions.append("c.published >= ?")
            params.append(cutoff)

        if conditions:
            query += " WHERE " + " AND ".join(conditions)

        # NULLS LAST requires SQLite >= 3.30.
        query += " ORDER BY c.cvss_v3_score DESC NULLS LAST, c.published DESC"
        # Fix: bind LIMIT as a parameter instead of f-string interpolation,
        # which was an SQL-injection hazard for non-integer max_results.
        query += " LIMIT ?"
        params.append(int(max_results))

        cursor.execute(query, params)
        rows = cursor.fetchall()

        return [self._row_to_dict(row) for row in rows]
||||
def get_cve(self, cve_id: str) -> Optional[Dict]:
    """Get detailed information about a specific CVE.

    Args:
        cve_id: The CVE ID (e.g., CVE-2024-1234).

    Returns:
        CVE details dictionary -- the base record plus 'cpes',
        'references' and 'weaknesses' lists -- or None if not found
        in the local database.
    """
    with self._lock:
        conn = self._get_connection()
        cursor = conn.cursor()

        # Get main CVE data
        cursor.execute("SELECT * FROM cves WHERE cve_id = ?", (cve_id,))
        row = cursor.fetchone()

        if not row:
            return None

        cve = self._row_to_dict(row)

        # Get CPEs (affected products)
        cursor.execute("SELECT * FROM cve_cpes WHERE cve_id = ?", (cve_id,))
        cve['cpes'] = [dict(r) for r in cursor.fetchall()]

        # Get references (advisory/patch URLs)
        cursor.execute("SELECT url, source FROM cve_references WHERE cve_id = ?", (cve_id,))
        cve['references'] = [dict(r) for r in cursor.fetchall()]

        # Get weaknesses (CWE identifiers)
        cursor.execute("SELECT cwe_id FROM cve_weaknesses WHERE cve_id = ?", (cve_id,))
        cve['weaknesses'] = [r['cwe_id'] for r in cursor.fetchall()]

        return cve
||||
def get_system_cves(
    self,
    severity_filter: str = None,
    max_results: int = 100
) -> List[Dict]:
    """Look up CVEs whose CPE matches the detected host system.

    Args:
        severity_filter: Optional severity to filter by.
        max_results: Maximum number of results.

    Returns:
        List of relevant CVEs; empty when no CPE prefix is known.
    """
    prefix = self.system_info.get('cpe_prefix', '')
    if not prefix:
        # Unknown platform -- nothing sensible to match against.
        return []

    # Append the major OS version to narrow the CPE pattern when known.
    pattern = prefix
    os_version = self.system_info.get('os_version')
    if os_version:
        pattern = f"{prefix}:{os_version.split('.')[0]}"

    return self.search_cves(
        cpe_pattern=pattern,
        severity=severity_filter,
        max_results=max_results
    )
||||
def get_software_cves(
    self,
    software: str,
    vendor: str = None,
    version: str = None,
    max_results: int = 100
) -> List[Dict]:
    """Search CVEs for a specific piece of software.

    Args:
        software: Software/product name.
        vendor: Vendor name (optional).
        version: Software version (optional).
        max_results: Maximum results.

    Returns:
        List of CVEs, combining CPE-based and keyword matches.
    """
    # Build a vendor:product:version CPE fragment for the LIKE search.
    parts = [software.lower().replace(' ', '_')]
    if vendor:
        parts.insert(0, vendor.lower())
    if version:
        parts.append(version)
    pattern = ':'.join(parts)

    matches = self.search_cves(cpe_pattern=pattern, max_results=max_results)

    # CPE naming is inconsistent across vendors; top up with a plain
    # keyword search when the CPE search comes back sparse.
    if len(matches) < 10:
        term = f"{vendor} {software}" if vendor else software
        known = {m['cve_id'] for m in matches}
        for extra in self.search_cves(keyword=term, max_results=max_results):
            if extra['cve_id'] not in known:
                known.add(extra['cve_id'])
                matches.append(extra)

    return matches[:max_results]
||||
def get_cves_by_severity(self, severity: str, max_results: int = 100) -> List[Dict]:
    """Shortcut for a severity-only search of the local database."""
    return self.search_cves(severity=severity, max_results=max_results)
||||
def get_recent_cves(self, days: int = 30, max_results: int = 100) -> List[Dict]:
    """Shortcut: CVEs published within the last *days* days."""
    return self.search_cves(days_back=days, max_results=max_results)
||||
def _row_to_dict(self, row: sqlite3.Row) -> Dict:
|
||||
"""Convert database row to dictionary."""
|
||||
return {
|
||||
'cve_id': row['cve_id'],
|
||||
'id': row['cve_id'], # Alias for compatibility
|
||||
'description': row['description'],
|
||||
'published': row['published'],
|
||||
'modified': row['modified'],
|
||||
'cvss_score': row['cvss_v3_score'] or row['cvss_v2_score'] or 0,
|
||||
'cvss_v3_score': row['cvss_v3_score'],
|
||||
'cvss_v3_severity': row['cvss_v3_severity'],
|
||||
'cvss_v3_vector': row['cvss_v3_vector'],
|
||||
'cvss_v2_score': row['cvss_v2_score'],
|
||||
'cvss_v2_severity': row['cvss_v2_severity'],
|
||||
'cvss_v2_vector': row['cvss_v2_vector'],
|
||||
'severity': row['cvss_v3_severity'] or row['cvss_v2_severity'] or 'UNKNOWN',
|
||||
}
|
||||
|
||||
# =========================================================================
|
||||
# ONLINE FALLBACK
|
||||
# =========================================================================
|
||||
|
||||
def fetch_cve_online(self, cve_id: str, verbose: bool = False) -> Optional[Dict]:
    """Fetch a specific CVE from NVD API (online fallback).

    The result is cached into the local database before being returned,
    so subsequent lookups can be served offline.

    Args:
        cve_id: The CVE ID.
        verbose: Show progress.

    Returns:
        CVE details (as produced by get_cve) or None when the API
        request failed or returned no match.
    """
    params = {'cveId': cve_id}

    if verbose:
        print(f"{Colors.CYAN}[*] Fetching {cve_id} from NVD...{Colors.RESET}")

    response = self._make_nvd_request(params, verbose)

    if not response or not response.get('vulnerabilities'):
        return None

    cve_data = self._parse_cve_data(response['vulnerabilities'][0])

    # Store in database
    with self._lock:
        conn = self._get_connection()
        self._insert_cve(conn, cve_data)
        conn.commit()

    # Re-read through get_cve so the caller gets the canonical dict shape.
    return self.get_cve(cve_id)
||||
def search_online(
    self,
    keyword: str = None,
    cpe_name: str = None,
    severity: str = None,
    days_back: int = 120,
    max_results: int = 100,
    verbose: bool = False
) -> List[Dict]:
    """Search NVD API directly (online mode).

    Use this when local database is empty or for real-time results.

    Args:
        keyword: Free-text keyword passed to NVD's keywordSearch.
        cpe_name: Full CPE name for NVD's cpeName filter.
        severity: CVSS v3 severity filter (upper-cased before sending).
        days_back: Restrict to CVEs published in the last N days
            (0 or negative disables the date filter).
        max_results: Page size requested (NVD caps this at 2000).
        verbose: Show progress.

    Returns:
        List of summarized CVE dicts (descriptions truncated to 200
        chars); empty list on API failure. Results are NOT stored in
        the local database.
    """
    params = {
        'resultsPerPage': str(min(max_results, 2000)),
    }

    if keyword:
        params['keywordSearch'] = keyword

    if cpe_name:
        params['cpeName'] = cpe_name

    if severity:
        params['cvssV3Severity'] = severity.upper()

    if days_back > 0:
        end_date = datetime.utcnow()
        start_date = end_date - timedelta(days=days_back)
        params['pubStartDate'] = start_date.strftime('%Y-%m-%dT00:00:00.000')
        params['pubEndDate'] = end_date.strftime('%Y-%m-%dT23:59:59.999')

    if verbose:
        print(f"{Colors.CYAN}[*] Searching NVD online...{Colors.RESET}")

    response = self._make_nvd_request(params, verbose)

    if not response:
        return []

    cves = []
    for vuln in response.get('vulnerabilities', []):
        cve_data = self._parse_cve_data(vuln)
        cves.append({
            'cve_id': cve_data['cve_id'],
            'id': cve_data['cve_id'],
            'description': cve_data['description'][:200] + '...' if len(cve_data['description']) > 200 else cve_data['description'],
            'cvss_score': cve_data['cvss_v3_score'] or cve_data['cvss_v2_score'] or 0,
            'severity': cve_data['cvss_v3_severity'] or cve_data['cvss_v2_severity'] or 'UNKNOWN',
            'published': cve_data['published'][:10] if cve_data['published'] else '',
        })

    return cves
||||
def close(self):
    """Release the SQLite connection if one is open (idempotent)."""
    conn, self._conn = self._conn, None
    if conn:
        conn.close()
||||
# Global instance
|
||||
_cve_db: Optional[CVEDatabase] = None
|
||||
|
||||
|
||||
def get_cve_db() -> CVEDatabase:
    """Get the global CVE database instance.

    Lazily constructs the module-level singleton on first call.
    NOTE(review): not guarded by a lock -- concurrent first calls could
    each construct a CVEDatabase; confirm callers only hit this from one
    thread during startup.
    """
    global _cve_db
    if _cve_db is None:
        _cve_db = CVEDatabase()
    return _cve_db
|
||||
423
core/discovery.py
Normal file
423
core/discovery.py
Normal file
@@ -0,0 +1,423 @@
|
||||
"""
|
||||
AUTARCH Network Discovery
|
||||
Advertises AUTARCH on the local network so companion apps can find it.
|
||||
|
||||
Discovery methods (priority order):
|
||||
1. mDNS/Zeroconf — LAN service advertisement (_autarch._tcp.local.)
|
||||
2. Bluetooth — RFCOMM service advertisement (requires BT adapter + security enabled)
|
||||
|
||||
Dependencies:
|
||||
- mDNS: pip install zeroconf (optional, graceful fallback)
|
||||
- Bluetooth: system bluetoothctl + hcitool (no pip package needed)
|
||||
"""
|
||||
|
||||
import json
|
||||
import socket
|
||||
import subprocess
|
||||
import threading
|
||||
import time
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Dict, Optional, Tuple
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Service constants
|
||||
MDNS_SERVICE_TYPE = "_autarch._tcp.local."
|
||||
MDNS_SERVICE_NAME = "AUTARCH._autarch._tcp.local."
|
||||
BT_SERVICE_NAME = "AUTARCH"
|
||||
BT_SERVICE_UUID = "a1b2c3d4-e5f6-7890-abcd-ef1234567890"
|
||||
|
||||
|
||||
def _get_local_ip() -> str:
|
||||
"""Get the primary local IP address."""
|
||||
try:
|
||||
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
|
||||
s.connect(("8.8.8.8", 80))
|
||||
ip = s.getsockname()[0]
|
||||
s.close()
|
||||
return ip
|
||||
except Exception:
|
||||
return "127.0.0.1"
|
||||
|
||||
|
||||
class DiscoveryManager:
|
||||
"""Manages network discovery advertising for AUTARCH."""
|
||||
|
||||
def __init__(self, config=None):
|
||||
self._config = config or {}
|
||||
self._web_port = int(self._config.get('web_port', 8181))
|
||||
self._hostname = socket.gethostname()
|
||||
|
||||
# mDNS state
|
||||
self._zeroconf = None
|
||||
self._mdns_info = None
|
||||
self._mdns_running = False
|
||||
|
||||
# Bluetooth state
|
||||
self._bt_running = False
|
||||
self._bt_thread = None
|
||||
self._bt_process = None
|
||||
|
||||
# Settings
|
||||
self._mdns_enabled = self._config.get('mdns_enabled', 'true').lower() == 'true'
|
||||
self._bt_enabled = self._config.get('bluetooth_enabled', 'true').lower() == 'true'
|
||||
self._bt_require_security = self._config.get('bt_require_security', 'true').lower() == 'true'
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Status
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def get_status(self) -> Dict:
    """Get current discovery status for all methods.

    Returns:
        Dict with host identity (local_ip, hostname, web_port) and a
        per-transport sub-dict ('mdns', 'bluetooth') reporting
        availability, enablement and running state.
    """
    return {
        'local_ip': _get_local_ip(),
        'hostname': self._hostname,
        'web_port': self._web_port,
        'mdns': {
            'available': self._is_zeroconf_available(),
            'enabled': self._mdns_enabled,
            'running': self._mdns_running,
            'service_type': MDNS_SERVICE_TYPE,
        },
        'bluetooth': {
            'available': self._is_bt_available(),
            'adapter_present': self._bt_adapter_present(),
            'enabled': self._bt_enabled,
            'running': self._bt_running,
            # Only probe security flags when an adapter actually exists.
            'secure': self._bt_is_secure() if self._bt_adapter_present() else False,
            'require_security': self._bt_require_security,
            'service_name': BT_SERVICE_NAME,
        }
    }
||||
# ------------------------------------------------------------------
|
||||
# mDNS / Zeroconf
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _is_zeroconf_available(self) -> bool:
|
||||
"""Check if the zeroconf Python package is installed."""
|
||||
try:
|
||||
import zeroconf # noqa: F401
|
||||
return True
|
||||
except ImportError:
|
||||
return False
|
||||
|
||||
def start_mdns(self) -> Tuple[bool, str]:
    """Start mDNS service advertisement.

    Registers the _autarch._tcp service on the LAN so companion apps
    can discover this host.

    Returns:
        (success, human-readable status message). Never raises; all
        failures are reported through the message.
    """
    if self._mdns_running:
        return True, "mDNS already running"

    if not self._is_zeroconf_available():
        return False, "zeroconf not installed. Run: pip install zeroconf"

    try:
        from zeroconf import Zeroconf, ServiceInfo
        import socket as sock

        local_ip = _get_local_ip()

        # Advertise the web UI port with identifying TXT properties.
        self._mdns_info = ServiceInfo(
            MDNS_SERVICE_TYPE,
            MDNS_SERVICE_NAME,
            addresses=[sock.inet_aton(local_ip)],
            port=self._web_port,
            properties={
                'version': '1.0',
                'hostname': self._hostname,
                'platform': 'autarch',
            },
            server=f"{self._hostname}.local.",
        )

        self._zeroconf = Zeroconf()
        self._zeroconf.register_service(self._mdns_info)
        self._mdns_running = True

        logger.info(f"mDNS: advertising {MDNS_SERVICE_NAME} at {local_ip}:{self._web_port}")
        return True, f"mDNS started — {local_ip}:{self._web_port}"

    except Exception as e:
        # Keep discovery failures non-fatal for the host application.
        logger.error(f"mDNS start failed: {e}")
        return False, f"mDNS failed: {e}"
||||
def stop_mdns(self) -> Tuple[bool, str]:
    """Stop mDNS service advertisement.

    Returns:
        (success, status message). The running flag is cleared even on
        failure so a stuck registration cannot block future starts.
    """
    if not self._mdns_running:
        return True, "mDNS not running"

    try:
        if self._zeroconf and self._mdns_info:
            self._zeroconf.unregister_service(self._mdns_info)
            self._zeroconf.close()
        self._zeroconf = None
        self._mdns_info = None
        self._mdns_running = False
        logger.info("mDNS: stopped")
        return True, "mDNS stopped"
    except Exception as e:
        # Force the flag off regardless so state stays consistent.
        self._mdns_running = False
        return False, f"mDNS stop error: {e}"
||||
# ------------------------------------------------------------------
|
||||
# Bluetooth
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def _is_bt_available(self) -> bool:
|
||||
"""Check if Bluetooth CLI tools are available."""
|
||||
try:
|
||||
result = subprocess.run(
|
||||
['which', 'bluetoothctl'],
|
||||
capture_output=True, text=True, timeout=5
|
||||
)
|
||||
return result.returncode == 0
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def _bt_adapter_present(self) -> bool:
|
||||
"""Check if a Bluetooth adapter is physically present."""
|
||||
try:
|
||||
result = subprocess.run(
|
||||
['hciconfig'],
|
||||
capture_output=True, text=True, timeout=5
|
||||
)
|
||||
return 'hci0' in result.stdout
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def _bt_is_secure(self) -> bool:
|
||||
"""Check if Bluetooth security (SSP/authentication) is enabled."""
|
||||
try:
|
||||
# Check if adapter requires authentication
|
||||
result = subprocess.run(
|
||||
['hciconfig', 'hci0', 'auth'],
|
||||
capture_output=True, text=True, timeout=5
|
||||
)
|
||||
# Also check hciconfig output for AUTH flag
|
||||
status = subprocess.run(
|
||||
['hciconfig', 'hci0'],
|
||||
capture_output=True, text=True, timeout=5
|
||||
)
|
||||
# Look for AUTH in flags
|
||||
return 'AUTH' in status.stdout
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def _bt_enable_security(self) -> Tuple[bool, str]:
    """Turn on AUTH, ENCRYPT, and SSP on hci0, then verify the result.

    Returns:
        (ok, message). Success requires the AUTH flag to be confirmed
        afterwards via _bt_is_secure().
    """
    flag_cmds = (
        ['sudo', 'hciconfig', 'hci0', 'auth'],          # link authentication
        ['sudo', 'hciconfig', 'hci0', 'encrypt'],       # link encryption
        ['sudo', 'hciconfig', 'hci0', 'sspmode', '1'],  # Secure Simple Pairing
    )
    try:
        for cmd in flag_cmds:
            subprocess.run(cmd, capture_output=True, text=True, timeout=5)
        if self._bt_is_secure():
            return True, "Bluetooth security enabled (AUTH + ENCRYPT + SSP)"
        return False, "Security flags set but AUTH not confirmed"
    except Exception as e:
        return False, f"Failed to enable BT security: {e}"
|
||||
|
||||
def start_bluetooth(self) -> Tuple[bool, str]:
    """Begin Bluetooth service advertisement.

    Preconditions, checked in order:
    1. Not already advertising.
    2. bluetoothctl is available.
    3. A Bluetooth adapter is present.
    4. If ``bt_require_security`` is set, adapter security must be enabled
       (we try to enable it ourselves before giving up).

    Returns:
        (ok, message) tuple.
    """
    if self._bt_running:
        return True, "Bluetooth already advertising"
    if not self._is_bt_available():
        return False, "bluetoothctl not found"
    if not self._bt_adapter_present():
        return False, "No Bluetooth adapter detected"

    # Bring the adapter up — best effort, failures are non-fatal here.
    try:
        subprocess.run(
            ['sudo', 'hciconfig', 'hci0', 'up'],
            capture_output=True, text=True, timeout=5,
        )
    except Exception:
        pass

    # Enforce the security policy before becoming discoverable.
    if self._bt_require_security and not self._bt_is_secure():
        ok, msg = self._bt_enable_security()
        if not ok:
            return False, f"Bluetooth security required but not available: {msg}"

    try:
        # Device name + discoverable mode via raw hciconfig.
        subprocess.run(
            ['sudo', 'hciconfig', 'hci0', 'name', BT_SERVICE_NAME],
            capture_output=True, text=True, timeout=5,
        )
        subprocess.run(
            ['sudo', 'hciconfig', 'hci0', 'piscan'],
            capture_output=True, text=True, timeout=5,
        )

        # bluetoothctl: permanent discoverability (timeout 0) and alias.
        for line in (
            'power on',
            f'system-alias {BT_SERVICE_NAME}',
            'discoverable on',
            'discoverable-timeout 0',
            'pairable on',
        ):
            subprocess.run(
                ['bluetoothctl'] + line.split(),
                capture_output=True, text=True, timeout=5,
            )

        # Background thread lets the companion app read connection info
        # (IP + port) after pairing.
        self._bt_running = True
        self._bt_thread = threading.Thread(
            target=self._bt_rfcomm_server,
            daemon=True,
            name="autarch-bt-discovery",
        )
        self._bt_thread.start()

        logger.info("Bluetooth: advertising as AUTARCH")
        return True, f"Bluetooth advertising — name: {BT_SERVICE_NAME}"
    except Exception as e:
        self._bt_running = False
        return False, f"Bluetooth start failed: {e}"
|
||||
|
||||
def _bt_rfcomm_server(self):
|
||||
"""Background thread: RFCOMM server that sends connection info to paired clients.
|
||||
|
||||
When a paired device connects, we send them a JSON blob with our IP and port
|
||||
so the companion app can auto-configure.
|
||||
"""
|
||||
try:
|
||||
# Use a simple TCP socket on a known port as a Bluetooth-adjacent info service
|
||||
# (full RFCOMM requires pybluez which may not be installed)
|
||||
# Instead, we'll use sdptool to register the service and bluetoothctl for visibility
|
||||
#
|
||||
# The companion app discovers us via BT name "AUTARCH", then connects via
|
||||
# the IP it gets from the BT device info or mDNS
|
||||
while self._bt_running:
|
||||
time.sleep(5)
|
||||
except Exception as e:
|
||||
logger.error(f"BT RFCOMM server error: {e}")
|
||||
finally:
|
||||
self._bt_running = False
|
||||
|
||||
def stop_bluetooth(self) -> Tuple[bool, str]:
    """Stop Bluetooth advertisement and wind down the discovery thread."""
    if not self._bt_running:
        return True, "Bluetooth not advertising"

    # Clear the flag first so the discovery thread's loop exits.
    self._bt_running = False

    try:
        subprocess.run(
            ['bluetoothctl', 'discoverable', 'off'],
            capture_output=True, text=True, timeout=5,
        )
        subprocess.run(
            ['sudo', 'hciconfig', 'hci0', 'noscan'],
            capture_output=True, text=True, timeout=5,
        )

        if self._bt_thread:
            self._bt_thread.join(timeout=3)
            self._bt_thread = None

        logger.info("Bluetooth: stopped advertising")
        return True, "Bluetooth advertising stopped"
    except Exception as e:
        return False, f"Bluetooth stop error: {e}"
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Start / Stop All
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def start_all(self) -> Dict:
    """Start every discovery method enabled in config.

    Returns:
        Mapping of method name ('mdns', 'bluetooth') to a dict with
        'ok' and 'message' keys; disabled methods report ok=False.
    """
    results = {}

    if self._mdns_enabled:
        ok, msg = self.start_mdns()
    else:
        ok, msg = False, 'Disabled in config'
    results['mdns'] = {'ok': ok, 'message': msg}

    if self._bt_enabled:
        ok, msg = self.start_bluetooth()
    else:
        ok, msg = False, 'Disabled in config'
    results['bluetooth'] = {'ok': ok, 'message': msg}

    return results
|
||||
|
||||
def stop_all(self) -> Dict:
    """Stop all discovery methods unconditionally; report per-method results."""
    return {
        'mdns': dict(zip(('ok', 'message'), self.stop_mdns())),
        'bluetooth': dict(zip(('ok', 'message'), self.stop_bluetooth())),
    }
|
||||
|
||||
# ------------------------------------------------------------------
|
||||
# Cleanup
|
||||
# ------------------------------------------------------------------
|
||||
|
||||
def shutdown(self):
    """Cleanly release all discovery services (mDNS and Bluetooth)."""
    self.stop_all()
|
||||
|
||||
|
||||
# ======================================================================
|
||||
# Singleton
|
||||
# ======================================================================
|
||||
|
||||
_manager = None
_manager_lock = threading.Lock()


def get_discovery_manager(config=None) -> DiscoveryManager:
    """Return the process-wide DiscoveryManager, creating it on first use.

    Args:
        config: Optional config mapping; when None, the [discovery] section
            (plus the web port) is read from core.config, falling back to {}.

    Fix: creation is now guarded by a double-checked lock, matching the
    thread-safe singleton pattern used by get_dns_service() elsewhere in
    this codebase, so concurrent first calls cannot build two managers.
    """
    global _manager
    if _manager is None:
        with _manager_lock:
            if _manager is None:
                if config is None:
                    try:
                        from core.config import get_config
                        cfg = get_config()
                        config = {}
                        if cfg.has_section('discovery'):
                            config = dict(cfg.items('discovery'))
                        if cfg.has_section('web'):
                            config['web_port'] = cfg.get('web', 'port', fallback='8181')
                    except Exception:
                        # Config unavailable — run with defaults.
                        config = {}
                _manager = DiscoveryManager(config)
    return _manager
|
||||
324
core/dns_service.py
Normal file
324
core/dns_service.py
Normal file
@@ -0,0 +1,324 @@
|
||||
"""AUTARCH DNS Service Manager — controls the Go-based autarch-dns binary."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import time
|
||||
import signal
|
||||
import socket
|
||||
import subprocess
|
||||
import threading
|
||||
from pathlib import Path
|
||||
|
||||
try:
|
||||
from core.paths import find_tool, get_data_dir
|
||||
except ImportError:
|
||||
def find_tool(name):
|
||||
import shutil
|
||||
return shutil.which(name)
|
||||
def get_data_dir():
|
||||
return str(Path(__file__).parent.parent / 'data')
|
||||
|
||||
try:
|
||||
import requests
|
||||
_HAS_REQUESTS = True
|
||||
except ImportError:
|
||||
_HAS_REQUESTS = False
|
||||
|
||||
|
||||
class DNSServiceManager:
    """Manage the autarch-dns Go binary (start/stop/API calls).

    The Go daemon owns the DNS listener plus a localhost REST API; this
    class handles process lifecycle, the shared JSON config file, and
    authenticated API wrappers (falling back to urllib when `requests`
    is unavailable).
    """

    def __init__(self):
        self._process = None   # Popen handle when this process started the daemon
        self._pid = None
        self._config = None
        self._config_path = os.path.join(get_data_dir(), 'dns', 'config.json')
        self._load_config()

    def _load_config(self):
        """Read config.json; on failure or first run, write sane defaults."""
        if os.path.exists(self._config_path):
            try:
                with open(self._config_path, 'r') as f:
                    self._config = json.load(f)
            except Exception:
                self._config = None
        if not self._config:
            self._config = {
                'listen_dns': '0.0.0.0:53',
                'listen_api': '127.0.0.1:5380',
                'api_token': os.urandom(16).hex(),  # fresh per-install secret
                'upstream': [],  # Empty = pure recursive from root hints
                'cache_ttl': 300,
                'zones_dir': os.path.join(get_data_dir(), 'dns', 'zones'),
                'dnssec_keys_dir': os.path.join(get_data_dir(), 'dns', 'keys'),
                'log_queries': True,
            }
            self._save_config()

    def _save_config(self):
        """Persist the in-memory config for the Go binary to read."""
        os.makedirs(os.path.dirname(self._config_path), exist_ok=True)
        with open(self._config_path, 'w') as f:
            json.dump(self._config, f, indent=2)

    @property
    def api_base(self) -> str:
        """Base URL of the daemon's REST API."""
        addr = self._config.get('listen_api', '127.0.0.1:5380')
        return f'http://{addr}'

    @property
    def api_token(self) -> str:
        """Bearer token shared with the Go daemon."""
        return self._config.get('api_token', '')

    def find_binary(self):
        """Locate the autarch-dns binary; return a path string, or None.

        Fix: the original annotated the return as ``-> str`` while also
        returning None when nothing is found — the misleading annotation
        is dropped.
        """
        binary = find_tool('autarch-dns')
        if binary:
            return binary
        # Check common bundled locations.
        base = Path(__file__).parent.parent
        candidates = [
            base / 'services' / 'dns-server' / 'autarch-dns',
            base / 'services' / 'dns-server' / 'autarch-dns.exe',
            base / 'tools' / 'windows-x86_64' / 'autarch-dns.exe',
            base / 'tools' / 'linux-arm64' / 'autarch-dns',
            base / 'tools' / 'linux-x86_64' / 'autarch-dns',
        ]
        for candidate in candidates:
            if candidate.exists():
                return str(candidate)
        return None

    def is_running(self) -> bool:
        """True if our child process is alive, or the API answers."""
        if self._process and self._process.poll() is None:
            return True
        try:
            return self._api_get('/api/status').get('ok', False)
        except Exception:
            return False

    def start(self) -> dict:
        """Launch the daemon detached and wait for its API to come up."""
        if self.is_running():
            return {'ok': True, 'message': 'DNS service already running'}

        binary = self.find_binary()
        if not binary:
            return {'ok': False, 'error': 'autarch-dns binary not found. Build it with: cd services/dns-server && go build'}

        # The Go binary expects these directories to exist.
        os.makedirs(self._config.get('zones_dir', ''), exist_ok=True)
        os.makedirs(self._config.get('dnssec_keys_dir', ''), exist_ok=True)

        # Save config for the Go binary to read.
        self._save_config()

        cmd = [binary, '-config', self._config_path]
        try:
            kwargs = {
                'stdout': subprocess.DEVNULL,
                'stderr': subprocess.DEVNULL,
            }
            if sys.platform == 'win32':
                kwargs['creationflags'] = (
                    subprocess.CREATE_NEW_PROCESS_GROUP |
                    subprocess.CREATE_NO_WINDOW
                )
            else:
                kwargs['start_new_session'] = True  # detach from our session

            self._process = subprocess.Popen(cmd, **kwargs)
            self._pid = self._process.pid

            # Poll the API for up to ~15 seconds.
            for _ in range(30):
                time.sleep(0.5)
                try:
                    if self._api_get('/api/status').get('ok'):
                        return {
                            'ok': True,
                            'message': f'DNS service started (PID {self._pid})',
                            'pid': self._pid,
                        }
                except Exception:
                    if self._process.poll() is not None:
                        return {'ok': False, 'error': 'DNS service exited immediately — may need admin/root for port 53'}
                    continue

            return {'ok': False, 'error': 'DNS service started but API not responding'}
        except PermissionError:
            return {'ok': False, 'error': 'Permission denied — DNS on port 53 requires admin/root'}
        except Exception as e:
            return {'ok': False, 'error': str(e)}

    def stop(self) -> dict:
        """Terminate the daemon gracefully; force-kill (and reap) on failure.

        Fix: the original called kill() in the except path but never waited
        on the killed child, which leaves a zombie process on POSIX — the
        killed child is now reaped as well.
        """
        if self._process and self._process.poll() is None:
            try:
                if sys.platform == 'win32':
                    self._process.terminate()
                else:
                    os.kill(self._process.pid, signal.SIGTERM)
                self._process.wait(timeout=5)
            except Exception:
                self._process.kill()
                try:
                    self._process.wait(timeout=5)
                except Exception:
                    pass
            self._process = None
            self._pid = None
            return {'ok': True, 'message': 'DNS service stopped'}
        return {'ok': True, 'message': 'DNS service was not running'}

    def status(self) -> dict:
        """Local process state merged with the daemon's own /api/status."""
        running = self.is_running()
        result = {
            'running': running,
            'pid': self._pid,
            'listen_dns': self._config.get('listen_dns', ''),
            'listen_api': self._config.get('listen_api', ''),
        }
        if running:
            try:
                result.update(self._api_get('/api/status'))
            except Exception:
                pass
        return result

    # ── API wrappers ─────────────────────────────────────────────────────

    def _api_request(self, method: str, endpoint: str, data: dict = None) -> dict:
        """Single authenticated JSON round-trip; consolidates the four verbs."""
        if not _HAS_REQUESTS:
            return self._api_urllib(endpoint, method, data)
        headers = {'Authorization': f'Bearer {self.api_token}'}
        kwargs = {'headers': headers, 'timeout': 5}
        if method in ('POST', 'PUT'):
            headers['Content-Type'] = 'application/json'
            kwargs['json'] = data or {}
        resp = requests.request(method, f'{self.api_base}{endpoint}', **kwargs)
        return resp.json()

    def _api_get(self, endpoint: str) -> dict:
        return self._api_request('GET', endpoint)

    def _api_post(self, endpoint: str, data: dict = None) -> dict:
        return self._api_request('POST', endpoint, data)

    def _api_delete(self, endpoint: str) -> dict:
        return self._api_request('DELETE', endpoint)

    def _api_put(self, endpoint: str, data: dict = None) -> dict:
        return self._api_request('PUT', endpoint, data)

    def _api_urllib(self, endpoint: str, method: str, data: dict = None) -> dict:
        """Fallback using urllib (no requests dependency)."""
        import urllib.request
        url = f'{self.api_base}{endpoint}'
        body = json.dumps(data).encode() if data else None
        req = urllib.request.Request(
            url, data=body, method=method,
            headers={
                'Authorization': f'Bearer {self.api_token}',
                'Content-Type': 'application/json',
            },
        )
        with urllib.request.urlopen(req, timeout=5) as resp:
            return json.loads(resp.read())

    # ── High-level zone operations ───────────────────────────────────────

    def list_zones(self) -> list:
        return self._api_get('/api/zones').get('zones', [])

    def create_zone(self, domain: str) -> dict:
        return self._api_post('/api/zones', {'domain': domain})

    def get_zone(self, domain: str) -> dict:
        return self._api_get(f'/api/zones/{domain}')

    def delete_zone(self, domain: str) -> dict:
        return self._api_delete(f'/api/zones/{domain}')

    def list_records(self, domain: str) -> list:
        return self._api_get(f'/api/zones/{domain}/records').get('records', [])

    def add_record(self, domain: str, rtype: str, name: str, value: str,
                   ttl: int = 300, priority: int = 0) -> dict:
        return self._api_post(f'/api/zones/{domain}/records', {
            'type': rtype, 'name': name, 'value': value,
            'ttl': ttl, 'priority': priority,
        })

    def delete_record(self, domain: str, record_id: str) -> dict:
        return self._api_delete(f'/api/zones/{domain}/records/{record_id}')

    def setup_mail_records(self, domain: str, mx_host: str = '',
                           dkim_key: str = '', spf_allow: str = '') -> dict:
        return self._api_post(f'/api/zones/{domain}/mail-setup', {
            'mx_host': mx_host, 'dkim_key': dkim_key, 'spf_allow': spf_allow,
        })

    def enable_dnssec(self, domain: str) -> dict:
        return self._api_post(f'/api/zones/{domain}/dnssec/enable')

    def disable_dnssec(self, domain: str) -> dict:
        return self._api_post(f'/api/zones/{domain}/dnssec/disable')

    def get_metrics(self) -> dict:
        return self._api_get('/api/metrics').get('metrics', {})

    def get_config(self) -> dict:
        return self._config.copy()

    def update_config(self, updates: dict) -> dict:
        """Apply known keys, persist, and push to the live service if up."""
        for key, value in updates.items():
            if key in self._config:
                self._config[key] = value
        self._save_config()
        # Also update the running service; tolerate it being down.
        try:
            return self._api_put('/api/config', updates)
        except Exception:
            return {'ok': True, 'message': 'Config saved (service not running)'}
|
||||
|
||||
|
||||
# ── Singleton ────────────────────────────────────────────────────────────────
|
||||
|
||||
_instance = None
|
||||
_lock = threading.Lock()
|
||||
|
||||
|
||||
def get_dns_service() -> DNSServiceManager:
|
||||
global _instance
|
||||
if _instance is None:
|
||||
with _lock:
|
||||
if _instance is None:
|
||||
_instance = DNSServiceManager()
|
||||
return _instance
|
||||
640
core/hardware.py
Normal file
640
core/hardware.py
Normal file
@@ -0,0 +1,640 @@
|
||||
"""
|
||||
AUTARCH Hardware Manager
|
||||
ADB/Fastboot device management and ESP32 serial flashing.
|
||||
|
||||
Provides server-side access to USB-connected devices:
|
||||
- ADB: Android device shell, sideload, push/pull, logcat
|
||||
- Fastboot: Partition flashing, OEM unlock, device info
|
||||
- Serial/ESP32: Port detection, chip ID, firmware flash, serial monitor
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import json
|
||||
import time
|
||||
import subprocess
|
||||
import threading
|
||||
from pathlib import Path
|
||||
from typing import Optional, List, Dict, Any, Callable
|
||||
|
||||
from core.paths import find_tool, get_data_dir
|
||||
|
||||
# Try importing serial
|
||||
PYSERIAL_AVAILABLE = False
|
||||
try:
|
||||
import serial
|
||||
import serial.tools.list_ports
|
||||
PYSERIAL_AVAILABLE = True
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
# Try importing esptool
|
||||
ESPTOOL_AVAILABLE = False
|
||||
try:
|
||||
import esptool
|
||||
ESPTOOL_AVAILABLE = True
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
class HardwareManager:
|
||||
"""Manages ADB, Fastboot, and Serial/ESP32 devices."""
|
||||
|
||||
def __init__(self):
    """Resolve tool paths and initialise monitor/operation state."""
    # Tool discovery: find_tool checks system PATH first, then bundled copies.
    self.adb_path = find_tool('adb')
    self.fastboot_path = find_tool('fastboot')

    # Per-feature data directory for pulled files etc.
    self._data_dir = get_data_dir() / 'hardware'
    self._data_dir.mkdir(parents=True, exist_ok=True)

    # Live serial-monitor state.
    self._monitor_thread = None
    self._monitor_running = False
    self._monitor_serial = None
    self._monitor_buffer = []
    self._monitor_lock = threading.Lock()

    # Progress records for async flash/sideload operations.
    self._operation_progress = {}
    self._operation_lock = threading.Lock()
|
||||
|
||||
# ── Status ──────────────────────────────────────────────────────
|
||||
|
||||
def get_status(self):
    """Report which hardware backends are usable on this host."""
    adb, fastboot = self.adb_path, self.fastboot_path
    return {
        'adb': adb is not None,
        'adb_path': adb or '',
        'fastboot': fastboot is not None,
        'fastboot_path': fastboot or '',
        'serial': PYSERIAL_AVAILABLE,
        'esptool': ESPTOOL_AVAILABLE,
    }
|
||||
|
||||
# ── ADB Methods ────────────────────────────────────────────────
|
||||
|
||||
def _run_adb(self, args, serial=None, timeout=30):
|
||||
"""Run an adb command and return (stdout, stderr, returncode)."""
|
||||
if not self.adb_path:
|
||||
return '', 'adb not found', 1
|
||||
cmd = [self.adb_path]
|
||||
if serial:
|
||||
cmd += ['-s', serial]
|
||||
cmd += args
|
||||
try:
|
||||
result = subprocess.run(
|
||||
cmd, capture_output=True, text=True, timeout=timeout
|
||||
)
|
||||
return result.stdout, result.stderr, result.returncode
|
||||
except subprocess.TimeoutExpired:
|
||||
return '', 'Command timed out', 1
|
||||
except Exception as e:
|
||||
return '', str(e), 1
|
||||
|
||||
def adb_devices(self):
    """Parse `adb devices -l` output into a list of device dicts."""
    stdout, stderr, rc = self._run_adb(['devices', '-l'])
    if rc != 0:
        return []
    devices = []
    # First line is the "List of devices attached" header.
    for raw in stdout.strip().split('\n')[1:]:
        raw = raw.strip()
        if not raw or 'List of' in raw:
            continue
        fields = raw.split()
        if len(fields) < 2:
            continue
        entry = {
            'serial': fields[0],
            'state': fields[1],
            'model': '',
            'product': '',
            'transport_id': '',
        }
        # Remaining tokens are key:value annotations.
        for token in fields[2:]:
            if ':' not in token:
                continue
            key, _, val = token.partition(':')
            if key in ('model', 'product', 'transport_id', 'device'):
                entry[key] = val
        devices.append(entry)
    return devices
|
||||
|
||||
def adb_device_info(self, serial):
    """Collect build properties, battery state, and /data storage for a device."""
    # getprop keys we care about, mapped to friendlier names.
    prop_keys = {
        'ro.product.model': 'model',
        'ro.product.brand': 'brand',
        'ro.product.name': 'product',
        'ro.build.version.release': 'android_version',
        'ro.build.version.sdk': 'sdk',
        'ro.build.display.id': 'build',
        'ro.build.version.security_patch': 'security_patch',
        'ro.product.cpu.abi': 'cpu_abi',
        'ro.serialno': 'serialno',
        'ro.bootimage.build.date': 'build_date',
    }
    props = {}

    # One getprop call fetches everything; lines look like "[key]: [value]".
    out, _, rc = self._run_adb(['shell', 'getprop'], serial=serial)
    if rc == 0:
        for line in out.split('\n'):
            match = re.match(r'\[(.+?)\]:\s*\[(.+?)\]', line)
            if not match:
                continue
            key = match.group(1)
            if key in prop_keys:
                props[prop_keys[key]] = match.group(2)

    # Battery level + charge status.
    out, _, rc = self._run_adb(['shell', 'dumpsys', 'battery'], serial=serial)
    if rc == 0:
        status_map = {'2': 'Charging', '3': 'Discharging',
                      '4': 'Not charging', '5': 'Full'}
        for line in out.split('\n'):
            line = line.strip()
            if line.startswith('level:'):
                props['battery'] = line.split(':')[1].strip()
            elif line.startswith('status:'):
                raw = line.split(':')[1].strip()
                props['battery_status'] = status_map.get(raw, raw)

    # /data storage from df.
    out, _, rc = self._run_adb(['shell', 'df', '/data'], serial=serial, timeout=10)
    if rc == 0:
        rows = out.strip().split('\n')
        if len(rows) >= 2:
            cols = rows[1].split()
            if len(cols) >= 4:
                props['storage_total'] = cols[1]
                props['storage_used'] = cols[2]
                props['storage_free'] = cols[3]

    props['serial'] = serial
    return props
|
||||
|
||||
def adb_shell(self, serial, command):
    """Run a shell command on the device, refusing a blocklist of destructive ones."""
    blocked = ['rm -rf /', 'mkfs', 'dd if=/dev/zero', 'format', '> /dev/', 'reboot']
    lowered = command.lower().strip()
    for pattern in blocked:
        if pattern in lowered:
            return {'output': f'Blocked dangerous command: {pattern}', 'returncode': 1}

    stdout, stderr, rc = self._run_adb(['shell', command], serial=serial, timeout=30)
    return {'output': stdout or stderr, 'returncode': rc}
|
||||
|
||||
def adb_shell_raw(self, serial, command, timeout=30):
    """Unfiltered shell execution — intended for exploit modules only."""
    out, err, rc = self._run_adb(['shell', command], serial=serial, timeout=timeout)
    return {'output': out or err, 'returncode': rc}
|
||||
|
||||
def adb_reboot(self, serial, mode='system'):
    """Reboot device into system (default), recovery, or bootloader."""
    # 'system' (and empty mode) is plain `adb reboot`; anything else is a target.
    args = ['reboot'] if not mode or mode == 'system' else ['reboot', mode]
    stdout, stderr, rc = self._run_adb(args, serial=serial, timeout=15)
    return {'success': rc == 0, 'output': stdout or stderr}
|
||||
|
||||
def adb_install(self, serial, apk_path):
    """Install (replace) an APK on the device via `adb install -r`."""
    if not os.path.isfile(apk_path):
        return {'success': False, 'error': f'File not found: {apk_path}'}
    stdout, stderr, rc = self._run_adb(
        ['install', '-r', apk_path], serial=serial, timeout=120
    )
    return {'success': rc == 0, 'output': stdout or stderr}
|
||||
|
||||
def adb_sideload(self, serial, filepath):
    """Asynchronously sideload a ZIP (or install an APK).

    Returns:
        {'success': True, 'op_id': ...} immediately; poll the op_id for
        progress. Missing files fail synchronously.
    """
    if not os.path.isfile(filepath):
        return {'success': False, 'error': f'File not found: {filepath}'}

    op_id = f'sideload_{int(time.time())}'
    with self._operation_lock:
        self._operation_progress[op_id] = {
            'status': 'starting', 'progress': 0, 'message': 'Starting sideload...'
        }

    def _worker():
        try:
            # APKs go through `install -r`; everything else via recovery sideload.
            if os.path.splitext(filepath)[1].lower() == '.apk':
                cmd = [self.adb_path, '-s', serial, 'install', '-r', filepath]
            else:
                cmd = [self.adb_path, '-s', serial, 'sideload', filepath]

            with self._operation_lock:
                state = self._operation_progress[op_id]
                state['status'] = 'running'
                state['progress'] = 10
                state['message'] = 'Transferring...'

            result = subprocess.run(cmd, capture_output=True, text=True, timeout=600)

            with self._operation_lock:
                if result.returncode == 0:
                    self._operation_progress[op_id] = {
                        'status': 'done', 'progress': 100,
                        'message': 'Sideload complete',
                        'output': result.stdout,
                    }
                else:
                    self._operation_progress[op_id] = {
                        'status': 'error', 'progress': 0,
                        'message': result.stderr or 'Sideload failed',
                    }
        except Exception as e:
            with self._operation_lock:
                self._operation_progress[op_id] = {
                    'status': 'error', 'progress': 0, 'message': str(e),
                }

    threading.Thread(target=_worker, daemon=True).start()
    return {'success': True, 'op_id': op_id}
|
||||
|
||||
def adb_push(self, serial, local_path, remote_path):
    """Push a local file onto the device."""
    if not os.path.isfile(local_path):
        return {'success': False, 'error': f'File not found: {local_path}'}
    stdout, stderr, rc = self._run_adb(
        ['push', local_path, remote_path], serial=serial, timeout=120
    )
    return {'success': rc == 0, 'output': stdout or stderr}
|
||||
|
||||
def adb_pull(self, serial, remote_path, local_path=None):
    """Pull a remote file; destination defaults into the hardware data dir."""
    if not local_path:
        local_path = str(self._data_dir / os.path.basename(remote_path))
    out, err, rc = self._run_adb(
        ['pull', remote_path, local_path], serial=serial, timeout=120
    )
    return {'success': rc == 0, 'output': out or err, 'local_path': local_path}
|
||||
|
||||
def adb_logcat(self, serial, lines=100):
    """Return the most recent *lines* lines of logcat output."""
    out, err, rc = self._run_adb(
        ['logcat', '-d', '-t', str(lines)], serial=serial, timeout=15
    )
    return {'output': out or err, 'lines': lines}
|
||||
|
||||
# ── Fastboot Methods ───────────────────────────────────────────
|
||||
|
||||
def _run_fastboot(self, args, serial=None, timeout=30):
|
||||
"""Run a fastboot command."""
|
||||
if not self.fastboot_path:
|
||||
return '', 'fastboot not found', 1
|
||||
cmd = [self.fastboot_path]
|
||||
if serial:
|
||||
cmd += ['-s', serial]
|
||||
cmd += args
|
||||
try:
|
||||
result = subprocess.run(
|
||||
cmd, capture_output=True, text=True, timeout=timeout
|
||||
)
|
||||
# fastboot outputs to stderr for many commands
|
||||
return result.stdout, result.stderr, result.returncode
|
||||
except subprocess.TimeoutExpired:
|
||||
return '', 'Command timed out', 1
|
||||
except Exception as e:
|
||||
return '', str(e), 1
|
||||
|
||||
def fastboot_devices(self):
    """List devices currently in fastboot mode."""
    stdout, stderr, rc = self._run_fastboot(['devices'])
    if rc != 0:
        return []
    found = []
    # Output is tab-separated "SERIAL\tstate"; may arrive on stderr.
    for line in (stdout or stderr).strip().split('\n'):
        line = line.strip()
        if not line:
            continue
        cells = line.split('\t')
        if len(cells) >= 2:
            found.append({
                'serial': cells[0].strip(),
                'state': cells[1].strip(),
            })
    return found
|
||||
|
||||
def fastboot_device_info(self, serial):
    """Query a standard set of fastboot variables for a device.

    fastboot prints `getvar` results on stderr, hence the stderr-first read.
    """
    wanted = [
        'product', 'variant', 'serialno', 'secure', 'unlocked',
        'is-userspace', 'hw-revision', 'battery-level',
        'current-slot', 'slot-count',
    ]
    info = {}
    for var in wanted:
        stdout, stderr, rc = self._run_fastboot(
            ['getvar', var], serial=serial, timeout=10
        )
        text = stderr or stdout
        for line in text.split('\n'):
            if line.startswith(f'{var}:'):
                info[var] = line.split(':', 1)[1].strip()
                break
    info['serial'] = serial
    return info
|
||||
|
||||
def fastboot_flash(self, serial, partition, filepath):
    """Flash an image to a partition asynchronously.

    Validates the file and partition name, then runs the flash in a
    background thread; returns an op_id for progress polling.
    """
    if not os.path.isfile(filepath):
        return {'success': False, 'error': f'File not found: {filepath}'}

    # Whitelist of partitions we will touch.
    valid_partitions = [
        'boot', 'recovery', 'system', 'vendor', 'vbmeta', 'dtbo',
        'radio', 'bootloader', 'super', 'userdata', 'cache',
        'product', 'system_ext', 'vendor_boot', 'init_boot',
    ]
    if partition not in valid_partitions:
        return {'success': False, 'error': f'Invalid partition: {partition}'}

    op_id = f'flash_{int(time.time())}'
    with self._operation_lock:
        self._operation_progress[op_id] = {
            'status': 'starting', 'progress': 0,
            'message': f'Flashing {partition}...',
        }

    def _worker():
        try:
            with self._operation_lock:
                self._operation_progress[op_id]['status'] = 'running'
                self._operation_progress[op_id]['progress'] = 10

            result = subprocess.run(
                [self.fastboot_path, '-s', serial, 'flash', partition, filepath],
                capture_output=True, text=True, timeout=600,
            )

            with self._operation_lock:
                output = result.stderr or result.stdout  # fastboot prefers stderr
                if result.returncode == 0:
                    self._operation_progress[op_id] = {
                        'status': 'done', 'progress': 100,
                        'message': f'Flashed {partition} successfully',
                        'output': output,
                    }
                else:
                    self._operation_progress[op_id] = {
                        'status': 'error', 'progress': 0,
                        'message': output or 'Flash failed',
                    }
        except Exception as e:
            with self._operation_lock:
                self._operation_progress[op_id] = {
                    'status': 'error', 'progress': 0, 'message': str(e),
                }

    threading.Thread(target=_worker, daemon=True).start()
    return {'success': True, 'op_id': op_id}
|
||||
|
||||
def fastboot_reboot(self, serial, mode='system'):
    """Reboot a fastboot device. mode: system, bootloader, recovery"""
    # Unknown modes intentionally fall back to a plain reboot.
    mode_args = {
        'system': ['reboot'],
        'bootloader': ['reboot-bootloader'],
        'recovery': ['reboot', 'recovery'],
    }
    args = mode_args.get(mode, ['reboot'])
    stdout, stderr, rc = self._run_fastboot(args, serial=serial, timeout=15)
    return {'success': rc == 0, 'output': stderr or stdout}
|
||||
|
||||
def fastboot_oem_unlock(self, serial):
    """OEM unlock (requires user confirmation in UI)."""
    out, err, code = self._run_fastboot(
        ['flashing', 'unlock'], serial=serial, timeout=30
    )
    return {'success': code == 0, 'output': err or out}
|
||||
|
||||
def get_operation_progress(self, op_id):
    """Get progress for a running operation (unknown IDs get a placeholder)."""
    fallback = {'status': 'unknown', 'progress': 0, 'message': 'Unknown operation'}
    with self._operation_lock:
        return self._operation_progress.get(op_id, fallback)
|
||||
|
||||
# ── Serial / ESP32 Methods ─────────────────────────────────────
|
||||
|
||||
def list_serial_ports(self):
    """List available serial ports with their USB metadata."""
    if not PYSERIAL_AVAILABLE:
        return []
    found = []
    for entry in serial.tools.list_ports.comports():
        found.append({
            'port': entry.device,
            'desc': entry.description,
            'hwid': entry.hwid,
            # vid/pid are ints or None; render as 4-digit hex when present.
            'vid': f'{entry.vid:04x}' if entry.vid else '',
            'pid': f'{entry.pid:04x}' if entry.pid else '',
            'manufacturer': entry.manufacturer or '',
            'serial_number': entry.serial_number or '',
        })
    return found
|
||||
|
||||
def detect_esp_chip(self, port, baud=115200):
    """Detect ESP chip type using esptool.

    Args:
        port: serial port the chip is attached to.
        baud: probe baud rate.

    Returns:
        dict with 'success', 'chip' (e.g. 'ESP32-D0WDQ6'), 'chip_id'
        when reported, and the raw combined output for debugging.
    """
    import sys  # local: this file's import header is outside this block

    if not ESPTOOL_AVAILABLE:
        return {'success': False, 'error': 'esptool not installed'}
    try:
        # sys.executable, not a hard-coded 'python3': works inside venvs
        # and on Windows, and guarantees the interpreter that has esptool.
        result = subprocess.run(
            [sys.executable, '-m', 'esptool',
             '--port', port, '--baud', str(baud), 'chip_id'],
            capture_output=True, text=True, timeout=15,
        )
        output = result.stdout + result.stderr
        chip = 'Unknown'
        chip_id = ''
        for line in output.split('\n'):
            if 'Chip is' in line:
                chip = line.split('Chip is')[1].strip()
            elif 'Chip ID:' in line:
                chip_id = line.split('Chip ID:')[1].strip()
        return {
            'success': result.returncode == 0,
            'chip': chip,
            'chip_id': chip_id,
            'output': output,
        }
    except subprocess.TimeoutExpired:
        return {'success': False, 'error': 'Detection timed out'}
    except Exception as e:
        return {'success': False, 'error': str(e)}
|
||||
|
||||
def flash_esp(self, port, firmware_path, baud=460800):
    """Flash ESP32 firmware. Returns operation ID for progress tracking.

    Spawns esptool in a background daemon thread and parses its stdout
    to update the shared progress table under self._operation_lock.

    Args:
        port: serial port of the target chip.
        firmware_path: firmware image, written at offset 0x0.
        baud: flashing baud rate.

    Returns:
        {'success': True, 'op_id': ...} once the worker starts, or
        {'success': False, 'error': ...} on validation failure.
    """
    import sys  # local: this file's import header is outside this block

    if not ESPTOOL_AVAILABLE:
        return {'success': False, 'error': 'esptool not installed'}
    if not os.path.isfile(firmware_path):
        return {'success': False, 'error': f'File not found: {firmware_path}'}

    # time_ns avoids op-id collisions when two flashes start in one second.
    op_id = f'esp_flash_{time.time_ns()}'
    with self._operation_lock:
        self._operation_progress[op_id] = {
            'status': 'starting', 'progress': 0,
            'message': 'Starting ESP flash...',
        }

    def _do_flash():
        proc = None
        try:
            with self._operation_lock:
                self._operation_progress[op_id]['status'] = 'running'
                self._operation_progress[op_id]['progress'] = 5
                self._operation_progress[op_id]['message'] = 'Connecting to chip...'

            # sys.executable, not 'python3': correct in venvs and on Windows.
            cmd = [
                sys.executable, '-m', 'esptool',
                '--port', port,
                '--baud', str(baud),
                'write_flash', '0x0', firmware_path,
            ]
            proc = subprocess.Popen(
                cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True,
            )

            output_lines = []
            for line in proc.stdout:
                line = line.strip()
                output_lines.append(line)

                # Progress hints parsed from esptool's own output.
                if 'Writing at' in line and '%' in line:
                    m = re.search(r'\((\d+)\s*%\)', line)
                    if m:
                        pct = int(m.group(1))
                        with self._operation_lock:
                            self._operation_progress[op_id]['progress'] = pct
                            self._operation_progress[op_id]['message'] = f'Flashing... {pct}%'
                elif 'Connecting' in line:
                    with self._operation_lock:
                        self._operation_progress[op_id]['message'] = 'Connecting...'
                elif 'Erasing' in line:
                    with self._operation_lock:
                        self._operation_progress[op_id]['progress'] = 3
                        self._operation_progress[op_id]['message'] = 'Erasing flash...'

            try:
                proc.wait(timeout=300)
            except subprocess.TimeoutExpired:
                # Previously the wedged esptool process was leaked; kill it
                # so the port is released, then report the timeout below.
                proc.kill()
                proc.wait()
                raise
            output = '\n'.join(output_lines)

            with self._operation_lock:
                if proc.returncode == 0:
                    self._operation_progress[op_id] = {
                        'status': 'done', 'progress': 100,
                        'message': 'Flash complete',
                        'output': output,
                    }
                else:
                    self._operation_progress[op_id] = {
                        'status': 'error', 'progress': 0,
                        'message': output or 'Flash failed',
                    }
        except Exception as e:
            if proc is not None and proc.poll() is None:
                proc.kill()
            with self._operation_lock:
                self._operation_progress[op_id] = {
                    'status': 'error', 'progress': 0, 'message': str(e),
                }

    thread = threading.Thread(target=_do_flash, daemon=True)
    thread.start()
    return {'success': True, 'op_id': op_id}
|
||||
|
||||
# ── Serial Monitor ─────────────────────────────────────────────
|
||||
|
||||
def serial_monitor_start(self, port, baud=115200):
    """Open `port` and start a daemon thread that buffers incoming lines."""
    if not PYSERIAL_AVAILABLE:
        return {'success': False, 'error': 'pyserial not installed'}
    if self._monitor_running:
        return {'success': False, 'error': 'Monitor already running'}

    try:
        self._monitor_serial = serial.Serial(port, baud, timeout=0.1)
    except Exception as e:
        return {'success': False, 'error': str(e)}

    self._monitor_running = True
    self._monitor_buffer = []

    def _read_loop():
        # Poll until stop() clears the run flag or the port closes.
        while self._monitor_running and self._monitor_serial and self._monitor_serial.is_open:
            try:
                raw = self._monitor_serial.readline()
                if not raw:
                    continue
                decoded = raw.decode('utf-8', errors='replace').rstrip()
                with self._monitor_lock:
                    self._monitor_buffer.append({
                        'time': time.time(),
                        'data': decoded,
                    })
                    # Trim so the buffer cannot grow without bound.
                    if len(self._monitor_buffer) > 5000:
                        self._monitor_buffer = self._monitor_buffer[-3000:]
            except Exception:
                if not self._monitor_running:
                    break
                time.sleep(0.1)

    self._monitor_thread = threading.Thread(target=_read_loop, daemon=True)
    self._monitor_thread.start()
    return {'success': True, 'port': port, 'baud': baud}
|
||||
|
||||
def serial_monitor_stop(self):
    """Stop the serial monitor and close the port (best-effort)."""
    self._monitor_running = False
    handle = self._monitor_serial
    if handle and handle.is_open:
        try:
            handle.close()
        except Exception:
            # Port may already be gone (e.g. device unplugged); ignore.
            pass
    self._monitor_serial = None
    return {'success': True}
|
||||
|
||||
def serial_monitor_send(self, data):
    """Write `data` plus a newline to the monitored serial port."""
    if not self._monitor_running or not self._monitor_serial:
        return {'success': False, 'error': 'Monitor not running'}
    try:
        payload = (data + '\n').encode('utf-8')
        self._monitor_serial.write(payload)
    except Exception as e:
        return {'success': False, 'error': str(e)}
    return {'success': True}
|
||||
|
||||
def serial_monitor_get_output(self, since_index=0):
    """Return buffered serial lines starting at `since_index`."""
    with self._monitor_lock:
        return {
            'lines': self._monitor_buffer[since_index:],
            'total': len(self._monitor_buffer),
            'running': self._monitor_running,
        }
|
||||
|
||||
@property
def monitor_running(self):
    # Read-only view of the monitor thread's run flag.
    return self._monitor_running
|
||||
|
||||
|
||||
# ── Singleton ──────────────────────────────────────────────────────
|
||||
|
||||
# Lazily-created process-wide HardwareManager instance.
_manager = None


def get_hardware_manager():
    """Return the shared HardwareManager, creating it on first call.

    NOTE(review): not thread-safe — concurrent first calls could create
    two instances; confirm the first call happens before worker threads.
    """
    global _manager
    if _manager is None:
        _manager = HardwareManager()
    return _manager
|
||||
683
core/iphone_exploit.py
Normal file
683
core/iphone_exploit.py
Normal file
@@ -0,0 +1,683 @@
|
||||
"""
|
||||
AUTARCH iPhone Exploitation Manager
|
||||
Local USB device access via libimobiledevice tools.
|
||||
Device info, screenshots, syslog, app management, backup extraction,
|
||||
filesystem mounting, provisioning profiles, port forwarding.
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import json
|
||||
import time
|
||||
import shutil
|
||||
import sqlite3
|
||||
import subprocess
|
||||
import plistlib
|
||||
from pathlib import Path
|
||||
from typing import Optional, Dict, Any
|
||||
|
||||
from core.paths import get_data_dir, find_tool
|
||||
|
||||
|
||||
class IPhoneExploitManager:
    """All iPhone USB exploitation logic using libimobiledevice."""

    # Tools we look for at construction time; only the ones actually
    # found (bundled or on PATH) end up in self._tools.
    TOOLS = [
        'idevice_id', 'ideviceinfo', 'idevicepair', 'idevicename',
        'idevicedate', 'idevicescreenshot', 'idevicesyslog',
        'idevicecrashreport', 'idevicediagnostics', 'ideviceinstaller',
        'idevicebackup2', 'ideviceprovision', 'idevicedebug',
        'ideviceactivation', 'ifuse', 'iproxy',
    ]
|
||||
|
||||
def __init__(self):
    """Prepare working directories and locate libimobiledevice tools."""
    self._base = get_data_dir() / 'iphone_exploit'
    for sub in ('backups', 'screenshots', 'recon', 'apps', 'crash_reports'):
        (self._base / sub).mkdir(parents=True, exist_ok=True)
    # Probe each known tool: prefer a bundled copy (find_tool), fall
    # back to whatever is on PATH.
    self._tools = {}
    for name in self.TOOLS:
        location = find_tool(name) or shutil.which(name)
        if location:
            self._tools[name] = location
|
||||
|
||||
def _udid_dir(self, category, udid):
|
||||
d = self._base / category / udid
|
||||
d.mkdir(parents=True, exist_ok=True)
|
||||
return d
|
||||
|
||||
def _run(self, tool_name, args, timeout=30):
|
||||
"""Run a libimobiledevice tool."""
|
||||
path = self._tools.get(tool_name)
|
||||
if not path:
|
||||
return '', f'{tool_name} not found', 1
|
||||
cmd = [path] + args
|
||||
try:
|
||||
result = subprocess.run(cmd, capture_output=True, text=True, timeout=timeout)
|
||||
return result.stdout, result.stderr, result.returncode
|
||||
except subprocess.TimeoutExpired:
|
||||
return '', 'Command timed out', 1
|
||||
except Exception as e:
|
||||
return '', str(e), 1
|
||||
|
||||
def _run_udid(self, tool_name, udid, args, timeout=30):
|
||||
"""Run tool with -u UDID flag."""
|
||||
return self._run(tool_name, ['-u', udid] + args, timeout=timeout)
|
||||
|
||||
def get_status(self):
    """Report availability of the libimobiledevice tool suite.

    Fixes two defects of the previous version: the 'tools' map now lists
    EVERY known tool (missing ones were silently absent, since _tools
    only contains found tools), and 'ready' now requires the three core
    tools named in the original comment rather than any arbitrary three.

    Returns:
        dict with 'tools' (name -> bool for all TOOLS), 'total',
        'found', and 'ready'.
    """
    available = {name: name in self._tools for name in self.TOOLS}
    found = sum(available.values())
    # Core minimum: device discovery, info and pairing must all work.
    core = ('idevice_id', 'ideviceinfo', 'idevicepair')
    return {
        'tools': available,
        'total': len(self.TOOLS),
        'found': found,
        'ready': all(t in self._tools for t in core),
    }
|
||||
|
||||
# ── Device Management ────────────────────────────────────────────
|
||||
|
||||
def list_devices(self):
    """List connected iOS devices with brief identity info."""
    out, err, code = self._run('idevice_id', ['-l'])
    if code != 0:
        return []
    found = []
    # One UDID per line of idevice_id output.
    for raw in out.strip().split('\n'):
        udid = raw.strip()
        if not udid:
            continue
        brief = self.device_info_brief(udid)
        found.append({
            'udid': udid,
            'name': brief.get('DeviceName', ''),
            'model': brief.get('ProductType', ''),
            'ios_version': brief.get('ProductVersion', ''),
        })
    return found
|
||||
|
||||
def device_info(self, udid):
    """Return all ideviceinfo key/value pairs as a dict."""
    out, err, code = self._run_udid('ideviceinfo', udid, [])
    if code != 0:
        return {'error': err or 'Cannot get device info'}
    pairs = {}
    for raw in out.split('\n'):
        if ':' not in raw:
            continue
        # partition keeps any ':' inside the value intact.
        key, _, value = raw.partition(':')
        pairs[key.strip()] = value.strip()
    return pairs
|
||||
|
||||
def device_info_brief(self, udid):
    """Fetch a fixed set of identifying keys, one `-k` query each."""
    wanted = ('DeviceName', 'ProductType', 'ProductVersion', 'BuildVersion',
              'SerialNumber', 'UniqueChipID', 'WiFiAddress', 'BluetoothAddress')
    brief = {}
    for key in wanted:
        out, _, code = self._run_udid('ideviceinfo', udid, ['-k', key])
        if code == 0:
            brief[key] = out.strip()
    return brief
|
||||
|
||||
def device_info_domain(self, udid, domain):
    """Return key/value pairs for one ideviceinfo `-q` domain."""
    out, err, code = self._run_udid('ideviceinfo', udid, ['-q', domain])
    if code != 0:
        return {'error': err}
    pairs = {}
    for raw in out.split('\n'):
        if ':' not in raw:
            continue
        key, _, value = raw.partition(':')
        pairs[key.strip()] = value.strip()
    return pairs
|
||||
|
||||
def pair_device(self, udid):
    """Pair with device (requires user trust on device)."""
    out, err, code = self._run_udid('idevicepair', udid, ['pair'])
    return {'success': code == 0, 'output': (out or err).strip()}


def unpair_device(self, udid):
    """Unpair from device."""
    out, err, code = self._run_udid('idevicepair', udid, ['unpair'])
    return {'success': code == 0, 'output': (out or err).strip()}


def validate_pair(self, udid):
    """Check if device is properly paired."""
    out, err, code = self._run_udid('idevicepair', udid, ['validate'])
    ok = code == 0
    return {'success': ok, 'paired': ok, 'output': (out or err).strip()}
|
||||
|
||||
def get_name(self, udid):
    """Get device name."""
    out, _, code = self._run_udid('idevicename', udid, [])
    return {'success': code == 0, 'name': out.strip()}


def set_name(self, udid, name):
    """Set device name."""
    out, err, code = self._run_udid('idevicename', udid, [name])
    return {'success': code == 0, 'output': (out or err).strip()}
|
||||
|
||||
def get_date(self, udid):
    """Get device date/time."""
    out, _, code = self._run_udid('idevicedate', udid, [])
    return {'success': code == 0, 'date': out.strip()}


def set_date(self, udid, timestamp):
    """Set device date (epoch timestamp)."""
    out, err, code = self._run_udid('idevicedate', udid, ['-s', str(timestamp)])
    return {'success': code == 0, 'output': (out or err).strip()}
|
||||
|
||||
def restart_device(self, udid):
    """Restart device."""
    out, err, code = self._run_udid('idevicediagnostics', udid, ['restart'])
    return {'success': code == 0, 'output': (out or err).strip()}


def shutdown_device(self, udid):
    """Shutdown device."""
    out, err, code = self._run_udid('idevicediagnostics', udid, ['shutdown'])
    return {'success': code == 0, 'output': (out or err).strip()}


def sleep_device(self, udid):
    """Put device to sleep."""
    out, err, code = self._run_udid('idevicediagnostics', udid, ['sleep'])
    return {'success': code == 0, 'output': (out or err).strip()}
|
||||
|
||||
# ── Screenshot & Syslog ──────────────────────────────────────────
|
||||
|
||||
def screenshot(self, udid):
    """Capture the device screen to a timestamped PNG."""
    out_dir = self._udid_dir('screenshots', udid)
    target = str(out_dir / f'screen_{int(time.time())}.png')
    out, err, code = self._run_udid('idevicescreenshot', udid, [target])
    # Double-check the file exists: the tool can exit 0 without writing.
    if code == 0 and os.path.exists(target):
        return {'success': True, 'path': target, 'size': os.path.getsize(target)}
    return {'success': False, 'error': (err or out).strip()}
|
||||
|
||||
def syslog_dump(self, udid, duration=5):
    """Capture device syslog for `duration` seconds into a file.

    Fixes two defects: the previous fallback to the bare tool name made
    a missing idevicesyslog raise inside Popen instead of failing like
    every other wrapper, and a communicate() timeout could leave the
    child process running; it is now killed.

    Returns:
        {'success': True, 'path': ..., 'lines': ...} or an error dict.
    """
    tool = self._tools.get('idevicesyslog')
    if not tool:
        return {'success': False, 'error': 'idevicesyslog not found'}
    out_dir = self._udid_dir('recon', udid)
    logfile = str(out_dir / f'syslog_{int(time.time())}.txt')
    proc = None
    try:
        proc = subprocess.Popen(
            [tool, '-u', udid],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True
        )
        time.sleep(duration)
        proc.terminate()
        try:
            stdout, _ = proc.communicate(timeout=3)
        except subprocess.TimeoutExpired:
            proc.kill()  # terminate() was ignored; do not leak the process
            stdout, _ = proc.communicate()
        with open(logfile, 'w') as f:
            f.write(stdout)
        return {'success': True, 'path': logfile, 'lines': len(stdout.split('\n'))}
    except Exception as e:
        if proc is not None and proc.poll() is None:
            proc.kill()
        return {'success': False, 'error': str(e)}
|
||||
|
||||
def syslog_grep(self, udid, pattern, duration=5):
    """Capture syslog and return lines matching `pattern` (case-insensitive)."""
    captured = self.syslog_dump(udid, duration=duration)
    if not captured['success']:
        return captured
    hits = []
    try:
        with open(captured['path']) as fh:
            for ln in fh:
                if re.search(pattern, ln, re.IGNORECASE):
                    hits.append(ln.strip())
    except Exception:
        # Best-effort: keep whatever matched before the failure.
        pass
    return {'success': True, 'matches': hits, 'count': len(hits), 'pattern': pattern}
|
||||
|
||||
def crash_reports(self, udid):
    """Pull crash reports from the device into the local crash_reports dir."""
    dest = self._udid_dir('crash_reports', udid)
    out, err, code = self._run_udid('idevicecrashreport', udid,
                                    ['-e', str(dest)], timeout=60)
    if code != 0:
        return {'success': False, 'error': (err or out).strip()}
    files = list(dest.iterdir()) if dest.exists() else []
    return {'success': True, 'output_dir': str(dest),
            'count': len(files), 'output': out.strip()}
|
||||
|
||||
# ── App Management ───────────────────────────────────────────────
|
||||
|
||||
def list_apps(self, udid, app_type='user'):
    """List installed apps. type: user, system, all.

    Parses ideviceinstaller's CSV-style output with the csv module, so
    quoted app names containing commas (e.g. `"Notes, Pro"`) are no
    longer split apart as the old `line.split(',')` did.

    Returns:
        {'success': ..., 'apps': [...], 'count': ...} (or 'error').
    """
    import csv  # local: this file's import header is outside this block

    flags = {
        'user': ['-l', '-o', 'list_user'],
        'system': ['-l', '-o', 'list_system'],
        'all': ['-l', '-o', 'list_all'],
    }
    args = flags.get(app_type, ['-l'])
    stdout, stderr, rc = self._run_udid('ideviceinstaller', udid, args, timeout=30)
    if rc != 0:
        return {'success': False, 'error': (stderr or stdout).strip(), 'apps': []}
    apps = []
    for line in stdout.strip().split('\n'):
        line = line.strip()
        # Skip blanks, the CFBundle* header line and trailing totals.
        if not line or line.startswith('CFBundle') or line.startswith('Total'):
            continue
        # Format: com.example.app, "App Name", "1.0"
        row = next(csv.reader([line], skipinitialspace=True), None)
        if not row:
            continue
        app = {'bundle_id': row[0].strip().strip('"')}
        if len(row) >= 2:
            app['name'] = row[1].strip().strip('"')
        if len(row) >= 3:
            app['version'] = row[2].strip().strip('"')
        apps.append(app)
    return {'success': True, 'apps': apps, 'count': len(apps)}
|
||||
|
||||
def install_app(self, udid, ipa_path):
    """Install an IPA on device."""
    if not os.path.isfile(ipa_path):
        return {'success': False, 'error': f'File not found: {ipa_path}'}
    out, err, code = self._run_udid('ideviceinstaller', udid,
                                    ['-i', ipa_path], timeout=120)
    return {'success': code == 0, 'output': (out or err).strip()}
|
||||
|
||||
def uninstall_app(self, udid, bundle_id):
    """Uninstall an app by bundle ID."""
    out, err, code = self._run_udid('ideviceinstaller', udid,
                                    ['-U', bundle_id], timeout=30)
    return {'success': code == 0, 'bundle_id': bundle_id,
            'output': (out or err).strip()}
|
||||
|
||||
# ── Backup & Data Extraction ─────────────────────────────────────
|
||||
|
||||
def create_backup(self, udid, encrypted=False, password=''):
    """Create a full device backup under <data>/iphone_exploit/backups."""
    backup_dir = str(self._base / 'backups')
    args = ['backup', '--full', backup_dir]
    if encrypted and password:
        args += ['-p', password]
    out, err, code = self._run_udid('idevicebackup2', udid, args, timeout=600)
    # idevicebackup2 writes into <backup_dir>/<udid>; success means the
    # directory materialized, regardless of the tool's chatter.
    dest = os.path.join(backup_dir, udid)
    ok = os.path.isdir(dest)
    return {
        'success': ok,
        'backup_path': dest if ok else None,
        'encrypted': encrypted,
        'output': (out or err).strip()[:500],
    }
|
||||
|
||||
def list_backups(self):
    """List available local backups with metadata from each Info.plist."""
    root = self._base / 'backups'
    if not root.exists():
        return {'backups': [], 'count': 0}
    found = []
    for entry in root.iterdir():
        if not entry.is_dir():
            continue
        record = {'udid': entry.name, 'path': str(entry)}
        if (entry / 'Manifest.db').exists():
            record['has_manifest'] = True
            # Total on-disk size of the backup, in MiB.
            record['size_mb'] = sum(
                f.stat().st_size for f in entry.rglob('*') if f.is_file()
            ) / (1024 * 1024)
        plist_path = entry / 'Info.plist'
        if plist_path.exists():
            try:
                with open(plist_path, 'rb') as fh:
                    meta = plistlib.load(fh)
                record['device_name'] = meta.get('Device Name', '')
                record['product_type'] = meta.get('Product Type', '')
                record['ios_version'] = meta.get('Product Version', '')
                record['date'] = str(meta.get('Last Backup Date', ''))
            except Exception:
                # Corrupt plist: keep the basic record.
                pass
        found.append(record)
    return {'backups': found, 'count': len(found)}
|
||||
|
||||
def extract_backup_sms(self, backup_path):
    """Extract SMS/iMessage records from a backup.

    Looks up sms.db via its Manifest.db hash, then reads up to the 500
    most recent messages. Fix: both sqlite connections are now closed in
    `finally`, so a corrupt database no longer leaks a handle.
    """
    manifest = os.path.join(backup_path, 'Manifest.db')
    if not os.path.exists(manifest):
        return {'success': False, 'error': 'Manifest.db not found'}
    try:
        conn = sqlite3.connect(manifest)
        try:
            cur = conn.cursor()
            # Find SMS database file hash
            cur.execute("SELECT fileID FROM Files WHERE relativePath = 'Library/SMS/sms.db' AND domain = 'HomeDomain'")
            row = cur.fetchone()
        finally:
            conn.close()
        if not row:
            return {'success': False, 'error': 'SMS database not found in backup'}
        file_hash = row[0]
        # Backup files live at <backup>/<first two hash chars>/<hash>.
        sms_db = os.path.join(backup_path, file_hash[:2], file_hash)
        if not os.path.exists(sms_db):
            return {'success': False, 'error': f'SMS db file not found: {file_hash}'}
        conn = sqlite3.connect(sms_db)
        try:
            cur = conn.cursor()
            cur.execute('''
                SELECT m.rowid, m.text, m.date, m.is_from_me,
                       h.id AS handle_id, h.uncanonicalized_id
                FROM message m
                LEFT JOIN handle h ON m.handle_id = h.rowid
                ORDER BY m.date DESC LIMIT 500
            ''')
            rows = cur.fetchall()
        finally:
            conn.close()
        apple_epoch = 978307200  # seconds from 1970-01-01 to 2001-01-01
        messages = []
        for row in rows:
            ts = row[2]
            if ts and ts > 1e17:
                ts = ts / 1e9  # newer iOS stores nanoseconds since 2001
            date_readable = ''
            if ts:
                try:
                    date_readable = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(ts + apple_epoch))
                except (ValueError, OSError):
                    pass
            messages.append({
                'id': row[0], 'text': row[1] or '', 'date': date_readable,
                'is_from_me': bool(row[3]),
                'handle': row[4] or row[5] or '',
            })
        return {'success': True, 'messages': messages, 'count': len(messages)}
    except Exception as e:
        return {'success': False, 'error': str(e)}
|
||||
|
||||
def extract_backup_contacts(self, backup_path):
    """Extract contacts from backup.

    Fix: sqlite connections are now closed in `finally`, so a corrupt
    database no longer leaks a handle on the exception path.
    """
    manifest = os.path.join(backup_path, 'Manifest.db')
    if not os.path.exists(manifest):
        return {'success': False, 'error': 'Manifest.db not found'}
    try:
        conn = sqlite3.connect(manifest)
        try:
            cur = conn.cursor()
            cur.execute("SELECT fileID FROM Files WHERE relativePath = 'Library/AddressBook/AddressBook.sqlitedb' AND domain = 'HomeDomain'")
            row = cur.fetchone()
        finally:
            conn.close()
        if not row:
            return {'success': False, 'error': 'AddressBook not found in backup'}
        file_hash = row[0]
        ab_db = os.path.join(backup_path, file_hash[:2], file_hash)
        if not os.path.exists(ab_db):
            return {'success': False, 'error': 'AddressBook file not found'}
        conn = sqlite3.connect(ab_db)
        try:
            cur = conn.cursor()
            cur.execute('''
                SELECT p.rowid, p.First, p.Last, p.Organization,
                       mv.value AS phone_or_email
                FROM ABPerson p
                LEFT JOIN ABMultiValue mv ON p.rowid = mv.record_id
                ORDER BY p.Last, p.First
            ''')
            rows = cur.fetchall()
        finally:
            conn.close()
        # One entry per person; phones/emails fold into 'values'.
        contacts = {}
        for row in rows:
            rid = row[0]
            if rid not in contacts:
                contacts[rid] = {
                    'first': row[1] or '', 'last': row[2] or '',
                    'organization': row[3] or '', 'values': []
                }
            if row[4]:
                contacts[rid]['values'].append(row[4])
        contact_list = list(contacts.values())
        return {'success': True, 'contacts': contact_list, 'count': len(contact_list)}
    except Exception as e:
        return {'success': False, 'error': str(e)}
|
||||
|
||||
def extract_backup_call_log(self, backup_path):
    """Extract call history from backup.

    The CallHistory store is a Core Data database: its columns are
    Z-prefixed (ZADDRESS, ZDATE, ...). The previous query mixed legacy
    call_history.db names (address, date, flags) with ZCALLRECORD while
    ordering by ZDATE, so it could never execute; it now uses the Core
    Data names throughout. Connections are closed in `finally` so an
    error cannot leak a handle.
    """
    manifest = os.path.join(backup_path, 'Manifest.db')
    if not os.path.exists(manifest):
        return {'success': False, 'error': 'Manifest.db not found'}
    try:
        conn = sqlite3.connect(manifest)
        try:
            cur = conn.cursor()
            cur.execute("SELECT fileID FROM Files WHERE relativePath LIKE '%CallHistory%' AND domain = 'HomeDomain'")
            row = cur.fetchone()
        finally:
            conn.close()
        if not row:
            return {'success': False, 'error': 'Call history not found in backup'}
        file_hash = row[0]
        ch_db = os.path.join(backup_path, file_hash[:2], file_hash)
        if not os.path.exists(ch_db):
            return {'success': False, 'error': 'Call history file not found'}
        conn = sqlite3.connect(ch_db)
        try:
            cur = conn.cursor()
            cur.execute('''
                SELECT Z_PK, ZADDRESS, ZDATE, ZDURATION, ZCALLTYPE, ZISO_COUNTRY_CODE
                FROM ZCALLRECORD ORDER BY ZDATE DESC LIMIT 200
            ''')
            rows = cur.fetchall()
        finally:
            conn.close()
        # NOTE(review): mapping kept from the original code's legacy flag
        # values — verify against a real CallHistory.storedata; unknown
        # values fall through to their string form.
        flag_map = {4: 'incoming', 5: 'outgoing', 8: 'missed'}
        apple_epoch = 978307200  # seconds from 1970-01-01 to 2001-01-01
        calls = []
        for row in rows:
            ts = row[2]
            date_readable = ''
            if ts:
                try:
                    date_readable = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(ts + apple_epoch))
                except (ValueError, OSError):
                    pass
            calls.append({
                'id': row[0], 'address': row[1] or '', 'date': date_readable,
                'duration': row[3] or 0, 'type': flag_map.get(row[4], str(row[4])),
                'country': row[5] or '',
            })
        return {'success': True, 'calls': calls, 'count': len(calls)}
    except Exception as e:
        return {'success': False, 'error': str(e)}
|
||||
|
||||
def extract_backup_notes(self, backup_path):
    """Extract notes from backup.

    Fix: sqlite connections are now closed in `finally`, so a corrupt
    database no longer leaks a handle on the exception path.
    """
    manifest = os.path.join(backup_path, 'Manifest.db')
    if not os.path.exists(manifest):
        return {'success': False, 'error': 'Manifest.db not found'}
    try:
        conn = sqlite3.connect(manifest)
        try:
            cur = conn.cursor()
            cur.execute("SELECT fileID FROM Files WHERE relativePath LIKE '%NoteStore.sqlite%' AND domain = 'AppDomainGroup-group.com.apple.notes'")
            row = cur.fetchone()
        finally:
            conn.close()
        if not row:
            return {'success': False, 'error': 'Notes database not found in backup'}
        file_hash = row[0]
        notes_db = os.path.join(backup_path, file_hash[:2], file_hash)
        if not os.path.exists(notes_db):
            return {'success': False, 'error': 'Notes file not found'}
        conn = sqlite3.connect(notes_db)
        try:
            cur = conn.cursor()
            cur.execute('''
                SELECT n.Z_PK, n.ZTITLE, nb.ZDATA, n.ZMODIFICATIONDATE
                FROM ZICCLOUDSYNCINGOBJECT n
                LEFT JOIN ZICNOTEDATA nb ON n.Z_PK = nb.ZNOTE
                WHERE n.ZTITLE IS NOT NULL
                ORDER BY n.ZMODIFICATIONDATE DESC LIMIT 100
            ''')
            rows = cur.fetchall()
        finally:
            conn.close()
        apple_epoch = 978307200  # seconds from 1970-01-01 to 2001-01-01
        notes = []
        for row in rows:
            body = ''
            if row[2]:
                try:
                    # ZDATA is a blob; keep a readable, truncated preview.
                    body = row[2].decode('utf-8', errors='replace')[:500]
                except Exception:
                    body = '[binary data]'
            date_readable = ''
            if row[3]:
                try:
                    date_readable = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(row[3] + apple_epoch))
                except (ValueError, OSError):
                    pass
            notes.append({'id': row[0], 'title': row[1] or '', 'body': body, 'date': date_readable})
        return {'success': True, 'notes': notes, 'count': len(notes)}
    except Exception as e:
        return {'success': False, 'error': str(e)}
|
||||
|
||||
def list_backup_files(self, backup_path, domain='', path_filter=''):
    """List files recorded in a backup's Manifest.db (capped at 500).

    Filters are applied as parameterized LIKE clauses (no SQL built from
    user strings). Fix: the connection is now closed in `finally`, so a
    corrupt Manifest.db no longer leaks a handle.
    """
    manifest = os.path.join(backup_path, 'Manifest.db')
    if not os.path.exists(manifest):
        return {'success': False, 'error': 'Manifest.db not found'}
    try:
        conn = sqlite3.connect(manifest)
        try:
            cur = conn.cursor()
            query = 'SELECT fileID, domain, relativePath, flags FROM Files'
            conditions = []
            params = []
            if domain:
                conditions.append('domain LIKE ?')
                params.append(f'%{domain}%')
            if path_filter:
                conditions.append('relativePath LIKE ?')
                params.append(f'%{path_filter}%')
            if conditions:
                query += ' WHERE ' + ' AND '.join(conditions)
            query += ' LIMIT 500'
            cur.execute(query, params)
            files = [
                {'hash': r[0], 'domain': r[1], 'path': r[2], 'flags': r[3]}
                for r in cur.fetchall()
            ]
        finally:
            conn.close()
        return {'success': True, 'files': files, 'count': len(files)}
    except Exception as e:
        return {'success': False, 'error': str(e)}
|
||||
|
||||
def extract_backup_file(self, backup_path, file_hash, output_name=None):
    """Copy one hashed backup file into recon/extracted for inspection."""
    # Backup files live at <backup>/<first two hash chars>/<hash>.
    src = os.path.join(backup_path, file_hash[:2], file_hash)
    if not os.path.exists(src):
        return {'success': False, 'error': f'File not found: {file_hash}'}
    out_dir = self._base / 'recon' / 'extracted'
    out_dir.mkdir(parents=True, exist_ok=True)
    dest = str(out_dir / (output_name or file_hash))
    shutil.copy2(src, dest)
    return {'success': True, 'path': dest, 'size': os.path.getsize(dest)}
|
||||
|
||||
# ── Filesystem ───────────────────────────────────────────────────
|
||||
|
||||
def mount_filesystem(self, udid, mountpoint=None):
    """Mount the device's media filesystem using ifuse.

    When no mountpoint is given, defaults to <base>/mnt/<udid> and
    creates the directory if needed.

    Returns:
        dict with 'success', 'mountpoint' and the tool's 'output'.
    """
    if 'ifuse' not in self._tools:
        return {'success': False, 'error': 'ifuse not installed'}
    target = mountpoint or str(self._base / 'mnt' / udid)
    os.makedirs(target, exist_ok=True)
    stdout, stderr, rc = self._run('ifuse', ['-u', udid, target])
    return {'success': rc == 0, 'mountpoint': target,
            'output': (stderr or stdout).strip()}
|
||||
|
||||
def mount_app_documents(self, udid, bundle_id, mountpoint=None):
    """Mount one app's Documents container via `ifuse --documents`.

    Defaults the mountpoint to <base>/mnt/<udid>/<bundle_id> when not
    supplied, creating the directory as needed.

    Returns:
        dict with 'success', 'mountpoint' and the tool's 'output'.
    """
    if 'ifuse' not in self._tools:
        return {'success': False, 'error': 'ifuse not installed'}
    target = mountpoint or str(self._base / 'mnt' / udid / bundle_id)
    os.makedirs(target, exist_ok=True)
    stdout, stderr, rc = self._run(
        'ifuse', ['-u', udid, '--documents', bundle_id, target])
    return {'success': rc == 0, 'mountpoint': target,
            'output': (stderr or stdout).strip()}
|
||||
|
||||
def unmount_filesystem(self, mountpoint):
    """Unmount a filesystem previously mounted via ifuse.

    Args:
        mountpoint: Path that was passed to mount_filesystem / ifuse.

    Returns:
        dict with 'success'. Failure carries fusermount's error output.
        (The original ignored fusermount's exit code and reported
        success even when the unmount failed.)
    """
    try:
        result = subprocess.run(['fusermount', '-u', mountpoint],
                                capture_output=True, timeout=10)
        if result.returncode != 0:
            err = result.stderr.decode(errors='replace').strip()
            return {'success': False,
                    'error': err or f'fusermount exited {result.returncode}'}
        return {'success': True, 'mountpoint': mountpoint}
    except Exception as e:
        return {'success': False, 'error': str(e)}
|
||||
|
||||
# ── Provisioning Profiles ────────────────────────────────────────
|
||||
|
||||
def list_profiles(self, udid):
    """List provisioning profiles installed on the device.

    Parses `ideviceprovision list` output: lines of the form
    "<uuid> - <name>" start a profile entry; a blank line ends it.
    Lines starting with 'ProvisionedDevices' are skipped.

    Returns:
        dict with 'success', 'profiles' (list of {'id', 'name'}) and 'count'.
    """
    stdout, stderr, rc = self._run_udid('ideviceprovision', udid, ['list'], timeout=15)
    if rc != 0:
        return {'success': False, 'error': (stderr or stdout).strip(), 'profiles': []}
    profiles = []
    current = {}
    for raw in stdout.split('\n'):
        text = raw.strip()
        if text.startswith('ProvisionedDevices'):
            continue
        if ' - ' in text and not current:
            ident, _, label = text.partition(' - ')
            current = {'id': ident.strip(), 'name': label.strip()}
        elif text == '' and current:
            profiles.append(current)
            current = {}
    if current:
        # Flush a trailing entry not followed by a blank line.
        profiles.append(current)
    return {'success': True, 'profiles': profiles, 'count': len(profiles)}
|
||||
|
||||
def install_profile(self, udid, profile_path):
|
||||
"""Install a provisioning/configuration profile."""
|
||||
if not os.path.isfile(profile_path):
|
||||
return {'success': False, 'error': f'File not found: {profile_path}'}
|
||||
stdout, stderr, rc = self._run_udid('ideviceprovision', udid,
|
||||
['install', profile_path], timeout=15)
|
||||
return {'success': rc == 0, 'output': (stdout or stderr).strip()}
|
||||
|
||||
def remove_profile(self, udid, profile_id):
    """Remove a provisioning profile identified by its profile ID.

    Returns:
        dict with 'success' (exit code 0) and the tool 'output'.
    """
    stdout, stderr, rc = self._run_udid(
        'ideviceprovision', udid, ['remove', profile_id], timeout=15)
    return {'success': rc == 0, 'output': (stdout or stderr).strip()}
|
||||
|
||||
# ── Port Forwarding ──────────────────────────────────────────────
|
||||
|
||||
def port_forward(self, udid, local_port, device_port):
    """Forward a local TCP port to a device port via a background iproxy.

    Spawns iproxy and polls briefly; if the process died immediately,
    its stderr is returned as the error.

    Returns:
        dict with 'success' and either 'pid'/'local'/'device' or 'error'.
    """
    if 'iproxy' not in self._tools:
        return {'success': False, 'error': 'iproxy not installed'}
    try:
        cmd = [self._tools['iproxy'], '-u', udid,
               str(local_port), str(device_port)]
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        # Short grace period to catch immediate startup failures.
        time.sleep(0.5)
        if proc.poll() is not None:
            _, err = proc.communicate()
            return {'success': False, 'error': err.decode().strip()}
        return {'success': True, 'pid': proc.pid,
                'local': local_port, 'device': device_port}
    except Exception as e:
        return {'success': False, 'error': str(e)}
|
||||
|
||||
# ── Device Fingerprint ───────────────────────────────────────────
|
||||
|
||||
def full_fingerprint(self, udid):
    """Build a comprehensive device fingerprint.

    Starts from the basic device_info() dict and merges selected
    lockdown domains that respond without error, keyed as
    'domain_<last-dotted-component>'.
    """
    fp = self.device_info(udid)
    extra_domains = ('com.apple.disk_usage', 'com.apple.mobile.battery',
                     'com.apple.mobile.internal', 'com.apple.international')
    for dom in extra_domains:
        info = self.device_info_domain(udid, dom)
        if 'error' not in info:
            fp[f'domain_{dom.split(".")[-1]}'] = info
    return fp
|
||||
|
||||
def export_recon_report(self, udid):
    """Write a JSON reconnaissance report for a device.

    Collects device info, pairing status, installed apps and profiles,
    and saves them to recon/<udid>/report_<epoch>.json. Non-serializable
    values are stringified via default=str.

    Returns:
        dict with 'success' and 'report_path'.
    """
    out_dir = self._udid_dir('recon', udid)
    report = {
        'udid': udid,
        'timestamp': time.strftime('%Y-%m-%d %H:%M:%S'),
        'device_info': self.device_info(udid),
        'pair_status': self.validate_pair(udid),
        'apps': self.list_apps(udid),
        'profiles': self.list_profiles(udid),
    }
    fname = f'report_{int(time.time())}.json'
    report_path = str(out_dir / fname)
    with open(report_path, 'w') as fh:
        json.dump(report, fh, indent=2, default=str)
    return {'success': True, 'report_path': report_path}
|
||||
|
||||
|
||||
# ── Singleton ──────────────────────────────────────────────────────
|
||||
|
||||
# Lazily created module-level singleton.
_manager = None


def get_iphone_manager():
    """Return the shared IPhoneExploitManager, creating it on first use."""
    global _manager
    if _manager is None:
        _manager = IPhoneExploitManager()
    return _manager
|
||||
1465
core/llm.py
Normal file
1465
core/llm.py
Normal file
File diff suppressed because it is too large
Load Diff
585
core/mcp_server.py
Normal file
585
core/mcp_server.py
Normal file
@@ -0,0 +1,585 @@
|
||||
"""
|
||||
AUTARCH MCP Server
|
||||
Exposes AUTARCH tools via Model Context Protocol (MCP)
|
||||
for use with Claude Desktop, Claude Code, and other MCP clients.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
import json
|
||||
import socket
|
||||
import subprocess
|
||||
import threading
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
# Ensure core is importable
|
||||
_app_dir = Path(__file__).resolve().parent.parent
|
||||
if str(_app_dir) not in sys.path:
|
||||
sys.path.insert(0, str(_app_dir))
|
||||
|
||||
from core.config import get_config
|
||||
from core.paths import find_tool, get_app_dir
|
||||
|
||||
# MCP server state
|
||||
_server_process = None
|
||||
_server_thread = None
|
||||
|
||||
|
||||
def get_autarch_tools():
    """Build the list of AUTARCH tools to expose via MCP.

    Returns:
        list[dict]: one entry per tool with 'name', 'description' and a
        'params' mapping of parameter name -> {'type', 'description',
        'required'}. Order matters: it mirrors the registration order in
        create_mcp_server().
    """
    # Declarative tool catalogue, one literal per tool.
    return [
        {
            'name': 'nmap_scan',
            'description': 'Run an nmap scan against a target. Returns scan results.',
            'params': {
                'target': {'type': 'string', 'description': 'Target IP, hostname, or CIDR range', 'required': True},
                'ports': {'type': 'string', 'description': 'Port specification (e.g. "22,80,443" or "1-1024")', 'required': False},
                'scan_type': {'type': 'string', 'description': 'Scan type: quick, full, stealth, vuln', 'required': False},
            }
        },
        {
            'name': 'geoip_lookup',
            'description': 'Look up geographic and network information for an IP address.',
            'params': {
                'ip': {'type': 'string', 'description': 'IP address to look up', 'required': True},
            }
        },
        {
            'name': 'dns_lookup',
            'description': 'Perform DNS lookups for a domain.',
            'params': {
                'domain': {'type': 'string', 'description': 'Domain name to look up', 'required': True},
                'record_type': {'type': 'string', 'description': 'Record type: A, AAAA, MX, NS, TXT, CNAME, SOA', 'required': False},
            }
        },
        {
            'name': 'whois_lookup',
            'description': 'Perform WHOIS lookup for a domain or IP.',
            'params': {
                'target': {'type': 'string', 'description': 'Domain or IP to look up', 'required': True},
            }
        },
        {
            'name': 'packet_capture',
            'description': 'Capture network packets using tcpdump. Returns captured packet summary.',
            'params': {
                'interface': {'type': 'string', 'description': 'Network interface (e.g. eth0, wlan0)', 'required': False},
                'count': {'type': 'integer', 'description': 'Number of packets to capture (default 10)', 'required': False},
                'filter': {'type': 'string', 'description': 'BPF filter expression', 'required': False},
            }
        },
        {
            'name': 'wireguard_status',
            'description': 'Get WireGuard VPN tunnel status and peer information.',
            'params': {}
        },
        {
            'name': 'upnp_status',
            'description': 'Get UPnP port mapping status.',
            'params': {}
        },
        {
            'name': 'system_info',
            'description': 'Get AUTARCH system information: hostname, platform, uptime, tool availability.',
            'params': {}
        },
        {
            'name': 'llm_chat',
            'description': 'Send a message to the currently configured LLM backend and get a response.',
            'params': {
                'message': {'type': 'string', 'description': 'Message to send to the LLM', 'required': True},
                'system_prompt': {'type': 'string', 'description': 'Optional system prompt', 'required': False},
            }
        },
        {
            'name': 'android_devices',
            'description': 'List connected Android devices via ADB.',
            'params': {}
        },
        {
            'name': 'config_get',
            'description': 'Get an AUTARCH configuration value.',
            'params': {
                'section': {'type': 'string', 'description': 'Config section (e.g. autarch, llama, wireguard)', 'required': True},
                'key': {'type': 'string', 'description': 'Config key', 'required': True},
            }
        },
    ]
|
||||
|
||||
|
||||
def execute_tool(name: str, arguments: dict) -> str:
    """Execute an AUTARCH tool by name and return its result as a string.

    Args:
        name: Tool name as registered in get_autarch_tools().
        arguments: Tool arguments dict (tool-specific keys).

    Returns:
        str: JSON-encoded result; unknown names yield a JSON error payload.
    """
    config = get_config()
    # Dispatch table instead of an if/elif chain; each entry is a
    # zero-arg thunk closing over arguments/config.
    dispatch = {
        'nmap_scan': lambda: _run_nmap(arguments, config),
        'geoip_lookup': lambda: _run_geoip(arguments),
        'dns_lookup': lambda: _run_dns(arguments),
        'whois_lookup': lambda: _run_whois(arguments),
        'packet_capture': lambda: _run_tcpdump(arguments),
        'wireguard_status': lambda: _run_wg_status(config),
        'upnp_status': lambda: _run_upnp_status(config),
        'system_info': lambda: _run_system_info(),
        'llm_chat': lambda: _run_llm_chat(arguments, config),
        'android_devices': lambda: _run_adb_devices(),
        'config_get': lambda: _run_config_get(arguments, config),
    }
    handler = dispatch.get(name)
    if handler is None:
        return json.dumps({'error': f'Unknown tool: {name}'})
    return handler()
|
||||
|
||||
|
||||
def _run_nmap(args: dict, config) -> str:
    """Run an nmap scan and return a JSON string of stdout/stderr/exit_code.

    'scan_type' selects the flag set (stealth/full/vuln; anything else
    maps to the quick default); 'ports' adds a -p specification.
    """
    nmap = find_tool('nmap')
    if not nmap:
        return json.dumps({'error': 'nmap not found'})

    target = args.get('target', '')
    if not target:
        return json.dumps({'error': 'target is required'})

    # Friendly scan names -> nmap flag sets; unknown names get the
    # quick default (-sV -T4).
    flag_sets = {
        'stealth': ['-sS', '-T2'],
        'full': ['-sV', '-sC', '-O'],
        'vuln': ['-sV', '--script=vuln'],
    }
    scan_type = args.get('scan_type', 'quick')
    cmd = [str(nmap)] + flag_sets.get(scan_type, ['-sV', '-T4'])

    ports = args.get('ports', '')
    if ports:
        cmd += ['-p', ports]
    cmd.append(target)

    try:
        result = subprocess.run(cmd, capture_output=True, text=True, timeout=120)
        return json.dumps({'stdout': result.stdout,
                           'stderr': result.stderr,
                           'exit_code': result.returncode})
    except subprocess.TimeoutExpired:
        return json.dumps({'error': 'Scan timed out after 120 seconds'})
    except Exception as e:
        return json.dumps({'error': str(e)})
|
||||
|
||||
|
||||
def _run_geoip(args: dict) -> str:
|
||||
ip = args.get('ip', '')
|
||||
if not ip:
|
||||
return json.dumps({'error': 'ip is required'})
|
||||
|
||||
try:
|
||||
import urllib.request
|
||||
url = f"http://ip-api.com/json/{ip}?fields=status,message,country,regionName,city,zip,lat,lon,timezone,isp,org,as,query"
|
||||
with urllib.request.urlopen(url, timeout=10) as resp:
|
||||
return resp.read().decode()
|
||||
except Exception as e:
|
||||
return json.dumps({'error': str(e)})
|
||||
|
||||
|
||||
def _run_dns(args: dict) -> str:
|
||||
domain = args.get('domain', '')
|
||||
if not domain:
|
||||
return json.dumps({'error': 'domain is required'})
|
||||
|
||||
record_type = args.get('record_type', 'A')
|
||||
try:
|
||||
result = subprocess.run(
|
||||
['dig', '+short', domain, record_type],
|
||||
capture_output=True, text=True, timeout=10
|
||||
)
|
||||
records = [r for r in result.stdout.strip().split('\n') if r]
|
||||
return json.dumps({'domain': domain, 'type': record_type, 'records': records})
|
||||
except FileNotFoundError:
|
||||
# Fallback to socket for A records
|
||||
try:
|
||||
ips = socket.getaddrinfo(domain, None)
|
||||
records = list(set(addr[4][0] for addr in ips))
|
||||
return json.dumps({'domain': domain, 'type': 'A', 'records': records})
|
||||
except Exception as e:
|
||||
return json.dumps({'error': str(e)})
|
||||
except Exception as e:
|
||||
return json.dumps({'error': str(e)})
|
||||
|
||||
|
||||
def _run_whois(args: dict) -> str:
|
||||
target = args.get('target', '')
|
||||
if not target:
|
||||
return json.dumps({'error': 'target is required'})
|
||||
|
||||
try:
|
||||
result = subprocess.run(
|
||||
['whois', target],
|
||||
capture_output=True, text=True, timeout=15
|
||||
)
|
||||
return json.dumps({'target': target, 'output': result.stdout[:4000]})
|
||||
except FileNotFoundError:
|
||||
return json.dumps({'error': 'whois command not found'})
|
||||
except Exception as e:
|
||||
return json.dumps({'error': str(e)})
|
||||
|
||||
|
||||
def _run_tcpdump(args: dict) -> str:
    """Capture packets with tcpdump and return a JSON result string.

    Always passes -n (no name resolution) and a bounded -c count
    (default 10). Optional 'interface' and BPF 'filter' are honoured.
    """
    tcpdump = find_tool('tcpdump')
    if not tcpdump:
        return json.dumps({'error': 'tcpdump not found'})

    cmd = [str(tcpdump), '-n']
    iface = args.get('interface', '')
    if iface:
        cmd += ['-i', iface]
    cmd += ['-c', str(args.get('count', 10))]
    bpf = args.get('filter', '')
    if bpf:
        cmd.append(bpf)

    try:
        proc = subprocess.run(cmd, capture_output=True, text=True, timeout=30)
        return json.dumps({'stdout': proc.stdout,
                           'stderr': proc.stderr,
                           'exit_code': proc.returncode})
    except subprocess.TimeoutExpired:
        return json.dumps({'error': 'Capture timed out'})
    except Exception as e:
        return json.dumps({'error': str(e)})
|
||||
|
||||
|
||||
def _run_wg_status(config) -> str:
    """Report WireGuard state for the configured interface (default wg0).

    'active' reflects a zero exit code from `wg show <iface>`.
    """
    wg = find_tool('wg')
    if not wg:
        return json.dumps({'error': 'wg not found'})

    iface = config.get('wireguard', 'interface', 'wg0')
    try:
        proc = subprocess.run([str(wg), 'show', iface],
                              capture_output=True, text=True, timeout=10)
        return json.dumps({'interface': iface,
                           'output': proc.stdout,
                           'active': proc.returncode == 0})
    except Exception as e:
        return json.dumps({'error': str(e)})
|
||||
|
||||
|
||||
def _run_upnp_status(config) -> str:
    """List UPnP port mappings via `upnpc -l`, returned as JSON."""
    upnpc = find_tool('upnpc')
    if not upnpc:
        return json.dumps({'error': 'upnpc not found'})

    try:
        proc = subprocess.run([str(upnpc), '-l'],
                              capture_output=True, text=True, timeout=10)
        return json.dumps({'output': proc.stdout,
                           'exit_code': proc.returncode})
    except Exception as e:
        return json.dumps({'error': str(e)})
|
||||
|
||||
|
||||
def _run_system_info() -> str:
    """Collect host information as a JSON string.

    Includes hostname, platform, Python version, arch, best-effort IP,
    Linux uptime (from /proc/uptime), availability of key external
    tools, and the configured LLM backend.
    """
    import platform

    info = {
        'hostname': socket.gethostname(),
        'platform': platform.platform(),
        'python': platform.python_version(),
        'arch': platform.machine(),
    }

    try:
        info['ip'] = socket.gethostbyname(socket.gethostname())
    except Exception:
        # Hostname may not resolve (e.g. offline); fall back to loopback.
        info['ip'] = '127.0.0.1'

    try:
        # /proc/uptime is Linux-only; first field is seconds since boot.
        with open('/proc/uptime') as f:
            total = float(f.read().split()[0])
        info['uptime'] = f"{int(total // 86400)}d {int((total % 86400) // 3600)}h"
    except Exception:
        info['uptime'] = 'N/A'

    # External tool availability map.
    info['tools'] = {t: find_tool(t) is not None
                     for t in ('nmap', 'tshark', 'tcpdump', 'upnpc', 'wg', 'adb')}

    info['llm_backend'] = get_config().get('autarch', 'llm_backend', 'local')
    return json.dumps(info)
|
||||
|
||||
|
||||
def _run_llm_chat(args: dict, config) -> str:
|
||||
message = args.get('message', '')
|
||||
if not message:
|
||||
return json.dumps({'error': 'message is required'})
|
||||
|
||||
try:
|
||||
from core.llm import get_llm, LLMError
|
||||
llm = get_llm()
|
||||
if not llm.is_loaded:
|
||||
llm.load_model()
|
||||
|
||||
system_prompt = args.get('system_prompt', None)
|
||||
response = llm.chat(message, system_prompt=system_prompt)
|
||||
return json.dumps({
|
||||
'response': response,
|
||||
'model': llm.model_name,
|
||||
'backend': config.get('autarch', 'llm_backend', 'local')
|
||||
})
|
||||
except Exception as e:
|
||||
return json.dumps({'error': str(e)})
|
||||
|
||||
|
||||
def _run_adb_devices() -> str:
    """List ADB-attached Android devices as JSON.

    Parses `adb devices -l`: each device line becomes
    {'serial', 'state'} plus any key:value extras (model, product, ...).
    """
    adb = find_tool('adb')
    if not adb:
        return json.dumps({'error': 'adb not found'})

    try:
        result = subprocess.run([str(adb), 'devices', '-l'],
                                capture_output=True, text=True, timeout=10)
        devices = []
        # First line is the "List of devices attached" header — skip it.
        for line in result.stdout.strip().split('\n')[1:]:
            parts = line.split()
            if len(parts) < 2:
                continue
            entry = {'serial': parts[0], 'state': parts[1]}
            for token in parts[2:]:
                if ':' in token:
                    k, v = token.split(':', 1)
                    entry[k] = v
            devices.append(entry)
        return json.dumps({'devices': devices})
    except Exception as e:
        return json.dumps({'error': str(e)})
|
||||
|
||||
|
||||
def _run_config_get(args: dict, config) -> str:
|
||||
section = args.get('section', '')
|
||||
key = args.get('key', '')
|
||||
if not section or not key:
|
||||
return json.dumps({'error': 'section and key are required'})
|
||||
|
||||
# Block sensitive keys
|
||||
if key.lower() in ('api_key', 'password', 'secret_key', 'token'):
|
||||
return json.dumps({'error': 'Cannot read sensitive configuration values'})
|
||||
|
||||
value = config.get(section, key, fallback='(not set)')
|
||||
return json.dumps({'section': section, 'key': key, 'value': value})
|
||||
|
||||
|
||||
def create_mcp_server():
    """Create and return the FastMCP server instance.

    Registers every AUTARCH tool as a typed FastMCP tool; each wrapper
    delegates to execute_tool() so stdio and SSE transports share one
    dispatch path. (The unused local binding of get_autarch_tools()
    has been removed — registration here is explicit.)

    Returns:
        A configured mcp.server.fastmcp.FastMCP instance.
    """
    from mcp.server.fastmcp import FastMCP

    mcp = FastMCP("autarch", instructions="AUTARCH security framework tools")

    # NOTE: tool docstrings below are user-visible tool descriptions via
    # MCP — keep them in sync with get_autarch_tools().

    @mcp.tool()
    def nmap_scan(target: str, ports: str = "", scan_type: str = "quick") -> str:
        """Run an nmap network scan against a target. Returns scan results including open ports and services."""
        return execute_tool('nmap_scan', {'target': target, 'ports': ports, 'scan_type': scan_type})

    @mcp.tool()
    def geoip_lookup(ip: str) -> str:
        """Look up geographic and network information for an IP address."""
        return execute_tool('geoip_lookup', {'ip': ip})

    @mcp.tool()
    def dns_lookup(domain: str, record_type: str = "A") -> str:
        """Perform DNS lookups for a domain. Supports A, AAAA, MX, NS, TXT, CNAME, SOA record types."""
        return execute_tool('dns_lookup', {'domain': domain, 'record_type': record_type})

    @mcp.tool()
    def whois_lookup(target: str) -> str:
        """Perform WHOIS lookup for a domain or IP address."""
        return execute_tool('whois_lookup', {'target': target})

    @mcp.tool()
    def packet_capture(interface: str = "", count: int = 10, filter: str = "") -> str:
        """Capture network packets using tcpdump. Returns captured packet summary."""
        return execute_tool('packet_capture', {'interface': interface, 'count': count, 'filter': filter})

    @mcp.tool()
    def wireguard_status() -> str:
        """Get WireGuard VPN tunnel status and peer information."""
        return execute_tool('wireguard_status', {})

    @mcp.tool()
    def upnp_status() -> str:
        """Get UPnP port mapping status."""
        return execute_tool('upnp_status', {})

    @mcp.tool()
    def system_info() -> str:
        """Get AUTARCH system information: hostname, platform, uptime, tool availability."""
        return execute_tool('system_info', {})

    @mcp.tool()
    def llm_chat(message: str, system_prompt: str = "") -> str:
        """Send a message to the currently configured LLM backend and get a response."""
        args = {'message': message}
        if system_prompt:
            args['system_prompt'] = system_prompt
        return execute_tool('llm_chat', args)

    @mcp.tool()
    def android_devices() -> str:
        """List connected Android devices via ADB."""
        return execute_tool('android_devices', {})

    @mcp.tool()
    def config_get(section: str, key: str) -> str:
        """Get an AUTARCH configuration value. Sensitive keys (api_key, password) are blocked."""
        return execute_tool('config_get', {'section': section, 'key': key})

    return mcp
|
||||
|
||||
|
||||
def run_stdio():
    """Run the MCP server over stdio (for Claude Desktop / Claude Code)."""
    server = create_mcp_server()
    server.run(transport='stdio')
|
||||
|
||||
|
||||
def run_sse(host: str = '0.0.0.0', port: int = 8081):
    """Run the MCP server over SSE (Server-Sent Events) for web clients."""
    server = create_mcp_server()
    server.run(transport='sse', host=host, port=port)
|
||||
|
||||
|
||||
def get_mcp_config_snippet() -> str:
    """Render the mcpServers JSON stanza for Claude Desktop / Claude Code.

    Points the client at this file in stdio mode using the current
    Python interpreter.
    """
    server_entry = {
        "command": sys.executable,
        "args": [str(get_app_dir() / "core" / "mcp_server.py"), "--stdio"],
        "env": {},
    }
    return json.dumps({"mcpServers": {"autarch": server_entry}}, indent=2)
|
||||
|
||||
|
||||
def get_server_status() -> dict:
    """Report whether the background SSE server process is alive.

    Returns {'running': True, 'pid': ..., 'mode': 'sse'} or
    {'running': False}.
    """
    global _server_process
    proc = _server_process
    # poll() returning None means the child is still running.
    if proc is not None and proc.poll() is None:
        return {'running': True, 'pid': proc.pid, 'mode': 'sse'}
    return {'running': False}
|
||||
|
||||
|
||||
def start_sse_server(host: str = '0.0.0.0', port: int = 8081) -> dict:
    """Launch the MCP SSE server as a background subprocess.

    Refuses to start when a previous instance is still alive.

    Returns:
        dict with 'ok' and either pid/host/port or 'error'.
    """
    global _server_process

    status = get_server_status()
    if status['running']:
        return {'ok': False, 'error': f'Already running (PID {status["pid"]})'}

    script = str(Path(__file__).resolve())
    _server_process = subprocess.Popen(
        [sys.executable, script, '--sse', '--host', host, '--port', str(port)],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    return {'ok': True, 'pid': _server_process.pid, 'host': host, 'port': port}
|
||||
|
||||
|
||||
def stop_sse_server() -> dict:
    """Stop the background MCP SSE server.

    Sends SIGTERM, waits up to 5 seconds, then escalates to SIGKILL.

    Returns:
        {'ok': True} on success, {'ok': False, 'error': ...} when the
        server was not running.
    """
    global _server_process

    if not get_server_status()['running']:
        return {'ok': False, 'error': 'Not running'}

    _server_process.terminate()
    try:
        _server_process.wait(timeout=5)
    except subprocess.TimeoutExpired:
        # Graceful shutdown failed — force-kill the child.
        _server_process.kill()
    _server_process = None
    return {'ok': True}
|
||||
|
||||
|
||||
if __name__ == '__main__':
    import argparse

    parser = argparse.ArgumentParser(description='AUTARCH MCP Server')
    parser.add_argument('--stdio', action='store_true',
                        help='Run in stdio mode (for Claude Desktop/Code)')
    parser.add_argument('--sse', action='store_true',
                        help='Run in SSE mode (for web clients)')
    parser.add_argument('--host', default='0.0.0.0',
                        help='SSE host (default: 0.0.0.0)')
    parser.add_argument('--port', type=int, default=8081,
                        help='SSE port (default: 8081)')
    opts = parser.parse_args()

    if opts.sse:
        print(f"Starting AUTARCH MCP server (SSE) on {opts.host}:{opts.port}")
        run_sse(host=opts.host, port=opts.port)
    else:
        # stdio is the default transport (what MCP desktop clients expect).
        run_stdio()
|
||||
3049
core/menu.py
Normal file
3049
core/menu.py
Normal file
File diff suppressed because it is too large
Load Diff
305
core/model_router.py
Normal file
305
core/model_router.py
Normal file
@@ -0,0 +1,305 @@
|
||||
"""
|
||||
AUTARCH Model Router
|
||||
Manages concurrent SLM/LAM/SAM model instances for autonomous operation.
|
||||
|
||||
Model Tiers:
|
||||
SLM (Small Language Model) — Fast classification, routing, yes/no decisions
|
||||
SAM (Small Action Model) — Quick tool execution, simple automated responses
|
||||
LAM (Large Action Model) — Complex multi-step agent tasks, strategic planning
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import threading
|
||||
from typing import Optional, Dict, Any
|
||||
from enum import Enum
|
||||
|
||||
from .config import get_config
|
||||
|
||||
_logger = logging.getLogger('autarch.model_router')
|
||||
|
||||
|
||||
class ModelTier(Enum):
    """Model capability tiers managed by the router."""
    SLM = 'slm'  # small language model: classification / routing decisions
    SAM = 'sam'  # small action model: quick tool execution
    LAM = 'lam'  # large action model: complex multi-step agent tasks
|
||||
|
||||
|
||||
# Fallback chain: if a tier fails, try the next one
|
||||
# Escalation order when a tier fails: SLM falls back to SAM then LAM;
# SAM falls back to LAM; LAM is the last resort with no fallback.
_FALLBACK = {
    ModelTier.SLM: [ModelTier.SAM, ModelTier.LAM],
    ModelTier.SAM: [ModelTier.LAM],
    ModelTier.LAM: [],
}
|
||||
|
||||
|
||||
class _TierConfigProxy:
|
||||
"""Proxies Config but overrides the backend section for a specific model tier.
|
||||
|
||||
When a tier says backend=local with model_path=X, this proxy makes the LLM
|
||||
class (which reads [llama]) see the tier's model_path/n_ctx/etc instead.
|
||||
"""
|
||||
|
||||
def __init__(self, base_config, tier_name: str):
|
||||
self._base = base_config
|
||||
self._tier = tier_name
|
||||
self._overrides: Dict[str, Dict[str, str]] = {}
|
||||
self._build_overrides()
|
||||
|
||||
def _build_overrides(self):
|
||||
backend = self._base.get(self._tier, 'backend', 'local')
|
||||
model_path = self._base.get(self._tier, 'model_path', '')
|
||||
n_ctx = self._base.get(self._tier, 'n_ctx', '2048')
|
||||
n_gpu_layers = self._base.get(self._tier, 'n_gpu_layers', '-1')
|
||||
n_threads = self._base.get(self._tier, 'n_threads', '4')
|
||||
|
||||
if backend == 'local':
|
||||
self._overrides['llama'] = {
|
||||
'model_path': model_path,
|
||||
'n_ctx': n_ctx,
|
||||
'n_gpu_layers': n_gpu_layers,
|
||||
'n_threads': n_threads,
|
||||
}
|
||||
elif backend == 'transformers':
|
||||
self._overrides['transformers'] = {
|
||||
'model_path': model_path,
|
||||
}
|
||||
# claude and huggingface are API-based — no path override needed
|
||||
|
||||
def get(self, section: str, key: str, fallback=None):
|
||||
overrides = self._overrides.get(section, {})
|
||||
if key in overrides:
|
||||
return overrides[key]
|
||||
return self._base.get(section, key, fallback)
|
||||
|
||||
def get_int(self, section: str, key: str, fallback: int = 0) -> int:
|
||||
overrides = self._overrides.get(section, {})
|
||||
if key in overrides:
|
||||
try:
|
||||
return int(overrides[key])
|
||||
except (ValueError, TypeError):
|
||||
return fallback
|
||||
return self._base.get_int(section, key, fallback)
|
||||
|
||||
def get_float(self, section: str, key: str, fallback: float = 0.0) -> float:
|
||||
overrides = self._overrides.get(section, {})
|
||||
if key in overrides:
|
||||
try:
|
||||
return float(overrides[key])
|
||||
except (ValueError, TypeError):
|
||||
return fallback
|
||||
return self._base.get_float(section, key, fallback)
|
||||
|
||||
def get_bool(self, section: str, key: str, fallback: bool = False) -> bool:
|
||||
overrides = self._overrides.get(section, {})
|
||||
if key in overrides:
|
||||
val = str(overrides[key]).lower()
|
||||
return val in ('true', '1', 'yes', 'on')
|
||||
return self._base.get_bool(section, key, fallback)
|
||||
|
||||
# Delegate all settings getters to base (they call self.get internally)
|
||||
def get_llama_settings(self) -> dict:
|
||||
from .config import Config
|
||||
return Config.get_llama_settings(self)
|
||||
|
||||
def get_transformers_settings(self) -> dict:
|
||||
from .config import Config
|
||||
return Config.get_transformers_settings(self)
|
||||
|
||||
def get_claude_settings(self) -> dict:
|
||||
return self._base.get_claude_settings()
|
||||
|
||||
def get_huggingface_settings(self) -> dict:
|
||||
return self._base.get_huggingface_settings()
|
||||
|
||||
|
||||
class ModelRouter:
|
||||
"""Manages up to 3 concurrent LLM instances (SLM, SAM, LAM).
|
||||
|
||||
Each tier can use a different backend (local GGUF, transformers, Claude API,
|
||||
HuggingFace). The router handles loading, unloading, fallback, and thread-safe
|
||||
access.
|
||||
"""
|
||||
|
||||
def __init__(self, config=None):
    """Initialize the router with an optional config (defaults to global)."""
    self.config = config or get_config()
    # tier -> loaded LLM instance
    self._instances: Dict[ModelTier, Any] = {}
    # One lock per tier so concurrent use of different tiers does not
    # serialize; a separate lock guards load/unload.
    self._locks: Dict[ModelTier, threading.Lock] = {
        t: threading.Lock() for t in ModelTier
    }
    self._load_lock = threading.Lock()
|
||||
|
||||
@property
def status(self) -> Dict[str, dict]:
    """Per-tier status: loaded flag, model name, backend, enabled, path."""
    report = {}
    for tier in ModelTier:
        inst = self._instances.get(tier)
        cfg = self.config.get_tier_settings(tier.value)
        loaded = inst is not None and inst.is_loaded
        report[tier.value] = {
            'loaded': loaded,
            'model_name': inst.model_name if loaded else None,
            'backend': cfg['backend'],
            'enabled': cfg['enabled'],
            'model_path': cfg['model_path'],
        }
    return report
|
||||
|
||||
def load_tier(self, tier: ModelTier, verbose: bool = False) -> bool:
    """Load one tier's model. Thread-safe; returns True on success.

    Skips disabled tiers and local-backend tiers without a model_path.
    An already-loaded instance for the tier is unloaded first.
    """
    settings = self.config.get_tier_settings(tier.value)

    if not settings['enabled']:
        _logger.info(f"[Router] Tier {tier.value} is disabled, skipping")
        return False

    if not settings['model_path'] and settings['backend'] == 'local':
        _logger.warning(f"[Router] No model_path configured for {tier.value}")
        return False

    with self._load_lock:
        if tier in self._instances:
            # Replace any previously loaded model for this tier.
            self.unload_tier(tier)

        try:
            inst = self._create_instance(tier, verbose)
            self._instances[tier] = inst
            _logger.info(f"[Router] Loaded {tier.value}: {inst.model_name}")
            return True
        except Exception as e:
            _logger.error(f"[Router] Failed to load {tier.value}: {e}")
            return False
|
||||
|
||||
def unload_tier(self, tier: ModelTier):
|
||||
"""Unload a tier's model and free resources."""
|
||||
inst = self._instances.pop(tier, None)
|
||||
if inst:
|
||||
try:
|
||||
inst.unload_model()
|
||||
_logger.info(f"[Router] Unloaded {tier.value}")
|
||||
except Exception as e:
|
||||
_logger.error(f"[Router] Error unloading {tier.value}: {e}")
|
||||
|
||||
def load_all(self, verbose: bool = False) -> Dict[str, bool]:
|
||||
"""Load all enabled tiers. Returns {tier_name: success}."""
|
||||
results = {}
|
||||
for tier in ModelTier:
|
||||
results[tier.value] = self.load_tier(tier, verbose)
|
||||
return results
|
||||
|
||||
def unload_all(self):
|
||||
"""Unload all tiers."""
|
||||
for tier in list(self._instances.keys()):
|
||||
self.unload_tier(tier)
|
||||
|
||||
def get_instance(self, tier: ModelTier):
|
||||
"""Get the LLM instance for a tier (may be None if not loaded)."""
|
||||
return self._instances.get(tier)
|
||||
|
||||
def is_tier_loaded(self, tier: ModelTier) -> bool:
|
||||
"""Check if a tier has a loaded model."""
|
||||
inst = self._instances.get(tier)
|
||||
return inst is not None and inst.is_loaded
|
||||
|
||||
def classify(self, text: str) -> Dict[str, Any]:
|
||||
"""Use SLM to classify/triage an event or task.
|
||||
|
||||
Returns: {'tier': 'sam'|'lam', 'category': str, 'urgency': str, 'reasoning': str}
|
||||
|
||||
Falls back to SAM tier if SLM is not loaded.
|
||||
"""
|
||||
classify_prompt = f"""Classify this event/task for autonomous handling.
|
||||
Respond with ONLY a JSON object, no other text:
|
||||
{{"tier": "sam" or "lam", "category": "defense|offense|counter|analyze|osint|simulate", "urgency": "high|medium|low", "reasoning": "brief explanation"}}
|
||||
|
||||
Event: {text}"""
|
||||
|
||||
# Try SLM first, then fallback
|
||||
for tier in [ModelTier.SLM, ModelTier.SAM, ModelTier.LAM]:
|
||||
inst = self._instances.get(tier)
|
||||
if inst and inst.is_loaded:
|
||||
try:
|
||||
with self._locks[tier]:
|
||||
response = inst.generate(classify_prompt, max_tokens=200, temperature=0.1)
|
||||
# Parse JSON from response
|
||||
response = response.strip()
|
||||
# Find JSON in response
|
||||
start = response.find('{')
|
||||
end = response.rfind('}')
|
||||
if start >= 0 and end > start:
|
||||
return json.loads(response[start:end + 1])
|
||||
except Exception as e:
|
||||
_logger.warning(f"[Router] Classification failed on {tier.value}: {e}")
|
||||
continue
|
||||
|
||||
# Default if all tiers fail
|
||||
return {'tier': 'sam', 'category': 'defense', 'urgency': 'medium',
|
||||
'reasoning': 'Default classification (no model available)'}
|
||||
|
||||
def generate(self, tier: ModelTier, prompt: str, **kwargs) -> str:
|
||||
"""Generate with a specific tier, falling back to higher tiers on failure.
|
||||
|
||||
Fallback chain: SLM -> SAM -> LAM, SAM -> LAM
|
||||
"""
|
||||
chain = [tier] + _FALLBACK.get(tier, [])
|
||||
|
||||
for t in chain:
|
||||
inst = self._instances.get(t)
|
||||
if inst and inst.is_loaded:
|
||||
try:
|
||||
with self._locks[t]:
|
||||
return inst.generate(prompt, **kwargs)
|
||||
except Exception as e:
|
||||
_logger.warning(f"[Router] Generate failed on {t.value}: {e}")
|
||||
continue
|
||||
|
||||
from .llm import LLMError
|
||||
raise LLMError(f"All tiers exhausted for generation (started at {tier.value})")
|
||||
|
||||
def _create_instance(self, tier: ModelTier, verbose: bool = False):
|
||||
"""Create an LLM instance from tier config."""
|
||||
from .llm import LLM, TransformersLLM, ClaudeLLM, HuggingFaceLLM
|
||||
|
||||
section = tier.value
|
||||
backend = self.config.get(section, 'backend', 'local')
|
||||
proxy = _TierConfigProxy(self.config, section)
|
||||
|
||||
if verbose:
|
||||
model_path = self.config.get(section, 'model_path', '')
|
||||
_logger.info(f"[Router] Creating {tier.value} instance: backend={backend}, model={model_path}")
|
||||
|
||||
if backend == 'local':
|
||||
inst = LLM(proxy)
|
||||
elif backend == 'transformers':
|
||||
inst = TransformersLLM(proxy)
|
||||
elif backend == 'claude':
|
||||
inst = ClaudeLLM(proxy)
|
||||
elif backend == 'huggingface':
|
||||
inst = HuggingFaceLLM(proxy)
|
||||
else:
|
||||
from .llm import LLMError
|
||||
raise LLMError(f"Unknown backend '{backend}' for tier {tier.value}")
|
||||
|
||||
inst.load_model(verbose=verbose)
|
||||
return inst
|
||||
|
||||
|
||||
# Singleton
_router_instance = None


def get_model_router() -> ModelRouter:
    """Return the process-wide ModelRouter, creating it on first use."""
    global _router_instance
    if _router_instance is None:
        _router_instance = ModelRouter()
    return _router_instance
def reset_model_router():
    """Reset the global ModelRouter, unloading all of its models first."""
    global _router_instance
    router = _router_instance
    if router is not None:
        router.unload_all()
        _router_instance = None
239
core/module_crypto.py
Normal file
239
core/module_crypto.py
Normal file
@@ -0,0 +1,239 @@
|
||||
"""
|
||||
AUTARCH Encrypted Module Cryptography
|
||||
AES-256-CBC encryption with PBKDF2-HMAC-SHA512 key derivation
|
||||
and SHA-512 integrity verification.
|
||||
|
||||
File format (.autarch):
|
||||
Offset Size Field
|
||||
────── ──── ─────────────────────────────────────────────────────
|
||||
0 4 Magic: b'ATCH'
|
||||
4 1 Version: 0x01
|
||||
5 32 PBKDF2 salt
|
||||
37 16 AES IV
|
||||
53 64 SHA-512 hash of plaintext (integrity check)
|
||||
117 2 Metadata JSON length (uint16 LE)
|
||||
119 N Metadata JSON (UTF-8)
|
||||
119+N ... AES-256-CBC ciphertext (PKCS7 padded)
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import hmac
|
||||
import json
|
||||
import os
|
||||
import struct
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
MAGIC = b'ATCH'
|
||||
VERSION = 0x01
|
||||
KDF_ITERS = 260000 # PBKDF2 iterations (NIST recommended minimum for SHA-512)
|
||||
SALT_LEN = 32
|
||||
IV_LEN = 16
|
||||
HASH_LEN = 64 # SHA-512 digest length
|
||||
|
||||
|
||||
# ── Low-level AES backend selection ──────────────────────────────────────────
# Uses AES-256-CBC from the `cryptography` package if available, otherwise
# falls back to pycryptodome; raises RuntimeError if neither is installed.
|
||||
def _get_aes():
    """Return an (encrypt, decrypt) pair of AES-256-CBC functions.

    Prefers the `cryptography` package; falls back to pycryptodome.

    Raises:
        RuntimeError: if neither AES backend is installed.
    """
    try:
        from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
        from cryptography.hazmat.primitives import padding as sym_padding
        from cryptography.hazmat.backends import default_backend

        def encrypt(key: bytes, iv: bytes, plaintext: bytes) -> bytes:
            # PKCS7-pad to the 128-bit AES block size, then CBC-encrypt.
            padder = sym_padding.PKCS7(128).padder()
            padded = padder.update(plaintext) + padder.finalize()
            cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=default_backend())
            enc = cipher.encryptor()
            return enc.update(padded) + enc.finalize()

        def decrypt(key: bytes, iv: bytes, ciphertext: bytes) -> bytes:
            cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=default_backend())
            dec = cipher.decryptor()
            padded = dec.update(ciphertext) + dec.finalize()
            unpadder = sym_padding.PKCS7(128).unpadder()
            return unpadder.update(padded) + unpadder.finalize()

        return encrypt, decrypt

    except ImportError:
        pass

    try:
        from Crypto.Cipher import AES
        from Crypto.Util.Padding import pad, unpad

        def encrypt(key: bytes, iv: bytes, plaintext: bytes) -> bytes:
            cipher = AES.new(key, AES.MODE_CBC, iv)
            return cipher.encrypt(pad(plaintext, 16))

        def decrypt(key: bytes, iv: bytes, ciphertext: bytes) -> bytes:
            cipher = AES.new(key, AES.MODE_CBC, iv)
            return unpad(cipher.decrypt(ciphertext), 16)

        return encrypt, decrypt

    except ImportError:
        # Suppress the import-error chain deliberately: the actionable
        # message is "install a backend", not the missing-module traceback.
        raise RuntimeError(
            "No AES backend available. Install one:\n"
            "  pip install cryptography\n"
            "  pip install pycryptodome"
        ) from None


_aes_encrypt, _aes_decrypt = _get_aes()
|
||||
# ── Key derivation ────────────────────────────────────────────────────────────
|
||||
|
||||
def _derive_key(password: str, salt: bytes) -> bytes:
    """Derive a 32-byte AES-256 key from *password* via PBKDF2-HMAC-SHA512."""
    return hashlib.pbkdf2_hmac(
        'sha512', password.encode('utf-8'), salt, KDF_ITERS, dklen=32
    )
||||
# ── Public API ────────────────────────────────────────────────────────────────
|
||||
|
||||
def encrypt_module(
    source_code: str,
    password: str,
    metadata: Optional[dict] = None,
) -> bytes:
    """
    Encrypt a Python module source string.

    Args:
        source_code: Module source to protect.
        password: Passphrase; the AES key is derived via PBKDF2-HMAC-SHA512.
        metadata: Optional JSON-serializable dict stored *unencrypted* in
            the header (readable without the password).

    Returns the raw .autarch file bytes.

    Raises:
        ValueError: if the serialized metadata exceeds the 64 KiB limit
            imposed by the uint16 length field.
    """
    meta_bytes = json.dumps(metadata or {}).encode('utf-8')
    # The header stores the metadata length as uint16 LE; anything larger
    # would previously die with an opaque struct.error.
    if len(meta_bytes) > 0xFFFF:
        raise ValueError(f"Metadata too large: {len(meta_bytes)} bytes (max 65535)")

    plaintext = source_code.encode('utf-8')
    salt = os.urandom(SALT_LEN)
    iv = os.urandom(IV_LEN)
    key = _derive_key(password, salt)
    # SHA-512 of the plaintext, verified after decryption.
    digest = hashlib.sha512(plaintext).digest()
    ciphertext = _aes_encrypt(key, iv, plaintext)

    # Header layout is documented in the module docstring.
    header = (
        MAGIC
        + struct.pack('B', VERSION)
        + salt
        + iv
        + digest
        + struct.pack('<H', len(meta_bytes))
    )
    return header + meta_bytes + ciphertext
def decrypt_module(data: bytes, password: str) -> tuple[str, dict]:
    """
    Decrypt an .autarch blob.

    Returns (source_code: str, metadata: dict).
    Raises ValueError on bad magic, version, truncation, or integrity
    check failure.
    """
    # Fixed-size portion of the header: magic + version + salt + IV +
    # hash + metadata length field. Shorter input previously produced an
    # IndexError/struct.error instead of a clear ValueError.
    min_len = 4 + 1 + SALT_LEN + IV_LEN + HASH_LEN + 2
    if len(data) < min_len:
        raise ValueError("Not a valid AUTARCH encrypted module (truncated)")

    offset = 0

    # Magic
    if data[offset:offset + 4] != MAGIC:
        raise ValueError("Not a valid AUTARCH encrypted module (bad magic)")
    offset += 4

    # Version
    version = data[offset]
    if version != VERSION:
        raise ValueError(f"Unsupported module version: {version:#04x}")
    offset += 1

    # Salt
    salt = data[offset:offset + SALT_LEN]
    offset += SALT_LEN

    # IV
    iv = data[offset:offset + IV_LEN]
    offset += IV_LEN

    # SHA-512 integrity hash of the plaintext
    stored_hash = data[offset:offset + HASH_LEN]
    offset += HASH_LEN

    # Metadata (stored unencrypted)
    meta_len = struct.unpack('<H', data[offset:offset + 2])[0]
    offset += 2
    meta_bytes = data[offset:offset + meta_len]
    offset += meta_len
    metadata = json.loads(meta_bytes.decode('utf-8')) if meta_bytes else {}

    # Everything after the header is ciphertext
    ciphertext = data[offset:]

    # Derive key and decrypt
    key = _derive_key(password, salt)
    try:
        plaintext = _aes_decrypt(key, iv, ciphertext)
    except Exception as exc:
        # Chain explicitly so the padding/backend error stays attached.
        raise ValueError(f"Decryption failed — wrong password? ({exc})") from exc

    # Integrity check (constant-time compare). A wrong password can still
    # occasionally unpad cleanly, so this is the authoritative check.
    actual_hash = hashlib.sha512(plaintext).digest()
    if not hmac.compare_digest(actual_hash, stored_hash):
        raise ValueError("Integrity check failed — file tampered or wrong password")

    return plaintext.decode('utf-8'), metadata
def encrypt_file(src: Path, dst: Path, password: str, metadata: Optional[dict] = None) -> None:
    """Encrypt the .py source file *src* into the .autarch file *dst*."""
    blob = encrypt_module(src.read_text(encoding='utf-8'), password, metadata)
    dst.write_bytes(blob)
def decrypt_file(src: Path, password: str) -> tuple[str, dict]:
    """Decrypt an .autarch file and return (source_code, metadata)."""
    blob = src.read_bytes()
    return decrypt_module(blob, password)
def load_and_exec(
    path: Path,
    password: str,
    module_name: str = '__encmod__',
) -> dict:
    """
    Decrypt and execute an encrypted module.

    SECURITY NOTE: this exec()s whatever decrypts successfully. The
    SHA-512 check in decrypt_module ties the payload to the password,
    but anyone who knows the password can craft a file that runs
    arbitrary code here — only load modules from trusted sources.

    Args:
        path: Path to the .autarch file.
        password: Decryption passphrase.
        module_name: Value bound to __name__ inside the executed namespace.

    Returns the module's globals dict (its namespace).
    """
    # NOTE(review): the decrypted metadata is discarded here.
    source, meta = decrypt_file(path, password)
    # Minimal module-like namespace for the exec'd code.
    namespace: dict = {
        '__name__': module_name,
        '__file__': str(path),
        '__builtins__': __builtins__,
    }
    # compile() with the real path gives readable tracebacks.
    exec(compile(source, str(path), 'exec'), namespace)
    return namespace
def read_metadata(path: Path) -> Optional[dict]:
    """
    Read only the metadata from an .autarch file without decrypting.
    Returns None if the file is invalid.
    """
    try:
        blob = path.read_bytes()
        if blob[:4] != MAGIC:
            return None
        # Skip magic+version, salt, IV and hash to land on the length field.
        pos = 5 + SALT_LEN + IV_LEN + HASH_LEN
        (meta_len,) = struct.unpack('<H', blob[pos:pos + 2])
        pos += 2
        raw = blob[pos:pos + meta_len]
        return json.loads(raw.decode('utf-8')) if raw else {}
    except Exception:
        return None
1150
core/msf.py
Normal file
1150
core/msf.py
Normal file
File diff suppressed because it is too large
Load Diff
846
core/msf_interface.py
Normal file
846
core/msf_interface.py
Normal file
@@ -0,0 +1,846 @@
|
||||
"""
|
||||
AUTARCH Metasploit Interface
|
||||
Centralized high-level interface for all Metasploit operations.
|
||||
|
||||
This module provides a clean API for executing MSF modules, handling
|
||||
connection management, output parsing, and error recovery.
|
||||
|
||||
Usage:
|
||||
from core.msf_interface import get_msf_interface, MSFResult
|
||||
|
||||
msf = get_msf_interface()
|
||||
result = msf.run_module('auxiliary/scanner/portscan/tcp', {'RHOSTS': '192.168.1.1'})
|
||||
|
||||
if result.success:
|
||||
for finding in result.findings:
|
||||
print(finding)
|
||||
"""
|
||||
|
||||
import re
|
||||
import time
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Dict, List, Optional, Any, Tuple
|
||||
from enum import Enum
|
||||
|
||||
# Import the low-level MSF components
|
||||
from core.msf import get_msf_manager, MSFError, MSFManager
|
||||
from core.banner import Colors
|
||||
|
||||
|
||||
class MSFStatus(Enum):
    """Status of an MSF operation."""
    SUCCESS = "success"
    PARTIAL = "partial"  # Some results but also errors
    FAILED = "failed"
    AUTH_ERROR = "auth_error"              # RPC auth token rejected/expired
    CONNECTION_ERROR = "connection_error"  # Could not reach the RPC server
    TIMEOUT = "timeout"                    # Module exceeded its time budget
    NOT_CONNECTED = "not_connected"        # No RPC session established
@dataclass
class MSFResult:
    """Structured result of one MSF module execution."""
    status: MSFStatus
    module: str
    target: str = ""

    # Raw and cleaned output
    raw_output: str = ""
    cleaned_output: str = ""

    # Parsed results
    findings: List[str] = field(default_factory=list)  # [+] lines
    info: List[str] = field(default_factory=list)      # [*] lines
    errors: List[str] = field(default_factory=list)    # [-] lines
    warnings: List[str] = field(default_factory=list)  # [!] lines

    # For scan results
    open_ports: List[Dict] = field(default_factory=list)  # [{port, service, state}]
    services: List[Dict] = field(default_factory=list)    # [{name, version, info}]

    # Metadata
    execution_time: float = 0.0
    error_count: int = 0

    @property
    def success(self) -> bool:
        """Full and partial completions both count as usable results."""
        return self.status in (MSFStatus.SUCCESS, MSFStatus.PARTIAL)

    def get_summary(self) -> str:
        """Return a brief one-line summary of the result."""
        status = self.status
        if status is MSFStatus.SUCCESS:
            return f"Success: {len(self.findings)} findings"
        if status is MSFStatus.PARTIAL:
            return f"Partial: {len(self.findings)} findings, {self.error_count} errors"
        if status is MSFStatus.AUTH_ERROR:
            return "Authentication token expired"
        if status is MSFStatus.CONNECTION_ERROR:
            return "Connection to MSF failed"
        if status is MSFStatus.TIMEOUT:
            return "Module execution timed out"
        # FAILED and NOT_CONNECTED fall through here, as in the original.
        return f"Failed: {self.errors[0] if self.errors else 'Unknown error'}"
class MSFInterface:
|
||||
"""High-level interface for Metasploit operations."""
|
||||
|
||||
    # Patterns to filter from output (banner noise, Easter eggs, etc.)
    # Matched case-insensitively as substrings by _clean_output().
    SKIP_PATTERNS = [
        'metasploit', '=[ ', '+ -- --=[', 'Documentation:',
        'Rapid7', 'Open Source', 'MAGIC WORD', 'PERMISSION DENIED',
        'access security', 'access:', 'Ready...', 'Alpha E',
        'Version 4.0', 'System Security Interface', 'Metasploit Park',
        'exploits -', 'auxiliary -', 'payloads', 'encoders -',
        'evasion', 'nops -', 'post -', 'msf6', 'msf5', 'msf >',
    ]

    # Patterns indicating specific result types
    # e.g. "445/tcp open smb" -> (port, proto, state, service)
    PORT_PATTERN = re.compile(
        r'(\d{1,5})/(tcp|udp)\s+(open|closed|filtered)?\s*(\S+)?',
        re.IGNORECASE
    )
    # e.g. "[+] 10.0.0.1:22 - OpenSSH 8.9" -> (ip, port, description)
    SERVICE_PATTERN = re.compile(
        r'\[\+\].*?(\d+\.\d+\.\d+\.\d+):(\d+)\s*[-:]\s*(.+)',
        re.IGNORECASE
    )
    # Captures the remainder of a "version:"/"running"/"server" line.
    VERSION_PATTERN = re.compile(
        r'(?:version|running|server)[\s:]+([^\n\r]+)',
        re.IGNORECASE
    )
||||
def __init__(self):
|
||||
self._manager: Optional[MSFManager] = None
|
||||
self._last_error: Optional[str] = None
|
||||
|
||||
@property
|
||||
def manager(self) -> MSFManager:
|
||||
"""Get or create the MSF manager."""
|
||||
if self._manager is None:
|
||||
self._manager = get_msf_manager()
|
||||
return self._manager
|
||||
|
||||
@property
|
||||
def is_connected(self) -> bool:
|
||||
"""Check if connected to MSF RPC."""
|
||||
return self.manager.is_connected
|
||||
|
||||
@property
|
||||
def last_error(self) -> Optional[str]:
|
||||
"""Get the last error message."""
|
||||
return self._last_error
|
||||
|
||||
def ensure_connected(self, password: str = None, auto_prompt: bool = True) -> Tuple[bool, str]:
|
||||
"""Ensure we have a valid connection to MSF RPC.
|
||||
|
||||
Args:
|
||||
password: Optional password to use for connection.
|
||||
auto_prompt: If True, prompt for password if needed.
|
||||
|
||||
Returns:
|
||||
Tuple of (success, message).
|
||||
"""
|
||||
# Check if already connected
|
||||
if self.is_connected:
|
||||
# Verify the connection is actually valid with a test request
|
||||
try:
|
||||
self.manager.rpc.get_version()
|
||||
return True, "Connected"
|
||||
except Exception as e:
|
||||
error_str = str(e)
|
||||
if 'Invalid Authentication Token' in error_str:
|
||||
# Token expired, need to reconnect
|
||||
pass
|
||||
else:
|
||||
self._last_error = error_str
|
||||
return False, f"Connection test failed: {error_str}"
|
||||
|
||||
# Need to connect or reconnect
|
||||
try:
|
||||
# Disconnect existing stale connection
|
||||
if self.manager.rpc:
|
||||
try:
|
||||
self.manager.rpc.disconnect()
|
||||
except:
|
||||
pass
|
||||
|
||||
# Get password from settings or parameter
|
||||
settings = self.manager.get_settings()
|
||||
connect_password = password or settings.get('password')
|
||||
|
||||
if not connect_password and auto_prompt:
|
||||
print(f"{Colors.YELLOW}[!] MSF RPC password required{Colors.RESET}")
|
||||
connect_password = input(f" Password: ").strip()
|
||||
|
||||
if not connect_password:
|
||||
self._last_error = "No password provided"
|
||||
return False, "No password provided"
|
||||
|
||||
# Connect
|
||||
self.manager.connect(connect_password)
|
||||
return True, "Connected successfully"
|
||||
|
||||
except MSFError as e:
|
||||
self._last_error = str(e)
|
||||
return False, f"MSF Error: {e}"
|
||||
except Exception as e:
|
||||
self._last_error = str(e)
|
||||
return False, f"Connection failed: {e}"
|
||||
|
||||
def _run_console_command(self, commands: str, timeout: int = 120) -> Tuple[str, Optional[str]]:
|
||||
"""Execute commands via MSF console and capture output.
|
||||
|
||||
Args:
|
||||
commands: Newline-separated commands to run.
|
||||
timeout: Maximum wait time in seconds.
|
||||
|
||||
Returns:
|
||||
Tuple of (output, error_message).
|
||||
"""
|
||||
try:
|
||||
# Create console
|
||||
console = self.manager.rpc._request("console.create")
|
||||
console_id = console.get("id")
|
||||
|
||||
if not console_id:
|
||||
return "", "Failed to create console"
|
||||
|
||||
try:
|
||||
# Wait for console to initialize and consume banner
|
||||
time.sleep(2)
|
||||
self.manager.rpc._request("console.read", [console_id])
|
||||
|
||||
# Send commands one at a time
|
||||
for cmd in commands.strip().split('\n'):
|
||||
cmd = cmd.strip()
|
||||
if cmd:
|
||||
self.manager.rpc._request("console.write", [console_id, cmd + "\n"])
|
||||
time.sleep(0.3)
|
||||
|
||||
# Collect output
|
||||
output = ""
|
||||
waited = 0
|
||||
idle_count = 0
|
||||
|
||||
while waited < timeout:
|
||||
time.sleep(1)
|
||||
waited += 1
|
||||
|
||||
result = self.manager.rpc._request("console.read", [console_id])
|
||||
new_data = result.get("data", "")
|
||||
|
||||
if new_data:
|
||||
output += new_data
|
||||
idle_count = 0
|
||||
else:
|
||||
idle_count += 1
|
||||
|
||||
# Stop if not busy and idle for 3+ seconds
|
||||
if not result.get("busy", False) and idle_count >= 3:
|
||||
break
|
||||
|
||||
# Check for timeout
|
||||
if waited >= timeout:
|
||||
return output, "Execution timed out"
|
||||
|
||||
return output, None
|
||||
|
||||
finally:
|
||||
# Clean up console
|
||||
try:
|
||||
self.manager.rpc._request("console.destroy", [console_id])
|
||||
except:
|
||||
pass
|
||||
|
||||
except Exception as e:
|
||||
error_str = str(e)
|
||||
if 'Invalid Authentication Token' in error_str:
|
||||
return "", "AUTH_ERROR"
|
||||
return "", f"Console error: {e}"
|
||||
|
||||
def _clean_output(self, raw_output: str) -> str:
|
||||
"""Remove banner noise and clean up MSF output.
|
||||
|
||||
Args:
|
||||
raw_output: Raw console output.
|
||||
|
||||
Returns:
|
||||
Cleaned output string.
|
||||
"""
|
||||
lines = []
|
||||
for line in raw_output.split('\n'):
|
||||
line_stripped = line.strip()
|
||||
|
||||
# Skip empty lines
|
||||
if not line_stripped:
|
||||
continue
|
||||
|
||||
# Skip banner/noise patterns
|
||||
skip = False
|
||||
for pattern in self.SKIP_PATTERNS:
|
||||
if pattern.lower() in line_stripped.lower():
|
||||
skip = True
|
||||
break
|
||||
|
||||
if skip:
|
||||
continue
|
||||
|
||||
# Skip prompt lines
|
||||
if line_stripped.startswith('>') and len(line_stripped) < 5:
|
||||
continue
|
||||
|
||||
# Skip set confirmations (we already show these)
|
||||
if ' => ' in line_stripped and any(
|
||||
line_stripped.startswith(opt) for opt in
|
||||
['RHOSTS', 'RHOST', 'PORTS', 'LHOST', 'LPORT', 'THREADS']
|
||||
):
|
||||
continue
|
||||
|
||||
lines.append(line)
|
||||
|
||||
return '\n'.join(lines)
|
||||
|
||||
def _parse_output(self, cleaned_output: str, module_path: str) -> Dict[str, Any]:
|
||||
"""Parse cleaned output into structured data.
|
||||
|
||||
Args:
|
||||
cleaned_output: Cleaned console output.
|
||||
module_path: The module that was run (for context).
|
||||
|
||||
Returns:
|
||||
Dictionary with parsed results.
|
||||
"""
|
||||
result = {
|
||||
'findings': [],
|
||||
'info': [],
|
||||
'errors': [],
|
||||
'warnings': [],
|
||||
'open_ports': [],
|
||||
'services': [],
|
||||
'error_count': 0,
|
||||
}
|
||||
|
||||
is_scanner = 'scanner' in module_path.lower()
|
||||
is_portscan = 'portscan' in module_path.lower()
|
||||
|
||||
for line in cleaned_output.split('\n'):
|
||||
line_stripped = line.strip()
|
||||
|
||||
# Categorize by prefix
|
||||
if '[+]' in line:
|
||||
result['findings'].append(line_stripped)
|
||||
|
||||
# Try to extract port/service info from scanner results
|
||||
if is_scanner:
|
||||
# Look for IP:port patterns
|
||||
service_match = self.SERVICE_PATTERN.search(line)
|
||||
if service_match:
|
||||
ip, port, info = service_match.groups()
|
||||
result['services'].append({
|
||||
'ip': ip,
|
||||
'port': int(port),
|
||||
'info': info.strip()
|
||||
})
|
||||
|
||||
# Look for "open" port mentions
|
||||
if is_portscan and 'open' in line.lower():
|
||||
port_match = re.search(r':(\d+)\s', line)
|
||||
if port_match:
|
||||
result['open_ports'].append({
|
||||
'port': int(port_match.group(1)),
|
||||
'state': 'open'
|
||||
})
|
||||
|
||||
elif '[-]' in line or 'Error:' in line:
|
||||
# Count NoMethodError and similar spam but don't store each one
|
||||
if 'NoMethodError' in line or 'undefined method' in line:
|
||||
result['error_count'] += 1
|
||||
else:
|
||||
result['errors'].append(line_stripped)
|
||||
|
||||
elif '[!]' in line:
|
||||
result['warnings'].append(line_stripped)
|
||||
|
||||
elif '[*]' in line:
|
||||
result['info'].append(line_stripped)
|
||||
|
||||
return result
|
||||
|
||||
def run_module(
|
||||
self,
|
||||
module_path: str,
|
||||
options: Dict[str, Any] = None,
|
||||
timeout: int = 120,
|
||||
auto_reconnect: bool = True
|
||||
) -> MSFResult:
|
||||
"""Execute an MSF module and return parsed results.
|
||||
|
||||
Args:
|
||||
module_path: Full module path (e.g., 'auxiliary/scanner/portscan/tcp').
|
||||
options: Module options dictionary.
|
||||
timeout: Maximum execution time in seconds.
|
||||
auto_reconnect: If True, attempt to reconnect on auth errors.
|
||||
|
||||
Returns:
|
||||
MSFResult with parsed output.
|
||||
"""
|
||||
options = options or {}
|
||||
target = options.get('RHOSTS', options.get('RHOST', ''))
|
||||
start_time = time.time()
|
||||
|
||||
# Ensure connected
|
||||
connected, msg = self.ensure_connected()
|
||||
if not connected:
|
||||
return MSFResult(
|
||||
status=MSFStatus.NOT_CONNECTED,
|
||||
module=module_path,
|
||||
target=target,
|
||||
errors=[msg]
|
||||
)
|
||||
|
||||
# Build console commands
|
||||
commands = f"use {module_path}\n"
|
||||
for key, value in options.items():
|
||||
commands += f"set {key} {value}\n"
|
||||
commands += "run"
|
||||
|
||||
# Execute
|
||||
raw_output, error = self._run_console_command(commands, timeout)
|
||||
|
||||
# Handle auth error with reconnect
|
||||
if error == "AUTH_ERROR" and auto_reconnect:
|
||||
connected, msg = self.ensure_connected()
|
||||
if connected:
|
||||
raw_output, error = self._run_console_command(commands, timeout)
|
||||
else:
|
||||
return MSFResult(
|
||||
status=MSFStatus.AUTH_ERROR,
|
||||
module=module_path,
|
||||
target=target,
|
||||
errors=["Session expired and reconnection failed"]
|
||||
)
|
||||
|
||||
# Handle other errors
|
||||
if error and error != "AUTH_ERROR":
|
||||
if "timed out" in error.lower():
|
||||
status = MSFStatus.TIMEOUT
|
||||
else:
|
||||
status = MSFStatus.FAILED
|
||||
return MSFResult(
|
||||
status=status,
|
||||
module=module_path,
|
||||
target=target,
|
||||
raw_output=raw_output,
|
||||
errors=[error]
|
||||
)
|
||||
|
||||
# Clean and parse output
|
||||
cleaned = self._clean_output(raw_output)
|
||||
parsed = self._parse_output(cleaned, module_path)
|
||||
|
||||
execution_time = time.time() - start_time
|
||||
|
||||
# Determine status
|
||||
if parsed['error_count'] > 0 and not parsed['findings']:
|
||||
status = MSFStatus.FAILED
|
||||
elif parsed['error_count'] > 0:
|
||||
status = MSFStatus.PARTIAL
|
||||
elif parsed['findings'] or parsed['info']:
|
||||
status = MSFStatus.SUCCESS
|
||||
else:
|
||||
status = MSFStatus.SUCCESS # No output isn't necessarily an error
|
||||
|
||||
return MSFResult(
|
||||
status=status,
|
||||
module=module_path,
|
||||
target=target,
|
||||
raw_output=raw_output,
|
||||
cleaned_output=cleaned,
|
||||
findings=parsed['findings'],
|
||||
info=parsed['info'],
|
||||
errors=parsed['errors'],
|
||||
warnings=parsed['warnings'],
|
||||
open_ports=parsed['open_ports'],
|
||||
services=parsed['services'],
|
||||
execution_time=execution_time,
|
||||
error_count=parsed['error_count']
|
||||
)
|
||||
|
||||
def run_scanner(
|
||||
self,
|
||||
module_path: str,
|
||||
target: str,
|
||||
ports: str = None,
|
||||
options: Dict[str, Any] = None,
|
||||
timeout: int = 120
|
||||
) -> MSFResult:
|
||||
"""Convenience method for running scanner modules.
|
||||
|
||||
Args:
|
||||
module_path: Scanner module path.
|
||||
target: Target IP or range (RHOSTS).
|
||||
ports: Port specification (optional).
|
||||
options: Additional options.
|
||||
timeout: Maximum execution time.
|
||||
|
||||
Returns:
|
||||
MSFResult with scan results.
|
||||
"""
|
||||
opts = {'RHOSTS': target}
|
||||
if ports:
|
||||
opts['PORTS'] = ports
|
||||
if options:
|
||||
opts.update(options)
|
||||
|
||||
return self.run_module(module_path, opts, timeout)
|
||||
|
||||
def get_module_info(self, module_path: str) -> Optional[Dict[str, Any]]:
|
||||
"""Get information about a module.
|
||||
|
||||
Args:
|
||||
module_path: Full module path.
|
||||
|
||||
Returns:
|
||||
Module info dictionary or None.
|
||||
"""
|
||||
connected, _ = self.ensure_connected(auto_prompt=False)
|
||||
if not connected:
|
||||
return None
|
||||
|
||||
try:
|
||||
# Determine module type from path
|
||||
parts = module_path.split('/')
|
||||
if len(parts) < 2:
|
||||
return None
|
||||
|
||||
module_type = parts[0]
|
||||
module_name = '/'.join(parts[1:])
|
||||
|
||||
info = self.manager.rpc.get_module_info(module_type, module_name)
|
||||
return {
|
||||
'name': info.name,
|
||||
'description': info.description,
|
||||
'author': info.author,
|
||||
'type': info.type,
|
||||
'rank': info.rank,
|
||||
'references': info.references
|
||||
}
|
||||
except Exception as e:
|
||||
self._last_error = str(e)
|
||||
return None
|
||||
|
||||
def get_module_options(self, module_path: str) -> Optional[Dict[str, Any]]:
|
||||
"""Get available options for a module.
|
||||
|
||||
Args:
|
||||
module_path: Full module path.
|
||||
|
||||
Returns:
|
||||
Options dictionary or None.
|
||||
"""
|
||||
connected, _ = self.ensure_connected(auto_prompt=False)
|
||||
if not connected:
|
||||
return None
|
||||
|
||||
try:
|
||||
parts = module_path.split('/')
|
||||
if len(parts) < 2:
|
||||
return None
|
||||
|
||||
module_type = parts[0]
|
||||
module_name = '/'.join(parts[1:])
|
||||
|
||||
return self.manager.rpc.get_module_options(module_type, module_name)
|
||||
except Exception as e:
|
||||
self._last_error = str(e)
|
||||
return None
|
||||
|
||||
def search_modules(self, query: str) -> List[str]:
|
||||
"""Search for modules matching a query.
|
||||
|
||||
Args:
|
||||
query: Search query.
|
||||
|
||||
Returns:
|
||||
List of matching module paths.
|
||||
"""
|
||||
connected, _ = self.ensure_connected(auto_prompt=False)
|
||||
if not connected:
|
||||
return []
|
||||
|
||||
try:
|
||||
results = self.manager.rpc.search_modules(query)
|
||||
# Results are typically dicts with 'fullname' key
|
||||
if isinstance(results, list):
|
||||
return [r.get('fullname', r) if isinstance(r, dict) else str(r) for r in results]
|
||||
return []
|
||||
except Exception as e:
|
||||
self._last_error = str(e)
|
||||
return []
|
||||
|
||||
def list_modules(self, module_type: str = None) -> List[str]:
|
||||
"""List available modules by type.
|
||||
|
||||
Args:
|
||||
module_type: Filter by type (exploit, auxiliary, post, payload, encoder, nop).
|
||||
If None, returns all modules.
|
||||
|
||||
Returns:
|
||||
List of module paths.
|
||||
"""
|
||||
connected, _ = self.ensure_connected(auto_prompt=False)
|
||||
if not connected:
|
||||
return []
|
||||
|
||||
try:
|
||||
return self.manager.rpc.list_modules(module_type)
|
||||
except Exception as e:
|
||||
self._last_error = str(e)
|
||||
return []
|
||||
|
||||
def list_sessions(self) -> Dict[str, Any]:
|
||||
"""List active MSF sessions.
|
||||
|
||||
Returns:
|
||||
Dictionary of session IDs to session info.
|
||||
"""
|
||||
connected, _ = self.ensure_connected(auto_prompt=False)
|
||||
if not connected:
|
||||
return {}
|
||||
|
||||
try:
|
||||
return self.manager.rpc.list_sessions()
|
||||
except Exception as e:
|
||||
self._last_error = str(e)
|
||||
return {}
|
||||
|
||||
def list_jobs(self) -> Dict[str, Any]:
|
||||
"""List running MSF jobs.
|
||||
|
||||
Returns:
|
||||
Dictionary of job IDs to job info.
|
||||
"""
|
||||
connected, _ = self.ensure_connected(auto_prompt=False)
|
||||
if not connected:
|
||||
return {}
|
||||
|
||||
try:
|
||||
return self.manager.rpc.list_jobs()
|
||||
except Exception as e:
|
||||
self._last_error = str(e)
|
||||
return {}
|
||||
|
||||
def stop_job(self, job_id: str) -> bool:
|
||||
"""Stop a running job.
|
||||
|
||||
Args:
|
||||
job_id: Job ID to stop.
|
||||
|
||||
Returns:
|
||||
True if stopped successfully.
|
||||
"""
|
||||
connected, _ = self.ensure_connected(auto_prompt=False)
|
||||
if not connected:
|
||||
return False
|
||||
|
||||
try:
|
||||
return self.manager.rpc.stop_job(job_id)
|
||||
except Exception as e:
|
||||
self._last_error = str(e)
|
||||
return False
|
||||
|
||||
def execute_module_job(
|
||||
self,
|
||||
module_path: str,
|
||||
options: Dict[str, Any] = None
|
||||
) -> Tuple[bool, Optional[str], Optional[str]]:
|
||||
"""Execute a module as a background job (non-blocking).
|
||||
|
||||
This is different from run_module() which uses console and captures output.
|
||||
Use this for exploits and long-running modules that should run in background.
|
||||
|
||||
Args:
|
||||
module_path: Full module path.
|
||||
options: Module options.
|
||||
|
||||
Returns:
|
||||
Tuple of (success, job_id, error_message).
|
||||
"""
|
||||
connected, msg = self.ensure_connected()
|
||||
if not connected:
|
||||
return False, None, msg
|
||||
|
||||
try:
|
||||
parts = module_path.split('/')
|
||||
if len(parts) < 2:
|
||||
return False, None, "Invalid module path"
|
||||
|
||||
module_type = parts[0]
|
||||
module_name = '/'.join(parts[1:])
|
||||
|
||||
result = self.manager.rpc.execute_module(module_type, module_name, options or {})
|
||||
|
||||
job_id = result.get('job_id')
|
||||
if job_id is not None:
|
||||
return True, str(job_id), None
|
||||
else:
|
||||
# Check for error in result
|
||||
error = result.get('error_message') or result.get('error') or "Unknown error"
|
||||
return False, None, str(error)
|
||||
|
||||
except Exception as e:
|
||||
self._last_error = str(e)
|
||||
return False, None, str(e)
|
||||
|
||||
def session_read(self, session_id: str) -> Tuple[bool, str]:
|
||||
"""Read from a session shell.
|
||||
|
||||
Args:
|
||||
session_id: Session ID.
|
||||
|
||||
Returns:
|
||||
Tuple of (success, output).
|
||||
"""
|
||||
connected, _ = self.ensure_connected(auto_prompt=False)
|
||||
if not connected:
|
||||
return False, ""
|
||||
|
||||
try:
|
||||
output = self.manager.rpc.session_shell_read(session_id)
|
||||
return True, output
|
||||
except Exception as e:
|
||||
self._last_error = str(e)
|
||||
return False, ""
|
||||
|
||||
def session_write(self, session_id: str, command: str) -> bool:
|
||||
"""Write a command to a session shell.
|
||||
|
||||
Args:
|
||||
session_id: Session ID.
|
||||
command: Command to execute.
|
||||
|
||||
Returns:
|
||||
True if written successfully.
|
||||
"""
|
||||
connected, _ = self.ensure_connected(auto_prompt=False)
|
||||
if not connected:
|
||||
return False
|
||||
|
||||
try:
|
||||
return self.manager.rpc.session_shell_write(session_id, command)
|
||||
except Exception as e:
|
||||
self._last_error = str(e)
|
||||
return False
|
||||
|
||||
def session_stop(self, session_id: str) -> bool:
|
||||
"""Stop/kill a session.
|
||||
|
||||
Args:
|
||||
session_id: Session ID.
|
||||
|
||||
Returns:
|
||||
True if stopped successfully.
|
||||
"""
|
||||
connected, _ = self.ensure_connected(auto_prompt=False)
|
||||
if not connected:
|
||||
return False
|
||||
|
||||
try:
|
||||
return self.manager.rpc.session_stop(session_id)
|
||||
except Exception as e:
|
||||
self._last_error = str(e)
|
||||
return False
|
||||
|
||||
def run_console_command(self, command: str, timeout: int = 30) -> Tuple[bool, str]:
|
||||
"""Run a raw console command and return output.
|
||||
|
||||
This is a lower-level method for direct console access.
|
||||
|
||||
Args:
|
||||
command: Console command to run.
|
||||
timeout: Timeout in seconds.
|
||||
|
||||
Returns:
|
||||
Tuple of (success, output).
|
||||
"""
|
||||
connected, msg = self.ensure_connected()
|
||||
if not connected:
|
||||
return False, msg
|
||||
|
||||
try:
|
||||
output = self.manager.rpc.run_console_command(command, timeout=timeout)
|
||||
return True, output
|
||||
except Exception as e:
|
||||
self._last_error = str(e)
|
||||
return False, str(e)
|
||||
|
||||
    def print_result(self, result: MSFResult, verbose: bool = False):
        """Print a formatted result to the console.

        Renders a color-coded report: findings (green), info (cyan, verbose
        only), warnings (yellow), errors (dim), followed by timing/status and
        any parsed port/service tables.

        Args:
            result: MSFResult to print.
            verbose: If True, show all output including info lines.
        """
        print(f"\n{Colors.CYAN}Module Output:{Colors.RESET}")
        print(f"{Colors.DIM}{'─' * 50}{Colors.RESET}")

        # Connection/auth/timeout failures short-circuit the detail dump.
        if result.status == MSFStatus.NOT_CONNECTED:
            print(f" {Colors.RED}[X] Not connected to Metasploit{Colors.RESET}")
            if result.errors:
                # First error line usually carries the connection detail.
                print(f" {result.errors[0]}")
        elif result.status == MSFStatus.AUTH_ERROR:
            print(f" {Colors.RED}[X] Authentication failed{Colors.RESET}")
        elif result.status == MSFStatus.TIMEOUT:
            print(f" {Colors.YELLOW}[!] Execution timed out{Colors.RESET}")
        else:
            # Print findings (green)
            for line in result.findings:
                print(f" {Colors.GREEN}{line}{Colors.RESET}")

            # Print info (cyan) - only in verbose mode
            if verbose:
                for line in result.info:
                    print(f" {Colors.CYAN}{line}{Colors.RESET}")

            # Print warnings (yellow)
            for line in result.warnings:
                print(f" {Colors.YELLOW}{line}{Colors.RESET}")

            # Print errors (dim)
            for line in result.errors:
                print(f" {Colors.DIM}{line}{Colors.RESET}")

            # Summarize error count if high
            if result.error_count > 0:
                print(f"\n {Colors.YELLOW}[!] {result.error_count} errors occurred during execution{Colors.RESET}")

        print(f"{Colors.DIM}{'─' * 50}{Colors.RESET}")

        # Print summary
        if result.execution_time > 0:
            print(f" {Colors.DIM}Time: {result.execution_time:.1f}s{Colors.RESET}")
        print(f" {Colors.DIM}Status: {result.get_summary()}{Colors.RESET}")

        # Print parsed port/service info if available
        # NOTE(review): open_ports entries appear to be dicts with at least a
        # 'port' key and an optional 'state' — confirm against the parser.
        if result.open_ports:
            print(f"\n {Colors.GREEN}Open Ports:{Colors.RESET}")
            for port_info in result.open_ports:
                print(f" {port_info['port']}/tcp - {port_info.get('state', 'open')}")

        if result.services:
            print(f"\n {Colors.GREEN}Services Detected:{Colors.RESET}")
            for svc in result.services:
                print(f" {svc['ip']}:{svc['port']} - {svc['info']}")
|
||||
|
||||
|
||||
# Global instance
|
||||
_msf_interface: Optional[MSFInterface] = None
|
||||
|
||||
|
||||
def get_msf_interface() -> MSFInterface:
    """Return the shared MSFInterface, constructing it lazily on first call."""
    global _msf_interface
    if _msf_interface is None:
        _msf_interface = MSFInterface()
    return _msf_interface
|
||||
1192
core/msf_modules.py
Normal file
1192
core/msf_modules.py
Normal file
File diff suppressed because it is too large
Load Diff
1124
core/msf_terms.py
Normal file
1124
core/msf_terms.py
Normal file
File diff suppressed because it is too large
Load Diff
309
core/paths.py
Normal file
309
core/paths.py
Normal file
@@ -0,0 +1,309 @@
|
||||
"""
|
||||
AUTARCH Path Resolution
|
||||
Centralized path management for cross-platform portability.
|
||||
|
||||
All paths resolve relative to the application root directory.
|
||||
Tool lookup checks project directories first, then system PATH.
|
||||
"""
|
||||
|
||||
import os
|
||||
import platform
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
from typing import Optional, List
|
||||
|
||||
|
||||
# ── Application Root ────────────────────────────────────────────────
|
||||
#
|
||||
# Two directories matter:
|
||||
# _BUNDLE_DIR — read-only bundled assets (templates, static, default modules)
|
||||
# Points to sys._MEIPASS in a frozen PyInstaller build,
|
||||
# otherwise same as _APP_DIR.
|
||||
# _APP_DIR — writable application root (config, data, results, user modules)
|
||||
# Points to the .exe's parent directory in a frozen build,
|
||||
# otherwise the project root (parent of core/).
|
||||
|
||||
import sys as _sys
|
||||
|
||||
_FROZEN = getattr(_sys, 'frozen', False)
|
||||
|
||||
if _FROZEN:
|
||||
# PyInstaller frozen build
|
||||
_BUNDLE_DIR = Path(_sys._MEIPASS)
|
||||
_APP_DIR = Path(_sys.executable).resolve().parent
|
||||
else:
|
||||
# Normal Python execution
|
||||
_APP_DIR = Path(__file__).resolve().parent.parent
|
||||
_BUNDLE_DIR = _APP_DIR
|
||||
|
||||
|
||||
def is_frozen() -> bool:
    """Return True if running from a PyInstaller bundle."""
    return _FROZEN


def get_app_dir() -> Path:
    """Return the writable application root directory."""
    return _APP_DIR


def get_bundle_dir() -> Path:
    """Return the bundle directory (read-only assets: templates, static, default modules)."""
    return _BUNDLE_DIR


def _ensure_dir(d: Path) -> Path:
    # Create the directory (and any parents) on demand, then hand it back.
    d.mkdir(parents=True, exist_ok=True)
    return d


def get_core_dir() -> Path:
    """Return the bundled core/ package directory."""
    return _BUNDLE_DIR / 'core'


def get_modules_dir() -> Path:
    """Return the bundled modules directory (read-only in frozen mode)."""
    return _BUNDLE_DIR / 'modules'


def get_user_modules_dir() -> Path:
    """Return the user modules directory (writable, next to exe).

    New or modified modules go here; scanned in addition to bundled modules.
    """
    user_dir = _APP_DIR / 'modules'
    # Only auto-create when frozen: in dev mode this is the bundled dir itself.
    return _ensure_dir(user_dir) if _FROZEN else user_dir


def get_data_dir() -> Path:
    """Return the writable data directory, creating it if needed."""
    return _ensure_dir(_APP_DIR / 'data')


def get_config_path() -> Path:
    """Return the settings file path next to the exe.

    In a frozen build, seed the writable copy from the bundled default the
    first time it is requested.
    """
    writable = _APP_DIR / 'autarch_settings.conf'
    if _FROZEN and not writable.exists():
        bundled = _BUNDLE_DIR / 'autarch_settings.conf'
        if bundled.exists():
            shutil.copy2(str(bundled), str(writable))
    return writable


def get_results_dir() -> Path:
    """Return the results directory, creating it if needed."""
    return _ensure_dir(_APP_DIR / 'results')


def get_reports_dir() -> Path:
    """Return the reports subdirectory under results/, creating it if needed."""
    return _ensure_dir(get_results_dir() / 'reports')


def get_dossiers_dir() -> Path:
    """Return the dossiers directory, creating it if needed."""
    return _ensure_dir(_APP_DIR / 'dossiers')


def get_uploads_dir() -> Path:
    """Return the uploads subdirectory under data/, creating it if needed."""
    return _ensure_dir(get_data_dir() / 'uploads')


def get_backups_dir() -> Path:
    """Return the backups directory, creating it if needed."""
    return _ensure_dir(_APP_DIR / 'backups')


def get_templates_dir() -> Path:
    """Return the template/config directory (.config) under the app root."""
    return _APP_DIR / '.config'


def get_custom_configs_dir() -> Path:
    """Return the custom-config directory under .config/, creating it if needed."""
    return _ensure_dir(_APP_DIR / '.config' / 'custom')
|
||||
|
||||
|
||||
# ── Platform Detection ──────────────────────────────────────────────
|
||||
|
||||
def _get_arch() -> str:
|
||||
"""Return architecture string: 'x86_64', 'arm64', etc."""
|
||||
machine = platform.machine().lower()
|
||||
if machine in ('aarch64', 'arm64'):
|
||||
return 'arm64'
|
||||
elif machine in ('x86_64', 'amd64'):
|
||||
return 'x86_64'
|
||||
return machine
|
||||
|
||||
|
||||
def get_platform() -> str:
|
||||
"""Return platform: 'linux', 'windows', or 'darwin'."""
|
||||
return platform.system().lower()
|
||||
|
||||
|
||||
def get_platform_tag() -> str:
|
||||
"""Return platform-arch tag like 'linux-arm64', 'windows-x86_64'."""
|
||||
return f"{get_platform()}-{_get_arch()}"
|
||||
|
||||
|
||||
def is_windows() -> bool:
|
||||
return platform.system() == 'Windows'
|
||||
|
||||
|
||||
def is_linux() -> bool:
|
||||
return platform.system() == 'Linux'
|
||||
|
||||
|
||||
def is_mac() -> bool:
|
||||
return platform.system() == 'Darwin'
|
||||
|
||||
|
||||
# ── Tool / Binary Lookup ───────────────────────────────────────────
|
||||
#
|
||||
# Priority order:
|
||||
# 1. System PATH (shutil.which — native binaries, correct arch)
|
||||
# 2. Platform-specific well-known install locations
|
||||
# 3. Platform-specific project tools (tools/linux-arm64/, etc.)
|
||||
# 4. Generic project directories (android/, tools/, bin/)
|
||||
# 5. Extra paths passed by caller
|
||||
#
|
||||
|
||||
# Well-known install locations by platform (last resort)
|
||||
_PLATFORM_SEARCH_PATHS = {
|
||||
'windows': [
|
||||
Path(os.environ.get('LOCALAPPDATA', '')) / 'Android' / 'Sdk' / 'platform-tools',
|
||||
Path(os.environ.get('USERPROFILE', '')) / 'Android' / 'Sdk' / 'platform-tools',
|
||||
Path('C:/Program Files (x86)/Nmap'),
|
||||
Path('C:/Program Files/Nmap'),
|
||||
Path('C:/Program Files/Wireshark'),
|
||||
Path('C:/Program Files (x86)/Wireshark'),
|
||||
Path('C:/metasploit-framework/bin'),
|
||||
],
|
||||
'darwin': [
|
||||
Path('/opt/homebrew/bin'),
|
||||
Path('/usr/local/bin'),
|
||||
],
|
||||
'linux': [
|
||||
Path('/usr/local/bin'),
|
||||
Path('/snap/bin'),
|
||||
],
|
||||
}
|
||||
|
||||
# Tools that need extra environment setup when run from bundled copies
|
||||
_TOOL_ENV_SETUP = {
|
||||
'nmap': '_setup_nmap_env',
|
||||
}
|
||||
|
||||
|
||||
def _setup_nmap_env(tool_path: str):
|
||||
"""Set NMAPDIR so bundled nmap finds its data files."""
|
||||
tool_dir = Path(tool_path).parent
|
||||
nmap_data = tool_dir / 'nmap-data'
|
||||
if nmap_data.is_dir():
|
||||
os.environ['NMAPDIR'] = str(nmap_data)
|
||||
|
||||
|
||||
def _is_native_binary(path: str) -> bool:
|
||||
"""Check if an ELF binary matches the host architecture."""
|
||||
try:
|
||||
with open(path, 'rb') as f:
|
||||
magic = f.read(20)
|
||||
if magic[:4] != b'\x7fELF':
|
||||
return True # Not ELF (script, etc.) — assume OK
|
||||
# ELF e_machine at offset 18 (2 bytes, little-endian)
|
||||
e_machine = int.from_bytes(magic[18:20], 'little')
|
||||
arch = _get_arch()
|
||||
if arch == 'arm64' and e_machine == 183: # EM_AARCH64
|
||||
return True
|
||||
if arch == 'x86_64' and e_machine == 62: # EM_X86_64
|
||||
return True
|
||||
if arch == 'arm64' and e_machine == 62: # x86-64 on arm64 host
|
||||
return False
|
||||
if arch == 'x86_64' and e_machine == 183: # arm64 on x86-64 host
|
||||
return False
|
||||
return True # Unknown arch combo — let it try
|
||||
except Exception:
|
||||
return True # Can't read — assume OK
|
||||
|
||||
|
||||
def find_tool(name: str, extra_paths: Optional[List[str]] = None) -> Optional[str]:
    """
    Find an executable binary by name.

    Search order:
    1. System PATH (native binaries, correct architecture)
    2. Platform-specific well-known install locations
    3. Platform-specific project tools (tools/linux-arm64/ etc.)
    4. Generic project directories (android/, tools/, bin/)
    5. Extra paths provided by caller

    Skips binaries that don't match the host architecture (e.g. x86-64
    binaries on ARM64 hosts) to avoid FEX/emulation issues with root.

    Returns absolute path string, or None if not found.
    """
    # On Windows, append .exe if no extension
    # (shutil.which already applies PATHEXT, so 'names' only matters for the
    # directory scans in steps 2-5).
    names = [name]
    if is_windows() and '.' not in name:
        names.append(name + '.exe')

    # 1. System PATH (most reliable — native packages)
    found = shutil.which(name)
    if found and _is_native_binary(found):
        return found

    # 2. Platform-specific well-known locations
    plat = get_platform()
    for search_dir in _PLATFORM_SEARCH_PATHS.get(plat, []):
        if search_dir.is_dir():
            for n in names:
                full = search_dir / n
                if full.is_file() and os.access(str(full), os.X_OK) and _is_native_binary(str(full)):
                    return str(full)

    # 3-4. Bundled project directories
    plat_tag = get_platform_tag()
    search_dirs = [
        _APP_DIR / 'tools' / plat_tag,  # Platform-specific (tools/linux-arm64/)
        _APP_DIR / 'android',           # Android tools
        _APP_DIR / 'tools',             # Generic tools/
        _APP_DIR / 'bin',               # Generic bin/
    ]

    for tool_dir in search_dirs:
        if tool_dir.is_dir():
            for n in names:
                full = tool_dir / n
                if full.is_file() and os.access(str(full), os.X_OK):
                    found = str(full)
                    if not _is_native_binary(found):
                        continue  # Wrong arch — skip
                    # Apply environment setup for bundled tools
                    # (e.g. nmap needs NMAPDIR pointed at its data files).
                    env_fn = _TOOL_ENV_SETUP.get(name)
                    if env_fn:
                        globals()[env_fn](found)
                    return found

    # 5. Extra paths from caller
    if extra_paths:
        for p in extra_paths:
            for n in names:
                full = os.path.join(p, n)
                if os.path.isfile(full) and os.access(full, os.X_OK) and _is_native_binary(full):
                    return full

    # Last resort: return system PATH result even if wrong arch (FEX may work for user)
    found = shutil.which(name)
    if found:
        return found

    return None
|
||||
|
||||
|
||||
def tool_available(name: str) -> bool:
    """Return True when find_tool() can locate the named binary anywhere."""
    return find_tool(name) is not None
|
||||
703
core/pentest_pipeline.py
Normal file
703
core/pentest_pipeline.py
Normal file
@@ -0,0 +1,703 @@
|
||||
"""
|
||||
AUTARCH Pentest Pipeline
|
||||
Three-module architecture (Parsing -> Reasoning -> Generation)
|
||||
based on PentestGPT's USENIX paper methodology.
|
||||
Uses AUTARCH's local LLM via llama-cpp-python.
|
||||
"""
|
||||
|
||||
import re
|
||||
from typing import Optional, List, Dict, Any, Tuple
|
||||
from datetime import datetime
|
||||
|
||||
from .pentest_tree import PentestTree, PTTNode, PTTNodeType, NodeStatus
|
||||
from .config import get_config
|
||||
|
||||
|
||||
# ─── Source type detection patterns ──────────────────────────────────
|
||||
|
||||
# Ordered mapping of tool name -> signature regex; first match wins.
SOURCE_PATTERNS = {
    'nmap': re.compile(r'Nmap scan report|PORT\s+STATE\s+SERVICE|nmap', re.IGNORECASE),
    'msf_scan': re.compile(r'auxiliary/scanner|msf\d?\s*>.*auxiliary|^\[\*\]\s.*scanning', re.IGNORECASE | re.MULTILINE),
    'msf_exploit': re.compile(r'exploit/|meterpreter|session\s+\d+\s+opened|^\[\*\]\s.*exploit', re.IGNORECASE | re.MULTILINE),
    'msf_post': re.compile(r'post/|meterpreter\s*>', re.IGNORECASE),
    'web': re.compile(r'HTTP/\d|<!DOCTYPE|<html|Content-Type:', re.IGNORECASE),
    'shell': re.compile(r'^\$\s|^root@|^#\s|bash|zsh', re.IGNORECASE | re.MULTILINE),
    'gobuster': re.compile(r'Gobuster|gobuster|Dir found|/\w+\s+\(Status:\s*\d+\)', re.IGNORECASE),
    'nikto': re.compile(r'Nikto|nikto|^\+\s', re.IGNORECASE | re.MULTILINE),
}


def detect_source_type(output: str) -> str:
    """Best-effort detection of which tool produced *output*.

    Only the first 2000 characters are scanned; returns 'manual' when no
    signature matches.
    """
    head = output[:2000]
    return next(
        (source for source, pattern in SOURCE_PATTERNS.items() if pattern.search(head)),
        'manual',
    )
|
||||
|
||||
|
||||
# ─── Prompt Templates ────────────────────────────────────────────────
|
||||
|
||||
PARSING_SYSTEM_PROMPT = """You are a penetration testing output parser. Extract key findings from raw tool output.
|
||||
|
||||
Given raw output from a security tool, extract and summarize:
|
||||
1. Open ports and services (with versions when available)
|
||||
2. Vulnerabilities or misconfigurations found
|
||||
3. Credentials or sensitive information discovered
|
||||
4. Operating system and software versions
|
||||
5. Any error messages or access denials
|
||||
|
||||
Rules:
|
||||
- Be concise. Use bullet points.
|
||||
- Include specific version numbers, port numbers, and IP addresses.
|
||||
- Prefix exploitable findings with [VULN]
|
||||
- Prefix credentials with [CRED]
|
||||
- Note failed attempts and why they failed.
|
||||
- Do not speculate beyond what the output shows.
|
||||
|
||||
Format your response as:
|
||||
SUMMARY: one line description
|
||||
FINDINGS:
|
||||
- finding 1
|
||||
- finding 2
|
||||
- [VULN] vulnerability finding
|
||||
STATUS: success/partial/failed"""
|
||||
|
||||
REASONING_SYSTEM_PROMPT = """You are a penetration testing strategist. You maintain a task tree and decide next steps.
|
||||
|
||||
You will receive:
|
||||
1. The current task tree showing completed and todo tasks
|
||||
2. New findings from the latest tool execution
|
||||
|
||||
Your job:
|
||||
1. UPDATE the tree based on new findings
|
||||
2. DECIDE the single most important next task
|
||||
|
||||
Rules:
|
||||
- Prioritize exploitation paths with highest success likelihood.
|
||||
- If a service version is known, suggest checking for known CVEs.
|
||||
- After recon, focus on the most promising attack surface.
|
||||
- Do not add redundant tasks.
|
||||
- Mark tasks not-applicable if findings make them irrelevant.
|
||||
|
||||
Respond in this exact format:
|
||||
TREE_UPDATES:
|
||||
- ADD: parent_id | node_type | priority | task description
|
||||
- COMPLETE: node_id | findings summary
|
||||
- NOT_APPLICABLE: node_id | reason
|
||||
|
||||
NEXT_TASK: description of the single most important next action
|
||||
REASONING: 1-2 sentences explaining why this is the highest priority"""
|
||||
|
||||
GENERATION_SYSTEM_PROMPT = """You are a penetration testing command generator. Convert task descriptions into specific executable commands.
|
||||
|
||||
Available tools:
|
||||
- shell: Run shell command. Args: {"command": "...", "timeout": 30}
|
||||
- msf_search: Search MSF modules. Args: {"query": "search term"}
|
||||
- msf_module_info: Module details. Args: {"module_type": "auxiliary|exploit|post", "module_name": "path"}
|
||||
- msf_execute: Run MSF module. Args: {"module_type": "...", "module_name": "...", "options": "{\\"RHOSTS\\": \\"...\\"}" }
|
||||
- msf_sessions: List sessions. Args: {}
|
||||
- msf_session_command: Command in session. Args: {"session_id": "...", "command": "..."}
|
||||
- msf_console: MSF console command. Args: {"command": "..."}
|
||||
|
||||
Rules:
|
||||
- Provide the EXACT tool name and JSON arguments.
|
||||
- Describe what to look for in the output.
|
||||
- If multiple steps needed, number them.
|
||||
- Always include RHOSTS/target in module options.
|
||||
- Prefer auxiliary scanners before exploits.
|
||||
|
||||
Format:
|
||||
COMMANDS:
|
||||
1. TOOL: tool_name | ARGS: {"key": "value"} | EXPECT: what to look for
|
||||
2. TOOL: tool_name | ARGS: {"key": "value"} | EXPECT: what to look for
|
||||
FALLBACK: alternative approach if primary fails"""
|
||||
|
||||
INITIAL_PLAN_PROMPT = """You are a penetration testing strategist planning an engagement.
|
||||
|
||||
Target: {target}
|
||||
|
||||
Create an initial reconnaissance plan. List the first 3-5 specific tasks to perform, ordered by priority.
|
||||
|
||||
Format:
|
||||
TASKS:
|
||||
1. node_type | priority | task description
|
||||
2. node_type | priority | task description
|
||||
3. node_type | priority | task description
|
||||
|
||||
FIRST_ACTION: description of the very first thing to do
|
||||
REASONING: why start here"""
|
||||
|
||||
DISCUSS_SYSTEM_PROMPT = """You are a penetration testing expert assistant. Answer the user's question about their current engagement.
|
||||
|
||||
Current target: {target}
|
||||
|
||||
Current status:
|
||||
{tree_summary}
|
||||
|
||||
Answer concisely and provide actionable advice."""
|
||||
|
||||
|
||||
# ─── Pipeline Modules ────────────────────────────────────────────────
|
||||
|
||||
class ParsingModule:
    """Normalizes raw tool output into structured summaries."""

    def __init__(self, llm):
        # llm: chat interface used for the parsing calls.
        self.llm = llm
        self.config = get_config()

    def parse(self, raw_output: str, source_type: str = "auto",
              context: str = "") -> dict:
        """Parse raw tool output into a normalized summary.

        Large output is split into chunks, each summarized separately, and
        the per-chunk results are merged.

        Returns dict with 'summary', 'findings', 'status', 'raw_source'.
        """
        if source_type == "auto":
            source_type = detect_source_type(raw_output)

        try:
            chunk_size = self.config.get_int('pentest', 'output_chunk_size', 2000)
        except Exception:
            chunk_size = 2000  # Config unavailable — fall back to default

        pieces = self._chunk_output(raw_output, chunk_size)

        summaries = []
        findings = []
        status = "unknown"

        for idx, piece in enumerate(pieces):
            header = f"[{source_type} output"
            if len(pieces) > 1:
                header += f" part {idx+1}/{len(pieces)}"
            header += "]"

            message = f"{header}\n{piece}"
            if context:
                message = f"Context: {context}\n\n{message}"

            # Each chunk gets a fresh conversation so summaries don't bleed.
            self.llm.clear_history()
            try:
                response = self.llm.chat(
                    message,
                    system_prompt=PARSING_SYSTEM_PROMPT,
                    temperature=0.2,
                    max_tokens=512,
                )
            except Exception as e:
                return {
                    'summary': f"Parse error: {e}",
                    'findings': [],
                    'status': 'failed',
                    'raw_source': source_type,
                }

            piece_summary, piece_findings, piece_status = self._parse_response(response)
            summaries.append(piece_summary)
            findings.extend(piece_findings)
            if piece_status != "unknown":
                status = piece_status  # Last decisive chunk wins

        return {
            'summary': " | ".join(summaries) if summaries else "No summary",
            'findings': findings,
            'status': status,
            'raw_source': source_type,
        }

    def _chunk_output(self, output: str, max_chunk: int = 2000) -> List[str]:
        """Split large output into newline-aligned chunks of ~max_chunk chars."""
        if len(output) <= max_chunk:
            return [output]
        chunks = []
        buf = []
        size = 0
        for line in output.split('\n'):
            # Flush the buffer before it would overflow (never emit empty chunks).
            if buf and size + len(line) + 1 > max_chunk:
                chunks.append('\n'.join(buf))
                buf = []
                size = 0
            buf.append(line)
            size += len(line) + 1
        if buf:
            chunks.append('\n'.join(buf))
        return chunks

    def _parse_response(self, response: str) -> Tuple[str, List[str], str]:
        """Extract (summary, findings, status) from the LLM's structured reply."""
        summary = ""
        findings = []
        status = "unknown"

        summary_m = re.search(r'SUMMARY:\s*(.+)', response, re.IGNORECASE)
        if summary_m:
            summary = summary_m.group(1).strip()

        section = re.search(
            r'FINDINGS:\s*\n((?:[-*]\s*.+\n?)+)',
            response, re.IGNORECASE
        )
        if section:
            for raw in section.group(1).strip().split('\n'):
                cleaned = re.sub(r'^[-*]\s*', '', raw).strip()
                if cleaned:
                    findings.append(cleaned)

        status_m = re.search(r'STATUS:\s*(\w+)', response, re.IGNORECASE)
        if status_m:
            status = status_m.group(1).strip().lower()

        # Unstructured reply: salvage what we can from the raw text.
        if not summary and not findings:
            summary = response[:200].strip()
            for raw in response.split('\n'):
                raw = raw.strip()
                if raw.startswith(('-', '*', '[VULN]', '[CRED]')):
                    findings.append(re.sub(r'^[-*]\s*', '', raw))

        return summary, findings, status
|
||||
|
||||
|
||||
class ReasoningModule:
|
||||
"""Maintains PTT and decides next actions."""
|
||||
|
||||
def __init__(self, llm, tree: PentestTree):
|
||||
self.llm = llm
|
||||
self.tree = tree
|
||||
|
||||
def reason(self, parsed_output: dict, context: str = "") -> dict:
|
||||
"""Three-step reasoning: update tree, validate, extract next todo.
|
||||
|
||||
Returns dict with 'tree_updates', 'next_task', 'reasoning'.
|
||||
"""
|
||||
tree_summary = self.tree.render_summary()
|
||||
|
||||
findings_text = parsed_output.get('summary', '')
|
||||
if parsed_output.get('findings'):
|
||||
findings_text += "\nFindings:\n"
|
||||
for f in parsed_output['findings']:
|
||||
findings_text += f"- {f}\n"
|
||||
|
||||
message = (
|
||||
f"Current pentest tree:\n{tree_summary}\n\n"
|
||||
f"New information ({parsed_output.get('raw_source', 'unknown')}):\n"
|
||||
f"{findings_text}"
|
||||
)
|
||||
if context:
|
||||
message += f"\n\nAdditional context: {context}"
|
||||
|
||||
self.llm.clear_history()
|
||||
try:
|
||||
response = self.llm.chat(
|
||||
message,
|
||||
system_prompt=REASONING_SYSTEM_PROMPT,
|
||||
temperature=0.3,
|
||||
max_tokens=1024,
|
||||
)
|
||||
except Exception as e:
|
||||
return {
|
||||
'tree_updates': [],
|
||||
'next_task': f"Error during reasoning: {e}",
|
||||
'reasoning': str(e),
|
||||
}
|
||||
|
||||
updates = self._parse_tree_updates(response)
|
||||
self._apply_updates(updates)
|
||||
|
||||
next_task = ""
|
||||
m = re.search(r'NEXT_TASK:\s*(.+)', response, re.IGNORECASE)
|
||||
if m:
|
||||
next_task = m.group(1).strip()
|
||||
|
||||
reasoning = ""
|
||||
m = re.search(r'REASONING:\s*(.+)', response, re.IGNORECASE | re.DOTALL)
|
||||
if m:
|
||||
reasoning = m.group(1).strip().split('\n')[0]
|
||||
|
||||
# Fallback: if no NEXT_TASK parsed, get from tree
|
||||
if not next_task:
|
||||
todo = self.tree.get_next_todo()
|
||||
if todo:
|
||||
next_task = todo.label
|
||||
|
||||
return {
|
||||
'tree_updates': updates,
|
||||
'next_task': next_task,
|
||||
'reasoning': reasoning,
|
||||
}
|
||||
|
||||
def _parse_tree_updates(self, response: str) -> List[dict]:
|
||||
"""Extract tree operations from LLM response."""
|
||||
updates = []
|
||||
|
||||
# Parse ADD operations
|
||||
for m in re.finditer(
|
||||
r'ADD:\s*(\S+)\s*\|\s*(\w+)\s*\|\s*(\d)\s*\|\s*(.+)',
|
||||
response, re.IGNORECASE
|
||||
):
|
||||
parent = m.group(1).strip()
|
||||
if parent.lower() in ('root', 'none', '-'):
|
||||
parent = None
|
||||
ntype_str = m.group(2).strip().lower()
|
||||
ntype = self._map_node_type(ntype_str)
|
||||
updates.append({
|
||||
'operation': 'add',
|
||||
'parent_id': parent,
|
||||
'node_type': ntype,
|
||||
'priority': int(m.group(3)),
|
||||
'label': m.group(4).strip(),
|
||||
})
|
||||
|
||||
# Parse COMPLETE operations
|
||||
for m in re.finditer(
|
||||
r'COMPLETE:\s*(\S+)\s*\|\s*(.+)',
|
||||
response, re.IGNORECASE
|
||||
):
|
||||
updates.append({
|
||||
'operation': 'complete',
|
||||
'node_id': m.group(1).strip(),
|
||||
'findings': m.group(2).strip(),
|
||||
})
|
||||
|
||||
# Parse NOT_APPLICABLE operations
|
||||
for m in re.finditer(
|
||||
r'NOT_APPLICABLE:\s*(\S+)\s*\|\s*(.+)',
|
||||
response, re.IGNORECASE
|
||||
):
|
||||
updates.append({
|
||||
'operation': 'not_applicable',
|
||||
'node_id': m.group(1).strip(),
|
||||
'reason': m.group(2).strip(),
|
||||
})
|
||||
|
||||
return updates
|
||||
|
||||
def _map_node_type(self, type_str: str) -> PTTNodeType:
|
||||
"""Map a string to PTTNodeType."""
|
||||
mapping = {
|
||||
'recon': PTTNodeType.RECONNAISSANCE,
|
||||
'reconnaissance': PTTNodeType.RECONNAISSANCE,
|
||||
'initial_access': PTTNodeType.INITIAL_ACCESS,
|
||||
'initial': PTTNodeType.INITIAL_ACCESS,
|
||||
'access': PTTNodeType.INITIAL_ACCESS,
|
||||
'privesc': PTTNodeType.PRIVILEGE_ESCALATION,
|
||||
'privilege_escalation': PTTNodeType.PRIVILEGE_ESCALATION,
|
||||
'escalation': PTTNodeType.PRIVILEGE_ESCALATION,
|
||||
'lateral': PTTNodeType.LATERAL_MOVEMENT,
|
||||
'lateral_movement': PTTNodeType.LATERAL_MOVEMENT,
|
||||
'persistence': PTTNodeType.PERSISTENCE,
|
||||
'credential': PTTNodeType.CREDENTIAL_ACCESS,
|
||||
'credential_access': PTTNodeType.CREDENTIAL_ACCESS,
|
||||
'creds': PTTNodeType.CREDENTIAL_ACCESS,
|
||||
'exfiltration': PTTNodeType.EXFILTRATION,
|
||||
'exfil': PTTNodeType.EXFILTRATION,
|
||||
}
|
||||
return mapping.get(type_str.lower(), PTTNodeType.CUSTOM)
|
||||
|
||||
def _apply_updates(self, updates: List[dict]):
    """Apply parsed add/complete/not_applicable operations to the tree."""

    def resolve(ref):
        # Accept either a node ID or a (partial) label; None if unresolvable.
        if ref is None or ref in self.tree.nodes:
            return ref
        match = self.tree.find_node_by_label(ref)
        return match.id if match else None

    for update in updates:
        op = update['operation']

        if op == 'add':
            self.tree.add_node(
                label=update['label'],
                node_type=update['node_type'],
                parent_id=resolve(update.get('parent_id')),
                priority=update.get('priority', 3),
            )

        elif op == 'complete':
            node_id = resolve(update['node_id'])
            if node_id is None:
                # Unknown node reference from the LLM; skip silently.
                continue
            self.tree.update_node(
                node_id,
                status=NodeStatus.COMPLETED,
                findings=[update.get('findings', '')],
            )

        elif op == 'not_applicable':
            node_id = resolve(update['node_id'])
            if node_id is None:
                continue
            self.tree.update_node(
                node_id,
                status=NodeStatus.NOT_APPLICABLE,
                details=update.get('reason', ''),
            )
|
||||
|
||||
|
||||
class GenerationModule:
    """Converts abstract tasks into concrete, executable commands.

    Wraps the LLM with the command-generation prompt and parses the
    structured ``TOOL: ... | ARGS: ... | EXPECT: ...`` lines (or, as a
    fallback, bare shell/Metasploit lines) out of the response.
    """

    def __init__(self, llm):
        # llm: chat client exposing clear_history() and chat(...).
        self.llm = llm

    def generate(self, task_description: str, target: str,
                 context: str = "") -> dict:
        """Generate executable commands for a task.

        Returns a dict with:
          'commands'     -- list of {'tool', 'args', 'expect'} dicts
          'fallback'     -- single-line fallback suggestion (may be empty)
          'raw_response' -- unmodified LLM output (or error text)
        """
        message = f"Target: {target}\nTask: {task_description}"
        if context:
            message += f"\n\nContext: {context}"

        self.llm.clear_history()
        try:
            response = self.llm.chat(
                message,
                system_prompt=GENERATION_SYSTEM_PROMPT,
                temperature=0.2,
                max_tokens=512,
            )
        except Exception as e:
            # Surface the failure to the caller instead of raising; the
            # pipeline treats an empty command list as "nothing to run".
            return {
                'commands': [],
                'fallback': f"Generation error: {e}",
                'raw_response': str(e),
            }

        commands = self._parse_commands(response)
        fallback = ""
        m = re.search(r'FALLBACK:\s*(.+)', response, re.IGNORECASE | re.DOTALL)
        if m:
            # DOTALL lets the match run to end-of-text; keep only the first line.
            fallback = m.group(1).strip().split('\n')[0]

        return {
            'commands': commands,
            'fallback': fallback,
            'raw_response': response,
        }

    def _parse_commands(self, response: str) -> List[dict]:
        """Extract commands from an LLM response.

        Primary format is 'TOOL: name | ARGS: {json} | EXPECT: text'.
        If no structured lines are found, falls back to scanning for
        bare shell (nmap/nikto/...) or Metasploit (use/run/set) lines.
        """
        # Hoisted out of the match loop: previously re-imported per match.
        import json

        commands = []

        # Parse structured TOOL: ... | ARGS: ... | EXPECT: ... format
        for m in re.finditer(
            r'TOOL:\s*(\w+)\s*\|\s*ARGS:\s*(\{[^}]+\})\s*\|\s*EXPECT:\s*(.+)',
            response, re.IGNORECASE
        ):
            tool_name = m.group(1).strip()
            args_str = m.group(2).strip()
            expect = m.group(3).strip()

            try:
                args = json.loads(args_str)
            except json.JSONDecodeError:
                # LLMs often emit single-quoted pseudo-JSON; retry after a
                # naive quote swap before giving up and passing it raw.
                try:
                    args = json.loads(args_str.replace("'", '"'))
                except json.JSONDecodeError:
                    args = {'raw': args_str}

            commands.append({
                'tool': tool_name,
                'args': args,
                'expect': expect,
            })

        # Fallback: try to find shell commands or MSF commands
        if not commands:
            for line in response.split('\n'):
                line = line.strip()
                # Detect nmap/shell commands
                if re.match(r'^(nmap|nikto|gobuster|curl|wget|nc|netcat)\s', line):
                    commands.append({
                        'tool': 'shell',
                        'args': {'command': line},
                        'expect': 'Check output for results',
                    })
                # Detect MSF use/run commands
                elif re.match(r'^(use |run |set )', line, re.IGNORECASE):
                    commands.append({
                        'tool': 'msf_console',
                        'args': {'command': line},
                        'expect': 'Check output for results',
                    })

        return commands
|
||||
|
||||
|
||||
# ─── Pipeline Orchestrator ────────────────────────────────────────────
|
||||
|
||||
class PentestPipeline:
    """Orchestrates the three-module parse -> reason -> generate pipeline.

    Each cycle feeds raw tool output through the ParsingModule, updates
    the PentestTree via the ReasoningModule, then asks the
    GenerationModule for concrete commands for the next task.
    """

    def __init__(self, llm, target: str, tree: PentestTree = None):
        self.llm = llm
        self.target = target
        # Reuse a caller-supplied tree (e.g. a resumed session) or start fresh.
        self.tree = tree or PentestTree(target)
        self.parser = ParsingModule(llm)
        self.reasoner = ReasoningModule(llm, self.tree)
        self.generator = GenerationModule(llm)
        self.history: List[dict] = []

    def process_output(self, raw_output: str,
                       source_type: str = "auto") -> dict:
        """Full pipeline: parse -> reason -> generate.

        Returns dict with 'parsed', 'reasoning', 'commands', 'fallback'
        and 'next_task'.
        """
        # Step 1: Parse raw tool output into structured findings.
        parsed = self.parser.parse(raw_output, source_type)

        # Step 2: Update the task tree and pick the next task.
        reasoning = self.reasoner.reason(parsed)

        # Step 3: Generate commands for the next task (if one was chosen).
        generated = {'commands': [], 'fallback': ''}
        if reasoning.get('next_task'):
            # Build context from recent findings
            context = parsed.get('summary', '')
            generated = self.generator.generate(
                reasoning['next_task'],
                self.target,
                context=context,
            )

        result = {
            'parsed': parsed,
            'reasoning': reasoning,
            'commands': generated.get('commands', []),
            'fallback': generated.get('fallback', ''),
            'next_task': reasoning.get('next_task', ''),
        }

        # Keep a compact audit trail (summaries/counts, not full outputs).
        self.history.append({
            'timestamp': datetime.now().isoformat(),
            'result': {
                'parsed_summary': parsed.get('summary', ''),
                'findings_count': len(parsed.get('findings', [])),
                'next_task': reasoning.get('next_task', ''),
                'commands_count': len(generated.get('commands', [])),
            }
        })

        return result

    def get_initial_plan(self) -> dict:
        """Generate the initial pentest plan for the target.

        Populates the tree with the planned tasks and returns a dict with
        'tasks', 'first_action', 'reasoning' and 'commands'.
        """
        prompt = INITIAL_PLAN_PROMPT.format(target=self.target)

        self.llm.clear_history()
        try:
            response = self.llm.chat(
                prompt,
                system_prompt=REASONING_SYSTEM_PROMPT,
                temperature=0.3,
                max_tokens=1024,
            )
        except Exception as e:
            return {
                'tasks': [],
                'first_action': f"Error: {e}",
                'reasoning': str(e),
            }

        # Parse numbered task lines of the form "<n>. <type> | <prio> | <label>"
        tasks = []
        for m in re.finditer(
            r'(\d+)\.\s*(\w+)\s*\|\s*(\d)\s*\|\s*(.+)',
            response
        ):
            ntype_str = m.group(2).strip()
            ntype = self.reasoner._map_node_type(ntype_str)
            tasks.append({
                'node_type': ntype,
                'priority': int(m.group(3)),
                'label': m.group(4).strip(),
            })

        # Add tasks to tree under the matching top-level branch, if any.
        for task in tasks:
            parent_id = None
            for root_id in self.tree.root_nodes:
                root = self.tree.get_node(root_id)
                if root and root.node_type == task['node_type']:
                    parent_id = root_id
                    break
            self.tree.add_node(
                label=task['label'],
                node_type=task['node_type'],
                parent_id=parent_id,
                priority=task['priority'],
            )

        # Parse first action
        first_action = ""
        m = re.search(r'FIRST_ACTION:\s*(.+)', response, re.IGNORECASE)
        if m:
            first_action = m.group(1).strip()

        reasoning = ""
        m = re.search(r'REASONING:\s*(.+)', response, re.IGNORECASE)
        if m:
            reasoning = m.group(1).strip()

        # Generate commands for first action
        commands = []
        if first_action:
            gen = self.generator.generate(first_action, self.target)
            commands = gen.get('commands', [])

        return {
            'tasks': tasks,
            'first_action': first_action,
            'reasoning': reasoning,
            'commands': commands,
        }

    def inject_information(self, info: str, source: str = "manual") -> dict:
        """Inject external information (e.g. a manual finding) and get
        updated recommendations by running the full pipeline on it."""
        # The ParsingModule structures the raw text itself, so no
        # pre-parsing is needed here (a previously built dict was unused).
        return self.process_output(info, source_type=source)

    def discuss(self, question: str) -> str:
        """Answer an ad-hoc question with tree context.

        Does not mutate the tree. Returns the LLM reply, or an error
        string if the chat call fails.
        """
        tree_summary = self.tree.render_summary()
        prompt = DISCUSS_SYSTEM_PROMPT.format(
            target=self.target,
            tree_summary=tree_summary,
        )
        self.llm.clear_history()
        try:
            return self.llm.chat(
                question,
                system_prompt=prompt,
                temperature=0.5,
                max_tokens=1024,
            )
        except Exception as e:
            return f"Error: {e}"
|
||||
279
core/pentest_session.py
Normal file
279
core/pentest_session.py
Normal file
@@ -0,0 +1,279 @@
|
||||
"""
|
||||
AUTARCH Pentest Session Manager
|
||||
Save and resume penetration testing sessions with full state persistence.
|
||||
"""
|
||||
|
||||
import json
|
||||
import re
|
||||
from enum import Enum
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Optional, List, Dict, Any
|
||||
|
||||
|
||||
from .pentest_tree import PentestTree, NodeStatus
|
||||
|
||||
|
||||
class PentestSessionState(Enum):
    """Lifecycle states for a pentest session (persisted as the string value)."""
    IDLE = "idle"            # created but not yet started
    RUNNING = "running"      # actively executing pipeline steps
    PAUSED = "paused"        # saved and suspended; resumable
    COMPLETED = "completed"  # finished normally
    ERROR = "error"          # aborted due to a recorded error
||||
|
||||
|
||||
@dataclass
class SessionEvent:
    """A single timestamped event in the session timeline."""
    timestamp: str
    event_type: str
    data: dict

    def to_dict(self) -> dict:
        """Serialize to a plain dict for JSON persistence."""
        return dict(
            timestamp=self.timestamp,
            event_type=self.event_type,
            data=self.data,
        )

    @classmethod
    def from_dict(cls, data: dict) -> 'SessionEvent':
        """Rebuild an event from its serialized form; 'data' may be absent."""
        payload = data.get('data', {})
        return cls(data['timestamp'], data['event_type'], payload)
|
||||
|
||||
|
||||
class PentestSession:
    """Manages a single penetration testing session.

    Persists the full state (task tree, event timeline, findings,
    pipeline history, notes) as one JSON file per session under
    <data_dir>/pentest_sessions/, so sessions can be paused and resumed.
    """

    @classmethod
    def _get_dir(cls):
        """Return the session storage directory, creating it on first use."""
        from core.paths import get_data_dir
        d = get_data_dir() / "pentest_sessions"
        d.mkdir(parents=True, exist_ok=True)
        return d

    def __init__(self, target: str, session_id: str = None):
        self.session_id = session_id or self._generate_id(target)
        self.target = target
        self.state = PentestSessionState.IDLE
        self.tree = PentestTree(target)
        self.events: List[SessionEvent] = []       # chronological timeline
        self.findings: List[Dict[str, Any]] = []   # key findings (see add_finding)
        self.pipeline_history: List[dict] = []     # parse/reason/generate cycles
        self.notes: str = ""
        self.step_count: int = 0
        now = datetime.now().isoformat()
        self.created_at = now
        self.updated_at = now

    @staticmethod
    def _generate_id(target: str) -> str:
        """Generate a filesystem-safe session ID from target and timestamp."""
        safe = re.sub(r'[^a-zA-Z0-9]', '_', target)[:30]
        ts = datetime.now().strftime('%Y%m%d_%H%M%S')
        return f"{safe}_{ts}"

    def start(self):
        """Initialize a new session: seed standard tree branches and persist."""
        self.state = PentestSessionState.RUNNING
        self.tree.initialize_standard_branches()
        self.log_event('state_change', {'from': 'idle', 'to': 'running'})
        self.save()

    def pause(self):
        """Pause the session and save state."""
        prev = self.state.value
        self.state = PentestSessionState.PAUSED
        self.log_event('state_change', {'from': prev, 'to': 'paused'})
        self.save()

    def resume(self):
        """Resume a paused session."""
        prev = self.state.value
        self.state = PentestSessionState.RUNNING
        self.log_event('state_change', {'from': prev, 'to': 'running'})
        self.save()

    def complete(self, summary: str = ""):
        """Mark session as completed, recording an optional summary."""
        prev = self.state.value
        self.state = PentestSessionState.COMPLETED
        self.log_event('state_change', {
            'from': prev,
            'to': 'completed',
            'summary': summary,
        })
        self.save()

    def set_error(self, error_msg: str):
        """Mark session as errored, recording the error message."""
        prev = self.state.value
        self.state = PentestSessionState.ERROR
        self.log_event('state_change', {
            'from': prev,
            'to': 'error',
            'error': error_msg,
        })
        self.save()

    def log_event(self, event_type: str, data: dict):
        """Append an event to the timeline and bump the updated timestamp."""
        event = SessionEvent(
            timestamp=datetime.now().isoformat(),
            event_type=event_type,
            data=data,
        )
        self.events.append(event)
        self.updated_at = event.timestamp

    def log_pipeline_result(self, parsed: str, reasoning: str, actions: list):
        """Record one pipeline execution cycle and advance the step counter."""
        self.pipeline_history.append({
            'timestamp': datetime.now().isoformat(),
            'step': self.step_count,
            'parsed_input': parsed,
            'reasoning': reasoning,
            'generated_actions': actions,
        })
        self.step_count += 1

    def add_finding(self, title: str, description: str,
                    severity: str = "medium", node_id: str = None):
        """Add a key finding, optionally linked to a tree node."""
        self.findings.append({
            'timestamp': datetime.now().isoformat(),
            'severity': severity,
            'title': title,
            'description': description,
            'node_id': node_id,
        })

    def save(self) -> str:
        """Save session to JSON file. Returns filepath."""
        # _get_dir() already creates the directory; no extra mkdir needed.
        filepath = self._get_dir() / f"{self.session_id}.json"

        data = {
            'session_id': self.session_id,
            'target': self.target,
            'state': self.state.value,
            'created_at': self.created_at,
            'updated_at': self.updated_at,
            'notes': self.notes,
            'step_count': self.step_count,
            'tree': self.tree.to_dict(),
            'events': [e.to_dict() for e in self.events],
            'findings': self.findings,
            'pipeline_history': self.pipeline_history,
        }

        # Fixed encoding so session files are portable across platforms.
        with open(filepath, 'w', encoding='utf-8') as f:
            json.dump(data, f, indent=2)

        return str(filepath)

    @classmethod
    def load_session(cls, session_id: str) -> 'PentestSession':
        """Load a session from file.

        Raises FileNotFoundError if no file exists for session_id.
        """
        filepath = cls._get_dir() / f"{session_id}.json"
        if not filepath.exists():
            raise FileNotFoundError(f"Session not found: {session_id}")

        with open(filepath, 'r', encoding='utf-8') as f:
            data = json.load(f)

        session = cls(target=data['target'], session_id=data['session_id'])
        session.state = PentestSessionState(data['state'])
        session.created_at = data['created_at']
        session.updated_at = data['updated_at']
        session.notes = data.get('notes', '')
        session.step_count = data.get('step_count', 0)
        session.tree = PentestTree.from_dict(data['tree'])
        session.events = [SessionEvent.from_dict(e) for e in data.get('events', [])]
        session.findings = data.get('findings', [])
        session.pipeline_history = data.get('pipeline_history', [])
        return session

    @classmethod
    def list_sessions(cls) -> List[Dict[str, Any]]:
        """List all saved sessions with summary info, newest first.

        Malformed or incomplete session files are skipped.
        """
        sessions = []
        # _get_dir() guarantees the directory exists.
        for f in sorted(cls._get_dir().glob("*.json"), key=lambda p: p.stat().st_mtime, reverse=True):
            try:
                with open(f, 'r', encoding='utf-8') as fh:
                    data = json.load(fh)
                stats = {}
                if 'tree' in data and 'nodes' in data['tree']:
                    nodes = data['tree']['nodes']
                    stats = {
                        'total': len(nodes),
                        'todo': sum(1 for n in nodes.values() if n.get('status') == 'todo'),
                        'completed': sum(1 for n in nodes.values() if n.get('status') == 'completed'),
                    }
                sessions.append({
                    'session_id': data['session_id'],
                    'target': data['target'],
                    'state': data['state'],
                    'created': data['created_at'],
                    'updated': data['updated_at'],
                    'steps': data.get('step_count', 0),
                    'findings': len(data.get('findings', [])),
                    'tree_stats': stats,
                })
            except (json.JSONDecodeError, KeyError):
                continue
        return sessions

    def delete(self) -> bool:
        """Delete this session's file. Returns True if a file was removed."""
        filepath = self._get_dir() / f"{self.session_id}.json"
        if filepath.exists():
            filepath.unlink()
            return True
        return False

    def export_report(self) -> str:
        """Generate a human-readable text summary report of the session."""
        stats = self.tree.get_stats()
        lines = [
            "=" * 60,
            "AUTARCH Pentest Session Report",
            "=" * 60,
            f"Target: {self.target}",
            f"Session: {self.session_id}",
            f"State: {self.state.value}",
            f"Started: {self.created_at}",
            f"Updated: {self.updated_at}",
            f"Steps: {self.step_count}",
            "",
            "--- Task Tree ---",
            f"Total nodes: {stats['total']}",
            f" Completed: {stats.get('completed', 0)}",
            f" Todo: {stats.get('todo', 0)}",
            f" Active: {stats.get('in_progress', 0)}",
            f" N/A: {stats.get('not_applicable', 0)}",
            "",
            self.tree.render_text(),
            "",
        ]

        if self.findings:
            lines.append("--- Findings ---")
            # 'finding' instead of 'f' to avoid shadowing common file-handle names.
            for i, finding in enumerate(self.findings, 1):
                sev = finding.get('severity', 'medium').upper()
                lines.append(f" [{i}] [{sev}] {finding['title']}")
                lines.append(f" {finding['description']}")
            lines.append("")

        if self.notes:
            lines.append("--- Notes ---")
            lines.append(self.notes)
            lines.append("")

        lines.append("=" * 60)
        return "\n".join(lines)
|
||||
350
core/pentest_tree.py
Normal file
350
core/pentest_tree.py
Normal file
@@ -0,0 +1,350 @@
|
||||
"""
|
||||
AUTARCH Penetration Testing Tree (PTT)
|
||||
Hierarchical task tracker for structured penetration testing workflows.
|
||||
Based on PentestGPT's USENIX paper methodology.
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from enum import Enum
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime
|
||||
from typing import Optional, List, Dict, Any
|
||||
|
||||
|
||||
class NodeStatus(Enum):
    """Work status of a tree node (serialized by its string value)."""
    TODO = "todo"                      # planned but not started
    IN_PROGRESS = "in_progress"        # currently being worked
    COMPLETED = "completed"            # finished with findings recorded
    NOT_APPLICABLE = "not_applicable"  # ruled out for this target
||||
|
||||
|
||||
class PTTNodeType(Enum):
    """Category of a pentest task, aligned with MITRE ATT&CK-style phases."""
    RECONNAISSANCE = "reconnaissance"
    INITIAL_ACCESS = "initial_access"
    PRIVILEGE_ESCALATION = "privilege_escalation"
    LATERAL_MOVEMENT = "lateral_movement"
    PERSISTENCE = "persistence"
    CREDENTIAL_ACCESS = "credential_access"
    EXFILTRATION = "exfiltration"
    CUSTOM = "custom"  # fallback for types not in the standard set
|
||||
|
||||
|
||||
@dataclass
class PTTNode:
    """A single node in the Penetration Testing Tree."""
    id: str
    label: str
    node_type: PTTNodeType
    status: NodeStatus = NodeStatus.TODO
    parent_id: Optional[str] = None
    children: List[str] = field(default_factory=list)
    details: str = ""
    tool_output: Optional[str] = None
    findings: List[str] = field(default_factory=list)
    priority: int = 3
    created_at: str = ""
    updated_at: str = ""

    def __post_init__(self):
        # Stamp creation/update times unless deserialization supplied them.
        now = datetime.now().isoformat()
        self.created_at = self.created_at or now
        self.updated_at = self.updated_at or now

    def to_dict(self) -> dict:
        """Serialize to a JSON-friendly dict (enums as values, lists copied)."""
        return {
            'id': self.id,
            'label': self.label,
            'node_type': self.node_type.value,
            'status': self.status.value,
            'parent_id': self.parent_id,
            'children': list(self.children),
            'details': self.details,
            'tool_output': self.tool_output,
            'findings': list(self.findings),
            'priority': self.priority,
            'created_at': self.created_at,
            'updated_at': self.updated_at,
        }

    @classmethod
    def from_dict(cls, data: dict) -> 'PTTNode':
        """Rebuild a node from its serialized form; missing keys use defaults."""
        optional_defaults = {
            'parent_id': None,
            'children': [],
            'details': '',
            'tool_output': None,
            'findings': [],
            'priority': 3,
            'created_at': '',
            'updated_at': '',
        }
        kwargs = {key: data.get(key, default)
                  for key, default in optional_defaults.items()}
        return cls(
            id=data['id'],
            label=data['label'],
            node_type=PTTNodeType(data['node_type']),
            status=NodeStatus(data['status']),
            **kwargs,
        )
|
||||
|
||||
|
||||
# Terminal display symbols for each node status (used by tree rendering).
_STATUS_SYMBOLS = {
    NodeStatus.TODO: '[ ]',
    NodeStatus.IN_PROGRESS: '[~]',
    NodeStatus.COMPLETED: '[x]',
    NodeStatus.NOT_APPLICABLE: '[-]',
}
|
||||
|
||||
|
||||
class PentestTree:
    """Penetration Testing Tree - hierarchical task tracker.

    Nodes are stored flat in a dict keyed by short ID; hierarchy is kept
    via each node's parent_id/children and the root_nodes list.
    """

    def __init__(self, target: str):
        self.target = target
        self.nodes: Dict[str, PTTNode] = {}
        self.root_nodes: List[str] = []
        now = datetime.now().isoformat()
        self.created_at = now
        self.updated_at = now

    def add_node(
        self,
        label: str,
        node_type: PTTNodeType,
        parent_id: Optional[str] = None,
        details: str = "",
        priority: int = 3,
        status: NodeStatus = NodeStatus.TODO,
    ) -> str:
        """Add a node to the tree. Returns the new node's ID.

        If parent_id refers to an unknown node, the new node is attached
        at the root level (previously such nodes were silently orphaned
        and never appeared in any rendering).
        """
        node_id = str(uuid.uuid4())[:8]
        node = PTTNode(
            id=node_id,
            label=label,
            node_type=node_type,
            status=status,
            parent_id=parent_id,
            details=details,
            priority=priority,
        )

        self.nodes[node_id] = node

        if parent_id and parent_id in self.nodes:
            self.nodes[parent_id].children.append(node_id)
        else:
            # No parent, or an unknown parent reference: keep the node
            # reachable by treating it as a root.
            node.parent_id = None
            self.root_nodes.append(node_id)

        self.updated_at = datetime.now().isoformat()
        return node_id

    def update_node(
        self,
        node_id: str,
        status: Optional[NodeStatus] = None,
        details: Optional[str] = None,
        tool_output: Optional[str] = None,
        findings: Optional[List[str]] = None,
        priority: Optional[int] = None,
        label: Optional[str] = None,
    ) -> bool:
        """Update a node's properties. Returns True if found and updated.

        Note: 'findings' are appended to the existing list, not replaced.
        """
        node = self.nodes.get(node_id)
        if not node:
            return False

        if status is not None:
            node.status = status
        if details is not None:
            node.details = details
        if tool_output is not None:
            node.tool_output = tool_output
        if findings is not None:
            node.findings.extend(findings)
        if priority is not None:
            node.priority = priority
        if label is not None:
            node.label = label

        node.updated_at = datetime.now().isoformat()
        self.updated_at = node.updated_at
        return True

    def delete_node(self, node_id: str) -> bool:
        """Delete a node and all its children recursively."""
        node = self.nodes.get(node_id)
        if not node:
            return False

        # Recursively delete children (copy: recursion mutates the list).
        for child_id in node.children.copy():
            self.delete_node(child_id)

        # Remove from parent's children list
        if node.parent_id and node.parent_id in self.nodes:
            parent = self.nodes[node.parent_id]
            if node_id in parent.children:
                parent.children.remove(node_id)

        # Remove from root nodes if applicable
        if node_id in self.root_nodes:
            self.root_nodes.remove(node_id)

        del self.nodes[node_id]
        self.updated_at = datetime.now().isoformat()
        return True

    def get_node(self, node_id: str) -> Optional[PTTNode]:
        """Return the node with this ID, or None."""
        return self.nodes.get(node_id)

    def get_next_todo(self) -> Optional[PTTNode]:
        """Get the highest priority TODO node (lowest number = most urgent)."""
        todos = [n for n in self.nodes.values() if n.status == NodeStatus.TODO]
        if not todos:
            return None
        return min(todos, key=lambda n: n.priority)

    def get_all_by_status(self, status: NodeStatus) -> List[PTTNode]:
        """Return all nodes currently in the given status."""
        return [n for n in self.nodes.values() if n.status == status]

    def get_subtree(self, node_id: str) -> List[PTTNode]:
        """Get all nodes in a subtree (including the root), depth-first."""
        node = self.nodes.get(node_id)
        if not node:
            return []
        result = [node]
        for child_id in node.children:
            result.extend(self.get_subtree(child_id))
        return result

    def find_node_by_label(self, label: str) -> Optional[PTTNode]:
        """Find the first node whose label contains 'label' (case-insensitive)."""
        label_lower = label.lower()
        for node in self.nodes.values():
            if label_lower in node.label.lower():
                return node
        return None

    def get_stats(self) -> Dict[str, int]:
        """Get tree statistics: total plus a count per status value."""
        stats = {'total': len(self.nodes)}
        for status in NodeStatus:
            stats[status.value] = len(self.get_all_by_status(status))
        return stats

    def render_text(self) -> str:
        """Render full tree as indented text for terminal display."""
        if not self.root_nodes:
            return " (empty tree)"

        lines = [f"Target: {self.target}"]
        lines.append("")

        for root_id in self.root_nodes:
            self._render_node(root_id, lines, indent=0)

        return "\n".join(lines)

    def _render_node(self, node_id: str, lines: List[str], indent: int):
        """Append one node (and recursively its children) to lines."""
        node = self.nodes.get(node_id)
        if not node:
            return

        prefix = " " * indent
        symbol = _STATUS_SYMBOLS.get(node.status, '[ ]')
        # Only call out non-default priorities to keep the listing clean.
        priority_str = f" P{node.priority}" if node.priority != 3 else ""
        lines.append(f"{prefix}{symbol} {node.label}{priority_str}")

        if node.findings:
            # Cap per-node findings shown to keep the display compact.
            for finding in node.findings[:3]:
                lines.append(f"{prefix} -> {finding}")

        for child_id in node.children:
            self._render_node(child_id, lines, indent + 1)

    def render_summary(self) -> str:
        """Render compact summary for LLM context injection.

        Designed to fit within tight token budgets (4096 ctx).
        Only shows TODO and IN_PROGRESS nodes with minimal detail.
        """
        stats = self.get_stats()
        lines = [
            f"Target: {self.target}",
            f"Nodes: {stats['total']} total, {stats['todo']} todo, "
            f"{stats['completed']} done, {stats['in_progress']} active",
        ]

        # Show active and todo nodes only
        active = self.get_all_by_status(NodeStatus.IN_PROGRESS)
        todos = sorted(
            self.get_all_by_status(NodeStatus.TODO),
            key=lambda n: n.priority
        )

        if active:
            lines.append("Active:")
            for n in active:
                lines.append(f" [{n.id}] {n.label}")

        if todos:
            lines.append("Todo:")
            for n in todos[:5]:
                lines.append(f" [{n.id}] P{n.priority} {n.label}")
            if len(todos) > 5:
                lines.append(f" ... and {len(todos) - 5} more")

        # Most recent findings across the whole tree (insertion order).
        all_findings = [f for node in self.nodes.values() for f in node.findings]
        if all_findings:
            lines.append("Key findings:")
            for f in all_findings[-5:]:
                lines.append(f" - {f}")

        return "\n".join(lines)

    def initialize_standard_branches(self):
        """Create standard MITRE ATT&CK-aligned top-level branches."""
        branches = [
            ("Reconnaissance", PTTNodeType.RECONNAISSANCE, 1,
             "Information gathering and target enumeration"),
            ("Initial Access", PTTNodeType.INITIAL_ACCESS, 2,
             "Gaining initial foothold on target"),
            ("Privilege Escalation", PTTNodeType.PRIVILEGE_ESCALATION, 3,
             "Escalating from initial access to higher privileges"),
            ("Lateral Movement", PTTNodeType.LATERAL_MOVEMENT, 4,
             "Moving to other systems in the network"),
            ("Credential Access", PTTNodeType.CREDENTIAL_ACCESS, 3,
             "Obtaining credentials and secrets"),
            ("Persistence", PTTNodeType.PERSISTENCE, 5,
             "Maintaining access to compromised systems"),
        ]

        for label, ntype, priority, details in branches:
            self.add_node(
                label=label,
                node_type=ntype,
                priority=priority,
                details=details,
            )

    def to_dict(self) -> dict:
        """Serialize the whole tree for JSON persistence."""
        return {
            'target': self.target,
            'created_at': self.created_at,
            'updated_at': self.updated_at,
            'root_nodes': self.root_nodes.copy(),
            'nodes': {nid: n.to_dict() for nid, n in self.nodes.items()},
        }

    @classmethod
    def from_dict(cls, data: dict) -> 'PentestTree':
        """Rebuild a tree from its serialized form."""
        tree = cls(target=data['target'])
        tree.created_at = data.get('created_at', '')
        tree.updated_at = data.get('updated_at', '')
        tree.root_nodes = data.get('root_nodes', [])
        for nid, ndata in data.get('nodes', {}).items():
            tree.nodes[nid] = PTTNode.from_dict(ndata)
        return tree
|
||||
1137
core/report_generator.py
Normal file
1137
core/report_generator.py
Normal file
File diff suppressed because it is too large
Load Diff
493
core/revshell.py
Normal file
493
core/revshell.py
Normal file
@@ -0,0 +1,493 @@
|
||||
"""
|
||||
AUTARCH Reverse Shell Listener
|
||||
Accepts incoming reverse shell connections from the Archon Android companion app.
|
||||
|
||||
Protocol: JSON over TCP, newline-delimited. Matches ArchonShell.java.
|
||||
|
||||
Auth handshake:
|
||||
Client → Server: {"type":"auth","token":"xxx","device":"model","android":"14","uid":2000}
|
||||
Server → Client: {"type":"auth_ok"} or {"type":"auth_fail","reason":"..."}
|
||||
|
||||
Command flow:
|
||||
Server → Client: {"type":"cmd","cmd":"ls","timeout":30,"id":"abc"}
|
||||
Client → Server: {"type":"result","id":"abc","stdout":"...","stderr":"...","exit_code":0}
|
||||
|
||||
Special commands: __sysinfo__, __packages__, __screenshot__, __download__, __upload__,
|
||||
__processes__, __netstat__, __dumplog__, __disconnect__
|
||||
"""
|
||||
|
||||
import base64
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import socket
|
||||
import threading
|
||||
import time
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Optional, Dict, List, Any, Tuple
|
||||
|
||||
from core.paths import get_data_dir
|
||||
|
||||
logger = logging.getLogger('autarch.revshell')
|
||||
|
||||
|
||||
class RevShellSession:
    """A live reverse shell session with a connected Archon device.

    Speaks the newline-delimited JSON protocol described in the module
    docstring. Every request/response round-trip is serialized through
    ``_lock`` so only one command is in flight at a time; any transport
    failure marks the session dead (``alive`` becomes False).
    """

    def __init__(self, sock: socket.socket, device_info: dict, session_id: str):
        self.socket = sock
        self.device_info = device_info
        self.session_id = session_id
        self.connected_at = datetime.now()
        # Audit trail of executed commands (time, cmd, exit_code).
        self.command_log: List[dict] = []
        self._lock = threading.Lock()
        # Buffered text wrappers over the socket for line-oriented JSON I/O.
        self._reader = sock.makefile('r', encoding='utf-8', errors='replace')
        self._writer = sock.makefile('w', encoding='utf-8', errors='replace')
        self._alive = True
        # Monotonic counter used to build per-command ids.
        self._cmd_counter = 0

    @property
    def alive(self) -> bool:
        """Whether the transport is still believed usable."""
        return self._alive

    @property
    def device_name(self) -> str:
        """Device model reported during the auth handshake."""
        return self.device_info.get('device', 'unknown')

    @property
    def android_version(self) -> str:
        """Android version string reported during the auth handshake."""
        return self.device_info.get('android', '?')

    @property
    def uid(self) -> int:
        """Numeric UID the remote shell runs as (-1 if unknown)."""
        return self.device_info.get('uid', -1)

    @property
    def uptime(self) -> float:
        """Seconds elapsed since the session connected."""
        return (datetime.now() - self.connected_at).total_seconds()

    def execute(self, command: str, timeout: int = 30) -> dict:
        """Send a command and wait for result. Returns {stdout, stderr, exit_code}.

        ``exit_code`` is -1 on any transport failure, in which case the
        session is marked dead.
        """
        with self._lock:
            if not self._alive:
                return {'stdout': '', 'stderr': 'Session disconnected', 'exit_code': -1}

            self._cmd_counter += 1
            cmd_id = f"cmd_{self._cmd_counter}"
            request = json.dumps({
                'type': 'cmd',
                'cmd': command,
                'timeout': timeout,
                'id': cmd_id
            })

            try:
                self._writer.write(request + '\n')
                self._writer.flush()

                # Give the device extra headroom beyond its own command timeout.
                self.socket.settimeout(timeout + 10)
                line = self._reader.readline()
                if not line:
                    self._alive = False
                    return {'stdout': '', 'stderr': 'Connection closed', 'exit_code': -1}

                reply = json.loads(line)

                # Record what ran and how it exited.
                self.command_log.append({
                    'time': datetime.now().isoformat(),
                    'cmd': command,
                    'exit_code': reply.get('exit_code', -1)
                })

                return {
                    'stdout': reply.get('stdout', ''),
                    'stderr': reply.get('stderr', ''),
                    'exit_code': reply.get('exit_code', -1)
                }

            except (socket.timeout, OSError, json.JSONDecodeError) as e:
                logger.error(f"Session {self.session_id}: execute error: {e}")
                self._alive = False
                return {'stdout': '', 'stderr': f'Communication error: {e}', 'exit_code': -1}

    def execute_special(self, command: str, **kwargs) -> dict:
        """Execute a special command with extra parameters.

        Unlike :meth:`execute`, the raw decoded JSON reply is returned
        unmodified and nothing is appended to ``command_log``.
        """
        with self._lock:
            if not self._alive:
                return {'stdout': '', 'stderr': 'Session disconnected', 'exit_code': -1}

            self._cmd_counter += 1
            cmd_id = f"cmd_{self._cmd_counter}"

            request = {'type': 'cmd', 'cmd': command, 'id': cmd_id, 'timeout': 60}
            request.update(kwargs)

            try:
                self._writer.write(json.dumps(request) + '\n')
                self._writer.flush()

                self.socket.settimeout(70)
                line = self._reader.readline()
                if not line:
                    self._alive = False
                    return {'stdout': '', 'stderr': 'Connection closed', 'exit_code': -1}

                return json.loads(line)

            except (socket.timeout, OSError, json.JSONDecodeError) as e:
                logger.error(f"Session {self.session_id}: special cmd error: {e}")
                self._alive = False
                return {'stdout': '', 'stderr': f'Communication error: {e}', 'exit_code': -1}

    def sysinfo(self) -> dict:
        """Get device system information."""
        return self.execute('__sysinfo__')

    def packages(self) -> dict:
        """List installed packages."""
        return self.execute('__packages__', timeout=30)

    def screenshot(self) -> Optional[bytes]:
        """Capture screenshot. Returns PNG bytes or None."""
        result = self.execute('__screenshot__', timeout=30)
        if result['exit_code'] != 0:
            return None
        try:
            # Payload arrives base64-encoded in stdout.
            return base64.b64decode(result['stdout'])
        except Exception:
            return None

    def download(self, remote_path: str) -> Optional[Tuple[bytes, str]]:
        """Download file from device. Returns (data, filename) or None."""
        result = self.execute_special('__download__', path=remote_path)
        if result.get('exit_code', -1) != 0:
            return None
        try:
            data = base64.b64decode(result.get('stdout', ''))
            filename = result.get('filename', os.path.basename(remote_path))
            return (data, filename)
        except Exception:
            return None

    def upload(self, local_path: str, remote_path: str) -> dict:
        """Upload file to device."""
        try:
            with open(local_path, 'rb') as f:
                data = base64.b64encode(f.read()).decode('ascii')
        except IOError as e:
            return {'stdout': '', 'stderr': f'Failed to read local file: {e}', 'exit_code': -1}

        return self.execute_special('__upload__', path=remote_path, data=data)

    def processes(self) -> dict:
        """List running processes."""
        return self.execute('__processes__', timeout=10)

    def netstat(self) -> dict:
        """Get network connections."""
        return self.execute('__netstat__', timeout=10)

    def dumplog(self, lines: int = 100) -> dict:
        """Get logcat output (capped at 5000 lines)."""
        return self.execute_special('__dumplog__', lines=min(lines, 5000))

    def ping(self) -> bool:
        """Send keepalive ping; returns True if the device answered 'pong'."""
        with self._lock:
            if not self._alive:
                return False
            try:
                self._writer.write('{"type":"ping"}\n')
                self._writer.flush()
                self.socket.settimeout(10)
                line = self._reader.readline()
                if not line:
                    self._alive = False
                    return False
                return json.loads(line).get('type') == 'pong'
            except Exception:
                self._alive = False
                return False

    def disconnect(self):
        """Gracefully disconnect the session (best effort, idempotent)."""
        with self._lock:
            if not self._alive:
                return
            try:
                self._writer.write('{"type":"disconnect"}\n')
                self._writer.flush()
            except Exception:
                pass
            self._alive = False
            try:
                self.socket.close()
            except Exception:
                pass

    def to_dict(self) -> dict:
        """Serialize session info for API responses."""
        return {
            'session_id': self.session_id,
            'device': self.device_name,
            'android': self.android_version,
            'uid': self.uid,
            'connected_at': self.connected_at.isoformat(),
            'uptime': int(self.uptime),
            'commands_executed': len(self.command_log),
            'alive': self._alive,
        }
|
||||
|
||||
|
||||
class RevShellListener:
    """TCP listener for incoming Archon reverse shell connections.

    Runs an accept thread that authenticates clients against ``auth_token``
    and a keepalive thread that prunes dead sessions. All mutation of
    ``self.sessions`` happens under ``self._lock``.
    """

    def __init__(self, host: str = '0.0.0.0', port: int = 17322,
                 auth_token: Optional[str] = None):
        self.host = host
        self.port = port
        # Generate a random token when the caller does not supply one.
        self.auth_token = auth_token or uuid.uuid4().hex[:32]
        self.sessions: Dict[str, RevShellSession] = {}
        self._server_socket: Optional[socket.socket] = None
        self._accept_thread: Optional[threading.Thread] = None
        self._keepalive_thread: Optional[threading.Thread] = None
        self._running = False
        self._lock = threading.Lock()

        # Data directory for screenshots, downloads, etc.
        self._data_dir = get_data_dir() / 'revshell'
        self._data_dir.mkdir(parents=True, exist_ok=True)

    @property
    def running(self) -> bool:
        """Whether the listener's background threads are active."""
        return self._running

    @property
    def active_sessions(self) -> List[RevShellSession]:
        """Sessions whose transport is still believed usable."""
        return [s for s in self.sessions.values() if s.alive]

    def start(self) -> Tuple[bool, str]:
        """Start listening for incoming reverse shell connections.

        Returns:
            (ok, message) tuple; ok is False if already running or the
            bind failed.
        """
        if self._running:
            return (False, 'Listener already running')

        try:
            self._server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self._server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            self._server_socket.settimeout(2.0)  # Accept timeout for clean shutdown
            self._server_socket.bind((self.host, self.port))
            self._server_socket.listen(5)
        except OSError as e:
            return (False, f'Failed to bind {self.host}:{self.port}: {e}')

        self._running = True

        self._accept_thread = threading.Thread(target=self._accept_loop, daemon=True)
        self._accept_thread.start()

        self._keepalive_thread = threading.Thread(target=self._keepalive_loop, daemon=True)
        self._keepalive_thread.start()

        logger.info(f"RevShell listener started on {self.host}:{self.port}")
        logger.info(f"Auth token: {self.auth_token}")
        return (True, f'Listening on {self.host}:{self.port}')

    def stop(self):
        """Stop listener and disconnect all sessions."""
        self._running = False

        # Disconnect all sessions (best effort).
        for session in list(self.sessions.values()):
            try:
                session.disconnect()
            except Exception:
                pass

        # Close server socket to break the accept loop.
        if self._server_socket:
            try:
                self._server_socket.close()
            except Exception:
                pass

        # Wait for background threads to wind down.
        if self._accept_thread:
            self._accept_thread.join(timeout=5)
        if self._keepalive_thread:
            self._keepalive_thread.join(timeout=5)

        logger.info("RevShell listener stopped")

    def get_session(self, session_id: str) -> Optional[RevShellSession]:
        """Get session by ID."""
        return self.sessions.get(session_id)

    def list_sessions(self) -> List[dict]:
        """List all sessions with their info."""
        return [s.to_dict() for s in self.sessions.values()]

    def remove_session(self, session_id: str):
        """Disconnect and remove a session."""
        # Fix: mutate the session table under the same lock used by
        # _handle_new_connection; the pop was previously unsynchronized.
        with self._lock:
            session = self.sessions.pop(session_id, None)
        if session:
            session.disconnect()

    def save_screenshot(self, session_id: str) -> Optional[str]:
        """Capture and save screenshot. Returns file path or None."""
        session = self.get_session(session_id)
        if not session or not session.alive:
            return None

        png_data = session.screenshot()
        if not png_data:
            return None

        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        filename = f'screenshot_{session.device_name}_{timestamp}.png'
        filepath = self._data_dir / filename
        filepath.write_bytes(png_data)
        return str(filepath)

    def save_download(self, session_id: str, remote_path: str) -> Optional[str]:
        """Download file from device and save locally. Returns local path or None."""
        session = self.get_session(session_id)
        if not session or not session.alive:
            return None

        result = session.download(remote_path)
        if not result:
            return None

        data, filename = result
        filepath = self._data_dir / filename
        filepath.write_bytes(data)
        return str(filepath)

    # ── Internal ────────────────────────────────────────────────────

    def _accept_loop(self):
        """Accept incoming connections in background thread."""
        while self._running:
            try:
                client_sock, addr = self._server_socket.accept()
                client_sock.settimeout(30)
                logger.info(f"Connection from {addr[0]}:{addr[1]}")

                # Handle auth in a separate thread to not block accept
                threading.Thread(
                    target=self._handle_new_connection,
                    args=(client_sock, addr),
                    daemon=True
                ).start()

            except socket.timeout:
                continue
            except OSError as e:
                if self._running:
                    # Fix: include the exception detail instead of a bare
                    # "Accept error" message.
                    logger.error(f"Accept error: {e}")
                break

    def _handle_new_connection(self, sock: socket.socket, addr: tuple):
        """Authenticate a new connection and register a session on success."""
        try:
            reader = sock.makefile('r', encoding='utf-8', errors='replace')
            writer = sock.makefile('w', encoding='utf-8', errors='replace')

            # Read auth message
            auth_line = reader.readline()
            if not auth_line:
                sock.close()
                return

            auth_msg = json.loads(auth_line)

            if auth_msg.get('type') != 'auth':
                writer.write('{"type":"auth_fail","reason":"Expected auth message"}\n')
                writer.flush()
                sock.close()
                return

            # Verify token
            if auth_msg.get('token') != self.auth_token:
                logger.warning(f"Auth failed from {addr[0]}:{addr[1]}")
                writer.write('{"type":"auth_fail","reason":"Invalid token"}\n')
                writer.flush()
                sock.close()
                return

            # Auth OK — create session
            writer.write('{"type":"auth_ok"}\n')
            writer.flush()

            session_id = uuid.uuid4().hex[:12]
            device_info = {
                'device': auth_msg.get('device', 'unknown'),
                'android': auth_msg.get('android', '?'),
                'uid': auth_msg.get('uid', -1),
                'remote_addr': f"{addr[0]}:{addr[1]}"
            }

            session = RevShellSession(sock, device_info, session_id)
            with self._lock:
                self.sessions[session_id] = session

            logger.info(f"Session {session_id}: {device_info['device']} "
                        f"(Android {device_info['android']}, UID {device_info['uid']})")

        except (json.JSONDecodeError, OSError) as e:
            logger.error(f"Auth error from {addr[0]}:{addr[1]}: {e}")
            try:
                sock.close()
            except Exception:
                pass

    def _keepalive_loop(self):
        """Periodically ping sessions and remove dead ones."""
        while self._running:
            time.sleep(30)
            dead = []
            for sid, session in list(self.sessions.items()):
                if not session.alive:
                    dead.append(sid)
                    continue
                # Ping to check liveness
                if not session.ping():
                    dead.append(sid)
                    logger.info(f"Session {sid} lost (keepalive failed)")

            # Fix: remove dead sessions under the lock, consistent with
            # _handle_new_connection / remove_session.
            with self._lock:
                for sid in dead:
                    self.sessions.pop(sid, None)
|
||||
|
||||
|
||||
# ── Singleton ───────────────────────────────────────────────────────
|
||||
|
||||
# Process-wide singleton; created lazily by get_listener().
_listener: Optional[RevShellListener] = None


def get_listener() -> RevShellListener:
    """Return the global RevShellListener, creating it on first use."""
    global _listener
    if _listener is None:
        _listener = RevShellListener()
    return _listener
|
||||
|
||||
|
||||
def start_listener(host: str = '0.0.0.0', port: int = 17322,
                   token: Optional[str] = None) -> Tuple[bool, str]:
    """Start (or restart) the global listener.

    Args:
        host: Interface to bind.
        port: TCP port to listen on.
        token: Auth token; a random one is generated when None.

    Returns:
        (ok, message) tuple from RevShellListener.start().
    """
    global _listener
    if _listener is not None:
        # Bug fix: replacing the singleton without stopping it leaked the
        # previous server socket and its background threads.
        try:
            _listener.stop()
        except Exception:
            pass
    _listener = RevShellListener(host=host, port=port, auth_token=token)
    return _listener.start()
|
||||
|
||||
|
||||
def stop_listener():
    """Stop and clear the global listener, if one exists."""
    global _listener
    if _listener is not None:
        _listener.stop()
        _listener = None
|
||||
450
core/rsf.py
Normal file
450
core/rsf.py
Normal file
@@ -0,0 +1,450 @@
|
||||
"""
|
||||
AUTARCH RouterSploit Framework Wrapper
|
||||
Low-level interface for RouterSploit module discovery, import, and execution.
|
||||
Direct Python import -- no RPC layer needed since RSF is pure Python.
|
||||
"""
|
||||
|
||||
import sys
|
||||
import os
|
||||
import re
|
||||
import threading
|
||||
import importlib
|
||||
from io import StringIO
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Optional, List, Dict, Tuple, Any
|
||||
from contextlib import contextmanager
|
||||
|
||||
from .config import get_config
|
||||
|
||||
|
||||
class RSFError(Exception):
    """Raised when a RouterSploit operation fails."""
|
||||
|
||||
|
||||
@dataclass
class RSFModuleInfo:
    """Metadata describing a single RouterSploit module."""
    name: str = ""
    path: str = ""
    description: str = ""
    authors: Tuple[str, ...] = ()
    devices: Tuple[str, ...] = ()
    references: Tuple[str, ...] = ()
    options: List[Dict[str, Any]] = field(default_factory=list)
    # One of: exploits, creds, scanners, payloads, encoders, generic
    module_type: str = ""
|
||||
|
||||
|
||||
class RSFManager:
    """Manager for RouterSploit framework operations.

    Handles sys.path setup, module discovery, dynamic import,
    option introspection, stdout capture, and execution.
    """

    def __init__(self):
        self._available = None      # cached importability result (None = unknown)
        self._module_index = None   # cached list of slash-style module paths
        self._path_added = False    # whether install_path was pushed onto sys.path

    def _ensure_path(self):
        """Add RSF install path to sys.path if not already present."""
        if self._path_added:
            return

        config = get_config()
        install_path = config.get('rsf', 'install_path', '')

        if install_path and install_path not in sys.path:
            sys.path.insert(0, install_path)
        self._path_added = True

    @property
    def is_available(self) -> bool:
        """Check if RouterSploit is importable. Caches result."""
        if self._available is not None:
            return self._available

        try:
            self._ensure_path()
            import routersploit  # noqa: F401 -- the import itself is the probe
            self._available = True
        except ImportError:
            self._available = False

        return self._available

    def reset_cache(self):
        """Reset cached state (availability, module index, path flag)."""
        self._available = None
        self._module_index = None
        self._path_added = False

    def index_all_modules(self) -> List[str]:
        """Discover all RSF modules. Returns list of slash-style module paths.

        Uses routersploit.core.exploit.utils.index_modules() internally.
        Results are cached after first call.

        Returns:
            List of module paths like 'exploits/routers/dlink/some_module'

        Raises:
            RSFError: If RSF is unavailable or indexing fails.
        """
        if self._module_index is not None:
            return self._module_index

        if not self.is_available:
            raise RSFError("RouterSploit is not available")

        try:
            self._ensure_path()
            from routersploit.core.exploit import utils

            # Modules live two levels above the utils module.
            modules_dir = os.path.join(
                os.path.dirname(utils.__file__),
                '..', '..', 'modules'
            )
            modules_dir = os.path.normpath(modules_dir)

            if not os.path.isdir(modules_dir):
                # Try from config path
                config = get_config()
                install_path = config.get('rsf', 'install_path', '')
                modules_dir = os.path.join(install_path, 'routersploit', 'modules')

            raw_index = utils.index_modules(modules_dir)

            # Convert dotted paths to slash paths for display.
            self._module_index = []
            for mod_path in raw_index:
                clean = mod_path
                # Remove 'routersploit.modules.' prefix if present
                for prefix in ('routersploit.modules.', 'modules.'):
                    if clean.startswith(prefix):
                        clean = clean[len(prefix):]
                self._module_index.append(clean.replace('.', '/'))

            return self._module_index

        except Exception as e:
            # Chain the original cause for debuggability.
            raise RSFError(f"Failed to index modules: {e}") from e

    def get_module_count(self) -> int:
        """Get total number of indexed modules (0 when RSF is unavailable)."""
        try:
            return len(self.index_all_modules())
        except RSFError:
            return 0

    def get_modules_by_type(self, module_type: str) -> List[str]:
        """Filter modules by type.

        Args:
            module_type: One of 'exploits', 'creds', 'scanners', 'payloads',
                'encoders', 'generic'

        Returns:
            List of matching module paths
        """
        all_modules = self.index_all_modules()
        return [m for m in all_modules if m.startswith(module_type + '/')]

    def search_modules(self, query: str) -> List[str]:
        """Search modules by substring match on path.

        Args:
            query: Search string (case-insensitive)

        Returns:
            List of matching module paths
        """
        all_modules = self.index_all_modules()
        query_lower = query.lower()
        return [m for m in all_modules if query_lower in m.lower()]

    def _dotted_path(self, slash_path: str) -> str:
        """Convert slash path to dotted import path.

        Args:
            slash_path: e.g. 'exploits/routers/dlink/some_module'

        Returns:
            Dotted path like 'routersploit.modules.exploits.routers.dlink.some_module'
        """
        clean = slash_path.strip('/')
        return f"routersploit.modules.{clean.replace('/', '.')}"

    def load_module(self, path: str) -> Tuple[Any, "RSFModuleInfo"]:
        """Load a RouterSploit module by path.

        Converts slash path to dotted import path, imports using
        import_exploit(), instantiates, and extracts metadata.

        Args:
            path: Module path like 'exploits/routers/dlink/some_module'

        Returns:
            Tuple of (module_instance, RSFModuleInfo)

        Raises:
            RSFError: If module cannot be loaded
        """
        if not self.is_available:
            raise RSFError("RouterSploit is not available")

        try:
            self._ensure_path()
            from routersploit.core.exploit.utils import import_exploit

            dotted = self._dotted_path(path)
            module_class = import_exploit(dotted)
            instance = module_class()

            # Extract __info__ dict; the RSF metaclass name-mangles it to
            # _ClassName__info__, so scan the instance attributes first.
            info_dict = {}
            for attr in dir(instance):
                if attr.endswith('__info__') or attr == '__info__':
                    try:
                        info_dict = getattr(instance, attr)
                        if isinstance(info_dict, dict):
                            break
                    except AttributeError:
                        continue

            # If not found via mangled name, try class hierarchy.
            if not info_dict:
                for klass in type(instance).__mro__:
                    mangled = f"_{klass.__name__}__info__"
                    if hasattr(klass, mangled):
                        info_dict = getattr(klass, mangled)
                        if isinstance(info_dict, dict):
                            break

            options = self.get_module_options(instance)

            # Determine module type from the leading path segment.
            parts = path.split('/')
            module_type = parts[0] if parts else ""

            module_info = RSFModuleInfo(
                name=info_dict.get('name', path.split('/')[-1]),
                path=path,
                description=info_dict.get('description', ''),
                authors=info_dict.get('authors', ()),
                devices=info_dict.get('devices', ()),
                references=info_dict.get('references', ()),
                options=options,
                module_type=module_type,
            )

            return instance, module_info

        except Exception as e:
            raise RSFError(f"Failed to load module '{path}': {e}") from e

    def get_module_options(self, instance) -> List[Dict[str, Any]]:
        """Introspect Option descriptors on a module instance.

        Uses RSF's exploit_attributes metaclass aggregator to get
        option names, then reads descriptor properties for details.

        Args:
            instance: Instantiated RSF module

        Returns:
            List of dicts with keys: name, type, default, description,
            current, advanced
        """
        options = []

        # Try exploit_attributes first (set by metaclass).
        exploit_attrs = getattr(type(instance), 'exploit_attributes', {})

        if exploit_attrs:
            for name, attr_info in exploit_attrs.items():
                # attr_info is [display_value, description, advanced]
                display_value = attr_info[0] if len(attr_info) > 0 else ""
                description = attr_info[1] if len(attr_info) > 1 else ""
                advanced = attr_info[2] if len(attr_info) > 2 else False

                # Get current value from instance (fall back to the default).
                try:
                    current = getattr(instance, name, display_value)
                except Exception:
                    current = display_value

                # Determine option type from the descriptor class.
                opt_type = "string"
                for klass in type(instance).__mro__:
                    if name in klass.__dict__:
                        descriptor = klass.__dict__[name]
                        opt_type = type(descriptor).__name__.lower()
                        # Clean up: optip -> ip, optport -> port, etc.
                        opt_type = opt_type.replace('opt', '')
                        break

                options.append({
                    'name': name,
                    'type': opt_type,
                    'default': display_value,
                    'description': description,
                    'current': str(current) if current is not None else "",
                    'advanced': advanced,
                })
        else:
            # Fallback: inspect instance options property.
            opt_names = getattr(instance, 'options', [])
            for name in opt_names:
                try:
                    current = getattr(instance, name, "")
                    options.append({
                        'name': name,
                        'type': 'string',
                        'default': str(current),
                        'description': '',
                        'current': str(current) if current is not None else "",
                        'advanced': False,
                    })
                except Exception:
                    continue

        return options

    def set_module_option(self, instance, name: str, value: str) -> bool:
        """Set an option on a module instance.

        Args:
            instance: RSF module instance
            name: Option name
            value: Value to set (string, will be validated by descriptor)

        Returns:
            True if set successfully

        Raises:
            RSFError: If option cannot be set
        """
        try:
            setattr(instance, name, value)
            return True
        except Exception as e:
            raise RSFError(f"Failed to set option '{name}': {e}") from e

    @contextmanager
    def capture_output(self):
        """Context manager to capture stdout/stderr from RSF modules.

        RSF modules print directly via their printer system. This
        redirects stdout/stderr to StringIO for capturing output.

        Yields:
            StringIO object containing captured output
        """
        captured = StringIO()
        old_stdout = sys.stdout
        old_stderr = sys.stderr

        try:
            sys.stdout = captured
            sys.stderr = captured
            yield captured
        finally:
            sys.stdout = old_stdout
            sys.stderr = old_stderr

    def _run_captured(self, target, timeout):
        """Run a no-arg callable in a daemon thread with output capture.

        Shared by execute_check/execute_run (their bodies were duplicates).

        Returns:
            Tuple of (state, timed_out) where state is a dict with
            'value' (target's return), 'error' (exception or None) and
            'output' (captured stdout/stderr text).
        """
        state = {'value': None, 'error': None, 'output': ''}

        def _worker():
            try:
                with self.capture_output() as captured:
                    state['value'] = target()
                state['output'] = captured.getvalue()
            except Exception as exc:
                state['error'] = exc
                try:
                    state['output'] = captured.getvalue()
                except Exception:
                    pass

        thread = threading.Thread(target=_worker, daemon=True)
        thread.start()
        thread.join(timeout=timeout)
        return state, thread.is_alive()

    def execute_check(self, instance, timeout: int = 60) -> Tuple[Optional[bool], str]:
        """Run check() on a module with stdout capture and timeout.

        check() is the safe vulnerability verification method.

        Args:
            instance: RSF module instance (already configured)
            timeout: Timeout in seconds

        Returns:
            Tuple of (result, output) where result is True/False/None
        """
        state, timed_out = self._run_captured(instance.check, timeout)

        if timed_out:
            return None, state['output'] + "\n[!] Module execution timed out"
        if state['error']:
            return None, state['output'] + f"\n[-] Error: {state['error']}"
        return state['value'], state['output']

    def execute_run(self, instance, timeout: int = 120) -> Tuple[bool, str]:
        """Run run() on a module with stdout capture and timeout.

        run() is the full exploit execution method.

        Args:
            instance: RSF module instance (already configured)
            timeout: Timeout in seconds

        Returns:
            Tuple of (completed, output) where completed indicates
            whether execution finished within timeout
        """
        state, timed_out = self._run_captured(instance.run, timeout)

        if timed_out:
            return False, state['output'] + "\n[!] Module execution timed out"
        if state['error']:
            return False, state['output'] + f"\n[-] Error: {state['error']}"
        return True, state['output']
|
||||
|
||||
|
||||
# Module-level singleton, created lazily by get_rsf_manager().
_rsf_manager = None


def get_rsf_manager() -> RSFManager:
    """Return the global RSFManager, creating it on first use."""
    global _rsf_manager
    if _rsf_manager is None:
        _rsf_manager = RSFManager()
    return _rsf_manager
|
||||
480
core/rsf_interface.py
Normal file
480
core/rsf_interface.py
Normal file
@@ -0,0 +1,480 @@
|
||||
"""
|
||||
AUTARCH RouterSploit High-Level Interface
|
||||
Clean API for RSF operations, mirroring core/msf_interface.py patterns.
|
||||
Wraps RSFManager with result parsing and formatted output.
|
||||
"""
|
||||
|
||||
import re
|
||||
import time
|
||||
from enum import Enum
|
||||
from dataclasses import dataclass, field
|
||||
from typing import Optional, List, Dict, Any
|
||||
|
||||
from .rsf import get_rsf_manager, RSFError, RSFModuleInfo
|
||||
from .banner import Colors
|
||||
|
||||
|
||||
class RSFStatus(Enum):
    """Outcome categories for RSF operations."""
    SUCCESS = "success"
    VULNERABLE = "vulnerable"
    NOT_VULNERABLE = "not_vulnerable"
    FAILED = "failed"
    TIMEOUT = "timeout"
    NOT_AVAILABLE = "not_available"
|
||||
|
||||
|
||||
@dataclass
|
||||
class RSFResult:
|
||||
"""Result of an RSF module execution."""
|
||||
status: RSFStatus
|
||||
module_path: str
|
||||
target: str = ""
|
||||
|
||||
# Raw and cleaned output
|
||||
raw_output: str = ""
|
||||
cleaned_output: str = ""
|
||||
|
||||
# Parsed results
|
||||
successes: List[str] = field(default_factory=list) # [+] lines
|
||||
info: List[str] = field(default_factory=list) # [*] lines
|
||||
errors: List[str] = field(default_factory=list) # [-] lines
|
||||
|
||||
# Credential results
|
||||
credentials: List[Dict[str, str]] = field(default_factory=list)
|
||||
|
||||
# Check result (True/False/None)
|
||||
check_result: Optional[bool] = None
|
||||
|
||||
# Execution metadata
|
||||
execution_time: float = 0.0
|
||||
|
||||
|
||||
# ANSI escape code pattern: matches CSI sequences (ESC [ params letter)
# and charset-selection sequences (ESC ( letter) so RSF printer output
# can be stripped of terminal color codes.
_ANSI_RE = re.compile(r'\x1b\[[0-9;]*[a-zA-Z]|\x1b\([a-zA-Z]')
|
||||
|
||||
|
||||
class RSFInterface:
|
||||
"""High-level interface for RouterSploit operations.
|
||||
|
||||
Provides a clean API mirroring MSFInterface patterns:
|
||||
- Module listing and search
|
||||
- Module info and options
|
||||
- Check (safe vulnerability verification)
|
||||
- Run (full module execution)
|
||||
- Output parsing and result formatting
|
||||
"""
|
||||
|
||||
    def __init__(self):
        # Low-level manager handling import, indexing and execution.
        self._manager = get_rsf_manager()
|
||||
|
||||
def ensure_available(self) -> bool:
|
||||
"""Check that RSF is importable and available.
|
||||
|
||||
Returns:
|
||||
True if RSF is available
|
||||
|
||||
Raises:
|
||||
RSFError: If RSF is not available
|
||||
"""
|
||||
if not self._manager.is_available:
|
||||
raise RSFError(
|
||||
"RouterSploit is not available. "
|
||||
"Check install path in Settings > RouterSploit Settings."
|
||||
)
|
||||
return True
|
||||
|
||||
    @property
    def is_available(self) -> bool:
        """Check if RSF is available without raising."""
        return self._manager.is_available
|
||||
|
||||
    @property
    def module_count(self) -> int:
        """Get total number of available modules (0 when RSF is unavailable)."""
        return self._manager.get_module_count()
|
||||
|
||||
def list_modules(self, module_type: str = None) -> List[str]:
|
||||
"""List available modules, optionally filtered by type.
|
||||
|
||||
Combines live RSF index with curated library data.
|
||||
|
||||
Args:
|
||||
module_type: Filter by type (exploits, creds, scanners, etc.)
|
||||
|
||||
Returns:
|
||||
List of module paths
|
||||
"""
|
||||
self.ensure_available()
|
||||
|
||||
if module_type:
|
||||
return self._manager.get_modules_by_type(module_type)
|
||||
return self._manager.index_all_modules()
|
||||
|
||||
def search_modules(self, query: str) -> List[str]:
|
||||
"""Search modules by keyword.
|
||||
|
||||
Searches both live RSF index and curated library.
|
||||
|
||||
Args:
|
||||
query: Search string
|
||||
|
||||
Returns:
|
||||
List of matching module paths
|
||||
"""
|
||||
self.ensure_available()
|
||||
|
||||
results = self._manager.search_modules(query)
|
||||
|
||||
# Also search curated library for richer matches
|
||||
try:
|
||||
from .rsf_modules import search_modules as search_curated
|
||||
curated = search_curated(query)
|
||||
curated_paths = [m['path'] for m in curated if 'path' in m]
|
||||
# Merge without duplicates, curated first
|
||||
seen = set(results)
|
||||
for path in curated_paths:
|
||||
if path not in seen:
|
||||
results.append(path)
|
||||
seen.add(path)
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
return results
|
||||
|
||||
def get_module_info(self, path: str) -> RSFModuleInfo:
|
||||
"""Get metadata for a module.
|
||||
|
||||
Tries curated library first, falls back to live introspection.
|
||||
|
||||
Args:
|
||||
path: Module path
|
||||
|
||||
Returns:
|
||||
RSFModuleInfo with module metadata
|
||||
"""
|
||||
# Try curated library first
|
||||
try:
|
||||
from .rsf_modules import get_module_info as get_curated_info
|
||||
curated = get_curated_info(path)
|
||||
if curated:
|
||||
parts = path.split('/')
|
||||
return RSFModuleInfo(
|
||||
name=curated.get('name', path.split('/')[-1]),
|
||||
path=path,
|
||||
description=curated.get('description', ''),
|
||||
authors=tuple(curated.get('authors', ())),
|
||||
devices=tuple(curated.get('devices', ())),
|
||||
references=tuple(curated.get('references', ())),
|
||||
module_type=parts[0] if parts else "",
|
||||
)
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
# Fall back to live introspection
|
||||
self.ensure_available()
|
||||
_, info = self._manager.load_module(path)
|
||||
return info
|
||||
|
||||
def get_module_options(self, path: str) -> List[Dict[str, Any]]:
|
||||
"""Get configurable options for a module.
|
||||
|
||||
Args:
|
||||
path: Module path
|
||||
|
||||
Returns:
|
||||
List of option dicts with name, type, default, description, current
|
||||
"""
|
||||
self.ensure_available()
|
||||
instance, _ = self._manager.load_module(path)
|
||||
return self._manager.get_module_options(instance)
|
||||
|
||||
def check_module(self, path: str, options: Dict[str, str] = None,
|
||||
timeout: int = None) -> RSFResult:
|
||||
"""Run check() on a module -- safe vulnerability verification.
|
||||
|
||||
Args:
|
||||
path: Module path
|
||||
options: Dict of option_name -> value to set before running
|
||||
timeout: Execution timeout in seconds (default from config)
|
||||
|
||||
Returns:
|
||||
RSFResult with check results
|
||||
"""
|
||||
return self._execute_module(path, options, timeout, check_only=True)
|
||||
|
||||
def run_module(self, path: str, options: Dict[str, str] = None,
|
||||
timeout: int = None) -> RSFResult:
|
||||
"""Run run() on a module -- full exploit execution.
|
||||
|
||||
Args:
|
||||
path: Module path
|
||||
options: Dict of option_name -> value to set before running
|
||||
timeout: Execution timeout in seconds (default from config)
|
||||
|
||||
Returns:
|
||||
RSFResult with execution results
|
||||
"""
|
||||
return self._execute_module(path, options, timeout, check_only=False)
|
||||
|
||||
def _execute_module(self, path: str, options: Dict[str, str] = None,
|
||||
timeout: int = None, check_only: bool = False) -> RSFResult:
|
||||
"""Internal method to execute a module (check or run).
|
||||
|
||||
Args:
|
||||
path: Module path
|
||||
options: Option overrides
|
||||
timeout: Timeout in seconds
|
||||
check_only: If True, run check() instead of run()
|
||||
|
||||
Returns:
|
||||
RSFResult
|
||||
"""
|
||||
if not self._manager.is_available:
|
||||
return RSFResult(
|
||||
status=RSFStatus.NOT_AVAILABLE,
|
||||
module_path=path,
|
||||
)
|
||||
|
||||
if timeout is None:
|
||||
from .config import get_config
|
||||
timeout = get_config().get_int('rsf', 'execution_timeout', 120)
|
||||
|
||||
start_time = time.time()
|
||||
|
||||
try:
|
||||
# Load and configure module
|
||||
instance, info = self._manager.load_module(path)
|
||||
|
||||
target = ""
|
||||
if options:
|
||||
for name, value in options.items():
|
||||
self._manager.set_module_option(instance, name, value)
|
||||
if name == 'target':
|
||||
target = value
|
||||
|
||||
# Get target from instance if not in options
|
||||
if not target:
|
||||
target = str(getattr(instance, 'target', ''))
|
||||
|
||||
# Execute
|
||||
if check_only:
|
||||
check_result, raw_output = self._manager.execute_check(instance, timeout)
|
||||
else:
|
||||
completed, raw_output = self._manager.execute_run(instance, timeout)
|
||||
check_result = None
|
||||
|
||||
execution_time = time.time() - start_time
|
||||
cleaned = self._clean_output(raw_output)
|
||||
successes, info_lines, errors, credentials = self._parse_output(cleaned)
|
||||
|
||||
# Determine status
|
||||
if check_only:
|
||||
if check_result is True:
|
||||
status = RSFStatus.VULNERABLE
|
||||
elif check_result is False:
|
||||
status = RSFStatus.NOT_VULNERABLE
|
||||
elif "[!]" in raw_output and "timed out" in raw_output.lower():
|
||||
status = RSFStatus.TIMEOUT
|
||||
else:
|
||||
status = RSFStatus.FAILED
|
||||
else:
|
||||
if "[!]" in raw_output and "timed out" in raw_output.lower():
|
||||
status = RSFStatus.TIMEOUT
|
||||
elif errors and not successes:
|
||||
status = RSFStatus.FAILED
|
||||
elif successes or credentials:
|
||||
status = RSFStatus.SUCCESS
|
||||
elif completed:
|
||||
status = RSFStatus.SUCCESS
|
||||
else:
|
||||
status = RSFStatus.FAILED
|
||||
|
||||
return RSFResult(
|
||||
status=status,
|
||||
module_path=path,
|
||||
target=target,
|
||||
raw_output=raw_output,
|
||||
cleaned_output=cleaned,
|
||||
successes=successes,
|
||||
info=info_lines,
|
||||
errors=errors,
|
||||
credentials=credentials,
|
||||
check_result=check_result,
|
||||
execution_time=execution_time,
|
||||
)
|
||||
|
||||
except RSFError as e:
|
||||
return RSFResult(
|
||||
status=RSFStatus.FAILED,
|
||||
module_path=path,
|
||||
target=options.get('target', '') if options else '',
|
||||
raw_output=str(e),
|
||||
cleaned_output=str(e),
|
||||
errors=[str(e)],
|
||||
execution_time=time.time() - start_time,
|
||||
)
|
||||
|
||||
def _clean_output(self, raw: str) -> str:
|
||||
"""Strip ANSI escape codes from output.
|
||||
|
||||
Args:
|
||||
raw: Raw output potentially containing ANSI codes
|
||||
|
||||
Returns:
|
||||
Cleaned text
|
||||
"""
|
||||
if not raw:
|
||||
return ""
|
||||
return _ANSI_RE.sub('', raw)
|
||||
|
||||
def _parse_output(self, cleaned: str):
|
||||
"""Parse cleaned output into categorized lines.
|
||||
|
||||
Categorizes lines by RSF prefix:
|
||||
- [+] = success/finding
|
||||
- [*] = informational
|
||||
- [-] = error/failure
|
||||
|
||||
Also extracts credentials from common patterns.
|
||||
|
||||
Args:
|
||||
cleaned: ANSI-stripped output
|
||||
|
||||
Returns:
|
||||
Tuple of (successes, info, errors, credentials)
|
||||
"""
|
||||
successes = []
|
||||
info_lines = []
|
||||
errors = []
|
||||
credentials = []
|
||||
|
||||
for line in cleaned.splitlines():
|
||||
stripped = line.strip()
|
||||
if not stripped:
|
||||
continue
|
||||
|
||||
if stripped.startswith('[+]'):
|
||||
successes.append(stripped[3:].strip())
|
||||
# Check for credential patterns
|
||||
creds = self._extract_credentials(stripped)
|
||||
if creds:
|
||||
credentials.append(creds)
|
||||
elif stripped.startswith('[*]'):
|
||||
info_lines.append(stripped[3:].strip())
|
||||
elif stripped.startswith('[-]'):
|
||||
errors.append(stripped[3:].strip())
|
||||
elif stripped.startswith('[!]'):
|
||||
errors.append(stripped[3:].strip())
|
||||
|
||||
return successes, info_lines, errors, credentials
|
||||
|
||||
def _extract_credentials(self, line: str) -> Optional[Dict[str, str]]:
|
||||
"""Extract credentials from a success line.
|
||||
|
||||
Common RSF credential output patterns:
|
||||
- [+] admin:password
|
||||
- [+] Found valid credentials: admin / password
|
||||
- [+] username:password on target:port
|
||||
|
||||
Args:
|
||||
line: A [+] success line
|
||||
|
||||
Returns:
|
||||
Dict with username/password keys, or None
|
||||
"""
|
||||
# Pattern: username:password
|
||||
cred_match = re.search(
|
||||
r'(?:credentials?|found|valid).*?(\S+)\s*[:/]\s*(\S+)',
|
||||
line, re.IGNORECASE
|
||||
)
|
||||
if cred_match:
|
||||
return {
|
||||
'username': cred_match.group(1),
|
||||
'password': cred_match.group(2),
|
||||
}
|
||||
|
||||
# Simple colon-separated on [+] lines
|
||||
content = line.replace('[+]', '').strip()
|
||||
if ':' in content and len(content.split(':')) == 2:
|
||||
parts = content.split(':')
|
||||
# Only if parts look like creds (not URLs or paths)
|
||||
if not any(x in parts[0].lower() for x in ['http', '/', '\\']):
|
||||
return {
|
||||
'username': parts[0].strip(),
|
||||
'password': parts[1].strip(),
|
||||
}
|
||||
|
||||
return None
|
||||
|
||||
def print_result(self, result: RSFResult, verbose: bool = False):
|
||||
"""Print formatted execution result.
|
||||
|
||||
Args:
|
||||
result: RSFResult to display
|
||||
verbose: Show raw output if True
|
||||
"""
|
||||
print()
|
||||
print(f" {Colors.BOLD}{Colors.WHITE}Execution Result{Colors.RESET}")
|
||||
print(f" {Colors.DIM}{'─' * 50}{Colors.RESET}")
|
||||
|
||||
# Status with color
|
||||
status_colors = {
|
||||
RSFStatus.SUCCESS: Colors.GREEN,
|
||||
RSFStatus.VULNERABLE: Colors.RED,
|
||||
RSFStatus.NOT_VULNERABLE: Colors.GREEN,
|
||||
RSFStatus.FAILED: Colors.RED,
|
||||
RSFStatus.TIMEOUT: Colors.YELLOW,
|
||||
RSFStatus.NOT_AVAILABLE: Colors.YELLOW,
|
||||
}
|
||||
color = status_colors.get(result.status, Colors.WHITE)
|
||||
print(f" {Colors.CYAN}Status:{Colors.RESET} {color}{result.status.value}{Colors.RESET}")
|
||||
print(f" {Colors.CYAN}Module:{Colors.RESET} {result.module_path}")
|
||||
if result.target:
|
||||
print(f" {Colors.CYAN}Target:{Colors.RESET} {result.target}")
|
||||
print(f" {Colors.CYAN}Time:{Colors.RESET} {result.execution_time:.1f}s")
|
||||
print()
|
||||
|
||||
# Successes
|
||||
if result.successes:
|
||||
for line in result.successes:
|
||||
print(f" {Colors.GREEN}[+]{Colors.RESET} {line}")
|
||||
|
||||
# Info
|
||||
if result.info:
|
||||
for line in result.info:
|
||||
print(f" {Colors.CYAN}[*]{Colors.RESET} {line}")
|
||||
|
||||
# Errors
|
||||
if result.errors:
|
||||
for line in result.errors:
|
||||
print(f" {Colors.RED}[-]{Colors.RESET} {line}")
|
||||
|
||||
# Credentials
|
||||
if result.credentials:
|
||||
print()
|
||||
print(f" {Colors.GREEN}{Colors.BOLD}Credentials Found:{Colors.RESET}")
|
||||
for cred in result.credentials:
|
||||
print(f" {Colors.GREEN}{cred.get('username', '?')}{Colors.RESET}:"
|
||||
f"{Colors.YELLOW}{cred.get('password', '?')}{Colors.RESET}")
|
||||
|
||||
# Verbose: raw output
|
||||
if verbose and result.cleaned_output:
|
||||
print()
|
||||
print(f" {Colors.DIM}Raw Output:{Colors.RESET}")
|
||||
for line in result.cleaned_output.splitlines():
|
||||
print(f" {Colors.DIM}{line}{Colors.RESET}")
|
||||
|
||||
print()
|
||||
|
||||
|
||||
# Lazily-created process-wide singleton; populated on first access.
_rsf_interface = None


def get_rsf_interface() -> RSFInterface:
    """Return the shared RSFInterface, constructing it on first call."""
    global _rsf_interface
    if _rsf_interface is None:
        _rsf_interface = RSFInterface()
    return _rsf_interface
|
||||
542
core/rsf_modules.py
Normal file
542
core/rsf_modules.py
Normal file
@@ -0,0 +1,542 @@
|
||||
"""
|
||||
AUTARCH RouterSploit Curated Module Library
|
||||
Offline-browsable metadata for key RSF modules.
|
||||
Mirrors core/msf_modules.py patterns for RSF-specific modules.
|
||||
"""
|
||||
|
||||
from .banner import Colors
|
||||
|
||||
|
||||
# ─── Module Library ─────────────────────────────────────────────────────────

# Each entry maps a RouterSploit module path to curated display metadata:
#   name        -- human-readable title
#   description -- the vulnerability / behavior the module targets
#   authors     -- tuple of credited researchers
#   devices     -- tuple of affected devices or firmware versions
#   references  -- tuple of advisory / CVE / writeup URLs
#   tags        -- tuple of lowercase keywords used by search_modules()
#   notes       -- operator guidance (ports, prerequisites, caveats)
RSF_MODULES = {
    # ════════════════════════════════════════════════════════════════════════
    # EXPLOITS - ROUTERS
    # ════════════════════════════════════════════════════════════════════════

    # ── D-Link Routers ──────────────────────────────────────────────────────
    'exploits/routers/dlink/dir_300_600_rce': {
        'name': 'D-Link DIR-300 & DIR-600 RCE',
        'description': 'Exploits D-Link DIR-300, DIR-600 Remote Code Execution '
                       'vulnerability allowing command execution with root privileges.',
        'authors': ('Michael Messner', 'Marcin Bury'),
        'devices': ('D-Link DIR 300', 'D-Link DIR 600'),
        'references': ('http://www.s3cur1ty.de/m1adv2013-003',),
        'tags': ('dlink', 'rce', 'router', 'http'),
        'notes': 'Targets the web interface. Requires HTTP access to the router.',
    },
    'exploits/routers/dlink/dir_645_815_rce': {
        'name': 'D-Link DIR-645 & DIR-815 RCE',
        'description': 'Exploits D-Link DIR-645 and DIR-815 Remote Code Execution '
                       'vulnerability via the web interface.',
        'authors': ('Michael Messner', 'Marcin Bury'),
        'devices': ('DIR-815 v1.03b02', 'DIR-645 v1.02', 'DIR-645 v1.03',
                    'DIR-600 below v2.16b01', 'DIR-300 revB v2.13b01',
                    'DIR-412 Ver 1.14WWB02', 'DIR-110 Ver 1.01'),
        'references': ('http://www.s3cur1ty.de/m1adv2013-017',),
        'tags': ('dlink', 'rce', 'router', 'http'),
        'notes': 'Affects multiple DIR-series firmware versions.',
    },
    'exploits/routers/dlink/multi_hnap_rce': {
        'name': 'D-Link Multi HNAP RCE',
        'description': 'Exploits HNAP remote code execution in multiple D-Link devices '
                       'allowing command execution on the device.',
        'authors': ('Samuel Huntley', 'Craig Heffner', 'Marcin Bury'),
        'devices': ('D-Link DIR-645', 'D-Link DIR-880L', 'D-Link DIR-865L',
                    'D-Link DIR-860L revA/B', 'D-Link DIR-815 revB',
                    'D-Link DIR-300 revB', 'D-Link DIR-600 revB',
                    'D-Link DAP-1650 revB'),
        'references': ('https://www.exploit-db.com/exploits/37171/',
                       'http://www.devttys0.com/2015/04/hacking-the-d-link-dir-890l/'),
        'tags': ('dlink', 'rce', 'hnap', 'router', 'http'),
        'notes': 'HNAP (Home Network Administration Protocol) vulnerability '
                 'affecting a wide range of D-Link devices.',
    },

    # ── Cisco Routers ───────────────────────────────────────────────────────
    'exploits/routers/cisco/rv320_command_injection': {
        'name': 'Cisco RV320 Command Injection',
        'description': 'Exploits Cisco RV320 Remote Command Injection in the '
                       'web-based certificate generator feature (CVE-2019-1652).',
        'authors': ('RedTeam Pentesting GmbH', 'GH0st3rs'),
        'devices': ('Cisco RV320 1.4.2.15 to 1.4.2.22', 'Cisco RV325'),
        'references': ('https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2019-1652',),
        'tags': ('cisco', 'rce', 'command_injection', 'router', 'cve-2019-1652'),
        'notes': 'Requires HTTPS access (port 443). Targets certificate generator.',
    },
    'exploits/routers/cisco/ios_http_authorization_bypass': {
        'name': 'Cisco IOS HTTP Authorization Bypass',
        'description': 'HTTP server for Cisco IOS 11.3 to 12.2 allows attackers to '
                       'bypass authentication and execute commands by specifying a '
                       'high access level in the URL (CVE-2001-0537).',
        'authors': ('renos stoikos',),
        'devices': ('Cisco IOS 11.3 to 12.2',),
        'references': ('http://www.cvedetails.com/cve/cve-2001-0537',),
        'tags': ('cisco', 'auth_bypass', 'ios', 'router', 'http', 'cve-2001-0537'),
        'notes': 'Classic IOS vulnerability. Only affects very old IOS versions.',
    },

    # ── Netgear Routers ─────────────────────────────────────────────────────
    'exploits/routers/netgear/dgn2200_ping_cgi_rce': {
        'name': 'Netgear DGN2200 RCE',
        'description': 'Exploits Netgear DGN2200 RCE via ping.cgi script '
                       '(CVE-2017-6077).',
        'authors': ('SivertPL', 'Josh Abraham'),
        'devices': ('Netgear DGN2200v1-v4',),
        'references': ('https://www.exploit-db.com/exploits/41394/',
                       'https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2017-6077'),
        'tags': ('netgear', 'rce', 'router', 'http', 'cve-2017-6077'),
        'notes': 'Requires valid credentials (default: admin/password).',
    },
    'exploits/routers/netgear/multi_rce': {
        'name': 'Netgear Multi RCE',
        'description': 'Exploits remote command execution in multiple Netgear devices. '
                       'If vulnerable, opens a command loop with OS-level access.',
        'authors': ('Andrei Costin', 'Marcin Bury'),
        'devices': ('Netgear WG102', 'Netgear WG103', 'Netgear WN604',
                    'Netgear WNDAP350', 'Netgear WNDAP360', 'Netgear WNAP320',
                    'Netgear WNDAP660', 'Netgear WNDAP620'),
        'references': ('http://firmware.re/vulns/acsa-2015-001.php',),
        'tags': ('netgear', 'rce', 'router', 'http', 'multi'),
        'notes': 'Targets multiple Netgear enterprise wireless APs.',
    },

    # ── Mikrotik Routers ────────────────────────────────────────────────────
    'exploits/routers/mikrotik/winbox_auth_bypass_creds_disclosure': {
        'name': 'Mikrotik WinBox Auth Bypass - Credentials Disclosure',
        'description': 'Bypasses authentication through WinBox service in Mikrotik '
                       'devices v6.29 to v6.42 and retrieves admin credentials.',
        'authors': ('Alireza Mosajjal', 'Mostafa Yalpaniyan', 'Marcin Bury'),
        'devices': ('Mikrotik RouterOS 6.29 to 6.42',),
        'references': ('https://n0p.me/winbox-bug-dissection/',
                       'https://github.com/BasuCert/WinboxPoC'),
        'tags': ('mikrotik', 'auth_bypass', 'creds', 'winbox', 'router', 'tcp'),
        'notes': 'Targets WinBox service (port 8291). Very high impact.',
    },

    # ── TP-Link Routers ─────────────────────────────────────────────────────
    'exploits/routers/tplink/archer_c2_c20i_rce': {
        'name': 'TP-Link Archer C2 & C20i RCE',
        'description': 'Exploits TP-Link Archer C2 and C20i RCE allowing root-level '
                       'command execution.',
        'authors': ('Michal Sajdak', 'Marcin Bury'),
        'devices': ('TP-Link Archer C2', 'TP-Link Archer C20i'),
        'references': (),
        'tags': ('tplink', 'rce', 'router', 'http'),
        'notes': 'Targets the Archer web interface.',
    },

    # ── Asus Routers ────────────────────────────────────────────────────────
    'exploits/routers/asus/asuswrt_lan_rce': {
        'name': 'AsusWRT LAN RCE',
        'description': 'Exploits multiple vulnerabilities in AsusWRT firmware to achieve '
                       'RCE: HTTP auth bypass + VPN config upload + infosvr command '
                       'execution (CVE-2018-5999, CVE-2018-6000).',
        'authors': ('Pedro Ribeiro', 'Marcin Bury'),
        'devices': ('AsusWRT < v3.0.0.4.384.10007',),
        'references': ('https://nvd.nist.gov/vuln/detail/CVE-2018-5999',
                       'https://nvd.nist.gov/vuln/detail/CVE-2018-6000'),
        'tags': ('asus', 'rce', 'auth_bypass', 'router', 'http', 'udp',
                 'cve-2018-5999', 'cve-2018-6000'),
        'notes': 'Chains HTTP auth bypass with UDP infosvr for full RCE.',
    },

    # ════════════════════════════════════════════════════════════════════════
    # EXPLOITS - CAMERAS
    # ════════════════════════════════════════════════════════════════════════

    'exploits/cameras/dlink/dcs_930l_932l_auth_bypass': {
        'name': 'D-Link DCS Cameras Auth Bypass',
        'description': 'D-Link DCS web cameras allow unauthenticated attackers to '
                       'obtain device configuration by accessing unprotected URLs.',
        'authors': ('Roberto Paleari', 'Dino Causevic'),
        'devices': ('D-Link DCS-930L fw 1.04', 'D-Link DCS-932L fw 1.02'),
        'references': ('https://www.exploit-db.com/exploits/24442/',),
        'tags': ('dlink', 'camera', 'auth_bypass', 'http'),
        'notes': 'Uses port 8080 by default.',
    },
    'exploits/cameras/cisco/video_surv_path_traversal': {
        'name': 'Cisco Video Surveillance Path Traversal',
        'description': 'Path traversal in Cisco Video Surveillance Operations '
                       'Manager 6.3.2 allowing file reads from the filesystem.',
        'authors': ('b.saleh', 'Marcin Bury'),
        'devices': ('Cisco Video Surveillance Operations Manager 6.3.2',),
        'references': ('https://www.exploit-db.com/exploits/38389/',),
        'tags': ('cisco', 'camera', 'path_traversal', 'http'),
        'notes': 'Read /etc/passwd or other files via path traversal.',
    },
    'exploits/cameras/brickcom/corp_network_cameras_conf_disclosure': {
        'name': 'Brickcom Network Camera Config Disclosure',
        'description': 'Exploits Brickcom Corporation Network Camera configuration '
                       'disclosure vulnerability to read device config and credentials.',
        'authors': ('Orwelllabs', 'Marcin Bury'),
        'devices': ('Brickcom FB-100Ae', 'Brickcom WCB-100Ap',
                    'Brickcom OB-200Np-LR', 'Brickcom VD-E200Nf'),
        'references': ('https://www.exploit-db.com/exploits/39696/',),
        'tags': ('brickcom', 'camera', 'config_disclosure', 'http'),
        'notes': 'Extracts admin credentials from configuration.',
    },

    # ════════════════════════════════════════════════════════════════════════
    # EXPLOITS - GENERIC
    # ════════════════════════════════════════════════════════════════════════

    'exploits/generic/heartbleed': {
        'name': 'OpenSSL Heartbleed',
        'description': 'Exploits OpenSSL Heartbleed vulnerability (CVE-2014-0160). '
                       'Fake heartbeat length leaks memory data from the server.',
        'authors': ('Neel Mehta', 'Jared Stafford', 'Marcin Bury'),
        'devices': ('Multi',),
        'references': ('http://www.cvedetails.com/cve/2014-0160',
                       'http://heartbleed.com/'),
        'tags': ('heartbleed', 'openssl', 'ssl', 'tls', 'memory_leak', 'generic',
                 'cve-2014-0160'),
        'notes': 'Tests for Heartbleed on any SSL/TLS service. '
                 'Default port 443.',
    },
    'exploits/generic/shellshock': {
        'name': 'Shellshock',
        'description': 'Exploits Shellshock vulnerability (CVE-2014-6271) allowing '
                       'OS command execution via crafted HTTP headers.',
        'authors': ('Marcin Bury',),
        'devices': ('Multi',),
        'references': ('https://access.redhat.com/articles/1200223',),
        'tags': ('shellshock', 'bash', 'rce', 'http', 'generic', 'cve-2014-6271'),
        'notes': 'Injects via HTTP headers (default: User-Agent). '
                 'Configure path and method as needed.',
    },
    'exploits/generic/ssh_auth_keys': {
        'name': 'SSH Authorized Keys',
        'description': 'Tests for known default SSH keys that ship with various '
                       'embedded devices and appliances.',
        'authors': ('Marcin Bury',),
        'devices': ('Multi',),
        'references': (),
        'tags': ('ssh', 'keys', 'default_creds', 'generic'),
        'notes': 'Checks for factory SSH keys common on IoT/embedded devices.',
    },

    # ════════════════════════════════════════════════════════════════════════
    # CREDENTIALS - GENERIC
    # ════════════════════════════════════════════════════════════════════════

    'creds/generic/ftp_bruteforce': {
        'name': 'FTP Bruteforce',
        'description': 'Performs bruteforce attack against FTP service. '
                       'Displays valid credentials when found.',
        'authors': ('Marcin Bury',),
        'devices': ('Multiple devices',),
        'references': (),
        'tags': ('ftp', 'bruteforce', 'creds', 'generic'),
        'notes': 'Supports file:// targets for batch mode. '
                 'Default port 21. Threaded (default 8 threads).',
    },
    'creds/generic/ssh_bruteforce': {
        'name': 'SSH Bruteforce',
        'description': 'Performs bruteforce attack against SSH service. '
                       'Displays valid credentials when found.',
        'authors': ('Marcin Bury',),
        'devices': ('Multiple devices',),
        'references': (),
        'tags': ('ssh', 'bruteforce', 'creds', 'generic'),
        'notes': 'Default port 22. Threaded. Supports batch targets via file://.',
    },
    'creds/generic/telnet_bruteforce': {
        'name': 'Telnet Bruteforce',
        'description': 'Performs bruteforce attack against Telnet service. '
                       'Displays valid credentials when found.',
        'authors': ('Marcin Bury',),
        'devices': ('Multiple devices',),
        'references': (),
        'tags': ('telnet', 'bruteforce', 'creds', 'generic'),
        'notes': 'Default port 23. Common on IoT devices with telnet enabled.',
    },
    'creds/generic/snmp_bruteforce': {
        'name': 'SNMP Bruteforce',
        'description': 'Performs bruteforce attack against SNMP service. '
                       'Discovers valid community strings.',
        'authors': ('Marcin Bury',),
        'devices': ('Multiple devices',),
        'references': (),
        'tags': ('snmp', 'bruteforce', 'creds', 'generic', 'community'),
        'notes': 'Tests SNMP community strings. Default port 161. '
                 'Supports SNMPv1 and SNMPv2c.',
    },
    'creds/generic/http_basic_digest_bruteforce': {
        'name': 'HTTP Basic/Digest Bruteforce',
        'description': 'Performs bruteforce against HTTP Basic/Digest authentication. '
                       'Displays valid credentials when found.',
        'authors': ('Marcin Bury', 'Alexander Yakovlev'),
        'devices': ('Multiple devices',),
        'references': (),
        'tags': ('http', 'bruteforce', 'creds', 'generic', 'basic_auth', 'digest'),
        'notes': 'Targets HTTP authentication. Configure path to the protected URL.',
    },

    # ════════════════════════════════════════════════════════════════════════
    # SCANNERS
    # ════════════════════════════════════════════════════════════════════════

    'scanners/autopwn': {
        'name': 'AutoPwn',
        'description': 'Comprehensive scanner that tests ALL exploit and credential '
                       'modules against a target. The ultimate "scan everything" tool.',
        'authors': ('Marcin Bury',),
        'devices': ('Multi',),
        'references': (),
        'tags': ('scanner', 'autopwn', 'comprehensive', 'all'),
        'notes': 'Runs all exploits and creds against the target. '
                 'Can be filtered by vendor. Checks HTTP, FTP, SSH, Telnet, SNMP. '
                 'Very thorough but slow. Use specific scanners for faster results.',
    },
    'scanners/routers/router_scan': {
        'name': 'Router Scanner',
        'description': 'Scans for router vulnerabilities and weaknesses. '
                       'Tests generic and router-specific exploit modules.',
        'authors': ('Marcin Bury',),
        'devices': ('Router',),
        'references': (),
        'tags': ('scanner', 'router', 'comprehensive'),
        'notes': 'Faster than AutoPwn -- only tests router-relevant modules.',
    },
    'scanners/cameras/camera_scan': {
        'name': 'Camera Scanner',
        'description': 'Scans for IP camera vulnerabilities and weaknesses. '
                       'Tests generic and camera-specific exploit modules.',
        'authors': ('Marcin Bury',),
        'devices': ('Cameras',),
        'references': (),
        'tags': ('scanner', 'camera', 'ip_camera', 'comprehensive'),
        'notes': 'Tests all camera-related exploits against the target.',
    },

    # ════════════════════════════════════════════════════════════════════════
    # EXPLOITS - MISC
    # ════════════════════════════════════════════════════════════════════════

    'exploits/misc/asus/b1m_projector_rce': {
        'name': 'Asus B1M Projector RCE',
        'description': 'Exploits Asus B1M Projector RCE allowing root-level '
                       'command execution.',
        'authors': ('Hacker House', 'Marcin Bury'),
        'devices': ('Asus B1M Projector',),
        'references': ('https://www.myhackerhouse.com/asus-b1m-projector-remote-root-0day/',),
        'tags': ('asus', 'projector', 'rce', 'misc', 'iot'),
        'notes': 'Targets network-connected projectors.',
    },

    # ════════════════════════════════════════════════════════════════════════
    # EXPLOITS - MORE ROUTERS
    # ════════════════════════════════════════════════════════════════════════

    'exploits/routers/linksys/smart_wifi_password_disclosure': {
        'name': 'Linksys Smart WiFi Password Disclosure',
        'description': 'Exploits information disclosure in Linksys Smart WiFi '
                       'routers to extract passwords.',
        'authors': ('Marcin Bury',),
        'devices': ('Linksys Smart WiFi routers',),
        'references': (),
        'tags': ('linksys', 'password', 'disclosure', 'router', 'http'),
        'notes': 'Targets Linksys Smart WiFi web interface.',
    },
    'exploits/routers/zyxel/d1000_rce': {
        'name': 'Zyxel D1000 RCE',
        'description': 'Exploits remote code execution in Zyxel D1000 modem/routers.',
        'authors': ('Marcin Bury',),
        'devices': ('Zyxel D1000',),
        'references': (),
        'tags': ('zyxel', 'rce', 'router', 'modem'),
        'notes': 'Targets Zyxel DSL modem/router combo devices.',
    },
    'exploits/routers/huawei/hg520_info_disclosure': {
        'name': 'Huawei HG520 Info Disclosure',
        'description': 'Information disclosure in Huawei HG520 home gateway '
                       'allowing extraction of device configuration.',
        'authors': ('Marcin Bury',),
        'devices': ('Huawei HG520',),
        'references': (),
        'tags': ('huawei', 'info_disclosure', 'router', 'http'),
        'notes': 'Targets Huawei home gateway web interface.',
    },
}
|
||||
|
||||
|
||||
# ─── Module Type Mapping ────────────────────────────────────────────────────

# Display metadata for each top-level RSF module category.
# Keys correspond to the first path segment of entries in RSF_MODULES
# (and of live RSF module paths); 'color' is the accent color used when
# rendering the category in the terminal UI.
MODULE_TYPES = {
    'exploits': {
        'name': 'Exploits',
        'description': 'Vulnerability exploits for routers, cameras, and devices',
        'color': Colors.RED,
    },
    'creds': {
        'name': 'Credentials',
        'description': 'Default credential and brute-force modules',
        'color': Colors.YELLOW,
    },
    'scanners': {
        'name': 'Scanners',
        'description': 'Automated vulnerability scanners (AutoPwn, etc.)',
        'color': Colors.CYAN,
    },
    'payloads': {
        'name': 'Payloads',
        'description': 'Shellcode and payload generators',
        'color': Colors.MAGENTA,
    },
    'encoders': {
        'name': 'Encoders',
        'description': 'Payload encoding and obfuscation',
        'color': Colors.GREEN,
    },
}
|
||||
|
||||
|
||||
# ─── API Functions ──────────────────────────────────────────────────────────
|
||||
|
||||
def get_module_info(module_path: str) -> dict:
    """Look up the curated metadata entry for a module.

    Args:
        module_path: Module path like 'exploits/routers/dlink/dir_300_600_rce'

    Returns:
        The module's info dict, or None when the path is not in the library
    """
    try:
        return RSF_MODULES[module_path]
    except KeyError:
        return None
|
||||
|
||||
|
||||
def get_module_description(module_path: str) -> str:
    """Return only the description text for a curated module.

    Args:
        module_path: Module path

    Returns:
        Description string, or '' when the module is unknown
    """
    entry = RSF_MODULES.get(module_path)
    return entry.get('description', '') if entry else ''
|
||||
|
||||
|
||||
def search_modules(query: str) -> list:
    """Search curated modules by keyword.

    Matches case-insensitively against each module's path, name,
    description, tags, and device list.

    Args:
        query: Search string (case-insensitive)

    Returns:
        List of matching module info dicts (each with a 'path' key added)
    """
    needle = query.lower()

    def _matches(path: str, info: dict) -> bool:
        # A hit in any single field is sufficient.
        if needle in path.lower():
            return True
        if needle in info.get('name', '').lower():
            return True
        if needle in info.get('description', '').lower():
            return True
        if any(needle in tag.lower() for tag in info.get('tags', ())):
            return True
        return any(needle in dev.lower() for dev in info.get('devices', ()))

    hits = []
    for path, info in RSF_MODULES.items():
        if _matches(path, info):
            hits.append({**info, 'path': path})
    return hits
|
||||
|
||||
|
||||
def get_modules_by_type(module_type: str) -> list:
    """Get curated modules filtered by type.

    Args:
        module_type: One of 'exploits', 'creds', 'scanners', etc.

    Returns:
        List of matching module info dicts (with 'path' key added)
    """
    prefix = module_type + '/'
    return [
        {**info, 'path': path}
        for path, info in RSF_MODULES.items()
        if path.startswith(prefix)
    ]
|
||||
|
||||
|
||||
def format_module_help(module_path: str) -> str:
    """Format detailed help text for a module.

    Args:
        module_path: Module path

    Returns:
        Formatted, newline-joined help string (colorized).
    """
    meta = RSF_MODULES.get(module_path)
    if meta is None:
        return f"  {Colors.YELLOW}No curated info for '{module_path}'{Colors.RESET}"

    # Fixed header: name, path, blank spacer, description.
    out = [
        f"  {Colors.BOLD}{Colors.WHITE}{meta.get('name', module_path)}{Colors.RESET}",
        f"  {Colors.DIM}Path: {module_path}{Colors.RESET}",
        "",
        f"  {meta.get('description', '')}",
    ]

    if meta.get('authors'):
        out.append("")
        out.append(f"  {Colors.CYAN}Authors:{Colors.RESET} {', '.join(meta['authors'])}")

    if meta.get('devices'):
        out.append(f"  {Colors.CYAN}Devices:{Colors.RESET}")
        out.extend(f"    - {dev}" for dev in meta['devices'])

    if meta.get('references'):
        out.append(f"  {Colors.CYAN}References:{Colors.RESET}")
        out.extend(f"    {Colors.DIM}{ref}{Colors.RESET}" for ref in meta['references'])

    if meta.get('notes'):
        out.append("")
        out.append(f"  {Colors.YELLOW}Note:{Colors.RESET} {meta['notes']}")

    return '\n'.join(out)
|
||||
|
||||
|
||||
def get_all_modules() -> dict:
    """Get all curated modules.

    Returns:
        The full RSF_MODULES dict. Note this is the live module-level
        mapping, not a copy, so callers should treat it as read-only.
    """
    return RSF_MODULES
|
||||
|
||||
|
||||
def get_type_info(module_type: str) -> dict:
    """Get info about a module type.

    Args:
        module_type: One of 'exploits', 'creds', 'scanners', etc.

    Returns:
        Type info dict, or None for an unknown type.
    """
    return MODULE_TYPES.get(module_type, None)
|
||||
439
core/rsf_terms.py
Normal file
439
core/rsf_terms.py
Normal file
@@ -0,0 +1,439 @@
|
||||
"""
|
||||
AUTARCH RouterSploit Option Term Bank
|
||||
Centralized descriptions and validation for RSF module options.
|
||||
Mirrors core/msf_terms.py patterns for RSF-specific options.
|
||||
"""
|
||||
|
||||
from .banner import Colors
|
||||
|
||||
|
||||
# ─── RSF Settings Definitions ───────────────────────────────────────────────
|
||||
|
||||
# Each entry describes one RouterSploit module option. Per-entry schema:
#   description - human-readable explanation shown in help output
#   input_type  - validation type: 'ip', 'port', 'boolean', 'integer',
#                 'string', or 'wordlist' (see validate_setting_value)
#   examples    - sample values shown in help
#   default     - default value ('' means no default)
#   aliases     - alternate names accepted by get_setting_info()
#   category    - key into SETTING_CATEGORIES
#   required    - whether the option must be set before running
#   notes       - extra guidance shown in help
RSF_SETTINGS = {
    # ── Target Options ──────────────────────────────────────────────────────
    'target': {
        'description': 'Target IPv4 or IPv6 address of the device to test. '
                       'Can also be set to file:// path for batch targeting '
                       '(e.g. file:///tmp/targets.txt with one IP per line).',
        'input_type': 'ip',
        'examples': ['192.168.1.1', '10.0.0.1', 'file:///tmp/targets.txt'],
        'default': '',
        'aliases': ['TARGET', 'rhost'],
        'category': 'target',
        'required': True,
        'notes': 'Most RSF modules require a target. Batch mode via file:// '
                 'is supported by modules decorated with @multi.',
    },
    'port': {
        'description': 'Target port number for the service being tested. '
                       'Default depends on the module protocol (80 for HTTP, '
                       '21 for FTP, 22 for SSH, etc.).',
        'input_type': 'port',
        'examples': ['80', '443', '8080', '22'],
        'default': '',
        'aliases': ['PORT', 'rport'],
        'category': 'target',
        'required': False,
        'notes': 'Each module sets an appropriate default port. Only override '
                 'if the target runs on a non-standard port.',
    },
    'ssl': {
        'description': 'Enable SSL/TLS for the connection. Set to true for '
                       'HTTPS targets or services using encrypted transport.',
        'input_type': 'boolean',
        'examples': ['true', 'false'],
        'default': 'false',
        'aliases': ['SSL', 'use_ssl'],
        'category': 'connection',
        'required': False,
        'notes': 'Automatically set for modules targeting HTTPS services.',
    },

    # ── Authentication/Credential Options ───────────────────────────────────
    'threads': {
        'description': 'Number of threads for brute-force or scanning operations. '
                       'Higher values are faster but may trigger rate-limiting.',
        'input_type': 'integer',
        'examples': ['1', '4', '8', '16'],
        'default': '8',
        'aliases': ['THREADS'],
        'category': 'scan',
        'required': False,
        'notes': 'Default is typically 8. Reduce for slower targets or to '
                 'avoid detection. Increase for LAN testing.',
    },
    'usernames': {
        'description': 'Username or wordlist for credential testing. '
                       'Single value, comma-separated list, or file path.',
        'input_type': 'wordlist',
        'examples': ['admin', 'admin,root,user', 'file:///tmp/users.txt'],
        'default': 'admin',
        'aliases': ['USERNAMES', 'username'],
        'category': 'auth',
        'required': False,
        'notes': 'For brute-force modules. Use file:// prefix for wordlist files. '
                 'Default credential modules have built-in lists.',
    },
    'passwords': {
        'description': 'Password or wordlist for credential testing. '
                       'Single value, comma-separated list, or file path.',
        'input_type': 'wordlist',
        'examples': ['password', 'admin,password,1234', 'file:///tmp/pass.txt'],
        'default': '',
        'aliases': ['PASSWORDS', 'password'],
        'category': 'auth',
        'required': False,
        'notes': 'For brute-force modules. Default credential modules use '
                 'built-in vendor-specific password lists.',
    },
    'stop_on_success': {
        'description': 'Stop brute-force attack after finding the first valid '
                       'credential pair.',
        'input_type': 'boolean',
        'examples': ['true', 'false'],
        'default': 'true',
        'aliases': ['STOP_ON_SUCCESS'],
        'category': 'auth',
        'required': False,
        'notes': 'Set to false to enumerate all valid credentials.',
    },

    # ── Verbosity/Output Options ────────────────────────────────────────────
    'verbosity': {
        'description': 'Control output verbosity level. When true, modules '
                       'print detailed progress information.',
        'input_type': 'boolean',
        'examples': ['true', 'false'],
        'default': 'true',
        'aliases': ['VERBOSITY', 'verbose'],
        'category': 'output',
        'required': False,
        'notes': 'Disable for cleaner output during automated scanning.',
    },

    # ── Protocol-Specific Ports ─────────────────────────────────────────────
    'http_port': {
        'description': 'HTTP port for web-based exploits and scanners.',
        'input_type': 'port',
        'examples': ['80', '8080', '8443'],
        'default': '80',
        'aliases': ['HTTP_PORT'],
        'category': 'target',
        'required': False,
        'notes': 'Used by HTTP-based modules. Change for non-standard web ports.',
    },
    'ftp_port': {
        'description': 'FTP port for file transfer protocol modules.',
        'input_type': 'port',
        'examples': ['21', '2121'],
        'default': '21',
        'aliases': ['FTP_PORT'],
        'category': 'target',
        'required': False,
        'notes': 'Standard FTP port is 21.',
    },
    'ssh_port': {
        'description': 'SSH port for secure shell modules.',
        'input_type': 'port',
        'examples': ['22', '2222'],
        'default': '22',
        'aliases': ['SSH_PORT'],
        'category': 'target',
        'required': False,
        'notes': 'Standard SSH port is 22.',
    },
    'telnet_port': {
        'description': 'Telnet port for telnet-based modules.',
        'input_type': 'port',
        'examples': ['23', '2323'],
        'default': '23',
        'aliases': ['TELNET_PORT'],
        'category': 'target',
        'required': False,
        'notes': 'Standard Telnet port is 23. Many IoT devices use telnet.',
    },
    'snmp_port': {
        'description': 'SNMP port for SNMP-based modules.',
        'input_type': 'port',
        'examples': ['161'],
        'default': '161',
        'aliases': ['SNMP_PORT'],
        'category': 'target',
        'required': False,
        'notes': 'Standard SNMP port is 161.',
    },
    'snmp_community': {
        'description': 'SNMP community string for SNMP-based modules.',
        'input_type': 'string',
        'examples': ['public', 'private'],
        'default': 'public',
        'aliases': ['SNMP_COMMUNITY', 'community'],
        'category': 'auth',
        'required': False,
        'notes': 'Default community strings "public" and "private" are common '
                 'on unconfigured devices.',
    },

    # ── File/Path Options ───────────────────────────────────────────────────
    'filename': {
        'description': 'File path to read or write on the target device. '
                       'Used by path traversal and file disclosure modules.',
        'input_type': 'string',
        'examples': ['/etc/passwd', '/etc/shadow', '/etc/config/shadow'],
        'default': '/etc/shadow',
        'aliases': ['FILENAME', 'filepath'],
        'category': 'file',
        'required': False,
        'notes': 'Common targets: /etc/passwd, /etc/shadow for credential extraction.',
    },

    # ── Payload Options ─────────────────────────────────────────────────────
    'lhost': {
        'description': 'Local IP address for reverse connections (listener).',
        'input_type': 'ip',
        'examples': ['192.168.1.100', '10.0.0.50'],
        'default': '',
        'aliases': ['LHOST'],
        'category': 'payload',
        'required': False,
        'notes': 'Required for reverse shell payloads. Use your attacker IP.',
    },
    'lport': {
        'description': 'Local port for reverse connections (listener).',
        'input_type': 'port',
        'examples': ['4444', '5555', '8888'],
        'default': '5555',
        'aliases': ['LPORT'],
        'category': 'payload',
        'required': False,
        'notes': 'Required for reverse shell payloads.',
    },
    'rport': {
        'description': 'Remote port for bind shell connections.',
        'input_type': 'port',
        'examples': ['5555', '4444'],
        'default': '5555',
        'aliases': ['RPORT'],
        'category': 'payload',
        'required': False,
        'notes': 'Required for bind shell payloads.',
    },
    'encoder': {
        'description': 'Encoder to use for payload obfuscation.',
        'input_type': 'string',
        'examples': ['base64', 'xor'],
        'default': '',
        'aliases': ['ENCODER'],
        'category': 'payload',
        'required': False,
        'notes': 'Optional. Available encoders depend on payload architecture.',
    },
    'output': {
        'description': 'Output format for generated payloads.',
        'input_type': 'string',
        'examples': ['python', 'elf', 'c'],
        'default': 'python',
        'aliases': ['OUTPUT'],
        'category': 'payload',
        'required': False,
        'notes': 'Architecture-specific payloads support elf, c, and python output.',
    },

    # ── Vendor/Device Options ───────────────────────────────────────────────
    'vendor': {
        'description': 'Target device vendor for vendor-specific modules.',
        'input_type': 'string',
        'examples': ['dlink', 'cisco', 'netgear', 'tp-link'],
        'default': '',
        'aliases': ['VENDOR'],
        'category': 'target',
        'required': False,
        'notes': 'Used to filter modules by vendor.',
    },
}
|
||||
|
||||
|
||||
# ── Setting Categories ──────────────────────────────────────────────────────
|
||||
|
||||
# Grouping metadata for RSF_SETTINGS entries; keyed by each setting's
# 'category' field. Used to render grouped, colorized option listings.
SETTING_CATEGORIES = {
    'target': {
        'name': 'Target Options',
        'description': 'Target device addressing',
        'color': Colors.RED,
    },
    'connection': {
        'name': 'Connection Options',
        'description': 'Network connection parameters',
        'color': Colors.CYAN,
    },
    'auth': {
        'name': 'Authentication Options',
        'description': 'Credentials and authentication',
        'color': Colors.YELLOW,
    },
    'scan': {
        'name': 'Scan Options',
        'description': 'Scanning and threading parameters',
        'color': Colors.GREEN,
    },
    'output': {
        'name': 'Output Options',
        'description': 'Verbosity and output control',
        'color': Colors.WHITE,
    },
    'file': {
        'name': 'File Options',
        'description': 'File path parameters',
        'color': Colors.MAGENTA,
    },
    'payload': {
        'name': 'Payload Options',
        'description': 'Payload generation and delivery',
        'color': Colors.RED,
    },
}
|
||||
|
||||
|
||||
# ─── API Functions ──────────────────────────────────────────────────────────
|
||||
|
||||
def get_setting_info(name: str) -> dict:
    """Get full setting information by name.

    The primary setting key is checked first, then each setting's
    'aliases' list.

    Args:
        name: Setting name (case-insensitive)

    Returns:
        Setting dict or None if the name is unknown.
    """
    name_lower = name.lower()

    # Direct lookup by primary key.
    if name_lower in RSF_SETTINGS:
        return RSF_SETTINGS[name_lower]

    # Alias lookup; the dict keys themselves are not needed here, and the
    # generator avoids building a lowercase list per entry.
    for info in RSF_SETTINGS.values():
        if any(name_lower == alias.lower() for alias in info.get('aliases', [])):
            return info

    return None
|
||||
|
||||
|
||||
def get_setting_prompt(name: str, default=None, required: bool = False) -> str:
    """Get a formatted input prompt for a setting.

    Args:
        name: Setting name
        default: Default value to show; falls back to the setting's own
            declared default when None and the setting is known
        required: Whether the setting is required

    Returns:
        Formatted prompt string
    """
    info = get_setting_info(name)
    if info and default is None:
        default = info.get('default', '')

    # Fix: the "(required)" marker was previously rendered only for known
    # settings; it now appears for unknown setting names too. (Also removed
    # an unused first-sentence 'desc' computation.)
    req = f" {Colors.RED}(required){Colors.RESET}" if required else ""

    if default:
        return f"  {Colors.WHITE}{name}{Colors.RESET} [{default}]{req}: "
    return f"  {Colors.WHITE}{name}{Colors.RESET}{req}: "
|
||||
|
||||
|
||||
def format_setting_help(name: str, include_examples: bool = True,
                        include_notes: bool = True) -> str:
    """Get formatted help text for a setting.

    Args:
        name: Setting name
        include_examples: Include usage examples
        include_notes: Include additional notes

    Returns:
        Formatted, newline-joined help string (colorized).
    """
    info = get_setting_info(name)
    if info is None:
        return f"  {Colors.YELLOW}No help available for '{name}'{Colors.RESET}"

    # Header lines are always present; the rest are conditional.
    out = [
        f"  {Colors.BOLD}{Colors.WHITE}{name.upper()}{Colors.RESET}",
        f"  {info['description']}",
    ]

    if info.get('input_type'):
        out.append(f"  {Colors.DIM}Type: {info['input_type']}{Colors.RESET}")
    if info.get('default'):
        out.append(f"  {Colors.DIM}Default: {info['default']}{Colors.RESET}")
    if include_examples and info.get('examples'):
        out.append(f"  {Colors.DIM}Examples: {', '.join(info['examples'])}{Colors.RESET}")
    if include_notes and info.get('notes'):
        out.append(f"  {Colors.DIM}Note: {info['notes']}{Colors.RESET}")

    return '\n'.join(out)
|
||||
|
||||
|
||||
def validate_setting_value(name: str, value: str) -> tuple:
    """Validate a setting value against its declared input type.

    Args:
        name: Setting name
        value: Value to validate

    Returns:
        Tuple of (is_valid, error_message); error_message is '' when valid.
        Unknown settings and untyped ('string'/'wordlist') settings always
        pass validation.
    """
    info = get_setting_info(name)
    if not info:
        return True, ""  # Unknown settings pass validation

    input_type = info.get('input_type', 'string')

    if input_type == 'port':
        try:
            port = int(value)
        except ValueError:
            return False, "Port must be a number"
        if 0 <= port <= 65535:
            return True, ""
        return False, "Port must be between 0 and 65535"

    elif input_type == 'ip':
        # Allow file:// paths for batch targeting
        if value.startswith('file://'):
            return True, ""
        # Fix: use the stdlib for real IPv4/IPv6 validation. The previous
        # hand-rolled check accepted ANY string containing ':' as "IPv6".
        import ipaddress
        try:
            ipaddress.ip_address(value)
            return True, ""
        except ValueError:
            return False, "Expected IPv4 address, IPv6 address, or file:// path"

    elif input_type == 'boolean':
        if value.lower() in ('true', 'false', '1', '0', 'yes', 'no'):
            return True, ""
        return False, "Expected true/false"

    elif input_type == 'integer':
        try:
            int(value)
            return True, ""
        except ValueError:
            return False, "Expected an integer"

    # 'string', 'wordlist', and anything else: no constraint.
    return True, ""
|
||||
333
core/rules.py
Normal file
333
core/rules.py
Normal file
@@ -0,0 +1,333 @@
|
||||
"""
|
||||
AUTARCH Automation Rules Engine
|
||||
Condition-action rules for autonomous threat response.
|
||||
|
||||
Rules are JSON-serializable and stored in data/automation_rules.json.
|
||||
The engine evaluates conditions against a threat context dict and returns
|
||||
matching rules with resolved action parameters.
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
import ipaddress
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import List, Dict, Any, Optional, Tuple
|
||||
from dataclasses import dataclass, field, asdict
|
||||
|
||||
_logger = logging.getLogger('autarch.rules')
|
||||
|
||||
|
||||
@dataclass
class Rule:
    """A single automation rule.

    JSON-serializable condition/action pair evaluated by RulesEngine
    against a threat-context dict. Conditions are AND-combined.
    """
    id: str                                                # short unique identifier
    name: str
    enabled: bool = True
    priority: int = 50                                     # 0=highest, 100=lowest
    conditions: List[Dict] = field(default_factory=list)   # AND-combined
    actions: List[Dict] = field(default_factory=list)
    cooldown_seconds: int = 60                             # min seconds between triggers
    last_triggered: Optional[str] = None                   # ISO timestamp
    created: Optional[str] = None                          # ISO timestamp, set on add
    description: str = ''

    def to_dict(self) -> dict:
        """Serialize to a plain JSON-compatible dict."""
        return asdict(self)

    @classmethod
    def from_dict(cls, d: dict) -> 'Rule':
        """Build a Rule from a dict, filling defaults for any missing keys.

        A missing 'id' gets a fresh 8-character UUID prefix.
        """
        return cls(
            id=d.get('id', str(uuid.uuid4())[:8]),
            name=d.get('name', 'Untitled'),
            enabled=d.get('enabled', True),
            priority=d.get('priority', 50),
            conditions=d.get('conditions', []),
            actions=d.get('actions', []),
            cooldown_seconds=d.get('cooldown_seconds', 60),
            last_triggered=d.get('last_triggered'),
            created=d.get('created'),
            description=d.get('description', ''),
        )
|
||||
|
||||
|
||||
class RulesEngine:
    """Evaluates automation rules against a threat context.

    Rules persist as JSON at RULES_PATH and are kept sorted by ascending
    priority (0 = highest). Every mutating operation saves immediately.
    """

    RULES_PATH = Path(__file__).parent.parent / 'data' / 'automation_rules.json'

    # Recognized condition 'type' values (dispatched in _evaluate_condition).
    CONDITION_TYPES = {
        'threat_score_above', 'threat_score_below', 'threat_level_is',
        'port_scan_detected', 'ddos_detected', 'ddos_attack_type',
        'connection_from_ip', 'connection_count_above',
        'new_listening_port', 'bandwidth_rx_above_mbps',
        'arp_spoof_detected', 'schedule', 'always',
    }

    # Recognized action 'type' values. NOTE(review): actions are resolved
    # here but presumably executed by another component — not enforced here.
    ACTION_TYPES = {
        'block_ip', 'unblock_ip', 'rate_limit_ip', 'block_port',
        'kill_process', 'alert', 'log_event', 'run_shell',
        'run_module', 'counter_scan', 'escalate_to_lam',
    }

    def __init__(self):
        self._rules: List[Rule] = []
        self._load()

    def _load(self):
        """Load rules from JSON file; a missing or corrupt file yields []."""
        if not self.RULES_PATH.exists():
            self._rules = []
            return
        try:
            data = json.loads(self.RULES_PATH.read_text(encoding='utf-8'))
            self._rules = [Rule.from_dict(r) for r in data.get('rules', [])]
            _logger.info(f"[Rules] Loaded {len(self._rules)} rules")
        except Exception as e:
            # Best-effort: log and continue with an empty rule set.
            _logger.error(f"[Rules] Failed to load rules: {e}")
            self._rules = []

    def save(self):
        """Save rules to JSON file, creating the data directory if needed."""
        self.RULES_PATH.parent.mkdir(parents=True, exist_ok=True)
        data = {
            'version': 1,
            'rules': [r.to_dict() for r in self._rules],
        }
        self.RULES_PATH.write_text(json.dumps(data, indent=2), encoding='utf-8')

    def add_rule(self, rule: Rule) -> Rule:
        """Add a rule, stamp its creation time, re-sort, persist, return it."""
        if not rule.created:
            rule.created = datetime.now().isoformat()
        self._rules.append(rule)
        self._rules.sort(key=lambda r: r.priority)
        self.save()
        return rule

    def update_rule(self, rule_id: str, updates: dict) -> Optional[Rule]:
        """Apply field updates to the rule with the given id.

        The 'id' field and unknown attribute names are ignored.

        Returns:
            The updated Rule, or None when no rule matches.
        """
        for rule in self._rules:
            if rule.id == rule_id:
                for key, value in updates.items():
                    if hasattr(rule, key) and key != 'id':
                        setattr(rule, key, value)
                self._rules.sort(key=lambda r: r.priority)
                self.save()
                return rule
        return None

    def delete_rule(self, rule_id: str) -> bool:
        """Delete a rule by id; returns True if a rule was removed."""
        before = len(self._rules)
        self._rules = [r for r in self._rules if r.id != rule_id]
        if len(self._rules) < before:
            self.save()
            return True
        return False

    def get_rule(self, rule_id: str) -> Optional[Rule]:
        """Return the rule with the given id, or None."""
        for rule in self._rules:
            if rule.id == rule_id:
                return rule
        return None

    def get_all_rules(self) -> List[Rule]:
        """Return a shallow copy of the rule list (priority order)."""
        return list(self._rules)

    def evaluate(self, context: Dict[str, Any]) -> List[Tuple[Rule, List[Dict]]]:
        """Evaluate all enabled rules against a threat context.

        Args:
            context: Dict with keys from ThreatMonitor / AutonomyDaemon:
                - threat_score: {'score': int, 'level': str, 'details': [...]}
                - connection_count: int
                - connections: [...]
                - ddos: {'under_attack': bool, 'attack_type': str, ...}
                - new_ports: [{'port': int, 'process': str}, ...]
                - arp_alerts: [...]
                - bandwidth: {'rx_mbps': float, 'tx_mbps': float}
                - scan_indicators: int
                - timestamp: str

        Returns:
            List of (Rule, resolved_actions) for rules that match and aren't in cooldown.
        """
        matches = []
        now = datetime.now()

        for rule in self._rules:
            if not rule.enabled:
                continue

            # Cooldown: skip rules that fired too recently.
            if rule.last_triggered:
                try:
                    last = datetime.fromisoformat(rule.last_triggered)
                    if (now - last).total_seconds() < rule.cooldown_seconds:
                        continue
                except (ValueError, TypeError):
                    # Unparseable timestamp: treat as never triggered.
                    pass

            # A rule with no conditions never matches; an always-firing rule
            # must use the explicit 'always' condition.
            if not rule.conditions:
                continue

            # Evaluate all conditions (AND logic)
            all_match = all(
                self._evaluate_condition(cond, context)
                for cond in rule.conditions
            )

            if all_match:
                # Resolve $variable placeholders in each action's parameters.
                resolved = [self._resolve_variables(a, context) for a in rule.actions]
                matches.append((rule, resolved))

                # Mark triggered so the cooldown applies from this cycle.
                rule.last_triggered = now.isoformat()

        # Persist updated trigger times only if something fired.
        if matches:
            self.save()

        return matches

    def _evaluate_condition(self, condition: dict, context: dict) -> bool:
        """Evaluate a single condition dict ({'type': ..., 'value': ...})
        against the context. Unknown condition types fail closed (False)."""
        ctype = condition.get('type', '')
        value = condition.get('value')

        if ctype == 'threat_score_above':
            return context.get('threat_score', {}).get('score', 0) > (value or 0)

        elif ctype == 'threat_score_below':
            return context.get('threat_score', {}).get('score', 0) < (value or 100)

        elif ctype == 'threat_level_is':
            return context.get('threat_score', {}).get('level', 'LOW') == (value or 'HIGH')

        elif ctype == 'port_scan_detected':
            return context.get('scan_indicators', 0) > 0

        elif ctype == 'ddos_detected':
            return context.get('ddos', {}).get('under_attack', False)

        elif ctype == 'ddos_attack_type':
            return context.get('ddos', {}).get('attack_type', '') == (value or '')

        elif ctype == 'connection_from_ip':
            return self._check_ip_match(value, context.get('connections', []))

        elif ctype == 'connection_count_above':
            return context.get('connection_count', 0) > (value or 0)

        elif ctype == 'new_listening_port':
            return len(context.get('new_ports', [])) > 0

        elif ctype == 'bandwidth_rx_above_mbps':
            return context.get('bandwidth', {}).get('rx_mbps', 0) > (value or 0)

        elif ctype == 'arp_spoof_detected':
            return len(context.get('arp_alerts', [])) > 0

        elif ctype == 'schedule':
            return self._check_cron(condition.get('cron', ''))

        elif ctype == 'always':
            return True

        _logger.warning(f"[Rules] Unknown condition type: {ctype}")
        return False

    def _check_ip_match(self, pattern: str, connections: list) -> bool:
        """Check if any connection's remote IP matches a pattern (IP or CIDR).

        Loopback/wildcard placeholder addresses are skipped. A pattern that
        is not a valid IP/CIDR falls back to exact string comparison.
        """
        if not pattern:
            return False
        try:
            network = ipaddress.ip_network(pattern, strict=False)
            for conn in connections:
                remote = conn.get('remote_addr', '')
                if remote and remote not in ('0.0.0.0', '::', '127.0.0.1', '::1', '*'):
                    try:
                        if ipaddress.ip_address(remote) in network:
                            return True
                    except ValueError:
                        # Unparseable remote address: ignore this connection.
                        continue
        except ValueError:
            # Not a valid IP/CIDR, try exact match
            return any(conn.get('remote_addr') == pattern for conn in connections)
        return False

    def _check_cron(self, cron_expr: str) -> bool:
        """Minimal 5-field cron matcher: minute hour day month weekday.

        Supports * and */N. Does not support ranges or lists.
        """
        if not cron_expr:
            return False

        parts = cron_expr.strip().split()
        if len(parts) != 5:
            return False

        now = datetime.now()
        # isoweekday() % 7 maps Sunday to 0, matching cron's weekday field.
        current = [now.minute, now.hour, now.day, now.month, now.isoweekday() % 7]

        for field_val, pattern in zip(current, parts):
            if pattern == '*':
                continue
            if pattern.startswith('*/'):
                try:
                    step = int(pattern[2:])
                    if step > 0 and field_val % step != 0:
                        return False
                except ValueError:
                    return False
            else:
                try:
                    if field_val != int(pattern):
                        return False
                except ValueError:
                    return False

        return True

    def _resolve_variables(self, action: dict, context: dict) -> dict:
        """Replace $variable placeholders in action parameters with context values."""
        resolved = {}

        # Build variable map from context
        variables = {
            '$threat_score': str(context.get('threat_score', {}).get('score', 0)),
            '$threat_level': context.get('threat_score', {}).get('level', 'LOW'),
        }

        # $source_ip = top talker (remote IP with the most connections),
        # excluding loopback/wildcard placeholders.
        connections = context.get('connections', [])
        if connections:
            ip_counts = {}
            for c in connections:
                rip = c.get('remote_addr', '')
                if rip and rip not in ('0.0.0.0', '::', '127.0.0.1', '::1', '*'):
                    ip_counts[rip] = ip_counts.get(rip, 0) + 1
            if ip_counts:
                variables['$source_ip'] = max(ip_counts, key=ip_counts.get)

        # First newly-observed listening port, if any.
        new_ports = context.get('new_ports', [])
        if new_ports:
            variables['$new_port'] = str(new_ports[0].get('port', ''))
            variables['$suspicious_pid'] = str(new_ports[0].get('pid', ''))

        # DDoS attack type
        ddos = context.get('ddos', {})
        if ddos:
            variables['$attack_type'] = ddos.get('attack_type', 'unknown')

        # Substitute into every string-valued parameter; non-string values
        # are copied through unchanged.
        for key, val in action.items():
            if isinstance(val, str):
                for var_name, var_val in variables.items():
                    val = val.replace(var_name, var_val)
            resolved[key] = val

        return resolved
|
||||
712
core/sites_db.py
Normal file
712
core/sites_db.py
Normal file
@@ -0,0 +1,712 @@
|
||||
"""
|
||||
AUTARCH Sites Database Module
|
||||
Unified username enumeration database from multiple OSINT sources
|
||||
|
||||
Database: dh_sites.db - Master database with detection patterns
|
||||
"""
|
||||
|
||||
import os
|
||||
import json
|
||||
import sqlite3
|
||||
import threading
|
||||
from pathlib import Path
|
||||
from typing import Optional, List, Dict, Any, Tuple
|
||||
from datetime import datetime
|
||||
|
||||
from .banner import Colors
|
||||
from .config import get_config
|
||||
|
||||
|
||||
class SitesDatabase:
|
||||
"""Unified OSINT sites database with SQLite storage."""
|
||||
|
||||
# Default database is dh_sites.db (the new categorized database with detection fields)
|
||||
DEFAULT_DB = "dh_sites.db"
|
||||
|
||||
# Detection method mapping
|
||||
DETECTION_METHODS = {
|
||||
'status_code': 'status',
|
||||
'message': 'content',
|
||||
'response_url': 'redirect',
|
||||
'redirection': 'redirect',
|
||||
}
|
||||
|
||||
    def __init__(self, db_path: str = None):
        """Initialize sites database.

        Args:
            db_path: Path to SQLite database. Defaults to data/sites/dh_sites.db
        """
        if db_path is None:
            # Local import — presumably avoids a circular import at module
            # load time; confirm against core.paths.
            from core.paths import get_data_dir
            self.data_dir = get_data_dir() / "sites"
            self.db_path = self.data_dir / self.DEFAULT_DB
        else:
            self.db_path = Path(db_path)
            self.data_dir = self.db_path.parent

        # Ensure the containing directory exists before any connection opens.
        self.data_dir.mkdir(parents=True, exist_ok=True)
        self._conn = None              # lazily created in _get_connection()
        self._lock = threading.Lock()  # serializes all DB access
|
||||
|
||||
def _get_connection(self) -> sqlite3.Connection:
|
||||
"""Get thread-safe database connection."""
|
||||
if self._conn is None:
|
||||
self._conn = sqlite3.connect(str(self.db_path), check_same_thread=False)
|
||||
self._conn.row_factory = sqlite3.Row
|
||||
return self._conn
|
||||
|
||||
    def get_stats(self) -> Dict[str, Any]:
        """Get database statistics.

        Returns:
            Dict with the db path/size, aggregate site counts and
            per-source / per-category / per-error-type breakdowns.
            Counts stay at their zero/empty defaults if any query fails
            (e.g. the sites table has not been created yet).
        """
        with self._lock:
            conn = self._get_connection()
            cursor = conn.cursor()

            # Pre-populated defaults; returned as-is on query failure.
            stats = {
                'db_path': str(self.db_path),
                'db_size_mb': round(self.db_path.stat().st_size / 1024 / 1024, 2) if self.db_path.exists() else 0,
                'total_sites': 0,
                'enabled_sites': 0,
                'nsfw_sites': 0,
                'with_detection': 0,
                'by_source': {},
                'by_category': {},
                'by_error_type': {},
            }

            try:
                cursor.execute("SELECT COUNT(*) FROM sites")
                stats['total_sites'] = cursor.fetchone()[0]

                cursor.execute("SELECT COUNT(*) FROM sites WHERE enabled = 1")
                stats['enabled_sites'] = cursor.fetchone()[0]

                cursor.execute("SELECT COUNT(*) FROM sites WHERE nsfw = 1")
                stats['nsfw_sites'] = cursor.fetchone()[0]

                # A non-NULL error_type is treated as "has detection config".
                cursor.execute("SELECT COUNT(*) FROM sites WHERE error_type IS NOT NULL")
                stats['with_detection'] = cursor.fetchone()[0]

                cursor.execute("SELECT source, COUNT(*) FROM sites GROUP BY source ORDER BY COUNT(*) DESC")
                stats['by_source'] = {row[0]: row[1] for row in cursor.fetchall()}

                cursor.execute("SELECT category, COUNT(*) FROM sites GROUP BY category ORDER BY COUNT(*) DESC")
                stats['by_category'] = {row[0]: row[1] for row in cursor.fetchall()}

                cursor.execute("SELECT error_type, COUNT(*) FROM sites WHERE error_type IS NOT NULL GROUP BY error_type ORDER BY COUNT(*) DESC")
                stats['by_error_type'] = {row[0]: row[1] for row in cursor.fetchall()}

            except sqlite3.Error:
                # Deliberate best-effort: a missing/partial schema yields the
                # zeroed defaults instead of raising.
                pass

            return stats
|
||||
|
||||
# =========================================================================
|
||||
# QUERY METHODS
|
||||
# =========================================================================
|
||||
|
||||
def get_sites(
|
||||
self,
|
||||
category: str = None,
|
||||
include_nsfw: bool = False,
|
||||
enabled_only: bool = True,
|
||||
source: str = None,
|
||||
limit: int = None,
|
||||
order_by: str = 'name'
|
||||
) -> List[Dict]:
|
||||
"""Get sites from database.
|
||||
|
||||
Args:
|
||||
category: Filter by category.
|
||||
include_nsfw: Include NSFW sites.
|
||||
enabled_only: Only return enabled sites.
|
||||
source: Filter by source.
|
||||
limit: Maximum number of results.
|
||||
order_by: 'name' or 'category'.
|
||||
|
||||
Returns:
|
||||
List of site dictionaries.
|
||||
"""
|
||||
with self._lock:
|
||||
conn = self._get_connection()
|
||||
cursor = conn.cursor()
|
||||
|
||||
query = "SELECT * FROM sites WHERE 1=1"
|
||||
params = []
|
||||
|
||||
if category:
|
||||
query += " AND category = ?"
|
||||
params.append(category)
|
||||
|
||||
if not include_nsfw:
|
||||
query += " AND nsfw = 0"
|
||||
|
||||
if enabled_only:
|
||||
query += " AND enabled = 1"
|
||||
|
||||
if source:
|
||||
query += " AND source = ?"
|
||||
params.append(source)
|
||||
|
||||
query += f" ORDER BY {order_by} COLLATE NOCASE ASC"
|
||||
|
||||
if limit:
|
||||
query += f" LIMIT {limit}"
|
||||
|
||||
cursor.execute(query, params)
|
||||
rows = cursor.fetchall()
|
||||
|
||||
return [dict(row) for row in rows]
|
||||
|
||||
def get_site(self, name: str) -> Optional[Dict]:
|
||||
"""Get a specific site by name.
|
||||
|
||||
Args:
|
||||
name: Site name.
|
||||
|
||||
Returns:
|
||||
Site dictionary or None.
|
||||
"""
|
||||
with self._lock:
|
||||
conn = self._get_connection()
|
||||
cursor = conn.cursor()
|
||||
|
||||
cursor.execute("SELECT * FROM sites WHERE name = ? COLLATE NOCASE", (name,))
|
||||
row = cursor.fetchone()
|
||||
|
||||
return dict(row) if row else None
|
||||
|
||||
def search_sites(self, query: str, include_nsfw: bool = False, limit: int = 100) -> List[Dict]:
|
||||
"""Search sites by name.
|
||||
|
||||
Args:
|
||||
query: Search query.
|
||||
include_nsfw: Include NSFW sites.
|
||||
limit: Maximum results.
|
||||
|
||||
Returns:
|
||||
List of matching sites.
|
||||
"""
|
||||
with self._lock:
|
||||
conn = self._get_connection()
|
||||
cursor = conn.cursor()
|
||||
|
||||
sql = "SELECT * FROM sites WHERE name LIKE ? AND enabled = 1"
|
||||
params = [f"%{query}%"]
|
||||
|
||||
if not include_nsfw:
|
||||
sql += " AND nsfw = 0"
|
||||
|
||||
sql += f" ORDER BY name COLLATE NOCASE ASC LIMIT {limit}"
|
||||
|
||||
cursor.execute(sql, params)
|
||||
return [dict(row) for row in cursor.fetchall()]
|
||||
|
||||
def get_categories(self) -> List[Tuple[str, int]]:
|
||||
"""Get all categories with site counts.
|
||||
|
||||
Returns:
|
||||
List of (category, count) tuples.
|
||||
"""
|
||||
with self._lock:
|
||||
conn = self._get_connection()
|
||||
cursor = conn.cursor()
|
||||
|
||||
cursor.execute("""
|
||||
SELECT category, COUNT(*) as count
|
||||
FROM sites
|
||||
WHERE enabled = 1
|
||||
GROUP BY category
|
||||
ORDER BY count DESC
|
||||
""")
|
||||
|
||||
return [(row[0], row[1]) for row in cursor.fetchall()]
|
||||
|
||||
def get_sites_for_scan(
|
||||
self,
|
||||
categories: List[str] = None,
|
||||
include_nsfw: bool = False,
|
||||
max_sites: int = 500,
|
||||
sort_alphabetically: bool = True
|
||||
) -> List[Dict]:
|
||||
"""Get sites optimized for username scanning with detection patterns.
|
||||
|
||||
Args:
|
||||
categories: List of categories to include.
|
||||
include_nsfw: Include NSFW sites.
|
||||
max_sites: Maximum number of sites.
|
||||
sort_alphabetically: Sort by name (True) or by category (False).
|
||||
|
||||
Returns:
|
||||
List of sites ready for scanning with detection info.
|
||||
"""
|
||||
with self._lock:
|
||||
conn = self._get_connection()
|
||||
cursor = conn.cursor()
|
||||
|
||||
query = """SELECT name, url_template, category, source, nsfw,
|
||||
error_type, error_code, error_string, match_code, match_string
|
||||
FROM sites WHERE enabled = 1"""
|
||||
params = []
|
||||
|
||||
if categories:
|
||||
placeholders = ','.join('?' * len(categories))
|
||||
query += f" AND category IN ({placeholders})"
|
||||
params.extend(categories)
|
||||
|
||||
if not include_nsfw:
|
||||
query += " AND nsfw = 0"
|
||||
|
||||
# Sort order
|
||||
if sort_alphabetically:
|
||||
query += " ORDER BY name COLLATE NOCASE ASC"
|
||||
else:
|
||||
query += " ORDER BY category ASC, name COLLATE NOCASE ASC"
|
||||
|
||||
query += f" LIMIT {max_sites}"
|
||||
|
||||
cursor.execute(query, params)
|
||||
rows = cursor.fetchall()
|
||||
|
||||
# Format for scanning with detection info
|
||||
sites = []
|
||||
for row in rows:
|
||||
name, url, category, source, nsfw, error_type, error_code, error_string, match_code, match_string = row
|
||||
|
||||
# Map error_type to detection method
|
||||
method = self.DETECTION_METHODS.get(error_type, 'status') if error_type else 'status'
|
||||
|
||||
sites.append({
|
||||
'name': name,
|
||||
'url': url,
|
||||
'category': category,
|
||||
'source': source,
|
||||
'nsfw': bool(nsfw),
|
||||
# Detection fields
|
||||
'method': method,
|
||||
'error_type': error_type,
|
||||
'error_code': error_code, # HTTP code when NOT found (e.g., 404)
|
||||
'error_string': error_string, # String when NOT found
|
||||
'match_code': match_code, # HTTP code when found (e.g., 200)
|
||||
'match_string': match_string, # String when found
|
||||
})
|
||||
|
||||
return sites
|
||||
|
||||
def get_site_by_url(self, url_template: str) -> Optional[Dict]:
|
||||
"""Get a site by its URL template.
|
||||
|
||||
Args:
|
||||
url_template: URL template with {} placeholder.
|
||||
|
||||
Returns:
|
||||
Site dictionary or None.
|
||||
"""
|
||||
with self._lock:
|
||||
conn = self._get_connection()
|
||||
cursor = conn.cursor()
|
||||
|
||||
cursor.execute("SELECT * FROM sites WHERE url_template = ?", (url_template,))
|
||||
row = cursor.fetchone()
|
||||
|
||||
return dict(row) if row else None
|
||||
|
||||
def toggle_site(self, name: str, enabled: bool) -> bool:
|
||||
"""Enable or disable a site.
|
||||
|
||||
Args:
|
||||
name: Site name.
|
||||
enabled: Enable (True) or disable (False).
|
||||
|
||||
Returns:
|
||||
True if successful.
|
||||
"""
|
||||
with self._lock:
|
||||
conn = self._get_connection()
|
||||
cursor = conn.cursor()
|
||||
|
||||
cursor.execute(
|
||||
"UPDATE sites SET enabled = ? WHERE name = ? COLLATE NOCASE",
|
||||
(1 if enabled else 0, name)
|
||||
)
|
||||
conn.commit()
|
||||
|
||||
return cursor.rowcount > 0
|
||||
|
||||
def add_site(
|
||||
self,
|
||||
name: str,
|
||||
url_template: str,
|
||||
category: str = 'other',
|
||||
source: str = 'custom',
|
||||
nsfw: bool = False,
|
||||
error_type: str = 'status_code',
|
||||
error_code: int = None,
|
||||
error_string: str = None,
|
||||
match_code: int = None,
|
||||
match_string: str = None,
|
||||
) -> bool:
|
||||
"""Add a custom site to the database.
|
||||
|
||||
Args:
|
||||
name: Site name.
|
||||
url_template: URL with {} placeholder for username.
|
||||
category: Site category.
|
||||
source: Source identifier.
|
||||
nsfw: Whether site is NSFW.
|
||||
error_type: Detection type (status_code, message, etc).
|
||||
error_code: HTTP status when user NOT found.
|
||||
error_string: String when user NOT found.
|
||||
match_code: HTTP status when user found.
|
||||
match_string: String when user found.
|
||||
|
||||
Returns:
|
||||
True if successful.
|
||||
"""
|
||||
with self._lock:
|
||||
conn = self._get_connection()
|
||||
cursor = conn.cursor()
|
||||
|
||||
try:
|
||||
cursor.execute("""
|
||||
INSERT OR REPLACE INTO sites
|
||||
(name, url_template, category, source, nsfw, enabled,
|
||||
error_type, error_code, error_string, match_code, match_string)
|
||||
VALUES (?, ?, ?, ?, ?, 1, ?, ?, ?, ?, ?)
|
||||
""", (
|
||||
name,
|
||||
url_template,
|
||||
category,
|
||||
source,
|
||||
1 if nsfw else 0,
|
||||
error_type,
|
||||
error_code,
|
||||
error_string,
|
||||
match_code,
|
||||
match_string,
|
||||
))
|
||||
conn.commit()
|
||||
return True
|
||||
except Exception:
|
||||
return False
|
||||
|
||||
def update_detection(
|
||||
self,
|
||||
name: str,
|
||||
error_type: str = None,
|
||||
error_code: int = None,
|
||||
error_string: str = None,
|
||||
match_code: int = None,
|
||||
match_string: str = None,
|
||||
) -> bool:
|
||||
"""Update detection settings for a site.
|
||||
|
||||
Args:
|
||||
name: Site name.
|
||||
error_type: Detection type.
|
||||
error_code: HTTP status when NOT found.
|
||||
error_string: String when NOT found.
|
||||
match_code: HTTP status when found.
|
||||
match_string: String when found.
|
||||
|
||||
Returns:
|
||||
True if successful.
|
||||
"""
|
||||
with self._lock:
|
||||
conn = self._get_connection()
|
||||
cursor = conn.cursor()
|
||||
|
||||
updates = []
|
||||
params = []
|
||||
|
||||
if error_type is not None:
|
||||
updates.append("error_type = ?")
|
||||
params.append(error_type)
|
||||
if error_code is not None:
|
||||
updates.append("error_code = ?")
|
||||
params.append(error_code)
|
||||
if error_string is not None:
|
||||
updates.append("error_string = ?")
|
||||
params.append(error_string)
|
||||
if match_code is not None:
|
||||
updates.append("match_code = ?")
|
||||
params.append(match_code)
|
||||
if match_string is not None:
|
||||
updates.append("match_string = ?")
|
||||
params.append(match_string)
|
||||
|
||||
if not updates:
|
||||
return False
|
||||
|
||||
params.append(name)
|
||||
query = f"UPDATE sites SET {', '.join(updates)} WHERE name = ? COLLATE NOCASE"
|
||||
|
||||
cursor.execute(query, params)
|
||||
conn.commit()
|
||||
|
||||
return cursor.rowcount > 0
|
||||
|
||||
def get_sites_without_detection(self, limit: int = 100) -> List[Dict]:
|
||||
"""Get sites that don't have detection patterns configured.
|
||||
|
||||
Args:
|
||||
limit: Maximum results.
|
||||
|
||||
Returns:
|
||||
List of sites without detection info.
|
||||
"""
|
||||
with self._lock:
|
||||
conn = self._get_connection()
|
||||
cursor = conn.cursor()
|
||||
|
||||
cursor.execute("""
|
||||
SELECT * FROM sites
|
||||
WHERE enabled = 1
|
||||
AND (error_string IS NULL OR error_string = '')
|
||||
AND (match_string IS NULL OR match_string = '')
|
||||
ORDER BY name COLLATE NOCASE ASC
|
||||
LIMIT ?
|
||||
""", (limit,))
|
||||
|
||||
return [dict(row) for row in cursor.fetchall()]
|
||||
|
||||
def get_detection_coverage(self) -> Dict[str, Any]:
|
||||
"""Get statistics on detection pattern coverage.
|
||||
|
||||
Returns:
|
||||
Dictionary with coverage statistics.
|
||||
"""
|
||||
with self._lock:
|
||||
conn = self._get_connection()
|
||||
cursor = conn.cursor()
|
||||
|
||||
stats = {}
|
||||
|
||||
cursor.execute("SELECT COUNT(*) FROM sites WHERE enabled = 1")
|
||||
total = cursor.fetchone()[0]
|
||||
stats['total_enabled'] = total
|
||||
|
||||
cursor.execute("SELECT COUNT(*) FROM sites WHERE enabled = 1 AND error_type IS NOT NULL")
|
||||
stats['with_error_type'] = cursor.fetchone()[0]
|
||||
|
||||
cursor.execute("SELECT COUNT(*) FROM sites WHERE enabled = 1 AND error_string IS NOT NULL AND error_string != ''")
|
||||
stats['with_error_string'] = cursor.fetchone()[0]
|
||||
|
||||
cursor.execute("SELECT COUNT(*) FROM sites WHERE enabled = 1 AND match_string IS NOT NULL AND match_string != ''")
|
||||
stats['with_match_string'] = cursor.fetchone()[0]
|
||||
|
||||
cursor.execute("SELECT COUNT(*) FROM sites WHERE enabled = 1 AND error_code IS NOT NULL")
|
||||
stats['with_error_code'] = cursor.fetchone()[0]
|
||||
|
||||
cursor.execute("SELECT COUNT(*) FROM sites WHERE enabled = 1 AND match_code IS NOT NULL")
|
||||
stats['with_match_code'] = cursor.fetchone()[0]
|
||||
|
||||
# Calculate percentages
|
||||
if total > 0:
|
||||
stats['pct_error_type'] = round(stats['with_error_type'] * 100 / total, 1)
|
||||
stats['pct_error_string'] = round(stats['with_error_string'] * 100 / total, 1)
|
||||
stats['pct_match_string'] = round(stats['with_match_string'] * 100 / total, 1)
|
||||
|
||||
return stats
|
||||
|
||||
def get_disabled_count(self) -> int:
|
||||
"""Get count of disabled sites."""
|
||||
with self._lock:
|
||||
conn = self._get_connection()
|
||||
cursor = conn.cursor()
|
||||
cursor.execute("SELECT COUNT(*) FROM sites WHERE enabled = 0")
|
||||
return cursor.fetchone()[0]
|
||||
|
||||
def enable_all_sites(self) -> int:
|
||||
"""Re-enable all disabled sites."""
|
||||
with self._lock:
|
||||
conn = self._get_connection()
|
||||
cursor = conn.cursor()
|
||||
cursor.execute("UPDATE sites SET enabled = 1 WHERE enabled = 0")
|
||||
count = cursor.rowcount
|
||||
conn.commit()
|
||||
return count
|
||||
|
||||
def disable_category(self, category: str) -> int:
|
||||
"""Disable all sites in a category.
|
||||
|
||||
Args:
|
||||
category: Category to disable.
|
||||
|
||||
Returns:
|
||||
Number of sites disabled.
|
||||
"""
|
||||
with self._lock:
|
||||
conn = self._get_connection()
|
||||
cursor = conn.cursor()
|
||||
cursor.execute("UPDATE sites SET enabled = 0 WHERE category = ? AND enabled = 1", (category,))
|
||||
count = cursor.rowcount
|
||||
conn.commit()
|
||||
return count
|
||||
|
||||
def enable_category(self, category: str) -> int:
|
||||
"""Enable all sites in a category.
|
||||
|
||||
Args:
|
||||
category: Category to enable.
|
||||
|
||||
Returns:
|
||||
Number of sites enabled.
|
||||
"""
|
||||
with self._lock:
|
||||
conn = self._get_connection()
|
||||
cursor = conn.cursor()
|
||||
cursor.execute("UPDATE sites SET enabled = 1 WHERE category = ? AND enabled = 0", (category,))
|
||||
count = cursor.rowcount
|
||||
conn.commit()
|
||||
return count
|
||||
|
||||
def load_from_json(self, json_path: str = None) -> Dict[str, int]:
|
||||
"""Load/reload sites from the master dh.json file.
|
||||
|
||||
Args:
|
||||
json_path: Path to JSON file. Defaults to data/sites/dh.json
|
||||
|
||||
Returns:
|
||||
Statistics dict with import counts.
|
||||
"""
|
||||
if json_path is None:
|
||||
json_path = self.data_dir / "dh.json"
|
||||
else:
|
||||
json_path = Path(json_path)
|
||||
|
||||
stats = {'total': 0, 'new': 0, 'updated': 0, 'errors': 0}
|
||||
|
||||
if not json_path.exists():
|
||||
print(f"{Colors.RED}[X] JSON file not found: {json_path}{Colors.RESET}")
|
||||
return stats
|
||||
|
||||
print(f"{Colors.CYAN}[*] Loading sites from {json_path}...{Colors.RESET}")
|
||||
|
||||
try:
|
||||
with open(json_path, 'r') as f:
|
||||
data = json.load(f)
|
||||
|
||||
sites = data.get('sites', [])
|
||||
stats['total'] = len(sites)
|
||||
|
||||
with self._lock:
|
||||
conn = self._get_connection()
|
||||
cursor = conn.cursor()
|
||||
|
||||
for site in sites:
|
||||
try:
|
||||
cursor.execute("""
|
||||
INSERT OR REPLACE INTO sites
|
||||
(name, url_template, category, source, nsfw, enabled,
|
||||
error_type, error_code, error_string, match_code, match_string)
|
||||
VALUES (?, ?, ?, ?, ?, 1, ?, ?, ?, ?, ?)
|
||||
""", (
|
||||
site['name'],
|
||||
site['url'],
|
||||
site.get('category', 'other'),
|
||||
site.get('source', 'dh'),
|
||||
1 if site.get('nsfw') else 0,
|
||||
site.get('error_type'),
|
||||
site.get('error_code'),
|
||||
site.get('error_string'),
|
||||
site.get('match_code'),
|
||||
site.get('match_string'),
|
||||
))
|
||||
stats['new'] += 1
|
||||
except Exception as e:
|
||||
stats['errors'] += 1
|
||||
|
||||
conn.commit()
|
||||
|
||||
print(f"{Colors.GREEN}[+] Loaded {stats['new']} sites from JSON{Colors.RESET}")
|
||||
|
||||
except Exception as e:
|
||||
print(f"{Colors.RED}[X] Error loading JSON: {e}{Colors.RESET}")
|
||||
|
||||
return stats
|
||||
|
||||
def export_to_json(self, json_path: str = None) -> bool:
|
||||
"""Export database to JSON format.
|
||||
|
||||
Args:
|
||||
json_path: Output path. Defaults to data/sites/dh_export.json
|
||||
|
||||
Returns:
|
||||
True if successful.
|
||||
"""
|
||||
if json_path is None:
|
||||
json_path = self.data_dir / "dh_export.json"
|
||||
else:
|
||||
json_path = Path(json_path)
|
||||
|
||||
try:
|
||||
sites = self.get_sites(enabled_only=False, include_nsfw=True)
|
||||
|
||||
# Get category and source stats
|
||||
stats = self.get_stats()
|
||||
|
||||
export_data = {
|
||||
"project": "darkHal Security Group - AUTARCH",
|
||||
"version": "1.1",
|
||||
"description": "Exported sites database with detection patterns",
|
||||
"total_sites": len(sites),
|
||||
"stats": {
|
||||
"by_category": stats['by_category'],
|
||||
"by_source": stats['by_source'],
|
||||
"by_error_type": stats['by_error_type'],
|
||||
},
|
||||
"sites": []
|
||||
}
|
||||
|
||||
for site in sites:
|
||||
site_entry = {
|
||||
"name": site['name'],
|
||||
"url": site['url_template'],
|
||||
"category": site['category'],
|
||||
"source": site['source'],
|
||||
"nsfw": bool(site['nsfw']),
|
||||
"enabled": bool(site['enabled']),
|
||||
}
|
||||
|
||||
# Add detection fields if present
|
||||
if site.get('error_type'):
|
||||
site_entry['error_type'] = site['error_type']
|
||||
if site.get('error_code'):
|
||||
site_entry['error_code'] = site['error_code']
|
||||
if site.get('error_string'):
|
||||
site_entry['error_string'] = site['error_string']
|
||||
if site.get('match_code'):
|
||||
site_entry['match_code'] = site['match_code']
|
||||
if site.get('match_string'):
|
||||
site_entry['match_string'] = site['match_string']
|
||||
|
||||
export_data['sites'].append(site_entry)
|
||||
|
||||
with open(json_path, 'w') as f:
|
||||
json.dump(export_data, f, indent=2)
|
||||
|
||||
print(f"{Colors.GREEN}[+] Exported {len(sites)} sites to {json_path}{Colors.RESET}")
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
print(f"{Colors.RED}[X] Export error: {e}{Colors.RESET}")
|
||||
return False
|
||||
|
||||
def close(self):
|
||||
"""Close database connection."""
|
||||
if self._conn:
|
||||
self._conn.close()
|
||||
self._conn = None
|
||||
|
||||
|
||||
# Global instance (lazily created by get_sites_db)
_sites_db: Optional[SitesDatabase] = None


def get_sites_db() -> SitesDatabase:
    """Get the global sites database instance.

    Creates the SitesDatabase on first call and reuses it afterwards
    (module-level lazy singleton).
    """
    global _sites_db
    if _sites_db is None:
        _sites_db = SitesDatabase()
    return _sites_db
|
||||
675
core/tools.py
Normal file
675
core/tools.py
Normal file
@@ -0,0 +1,675 @@
|
||||
"""
|
||||
AUTARCH Tool System
|
||||
Defines tools that the agent can use to interact with the environment
|
||||
"""
|
||||
|
||||
import os
|
||||
import subprocess
|
||||
import json
|
||||
from typing import Callable, Dict, List, Any, Optional
|
||||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
|
||||
from .banner import Colors
|
||||
|
||||
|
||||
@dataclass
class ToolParameter:
    """Definition of a tool parameter.

    Describes one property of a tool's JSON-schema "parameters" object.
    """
    name: str              # property key used in the generated schema
    description: str       # human/LLM-readable explanation of the parameter
    type: str = "string"   # JSON-schema type name ("string", "integer", "boolean", ...)
    required: bool = True  # whether the name goes into the schema's "required" list
    default: Any = None    # default value; Tool.to_schema does not emit it (informational)
|
||||
|
||||
|
||||
@dataclass
class Tool:
    """Definition of an agent tool.

    Bundles a callable with the metadata the LLM needs to decide when
    and how to invoke it.
    """
    name: str
    description: str
    function: Callable
    parameters: List[ToolParameter] = field(default_factory=list)
    category: str = "general"

    def to_schema(self) -> Dict[str, Any]:
        """Convert tool to JSON schema for LLM."""
        properties = {
            param.name: {"type": param.type, "description": param.description}
            for param in self.parameters
        }
        required = [param.name for param in self.parameters if param.required]
        return {
            "name": self.name,
            "description": self.description,
            "parameters": {
                "type": "object",
                "properties": properties,
                "required": required,
            },
        }

    def execute(self, **kwargs) -> Dict[str, Any]:
        """Execute the tool with given parameters.

        Returns:
            Dict with 'success' bool and 'result' or 'error' string.
        """
        # Any exception from the tool body is converted into an error
        # payload so the agent loop never crashes on a failing tool.
        try:
            return {"success": True, "result": self.function(**kwargs)}
        except Exception as exc:
            return {"success": False, "error": str(exc)}
|
||||
|
||||
|
||||
class ToolRegistry:
|
||||
"""Registry for managing available tools."""
|
||||
|
||||
    def __init__(self):
        """Create a registry pre-populated with the built-in tool set."""
        # name -> Tool mapping; insertion order is preserved for
        # schema/prompt generation.
        self._tools: Dict[str, Tool] = {}
        self._register_builtin_tools()
|
||||
|
||||
    def register(self, tool: Tool):
        """Register a tool, keyed by its name (replaces any existing entry)."""
        self._tools[tool.name] = tool
|
||||
|
||||
def unregister(self, name: str):
|
||||
"""Unregister a tool by name."""
|
||||
if name in self._tools:
|
||||
del self._tools[name]
|
||||
|
||||
    def get(self, name: str) -> Optional[Tool]:
        """Get a tool by name.

        Returns:
            The registered Tool, or None if no tool has that name.
        """
        return self._tools.get(name)
|
||||
|
||||
    def list_tools(self) -> List[Tool]:
        """List all registered tools (in registration order)."""
        return list(self._tools.values())
|
||||
|
||||
    def get_tools_schema(self) -> List[Dict[str, Any]]:
        """Get the JSON schema for every registered tool (see Tool.to_schema)."""
        return [tool.to_schema() for tool in self._tools.values()]
|
||||
|
||||
def get_tools_prompt(self) -> str:
|
||||
"""Generate a tools description for the LLM prompt."""
|
||||
lines = ["Available tools:"]
|
||||
for tool in self._tools.values():
|
||||
lines.append(f"\n## {tool.name}")
|
||||
lines.append(f"Description: {tool.description}")
|
||||
if tool.parameters:
|
||||
lines.append("Parameters:")
|
||||
for param in tool.parameters:
|
||||
req = "(required)" if param.required else "(optional)"
|
||||
lines.append(f" - {param.name} [{param.type}] {req}: {param.description}")
|
||||
return "\n".join(lines)
|
||||
|
||||
def execute(self, tool_name: str, **kwargs) -> Dict[str, Any]:
|
||||
"""Execute a tool by name.
|
||||
|
||||
Args:
|
||||
tool_name: Name of the tool to execute.
|
||||
**kwargs: Parameters to pass to the tool.
|
||||
|
||||
Returns:
|
||||
Dict with execution result.
|
||||
"""
|
||||
tool = self.get(tool_name)
|
||||
if not tool:
|
||||
return {"success": False, "error": f"Tool '{tool_name}' not found"}
|
||||
return tool.execute(**kwargs)
|
||||
|
||||
    def _register_builtin_tools(self):
        """Register built-in tools.

        Groups: system (shell), filesystem (read/write/list/search),
        development (create_module), control/interaction
        (task_complete, ask_user), and Metasploit RPC helpers (msf_*).
        Each registration pairs a ToolParameter list with the matching
        self._tool_* implementation.
        """

        # Shell command execution
        self.register(Tool(
            name="shell",
            description="Execute a shell command and return the output. Use for system operations, running scripts, or gathering system information.",
            function=self._tool_shell,
            parameters=[
                ToolParameter("command", "The shell command to execute", "string", True),
                ToolParameter("timeout", "Timeout in seconds (default 30)", "integer", False, 30),
            ],
            category="system"
        ))

        # Read file
        self.register(Tool(
            name="read_file",
            description="Read the contents of a file. Use to examine files, configs, or source code.",
            function=self._tool_read_file,
            parameters=[
                ToolParameter("path", "Path to the file to read", "string", True),
                ToolParameter("max_lines", "Maximum number of lines to read (default all)", "integer", False),
            ],
            category="filesystem"
        ))

        # Write file
        self.register(Tool(
            name="write_file",
            description="Write content to a file. Creates the file if it doesn't exist, overwrites if it does.",
            function=self._tool_write_file,
            parameters=[
                ToolParameter("path", "Path to the file to write", "string", True),
                ToolParameter("content", "Content to write to the file", "string", True),
            ],
            category="filesystem"
        ))

        # List directory
        self.register(Tool(
            name="list_dir",
            description="List contents of a directory. Use to explore filesystem structure.",
            function=self._tool_list_dir,
            parameters=[
                ToolParameter("path", "Path to the directory (default: current)", "string", False, "."),
                ToolParameter("show_hidden", "Include hidden files (default: false)", "boolean", False, False),
            ],
            category="filesystem"
        ))

        # Search files
        self.register(Tool(
            name="search_files",
            description="Search for files matching a pattern. Use to find specific files.",
            function=self._tool_search_files,
            parameters=[
                ToolParameter("pattern", "Glob pattern to match (e.g., '*.py', '**/*.txt')", "string", True),
                ToolParameter("path", "Starting directory (default: current)", "string", False, "."),
            ],
            category="filesystem"
        ))

        # Search in files (grep)
        self.register(Tool(
            name="search_content",
            description="Search for text content within files. Use to find specific code or text.",
            function=self._tool_search_content,
            parameters=[
                ToolParameter("pattern", "Text or regex pattern to search for", "string", True),
                ToolParameter("path", "File or directory to search in", "string", False, "."),
                ToolParameter("file_pattern", "Glob pattern for files to search (e.g., '*.py')", "string", False),
            ],
            category="filesystem"
        ))

        # Create module
        self.register(Tool(
            name="create_module",
            description="Create a new AUTARCH module. Writes a Python file to the modules/ directory that becomes available in the dashboard.",
            function=self._tool_create_module,
            parameters=[
                ToolParameter("name", "Module filename without .py extension (e.g., port_scanner)", "string", True),
                ToolParameter("category", "Module category: defense, offense, counter, analyze, osint, or simulate", "string", True),
                ToolParameter("code", "Complete Python source code for the module", "string", True),
            ],
            category="development"
        ))

        # Task complete
        self.register(Tool(
            name="task_complete",
            description="Mark the current task as complete. Use when you have fully accomplished the goal.",
            function=self._tool_task_complete,
            parameters=[
                ToolParameter("summary", "Summary of what was accomplished", "string", True),
            ],
            category="control"
        ))

        # Ask user
        self.register(Tool(
            name="ask_user",
            description="Ask the user a question when you need clarification or input.",
            function=self._tool_ask_user,
            parameters=[
                ToolParameter("question", "The question to ask the user", "string", True),
            ],
            category="interaction"
        ))

        # Metasploit tools
        self.register(Tool(
            name="msf_connect",
            description="Connect to Metasploit RPC. Required before using other MSF tools.",
            function=self._tool_msf_connect,
            parameters=[
                ToolParameter("password", "MSF RPC password (uses saved if not provided)", "string", False),
            ],
            category="msf"
        ))

        self.register(Tool(
            name="msf_search",
            description="Search for Metasploit modules by keyword.",
            function=self._tool_msf_search,
            parameters=[
                ToolParameter("query", "Search query (e.g., 'smb', 'apache', 'cve:2021')", "string", True),
            ],
            category="msf"
        ))

        self.register(Tool(
            name="msf_module_info",
            description="Get detailed information about a Metasploit module.",
            function=self._tool_msf_module_info,
            parameters=[
                ToolParameter("module_type", "Module type: exploit, auxiliary, post, payload", "string", True),
                ToolParameter("module_name", "Module name (e.g., 'windows/smb/ms17_010_eternalblue')", "string", True),
            ],
            category="msf"
        ))

        self.register(Tool(
            name="msf_module_options",
            description="Get available options for a Metasploit module.",
            function=self._tool_msf_module_options,
            parameters=[
                ToolParameter("module_type", "Module type: exploit, auxiliary, post, payload", "string", True),
                ToolParameter("module_name", "Module name", "string", True),
            ],
            category="msf"
        ))

        self.register(Tool(
            name="msf_execute",
            description="Execute a Metasploit module with specified options.",
            function=self._tool_msf_execute,
            parameters=[
                ToolParameter("module_type", "Module type: exploit, auxiliary, post", "string", True),
                ToolParameter("module_name", "Module name", "string", True),
                ToolParameter("options", "JSON object of module options (e.g., {\"RHOSTS\": \"192.168.1.1\"})", "string", True),
            ],
            category="msf"
        ))

        self.register(Tool(
            name="msf_sessions",
            description="List active Metasploit sessions.",
            function=self._tool_msf_sessions,
            parameters=[],
            category="msf"
        ))

        self.register(Tool(
            name="msf_session_command",
            description="Execute a command in a Metasploit session.",
            function=self._tool_msf_session_command,
            parameters=[
                ToolParameter("session_id", "Session ID", "string", True),
                ToolParameter("command", "Command to execute", "string", True),
            ],
            category="msf"
        ))

        self.register(Tool(
            name="msf_console",
            description="Run a command in the Metasploit console.",
            function=self._tool_msf_console,
            parameters=[
                ToolParameter("command", "Console command to run", "string", True),
            ],
            category="msf"
        ))
|
||||
|
||||
# Built-in tool implementations
|
||||
|
||||
def _tool_shell(self, command: str, timeout: int = 30) -> str:
|
||||
"""Execute a shell command."""
|
||||
try:
|
||||
result = subprocess.run(
|
||||
command,
|
||||
shell=True,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
timeout=timeout
|
||||
)
|
||||
output = result.stdout
|
||||
if result.stderr:
|
||||
output += f"\n[stderr]: {result.stderr}"
|
||||
if result.returncode != 0:
|
||||
output += f"\n[exit code]: {result.returncode}"
|
||||
return output.strip() or "[no output]"
|
||||
except subprocess.TimeoutExpired:
|
||||
return f"[error]: Command timed out after {timeout} seconds"
|
||||
except Exception as e:
|
||||
return f"[error]: {str(e)}"
|
||||
|
||||
def _tool_read_file(self, path: str, max_lines: int = None) -> str:
|
||||
"""Read a file's contents."""
|
||||
path = Path(path).expanduser()
|
||||
if not path.exists():
|
||||
raise FileNotFoundError(f"File not found: {path}")
|
||||
if not path.is_file():
|
||||
raise ValueError(f"Not a file: {path}")
|
||||
|
||||
with open(path, 'r', errors='replace') as f:
|
||||
if max_lines:
|
||||
lines = []
|
||||
for i, line in enumerate(f):
|
||||
if i >= max_lines:
|
||||
lines.append(f"... [{path.stat().st_size} bytes total, truncated at {max_lines} lines]")
|
||||
break
|
||||
lines.append(line.rstrip())
|
||||
return '\n'.join(lines)
|
||||
else:
|
||||
return f.read()
|
||||
|
||||
def _tool_write_file(self, path: str, content: str) -> str:
|
||||
"""Write content to a file."""
|
||||
path = Path(path).expanduser()
|
||||
path.parent.mkdir(parents=True, exist_ok=True)
|
||||
with open(path, 'w') as f:
|
||||
f.write(content)
|
||||
return f"Successfully wrote {len(content)} bytes to {path}"
|
||||
|
||||
def _tool_list_dir(self, path: str = ".", show_hidden: bool = False) -> str:
|
||||
"""List directory contents."""
|
||||
path = Path(path).expanduser()
|
||||
if not path.exists():
|
||||
raise FileNotFoundError(f"Directory not found: {path}")
|
||||
if not path.is_dir():
|
||||
raise ValueError(f"Not a directory: {path}")
|
||||
|
||||
entries = []
|
||||
for entry in sorted(path.iterdir()):
|
||||
if not show_hidden and entry.name.startswith('.'):
|
||||
continue
|
||||
prefix = "d " if entry.is_dir() else "f "
|
||||
entries.append(f"{prefix}{entry.name}")
|
||||
|
||||
return '\n'.join(entries) if entries else "[empty directory]"
|
||||
|
||||
def _tool_search_files(self, pattern: str, path: str = ".") -> str:
|
||||
"""Search for files matching a pattern."""
|
||||
path = Path(path).expanduser()
|
||||
matches = list(path.glob(pattern))
|
||||
|
||||
if not matches:
|
||||
return f"No files matching '{pattern}'"
|
||||
|
||||
result = []
|
||||
for match in matches[:50]: # Limit results
|
||||
result.append(str(match))
|
||||
|
||||
if len(matches) > 50:
|
||||
result.append(f"... and {len(matches) - 50} more")
|
||||
|
||||
return '\n'.join(result)
|
||||
|
||||
def _tool_search_content(self, pattern: str, path: str = ".", file_pattern: str = None) -> str:
|
||||
"""Search for content in files."""
|
||||
try:
|
||||
cmd = f"grep -rn '{pattern}' {path}"
|
||||
if file_pattern:
|
||||
cmd = f"grep -rn --include='{file_pattern}' '{pattern}' {path}"
|
||||
|
||||
result = subprocess.run(
|
||||
cmd,
|
||||
shell=True,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
timeout=30
|
||||
)
|
||||
|
||||
output = result.stdout.strip()
|
||||
if not output:
|
||||
return f"No matches found for '{pattern}'"
|
||||
|
||||
# Limit output
|
||||
lines = output.split('\n')
|
||||
if len(lines) > 30:
|
||||
return '\n'.join(lines[:30]) + f"\n... and {len(lines) - 30} more matches"
|
||||
return output
|
||||
|
||||
except subprocess.TimeoutExpired:
|
||||
return "[error]: Search timed out"
|
||||
except Exception as e:
|
||||
return f"[error]: {str(e)}"
|
||||
|
||||
def _tool_create_module(self, name: str, category: str, code: str) -> str:
    """Create a new AUTARCH module in the modules/ directory.

    Validates the category and name, checks the source for required
    attributes, writes the file, then import-tests it.  Any failure after
    the write deletes the file again so no broken module is left behind.

    Args:
        name: Desired module name (sanitized to snake_case).
        category: One of the valid category strings below.
        code: Full Python source of the module.

    Returns:
        A human-readable success or "[error]: ..." message (never raises).
    """
    import importlib.util as ilu

    valid_categories = ('defense', 'offense', 'counter', 'analyze', 'osint', 'simulate')
    category = category.lower().strip()
    if category not in valid_categories:
        return f"[error]: Invalid category '{category}'. Must be one of: {', '.join(valid_categories)}"

    # Sanitize name: normalize separators, then require [a-z0-9_] only.
    name = name.strip().replace(' ', '_').replace('-', '_').lower()
    if not name.replace('_', '').isalnum():
        return f"[error]: Invalid module name '{name}'. Use only letters, numbers, and underscores."

    # Check required attributes in source code (cheap textual check before
    # paying for a real import below).
    required = ['DESCRIPTION', 'VERSION', 'CATEGORY', 'def run(']
    missing = [r for r in required if r not in code]
    if missing:
        return f"[error]: Module code is missing required elements: {', '.join(missing)}"

    # Determine modules directory (sibling of the core/ package).
    modules_dir = Path(__file__).parent.parent / 'modules'
    module_path = modules_dir / f'{name}.py'

    if module_path.exists():
        return f"[error]: Module '{name}' already exists at {module_path}. Choose a different name."

    # Write the module file
    try:
        module_path.write_text(code, encoding='utf-8')
    except Exception as e:
        return f"[error]: Failed to write module: {e}"

    # Validate by attempting to import.
    # NOTE(review): exec_module runs the (LLM-generated) module code
    # in-process with full privileges — by design here, but worth flagging.
    try:
        spec = ilu.spec_from_file_location(name, module_path)
        mod = ilu.module_from_spec(spec)
        spec.loader.exec_module(mod)

        # Verify it has run()
        if not hasattr(mod, 'run'):
            module_path.unlink()
            return "[error]: Module loaded but has no run() function. Module deleted."

    except Exception as e:
        # Import failed — delete the bad module
        try:
            module_path.unlink()
        except Exception:
            pass
        return f"[error]: Module failed to import: {e}. Module deleted."

    return f"Module '{name}' created successfully at {module_path}. Category: {category}. It is now available in the dashboard."
|
||||
|
||||
def _tool_task_complete(self, summary: str) -> str:
|
||||
"""Mark task as complete - this is a control signal."""
|
||||
return f"__TASK_COMPLETE__:{summary}"
|
||||
|
||||
def _tool_ask_user(self, question: str) -> str:
|
||||
"""Ask user a question - handled by agent loop."""
|
||||
return f"__ASK_USER__:{question}"
|
||||
|
||||
# Metasploit tool implementations
|
||||
|
||||
def _tool_msf_connect(self, password: str = None) -> str:
    """Connect to Metasploit RPC and report the server version."""
    from .msf import get_msf_manager, MSFError

    manager = get_msf_manager()
    try:
        manager.connect(password)
        ver = manager.rpc.get_version()
    except MSFError as e:
        return f"[error]: {e}"
    return f"Connected to Metasploit {ver.get('version', 'Unknown')}"
|
||||
|
||||
def _tool_msf_search(self, query: str) -> str:
    """Search Metasploit for modules matching *query* (max 20 listed)."""
    from .msf import get_msf_manager, MSFError

    manager = get_msf_manager()
    if not manager.is_connected:
        return "[error]: Not connected to Metasploit. Use msf_connect first."

    try:
        found = manager.rpc.search_modules(query)
    except MSFError as e:
        return f"[error]: {e}"

    if not found:
        return f"No modules found matching '{query}'"

    rows = []
    for entry in found[:20]:  # Limit to 20 results
        if not isinstance(entry, dict):
            rows.append(str(entry))
            continue
        full_name = entry.get('fullname', entry.get('name', 'Unknown'))
        summary = entry.get('description', '')[:60]
        rows.append(f"{full_name}\n  {summary}")

    if len(found) > 20:
        rows.append(f"\n... and {len(found) - 20} more results")

    return '\n'.join(rows)
|
||||
|
||||
def _tool_msf_module_info(self, module_type: str, module_name: str) -> str:
    """Summarize a Metasploit module: name, type, rank, description, authors."""
    from .msf import get_msf_manager, MSFError

    manager = get_msf_manager()
    if not manager.is_connected:
        return "[error]: Not connected to Metasploit. Use msf_connect first."

    try:
        info = manager.rpc.get_module_info(module_type, module_name)
    except MSFError as e:
        return f"[error]: {e}"

    # Long descriptions are clipped to 200 characters with an ellipsis.
    if len(info.description) > 200:
        desc_line = f"Description: {info.description[:200]}..."
    else:
        desc_line = f"Description: {info.description}"

    report = [
        f"Name: {info.name}",
        f"Type: {info.type}",
        f"Rank: {info.rank}",
        desc_line,
    ]
    if info.author:
        report.append(f"Authors: {', '.join(info.author[:3])}")
    return '\n'.join(report)
|
||||
|
||||
def _tool_msf_module_options(self, module_type: str, module_name: str) -> str:
    """List a module's options; '*' marks required ones."""
    from .msf import get_msf_manager, MSFError

    manager = get_msf_manager()
    if not manager.is_connected:
        return "[error]: Not connected to Metasploit. Use msf_connect first."

    try:
        opts = manager.rpc.get_module_options(module_type, module_name)
    except MSFError as e:
        return f"[error]: {e}"

    rows = []
    for opt_name, spec in opts.items():
        if not isinstance(spec, dict):
            rows.append(f"{opt_name}: {spec}")
            continue
        marker = "*" if spec.get('required', False) else ""
        rows.append(
            f"{opt_name}{marker}: {spec.get('desc', '')[:50]} "
            f"[default: {spec.get('default', '')}]"
        )
    return '\n'.join(rows) if rows else "No options available"
|
||||
|
||||
def _tool_msf_execute(self, module_type: str, module_name: str, options: str) -> str:
    """Launch a Metasploit module; *options* is a JSON object string."""
    from .msf import get_msf_manager, MSFError

    manager = get_msf_manager()
    if not manager.is_connected:
        return "[error]: Not connected to Metasploit. Use msf_connect first."

    # Accept either a JSON string or an already-parsed mapping.
    if isinstance(options, str):
        try:
            opts = json.loads(options)
        except json.JSONDecodeError:
            return "[error]: Invalid JSON in options parameter"
    else:
        opts = options

    try:
        outcome = manager.rpc.execute_module(module_type, module_name, opts)
    except MSFError as e:
        return f"[error]: {e}"
    return f"Module executed. Job ID: {outcome.get('job_id')}, UUID: {outcome.get('uuid')}"
|
||||
|
||||
def _tool_msf_sessions(self) -> str:
    """List active Metasploit sessions, one per line."""
    from .msf import get_msf_manager, MSFError

    manager = get_msf_manager()
    if not manager.is_connected:
        return "[error]: Not connected to Metasploit. Use msf_connect first."

    try:
        sessions = manager.rpc.list_sessions()
    except MSFError as e:
        return f"[error]: {e}"

    if not sessions:
        return "No active sessions"

    rows = []
    for sid, details in sessions.items():
        if not isinstance(details, dict):
            rows.append(f"[{sid}] {details}")
            continue
        rows.append(
            f"[{sid}] {details.get('type', 'Unknown')} - "
            f"{details.get('target_host', 'Unknown')} ({details.get('username', '')})"
        )
    return '\n'.join(rows)
|
||||
|
||||
def _tool_msf_session_command(self, session_id: str, command: str) -> str:
    """Write *command* into a session's shell and read back the output."""
    import time

    from .msf import get_msf_manager, MSFError

    manager = get_msf_manager()
    if not manager.is_connected:
        return "[error]: Not connected to Metasploit. Use msf_connect first."

    try:
        manager.rpc.session_shell_write(session_id, command)
        time.sleep(1)  # Give the remote shell a moment to produce output
        result = manager.rpc.session_shell_read(session_id)
    except MSFError as e:
        return f"[error]: {e}"
    return result if result else "[no output]"
|
||||
|
||||
def _tool_msf_console(self, command: str) -> str:
    """Run a command in the Metasploit console and return its output."""
    from .msf import get_msf_manager, MSFError

    manager = get_msf_manager()
    if not manager.is_connected:
        return "[error]: Not connected to Metasploit. Use msf_connect first."

    try:
        result = manager.rpc.run_console_command(command)
    except MSFError as e:
        return f"[error]: {e}"
    return result if result else "[no output]"
|
||||
|
||||
|
||||
# Global tool registry

# Lazily-created module-level singleton; access only via get_tool_registry().
_registry: Optional[ToolRegistry] = None


def get_tool_registry() -> ToolRegistry:
    """Get the global tool registry.

    Creates the ToolRegistry on first call and returns the same instance
    thereafter (not thread-safe; assumed to be first called from the main
    thread).
    """
    global _registry
    if _registry is None:
        _registry = ToolRegistry()
    return _registry
|
||||
147
core/tray.py
Normal file
147
core/tray.py
Normal file
@@ -0,0 +1,147 @@
|
||||
"""AUTARCH System Tray Icon
|
||||
|
||||
Provides a taskbar/system tray icon with Start, Stop, Restart, Open Dashboard,
|
||||
and Exit controls for the web dashboard.
|
||||
|
||||
Requires: pystray, Pillow
|
||||
"""
|
||||
|
||||
import sys
|
||||
import threading
|
||||
import webbrowser
|
||||
from pathlib import Path
|
||||
|
||||
try:
|
||||
import pystray
|
||||
from PIL import Image, ImageDraw, ImageFont
|
||||
TRAY_AVAILABLE = True
|
||||
except ImportError:
|
||||
TRAY_AVAILABLE = False
|
||||
|
||||
|
||||
def _get_icon_path():
|
||||
"""Find the .ico file — works in both source and frozen (PyInstaller) builds."""
|
||||
if getattr(sys, 'frozen', False):
|
||||
base = Path(sys._MEIPASS)
|
||||
else:
|
||||
base = Path(__file__).parent.parent
|
||||
ico = base / 'autarch.ico'
|
||||
if ico.exists():
|
||||
return ico
|
||||
return None
|
||||
|
||||
|
||||
def create_icon_image(size=64):
    """Return the tray icon as an RGBA image of the given size.

    Prefers the bundled autarch.ico; if it is missing or unreadable, draws
    a fallback icon (dark disc, cyan outline, centered "A" glyph).
    """
    ico_path = _get_icon_path()
    if ico_path:
        try:
            loaded = Image.open(str(ico_path)).resize((size, size), Image.LANCZOS)
            return loaded.convert('RGBA')
        except Exception:
            pass  # Corrupt/unsupported icon file — fall back to drawing one

    # Fallback: generate programmatically
    canvas = Image.new('RGBA', (size, size), (0, 0, 0, 0))
    draw = ImageDraw.Draw(canvas)
    draw.ellipse([1, 1, size - 2, size - 2], fill=(15, 15, 25, 255),
                 outline=(0, 180, 255, 255), width=2)
    try:
        font = ImageFont.truetype("arial.ttf", int(size * 0.5))
    except OSError:
        font = ImageFont.load_default()
    left, top, right, bottom = draw.textbbox((0, 0), "A", font=font)
    glyph_w = right - left
    glyph_h = bottom - top
    # Center the glyph; subtract `top` to compensate for the bbox offset.
    draw.text(((size - glyph_w) // 2, (size - glyph_h) // 2 - top),
              "A", fill=(0, 200, 255, 255), font=font)
    return canvas
|
||||
|
||||
|
||||
class TrayManager:
    """Manages the system tray icon and Flask server lifecycle.

    Wraps a werkzeug server in a daemon thread so the (blocking) pystray
    icon loop can run on the main thread and start/stop the dashboard
    from menu callbacks.
    """

    def __init__(self, app, host, port, ssl_context=None):
        # app: Flask WSGI application to serve.
        # ssl_context: optional (cert_file, key_file) tuple; enables HTTPS.
        self.app = app
        self.host = host
        self.port = port
        self.ssl_context = ssl_context
        self._server = None    # werkzeug server instance while running
        self._thread = None    # daemon thread driving serve_forever()
        self.running = False
        self._icon = None      # pystray.Icon, created in run()
        self._proto = 'https' if ssl_context else 'http'

    def start_server(self):
        """Start the Flask web server in a background thread."""
        if self.running:
            return

        from werkzeug.serving import make_server
        self._server = make_server(self.host, self.port, self.app, threaded=True)

        # Wrap the listening socket in TLS when a cert/key pair was supplied.
        if self.ssl_context:
            import ssl
            ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
            ctx.load_cert_chain(self.ssl_context[0], self.ssl_context[1])
            self._server.socket = ctx.wrap_socket(self._server.socket, server_side=True)

        self.running = True
        # Daemon thread: the process can exit even if serve_forever is live.
        self._thread = threading.Thread(target=self._server.serve_forever, daemon=True)
        self._thread.start()

    def stop_server(self):
        """Stop the Flask web server."""
        if not self.running or not self._server:
            return
        self._server.shutdown()
        self._server = None
        self._thread = None
        self.running = False

    def restart_server(self):
        """Stop and restart the Flask web server."""
        self.stop_server()
        self.start_server()

    def open_browser(self):
        """Open the dashboard in the default web browser."""
        if self.running:
            # Wildcard bind addresses are not browsable; use localhost instead.
            host = 'localhost' if self.host in ('0.0.0.0', '::') else self.host
            webbrowser.open(f"{self._proto}://{host}:{self.port}")

    def quit(self):
        """Stop server and exit the tray icon."""
        self.stop_server()
        if self._icon:
            self._icon.stop()

    def run(self):
        """Start server and show tray icon. Blocks until Exit is clicked."""
        if not TRAY_AVAILABLE:
            raise RuntimeError("pystray or Pillow not installed")

        self.start_server()

        image = create_icon_image()
        # Callable labels/enabled flags are re-evaluated by pystray every
        # time the menu opens, so the items track self.running live.
        menu = pystray.Menu(
            pystray.MenuItem(
                lambda item: f"AUTARCH — {'Running' if self.running else 'Stopped'}",
                None, enabled=False),
            pystray.Menu.SEPARATOR,
            pystray.MenuItem("Start", lambda: self.start_server(),
                             enabled=lambda item: not self.running),
            pystray.MenuItem("Stop", lambda: self.stop_server(),
                             enabled=lambda item: self.running),
            pystray.MenuItem("Restart", lambda: self.restart_server(),
                             enabled=lambda item: self.running),
            pystray.Menu.SEPARATOR,
            pystray.MenuItem("Open Dashboard", lambda: self.open_browser(),
                             enabled=lambda item: self.running, default=True),
            pystray.Menu.SEPARATOR,
            pystray.MenuItem("Exit", lambda: self.quit()),
        )

        self._icon = pystray.Icon("autarch", image, "AUTARCH", menu=menu)
        self._icon.run()  # Blocks until quit()
|
||||
274
core/upnp.py
Normal file
274
core/upnp.py
Normal file
@@ -0,0 +1,274 @@
|
||||
"""
|
||||
AUTARCH UPnP Manager
|
||||
Manages UPnP port forwarding via miniupnpc (upnpc CLI)
|
||||
"""
|
||||
|
||||
import subprocess
|
||||
import re
|
||||
from pathlib import Path
|
||||
from typing import List, Dict, Optional, Tuple
|
||||
|
||||
from core.paths import find_tool
|
||||
|
||||
|
||||
class UPnPManager:
    """UPnP port forwarding manager wrapping the upnpc CLI.

    All router interaction is delegated to the miniupnpc `upnpc` binary;
    methods return (success, output) tuples instead of raising.
    """

    def __init__(self, config=None):
        # config: AUTARCH settings object (ConfigParser-style get/set/save);
        # optional — without it, mapping persistence is disabled.
        self.config = config
        self._upnpc = find_tool('upnpc')  # None when miniupnpc is absent

    def is_available(self) -> bool:
        """Check if upnpc is installed."""
        return self._upnpc is not None

    def _run(self, args: list, timeout: int = 15) -> Tuple[bool, str]:
        """Run upnpc with arguments and return (success, output)."""
        if not self._upnpc:
            return False, "upnpc not found. Install miniupnpc."
        try:
            result = subprocess.run(
                [self._upnpc] + args,
                capture_output=True, text=True, timeout=timeout
            )
            # upnpc writes diagnostics to both streams; merge them.
            output = result.stdout + result.stderr
            return result.returncode == 0, output.strip()
        except subprocess.TimeoutExpired:
            return False, "Command timed out"
        except Exception as e:
            return False, str(e)

    def list_mappings(self) -> Tuple[bool, str]:
        """List current UPnP port mappings."""
        return self._run(['-l'])

    def add_mapping(self, internal_ip: str, internal_port: int,
                    external_port: int, protocol: str,
                    description: str = "AUTARCH") -> Tuple[bool, str]:
        """Add a UPnP port mapping.

        Args:
            internal_ip: LAN IP to forward to
            internal_port: Internal port number
            external_port: External port number
            protocol: TCP or UDP
            description: Mapping description
        """
        protocol = protocol.upper()
        if protocol not in ('TCP', 'UDP'):
            return False, "Protocol must be TCP or UDP"
        # '0' is the lease duration (indefinite).
        # NOTE(review): confirm this argument order against the installed
        # upnpc version — some builds take -a ip in_port ext_port proto [dur].
        return self._run([
            '-a', internal_ip,
            str(internal_port), str(external_port),
            protocol, '0', description
        ])

    def remove_mapping(self, external_port: int, protocol: str) -> Tuple[bool, str]:
        """Remove a UPnP port mapping."""
        protocol = protocol.upper()
        return self._run(['-d', str(external_port), protocol])

    def get_external_ip(self) -> Tuple[bool, str]:
        """Get the external IP via UPnP."""
        success, output = self._run(['-e'])
        if success:
            # Parse "ExternalIPAddress = x.x.x.x" from output
            for line in output.splitlines():
                if 'ExternalIPAddress' in line:
                    parts = line.split('=')
                    if len(parts) >= 2:
                        return True, parts[-1].strip()
            # If no specific line found, return raw output
            return True, output
        return False, output

    def refresh_all(self) -> List[Dict]:
        """Re-add all configured port mappings. Returns list of results."""
        mappings = self.load_mappings_from_config()
        internal_ip = self._get_internal_ip()
        results = []

        for mapping in mappings:
            port = mapping['port']
            proto = mapping['protocol']
            desc = mapping.get('description', 'AUTARCH')
            # External and internal port are kept identical by convention.
            success, output = self.add_mapping(
                internal_ip, port, port, proto, desc
            )
            results.append({
                'port': port,
                'protocol': proto,
                'success': success,
                'message': output
            })

        return results

    def _get_internal_ip(self) -> str:
        """Get the configured internal IP."""
        # NOTE(review): hard-coded fallback LAN IP — confirm it suits all
        # deployments rather than just the author's network.
        if self.config:
            return self.config.get('upnp', 'internal_ip', fallback='10.0.0.26')
        return '10.0.0.26'

    def load_mappings_from_config(self) -> List[Dict]:
        """Load port mappings from config file.

        Config format: mappings = 443:TCP,51820:UDP,8080:TCP
        """
        if not self.config:
            return []

        mappings_str = self.config.get('upnp', 'mappings', fallback='')
        if not mappings_str:
            return []

        mappings = []
        for entry in mappings_str.split(','):
            entry = entry.strip()
            if ':' in entry:
                parts = entry.split(':')
                try:
                    mappings.append({
                        'port': int(parts[0]),
                        'protocol': parts[1].upper()
                    })
                except (ValueError, IndexError):
                    # Skip malformed entries instead of failing the whole list.
                    continue
        return mappings

    def save_mappings_to_config(self, mappings: List[Dict]):
        """Save port mappings to config file (no-op without a config)."""
        if not self.config:
            return

        mappings_str = ','.join(
            f"{m['port']}:{m['protocol']}" for m in mappings
        )
        self.config.set('upnp', 'mappings', mappings_str)
        self.config.save()

    # --- Cron Management ---

    def _get_autarch_path(self) -> str:
        """Get the path to autarch.py."""
        from core.paths import get_app_dir
        return str(get_app_dir() / 'autarch.py')

    def _get_cron_command(self) -> str:
        """Get the cron command string for UPnP refresh."""
        autarch_path = self._get_autarch_path()
        return f'/usr/bin/python3 {autarch_path} --upnp-refresh > /dev/null 2>&1'

    def get_cron_status(self) -> Dict:
        """Check if UPnP cron job is installed.

        Returns:
            Dict with 'installed' (bool), 'interval' (str), 'line' (str)
        """
        try:
            result = subprocess.run(
                ['crontab', '-l'],
                capture_output=True, text=True, timeout=5
            )
            if result.returncode != 0:
                # Non-zero exit = this user has no crontab at all.
                return {'installed': False, 'interval': None, 'line': None}

            for line in result.stdout.splitlines():
                if 'upnp-refresh' in line and not line.startswith('#'):
                    # Parse interval from cron expression ("0 */N * * * ...")
                    match = re.match(r'^\d+\s+\*/(\d+)', line)
                    interval = match.group(1) if match else '?'
                    return {
                        'installed': True,
                        'interval': f'{interval}h',
                        'line': line.strip()
                    }

            return {'installed': False, 'interval': None, 'line': None}
        except Exception:
            return {'installed': False, 'interval': None, 'line': None}

    def install_cron(self, interval_hours: int = 12) -> Tuple[bool, str]:
        """Install a crontab entry for periodic UPnP refresh.

        Args:
            interval_hours: How often to refresh (in hours)
        """
        # First remove any existing entry (keeps the crontab idempotent).
        self.uninstall_cron()

        cron_line = f'0 */{interval_hours} * * * {self._get_cron_command()}'

        try:
            # Get current crontab
            result = subprocess.run(
                ['crontab', '-l'],
                capture_output=True, text=True, timeout=5
            )
            existing = result.stdout if result.returncode == 0 else ''

            # Append new entry
            new_crontab = existing.rstrip('\n') + '\n' + cron_line + '\n'

            # Install
            proc = subprocess.run(
                ['crontab', '-'],
                input=new_crontab, capture_output=True, text=True, timeout=5
            )

            if proc.returncode == 0:
                # Save interval to config
                if self.config:
                    self.config.set('upnp', 'refresh_hours', str(interval_hours))
                    self.config.save()
                return True, f"Cron job installed: every {interval_hours} hours"
            else:
                return False, proc.stderr
        except Exception as e:
            return False, str(e)

    def uninstall_cron(self) -> Tuple[bool, str]:
        """Remove the UPnP refresh cron job."""
        try:
            result = subprocess.run(
                ['crontab', '-l'],
                capture_output=True, text=True, timeout=5
            )
            if result.returncode != 0:
                return True, "No crontab exists"

            # Filter out our line
            lines = result.stdout.splitlines()
            filtered = [l for l in lines if 'upnp-refresh' not in l]

            if len(lines) == len(filtered):
                return True, "No UPnP cron job found"

            new_crontab = '\n'.join(filtered) + '\n'

            proc = subprocess.run(
                ['crontab', '-'],
                input=new_crontab, capture_output=True, text=True, timeout=5
            )

            if proc.returncode == 0:
                return True, "Cron job removed"
            else:
                return False, proc.stderr
        except Exception as e:
            return False, str(e)
|
||||
|
||||
|
||||
# Singleton
# Module-level cache for get_upnp_manager(); access only via that function.
_upnp_manager = None


def get_upnp_manager(config=None) -> UPnPManager:
    """Get the global UPnP manager instance.

    The first call creates the manager (loading the global config when none
    is supplied); later calls return the cached instance and ignore the
    `config` argument.
    """
    global _upnp_manager
    if _upnp_manager is None:
        if config is None:
            from core.config import get_config
            config = get_config()
        _upnp_manager = UPnPManager(config)
    return _upnp_manager
|
||||
858
core/wireguard.py
Normal file
858
core/wireguard.py
Normal file
@@ -0,0 +1,858 @@
|
||||
"""
|
||||
AUTARCH WireGuard VPN Manager
|
||||
Server management, client/peer CRUD, remote ADB (TCP/IP + USB/IP).
|
||||
|
||||
Integrates /home/snake/wg_setec/ functionality into the AUTARCH framework
|
||||
with added remote ADB and USB/IP support for Android device management
|
||||
over WireGuard tunnels.
|
||||
"""
|
||||
|
||||
import io
|
||||
import json
|
||||
import re
|
||||
import subprocess
|
||||
import time
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Optional, Dict, List, Any, Tuple
|
||||
|
||||
from core.paths import get_data_dir, find_tool
|
||||
|
||||
|
||||
class WireGuardManager:
|
||||
"""WireGuard VPN + Remote ADB manager."""
|
||||
|
||||
def __init__(self, config=None):
    """Initialize the manager.

    Args:
        config: dict-like [wireguard] settings section from
            autarch_settings.conf; defaults are used for any missing key.
    """
    # External tool locations (each is None when the tool is not installed).
    self._wg_bin = find_tool('wg')
    self._wg_quick = find_tool('wg-quick')
    self._usbip_bin = find_tool('usbip')

    # Per-install state directory holding client records.
    self._data_dir = get_data_dir() / 'wireguard'
    self._data_dir.mkdir(parents=True, exist_ok=True)
    self._clients_file = self._data_dir / 'clients.json'
    self._last_ip_file = self._data_dir / 'last_ip'

    # Config from autarch_settings.conf [wireguard] section
    self._config = config or {}
    self._wg_config_path = self._config.get('config_path', '/etc/wireguard/wg0.conf')
    self._interface = self._config.get('interface', 'wg0')
    self._subnet = self._config.get('subnet', '10.1.0.0/24')
    self._server_address = self._config.get('server_address', '10.1.0.1')
    self._listen_port = self._config.get('listen_port', '51820')
    self._default_dns = self._config.get('default_dns', '1.1.1.1, 8.8.8.8')
    self._default_allowed_ips = self._config.get('default_allowed_ips', '0.0.0.0/0, ::/0')
|
||||
|
||||
# ── Helpers ──────────────────────────────────────────────────────
|
||||
|
||||
def _run_wg(self, args, timeout=10):
|
||||
"""Run wg command, return (stdout, stderr, rc)."""
|
||||
if not self._wg_bin:
|
||||
return ('', 'wg binary not found', 1)
|
||||
cmd = [self._wg_bin] + args
|
||||
try:
|
||||
proc = subprocess.run(cmd, capture_output=True, text=True, timeout=timeout)
|
||||
return (proc.stdout, proc.stderr, proc.returncode)
|
||||
except subprocess.TimeoutExpired:
|
||||
return ('', 'Command timed out', 1)
|
||||
except Exception as e:
|
||||
return ('', str(e), 1)
|
||||
|
||||
def _run_wg_sudo(self, args, timeout=10):
|
||||
"""Run wg command with sudo, return (stdout, stderr, rc)."""
|
||||
if not self._wg_bin:
|
||||
return ('', 'wg binary not found', 1)
|
||||
cmd = ['sudo', self._wg_bin] + args
|
||||
try:
|
||||
proc = subprocess.run(cmd, capture_output=True, text=True, timeout=timeout)
|
||||
return (proc.stdout, proc.stderr, proc.returncode)
|
||||
except subprocess.TimeoutExpired:
|
||||
return ('', 'Command timed out', 1)
|
||||
except Exception as e:
|
||||
return ('', str(e), 1)
|
||||
|
||||
def _run_cmd(self, cmd, timeout=10, input_data=None):
|
||||
"""Run arbitrary command, return (stdout, stderr, rc)."""
|
||||
try:
|
||||
proc = subprocess.run(
|
||||
cmd, capture_output=True, text=True,
|
||||
timeout=timeout, input=input_data
|
||||
)
|
||||
return (proc.stdout, proc.stderr, proc.returncode)
|
||||
except subprocess.TimeoutExpired:
|
||||
return ('', 'Command timed out', 1)
|
||||
except Exception as e:
|
||||
return ('', str(e), 1)
|
||||
|
||||
def _load_clients(self):
|
||||
"""Load clients from JSON file."""
|
||||
if not self._clients_file.exists():
|
||||
return {}
|
||||
try:
|
||||
with open(self._clients_file, 'r') as f:
|
||||
return json.load(f)
|
||||
except (json.JSONDecodeError, OSError):
|
||||
return {}
|
||||
|
||||
def _save_clients(self, data):
|
||||
"""Save clients to JSON file."""
|
||||
with open(self._clients_file, 'w') as f:
|
||||
json.dump(data, f, indent=2)
|
||||
|
||||
def _get_server_public_key(self):
|
||||
"""Read server public key."""
|
||||
# Try file first
|
||||
key_path = Path('/etc/wireguard/server_public.key')
|
||||
if key_path.exists():
|
||||
try:
|
||||
return key_path.read_text().strip()
|
||||
except OSError:
|
||||
pass
|
||||
# Try wg show
|
||||
stdout, _, rc = self._run_wg_sudo(['show', self._interface, 'public-key'])
|
||||
if rc == 0 and stdout.strip():
|
||||
return stdout.strip()
|
||||
return ''
|
||||
|
||||
def _get_server_endpoint(self):
|
||||
"""Read server public IP/endpoint."""
|
||||
ip_path = Path('/etc/wireguard/server_public_ip')
|
||||
if ip_path.exists():
|
||||
try:
|
||||
return ip_path.read_text().strip()
|
||||
except OSError:
|
||||
pass
|
||||
return ''
|
||||
|
||||
def _adb_bin(self):
    """Get ADB binary path (None when adb is not installed).

    Resolved on every call so a freshly-installed adb is picked up
    without restarting.
    """
    return find_tool('adb')
|
||||
|
||||
def _run_adb(self, args, timeout=30):
|
||||
"""Run ADB command, return (stdout, stderr, rc)."""
|
||||
adb = self._adb_bin()
|
||||
if not adb:
|
||||
return ('', 'adb binary not found', 1)
|
||||
cmd = [adb] + args
|
||||
try:
|
||||
proc = subprocess.run(cmd, capture_output=True, text=True, timeout=timeout)
|
||||
return (proc.stdout, proc.stderr, proc.returncode)
|
||||
except subprocess.TimeoutExpired:
|
||||
return ('', 'Command timed out', 1)
|
||||
except Exception as e:
|
||||
return ('', str(e), 1)
|
||||
|
||||
# ── Server Management ────────────────────────────────────────────
|
||||
|
||||
def is_available(self):
    """True when the wg binary was located at init time."""
    return not (self._wg_bin is None)
|
||||
|
||||
def get_server_status(self):
    """Parse wg show for interface info.

    Returns:
        When the interface is down: {'running': False, 'interface': ...,
        'error': ...}.  Otherwise a dict with interface, running,
        public_key, endpoint, listen_port and peer_count, where the
        configured values are overwritten by whatever `wg show` reports.
    """
    stdout, stderr, rc = self._run_wg_sudo(['show', self._interface])
    if rc != 0:
        return {
            'running': False,
            'interface': self._interface,
            'error': stderr.strip() if stderr else 'Interface not running',
        }

    # Seed with configured/stored values; live output refines them below.
    info = {
        'interface': self._interface,
        'running': True,
        'public_key': self._get_server_public_key(),
        'endpoint': f'{self._get_server_endpoint()}:{self._listen_port}',
        'listen_port': self._listen_port,
    }

    for line in stdout.split('\n'):
        line = line.strip()
        if line.startswith('listening port:'):
            info['listen_port'] = line.split(':', 1)[1].strip()
        elif line.startswith('public key:'):
            info['public_key'] = line.split(':', 1)[1].strip()

    # Count peers (each connected peer appears as a "peer:" stanza).
    peer_count = stdout.count('peer:')
    info['peer_count'] = peer_count

    return info
|
||||
|
||||
def start_interface(self):
    """Bring the WireGuard interface up with `sudo wg-quick up`."""
    if not self._wg_quick:
        return {'ok': False, 'error': 'wg-quick not found'}
    _out, err, status = self._run_cmd(
        ['sudo', self._wg_quick, 'up', self._interface], timeout=15)
    if status == 0:
        return {'ok': True, 'message': f'{self._interface} started'}
    if 'already exists' in err:
        # wg-quick exits non-zero when the interface is already up;
        # that is not a failure from the caller's point of view.
        return {'ok': True, 'message': f'{self._interface} already running'}
    return {'ok': False, 'error': err.strip() or 'Failed to start'}
|
||||
|
||||
def stop_interface(self):
    """Take the WireGuard interface down with `sudo wg-quick down`."""
    if not self._wg_quick:
        return {'ok': False, 'error': 'wg-quick not found'}
    _out, err, status = self._run_cmd(
        ['sudo', self._wg_quick, 'down', self._interface], timeout=15)
    if status == 0:
        return {'ok': True, 'message': f'{self._interface} stopped'}
    if 'is not a WireGuard interface' in err:
        # Already down; report success rather than an error.
        return {'ok': True, 'message': f'{self._interface} already stopped'}
    return {'ok': False, 'error': err.strip() or 'Failed to stop'}
|
||||
|
||||
def restart_interface(self):
    """Bounce the WireGuard interface: down, short pause, then up."""
    self.stop_interface()
    time.sleep(1)  # let wg-quick finish tearing the interface down
    return self.start_interface()
|
||||
|
||||
# ── Key Generation ───────────────────────────────────────────────
|
||||
|
||||
def generate_keypair(self):
    """Generate a WireGuard keypair.

    Returns:
        ``(private_key, public_key)`` strings, or ``(None, None)`` when
        either key-generation step fails.
    """
    priv_out, _priv_err, priv_rc = self._run_wg(['genkey'])
    if priv_rc != 0:
        return (None, None)
    private_key = priv_out.strip()

    # `wg pubkey` derives the public key from the private key on stdin,
    # so it must be run directly with input= — the generic _run_wg helper
    # does not feed stdin.  (Bug fix: the old code additionally called
    # self._run_wg(['pubkey']) first and discarded the result; with no
    # stdin supplied that call could block until its timeout on every
    # keypair generation.)
    try:
        proc = subprocess.run(
            [self._wg_bin, 'pubkey'], input=private_key,
            capture_output=True, text=True, timeout=5
        )
    except (OSError, subprocess.TimeoutExpired):
        return (None, None)
    if proc.returncode != 0:
        return (None, None)
    public_key = proc.stdout.strip()
    return (private_key, public_key)
|
||||
|
||||
def generate_preshared_key(self):
    """Generate a WireGuard preshared key via `wg genpsk` (None on failure)."""
    out, _, code = self._run_wg(['genpsk'])
    return out.strip() if code == 0 else None
|
||||
|
||||
# ── IP Assignment ────────────────────────────────────────────────
|
||||
|
||||
def get_next_ip(self):
    """Allocate the next available client IP in the subnet.

    The last used host octet is persisted in ``self._last_ip_file``.
    Octets wrap back to 2 once they pass 254, so .255 (broadcast) and
    invalid octets are never handed out; .1 is reserved for the server.
    NOTE: wrap-around does not check for collisions with clients that
    are still registered.
    """
    try:
        if self._last_ip_file.exists():
            last_octet = int(self._last_ip_file.read_text().strip())
        else:
            last_octet = 1
    except (ValueError, OSError):
        # Missing/corrupt counter file: restart from the server octet.
        last_octet = 1

    next_octet = last_octet + 1
    if next_octet > 254:
        # Bug fix: the counter previously grew without bound, eventually
        # producing invalid addresses like x.y.z.255 / x.y.z.256.
        next_octet = 2
    self._last_ip_file.write_text(str(next_octet))

    # Extract subnet prefix (e.g. "10.1.0" from "10.1.0.0/24")
    prefix = '.'.join(self._subnet.split('.')[:3])
    return f'{prefix}.{next_octet}'
|
||||
|
||||
# ── Client/Peer Management ───────────────────────────────────────
|
||||
|
||||
def create_client(self, name, dns=None, allowed_ips=None):
    """Create a new WireGuard client/peer.

    Generates a keypair and preshared key, assigns the next tunnel IP,
    registers the peer with the live interface, appends it to the config
    file (best-effort) and persists the record in the JSON client store.

    Args:
        name: Human-readable client name.
        dns: DNS servers for the client config (server default if None).
        allowed_ips: Routes for the client config (server default if None).

    Returns:
        ``{'ok': True, 'client': <record>}`` on success, otherwise
        ``{'ok': False, 'error': <message>}``.
    """
    private_key, public_key = self.generate_keypair()
    if not private_key:
        return {'ok': False, 'error': 'Failed to generate keypair'}

    preshared_key = self.generate_preshared_key()
    # NOTE(review): the IP counter advances even if the wg update below
    # fails, so failed creations leave gaps in the address space.
    assigned_ip = self.get_next_ip()

    # Short random ID; 8 hex-ish chars of a UUID4 is enough for a small store.
    client_id = str(uuid.uuid4())[:8]
    client = {
        'id': client_id,
        'name': name,
        'private_key': private_key,
        'public_key': public_key,
        'preshared_key': preshared_key or '',
        'assigned_ip': assigned_ip,
        'dns': dns or self._default_dns,
        'allowed_ips': allowed_ips or self._default_allowed_ips,
        'enabled': True,
        'created_at': datetime.now().isoformat(),
    }

    # Add to live WireGuard
    try:
        self._add_peer_to_wg(public_key, preshared_key, assigned_ip)
    except Exception as e:
        # Live registration failed: abort before persisting anything.
        return {'ok': False, 'error': f'Failed to add peer to WG: {e}'}

    # Add to config file
    try:
        self._append_peer_to_config(public_key, preshared_key, assigned_ip, name)
    except Exception as e:
        pass  # Non-fatal, peer is live

    # Save to JSON store
    clients = self._load_clients()
    clients[client_id] = client
    self._save_clients(clients)

    return {'ok': True, 'client': client}
|
||||
|
||||
def delete_client(self, client_id):
    """Remove a client everywhere: live interface, config file, JSON store."""
    clients = self._load_clients()
    client = clients.get(client_id)
    if client is None:
        return {'ok': False, 'error': 'Client not found'}

    # Drop the peer from the running interface first.
    self._remove_peer_from_wg(client['public_key'])

    # Config-file cleanup is best-effort; the peer is already gone live.
    try:
        self._remove_peer_from_config(client['public_key'])
    except Exception:
        pass

    # Finally forget the record in the JSON store.
    clients.pop(client_id)
    self._save_clients(clients)

    return {'ok': True, 'message': f'Client {client["name"]} deleted'}
|
||||
|
||||
def toggle_client(self, client_id, enabled):
    """Enable or disable a client by (de)registering its live peer entry."""
    clients = self._load_clients()
    client = clients.get(client_id)
    if client is None:
        return {'ok': False, 'error': 'Client not found'}

    currently_on = client.get('enabled', True)
    if enabled and not currently_on:
        # Transition off -> on: re-register the peer with the kernel.
        self._add_peer_to_wg(
            client['public_key'], client.get('preshared_key', ''),
            client['assigned_ip'])
    elif not enabled and currently_on:
        # Transition on -> off: deregister the peer.
        self._remove_peer_from_wg(client['public_key'])

    client['enabled'] = enabled
    self._save_clients(clients)
    action = 'enabled' if enabled else 'disabled'
    return {'ok': True, 'message': f'Client {client["name"]} {action}'}
|
||||
|
||||
def get_all_clients(self):
    """Return every stored client record as a list."""
    return list(self._load_clients().values())
|
||||
|
||||
def get_client(self, client_id):
    """Look up one client record by ID; None when absent."""
    return self._load_clients().get(client_id)
|
||||
|
||||
def get_peer_status(self):
    """Parse `wg show <iface>` into per-peer stats.

    Returns:
        Dict keyed by peer public key.  Each value carries handshake age
        in seconds (None when unknown), rx/tx byte counts plus their
        human-readable strings, allowed IPs, and the last endpoint.
        An empty dict is returned when the interface is down.
    """
    stdout, _, rc = self._run_wg_sudo(['show', self._interface])
    if rc != 0:
        return {}

    peers = {}
    current_peer = None  # public key of the peer block being parsed

    for line in stdout.split('\n'):
        line = line.strip()
        if line.startswith('peer:'):
            # New "peer:" header: seed a record with empty defaults.
            current_peer = line.split(':', 1)[1].strip()
            peers[current_peer] = {
                'public_key': current_peer,
                'latest_handshake': None,
                'latest_handshake_str': '',
                'transfer_rx': 0,
                'transfer_tx': 0,
                'transfer_rx_str': '',
                'transfer_tx_str': '',
                'allowed_ips': '',
                'endpoint': '',
            }
        elif current_peer:
            # Indented attribute lines belong to the last peer header.
            if line.startswith('latest handshake:'):
                hs_str = line.split(':', 1)[1].strip()
                peers[current_peer]['latest_handshake'] = _parse_handshake(hs_str)
                peers[current_peer]['latest_handshake_str'] = hs_str
            elif line.startswith('transfer:'):
                # Format: "transfer: 1.5 MiB received, 3.2 MiB sent"
                transfer = line.split(':', 1)[1].strip()
                parts = transfer.split(',')
                if len(parts) == 2:
                    peers[current_peer]['transfer_rx'] = _parse_transfer(parts[0].strip())
                    peers[current_peer]['transfer_tx'] = _parse_transfer(parts[1].strip())
                    peers[current_peer]['transfer_rx_str'] = parts[0].strip().replace('received', '').strip()
                    peers[current_peer]['transfer_tx_str'] = parts[1].strip().replace('sent', '').strip()
            elif line.startswith('allowed ips:'):
                peers[current_peer]['allowed_ips'] = line.split(':', 1)[1].strip()
            elif line.startswith('endpoint:'):
                peers[current_peer]['endpoint'] = line.split(':', 1)[1].strip()

    return peers
|
||||
|
||||
def _add_peer_to_wg(self, public_key, preshared_key, allowed_ip):
    """Add peer to live WireGuard interface via `sudo wg set`.

    When a preshared key is supplied, it is piped to the child through
    /dev/stdin so the secret never appears on the command line (where it
    would be visible in the process list).

    Raises:
        RuntimeError: when `wg set` exits non-zero.
    """
    if preshared_key:
        stdout, stderr, rc = self._run_cmd(
            ['sudo', self._wg_bin, 'set', self._interface,
             'peer', public_key,
             'preshared-key', '/dev/stdin',
             'allowed-ips', f'{allowed_ip}/32'],
            input_data=preshared_key, timeout=10
        )
    else:
        stdout, stderr, rc = self._run_wg_sudo(
            ['set', self._interface,
             'peer', public_key,
             'allowed-ips', f'{allowed_ip}/32'])
    if rc != 0:
        raise RuntimeError(f'wg set failed: {stderr}')
|
||||
|
||||
def _remove_peer_from_wg(self, public_key):
|
||||
"""Remove peer from live WireGuard interface."""
|
||||
self._run_wg_sudo(
|
||||
['set', self._interface, 'peer', public_key, 'remove'])
|
||||
|
||||
def _append_peer_to_config(self, public_key, preshared_key, allowed_ip, name=''):
|
||||
"""Append [Peer] block to wg0.conf."""
|
||||
config_path = Path(self._wg_config_path)
|
||||
if not config_path.exists():
|
||||
return
|
||||
content = config_path.read_text()
|
||||
timestamp = time.strftime('%c')
|
||||
block = f'\n# Client: {name} - Added {timestamp}\n[Peer]\n'
|
||||
block += f'PublicKey = {public_key}\n'
|
||||
if preshared_key:
|
||||
block += f'PresharedKey = {preshared_key}\n'
|
||||
block += f'AllowedIPs = {allowed_ip}/32\n'
|
||||
# Write via sudo tee
|
||||
self._run_cmd(
|
||||
['sudo', 'tee', '-a', self._wg_config_path],
|
||||
input_data=block, timeout=5)
|
||||
|
||||
def _remove_peer_from_config(self, public_key):
    """Remove [Peer] block(s) containing *public_key* from wg0.conf.

    The config is read and rewritten through sudo (it is root-owned).
    A peer "block" is either a `# Client:` comment plus the lines after
    it, or a bare `[Peer]` header plus its lines; a block ends at a
    blank line, a non-[Peer] section header, or the next client comment.
    Any block whose joined text contains *public_key* is dropped, and
    leftover runs of blank lines are collapsed before writing back.
    """
    config_path = Path(self._wg_config_path)
    if not config_path.exists():
        return
    # Read via sudo
    stdout, _, rc = self._run_cmd(['sudo', 'cat', self._wg_config_path])
    if rc != 0:
        return
    content = stdout

    lines = content.split('\n')
    new_lines = []
    i = 0
    while i < len(lines):
        line = lines[i]
        # Check comment line preceding peer block
        if line.strip().startswith('# Client:') and i + 1 < len(lines):
            # Collect the comment line plus everything up to the block end.
            block_lines = [line]
            j = i + 1
            while j < len(lines):
                if (lines[j].strip() == '' or
                        (lines[j].strip().startswith('[') and lines[j].strip() != '[Peer]') or
                        lines[j].strip().startswith('# Client:')):
                    break
                block_lines.append(lines[j])
                j += 1
            if public_key in '\n'.join(block_lines):
                # Matched: skip (delete) the whole block.
                i = j
                continue
        elif line.strip() == '[Peer]':
            # Same scan for a [Peer] block with no preceding comment.
            # NOTE(review): this duplicates the scan above; a shared
            # helper would remove the repetition.
            block_lines = [line]
            j = i + 1
            while j < len(lines):
                if (lines[j].strip() == '' or
                        (lines[j].strip().startswith('[') and lines[j].strip() != '[Peer]') or
                        lines[j].strip().startswith('# Client:')):
                    break
                block_lines.append(lines[j])
                j += 1
            if public_key in '\n'.join(block_lines):
                i = j
                continue
        new_lines.append(line)
        i += 1

    # Collapse the runs of blank lines left behind by the deletion.
    cleaned = re.sub(r'\n{3,}', '\n\n', '\n'.join(new_lines))
    # Write back via sudo tee
    self._run_cmd(
        ['sudo', 'tee', self._wg_config_path],
        input_data=cleaned, timeout=5)
|
||||
|
||||
def import_existing_peers(self):
    """Parse wg0.conf and import existing peers into the JSON store.

    Peers already present in the store (matched by public key) are
    skipped.  Names come from the `# Client: <name>` comments this tool
    writes; blocks with no comment import as 'legacy-client'.  Imported
    records have no private key, so client configs cannot be fully
    regenerated for them.

    Returns:
        ``{'ok': True, 'imported': <count>}`` or, when the config cannot
        be read, ``{'ok': False, 'error': ..., 'imported': 0}``.
    """
    stdout, _, rc = self._run_cmd(['sudo', 'cat', self._wg_config_path])
    if rc != 0:
        return {'ok': False, 'error': 'Cannot read WG config', 'imported': 0}

    lines = stdout.split('\n')
    peers = []
    current_peer = None   # dict being filled for the open [Peer] block
    pending_name = None   # name from the most recent `# Client:` comment

    for line in lines:
        stripped = line.strip()
        name_match = re.match(r'# Client:\s*(.+?)(?:\s*-\s*Added|$)', stripped)
        if name_match:
            pending_name = name_match.group(1).strip()
            continue
        if stripped == '[Peer]':
            current_peer = {'name': pending_name}
            peers.append(current_peer)
            pending_name = None
            continue
        if stripped.startswith('['):
            # Any other section header ([Interface], ...) ends peer parsing.
            current_peer = None
            pending_name = None
            continue
        if current_peer is not None and '=' in stripped:
            # "Key = Value" lines inside the open [Peer] block.
            key, val = stripped.split('=', 1)
            current_peer[key.strip()] = val.strip()

    clients = self._load_clients()
    existing_keys = {c['public_key'] for c in clients.values()}
    imported = 0

    for peer in peers:
        public_key = peer.get('PublicKey')
        allowed_ip = peer.get('AllowedIPs', '').replace('/32', '')
        preshared_key = peer.get('PresharedKey', '')
        name = peer.get('name') or 'legacy-client'

        if not public_key or not allowed_ip:
            continue
        if public_key in existing_keys:
            continue

        # Ensure unique name
        # (recomputed each pass so names assigned earlier in this same
        # import run are also taken into account)
        existing_names = {c['name'] for c in clients.values()}
        final_name = name
        counter = 1
        while final_name in existing_names:
            final_name = f'{name}-{counter}'
            counter += 1

        client_id = str(uuid.uuid4())[:8]
        clients[client_id] = {
            'id': client_id,
            'name': final_name,
            'private_key': '',
            'public_key': public_key,
            'preshared_key': preshared_key,
            'assigned_ip': allowed_ip,
            'dns': self._default_dns,
            'allowed_ips': self._default_allowed_ips,
            'enabled': True,
            'created_at': datetime.now().isoformat(),
            'imported': True,
        }
        existing_keys.add(public_key)
        imported += 1

    self._save_clients(clients)
    return {'ok': True, 'imported': imported}
|
||||
|
||||
# ── Client Config Generation ─────────────────────────────────────
|
||||
|
||||
def generate_client_config(self, client):
    """Render a client's WireGuard .conf file contents as a string.

    Imported clients have no stored private key; for them the
    PrivateKey line is omitted and must be filled in by the user.
    """
    parts = ['[Interface]']
    priv = client.get('private_key')
    if priv:
        parts.append(f"PrivateKey = {priv}")
    parts.append(f"Address = {client['assigned_ip']}/32")
    parts.append(f"DNS = {client.get('dns', self._default_dns)}")
    parts.append('')

    parts.append('[Peer]')
    parts.append(f'PublicKey = {self._get_server_public_key()}')
    psk = client.get('preshared_key')
    if psk:
        parts.append(f"PresharedKey = {psk}")
    parts.append(f'Endpoint = {self._get_server_endpoint()}:{self._listen_port}')
    parts.append(
        f"AllowedIPs = {client.get('allowed_ips', self._default_allowed_ips)}")
    parts.append('PersistentKeepalive = 25')
    parts.append('')
    return '\n'.join(parts)
|
||||
|
||||
def generate_qr_code(self, config_text):
    """Render config text as PNG bytes via qrcode; None when unavailable."""
    try:
        # qrcode is an optional dependency; missing => no QR support.
        import qrcode
        code = qrcode.QRCode(
            version=1, box_size=10, border=4,
            error_correction=qrcode.constants.ERROR_CORRECT_L)
        code.add_data(config_text)
        code.make(fit=True)
        image = code.make_image(fill_color='black', back_color='white')
        buffer = io.BytesIO()
        image.save(buffer, format='PNG')
        buffer.seek(0)
        return buffer.getvalue()
    except ImportError:
        return None
|
||||
|
||||
# ── Remote ADB — TCP/IP ──────────────────────────────────────────
|
||||
|
||||
def adb_connect(self, client_ip):
    """ADB-connect to a device at <client_ip>:5555 through the tunnel."""
    out, err, _rc = self._run_adb(
        ['connect', f'{client_ip}:5555'], timeout=15)
    combined = (out + err).strip()
    # adb prints "connected to ..." / "already connected to ..." on success.
    if 'connected' in combined.lower():
        return {'ok': True, 'message': combined}
    return {'ok': False, 'error': combined or 'Connection failed'}
|
||||
|
||||
def adb_disconnect(self, client_ip):
    """Tear down the ADB TCP/IP session for <client_ip>:5555."""
    out, err, code = self._run_adb(
        ['disconnect', f'{client_ip}:5555'], timeout=10)
    return {'ok': code == 0, 'message': (out + err).strip()}
|
||||
|
||||
def get_adb_remote_devices(self):
    """Return `adb devices -l` entries whose serial is inside the WG subnet."""
    out, _, code = self._run_adb(['devices', '-l'], timeout=10)
    if code != 0:
        return []
    # Only serials like "10.1.0.x:5555" (i.e. inside the tunnel) qualify.
    prefix = '.'.join(self._subnet.split('.')[:3]) + '.'
    found = []
    for raw in out.strip().split('\n')[1:]:  # first line is the header
        raw = raw.strip()
        if not raw or 'List of' in raw:
            continue
        fields = raw.split()
        if not fields or not fields[0].startswith(prefix):
            continue
        serial = fields[0]
        model = ''
        for token in fields[2:]:
            if token.startswith('model:'):
                model = token.split(':', 1)[1]
        found.append({
            'serial': serial,
            'state': fields[1] if len(fields) > 1 else 'unknown',
            'model': model,
            'ip': serial.split(':')[0],
        })
    return found
|
||||
|
||||
def auto_connect_peers(self):
    """Attempt `adb connect` to every enabled peer with a fresh handshake."""
    live = self.get_peer_status()
    results = []

    for record in self._load_clients().values():
        if not record.get('enabled', True):
            continue
        # Only bother when the peer handshook within the last 3 minutes.
        handshake = live.get(record['public_key'], {}).get('latest_handshake')
        if handshake is None or handshake >= 180:
            continue
        ip = record['assigned_ip']
        results.append({
            'name': record['name'],
            'ip': ip,
            'result': self.adb_connect(ip),
        })

    return {'ok': True, 'results': results, 'attempted': len(results)}
|
||||
|
||||
# ── Remote ADB — USB/IP ──────────────────────────────────────────
|
||||
|
||||
def usbip_available(self):
    """Report whether the usbip binary was located."""
    has_binary = self._usbip_bin is not None
    return has_binary
|
||||
|
||||
def check_usbip_modules(self):
    """True when the vhci_hcd (USB/IP client) module appears in lsmod output."""
    modules, _, _ = self._run_cmd(['lsmod'], timeout=5)
    return 'vhci_hcd' in modules
|
||||
|
||||
def load_usbip_modules(self):
    """modprobe the vhci-hcd module required for USB/IP imports."""
    _, err, code = self._run_cmd(
        ['sudo', 'modprobe', 'vhci-hcd'], timeout=10)
    if code != 0:
        return {'ok': False, 'error': err.strip() or 'Failed to load module'}
    return {'ok': True, 'message': 'vhci-hcd module loaded'}
|
||||
|
||||
def usbip_list_remote(self, client_ip):
    """List USB devices exported by a remote usbipd host."""
    if not self._usbip_bin:
        return {'ok': False, 'error': 'usbip not found', 'devices': []}
    out, err, code = self._run_cmd(
        ['sudo', self._usbip_bin, 'list', '-r', client_ip], timeout=15)
    if code != 0:
        return {'ok': False, 'error': err.strip() or 'Failed to list',
                'devices': []}

    found = []
    entry = None
    for raw in out.split('\n'):
        raw = raw.strip()
        # Device header lines look like: "1-1: vendor:product ..."
        header = re.match(r'(\d+-[\d.]+):\s*(.+)', raw)
        if header:
            entry = {
                'busid': header.group(1),
                'description': header.group(2).strip(),
            }
            found.append(entry)
            continue
        if entry and ':' in raw and not raw.startswith('usbip'):
            # Continuation lines carry extra detail for the same device.
            entry['description'] += f' | {raw}'

    return {'ok': True, 'devices': found}
|
||||
|
||||
def usbip_attach(self, client_ip, busid):
    """Import a remote USB device (identified by bus id) over USB/IP."""
    if not self._usbip_bin:
        return {'ok': False, 'error': 'usbip not found'}
    _out, err, code = self._run_cmd(
        ['sudo', self._usbip_bin, 'attach', '-r', client_ip, '-b', busid],
        timeout=15)
    if code != 0:
        return {'ok': False, 'error': err.strip() or 'Failed to attach'}
    return {'ok': True, 'message': f'Attached {busid} from {client_ip}'}
|
||||
|
||||
def usbip_detach(self, port):
    """Release a USB/IP virtual device identified by its port number."""
    if not self._usbip_bin:
        return {'ok': False, 'error': 'usbip not found'}
    _out, err, code = self._run_cmd(
        ['sudo', self._usbip_bin, 'detach', '-p', str(port)], timeout=10)
    if code != 0:
        return {'ok': False, 'error': err.strip() or 'Failed to detach'}
    return {'ok': True, 'message': f'Detached port {port}'}
|
||||
|
||||
def usbip_port_status(self):
    """List USB/IP virtual ports currently imported on this host."""
    if not self._usbip_bin:
        return {'ok': False, 'error': 'usbip not found', 'ports': []}
    out, err, code = self._run_cmd(
        ['sudo', self._usbip_bin, 'port'], timeout=10)
    if code != 0:
        return {'ok': False, 'error': err.strip(), 'ports': []}

    found = []
    entry = None
    for raw in out.split('\n'):
        raw = raw.strip()
        header = re.match(r'Port\s+(\d+):\s*(.+)', raw)
        if header:
            entry = {
                'port': header.group(1),
                'status': header.group(2).strip(),
            }
            found.append(entry)
        elif entry and raw and not raw.startswith('Port'):
            # Detail line for the current port (device description);
            # later detail lines overwrite earlier ones.
            entry['detail'] = raw

    return {'ok': True, 'ports': found}
|
||||
|
||||
def get_usbip_status(self):
    """Aggregate USB/IP availability, module state and imported ports."""
    available = self.usbip_available()
    if available:
        modules_loaded = self.check_usbip_modules()
        port_info = self.usbip_port_status()
    else:
        modules_loaded = False
        port_info = {'ports': []}
    active = port_info.get('ports', [])
    return {
        'available': available,
        'modules_loaded': modules_loaded,
        'active_imports': len(active),
        'ports': active,
    }
|
||||
|
||||
# ── UPnP Integration ─────────────────────────────────────────────
|
||||
|
||||
def refresh_upnp_mapping(self):
    """Ask the UPnP manager to map the WireGuard UDP listen port."""
    try:
        from core.upnp import get_upnp_manager
        upnp = get_upnp_manager()
        return upnp.add_mapping(
            int(self._listen_port), 'UDP',
            f'WireGuard VPN (port {self._listen_port})')
    except Exception as exc:
        # UPnP stack missing or gateway unreachable: report, don't raise.
        return {'ok': False, 'error': str(exc)}
|
||||
|
||||
|
||||
# ── Utility Functions ────────────────────────────────────────────────
|
||||
|
||||
def _parse_handshake(hs_str):
|
||||
"""Parse handshake time string into seconds ago, or None."""
|
||||
total_seconds = 0
|
||||
parts = hs_str.replace(' ago', '').split(',')
|
||||
for part in parts:
|
||||
part = part.strip()
|
||||
match = re.match(r'(\d+)\s+(second|minute|hour|day)', part)
|
||||
if match:
|
||||
val = int(match.group(1))
|
||||
unit = match.group(2)
|
||||
if unit == 'second':
|
||||
total_seconds += val
|
||||
elif unit == 'minute':
|
||||
total_seconds += val * 60
|
||||
elif unit == 'hour':
|
||||
total_seconds += val * 3600
|
||||
elif unit == 'day':
|
||||
total_seconds += val * 86400
|
||||
return total_seconds if total_seconds > 0 else None
|
||||
|
||||
|
||||
def _parse_transfer(s):
|
||||
"""Parse transfer string like '1.5 MiB' into bytes."""
|
||||
match = re.match(r'([\d.]+)\s*(\w+)', s)
|
||||
if not match:
|
||||
return 0
|
||||
val = float(match.group(1))
|
||||
unit = match.group(2)
|
||||
multipliers = {
|
||||
'B': 1, 'KiB': 1024, 'MiB': 1024**2,
|
||||
'GiB': 1024**3, 'TiB': 1024**4
|
||||
}
|
||||
return int(val * multipliers.get(unit, 1))
|
||||
|
||||
|
||||
# ── Singleton ────────────────────────────────────────────────────────
|
||||
|
||||
# Lazily-created module-level singleton; see get_wireguard_manager().
_manager = None
|
||||
|
||||
def get_wireguard_manager(config=None):
    """Return the process-wide WireGuardManager singleton.

    On first call, *config* (or, when None, the ``[wireguard]`` section of
    autarch_settings.conf with the fallbacks below) is used to construct
    the manager.  Later calls ignore *config* and return the cached
    instance.
    """
    global _manager
    if _manager is None:
        # Load config from autarch_settings.conf
        if config is None:
            try:
                from core.config import get_config
                cfg = get_config()
                config = {
                    'config_path': cfg.get('wireguard', 'config_path',
                                           fallback='/etc/wireguard/wg0.conf'),
                    'interface': cfg.get('wireguard', 'interface', fallback='wg0'),
                    'subnet': cfg.get('wireguard', 'subnet', fallback='10.1.0.0/24'),
                    'server_address': cfg.get('wireguard', 'server_address',
                                              fallback='10.1.0.1'),
                    'listen_port': cfg.get('wireguard', 'listen_port', fallback='51820'),
                    'default_dns': cfg.get('wireguard', 'default_dns',
                                           fallback='1.1.1.1, 8.8.8.8'),
                    'default_allowed_ips': cfg.get('wireguard', 'default_allowed_ips',
                                                   fallback='0.0.0.0/0, ::/0'),
                }
            except Exception:
                # Config system unavailable: let the manager use its own defaults.
                config = {}
        _manager = WireGuardManager(config)
    return _manager
|
||||
754
core/wireshark.py
Normal file
754
core/wireshark.py
Normal file
@@ -0,0 +1,754 @@
|
||||
"""
|
||||
AUTARCH Wireshark/Packet Analysis Engine
|
||||
Scapy-based packet capture and analysis with optional tshark fallback.
|
||||
|
||||
Primary engine: scapy (pure Python, needs libpcap for live capture)
|
||||
Fallback: tshark CLI (if installed, for advanced protocol dissection)
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
import json
|
||||
import time
|
||||
import struct
|
||||
import subprocess
|
||||
import threading
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
from collections import Counter, defaultdict
|
||||
from typing import Optional, List, Dict, Any, Callable
|
||||
|
||||
from core.paths import find_tool, get_data_dir
|
||||
|
||||
# Try importing scapy
# Availability flags are computed once at import time; the rest of this
# module consults SCAPY_AVAILABLE / TSHARK_PATH instead of re-probing.
SCAPY_AVAILABLE = False
try:
    from scapy.all import (
        sniff, rdpcap, wrpcap, get_if_list, conf,
        IP, IPv6, TCP, UDP, DNS, DNSQR, DNSRR, Raw, Ether, ARP, ICMP,
    )
    SCAPY_AVAILABLE = True
except ImportError:
    pass

# Check for tshark
TSHARK_PATH = find_tool('tshark')  # path string, or None when not installed
|
||||
|
||||
|
||||
class WiresharkManager:
|
||||
"""Packet capture and analysis using scapy + optional tshark."""
|
||||
|
||||
def __init__(self):
    """Initialise capture state and ensure the captures directory exists."""
    # Live-capture bookkeeping; a single capture runs at a time.
    self._capture_thread = None     # background sniffing thread
    self._capture_running = False   # True while a capture is in progress
    self._capture_packets = []      # per-packet summary dicts for this run
    self._capture_stats = {}        # metadata for the current/last run
    self._capture_callback = None   # optional live-streaming hook
    self._last_packets = None       # raw packets from the last capture/read
    self._capture_file = None       # pcap path of the current/last capture
    self._data_dir = get_data_dir() / 'captures'
    self._data_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
@property
def scapy_available(self):
    # True when the scapy import at module load time succeeded.
    return SCAPY_AVAILABLE
|
||||
|
||||
@property
def tshark_available(self):
    # True when a tshark binary was found at module load time.
    return TSHARK_PATH is not None
|
||||
|
||||
@property
def can_capture(self):
    """Whether live sniffing is possible (scapy present and, on POSIX, root)."""
    if not SCAPY_AVAILABLE:
        return False
    geteuid = getattr(os, 'geteuid', None)
    if geteuid is None:
        # Windows has no geteuid; assume capture rights there.
        return True
    return geteuid() == 0
|
||||
|
||||
def get_status(self) -> Dict[str, Any]:
    """Snapshot of engine capabilities and current capture state."""
    status = {
        'scapy': SCAPY_AVAILABLE,
        'tshark': self.tshark_available,
        'tshark_path': TSHARK_PATH or '',
        'can_capture': self.can_capture,
        'capturing': self._capture_running,
    }
    return status
|
||||
|
||||
# ==================== INTERFACES ====================
|
||||
|
||||
def list_interfaces(self) -> List[Dict[str, str]]:
    """List available network interfaces.

    Tries scapy first, then `tshark -D`, then /sys/class/net, using the
    first source that yields anything.  Each entry has 'name',
    'description' and the 'source' it came from.
    """
    interfaces = []

    if SCAPY_AVAILABLE:
        try:
            for iface in get_if_list():
                interfaces.append({'name': iface, 'description': '', 'source': 'scapy'})
        except Exception:
            pass

    # Fallback/supplement with tshark
    if TSHARK_PATH and not interfaces:
        try:
            result = subprocess.run(
                [TSHARK_PATH, '-D'],
                capture_output=True, text=True, timeout=10
            )
            if result.returncode == 0:
                for line in result.stdout.strip().split('\n'):
                    if line.strip():
                        # Format: "1. eth0 (Description)"
                        match = re.match(r'\d+\.\s+(\S+)\s*(?:\((.+)\))?', line)
                        if match:
                            interfaces.append({
                                'name': match.group(1),
                                'description': match.group(2) or '',
                                'source': 'tshark',
                            })
        except Exception:
            pass

    # Fallback to /sys/class/net
    if not interfaces:
        net_dir = Path('/sys/class/net')
        if net_dir.exists():
            for d in sorted(net_dir.iterdir()):
                interfaces.append({'name': d.name, 'description': '', 'source': 'sysfs'})

    return interfaces
|
||||
|
||||
# ==================== CAPTURE ====================
|
||||
|
||||
def start_capture(self, interface: str = None, bpf_filter: str = None,
                  duration: int = 30, output_file: str = None,
                  callback: Callable = None) -> Dict[str, Any]:
    """Start packet capture in a background thread.

    Args:
        interface: Network interface (None = default)
        bpf_filter: BPF filter string (e.g., "tcp port 80")
        duration: Capture duration in seconds (clamped to 5..300)
        output_file: Save to pcap file (default: timestamped file in the
            captures data dir)
        callback: Called with each packet dict for live streaming

    Returns:
        Status dict: {'status': 'started', 'file': ...} or {'error': ...}.
        The capture itself runs asynchronously; poll get_capture_stats().
    """
    if not SCAPY_AVAILABLE:
        return {'error': 'Scapy not available'}
    if not self.can_capture:
        return {'error': 'Root privileges required for live capture'}
    if self._capture_running:
        # Only one capture at a time; state below is shared.
        return {'error': 'Capture already running'}

    # Clamp to a sane window (5..300 seconds).
    duration = max(5, min(300, duration))
    self._capture_packets = []
    self._capture_running = True
    self._capture_callback = callback
    self._capture_stats = {
        'interface': interface or 'default',
        'filter': bpf_filter or '',
        'start_time': datetime.now().isoformat(),
        'duration': duration,
        'packet_count': 0,
    }

    if output_file:
        self._capture_file = output_file
    else:
        # Default to a timestamped file under the captures data dir.
        ts = datetime.now().strftime('%Y%m%d_%H%M%S')
        self._capture_file = str(self._data_dir / f'capture_{ts}.pcap')

    def _do_capture():
        # Runs on the background thread; results are reported through
        # self._capture_stats so callers can poll get_capture_stats().
        try:
            kwargs = {
                'timeout': duration,
                'prn': self._packet_handler,
                'store': True,
            }
            if interface:
                kwargs['iface'] = interface
            if bpf_filter:
                kwargs['filter'] = bpf_filter

            packets = sniff(**kwargs)
            self._last_packets = packets

            # Save to pcap
            if self._capture_file and packets:
                wrpcap(self._capture_file, packets)
                self._capture_stats['output_file'] = self._capture_file

        except Exception as e:
            self._capture_stats['error'] = str(e)
        finally:
            self._capture_running = False
            self._capture_stats['end_time'] = datetime.now().isoformat()
            self._capture_stats['packet_count'] = len(self._capture_packets)

    # Daemon thread so a hung capture never blocks interpreter shutdown.
    self._capture_thread = threading.Thread(target=_do_capture, daemon=True)
    self._capture_thread.start()

    return {'status': 'started', 'file': self._capture_file}
|
||||
|
||||
def _packet_handler(self, pkt):
|
||||
"""Process each captured packet."""
|
||||
summary = self._packet_to_dict(pkt)
|
||||
self._capture_packets.append(summary)
|
||||
self._capture_stats['packet_count'] = len(self._capture_packets)
|
||||
|
||||
if self._capture_callback:
|
||||
try:
|
||||
self._capture_callback(summary)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
def stop_capture(self) -> Dict[str, Any]:
    """Stop running capture.

    NOTE(review): this only clears the running flag — scapy's sniff()
    loop is not actually interrupted here and will exit when its timeout
    elapses (the inline comment below acknowledges this).  A true stop
    would need scapy's AsyncSniffer or a stop_filter; confirm before
    relying on immediate termination.
    """
    if not self._capture_running:
        return {'status': 'not_running'}

    self._capture_running = False
    # Signal scapy to stop - set a flag it checks
    try:
        conf.sniff_promisc = False  # This won't stop it, but thread will timeout
    except Exception:
        pass

    return {
        'status': 'stopping',
        'packets': len(self._capture_packets),
        'file': self._capture_file,
    }
|
||||
|
||||
def get_capture_stats(self) -> Dict[str, Any]:
    """Return a copy of stats for the current or most recent capture."""
    snapshot = dict(self._capture_stats)
    snapshot.update(
        running=self._capture_running,
        packet_count=len(self._capture_packets),
    )
    return snapshot
|
||||
|
||||
# ==================== PCAP READING ====================
|
||||
|
||||
def read_pcap(self, filepath: str, max_packets: int = 10000) -> Dict[str, Any]:
    """Load a PCAP file and return its packets as serializable dicts.

    Args:
        filepath: Path to the pcap file on disk.
        max_packets: Cap on how many packets to load.

    Returns:
        Dict with 'file', 'size', 'total_packets' and 'packets' keys,
        or a dict with an 'error' key on failure.
    """
    path = Path(filepath)
    if not path.exists():
        return {'error': f'File not found: {filepath}'}

    if not SCAPY_AVAILABLE:
        # No scapy available: fall back to tshark when it is installed.
        if TSHARK_PATH:
            return self._read_pcap_tshark(filepath, max_packets)
        return {'error': 'Neither scapy nor tshark available'}

    try:
        packets = rdpcap(str(path), count=max_packets)
        self._last_packets = packets
        summaries = [self._packet_to_dict(pkt) for pkt in packets]
        return {
            'file': str(path),
            'size': path.stat().st_size,
            'total_packets': len(packets),
            'packets': summaries,
        }
    except Exception as e:
        return {'error': f'Failed to read PCAP: {e}'}
|
||||
|
||||
def _read_pcap_tshark(self, filepath: str, max_packets: int) -> Dict[str, Any]:
|
||||
"""Read PCAP using tshark fallback."""
|
||||
try:
|
||||
result = subprocess.run(
|
||||
[TSHARK_PATH, '-r', filepath, '-c', str(max_packets),
|
||||
'-T', 'fields',
|
||||
'-e', 'frame.number', '-e', 'frame.time_relative',
|
||||
'-e', 'ip.src', '-e', 'ip.dst',
|
||||
'-e', 'frame.protocols', '-e', 'frame.len',
|
||||
'-e', '_ws.col.Info',
|
||||
'-E', 'separator=|'],
|
||||
capture_output=True, text=True, timeout=30
|
||||
)
|
||||
packets = []
|
||||
for line in result.stdout.strip().split('\n'):
|
||||
if not line.strip():
|
||||
continue
|
||||
parts = line.split('|')
|
||||
if len(parts) >= 6:
|
||||
packets.append({
|
||||
'number': int(parts[0]) if parts[0] else 0,
|
||||
'time': parts[1],
|
||||
'src': parts[2],
|
||||
'dst': parts[3],
|
||||
'protocol': parts[4].split(':')[-1] if parts[4] else '',
|
||||
'length': int(parts[5]) if parts[5] else 0,
|
||||
'info': parts[6] if len(parts) > 6 else '',
|
||||
})
|
||||
return {
|
||||
'file': filepath,
|
||||
'total_packets': len(packets),
|
||||
'packets': packets,
|
||||
'source': 'tshark',
|
||||
}
|
||||
except Exception as e:
|
||||
return {'error': f'tshark failed: {e}'}
|
||||
|
||||
def _packet_to_dict(self, pkt) -> Dict[str, Any]:
    """Summarize a scapy packet into a JSON-serializable dict."""
    summary = {
        'length': len(pkt),
        'protocol': '',
        'src': '',
        'dst': '',
        'sport': None,
        'dport': None,
        'info': '',
        'time': float(pkt.time) if hasattr(pkt, 'time') else 0,
    }

    # Network layer: addresses and base protocol label.
    if pkt.haslayer(IP):
        summary['protocol'] = 'IP'
        summary['src'] = pkt[IP].src
        summary['dst'] = pkt[IP].dst
    elif pkt.haslayer(IPv6):
        summary['protocol'] = 'IPv6'
        summary['src'] = pkt[IPv6].src
        summary['dst'] = pkt[IPv6].dst
    elif pkt.haslayer(ARP):
        summary['protocol'] = 'ARP'
        summary['src'] = pkt[ARP].psrc
        summary['dst'] = pkt[ARP].pdst
        summary['info'] = f'ARP {pkt[ARP].op}'

    # Transport layer overrides the protocol label and adds ports.
    if pkt.haslayer(TCP):
        tcp = pkt[TCP]
        summary['protocol'] = 'TCP'
        summary['sport'] = tcp.sport
        summary['dport'] = tcp.dport
        summary['info'] = f'{tcp.sport} -> {tcp.dport} [{tcp.flags}]'
    elif pkt.haslayer(UDP):
        udp = pkt[UDP]
        summary['protocol'] = 'UDP'
        summary['sport'] = udp.sport
        summary['dport'] = udp.dport
        summary['info'] = f'{udp.sport} -> {udp.dport}'
    elif pkt.haslayer(ICMP):
        summary['protocol'] = 'ICMP'
        summary['info'] = f'Type {pkt[ICMP].type} Code {pkt[ICMP].code}'

    # A DNS layer wins over the bare transport label.
    if pkt.haslayer(DNS):
        summary['protocol'] = 'DNS'
        if pkt.haslayer(DNSQR):
            qname = pkt[DNSQR].qname
            if isinstance(qname, bytes):
                qname = qname.decode(errors='ignore').rstrip('.')
            summary['info'] = f'Query: {qname}'

    # Best-effort application-protocol guess from well-known ports.
    # Rule order preserves the original precedence (HTTP before SSH, etc.).
    if summary['protocol'] in ('TCP', 'UDP'):
        ports = {summary['sport'], summary['dport']}
        for well_known, label in (({80, 8080}, 'HTTP'),
                                  ({443, 8443}, 'TLS'),
                                  ({53}, 'DNS'),
                                  ({22}, 'SSH'),
                                  ({21}, 'FTP'),
                                  ({25, 587}, 'SMTP'),
                                  ({23}, 'Telnet')):
            if ports & well_known:
                summary['protocol'] = label
                break

    return summary
|
||||
|
||||
# ==================== ANALYSIS ====================
|
||||
|
||||
def _get_packets(self, packets=None):
|
||||
"""Get packets from argument or last loaded."""
|
||||
if packets is not None:
|
||||
return packets
|
||||
if self._last_packets is not None:
|
||||
return self._last_packets
|
||||
if self._capture_packets:
|
||||
return self._capture_packets
|
||||
return []
|
||||
|
||||
def get_protocol_hierarchy(self, packets=None) -> Dict[str, Any]:
    """Return per-protocol packet counts and percentages.

    Uses the last loaded/captured packets when *packets* is None.
    """
    pkts = self._get_packets(packets)
    total = len(pkts)
    if not total:
        return {'protocols': {}, 'total': 0}

    # Accept both summary dicts and raw scapy packets.
    counts = Counter(
        pkt.get('protocol', 'Unknown') if isinstance(pkt, dict)
        else self._packet_to_dict(pkt).get('protocol', 'Unknown')
        for pkt in pkts
    )

    protocols = {
        name: {'count': n, 'percent': round(n * 100 / total, 1)}
        for name, n in counts.most_common()
    }
    return {'protocols': protocols, 'total': total}
|
||||
|
||||
def extract_conversations(self, packets=None) -> List[Dict[str, Any]]:
    """Aggregate packets into bidirectional IP conversations with basic stats."""
    pkts = self._get_packets(packets)
    if not pkts:
        return []

    stats = defaultdict(lambda: {'packets': 0, 'bytes': 0, 'protocols': set()})

    for pkt in pkts:
        summary = pkt if isinstance(pkt, dict) else self._packet_to_dict(pkt)
        src = summary.get('src', '')
        dst = summary.get('dst', '')
        if not src or not dst:
            continue

        # Direction-insensitive key: A->B and B->A are one conversation.
        endpoint_a, endpoint_b = sorted((src, dst))
        entry = stats[(endpoint_a, endpoint_b)]
        entry['packets'] += 1
        entry['bytes'] += summary.get('length', 0)
        entry['protocols'].add(summary.get('protocol', ''))
        entry['src'] = endpoint_a
        entry['dst'] = endpoint_b

    busiest = sorted(stats.values(), key=lambda e: e['packets'], reverse=True)
    return [
        {
            'src': e['src'],
            'dst': e['dst'],
            'packets': e['packets'],
            'bytes': e['bytes'],
            'protocols': list(e['protocols']),
        }
        for e in busiest[:100]  # keep only the top 100 conversations
    ]
|
||||
|
||||
def extract_dns_queries(self, packets=None) -> List[Dict[str, Any]]:
    """Collect DNS queries (with the first seen response) ordered by frequency."""
    pkts = self._get_packets(packets)
    if not pkts:
        return []

    qtype_names = {1: 'A', 2: 'NS', 5: 'CNAME', 6: 'SOA',
                   15: 'MX', 16: 'TXT', 28: 'AAAA', 33: 'SRV'}
    found = []

    for pkt in pkts:
        if isinstance(pkt, dict):
            # Summary dicts only carry the query name inside the info string.
            info = pkt.get('info', '')
            if pkt.get('protocol') == 'DNS' and 'Query:' in info:
                found.append({
                    'query': info.replace('Query: ', ''),
                    'type': 'A',
                    'src': pkt.get('src', ''),
                    'response': '',
                })
            continue

        # Full scapy packet: need both the DNS layer and a question record.
        if not pkt.haslayer(DNS) or not pkt.haslayer(DNSQR):
            continue
        qname = pkt[DNSQR].qname
        if isinstance(qname, bytes):
            qname = qname.decode(errors='ignore').rstrip('.')

        response = ''
        if pkt.haslayer(DNSRR) and pkt[DNS].ancount > 0:
            try:
                rdata = pkt[DNSRR].rdata
                if isinstance(rdata, bytes):
                    rdata = rdata.decode(errors='ignore')
                response = str(rdata)
            except Exception:
                pass

        found.append({
            'query': qname,
            'type': qtype_names.get(pkt[DNSQR].qtype, str(pkt[DNSQR].qtype)),
            'src': pkt[IP].src if pkt.haslayer(IP) else '',
            'response': response,
        })

    # Deduplicate by query name, counting occurrences and keeping the
    # first non-empty response observed.
    deduped = {}
    for q in found:
        existing = deduped.get(q['query'])
        if existing is None:
            deduped[q['query']] = {**q, 'count': 1}
        else:
            existing['count'] += 1
            if q['response'] and not existing['response']:
                existing['response'] = q['response']

    return sorted(deduped.values(), key=lambda x: x['count'], reverse=True)[:200]
|
||||
|
||||
def extract_http_requests(self, packets=None) -> List[Dict[str, Any]]:
    """Extract plaintext HTTP requests (method, host, path, key headers)."""
    pkts = self._get_packets(packets)
    if not pkts:
        return []

    # Request-line prefixes that identify an HTTP request payload.
    prefixes = tuple(m + b' ' for m in
                     (b'GET', b'POST', b'PUT', b'DELETE', b'HEAD', b'OPTIONS', b'PATCH'))
    found = []

    for pkt in pkts:
        if isinstance(pkt, dict):
            continue  # summary dicts carry no payload
        if not pkt.haslayer(Raw) or not pkt.haslayer(TCP):
            continue

        try:
            payload = bytes(pkt[Raw].load)
            if not payload.startswith(prefixes):
                continue

            header_lines = payload.split(b'\r\n')
            request_parts = header_lines[0].decode(errors='ignore').split(' ')
            if len(request_parts) < 2:
                continue

            # Scan remaining lines for the headers we care about.
            wanted = {'host': '', 'user-agent': '', 'content-type': ''}
            for raw_line in header_lines[1:]:
                name, sep, value = raw_line.decode(errors='ignore').partition(':')
                if sep and name.lower() in wanted:
                    wanted[name.lower()] = value.strip()

            has_ip = pkt.haslayer(IP)
            found.append({
                'method': request_parts[0],
                'host': wanted['host'],
                'path': request_parts[1],
                'src': pkt[IP].src if has_ip else '',
                'dst': pkt[IP].dst if has_ip else '',
                'user_agent': wanted['user-agent'][:100],
                'content_type': wanted['content-type'],
            })
        except Exception:
            continue

    return found[:500]
|
||||
|
||||
def extract_credentials(self, packets=None) -> List[Dict[str, Any]]:
    """Scan packet payloads for plaintext credentials.

    Covers FTP, HTTP (Basic auth and POSTed password fields), SMTP,
    Telnet prompts, and POP3. Returns at most 100 findings.
    """
    import base64

    pkts = self._get_packets(packets)
    if not pkts:
        return []

    findings = []

    for pkt in pkts:
        if isinstance(pkt, dict):
            continue  # summary dicts carry no payload
        if not pkt.haslayer(Raw) or not pkt.haslayer(TCP):
            continue

        try:
            payload = bytes(pkt[Raw].load)
            text = payload.decode(errors='ignore')
            src = pkt[IP].src if pkt.haslayer(IP) else ''
            dst = pkt[IP].dst if pkt.haslayer(IP) else ''
            dport = pkt[TCP].dport

            def note(protocol, kind, value):
                # Small helper so every finding carries the same shape.
                findings.append({'protocol': protocol, 'type': kind,
                                 'value': value, 'src': src, 'dst': dst})

            # FTP sends USER/PASS verbatim on the control channel.
            if dport == 21:
                if text.startswith('USER '):
                    note('FTP', 'username', text.split(' ', 1)[1].strip())
                elif text.startswith('PASS '):
                    note('FTP', 'password', text.split(' ', 1)[1].strip())

            # HTTP Basic auth header (base64 is trivially reversible).
            if dport in (80, 8080, 8443):
                m = re.search(r'Authorization:\s*Basic\s+(\S+)', text, re.IGNORECASE)
                if m:
                    try:
                        note('HTTP', 'basic_auth',
                             base64.b64decode(m.group(1)).decode(errors='ignore'))
                    except Exception:
                        pass

            # Password fields in POSTed form bodies.
            if dport in (80, 8080) and b'POST' in payload[:10]:
                for pattern in (r'password=([^&\s]+)', r'passwd=([^&\s]+)', r'pass=([^&\s]+)'):
                    m = re.search(pattern, text, re.IGNORECASE)
                    if m:
                        note('HTTP', 'form_password', m.group(1))
                        break

            # SMTP AUTH commands.
            if dport in (25, 587) and text.startswith(('AUTH LOGIN', 'AUTH PLAIN')):
                note('SMTP', 'auth', text.strip())

            # Telnet login/password prompts (and whatever follows them).
            if dport == 23 and any(k in text.lower() for k in ('login:', 'username:', 'password:')):
                note('Telnet', 'prompt', text.strip()[:100])

            # POP3 USER/PASS.
            if dport == 110 and text.startswith(('USER ', 'PASS ')):
                note('POP3', 'auth', text.strip())

        except Exception:
            continue

    return findings[:100]
|
||||
|
||||
# ==================== EXPORT ====================
|
||||
|
||||
def export_packets(self, packets=None, fmt: str = 'json',
                   filepath: Optional[str] = None) -> Dict[str, Any]:
    """Export packets to JSON or CSV.

    Args:
        packets: Packet list (scapy packets or summary dicts); uses the
            last loaded/captured packets when None.
        fmt: 'json' or 'csv' (anything other than 'csv' exports JSON).
        filepath: Output path; auto-generated under the data dir when None.

    Returns:
        Dict with 'success', 'filepath' and 'count' on success, or a
        dict with an 'error' key when there is nothing to export.
    """
    pkts = self._get_packets(packets)
    if not pkts:
        return {'error': 'No packets to export'}

    # Normalize everything to plain dicts so scapy packets and summary
    # dicts export identically.
    packet_dicts = [p if isinstance(p, dict) else self._packet_to_dict(p)
                    for p in pkts]

    if not filepath:
        # Only resolve (and create) the project export dir when we must
        # generate a path ourselves.
        from core.paths import get_data_dir
        export_dir = get_data_dir() / 'exports'
        export_dir.mkdir(parents=True, exist_ok=True)
        ts = datetime.now().strftime('%Y%m%d_%H%M%S')
        ext = 'csv' if fmt == 'csv' else 'json'
        filepath = str(export_dir / f'packets_{ts}.{ext}')

    if fmt == 'csv':
        # csv.writer handles quoting, so commas/quotes/newlines inside
        # the info field no longer corrupt the output (the previous
        # string join produced broken rows for such values).
        import csv
        with open(filepath, 'w', newline='') as f:
            writer = csv.writer(f)
            writer.writerow(['Time', 'Source', 'Destination', 'Protocol', 'Length', 'Info'])
            for p in packet_dicts:
                writer.writerow([p.get('time', ''), p.get('src', ''),
                                 p.get('dst', ''), p.get('protocol', ''),
                                 p.get('length', 0), p.get('info', '')])
    else:
        export_data = {
            'exported': datetime.now().isoformat(),
            'total_packets': len(packet_dicts),
            'packets': packet_dicts,
        }
        Path(filepath).write_text(json.dumps(export_data, indent=2))

    return {'success': True, 'filepath': filepath, 'count': len(packet_dicts)}
|
||||
|
||||
|
||||
# Module-level singleton, created lazily by get_wireshark_manager().
_manager: Optional[WiresharkManager] = None


def get_wireshark_manager() -> WiresharkManager:
    """Return the shared WiresharkManager, constructing it on first use."""
    global _manager
    if _manager is None:
        # First caller builds the shared instance.
        _manager = WiresharkManager()
    return _manager
|
||||
Reference in New Issue
Block a user