AUTARCH v1.9 — remote monitoring, SSH manager, daemon, vault, cleanup

- Add Remote Monitoring Station with PIAP device profile system
- Add SSH/SSHD manager with fail2ban integration
- Add privileged daemon architecture for safe root operations
- Add encrypted vault, HAL memory, HAL auto-analyst
- Add network security suite, module creator, codex training
- Add start.sh launcher script and GTK3 desktop launcher
- Remove Output/ build artifacts, installer files, loose docs
- Update .gitignore for runtime data and build artifacts
- Update README for v1.9 with new launch method, screenshots, and features

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
SsSnake
2026-03-24 06:59:06 -07:00
parent 1092689f45
commit da53899f66
382 changed files with 15277 additions and 493964 deletions

View File

@@ -1,6 +1,7 @@
"""Chat and Agent API routes — Hal chat with Agent system for module creation."""
import json
import os
import threading
import time
import uuid
@@ -53,6 +54,14 @@ def chat():
if not message:
return jsonify({'error': 'No message provided'})
# Store in HAL's encrypted memory
try:
from core.hal_memory import get_hal_memory
mem = get_hal_memory()
mem.add('user', message, metadata={'mode': mode})
except Exception:
pass
if mode == 'agent':
return _handle_agent_chat(message)
else:
@@ -77,8 +86,16 @@ def _handle_direct_chat(message):
system_prompt = _get_system_prompt()
try:
token_gen = llm.chat(message, system_prompt=system_prompt, stream=True)
full_response = []
for token in token_gen:
full_response.append(token)
yield f"data: {json.dumps({'token': token})}\n\n"
# Store HAL's response in memory
try:
from core.hal_memory import get_hal_memory
get_hal_memory().add('hal', ''.join(full_response))
except Exception:
pass
except LLMError as e:
yield f"data: {json.dumps({'type': 'error', 'content': str(e)})}\n\n"
@@ -113,11 +130,27 @@ def _handle_agent_chat(message):
tools = get_tool_registry()
agent = Agent(llm=llm, tools=tools, max_steps=20, verbose=False)
# Inject system prompt into agent
system_prompt = _get_system_prompt()
agent.SYSTEM_PROMPT = system_prompt + "\n\n{tools_description}"
# Inject system prompt — keep the THOUGHT/ACTION/PARAMS format from Agent,
# prepend with our behavioral rules
hal_prompt = _get_system_prompt()
agent.SYSTEM_PROMPT = hal_prompt + """
FORMAT — you MUST use this exact format:
THOUGHT: your reasoning
ACTION: tool_name
PARAMS: {{"param": "value"}}
When done: ACTION: task_complete PARAMS: {{"summary": "what was done"}}
When you need input: ACTION: ask_user PARAMS: {{"question": "your question"}}
{tools_description}
"""
def on_step(step):
# Check stop signal
if stop_event.is_set():
return
if step.thought:
steps.append({'type': 'thought', 'content': step.thought})
if step.tool_name and step.tool_name not in ('task_complete', 'ask_user'):
@@ -135,6 +168,22 @@ def _handle_agent_chat(message):
else:
steps.append({'type': 'error', 'content': result.error or result.summary})
# Store agent conversation in HAL memory
try:
from core.hal_memory import get_hal_memory
mem = get_hal_memory()
for step in result.steps:
if step.thought:
mem.add('hal_thought', step.thought)
if step.tool_name:
mem.add('hal_action', f'{step.tool_name}({json.dumps(step.tool_args or {})})')
if step.tool_result:
mem.add('hal_result', step.tool_result[:2000])
mem.add('hal', result.summary if result.success else (result.error or result.summary))
mem.save()
except Exception:
pass
except Exception as e:
steps.append({'type': 'error', 'content': str(e)})
finally:
@@ -181,6 +230,143 @@ def chat_reset():
return jsonify({'ok': True})
@chat_bp.route('/hal/analyze', methods=['POST'])
@login_required
def hal_analyze():
    """Send tool output to HAL for AI analysis.

    Expects JSON: {tool_name, output, context?, category?}
    Returns JSON: {available, analysis, risk_level, has_fixes, tool_name}
    """
    payload = request.get_json(silent=True) or {}
    tool_name = payload.get('tool_name', 'unknown')
    tool_output = payload.get('output', '')
    if not tool_output:
        # Nothing to analyze — report unavailability rather than erroring.
        return jsonify({'available': False, 'analysis': 'No output provided', 'tool_name': tool_name})
    # Imported lazily (and only on the non-empty path, as before) so the LLM
    # stack is not pulled in for trivial rejections.
    from core.hal_analyst import analyze_output
    analysis = analyze_output(
        tool_name,
        tool_output,
        context=payload.get('context', ''),
        category=payload.get('category', 'default'),
    )
    return jsonify(analysis)
@chat_bp.route('/hal/fix', methods=['POST'])
@login_required
def hal_fix():
    """Execute a fix command suggested by HAL.
    Expects JSON: {command: str}
    Returns JSON: {ok, output, exit_code}
    """
    from core.daemon import root_exec
    import shlex
    import subprocess as _subprocess
    data = request.get_json(silent=True) or {}
    command = data.get('command', '').strip()
    if not command:
        return jsonify({'ok': False, 'error': 'No command provided'})
    # Safety: block obviously dangerous commands
    # NOTE(review): a substring blocklist is best-effort only — it cannot catch
    # obfuscated or novel destructive commands. Confirm root_exec's daemon side
    # enforces its own policy.
    dangerous = ['rm -rf /', 'mkfs', 'dd if=', ':(){', 'format c:']
    for d in dangerous:
        if d in command.lower():
            return jsonify({'ok': False, 'error': f'Blocked dangerous command: {d}'})
    # Clean the command: strip sudo, shell redirections
    # (redirections are meaningless here because we capture stdout/stderr ourselves)
    import re
    command = re.sub(r'\s*2>/dev/null\s*', ' ', command)
    command = re.sub(r'\s*>/dev/null\s*', ' ', command)
    command = re.sub(r'\s*2>&1\s*', ' ', command)
    command = command.strip()
    if command.startswith('sudo '):
        # Privilege escalation is handled by the daemon; a leading sudo would fail.
        command = command[5:].strip()
    # Commands that should run as the normal user, not root
    # (adb/fastboot keep per-user server state and keys)
    USER_COMMANDS = {'adb', 'fastboot'}
    def _is_user_cmd(cmd_str):
        """Check if a command should run as normal user."""
        base = cmd_str.split()[0] if cmd_str.split() else ''
        return os.path.basename(base) in USER_COMMANDS
    def _run_user(cmd_parts, timeout=60):
        """Run a command as the normal user via subprocess."""
        try:
            result = _subprocess.run(
                cmd_parts, capture_output=True, text=True, timeout=timeout
            )
            return {
                'ok': result.returncode == 0,
                'stdout': result.stdout,
                'stderr': result.stderr,
                'code': result.returncode,
            }
        except _subprocess.TimeoutExpired:
            return {'ok': False, 'stdout': '', 'stderr': f'Timeout after {timeout}s', 'code': -2}
        except FileNotFoundError:
            return {'ok': False, 'stdout': '', 'stderr': f'Command not found: {cmd_parts[0]}', 'code': -3}
        except Exception as e:
            return {'ok': False, 'stdout': '', 'stderr': str(e), 'code': -4}
    def _exec(cmd_parts, timeout=60):
        """Route to user or root execution based on command."""
        if cmd_parts and os.path.basename(cmd_parts[0]) in USER_COMMANDS:
            return _run_user(cmd_parts, timeout=timeout)
        return root_exec(cmd_parts, timeout=timeout)
    # Handle pipes (cmd1 | cmd2) — run as shell command through bash
    # NOTE(review): _is_user_cmd inspects only the first word, so a pipeline whose
    # first stage is adb/fastboot runs the WHOLE pipeline unprivileged — confirm
    # that is intended.
    if '|' in command:
        if _is_user_cmd(command):
            r = _run_user(['bash', '-c', command], timeout=60)
        else:
            r = root_exec(['bash', '-c', command], timeout=60)
        return jsonify({'ok': r['ok'], 'output': r['stdout'] + r['stderr'], 'exit_code': r['code']})
    # Handle chained commands (&&) by running them sequentially
    if '&&' in command:
        parts = [c.strip() for c in command.split('&&') if c.strip()]
        all_output = ''
        for part in parts:
            if part.startswith('sudo '):
                part = part[5:].strip()
            # NOTE(review): unlike the top-level cleanup, 2>&1 is not stripped here.
            part = re.sub(r'\s*2>/dev/null\s*', ' ', part).strip()
            part = re.sub(r'\s*>/dev/null\s*', ' ', part).strip()
            try:
                cmd_parts = shlex.split(part)
            except ValueError:
                # Unbalanced quotes — fall back to naive whitespace split.
                cmd_parts = part.split()
            r = _exec(cmd_parts, timeout=60)
            all_output += r['stdout'] + r['stderr']
            if not r['ok']:
                # Mirror shell && semantics: stop at the first failure.
                return jsonify({'ok': False, 'output': all_output, 'exit_code': r['code']})
        return jsonify({'ok': True, 'output': all_output, 'exit_code': 0})
    # Single command
    try:
        cmd_parts = shlex.split(command)
    except ValueError:
        cmd_parts = command.split()
    r = _exec(cmd_parts, timeout=60)
    return jsonify({
        'ok': r['ok'],
        'output': r['stdout'] + r['stderr'],
        'exit_code': r['code'],
    })
@chat_bp.route('/hal/available')
@login_required
def hal_available():
    """Quick check if HAL analysis is available (LLM loaded)."""
    from core.hal_analyst import is_llm_available

    # Lightweight probe used by the UI to enable/disable HAL features.
    available = is_llm_available()
    return jsonify({'available': available})
@chat_bp.route('/chat/status')
@login_required
def chat_status():

View File

@@ -115,12 +115,19 @@ def attack_continuous_stop():
return jsonify(_get_deauth().stop_continuous())
@deauth_bp.route('/attack/status')
@deauth_bp.route('/attack/status', methods=['GET', 'POST'])
@login_required
def attack_status():
return jsonify(_get_deauth().get_attack_status())
@deauth_bp.route('/status', methods=['GET', 'POST'])
@login_required
def status():
    """Alias for attack status — templates may call /status directly."""
    # Delegates to the same backend as /attack/status so both URLs stay in sync.
    return jsonify(_get_deauth().get_attack_status())
@deauth_bp.route('/history')
@login_required
def history():

View File

@@ -7,6 +7,7 @@ import socket
import json
from flask import Blueprint, render_template, request, jsonify, Response, stream_with_context
from web.auth import login_required
from core.daemon import root_exec
defense_bp = Blueprint('defense', __name__, url_prefix='/defense')
@@ -40,6 +41,9 @@ def index():
except Exception:
sys_info['ip'] = '127.0.0.1'
# Return JSON if requested (for OS detection by sub-pages)
if request.headers.get('Accept', '') == 'application/json':
return jsonify(sys_info)
return render_template('defense.html', modules=modules, sys_info=sys_info)
@@ -108,7 +112,8 @@ def linux_check(check_name):
@login_required
def linux_firewall_rules():
"""Get current iptables rules."""
success, output = _run_cmd("sudo iptables -L -n --line-numbers 2>/dev/null")
r = root_exec(['iptables', '-L', '-n', '--line-numbers'])
success, output = r['ok'], r['stdout']
if success:
return jsonify({'rules': output})
return jsonify({'rules': 'Could not read iptables rules (need sudo privileges)'})
@@ -123,7 +128,8 @@ def linux_firewall_block():
if not ip or not re.match(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$', ip):
return jsonify({'error': 'Invalid IP address', 'success': False})
success, _ = _run_cmd(f"sudo iptables -A INPUT -s {ip} -j DROP")
r = root_exec(['iptables', '-A', 'INPUT', '-s', ip, '-j', 'DROP'])
success, _ = r['ok'], r['stdout']
if success:
return jsonify({'message': f'Blocked {ip}', 'success': True})
return jsonify({'error': f'Failed to block {ip} (need sudo)', 'success': False})
@@ -138,7 +144,8 @@ def linux_firewall_unblock():
if not ip or not re.match(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$', ip):
return jsonify({'error': 'Invalid IP address', 'success': False})
success, _ = _run_cmd(f"sudo iptables -D INPUT -s {ip} -j DROP")
r = root_exec(['iptables', '-D', 'INPUT', '-s', ip, '-j', 'DROP'])
success, _ = r['ok'], r['stdout']
if success:
return jsonify({'message': f'Unblocked {ip}', 'success': True})
return jsonify({'error': f'Failed to unblock {ip}', 'success': False})

View File

@@ -27,6 +27,24 @@ def status():
# ── ADB Endpoints ──────────────────────────────────────────────────
@hardware_bp.route('/adb/kill-server', methods=['POST'])
@login_required
def adb_kill_server():
    """Kill the ADB server."""
    from core.daemon import root_exec

    result = root_exec(['adb', 'kill-server'], timeout=10)
    combined = result['stdout'] + result['stderr']
    return jsonify({'ok': result['ok'], 'output': combined})
@hardware_bp.route('/adb/start-server', methods=['POST'])
@login_required
def adb_start_server():
    """Start the ADB server."""
    from core.daemon import root_exec

    result = root_exec(['adb', 'start-server'], timeout=10)
    combined = result['stdout'] + result['stderr']
    return jsonify({'ok': result['ok'], 'output': combined})
@hardware_bp.route('/adb/devices')
@login_required
def adb_devices():

View File

@@ -0,0 +1,262 @@
"""Module Creator route - create, edit, validate, and manage AUTARCH modules"""
import ast
import os
import re
from datetime import datetime
from pathlib import Path
from flask import Blueprint, render_template, request, jsonify
from web.auth import login_required
# Blueprint for the Module Creator UI and its JSON API.
module_creator_bp = Blueprint('module_creator', __name__, url_prefix='/module-creator')

# Repo-level modules/ directory (web/routes/ -> web/ -> project root -> modules).
MODULES_DIR = Path(__file__).parent.parent.parent / 'modules'

# Valid values for a module's CATEGORY constant (checked by create()).
CATEGORIES = ['defense', 'offense', 'counter', 'analyze', 'osint', 'simulate', 'core', 'hardware']

# Human-readable blurb per category — used for skeleton generation and the UI.
CATEGORY_DESCRIPTIONS = {
    'defense': 'Defensive security module for monitoring, hardening, and threat detection',
    'offense': 'Offensive security module for penetration testing and exploitation',
    'counter': 'Counter-intelligence module for anti-surveillance and evasion',
    'analyze': 'Analysis module for forensics, traffic inspection, and data processing',
    'osint': 'Open-source intelligence gathering and reconnaissance module',
    'simulate': 'Simulation module for attack modeling and scenario testing',
    'core': 'Core infrastructure module for platform internals and utilities',
    'hardware': 'Hardware interface module for RF, BLE, RFID, SDR, and embedded devices',
}
def _module_skeleton(name, category, description, author):
    """Generate skeleton code for a new module.

    Returns the module source as a string. The generated file defines the
    DESCRIPTION/AUTHOR/VERSION/CATEGORY constants that _parse_module_metadata
    and validate() expect, plus a run() entry point.

    Fix: the separator line previously emitted ``{"" * 50}`` — reusing the
    f-string's own double quotes inside a replacement field, which is a
    SyntaxError in generated code on Python < 3.12 and prints an empty string
    on 3.12+. It now emits ``{'-' * 50}``: valid on all versions and actually
    draws a 50-char rule.
    """
    return f'''"""
{description}
"""

DESCRIPTION = "{description}"
AUTHOR = "{author}"
VERSION = "1.0"
CATEGORY = "{category}"

import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent))

from core.banner import Colors, clear_screen, display_banner


def run():
    """Main entry point."""
    clear_screen()
    display_banner()
    print(f"{{Colors.BOLD}}{name}{{Colors.RESET}}")
    print(f"{{Colors.DIM}}{{'-' * 50}}{{Colors.RESET}}\\n")

    # TODO: Implement module logic here
    print(f"{{Colors.GREEN}}[+] Module loaded successfully{{Colors.RESET}}")


if __name__ == "__main__":
    run()
'''
def _parse_module_metadata(filepath):
    """Extract metadata (DESCRIPTION/CATEGORY/VERSION/AUTHOR) from a module file.

    Unreadable or unparsable files fall back to the default values.
    """
    stat = filepath.stat()
    meta = {
        'name': filepath.stem,
        'category': 'unknown',
        'description': '',
        'version': '',
        'author': '',
        'file_size': stat.st_size,
        'last_modified': datetime.fromtimestamp(stat.st_mtime).strftime('%Y-%m-%d %H:%M'),
    }
    # Module-level constant name -> meta key.
    attr_map = {
        'DESCRIPTION': 'description',
        'CATEGORY': 'category',
        'VERSION': 'version',
        'AUTHOR': 'author',
    }
    try:
        tree = ast.parse(filepath.read_text(errors='replace'))
    except Exception:
        return meta
    for node in ast.walk(tree):
        # Only simple `NAME = <constant>` assignments are considered metadata.
        if not isinstance(node, ast.Assign) or not isinstance(node.value, ast.Constant):
            continue
        for target in node.targets:
            if isinstance(target, ast.Name) and target.id in attr_map:
                meta[attr_map[target.id]] = str(node.value.value)
    return meta
@module_creator_bp.route('/')
@login_required
def index():
    """Render the Module Creator UI shell; data is fetched via the JSON endpoints."""
    return render_template('module_creator.html')
@module_creator_bp.route('/templates')
@login_required
def templates():
    """Return skeleton templates for each category.

    Returns a JSON list of {name, category, description, code} dicts, one per
    entry in CATEGORIES, in CATEGORIES order. (Manual append loop replaced by
    a comprehension.)
    """
    return jsonify([
        {
            'name': f'new_{cat}_module',
            'category': cat,
            'description': CATEGORY_DESCRIPTIONS.get(cat, ''),
            'code': _module_skeleton(
                f'new_{cat}_module', cat, CATEGORY_DESCRIPTIONS.get(cat, ''), 'darkHal'
            ),
        }
        for cat in CATEGORIES
    ])
@module_creator_bp.route('/create', methods=['POST'])
@login_required
def create():
    """Create a new module file.

    Expects JSON: {name, category, description?, author?, code?}
    Returns JSON: {success, message|error, path?} with an appropriate HTTP status.
    """
    data = request.get_json(silent=True)
    if not data:
        return jsonify({'success': False, 'error': 'Invalid JSON payload'}), 400
    name = data.get('name', '').strip()
    category = data.get('category', '').strip()
    description = data.get('description', '').strip()
    author = data.get('author', 'darkHal').strip()
    code = data.get('code', '').strip()
    # Validate name: [A-Za-z0-9_] only, so the filename cannot contain path
    # separators or escape MODULES_DIR.
    if not name:
        return jsonify({'success': False, 'error': 'Module name is required'}), 400
    if not re.match(r'^[A-Za-z0-9_]+$', name):
        return jsonify({'success': False, 'error': 'Module name must be alphanumeric and underscores only'}), 400
    # Check category
    if category not in CATEGORIES:
        return jsonify({'success': False, 'error': f'Invalid category. Must be one of: {", ".join(CATEGORIES)}'}), 400
    # Check existence
    target = MODULES_DIR / f'{name}.py'
    if target.exists():
        return jsonify({'success': False, 'error': f'Module "{name}" already exists'}), 409
    # Use provided code or generate skeleton
    if not code:
        code = _module_skeleton(name, category, description, author)
    try:
        # Robustness: create modules/ on a fresh checkout, and pin UTF-8 so
        # module files are encoded consistently regardless of platform locale.
        MODULES_DIR.mkdir(parents=True, exist_ok=True)
        target.write_text(code, encoding='utf-8')
    except Exception as e:
        return jsonify({'success': False, 'error': f'Failed to write module: {e}'}), 500
    return jsonify({'success': True, 'message': f'Module "{name}" created successfully', 'path': str(target)})
@module_creator_bp.route('/validate', methods=['POST'])
@login_required
def validate():
    """Validate Python syntax and required attributes."""
    data = request.get_json(silent=True)
    if not data or 'code' not in data:
        return jsonify({'valid': False, 'errors': ['No code provided']}), 400
    # Syntax check first — attribute checks are meaningless on broken code.
    try:
        tree = ast.parse(data['code'])
    except SyntaxError as e:
        return jsonify({
            'valid': False,
            'errors': [f'Syntax error at line {e.lineno}: {e.msg}'],
            'warnings': [],
        })
    # Scan the AST once for the required module contract.
    seen_attrs = set()
    has_run = False
    for node in ast.walk(tree):
        if isinstance(node, ast.FunctionDef) and node.name == 'run':
            has_run = True
        elif isinstance(node, ast.Assign):
            seen_attrs.update(
                t.id for t in node.targets
                if isinstance(t, ast.Name) and t.id in ('DESCRIPTION', 'CATEGORY')
            )
    errors = []
    if 'DESCRIPTION' not in seen_attrs:
        errors.append('Missing required attribute: DESCRIPTION')
    if 'CATEGORY' not in seen_attrs:
        errors.append('Missing required attribute: CATEGORY')
    if not has_run:
        errors.append('Missing required function: run()')
    warnings = ['All checks passed'] if not errors else []
    return jsonify({'valid': not errors, 'errors': errors, 'warnings': warnings})
@module_creator_bp.route('/list')
@login_required
def list_modules():
    """Return JSON list of all existing modules.

    Dunder files (e.g. __init__.py) are excluded; entries are sorted by
    filename. (Filter-and-append loop replaced by a comprehension.)
    """
    if not MODULES_DIR.exists():
        return jsonify([])
    return jsonify([
        _parse_module_metadata(f)
        for f in sorted(MODULES_DIR.glob('*.py'))
        if not f.name.startswith('__')
    ])
@module_creator_bp.route('/preview', methods=['POST'])
@login_required
def preview():
    """Load and return source code of an existing module.

    Expects JSON: {name}. Fix: the name is now validated with the same
    [A-Za-z0-9_] rule as save()/create() — previously a crafted name like
    "../web/auth" could read arbitrary .py files outside MODULES_DIR.
    """
    data = request.get_json(silent=True)
    if not data or 'name' not in data:
        return jsonify({'success': False, 'error': 'Module name is required'}), 400
    name = data['name'].strip()
    if not re.match(r'^[A-Za-z0-9_]+$', name):
        return jsonify({'success': False, 'error': 'Invalid module name'}), 400
    target = MODULES_DIR / f'{name}.py'
    if not target.exists():
        return jsonify({'success': False, 'error': f'Module "{name}" not found'}), 404
    try:
        code = target.read_text(errors='replace')
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
    meta = _parse_module_metadata(target)
    return jsonify({'success': True, 'code': code, 'metadata': meta})
@module_creator_bp.route('/save', methods=['POST'])
@login_required
def save():
    """Save edits to an existing module file.

    Expects JSON: {name, code}. The name must match an existing module and is
    validated against path traversal. Writes are pinned to UTF-8 (consistent
    with create()) so saved source does not depend on platform locale.
    """
    data = request.get_json(silent=True)
    if not data:
        return jsonify({'success': False, 'error': 'Invalid JSON payload'}), 400
    name = data.get('name', '').strip()
    code = data.get('code', '')
    if not name:
        return jsonify({'success': False, 'error': 'Module name is required'}), 400
    if not re.match(r'^[A-Za-z0-9_]+$', name):
        return jsonify({'success': False, 'error': 'Invalid module name'}), 400
    target = MODULES_DIR / f'{name}.py'
    if not target.exists():
        return jsonify({'success': False, 'error': f'Module "{name}" does not exist'}), 404
    try:
        target.write_text(code, encoding='utf-8')
    except Exception as e:
        return jsonify({'success': False, 'error': f'Failed to save: {e}'}), 500
    return jsonify({'success': True, 'message': f'Module "{name}" saved successfully'})

1517
web/routes/network.py Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -48,7 +48,7 @@ def stop_ap():
return jsonify(_get_ap().stop_rogue_ap())
@pineapple_bp.route('/status')
@pineapple_bp.route('/status', methods=['GET', 'POST'])
@login_required
def status():
return jsonify(_get_ap().get_status())

View File

@@ -0,0 +1,274 @@
"""Remote Monitoring Station — load .piap device profiles and control remote radios."""
import configparser
import logging
import os
import subprocess
import threading
import time
from datetime import datetime
from pathlib import Path
from flask import Blueprint, render_template, request, jsonify
logger = logging.getLogger(__name__)

remote_monitor_bp = Blueprint('remote_monitor', __name__, url_prefix='/remote-monitor')

# Device profiles (.piap) and capture output live under data/ at the repo root.
PIAP_DIR = Path(__file__).parent.parent.parent / 'data' / 'piap'
CAPTURE_DIR = Path(__file__).parent.parent.parent / 'data' / 'captures'

# NOTE(review): these registries are never referenced by the routes in this
# file — confirm they are used elsewhere or remove them.
_ssh_sessions = {}
_capture_threads = {}
def _parse_piap(filepath):
    """Parse a .piap device-profile file into a dict structure."""
    cfg = configparser.ConfigParser(interpolation=None)
    cfg.read(filepath)

    def section(name):
        # Missing sections degrade to an empty dict rather than raising.
        return dict(cfg[name]) if name in cfg else {}

    radios = []
    index = 0
    # Radios are numbered radio_0, radio_1, ... — stop at the first gap.
    while f'radio_{index}' in cfg:
        radio = dict(cfg[f'radio_{index}'])
        radio['index'] = index
        if 'channels' in radio:
            radio['channel_list'] = [c.strip() for c in radio['channels'].split(',')]
        if 'modes' in radio:
            radio['mode_list'] = [m.strip() for m in radio['modes'].split(',')]
        radios.append(radio)
        index += 1

    return {
        'device': section('device'),
        'connection': section('connection'),
        'radios': radios,
        'features': section('features'),
        'info': section('info'),
        'filename': os.path.basename(filepath),
    }
def _ssh_cmd(conn, cmd, timeout=15):
    """Run a command on the remote device over SSH.

    conn is the [connection] section of a .piap profile (host, port, user,
    auth, key_path, timeout). Returns {ok, stdout, stderr, code}; failures
    (timeout, missing ssh binary, etc.) are reported in the dict, never raised.

    Fix: removed the unused `password` local — password-based auth was read
    from the profile but never wired into the ssh invocation.
    """
    host = conn.get('host', '')
    port = conn.get('port', '22')
    user = conn.get('user', 'root')
    auth = conn.get('auth', 'key')
    key_path = conn.get('key_path', '')
    ssh_timeout = conn.get('timeout', '10')
    # Non-interactive invocation: skip host-key prompts, bound the connect time.
    ssh_args = ['ssh', '-o', 'StrictHostKeyChecking=no', '-o', 'ConnectTimeout=' + ssh_timeout,
                '-p', port]
    if auth == 'key' and key_path:
        ssh_args += ['-i', key_path]
    # NOTE(review): profiles may carry a 'password' field, but password auth is
    # not implemented here (it would need sshpass or an askpass helper); ssh
    # falls back to its agent/default keys. Confirm whether password auth is
    # actually required.
    ssh_args.append(f'{user}@{host}')
    ssh_args.append(cmd)
    try:
        r = subprocess.run(ssh_args, capture_output=True, text=True, timeout=timeout)
        return {'ok': r.returncode == 0, 'stdout': r.stdout.strip(), 'stderr': r.stderr.strip(), 'code': r.returncode}
    except subprocess.TimeoutExpired:
        return {'ok': False, 'stdout': '', 'stderr': 'timeout', 'code': -1}
    except Exception as e:
        return {'ok': False, 'stdout': '', 'stderr': str(e), 'code': -1}
def _expand_cmd(cmd_template, radio=None, channel=None, bssid=None, count=None, timestamp=None):
    """Replace {variables} in a command template."""
    if not cmd_template:
        return ''
    # Build the substitution table in the same order the original applied
    # replacements, so nested placeholders resolve identically.
    subs = {}
    if radio:
        subs['{phy}'] = radio.get('phy', '')
        subs['{interface}'] = radio.get('interface', '')
        subs['{mon}'] = radio.get('monitor_interface', '')
        subs['{channels}'] = radio.get('channels', '')
    if channel:
        subs['{channel}'] = str(channel)
    elif radio:
        subs['{channel}'] = radio.get('default_channel', '1')
    if bssid:
        subs['{bssid}'] = bssid
    if count:
        subs['{count}'] = str(count)
    if timestamp is None:
        timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
    subs['{timestamp}'] = timestamp
    expanded = cmd_template
    for placeholder, value in subs.items():
        expanded = expanded.replace(placeholder, value)
    return expanded
# ── Routes ──────────────────────────────────────────────────────────────────
@remote_monitor_bp.route('/')
def index():
    """Main page — loads available .piap files for dropdown."""
    # NOTE(review): unlike the other blueprints in this app, none of the
    # remote-monitor routes use @login_required — confirm these endpoints
    # (which can drive remote radios) are intentionally unauthenticated.
    piap_files = []
    for f in sorted(PIAP_DIR.glob('*.piap')):
        # template.piap is a blank scaffold, not a real device profile.
        if f.name == 'template.piap':
            continue
        try:
            p = _parse_piap(f)
            piap_files.append({'filename': f.name, 'name': p['device'].get('name', f.stem)})
        except Exception as e:
            logger.warning("Failed to parse %s: %s", f, e)
    return render_template('remote_monitor.html', piap_files=piap_files)
@remote_monitor_bp.route('/api/load', methods=['POST'])
def load_piap():
    """Load a .piap file and return its full config.

    Expects JSON: {filename}. Fixes: the filename is reduced to its basename
    so a crafted value like "../../x.piap" cannot escape PIAP_DIR; the body is
    read with get_json(silent=True) like the rest of the app instead of
    request.json (which raises on a missing/invalid JSON body).
    """
    data = request.get_json(silent=True) or {}
    filename = os.path.basename(data.get('filename', ''))
    filepath = PIAP_DIR / filename
    if not filepath.exists() or filepath.suffix != '.piap':
        return jsonify({'ok': False, 'error': 'File not found'}), 404
    try:
        return jsonify({'ok': True, 'data': _parse_piap(filepath)})
    except Exception as e:
        return jsonify({'ok': False, 'error': str(e)}), 500
@remote_monitor_bp.route('/api/connect', methods=['POST'])
def connect():
    """Test SSH connection to the remote device."""
    conn = request.json.get('connection', {})
    # A trivial echo proves reachability and auth without touching the radios.
    return jsonify(_ssh_cmd(conn, 'echo ok'))
@remote_monitor_bp.route('/api/info', methods=['POST'])
def device_info():
    """Get device info (uptime, memory, kernel, etc)."""
    conn = request.json.get('connection', {})
    info_cmds = request.json.get('info', {})
    results = {}
    # Profile keys look like cmd_<label>; run each and report under <label>.
    for key, cmd in info_cmds.items():
        if not key.startswith('cmd_'):
            continue
        r = _ssh_cmd(conn, cmd)
        results[key[4:]] = r.get('stdout', r.get('stderr', ''))
    return jsonify({'ok': True, 'info': results})
@remote_monitor_bp.route('/api/radio/status', methods=['POST'])
def radio_status():
    """Get status of a specific radio."""
    body = request.json
    radio = body.get('radio', {})
    # 'iw dev' is a safe generic fallback when the profile has no status command.
    status_cmd = _expand_cmd(radio.get('cmd_status', 'iw dev'), radio)
    return jsonify(_ssh_cmd(body.get('connection', {}), status_cmd))
@remote_monitor_bp.route('/api/radio/monitor-on', methods=['POST'])
def monitor_on():
    """Enable monitor mode on a radio."""
    body = request.json
    radio = body.get('radio', {})
    # Channel precedence: explicit request value, then the radio's default.
    chan = body.get('channel', radio.get('default_channel', '1'))
    on_cmd = _expand_cmd(radio.get('cmd_monitor_on', ''), radio, channel=chan)
    return jsonify(_ssh_cmd(body.get('connection', {}), on_cmd))
@remote_monitor_bp.route('/api/radio/monitor-off', methods=['POST'])
def monitor_off():
    """Disable monitor mode on a radio."""
    body = request.json
    radio = body.get('radio', {})
    off_cmd = _expand_cmd(radio.get('cmd_monitor_off', ''), radio)
    return jsonify(_ssh_cmd(body.get('connection', {}), off_cmd))
@remote_monitor_bp.route('/api/radio/set-channel', methods=['POST'])
def set_channel():
    """Set channel on a monitor interface."""
    body = request.json
    radio = body.get('radio', {})
    chan = body.get('channel', '1')
    chan_cmd = _expand_cmd(radio.get('cmd_set_channel', ''), radio, channel=chan)
    return jsonify(_ssh_cmd(body.get('connection', {}), chan_cmd))
@remote_monitor_bp.route('/api/capture/start', methods=['POST'])
def capture_start():
    """Start packet capture on remote device."""
    body = request.json
    radio = body.get('radio', {})
    features = body.get('features', {})
    start_cmd = _expand_cmd(features.get('cmd_capture_start', ''), radio)
    return jsonify(_ssh_cmd(body.get('connection', {}), start_cmd))
@remote_monitor_bp.route('/api/capture/stop', methods=['POST'])
def capture_stop():
    """Stop packet capture on remote device."""
    body = request.json
    features = body.get('features', {})
    # Fall back to a blunt killall when the profile defines no stop command.
    stop_cmd = features.get('cmd_capture_stop', 'killall tcpdump 2>/dev/null')
    return jsonify(_ssh_cmd(body.get('connection', {}), stop_cmd))
@remote_monitor_bp.route('/api/scan', methods=['POST'])
def wifi_scan():
    """Run passive WiFi scan."""
    body = request.json
    radio = body.get('radio', {})
    scan_cmd = _expand_cmd(body.get('features', {}).get('cmd_wifi_scan', ''), radio)
    # Scans can take a while; allow double the default SSH timeout.
    return jsonify(_ssh_cmd(body.get('connection', {}), scan_cmd, timeout=30))
@remote_monitor_bp.route('/api/deauth', methods=['POST'])
def deauth():
    """Send deauth frames."""
    body = request.json
    radio = body.get('radio', {})
    features = body.get('features', {})
    deauth_cmd = _expand_cmd(
        features.get('cmd_deauth', ''),
        radio,
        bssid=body.get('bssid', ''),
        count=body.get('count', '10'),
    )
    return jsonify(_ssh_cmd(body.get('connection', {}), deauth_cmd, timeout=30))
@remote_monitor_bp.route('/api/exec', methods=['POST'])
def exec_cmd():
    """Execute an arbitrary command on the remote device."""
    # NOTE(review): this endpoint runs arbitrary commands on the remote box and
    # carries no @login_required, unlike other blueprints — confirm intended.
    conn = request.json.get('connection', {})
    radio = request.json.get('radio')
    cmd = request.json.get('cmd', '')
    # Only expand {placeholders} when a radio context was supplied.
    if radio:
        cmd = _expand_cmd(cmd, radio)
    result = _ssh_cmd(conn, cmd, timeout=30)
    return jsonify(result)
@remote_monitor_bp.route('/api/piap/list')
def list_piaps():
    """List available .piap files.

    Returns JSON: {ok, files:[{filename, name}]}. Unparsable profiles are
    logged and skipped. Fix: the bare ``except:`` (which also swallowed
    SystemExit/KeyboardInterrupt) is narrowed to ``except Exception`` and now
    logs, matching the identical loop in index().
    """
    piap_files = []
    for f in sorted(PIAP_DIR.glob('*.piap')):
        # template.piap is a blank scaffold, not a real device profile.
        if f.name == 'template.piap':
            continue
        try:
            p = _parse_piap(f)
            piap_files.append({'filename': f.name, 'name': p['device'].get('name', f.stem)})
        except Exception as e:
            logger.warning("Failed to parse %s: %s", f, e)
    return jsonify({'ok': True, 'files': piap_files})

View File

@@ -209,14 +209,14 @@ def update_llm():
config.set('claude', 'model', request.form.get('model', 'claude-sonnet-4-20250514'))
api_key = request.form.get('api_key', '')
if api_key:
config.set('claude', 'api_key', api_key)
config._set_secret('claude_api_key', api_key, 'claude', 'api_key')
config.set('claude', 'max_tokens', request.form.get('max_tokens', '4096'))
config.set('claude', 'temperature', request.form.get('temperature', '0.7'))
elif backend == 'huggingface':
config.set('huggingface', 'model', request.form.get('model', 'mistralai/Mistral-7B-Instruct-v0.3'))
api_key = request.form.get('api_key', '')
if api_key:
config.set('huggingface', 'api_key', api_key)
config._set_secret('huggingface_api_key', api_key, 'huggingface', 'api_key')
config.set('huggingface', 'endpoint', request.form.get('endpoint', ''))
config.set('huggingface', 'provider', request.form.get('provider', 'auto'))
config.set('huggingface', 'max_tokens', request.form.get('max_tokens', '1024'))
@@ -231,7 +231,7 @@ def update_llm():
config.set('openai', 'model', request.form.get('model', 'gpt-4o'))
api_key = request.form.get('api_key', '')
if api_key:
config.set('openai', 'api_key', api_key)
config._set_secret('openai_api_key', api_key, 'openai', 'api_key')
config.set('openai', 'base_url', request.form.get('base_url', 'https://api.openai.com/v1'))
config.set('openai', 'max_tokens', request.form.get('max_tokens', '4096'))
config.set('openai', 'temperature', request.form.get('temperature', '0.7'))
@@ -273,6 +273,8 @@ def llm_settings():
claude=config.get_claude_settings(),
openai=config.get_openai_settings(),
huggingface=config.get_huggingface_settings(),
agents=config.get_agents_settings(),
mcp_port=config.get('web', 'mcp_port', fallback='8081'),
default_models_dir=default_models_dir,
)
@@ -298,6 +300,61 @@ def llm_load():
return jsonify({'ok': False, 'error': str(exc)})
@settings_bp.route('/llm/claude-models', methods=['POST'])
@login_required
def llm_claude_models():
    """Fetch available Claude models from the Anthropic API.

    Returns JSON: {ok, models: [{id, name, created}]} or {ok: False, error}.
    """
    _log = logging.getLogger('autarch.settings')
    try:
        import anthropic
    except ImportError:
        return jsonify({'ok': False, 'error': 'anthropic package not installed'})
    config = current_app.autarch_config
    # Configured key wins; the environment variable is the fallback.
    api_key = config.get('claude', 'api_key', fallback='') or os.environ.get('ANTHROPIC_API_KEY', '')
    if not api_key:
        return jsonify({'ok': False, 'error': 'No Claude API key configured'})
    try:
        client = anthropic.Anthropic(api_key=api_key)
        resp = client.models.list(limit=100)
        models = []
        for m in resp.data:
            models.append({
                'id': m.id,
                'name': getattr(m, 'display_name', m.id),
                'created': getattr(m, 'created_at', None),
            })
        # Sort alphabetically (ascending) by model id.
        # NOTE(review): the original comment said "newest first, then
        # alphabetical", which this key does NOT do — confirm whether a
        # created_at-descending sort was intended.
        models.sort(key=lambda x: x['id'])
        return jsonify({'ok': True, 'models': models})
    except Exception as exc:
        _log.error(f"[Claude Models] API error: {exc}", exc_info=True)
        return jsonify({'ok': False, 'error': str(exc)})
@settings_bp.route('/agents/save', methods=['POST'])
@login_required
def agents_save():
    """Save agent configuration settings."""
    _log = logging.getLogger('autarch.settings')
    config = current_app.autarch_config
    data = request.get_json(silent=True) or {}
    allowed_keys = (
        'backend', 'local_max_steps', 'local_verbose',
        'claude_enabled', 'claude_model', 'claude_max_tokens', 'claude_max_steps',
        'openai_enabled', 'openai_model', 'openai_base_url', 'openai_max_tokens', 'openai_max_steps',
    )
    for key in allowed_keys:
        if key not in data:
            continue
        val = data[key]
        # configparser stores strings; normalize booleans to 'true'/'false'.
        if isinstance(val, bool):
            val = 'true' if val else 'false'
        config.set('agents', key, str(val))
    config.save()
    _log.info(f"[Agents] Settings saved — backend: {data.get('backend', '?')}")
    return jsonify({'ok': True})
@settings_bp.route('/llm/scan-models', methods=['POST'])
@login_required
def llm_scan_models():
@@ -382,6 +439,45 @@ def llm_hf_verify():
# ── MCP Server API ───────────────────────────────────────────
@settings_bp.route('/mcp/save', methods=['POST'])
@login_required
def mcp_save():
    """Save MCP server configuration.

    Accepts a JSON object; only recognized MCP keys are persisted, so unknown
    fields cannot pollute the config file.
    """
    _log = logging.getLogger('autarch.settings')
    config = current_app.autarch_config
    data = request.get_json(silent=True) or {}
    mcp_keys = ('enabled', 'auto_start', 'transport', 'host', 'port', 'log_level',
                'instructions', 'auth_enabled', 'auth_token', 'rate_limit',
                'mask_errors', 'request_timeout', 'max_message_size', 'cors_origins',
                'ssl_enabled', 'ssl_cert', 'ssl_key', 'disabled_tools',
                'nmap_timeout', 'tcpdump_timeout', 'whois_timeout', 'dns_timeout',
                'geoip_timeout', 'geoip_endpoint')
    for key in mcp_keys:
        if key in data:
            val = data[key]
            # configparser stores strings; normalize booleans to 'true'/'false'.
            if isinstance(val, bool):
                val = 'true' if val else 'false'
            config.set('mcp', key, str(val))
    config.save()
    # Fix: was an f-string with no placeholders (ruff F541); a plain literal
    # is equivalent and cheaper.
    _log.info("[MCP] Settings saved")
    return jsonify({'ok': True})
@settings_bp.route('/mcp/generate-token', methods=['POST'])
@login_required
def mcp_generate_token():
    """Create, persist, and return a fresh random MCP auth token."""
    import secrets
    new_token = secrets.token_urlsafe(32)
    cfg = current_app.autarch_config
    cfg.set('mcp', 'auth_token', new_token)
    cfg.save()
    return jsonify({'ok': True, 'token': new_token})
@settings_bp.route('/mcp/status', methods=['POST'])
@login_required
def mcp_status():
@@ -400,8 +496,9 @@ def mcp_start():
try:
from core.mcp_server import start_sse_server
config = current_app.autarch_config
port = int(config.get('web', 'mcp_port', fallback='8081'))
result = start_sse_server(port=port)
port = config.get_int('mcp', 'port', 8081)
host = config.get('mcp', 'host', '0.0.0.0')
result = start_sse_server(host=host, port=port)
return jsonify(result)
except Exception as e:
return jsonify({'ok': False, 'error': str(e)})
@@ -535,6 +632,18 @@ def debug_test():
return jsonify({'ok': True, 'sent': 5})
# ==================== MCP SERVER ====================
@settings_bp.route('/mcp')
@login_required
def mcp_settings():
    """Render the MCP Server configuration and management page."""
    current_settings = current_app.autarch_config.get_mcp_settings()
    return render_template('mcp_settings.html', mcp=current_settings)
# ==================== DEPENDENCIES ====================
@settings_bp.route('/deps')
@@ -544,6 +653,72 @@ def deps_index():
return render_template('system_deps.html')
@settings_bp.route('/deps/system-check', methods=['POST'])
@login_required
def deps_system_check():
    """Check non-Python system tools availability.

    Probes each external tool on PATH (falling back to bundled tools via
    core.paths.find_tool) and runs its version flag to capture a short
    version string.  The single 'python_check' entry (torch) is probed by
    import instead.  Returns
    {'ok': True, 'tools': {name: {'found': bool, 'version': str}}}.
    """
    import shutil
    # Map of tool name -> how to invoke it for a version probe.
    tools_to_check = {
        'nmap': {'cmd': 'nmap', 'version_flag': '--version'},
        'tshark': {'cmd': 'tshark', 'version_flag': '--version'},
        'tcpdump': {'cmd': 'tcpdump', 'version_flag': '--version'},
        'msfconsole': {'cmd': 'msfconsole', 'version_flag': '--version'},
        'wg': {'cmd': 'wg', 'version_flag': '--version'},
        'node': {'cmd': 'node', 'version_flag': '--version'},
        'go': {'cmd': 'go', 'version_flag': 'version'},
        'adb': {'cmd': 'adb', 'version_flag': 'version'},
        'upnpc': {'cmd': 'upnpc', 'version_flag': '--help'},
        'whois': {'cmd': 'whois', 'version_flag': '--version'},
        'aircrack': {'cmd': 'aircrack-ng', 'version_flag': '--version'},
        'mdk4': {'cmd': 'mdk4', 'version_flag': '--help'},
        'sslstrip': {'cmd': 'sslstrip', 'version_flag': '-h'},
        'iw': {'cmd': 'iw', 'version_flag': '--version'},
        'nmcli': {'cmd': 'nmcli', 'version_flag': '--version'},
        'hostapd': {'cmd': 'hostapd', 'version_flag': '-v'},
        'dnsmasq': {'cmd': 'dnsmasq', 'version_flag': '--version'},
        'nft': {'cmd': 'nft', 'version_flag': '--version'},
        'torch': {'python_check': True},
    }
    results = {}
    for name, info in tools_to_check.items():
        if info.get('python_check'):
            # Python-package probe: import torch and report version + CUDA/CPU.
            try:
                import importlib
                mod = importlib.import_module('torch')
                ver = getattr(mod, '__version__', 'unknown')
                cuda = 'CUDA' if getattr(mod, 'cuda', None) and mod.cuda.is_available() else 'CPU'
                results[name] = {'found': True, 'version': f'{ver} ({cuda})'}
            except ImportError:
                results[name] = {'found': False}
            continue
        cmd = info['cmd']
        path = shutil.which(cmd)
        # Also check bundled tools
        if not path:
            from core.paths import find_tool
            found = find_tool(cmd)
            if found:
                path = str(found)
        if path:
            version = ''
            try:
                r = subprocess.run([path, info['version_flag']],
                                   capture_output=True, text=True, timeout=5)
                output = (r.stdout + r.stderr).strip()
                # Extract first line with a version-like pattern
                for line in output.split('\n')[:3]:
                    if any(c.isdigit() for c in line):
                        version = line.strip()[:80]
                        break
            except Exception:
                # Tool exists but the probe failed/timed out — still report it.
                version = 'found'
            results[name] = {'found': True, 'version': version or 'found'}
        else:
            results[name] = {'found': False}
    return jsonify({'ok': True, 'tools': results})
@settings_bp.route('/deps/check', methods=['POST'])
@login_required
def deps_check():

730
web/routes/ssh_manager.py Normal file
View File

@@ -0,0 +1,730 @@
"""SSH / SSHD Configuration Manager routes."""
import logging
import os
import re
import tempfile
import time
from flask import Blueprint, render_template, request, jsonify
from web.auth import login_required
from core.daemon import root_exec
log = logging.getLogger(__name__)
ssh_manager_bp = Blueprint('ssh_manager', __name__, url_prefix='/ssh')
# ─── Helpers ─────────────────────────────────────────────────────────────────
def _parse_sshd_config(text: str) -> dict:
    """Parse sshd_config text into a dict of {directive: value} pairs.

    Comments and blank lines are skipped and Match blocks are flattened.
    Because sshd applies the first occurrence of a directive, later repeats
    of an already-seen directive are ignored.
    """
    directives = {}
    for raw in text.splitlines():
        entry = raw.strip()
        if not entry or entry.startswith('#'):
            continue
        pieces = entry.split(None, 1)
        name = pieces[0]
        value = pieces[1] if len(pieces) == 2 else ''
        # First-match semantics: only record a directive the first time.
        directives.setdefault(name, value)
    return directives
# ─── Routes ──────────────────────────────────────────────────────────────────
@ssh_manager_bp.route('/')
@login_required
def index():
    """Serve the SSH manager UI page."""
    return render_template('ssh_manager.html')
# ── Status ───────────────────────────────────────────────────────────────────
@ssh_manager_bp.route('/status', methods=['GET'])
@login_required
def status():
    """Return JSON with SSH service status (active/enabled/config/version)."""
    def _unit_query(verb):
        # Unit is 'sshd' on RH-style systems and 'ssh' on Debian — try both.
        res = root_exec(f'systemctl {verb} sshd', timeout=10)
        if res.get('code', 1) != 0:
            res = root_exec(f'systemctl {verb} ssh', timeout=10)
        return res.get('stdout', '').strip()

    active = _unit_query('is-active')
    enabled = _unit_query('is-enabled')
    config_exists = os.path.isfile('/etc/ssh/sshd_config')
    # OpenSSH prints 'sshd -V' output on stderr, so merge both streams.
    ver_result = root_exec('sshd -V', timeout=10)
    version = (ver_result.get('stderr', '') + ver_result.get('stdout', '')).strip()
    if version:
        # The first line carries the version string; discard the rest.
        version = version.splitlines()[0]
    return jsonify({
        'ok': True,
        'active': active,
        'enabled': enabled,
        'config_exists': config_exists,
        'version': version,
    })
# ── Security Scan ────────────────────────────────────────────────────────────
@ssh_manager_bp.route('/scan', methods=['POST'])
@login_required
def scan():
    """Security scan of sshd_config.

    Reads /etc/ssh/sshd_config via the privileged daemon, parses it, and
    evaluates a fixed list of hardening checks.  Each check reports name,
    status (pass/fail/info), severity, current and recommended values, and
    a description.  The fallback passed to cfg.get() for each directive is
    the value assumed when the directive is absent from the file.
    """
    result = root_exec('cat /etc/ssh/sshd_config', timeout=10)
    if not result.get('ok'):
        return jsonify({'ok': False, 'error': 'Failed to read sshd_config: ' + result.get('stderr', '')}), 500
    cfg = _parse_sshd_config(result['stdout'])
    checks = []
    def _add(name, severity, current, recommended, description, status='fail'):
        # Helper so each check below stays a one-liner.
        checks.append({
            'name': name,
            'status': status,
            'severity': severity,
            'current_value': current,
            'recommended': recommended,
            'description': description,
        })
    # PermitRootLogin
    val = cfg.get('PermitRootLogin', 'prohibit-password')
    if val.lower() == 'yes':
        _add('PermitRootLogin', 'CRITICAL', val, 'no', 'Root login with password is enabled — extremely dangerous.')
    else:
        _add('PermitRootLogin', 'CRITICAL', val, 'no', 'Root login is restricted.', 'pass')
    # PasswordAuthentication
    val = cfg.get('PasswordAuthentication', 'yes')
    if val.lower() == 'yes':
        _add('PasswordAuthentication', 'WARNING', val, 'no', 'Password authentication is enabled — prefer SSH keys.')
    else:
        _add('PasswordAuthentication', 'WARNING', val, 'no', 'Password authentication is disabled.', 'pass')
    # PermitEmptyPasswords
    val = cfg.get('PermitEmptyPasswords', 'no')
    if val.lower() == 'yes':
        _add('PermitEmptyPasswords', 'CRITICAL', val, 'no', 'Empty passwords are permitted — critical risk.')
    else:
        _add('PermitEmptyPasswords', 'CRITICAL', val, 'no', 'Empty passwords are not permitted.', 'pass')
    # X11Forwarding
    val = cfg.get('X11Forwarding', 'no')
    if val.lower() == 'yes':
        _add('X11Forwarding', 'LOW', val, 'no', 'X11 forwarding is enabled — consider disabling if not needed.')
    else:
        _add('X11Forwarding', 'LOW', val, 'no', 'X11 forwarding is disabled.', 'pass')
    # Port — informational only: non-default ports merely cut automated noise.
    val = cfg.get('Port', '22')
    if val == '22':
        _add('Port', 'INFO', val, 'non-default', 'SSH is running on the default port — consider changing to reduce automated attacks.', 'info')
    else:
        _add('Port', 'INFO', val, 'non-default', 'SSH is running on a non-default port.', 'pass')
    # Protocol — absent on modern OpenSSH (v2-only); nothing is reported then.
    val = cfg.get('Protocol', '')
    if val == '1':
        _add('Protocol', 'CRITICAL', val, '2', 'SSHv1 is enabled — it has known vulnerabilities.')
    elif val:
        _add('Protocol', 'CRITICAL', val, '2', 'Protocol version is set.', 'pass')
    # MaxAuthTries
    val = cfg.get('MaxAuthTries', '6')
    try:
        if int(val) > 6:
            _add('MaxAuthTries', 'WARNING', val, '3-6', 'MaxAuthTries is high — allows excessive brute-force attempts.')
        else:
            _add('MaxAuthTries', 'WARNING', val, '3-6', 'MaxAuthTries is within acceptable range.', 'pass')
    except ValueError:
        _add('MaxAuthTries', 'WARNING', val, '3-6', 'Could not parse MaxAuthTries value.')
    # LoginGraceTime — strips a trailing s/m unit before comparing.
    # NOTE(review): compound values like '2m30s' will not parse — confirm
    # whether such values occur in practice.
    val = cfg.get('LoginGraceTime', '120')
    try:
        numeric = int(val.rstrip('smSM'))
        if numeric > 120:
            _add('LoginGraceTime', 'WARNING', val, '60-120', 'LoginGraceTime is too long — connections can linger.')
        else:
            _add('LoginGraceTime', 'WARNING', val, '60-120', 'LoginGraceTime is acceptable.', 'pass')
    except ValueError:
        _add('LoginGraceTime', 'WARNING', val, '60-120', 'Could not parse LoginGraceTime value.')
    # UsePAM
    val = cfg.get('UsePAM', 'yes')
    if val.lower() == 'no':
        _add('UsePAM', 'WARNING', val, 'yes', 'PAM is disabled — may break system authentication features.')
    else:
        _add('UsePAM', 'WARNING', val, 'yes', 'PAM is enabled.', 'pass')
    # AllowTcpForwarding
    val = cfg.get('AllowTcpForwarding', 'yes')
    if val.lower() == 'yes':
        _add('AllowTcpForwarding', 'LOW', val, 'no', 'TCP forwarding is enabled — consider disabling if not required.')
    else:
        _add('AllowTcpForwarding', 'LOW', val, 'no', 'TCP forwarding is disabled.', 'pass')
    # ClientAliveInterval — 0 means idle sessions are never probed/timed out.
    val = cfg.get('ClientAliveInterval', '0')
    try:
        if int(val) == 0:
            _add('ClientAliveInterval', 'WARNING', val, '300', 'No client alive interval — idle sessions will never timeout.')
        else:
            _add('ClientAliveInterval', 'WARNING', val, '300', 'Client alive interval is set.', 'pass')
    except ValueError:
        _add('ClientAliveInterval', 'WARNING', val, '300', 'Could not parse ClientAliveInterval value.')
    return jsonify({'ok': True, 'checks': checks})
# ── Config Read / Save ───────────────────────────────────────────────────────
@ssh_manager_bp.route('/config', methods=['GET'])
@login_required
def config_read():
    """Return the raw contents of /etc/ssh/sshd_config as JSON."""
    read = root_exec('cat /etc/ssh/sshd_config', timeout=10)
    if read.get('ok'):
        return jsonify({'ok': True, 'config': read['stdout']})
    return jsonify({'ok': False, 'error': read.get('stderr', 'Failed to read config')}), 500
@ssh_manager_bp.route('/config/save', methods=['POST'])
@login_required
def config_save():
    """Save sshd_config with backup and syntax validation.

    Flow: (1) copy the live config to a timestamped .bak file, (2) write the
    submitted text via a temp file copied into place by the privileged
    daemon, (3) run 'sshd -t' against the new file.  On any failure the
    backup is copied back so the live config is never left broken.
    """
    data = request.get_json(silent=True)
    if not data or 'config' not in data:
        return jsonify({'ok': False, 'error': 'Missing config field'}), 400
    config_text = data['config']
    timestamp = int(time.time())
    backup_path = f'/etc/ssh/sshd_config.bak.{timestamp}'
    # 1. Create backup
    bak = root_exec(f'cp /etc/ssh/sshd_config {backup_path}', timeout=10)
    if not bak.get('ok'):
        return jsonify({'ok': False, 'error': 'Failed to create backup: ' + bak.get('stderr', '')}), 500
    # 2. Write new config via a temp file
    try:
        tmp = tempfile.NamedTemporaryFile(mode='w', suffix='.sshd_config', delete=False)
        tmp.write(config_text)
        tmp.close()
        cp_result = root_exec(f'cp {tmp.name} /etc/ssh/sshd_config', timeout=10)
        os.unlink(tmp.name)
        if not cp_result.get('ok'):
            # Restore backup
            root_exec(f'cp {backup_path} /etc/ssh/sshd_config', timeout=10)
            return jsonify({'ok': False, 'error': 'Failed to write config: ' + cp_result.get('stderr', '')}), 500
    except Exception as exc:
        # Any unexpected error (temp-file I/O etc.) also triggers a restore.
        root_exec(f'cp {backup_path} /etc/ssh/sshd_config', timeout=10)
        return jsonify({'ok': False, 'error': f'Write error: {exc}'}), 500
    # 3. Validate syntax — 'sshd -t' checks the file now in place.
    validate = root_exec('sshd -t', timeout=10)
    if validate.get('code', 1) != 0:
        # Restore backup
        root_exec(f'cp {backup_path} /etc/ssh/sshd_config', timeout=10)
        err = (validate.get('stderr', '') + validate.get('stdout', '')).strip()
        return jsonify({'ok': False, 'error': 'Syntax validation failed — backup restored.', 'validation': err}), 400
    return jsonify({
        'ok': True,
        'validation': 'Configuration is valid.',
        'backup': backup_path,
    })
# ── Config Generate ──────────────────────────────────────────────────────────
# All supported directives grouped logically
# All supported sshd directives, grouped by the section they appear under
# in generated configs.
_CONFIG_GROUPS = {
    'Connection': [
        'Port', 'AddressFamily', 'ListenAddress', 'Protocol',
    ],
    'Authentication': [
        'PermitRootLogin', 'PubkeyAuthentication', 'PasswordAuthentication',
        'PermitEmptyPasswords', 'ChallengeResponseAuthentication',
        'KbdInteractiveAuthentication', 'UsePAM', 'AuthenticationMethods',
        'MaxAuthTries', 'LoginGraceTime',
    ],
    'Keys': [
        'HostKey', 'AuthorizedKeysFile', 'AuthorizedPrincipalsFile',
    ],
    'Session': [
        'MaxSessions', 'ClientAliveInterval', 'ClientAliveCountMax', 'TCPKeepAlive',
    ],
    'Access Control': [
        'AllowUsers', 'AllowGroups', 'DenyUsers', 'DenyGroups',
    ],
    'Forwarding': [
        'AllowTcpForwarding', 'X11Forwarding', 'X11DisplayOffset',
        'GatewayPorts', 'PermitTunnel',
    ],
    'Logging': [
        'SyslogFacility', 'LogLevel',
    ],
    'Security': [
        'StrictModes', 'HostbasedAuthentication', 'IgnoreRhosts',
        'IgnoreUserKnownHosts', 'RekeyLimit', 'Ciphers', 'MACs', 'KexAlgorithms',
    ],
    'Other': [
        'Subsystem', 'Banner', 'PrintMotd', 'PrintLastLog',
        'AcceptEnv', 'UseDNS', 'PermitUserEnvironment', 'Compression',
    ],
}

# Flattened view of every supported directive for quick membership tests.
_ALL_DIRECTIVES = {d for group in _CONFIG_GROUPS.values() for d in group}
@ssh_manager_bp.route('/config/generate', methods=['POST'])
@login_required
def config_generate():
    """Generate a hardened sshd_config from submitted fields.

    Builds one commented section per directive group, including only the
    directives the client actually supplied.  Returns the text without
    saving so the user can review it first.
    """
    data = request.get_json(silent=True) or {}
    lines = [
        '# sshd_config — generated by AUTARCH SSH Manager',
        f'# Generated: {time.strftime("%Y-%m-%d %H:%M:%S %Z")}',
        '#',
        '# Review carefully before applying.',
        '',
    ]
    for group_name, directives in _CONFIG_GROUPS.items():
        group_lines = []
        for directive in directives:
            value = data.get(directive)
            if value is not None and str(value).strip() != '':
                group_lines.append(f'{directive} {value}')
        if group_lines:
            # Section header padded with '─' to a fixed width.  The original
            # multiplied the EMPTY string ('' * n), which always produced no
            # padding — clearly the box-drawing dash was intended.
            lines.append(f'# ── {group_name} {"─" * (60 - len(group_name))}')
            lines.extend(group_lines)
            lines.append('')
    config_text = '\n'.join(lines) + '\n'
    return jsonify({'ok': True, 'config': config_text})
# ── Service Control ──────────────────────────────────────────────────────────
# Whitelist of systemctl verbs the endpoint will pass through.
_ALLOWED_ACTIONS = {'start', 'stop', 'restart', 'enable', 'disable'}

@ssh_manager_bp.route('/service/<action>', methods=['POST'])
@login_required
def service_action(action):
    """Start / stop / restart / enable / disable the SSH service."""
    if action not in _ALLOWED_ACTIONS:
        return jsonify({'ok': False, 'error': f'Invalid action: {action}'}), 400
    # Unit is named 'sshd' on RH-style systems and 'ssh' on Debian — try both.
    outcome = root_exec(f'systemctl {action} sshd', timeout=20)
    if outcome.get('code', 1) != 0:
        outcome = root_exec(f'systemctl {action} ssh', timeout=20)
    combined = (outcome.get('stdout', '') + '\n' + outcome.get('stderr', '')).strip()
    return jsonify({
        'ok': outcome.get('code', 1) == 0,
        'output': combined,
    })
# ── Key Generation ───────────────────────────────────────────────────────────
@ssh_manager_bp.route('/keys/generate', methods=['POST'])
@login_required
def keys_generate():
    """Generate an SSH key pair (does not require root).

    JSON body: type ('ed25519'|'rsa'), bits (RSA only), comment, passphrase.
    Returns the private key, public key, and fingerprint.  Nothing persists
    on disk: the temporary directory is removed in a finally block, which
    fixes the original's leak when ssh-keygen exited non-zero (the early
    return skipped cleanup and left key material in /tmp).
    """
    import shutil
    import subprocess
    data = request.get_json(silent=True) or {}
    key_type = data.get('type', 'ed25519')
    bits = int(data.get('bits', 4096))
    comment = data.get('comment', '')
    passphrase = data.get('passphrase', '')
    if key_type not in ('ed25519', 'rsa'):
        return jsonify({'ok': False, 'error': 'Unsupported key type (use ed25519 or rsa)'}), 400
    tmp_dir = None
    try:
        tmp_dir = tempfile.mkdtemp(prefix='autarch_sshkey_')
        key_path = os.path.join(tmp_dir, 'id_key')
        cmd = ['ssh-keygen', '-t', key_type, '-f', key_path, '-N', passphrase]
        if key_type == 'rsa':
            cmd += ['-b', str(bits)]
        if comment:
            cmd += ['-C', comment]
        proc = subprocess.run(cmd, capture_output=True, text=True, timeout=30)
        if proc.returncode != 0:
            return jsonify({'ok': False, 'error': proc.stderr.strip()}), 500
        with open(key_path, 'r') as f:
            private_key = f.read()
        with open(key_path + '.pub', 'r') as f:
            public_key = f.read().strip()
        # Get fingerprint via ssh-keygen -l
        fp_proc = subprocess.run(
            ['ssh-keygen', '-lf', key_path + '.pub'],
            capture_output=True, text=True, timeout=10,
        )
        fingerprint = fp_proc.stdout.strip()
        return jsonify({
            'ok': True,
            'public_key': public_key,
            'private_key': private_key,
            'fingerprint': fingerprint,
        })
    except Exception as exc:
        log.exception('SSH key generation failed')
        return jsonify({'ok': False, 'error': str(exc)}), 500
    finally:
        # Best-effort removal of the key material regardless of outcome.
        if tmp_dir:
            shutil.rmtree(tmp_dir, ignore_errors=True)
# ── Host Keys ────────────────────────────────────────────────────────────────
@ssh_manager_bp.route('/keys/host', methods=['GET'])
@login_required
def keys_host():
    """List host public keys and their fingerprints.

    Reads /etc/ssh/ssh_host_*_key.pub via the privileged daemon and returns
    one entry per key with its type, fingerprint, and path.  (The original
    imported subprocess here but never used it — removed.)
    """
    result = root_exec('ls /etc/ssh/ssh_host_*_key.pub', timeout=10)
    if not result.get('ok'):
        return jsonify({'ok': False, 'error': 'No host keys found or permission denied.'}), 500
    pub_files = [f.strip() for f in result['stdout'].splitlines() if f.strip()]
    keys = []
    for pub_file in pub_files:
        # Read the key; skip files the daemon cannot read.
        cat_result = root_exec(f'cat {pub_file}', timeout=10)
        if not cat_result.get('ok'):
            continue
        key_text = cat_result['stdout'].strip()
        key_type = key_text.split()[0] if key_text else 'unknown'
        # Fingerprint
        fp_result = root_exec(f'ssh-keygen -lf {pub_file}', timeout=10)
        fingerprint = fp_result.get('stdout', '').strip() if fp_result.get('ok') else ''
        keys.append({
            'type': key_type,
            'fingerprint': fingerprint,
            'file': pub_file,
        })
    return jsonify({'ok': True, 'keys': keys})
# ── Authorized Keys ─────────────────────────────────────────────────────────
def _authorized_keys_path() -> str:
    """Return the path to the current user's ~/.ssh/authorized_keys file."""
    return os.path.expanduser('~/.ssh/authorized_keys')
@ssh_manager_bp.route('/keys/authorized', methods=['GET'])
@login_required
def keys_authorized():
    """Read ~/.ssh/authorized_keys and return the active key lines."""
    ak_path = _authorized_keys_path()
    entries = []
    try:
        if os.path.isfile(ak_path):
            with open(ak_path, 'r') as fh:
                for raw in fh:
                    raw = raw.strip()
                    # Blanks and comment lines are not keys — skip them.
                    if not raw or raw.startswith('#'):
                        continue
                    fields = raw.split(None, 2)
                    entries.append({
                        'key': raw,
                        'comment': fields[2] if len(fields) >= 3 else '',
                    })
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc)}), 500
    return jsonify({'ok': True, 'keys': entries})
@ssh_manager_bp.route('/keys/authorized/add', methods=['POST'])
@login_required
def keys_authorized_add():
    """Append a public key to authorized_keys."""
    payload = request.get_json(silent=True) or {}
    new_key = payload.get('key', '').strip()
    if not new_key:
        return jsonify({'ok': False, 'error': 'No key provided'}), 400
    ak_path = _authorized_keys_path()
    try:
        # Ensure ~/.ssh exists with the permissions sshd insists on.
        os.makedirs(os.path.dirname(ak_path), mode=0o700, exist_ok=True)
        with open(ak_path, 'a') as fh:
            fh.write(new_key + '\n')
        os.chmod(ak_path, 0o600)
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc)}), 500
    return jsonify({'ok': True})
@ssh_manager_bp.route('/keys/authorized/remove', methods=['POST'])
@login_required
def keys_authorized_remove():
    """Remove a key by index from authorized_keys."""
    payload = request.get_json(silent=True) or {}
    index = payload.get('index')
    if index is None:
        return jsonify({'ok': False, 'error': 'No index provided'}), 400
    try:
        index = int(index)
    except (ValueError, TypeError):
        return jsonify({'ok': False, 'error': 'Index must be an integer'}), 400
    ak_path = _authorized_keys_path()
    try:
        if not os.path.isfile(ak_path):
            return jsonify({'ok': False, 'error': 'authorized_keys file does not exist'}), 404
        with open(ak_path, 'r') as fh:
            lines = fh.readlines()
        # Map the client-visible key index (blank/comment lines excluded)
        # back to the physical line number in the file.
        key_lines = [i for i, line in enumerate(lines)
                     if line.strip() and not line.strip().startswith('#')]
        if not 0 <= index < len(key_lines):
            return jsonify({'ok': False, 'error': f'Index {index} out of range (0-{len(key_lines) - 1})'}), 400
        del lines[key_lines[index]]
        with open(ak_path, 'w') as fh:
            fh.writelines(lines)
        os.chmod(ak_path, 0o600)
    except Exception as exc:
        return jsonify({'ok': False, 'error': str(exc)}), 500
    return jsonify({'ok': True})
# ══════════════════════════════════════════════════════════════════════════════
# FAIL2BAN
# ══════════════════════════════════════════════════════════════════════════════
@ssh_manager_bp.route('/fail2ban/status')
@login_required
def f2b_status():
    """Summarise fail2ban state: active flag, per-jail ban counts and IPs."""
    r = root_exec(['fail2ban-client', 'status'])
    if not r['ok']:
        return jsonify({'ok': False, 'error': r['stderr'] or 'fail2ban not running', 'active': False})
    jails = []
    total_banned = 0
    for line in r['stdout'].split('\n'):
        if 'Jail list:' in line:
            jails = [j.strip() for j in line.split(':')[1].strip().split(',') if j.strip()]
    jail_details = []
    for jail in jails:
        jr = root_exec(['fail2ban-client', 'status', jail])
        banned = 0
        banned_ips = []
        if jr['ok']:
            for line in jr['stdout'].split('\n'):
                if 'Currently banned:' in line:
                    # Narrowed from a bare 'except:' — only a malformed count
                    # should be ignored, not every possible error.
                    try:
                        banned = int(line.split(':')[1].strip())
                    except ValueError:
                        pass
                elif 'Banned IP list:' in line:
                    banned_ips = [ip.strip() for ip in line.split(':', 1)[1].strip().split() if ip.strip()]
        total_banned += banned
        jail_details.append({'name': jail, 'banned': banned, 'banned_ips': banned_ips})
    sr = root_exec(['systemctl', 'is-active', 'fail2ban'])
    return jsonify({'ok': True, 'active': sr['stdout'].strip() == 'active',
                    'jail_count': len(jails), 'total_banned': total_banned, 'jails': jail_details})
@ssh_manager_bp.route('/fail2ban/service/<action>', methods=['POST'])
@login_required
def f2b_service(action):
    """Control the fail2ban systemd unit (start/stop/restart/enable/disable)."""
    if action not in ('start', 'stop', 'restart', 'enable', 'disable'):
        return jsonify({'ok': False, 'error': 'Invalid action'})
    result = root_exec(['systemctl', action, 'fail2ban'])
    return jsonify({'ok': result['ok'], 'output': result['stdout'] + result['stderr']})
@ssh_manager_bp.route('/fail2ban/banned')
@login_required
def f2b_banned():
    """List every currently banned IP across all fail2ban jails."""
    status_res = root_exec(['fail2ban-client', 'status'])
    if not status_res['ok']:
        return jsonify({'ok': False, 'error': 'fail2ban not running'})
    jails = []
    for line in status_res['stdout'].split('\n'):
        if 'Jail list:' in line:
            jails = [j.strip() for j in line.split(':')[1].strip().split(',') if j.strip()]
    all_banned = []
    for jail in jails:
        jail_res = root_exec(['fail2ban-client', 'status', jail])
        if not jail_res['ok']:
            continue
        for line in jail_res['stdout'].split('\n'):
            if 'Banned IP list:' in line:
                for ip in line.split(':', 1)[1].strip().split():
                    if ip.strip():
                        all_banned.append({'ip': ip.strip(), 'jail': jail})
    return jsonify({'ok': True, 'banned': all_banned, 'total': len(all_banned)})
@ssh_manager_bp.route('/fail2ban/ban', methods=['POST'])
@login_required
def f2b_ban():
    """Manually ban an IP in the given jail (default 'sshd')."""
    payload = request.get_json(silent=True) or {}
    ip = payload.get('ip', '').strip()
    jail = payload.get('jail', 'sshd').strip()
    if not ip:
        return jsonify({'ok': False, 'error': 'IP required'})
    result = root_exec(['fail2ban-client', 'set', jail, 'banip', ip])
    return jsonify({'ok': result['ok'], 'output': result['stdout'] + result['stderr']})
@ssh_manager_bp.route('/fail2ban/unban', methods=['POST'])
@login_required
def f2b_unban():
    """Unban an IP — from one jail if given, otherwise globally."""
    payload = request.get_json(silent=True) or {}
    ip = payload.get('ip', '').strip()
    jail = payload.get('jail', '').strip()
    if not ip:
        return jsonify({'ok': False, 'error': 'IP required'})
    if jail:
        result = root_exec(['fail2ban-client', 'set', jail, 'unbanip', ip])
    else:
        result = root_exec(['fail2ban-client', 'unban', ip])
    return jsonify({'ok': result['ok'], 'output': result['stdout'] + result['stderr']})
@ssh_manager_bp.route('/fail2ban/search', methods=['POST'])
@login_required
def f2b_search():
    """Look up an IP: which jails currently ban it, plus recent log lines."""
    payload = request.get_json(silent=True) or {}
    ip = payload.get('ip', '').strip()
    if not ip:
        return jsonify({'ok': False, 'error': 'IP required'})
    jails = []
    status_res = root_exec(['fail2ban-client', 'status'])
    if status_res['ok']:
        for line in status_res['stdout'].split('\n'):
            if 'Jail list:' in line:
                jails = [j.strip() for j in line.split(':')[1].strip().split(',') if j.strip()]
    results = []
    for jail in jails:
        jail_res = root_exec(['fail2ban-client', 'status', jail])
        # A substring hit in the jail status output counts as an active ban.
        if jail_res['ok'] and ip in jail_res['stdout']:
            results.append({'jail': jail, 'status': 'banned'})
    grep_res = root_exec(['grep', ip, '/var/log/fail2ban.log'])
    if grep_res['ok']:
        # Keep only the last 20 non-empty matching lines.
        log_entries = [entry.strip() for entry in grep_res['stdout'].strip().split('\n')[-20:] if entry.strip()]
    else:
        log_entries = []
    return jsonify({'ok': True, 'ip': ip, 'active_bans': results, 'log_entries': log_entries})
@ssh_manager_bp.route('/fail2ban/jail/create', methods=['POST'])
@login_required
def f2b_jail_create():
    """Create a fail2ban jail drop-in at /etc/fail2ban/jail.d/<name>.local."""
    payload = request.get_json(silent=True) or {}
    name = payload.get('name', '').strip()
    # The jail name becomes a filename — restrict it to a safe character set.
    if not name or not re.match(r'^[a-zA-Z0-9_-]+$', name):
        return jsonify({'ok': False, 'error': 'Invalid jail name'})
    config = f"[{name}]\nenabled = {'true' if payload.get('enabled', True) else 'false'}\nfilter = {payload.get('filter', name)}\nlogpath = {payload.get('logpath', '')}\nmaxretry = {payload.get('maxretry', '5')}\nfindtime = {payload.get('findtime', '10m')}\nbantime = {payload.get('bantime', '1h')}\naction = {payload.get('action', '%(action_mwl)s')}\n"
    tmp = tempfile.NamedTemporaryFile(mode='w', suffix='.local', delete=False)
    tmp.write(config)
    tmp.close()
    copy_res = root_exec(['cp', tmp.name, f'/etc/fail2ban/jail.d/{name}.local'])
    os.unlink(tmp.name)
    if not copy_res['ok']:
        return jsonify({'ok': False, 'error': copy_res['stderr']})
    root_exec(['fail2ban-client', 'reload'])
    return jsonify({'ok': True, 'config': config})
@ssh_manager_bp.route('/fail2ban/scan-apps', methods=['POST'])
@login_required
def f2b_scan_apps():
    """Detect fail2ban-protectable services.

    For each known service reports: whether its package is installed,
    whether its log file exists, and whether a jail already covers it.
    The original queried 'dpkg -l' twice per package (once for the ok
    flag, once for stdout) — a single query is made here.
    """
    checks = [
        ('sshd', 'openssh-server', '/var/log/auth.log', 'sshd'),
        ('apache2', 'apache2', '/var/log/apache2/error.log', 'apache-auth'),
        ('nginx', 'nginx', '/var/log/nginx/error.log', 'nginx-http-auth'),
        ('postfix', 'postfix', '/var/log/mail.log', 'postfix'),
        ('dovecot', 'dovecot-core', '/var/log/mail.log', 'dovecot'),
        ('mysql', 'mysql-server', '/var/log/mysql/error.log', 'mysqld-auth'),
        ('postgresql', 'postgresql', '/var/log/postgresql/*.log', 'postgresql'),
        ('vsftpd', 'vsftpd', '/var/log/vsftpd.log', 'vsftpd'),
        ('exim4', 'exim4', '/var/log/exim4/mainlog', 'exim'),
        ('recidive', None, '/var/log/fail2ban.log', 'recidive'),
    ]
    existing = set()
    r = root_exec(['fail2ban-client', 'status'])
    if r['ok']:
        for line in r['stdout'].split('\n'):
            if 'Jail list:' in line:
                existing = set(j.strip() for j in line.split(':')[1].strip().split(',') if j.strip())
    apps = []
    for service, pkg, logpath, filt in checks:
        if pkg:
            # Single dpkg query per package (previously run twice).
            dpkg = root_exec(['dpkg', '-l', pkg])
            installed = dpkg['ok'] and 'ii' in dpkg['stdout']
        else:
            # Entries with no package (recidive) are always "installed".
            installed = True
        lr = root_exec(['ls', logpath.split('*')[0] if '*' in logpath else logpath])
        apps.append({'service': service, 'package': pkg, 'installed': installed,
                     'log_path': logpath, 'log_exists': lr['ok'], 'filter': filt,
                     'has_jail': filt in existing or service in existing})
    return jsonify({'ok': True, 'apps': apps})
@ssh_manager_bp.route('/fail2ban/auto-config', methods=['POST'])
@login_required
def f2b_auto_config():
    """Generate (and optionally apply) jail configs for detected log files."""
    payload = request.get_json(silent=True) or {}
    apply_now = payload.get('apply', False)
    candidates = [
        ('sshd', '/var/log/auth.log', 'sshd', '5', '10m', '1h'),
        ('apache2', '/var/log/apache2/error.log', 'apache-auth', '5', '10m', '1h'),
        ('nginx', '/var/log/nginx/error.log', 'nginx-http-auth', '5', '10m', '1h'),
        ('postfix', '/var/log/mail.log', 'postfix', '5', '10m', '1h'),
        ('recidive', '/var/log/fail2ban.log', 'recidive', '3', '1d', '1w'),
    ]
    generated = []
    for svc, logpath, filt, maxr, findt, bant in candidates:
        # Only propose a jail when its log file actually exists.
        if not root_exec(['ls', logpath])['ok']:
            continue
        jail_cfg = (f"[{svc}]\nenabled = true\nfilter = {filt}\nlogpath = {logpath}\n"
                    f"maxretry = {maxr}\nfindtime = {findt}\nbantime = {bant}\n")
        generated.append({'service': svc, 'config': jail_cfg})
    if apply_now and generated:
        tmp = tempfile.NamedTemporaryFile(mode='w', suffix='.local', delete=False)
        tmp.write('\n'.join(g['config'] for g in generated))
        tmp.close()
        root_exec(['cp', tmp.name, '/etc/fail2ban/jail.d/autarch-auto.local'])
        os.unlink(tmp.name)
        root_exec(['fail2ban-client', 'reload'])
    return jsonify({'ok': True, 'generated': generated, 'applied': apply_now, 'count': len(generated)})

View File

@@ -41,17 +41,41 @@ def _new_target(data: dict) -> dict:
host = data.get('host', '').strip()
now = _now()
return {
'id': str(uuid.uuid4()),
'name': data.get('name', '').strip() or host,
'host': host,
'type': data.get('type', 'ip'),
'status': data.get('status', 'active'),
'os': data.get('os', 'Unknown'),
'tags': [t.strip() for t in data.get('tags', '').split(',') if t.strip()],
'ports': data.get('ports', '').strip(),
'notes': data.get('notes', '').strip(),
'created_at': now,
'updated_at': now,
'id': str(uuid.uuid4()),
'name': data.get('name', '').strip() or host,
'host': host,
'type': data.get('type', 'ip'),
'status': data.get('status', 'active'),
'os': data.get('os', 'Unknown'),
'tags': [t.strip() for t in data.get('tags', '').split(',') if t.strip()],
'ports': data.get('ports', '').strip(),
'notes': data.get('notes', '').strip(),
# Investigation profile fields
'ipv4': data.get('ipv4', '').strip(),
'ipv6': data.get('ipv6', '').strip(),
'domain': data.get('domain', '').strip(),
'dns_records': data.get('dns_records', '').strip(),
'email': data.get('email', '').strip(),
'usernames': data.get('usernames', '').strip(),
'geo_country': data.get('geo_country', '').strip(),
'geo_city': data.get('geo_city', '').strip(),
'geo_isp': data.get('geo_isp', '').strip(),
'geo_asn': data.get('geo_asn', '').strip(),
'geo_coords': data.get('geo_coords', '').strip(),
'traceroute': data.get('traceroute', '').strip(),
'whois': data.get('whois', '').strip(),
'rdns': data.get('rdns', '').strip(),
'mac_address': data.get('mac_address', '').strip(),
'hostname': data.get('hostname', '').strip(),
'services': data.get('services', '').strip(),
'vulns': data.get('vulns', '').strip(),
'threat_level': data.get('threat_level', 'unknown'),
'source': data.get('source', '').strip(),
'first_seen': data.get('first_seen', now),
'last_seen': data.get('last_seen', now),
'custom_fields': data.get('custom_fields', []),
'created_at': now,
'updated_at': now,
}
@@ -165,3 +189,145 @@ def import_targets():
added += 1
_save(existing)
return jsonify({'ok': True, 'added': added, 'total': len(existing)})
# ══════════════════════════════════════════════════════════════════════════════
# INVESTIGATION REPORTS (IR)
# ══════════════════════════════════════════════════════════════════════════════
def _ir_file() -> Path:
    """Path of the investigation-reports JSON store (parent dirs ensured)."""
    path = Path(__file__).parent.parent.parent / 'data' / 'reports' / 'investigation_reports.json'
    path.parent.mkdir(parents=True, exist_ok=True)
    return path
def _load_irs() -> list:
    """Load all investigation reports; a missing or unreadable file yields []."""
    store = _ir_file()
    if not store.exists():
        return []
    try:
        return json.loads(store.read_text())
    except Exception:
        # Corrupt JSON is treated as an empty store rather than crashing.
        return []
def _save_irs(irs: list):
    """Persist the full list of investigation reports as indented JSON."""
    serialized = json.dumps(irs, indent=2)
    _ir_file().write_text(serialized)
def _generate_ir_id(ip: str = '') -> str:
"""Generate IR identifier — hex from IP if available, otherwise random 9-char hex."""
if ip and ip.strip():
# Convert IP octets to hex
parts = ip.strip().split('.')
if len(parts) == 4:
try:
return 'IR-' + ''.join(f'{int(p):02X}' for p in parts)
except ValueError:
pass
# Random 9-char hex
return 'IR-' + uuid.uuid4().hex[:9].upper()
@targets_bp.route('/ir')
@login_required
def ir_list():
    """Return every stored investigation report."""
    reports = _load_irs()
    return jsonify({'ok': True, 'reports': reports})
@targets_bp.route('/ir/create', methods=['POST'])
@login_required
def ir_create():
    """Create a new investigation report from the posted JSON payload.

    The report ID is derived from the target IP when one is supplied (see
    _generate_ir_id); the new report is prepended so the listing stays
    newest-first. Returns the created report as JSON.
    """
    data = request.get_json(silent=True) or {}
    now = datetime.now(timezone.utc).isoformat()
    ip = data.get('ip', data.get('host', ''))
    ir_id = _generate_ir_id(ip)
    # Flag reports that originated from the HAL agent. A single
    # case-insensitive check suffices — the old extra 'HAL' test was
    # redundant with the lowercase containment check.
    is_hal = 'hal' in data.get('source', '').lower()
    report = {
        'id': ir_id,
        'title': data.get('title', f'Investigation {ir_id}'),
        'ip': ip,
        'status': data.get('status', 'open'),
        'threat_level': data.get('threat_level', 'unknown'),
        'source': data.get('source', ''),
        'created_by_hal': is_hal,
        'scan_type': data.get('scan_type', ''),
        'scan_output': data.get('scan_output', ''),
        'analysis': data.get('analysis', ''),
        'risk_level': data.get('risk_level', ''),
        'fix_attempted': data.get('fix_attempted', False),
        'fix_results': data.get('fix_results', ''),
        'recommendations': data.get('recommendations', ''),
        'geo': data.get('geo', {}),
        'custom_fields': data.get('custom_fields', []),
        'notes': data.get('notes', ''),
        'created_at': now,
        'updated_at': now,
    }
    irs = _load_irs()
    irs.insert(0, report)  # newest first
    _save_irs(irs)
    return jsonify({'ok': True, 'ir': report})
@targets_bp.route('/ir/<ir_id>', methods=['GET'])
@login_required
def ir_get(ir_id):
    """Fetch a single investigation report by its ID."""
    match = next((r for r in _load_irs() if r['id'] == ir_id), None)
    if match is None:
        return jsonify({'ok': False, 'error': 'IR not found'})
    return jsonify({'ok': True, 'ir': match})
@targets_bp.route('/ir/<ir_id>/update', methods=['POST'])
@login_required
def ir_update(ir_id):
    """Merge the posted JSON fields into an existing IR; the 'id' field is immutable."""
    data = request.get_json(silent=True) or {}
    irs = _load_irs()
    for report in irs:
        if report['id'] != ir_id:
            continue
        # Overwrite every supplied field except the identifier.
        report.update((k, v) for k, v in data.items() if k != 'id')
        report['updated_at'] = datetime.now(timezone.utc).isoformat()
        _save_irs(irs)
        return jsonify({'ok': True, 'ir': report})
    return jsonify({'ok': False, 'error': 'IR not found'})
@targets_bp.route('/ir/<ir_id>/load-to-hal', methods=['POST'])
@login_required
def ir_load_to_hal(ir_id):
    """Push an IR's details into HAL's memory so the agent can continue working on it."""
    for report in _load_irs():
        if report['id'] != ir_id:
            continue
        try:
            from core.hal_memory import get_hal_memory
            memory = get_hal_memory()
            memory.add('context', json.dumps(report),
                       metadata={'type': 'ir_loaded', 'ir_id': ir_id})
            memory.save()
        except Exception:
            # The memory subsystem is best-effort; a failure here must not
            # break the API response.
            pass
        return jsonify({'ok': True, 'ir': report})
    return jsonify({'ok': False, 'error': 'IR not found'})
@targets_bp.route('/ir/<ir_id>/delete', methods=['POST'])
@login_required
def ir_delete(ir_id):
    """Delete an IR by ID; succeeds even when no report matched."""
    remaining = [r for r in _load_irs() if r['id'] != ir_id]
    _save_irs(remaining)
    return jsonify({'ok': True})