AUTARCH v1.9 — remote monitoring, SSH manager, daemon, vault, cleanup

- Add Remote Monitoring Station with PIAP device profile system
- Add SSH/SSHD manager with fail2ban integration
- Add privileged daemon architecture for safe root operations
- Add encrypted vault, HAL memory, HAL auto-analyst
- Add network security suite, module creator, codex training
- Add start.sh launcher script and GTK3 desktop launcher
- Remove Output/ build artifacts, installer files, loose docs
- Update .gitignore for runtime data and build artifacts
- Update README for v1.9 with new launch method, screenshots, and features

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
SsSnake
2026-03-24 06:59:06 -07:00
parent 1092689f45
commit da53899f66
382 changed files with 15277 additions and 493964 deletions

View File

@@ -209,14 +209,14 @@ def update_llm():
config.set('claude', 'model', request.form.get('model', 'claude-sonnet-4-20250514'))
api_key = request.form.get('api_key', '')
if api_key:
config.set('claude', 'api_key', api_key)
config._set_secret('claude_api_key', api_key, 'claude', 'api_key')
config.set('claude', 'max_tokens', request.form.get('max_tokens', '4096'))
config.set('claude', 'temperature', request.form.get('temperature', '0.7'))
elif backend == 'huggingface':
config.set('huggingface', 'model', request.form.get('model', 'mistralai/Mistral-7B-Instruct-v0.3'))
api_key = request.form.get('api_key', '')
if api_key:
config.set('huggingface', 'api_key', api_key)
config._set_secret('huggingface_api_key', api_key, 'huggingface', 'api_key')
config.set('huggingface', 'endpoint', request.form.get('endpoint', ''))
config.set('huggingface', 'provider', request.form.get('provider', 'auto'))
config.set('huggingface', 'max_tokens', request.form.get('max_tokens', '1024'))
@@ -231,7 +231,7 @@ def update_llm():
config.set('openai', 'model', request.form.get('model', 'gpt-4o'))
api_key = request.form.get('api_key', '')
if api_key:
config.set('openai', 'api_key', api_key)
config._set_secret('openai_api_key', api_key, 'openai', 'api_key')
config.set('openai', 'base_url', request.form.get('base_url', 'https://api.openai.com/v1'))
config.set('openai', 'max_tokens', request.form.get('max_tokens', '4096'))
config.set('openai', 'temperature', request.form.get('temperature', '0.7'))
@@ -273,6 +273,8 @@ def llm_settings():
claude=config.get_claude_settings(),
openai=config.get_openai_settings(),
huggingface=config.get_huggingface_settings(),
agents=config.get_agents_settings(),
mcp_port=config.get('web', 'mcp_port', fallback='8081'),
default_models_dir=default_models_dir,
)
@@ -298,6 +300,61 @@ def llm_load():
return jsonify({'ok': False, 'error': str(exc)})
@settings_bp.route('/llm/claude-models', methods=['POST'])
@login_required
def llm_claude_models():
    """Fetch the list of available Claude models from the Anthropic API.

    Returns JSON {'ok': True, 'models': [...]} on success, where each model
    dict carries 'id', 'name' (display name, falling back to the id) and
    'created' (creation timestamp when the SDK exposes one, else None).
    On any failure (missing package, no API key, API error) returns
    {'ok': False, 'error': <message>}.
    """
    _log = logging.getLogger('autarch.settings')
    try:
        import anthropic
    except ImportError:
        return jsonify({'ok': False, 'error': 'anthropic package not installed'})
    config = current_app.autarch_config
    # Configured key wins; fall back to the conventional environment variable.
    api_key = config.get('claude', 'api_key', fallback='') or os.environ.get('ANTHROPIC_API_KEY', '')
    if not api_key:
        return jsonify({'ok': False, 'error': 'No Claude API key configured'})
    try:
        client = anthropic.Anthropic(api_key=api_key)
        resp = client.models.list(limit=100)
        models = []
        for m in resp.data:
            models.append({
                'id': m.id,
                'name': getattr(m, 'display_name', m.id),
                'created': getattr(m, 'created_at', None),
            })
        # Sort newest first, id as tiebreaker. Timestamps are stringified so
        # that None/'' and datetime values compare uniformly (ISO-style str()
        # of a datetime sorts chronologically). The previous plain ascending
        # id sort contradicted the stated "newest first" intent.
        models.sort(key=lambda x: (str(x['created'] or ''), x['id']), reverse=True)
        return jsonify({'ok': True, 'models': models})
    except Exception as exc:
        _log.error(f"[Claude Models] API error: {exc}", exc_info=True)
        return jsonify({'ok': False, 'error': str(exc)})
@settings_bp.route('/agents/save', methods=['POST'])
@login_required
def agents_save():
    """Persist agent configuration values posted as JSON.

    Only whitelisted keys are written to the [agents] config section;
    boolean values are stored as 'true'/'false' strings. Always answers
    {'ok': True}.
    """
    _log = logging.getLogger('autarch.settings')
    config = current_app.autarch_config
    payload = request.get_json(silent=True) or {}
    allowed = ('backend', 'local_max_steps', 'local_verbose',
               'claude_enabled', 'claude_model', 'claude_max_tokens', 'claude_max_steps',
               'openai_enabled', 'openai_model', 'openai_base_url', 'openai_max_tokens', 'openai_max_steps')
    for key in allowed:
        if key not in payload:
            continue
        raw = payload[key]
        # Booleans are normalised to lowercase string form for the INI store.
        text = ('true' if raw else 'false') if isinstance(raw, bool) else str(raw)
        config.set('agents', key, text)
    config.save()
    _log.info(f"[Agents] Settings saved — backend: {payload.get('backend', '?')}")
    return jsonify({'ok': True})
@settings_bp.route('/llm/scan-models', methods=['POST'])
@login_required
def llm_scan_models():
@@ -382,6 +439,45 @@ def llm_hf_verify():
# ── MCP Server API ───────────────────────────────────────────
@settings_bp.route('/mcp/save', methods=['POST'])
@login_required
def mcp_save():
    """Save MCP server configuration.

    Accepts a JSON body; only recognised MCP keys are persisted into the
    [mcp] config section (anything else in the payload is ignored).
    Booleans become 'true'/'false' strings. Always responds {'ok': True}.
    """
    _log = logging.getLogger('autarch.settings')
    config = current_app.autarch_config
    data = request.get_json(silent=True) or {}
    # Whitelist of persistable [mcp] settings.
    mcp_keys = ('enabled', 'auto_start', 'transport', 'host', 'port', 'log_level',
                'instructions', 'auth_enabled', 'auth_token', 'rate_limit',
                'mask_errors', 'request_timeout', 'max_message_size', 'cors_origins',
                'ssl_enabled', 'ssl_cert', 'ssl_key', 'disabled_tools',
                'nmap_timeout', 'tcpdump_timeout', 'whois_timeout', 'dns_timeout',
                'geoip_timeout', 'geoip_endpoint')
    for key in mcp_keys:
        if key in data:
            val = data[key]
            if isinstance(val, bool):
                # INI-friendly lowercase boolean representation.
                val = 'true' if val else 'false'
            config.set('mcp', key, str(val))
    config.save()
    # Plain literal: the original used an f-string with no placeholders.
    _log.info("[MCP] Settings saved")
    return jsonify({'ok': True})
@settings_bp.route('/mcp/generate-token', methods=['POST'])
@login_required
def mcp_generate_token():
    """Create, persist and return a fresh random MCP auth token."""
    import secrets

    # 32 random bytes, URL-safe base64 — suitable for a bearer-style token.
    new_token = secrets.token_urlsafe(32)
    cfg = current_app.autarch_config
    cfg.set('mcp', 'auth_token', new_token)
    cfg.save()
    return jsonify({'ok': True, 'token': new_token})
@settings_bp.route('/mcp/status', methods=['POST'])
@login_required
def mcp_status():
@@ -400,8 +496,9 @@ def mcp_start():
try:
from core.mcp_server import start_sse_server
config = current_app.autarch_config
port = int(config.get('web', 'mcp_port', fallback='8081'))
result = start_sse_server(port=port)
port = config.get_int('mcp', 'port', 8081)
host = config.get('mcp', 'host', '0.0.0.0')
result = start_sse_server(host=host, port=port)
return jsonify(result)
except Exception as e:
return jsonify({'ok': False, 'error': str(e)})
@@ -535,6 +632,18 @@ def debug_test():
return jsonify({'ok': True, 'sent': 5})
# ==================== MCP SERVER ====================
@settings_bp.route('/mcp')
@login_required
def mcp_settings():
    """Render the MCP Server configuration and management page."""
    cfg = current_app.autarch_config
    context = {'mcp': cfg.get_mcp_settings()}
    return render_template('mcp_settings.html', **context)
# ==================== DEPENDENCIES ====================
@settings_bp.route('/deps')
@@ -544,6 +653,72 @@ def deps_index():
return render_template('system_deps.html')
def _check_torch_availability():
    """Probe the optional torch package; report version plus CUDA/CPU mode."""
    try:
        import importlib
        mod = importlib.import_module('torch')
    except ImportError:
        return {'found': False}
    ver = getattr(mod, '__version__', 'unknown')
    cuda = 'CUDA' if getattr(mod, 'cuda', None) and mod.cuda.is_available() else 'CPU'
    return {'found': True, 'version': f'{ver} ({cuda})'}


def _resolve_tool_path(cmd):
    """Locate *cmd* on PATH, falling back to the bundled tools directory."""
    import shutil
    path = shutil.which(cmd)
    if not path:
        from core.paths import find_tool
        found = find_tool(cmd)
        if found:
            path = str(found)
    return path


def _probe_tool_version(path, version_flag):
    """Run the tool's version command and extract a short version string.

    Returns '' when no version-like line is found, or 'found' when the
    command itself fails (timeout, unsupported flag) — the tool exists
    either way.
    """
    try:
        r = subprocess.run([path, version_flag],
                           capture_output=True, text=True, timeout=5)
        output = (r.stdout + r.stderr).strip()
        # The first of the leading lines containing a digit is taken as
        # the version line, truncated for display.
        for line in output.split('\n')[:3]:
            if any(c.isdigit() for c in line):
                return line.strip()[:80]
        return ''
    except Exception:
        return 'found'


@settings_bp.route('/deps/system-check', methods=['POST'])
@login_required
def deps_system_check():
    """Check non-Python system tools availability.

    Probes each known external tool on PATH (and in the bundled tools
    directory), plus the torch Python package, and returns
    {'ok': True, 'tools': {name: {'found': bool, 'version': str}}}.
    """
    tools_to_check = {
        'nmap': {'cmd': 'nmap', 'version_flag': '--version'},
        'tshark': {'cmd': 'tshark', 'version_flag': '--version'},
        'tcpdump': {'cmd': 'tcpdump', 'version_flag': '--version'},
        'msfconsole': {'cmd': 'msfconsole', 'version_flag': '--version'},
        'wg': {'cmd': 'wg', 'version_flag': '--version'},
        'node': {'cmd': 'node', 'version_flag': '--version'},
        'go': {'cmd': 'go', 'version_flag': 'version'},
        'adb': {'cmd': 'adb', 'version_flag': 'version'},
        'upnpc': {'cmd': 'upnpc', 'version_flag': '--help'},
        'whois': {'cmd': 'whois', 'version_flag': '--version'},
        'aircrack': {'cmd': 'aircrack-ng', 'version_flag': '--version'},
        'mdk4': {'cmd': 'mdk4', 'version_flag': '--help'},
        'sslstrip': {'cmd': 'sslstrip', 'version_flag': '-h'},
        'iw': {'cmd': 'iw', 'version_flag': '--version'},
        'nmcli': {'cmd': 'nmcli', 'version_flag': '--version'},
        'hostapd': {'cmd': 'hostapd', 'version_flag': '-v'},
        'dnsmasq': {'cmd': 'dnsmasq', 'version_flag': '--version'},
        'nft': {'cmd': 'nft', 'version_flag': '--version'},
        'torch': {'python_check': True},
    }
    results = {}
    for name, info in tools_to_check.items():
        if info.get('python_check'):
            results[name] = _check_torch_availability()
            continue
        path = _resolve_tool_path(info['cmd'])
        if path:
            version = _probe_tool_version(path, info['version_flag'])
            results[name] = {'found': True, 'version': version or 'found'}
        else:
            results[name] = {'found': False}
    return jsonify({'ok': True, 'tools': results})
@settings_bp.route('/deps/check', methods=['POST'])
@login_required
def deps_check():