diff --git a/.devcontainer/requirements.txt b/.devcontainer/requirements.txt
index d3c6ccc4..d423946a 100644
--- a/.devcontainer/requirements.txt
+++ b/.devcontainer/requirements.txt
@@ -4,4 +4,5 @@ decentra-vision
python-telegram-bot[rate-limiter]
protobuf==5.28.3
vectordb
-ngrok
\ No newline at end of file
+ngrok
+paramiko
\ No newline at end of file
diff --git a/extensions/business/cybersec/red_mesh/constants.py b/extensions/business/cybersec/red_mesh/constants.py
index 65bd84b1..373154f8 100644
--- a/extensions/business/cybersec/red_mesh/constants.py
+++ b/extensions/business/cybersec/red_mesh/constants.py
@@ -9,16 +9,17 @@
"description": "Collect banner and version data for common network services.",
"category": "service",
"methods": [
- "_service_info_80",
- "_service_info_443",
- "_service_info_8080",
- "_service_info_21",
- "_service_info_22",
- "_service_info_23",
- "_service_info_25",
- "_service_info_53",
- "_service_info_161",
- "_service_info_445",
+ "_service_info_http",
+ "_service_info_https",
+ "_service_info_http_alt",
+ "_service_info_ftp",
+ "_service_info_ssh",
+ "_service_info_telnet",
+ "_service_info_smtp",
+ "_service_info_dns",
+ "_service_info_snmp",
+ "_service_info_smb",
+ "_service_info_wins",
"_service_info_generic"
]
},
@@ -29,54 +30,59 @@
"category": "service",
"methods": [
"_service_info_tls",
- "_service_info_1433",
- "_service_info_3306",
- "_service_info_3389",
- "_service_info_5432",
- "_service_info_5900",
- "_service_info_6379",
- "_service_info_9200",
- "_service_info_11211",
- "_service_info_27017",
- "_service_info_502"
+ "_service_info_mssql",
+ "_service_info_mysql",
+ "_service_info_rdp",
+ "_service_info_postgresql",
+ "_service_info_vnc",
+ "_service_info_redis",
+ "_service_info_elasticsearch",
+ "_service_info_memcached",
+ "_service_info_mongodb",
+ "_service_info_modbus"
]
},
{
- "id": "web_test_common",
- "label": "Common exposure scan",
- "description": "Probe default admin panels, disclosed files, and common misconfigurations.",
+ "id": "web_discovery",
+ "label": "Discovery",
+ "description": "Enumerate exposed files, admin panels, homepage secrets, tech fingerprinting, and VPN endpoints (OWASP WSTG-INFO).",
"category": "web",
- "methods": [
- "_web_test_common",
- "_web_test_homepage",
- "_web_test_flags",
- "_web_test_graphql_introspection",
- "_web_test_metadata_endpoints"
- ]
+ "methods": ["_web_test_common", "_web_test_homepage", "_web_test_tech_fingerprint", "_web_test_vpn_endpoints"]
},
{
- "id": "web_test_security_headers",
- "label": "Security headers audit",
- "description": "Check HSTS, CSP, X-Frame-Options, and other critical response headers.",
+ "id": "web_hardening",
+ "label": "Hardening audit",
+ "description": "Audit cookie flags, security headers, CORS policy, redirect handling, and HTTP methods (OWASP WSTG-CONF).",
"category": "web",
- "methods": [
- "_web_test_security_headers",
- "_web_test_cors_misconfiguration",
- "_web_test_open_redirect",
- "_web_test_http_methods"
- ]
+ "methods": ["_web_test_flags", "_web_test_security_headers", "_web_test_cors_misconfiguration", "_web_test_open_redirect", "_web_test_http_methods"]
},
{
- "id": "web_test_vulnerability",
- "label": "Vulnerability probes",
- "description": "Non-destructive probes for common web vulnerabilities.",
+ "id": "web_api_exposure",
+ "label": "API exposure",
+ "description": "Detect GraphQL introspection leaks, cloud metadata endpoints, and API auth bypass (OWASP WSTG-APIT).",
"category": "web",
- "methods": [
- "_web_test_path_traversal",
- "_web_test_xss",
- "_web_test_sql_injection",
- "_web_test_api_auth_bypass"
- ]
+ "methods": ["_web_test_graphql_introspection", "_web_test_metadata_endpoints", "_web_test_api_auth_bypass"]
+ },
+ {
+ "id": "web_injection",
+ "label": "Injection probes",
+ "description": "Non-destructive probes for path traversal, reflected XSS, and SQL injection (OWASP WSTG-INPV).",
+ "category": "web",
+ "methods": ["_web_test_path_traversal", "_web_test_xss", "_web_test_sql_injection"]
+ },
+ {
+ "id": "active_auth",
+ "label": "Credential testing",
+ "description": "Test default/weak credentials on database and remote access services. May trigger account lockout.",
+ "category": "service",
+ "methods": ["_service_info_mysql_creds", "_service_info_postgresql_creds"]
+ },
+ {
+ "id": "post_scan_correlation",
+ "label": "Cross-service correlation",
+ "description": "Post-scan analysis: honeypot detection, OS consistency, infrastructure leak aggregation.",
+ "category": "correlation",
+ "methods": ["_post_scan_correlate"]
}
]
@@ -106,4 +112,70 @@
# LLM Analysis types
LLM_ANALYSIS_SECURITY_ASSESSMENT = "security_assessment"
LLM_ANALYSIS_VULNERABILITY_SUMMARY = "vulnerability_summary"
-LLM_ANALYSIS_REMEDIATION_PLAN = "remediation_plan"
\ No newline at end of file
+LLM_ANALYSIS_REMEDIATION_PLAN = "remediation_plan"
+LLM_ANALYSIS_QUICK_SUMMARY = "quick_summary"
+
+# =====================================================================
+# Protocol fingerprinting and probe routing
+# =====================================================================
+
+# Fingerprint configuration
+FINGERPRINT_TIMEOUT = 2 # seconds — passive banner grab timeout
+FINGERPRINT_MAX_BANNER = 512 # bytes — max banner stored per port
+FINGERPRINT_HTTP_TIMEOUT = 4 # seconds — active HTTP probe timeout (honeypots may be slow)
+FINGERPRINT_NUDGE_TIMEOUT = 3 # seconds — generic \r\n nudge probe timeout
+
+# Well-known TCP port → protocol (fallback when banner is unrecognized)
+WELL_KNOWN_PORTS = {
+ 21: "ftp", 22: "ssh", 23: "telnet", 25: "smtp", 42: "wins",
+ 53: "dns", 80: "http", 81: "http", 110: "pop3", 137: "nbns", 143: "imap",
+ 161: "snmp", 443: "https", 445: "smb", 465: "smtp", # SMTPS
+ 502: "modbus", 587: "smtp", 993: "imap", 995: "pop3", # TLS-wrapped mail
+ 1433: "mssql", 3306: "mysql", 3389: "rdp", 5432: "postgresql",
+ 5900: "vnc", 6379: "redis", 8000: "http", 8008: "http",
+ 8080: "http", 8081: "http", 8443: "https", 8888: "http",
+ 9200: "http", 11211: "memcached", 27017: "mongodb",
+}
+
+# Protocols where web vulnerability tests are applicable
+WEB_PROTOCOLS = frozenset({"http", "https"})
+
+# Which protocols each service probe is designed to test.
+# Probes not listed here run unconditionally (forward-compatible with new probes).
+PROBE_PROTOCOL_MAP = {
+ "_service_info_ftp": frozenset({"ftp"}),
+ "_service_info_ssh": frozenset({"ssh"}),
+ "_service_info_telnet": frozenset({"telnet"}),
+ "_service_info_smtp": frozenset({"smtp"}),
+ "_service_info_dns": frozenset({"dns"}),
+ "_service_info_http": frozenset({"http"}),
+ "_service_info_https": frozenset({"https"}),
+ "_service_info_http_alt": frozenset({"http"}),
+ "_service_info_tls": frozenset({"https", "unknown"}),
+ "_service_info_mssql": frozenset({"mssql"}),
+ "_service_info_mysql": frozenset({"mysql"}),
+ "_service_info_rdp": frozenset({"rdp"}),
+ "_service_info_postgresql": frozenset({"postgresql"}),
+ "_service_info_vnc": frozenset({"vnc"}),
+ "_service_info_redis": frozenset({"redis"}),
+ "_service_info_elasticsearch": frozenset({"http", "https"}),
+ "_service_info_memcached": frozenset({"memcached"}),
+ "_service_info_mongodb": frozenset({"mongodb"}),
+ "_service_info_snmp": frozenset({"snmp"}),
+ "_service_info_smb": frozenset({"smb"}),
+ "_service_info_modbus": frozenset({"modbus"}),
+ "_service_info_wins": frozenset({"wins", "nbns"}),
+ "_service_info_generic": frozenset({"unknown"}),
+ "_service_info_mysql_creds": frozenset({"mysql"}),
+ "_service_info_postgresql_creds": frozenset({"postgresql"}),
+}
+
+# =====================================================================
+# Risk score computation
+# =====================================================================
+
+RISK_SEVERITY_WEIGHTS = {"CRITICAL": 40, "HIGH": 25, "MEDIUM": 10, "LOW": 2, "INFO": 0}
+RISK_CONFIDENCE_MULTIPLIERS = {"certain": 1.0, "firm": 0.8, "tentative": 0.5}
+RISK_SIGMOID_K = 0.02
+RISK_CRED_PENALTY_PER = 15
+RISK_CRED_PENALTY_CAP = 30
\ No newline at end of file
diff --git a/extensions/business/cybersec/red_mesh/correlation_mixin.py b/extensions/business/cybersec/red_mesh/correlation_mixin.py
new file mode 100644
index 00000000..63d143f6
--- /dev/null
+++ b/extensions/business/cybersec/red_mesh/correlation_mixin.py
@@ -0,0 +1,213 @@
+"""
+Cross-service correlation engine for RedMesh Scanner.
+
+Analyzes aggregated scan_metadata collected by individual probes to detect
+patterns that no single probe can identify alone — honeypot indicators,
+OS mismatches, infrastructure leaks, and timezone drift.
+"""
+
+import ipaddress
+
+from .findings import Finding, Severity, probe_result
+
+
+# Map keywords found in OS strings to normalized OS families
+_OS_FAMILY_MAP = {
+ "ubuntu": "Linux",
+ "debian": "Linux",
+ "centos": "Linux",
+ "fedora": "Linux",
+ "alpine": "Linux",
+ "rhel": "Linux",
+ "red hat": "Linux",
+ "suse": "Linux",
+ "arch": "Linux",
+ "linux": "Linux",
+ "windows": "Windows",
+ "win32": "Windows",
+ "win64": "Windows",
+ "microsoft": "Windows",
+ "darwin": "macOS",
+ "macos": "macOS",
+ "mac os": "macOS",
+ "freebsd": "FreeBSD",
+ "openbsd": "OpenBSD",
+ "netbsd": "NetBSD",
+}
+
+
+def _normalize_os_family(os_string):
+ """Map an OS claim string to a normalized family name."""
+ lower = os_string.lower()
+ for keyword, family in _OS_FAMILY_MAP.items():
+ if keyword in lower:
+ return family
+ return os_string # unknown — keep as-is for comparison
+
+
+def _is_private_ip(ip_str):
+ """Check if an IP address string is RFC1918 / private."""
+ try:
+ return ipaddress.ip_address(ip_str).is_private
+ except (ValueError, TypeError):
+ return False
+
+
+def _subnet_16(ip_str):
+ """Return the /16 subnet prefix for an IPv4 address string."""
+ try:
+ addr = ipaddress.ip_address(ip_str)
+ if isinstance(addr, ipaddress.IPv4Address):
+ octets = str(addr).split(".")
+ return f"{octets[0]}.{octets[1]}.0.0/16"
+ except (ValueError, TypeError):
+ pass
+ return None
+
+
+class _CorrelationMixin:
+ """
+ Post-scan cross-service correlation engine.
+
+ Consumes ``self.state["scan_metadata"]`` populated by probe mixins and
+ produces ``self.state["correlation_findings"]`` with honeypot indicators,
+ OS consistency checks, infrastructure leak detection, and timezone drift.
+ """
+
+ def _post_scan_correlate(self):
+ """Entry point: run all correlation checks and store findings."""
+ meta = self.state.get("scan_metadata")
+ if not meta:
+ return
+
+ findings = []
+ findings += self._correlate_port_ratio()
+ findings += self._correlate_os_consistency()
+ findings += self._correlate_infrastructure_leak()
+ findings += self._correlate_tls_consistency()
+ findings += self._correlate_timezone_drift()
+
+ if findings:
+ self.P(f"Correlation engine produced {len(findings)} findings.")
+ self.state["correlation_findings"] = [
+ {
+ "severity": f.severity.value,
+ "title": f.title,
+ "description": f.description,
+ "evidence": f.evidence,
+ "remediation": f.remediation,
+ "cwe_id": f.cwe_id,
+ "confidence": f.confidence,
+ }
+ for f in findings
+ ]
+
+ def _correlate_port_ratio(self):
+ """Flag honeypot if >50% of scanned ports are open and >20 ports open."""
+ findings = []
+ open_ports = self.state.get("open_ports", [])
+ ports_scanned = self.state.get("ports_scanned", [])
+ if not ports_scanned:
+ return findings
+ ratio = len(open_ports) / len(ports_scanned)
+ if ratio > 0.5 and len(open_ports) > 20:
+ findings.append(Finding(
+ severity=Severity.HIGH,
+ title=f"Honeypot indicator: {len(open_ports)}/{len(ports_scanned)} ports open ({ratio:.0%})",
+ description="An unusually high ratio of open ports suggests this host is a honeypot. "
+ "Real servers rarely expose more than 50% of scanned ports.",
+ evidence=f"open={len(open_ports)}, scanned={len(ports_scanned)}, ratio={ratio:.2f}",
+ remediation="Verify this is a legitimate host before relying on scan results.",
+ cwe_id="CWE-345",
+ confidence="firm",
+ ))
+ return findings
+
+ def _correlate_os_consistency(self):
+ """Flag honeypot if services report conflicting OS families."""
+ findings = []
+ meta = self.state.get("scan_metadata", {})
+ os_claims = meta.get("os_claims", {})
+ if len(os_claims) < 2:
+ return findings
+
+ families = {}
+ for source, os_string in os_claims.items():
+ family = _normalize_os_family(os_string)
+ families.setdefault(family, []).append(source)
+
+ if len(families) > 1:
+ evidence_parts = [f"{family}: {', '.join(sources)}" for family, sources in families.items()]
+ findings.append(Finding(
+ severity=Severity.HIGH,
+ title=f"Honeypot indicator: OS mismatch across services ({', '.join(families.keys())})",
+ description="Different services on this host report conflicting operating systems. "
+ "This is a strong honeypot indicator — real hosts run a single OS.",
+ evidence="; ".join(evidence_parts),
+ remediation="Investigate this host — it may be a honeypot or compromised system.",
+ cwe_id="CWE-345",
+ confidence="firm",
+ ))
+ return findings
+
+ def _correlate_infrastructure_leak(self):
+ """Detect Docker multi-network architecture from distinct /16 private subnets."""
+ findings = []
+ meta = self.state.get("scan_metadata", {})
+ internal_ips = meta.get("internal_ips", [])
+ if not internal_ips:
+ return findings
+
+ subnets = {}
+ for entry in internal_ips:
+ ip_str = entry.get("ip") if isinstance(entry, dict) else str(entry)
+ if not _is_private_ip(ip_str):
+ continue
+ subnet = _subnet_16(ip_str)
+ if subnet:
+ subnets.setdefault(subnet, []).append(entry)
+
+ if len(subnets) >= 2:
+ subnet_list = ", ".join(sorted(subnets.keys()))
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title=f"Infrastructure leak: {len(subnets)} distinct private subnets detected",
+ description="Internal IPs from multiple /16 subnets were leaked across services, "
+ "suggesting Docker multi-network architecture or multiple internal zones.",
+ evidence=f"Subnets: {subnet_list}",
+ remediation="Review network segmentation; ensure internal IPs are not exposed in service responses.",
+ cwe_id="CWE-200",
+ confidence="firm",
+ ))
+ return findings
+
+ def _correlate_tls_consistency(self):
+ """Compare cert issuers across TLS ports. Placeholder for future SAN emission."""
+ # Will be populated once TLS SAN emission is fully wired
+ return []
+
+ def _correlate_timezone_drift(self):
+ """Detect inconsistent timezone offsets across services."""
+ findings = []
+ meta = self.state.get("scan_metadata", {})
+ tz_hints = meta.get("timezone_hints", [])
+ if len(tz_hints) < 2:
+ return findings
+
+ offsets = set()
+ for entry in tz_hints:
+            offset = str(entry.get("offset", "")) if isinstance(entry, dict) else str(entry)
+ offsets.add(offset)
+
+ if len(offsets) >= 2:
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title=f"Timezone inconsistency: {len(offsets)} distinct offsets detected",
+ description="Services on this host report different timezone offsets. "
+ "Real hosts share a single system clock — this may indicate a honeypot or misconfiguration.",
+ evidence=f"Offsets: {', '.join(sorted(offsets))}",
+ remediation="Investigate timezone configuration across services.",
+ cwe_id="CWE-345",
+ confidence="firm",
+ ))
+ return findings
diff --git a/extensions/business/cybersec/red_mesh/cve_db.py b/extensions/business/cybersec/red_mesh/cve_db.py
new file mode 100644
index 00000000..9e8a85ce
--- /dev/null
+++ b/extensions/business/cybersec/red_mesh/cve_db.py
@@ -0,0 +1,190 @@
+"""
+Declarative CVE database for RedMesh version-based vulnerability matching.
+
+Each entry maps a product + version constraint to a known CVE. The
+``check_cves()`` helper returns ``Finding`` instances that feed directly
+into ``probe_result()``.
+"""
+
+import re
+from dataclasses import dataclass
+from .findings import Finding, Severity
+
+CVE_DB_LAST_UPDATED = "2026-02-20"
+
+
+@dataclass(frozen=True)
+class CveEntry:
+ product: str
+ constraint: str # "<1.4.3", ">=2.4.49,<2.4.51", "<7.0"
+ cve_id: str
+ severity: Severity
+ title: str
+ cwe_id: str = ""
+
+
+CVE_DATABASE: list = [
+ # ── Elasticsearch ──────────────────────────────────────────────────
+ CveEntry("elasticsearch", "<1.2", "CVE-2014-3120", Severity.CRITICAL, "MVEL scripting RCE", "CWE-94"),
+ CveEntry("elasticsearch", "<1.4.3", "CVE-2015-1427", Severity.CRITICAL, "Groovy sandbox escape RCE", "CWE-94"),
+ CveEntry("elasticsearch", "<1.4.5", "CVE-2015-3337", Severity.HIGH, "Directory traversal via site plugin", "CWE-22"),
+ CveEntry("elasticsearch", "<5.6.5", "CVE-2017-11480", Severity.MEDIUM, "XSS via URL access control bypass", "CWE-79"),
+ CveEntry("elasticsearch", "<6.4.3", "CVE-2018-17244", Severity.MEDIUM, "Security bypass in token generation", "CWE-287"),
+ CveEntry("elasticsearch", ">=7.0.0,<7.17.19", "CVE-2024-23450", Severity.HIGH, "Ingest pipeline DoS via deep nesting", "CWE-400"),
+
+ # ── OpenSSH ────────────────────────────────────────────────────────
+    CveEntry("openssh", ">=8.5,<9.8", "CVE-2024-6387", Severity.CRITICAL, "regreSSHion: signal handler race RCE", "CWE-362"),
+ CveEntry("openssh", ">=6.8,<9.9.2", "CVE-2025-26465", Severity.HIGH, "MitM via VerifyHostKeyDNS bypass", "CWE-305"),
+    CveEntry("openssh", "<8.0", "CVE-2019-6111", Severity.HIGH, "SCP client-side file overwrite", "CWE-20"),
+ CveEntry("openssh", "<7.6", "CVE-2017-15906", Severity.MEDIUM, "Improper write restriction in readonly mode", "CWE-732"),
+    CveEntry("openssh", "<7.3", "CVE-2016-6210", Severity.MEDIUM, "User enumeration via timing", "CWE-200"),
+
+ # ── Redis ──────────────────────────────────────────────────────────
+ CveEntry("redis", "<6.0.8", "CVE-2021-32761", Severity.HIGH, "Integer overflow in BITFIELD", "CWE-190"),
+ CveEntry("redis", "<6.2.7", "CVE-2022-24735", Severity.HIGH, "Lua sandbox escape via EVAL", "CWE-94"),
+ CveEntry("redis", "<7.0.5", "CVE-2022-35951", Severity.HIGH, "Integer overflow in XAUTOCLAIM", "CWE-190"),
+ CveEntry("redis", "<6.2.16", "CVE-2024-31449", Severity.HIGH, "Lua bit library stack buffer overflow RCE", "CWE-121"),
+ CveEntry("redis", "<7.2.7", "CVE-2024-46981", Severity.HIGH, "Lua GC use-after-free RCE", "CWE-416"),
+
+ # ── MySQL ──────────────────────────────────────────────────────────
+ CveEntry("mysql", ">=5.7,<5.7.20", "CVE-2016-6662", Severity.CRITICAL, "Config file injection RCE", "CWE-94"),
+ CveEntry("mysql", ">=5.5,<5.5.52", "CVE-2016-6664", Severity.HIGH, "Privilege escalation via mysqld_safe", "CWE-269"),
+ CveEntry("mysql", ">=8.0,<8.0.23", "CVE-2021-2022", Severity.MEDIUM, "InnoDB buffer pool corruption", "CWE-787"),
+ CveEntry("mysql", ">=5.7,<5.7.44", "CVE-2024-20973", Severity.HIGH, "Optimizer DoS via low-privilege network attack", "CWE-404"),
+
+ # ── PostgreSQL (new) ───────────────────────────────────────────────
+ CveEntry("postgresql", "<17.3", "CVE-2025-1094", Severity.HIGH, "libpq quoting SQL injection leading to RCE", "CWE-89"),
+ CveEntry("postgresql", "<17.1", "CVE-2024-10979", Severity.HIGH, "PL/Perl env variable manipulation to RCE", "CWE-94"),
+ CveEntry("postgresql", "<17.1", "CVE-2024-10976", Severity.HIGH, "Row security policy bypass via role confusion", "CWE-862"),
+
+ # ── MongoDB (new) ──────────────────────────────────────────────────
+ CveEntry("mongodb", "<4.4.30", "CVE-2024-8207", Severity.HIGH, "Privilege escalation via untrusted library load", "CWE-284"),
+
+ # ── Exim ───────────────────────────────────────────────────────────
+ CveEntry("exim", "<4.98", "CVE-2024-39929", Severity.CRITICAL, "RFC 2231 header parsing bypass — malware delivery", "CWE-20"),
+ CveEntry("exim", "<4.97.1", "CVE-2023-42115", Severity.CRITICAL, "AUTH out-of-bounds write", "CWE-787"),
+ CveEntry("exim", "<4.96.1", "CVE-2023-42116", Severity.HIGH, "NTLM challenge stack buffer overflow", "CWE-121"),
+ CveEntry("exim", "<4.96.1", "CVE-2023-42114", Severity.HIGH, "NTLM challenge out-of-bounds read", "CWE-125"),
+ CveEntry("exim", "<4.94.2", "CVE-2021-27216", Severity.HIGH, "Privilege escalation via symlink attack", "CWE-59"),
+
+ # ── Apache httpd ───────────────────────────────────────────────────
+ CveEntry("apache", ">=2.4.0,<2.4.60", "CVE-2024-38475", Severity.CRITICAL, "mod_rewrite escaping flaw — SSRF / RCE", "CWE-116"),
+ CveEntry("apache", ">=2.4.0,<2.4.60", "CVE-2024-38476", Severity.CRITICAL, "Backend header exploit — SSRF / local script exec", "CWE-829"),
+ CveEntry("apache", ">=2.4.49,<2.4.51", "CVE-2021-41773", Severity.CRITICAL, "Path traversal + RCE", "CWE-22"),
+ CveEntry("apache", ">=2.4.0,<2.4.52", "CVE-2021-44790", Severity.CRITICAL, "mod_lua buffer overflow", "CWE-787"),
+ CveEntry("apache", ">=2.4.0,<2.4.62", "CVE-2024-40725", Severity.HIGH, "HTTP request smuggling via mod_proxy", "CWE-444"),
+ CveEntry("apache", "<2.4.49", "CVE-2021-40438", Severity.HIGH, "mod_proxy SSRF", "CWE-918"),
+ CveEntry("apache", "<2.2.34", "CVE-2017-7679", Severity.HIGH, "mod_mime buffer overread", "CWE-126"),
+
+ # ── nginx ──────────────────────────────────────────────────────────
+ CveEntry("nginx", "<1.17.7", "CVE-2019-20372", Severity.MEDIUM, "HTTP request smuggling", "CWE-444"),
+ CveEntry("nginx", "<1.5.7", "CVE-2013-4547", Severity.HIGH, "URI processing security bypass", "CWE-20"),
+ CveEntry("nginx", ">=1.25.0,<1.25.4", "CVE-2024-24989", Severity.HIGH, "HTTP/3 QUIC null pointer crash", "CWE-476"),
+ CveEntry("nginx", ">=1.25.0,<1.25.4", "CVE-2024-24990", Severity.HIGH, "HTTP/3 use-after-free crash", "CWE-416"),
+
+ # ── Postfix ────────────────────────────────────────────────────────
+ CveEntry("postfix", "<3.5.23", "CVE-2023-51764", Severity.MEDIUM, "SMTP smuggling via pipelining", "CWE-345"),
+
+ # ── OpenSSL ────────────────────────────────────────────────────────
+ CveEntry("openssl", "<1.1.1", "CVE-2020-1971", Severity.HIGH, "NULL dereference in GENERAL_NAME_cmp", "CWE-476"),
+ CveEntry("openssl", "<3.0.7", "CVE-2022-3602", Severity.HIGH, "X.509 email address buffer overflow", "CWE-120"),
+ CveEntry("openssl", ">=3.2.0,<3.2.4", "CVE-2024-12797", Severity.HIGH, "RPK verification bypass enabling MitM", "CWE-392"),
+ CveEntry("openssl", "<3.0.14", "CVE-2024-4741", Severity.HIGH, "SSL_free_buffers use-after-free", "CWE-416"),
+
+ # ── ProFTPD ────────────────────────────────────────────────────────
+ CveEntry("proftpd", "<1.3.6", "CVE-2019-12815", Severity.CRITICAL, "Arbitrary file copy via mod_copy", "CWE-284"),
+ CveEntry("proftpd", "<1.3.8", "CVE-2024-48651", Severity.HIGH, "Supplemental group inherits GID 0 (root group)", "CWE-269"),
+
+ # ── vsftpd ─────────────────────────────────────────────────────────
+ CveEntry("vsftpd", ">=2.3.4,<2.3.5", "CVE-2011-2523", Severity.CRITICAL, "Backdoor command execution", "CWE-506"),
+
+ # ── Memcached (new) ────────────────────────────────────────────────
+ CveEntry("memcached", "<1.4.33", "CVE-2016-8704", Severity.CRITICAL, "process_bin_append integer overflow RCE", "CWE-190"),
+ CveEntry("memcached", "<1.4.33", "CVE-2016-8705", Severity.HIGH, "process_bin_update integer overflow RCE", "CWE-190"),
+ CveEntry("memcached", "<1.4.33", "CVE-2016-8706", Severity.CRITICAL, "SASL auth integer overflow RCE", "CWE-190"),
+
+ # ── VNC (new) ──────────────────────────────────────────────────────
+ CveEntry("tightvnc", "<=1.3.10", "CVE-2019-15678", Severity.CRITICAL, "rfbServerCutText heap buffer overflow RCE", "CWE-122"),
+ CveEntry("tightvnc", "<=1.3.10", "CVE-2019-15679", Severity.CRITICAL, "InitialiseRFBConnection heap overflow RCE", "CWE-122"),
+ CveEntry("libvncserver", "<0.9.13", "CVE-2019-20788", Severity.CRITICAL, "HandleCursorShape integer overflow RCE", "CWE-190"),
+
+ # ── Samba (new — for SMB deep enumeration) ─────────────────────────
+ CveEntry("samba", ">=4.16.0,<4.17.12", "CVE-2023-3961", Severity.CRITICAL, "Pipe name validation bypass — root socket access", "CWE-22"),
+ CveEntry("samba", "<4.13.17", "CVE-2021-44142", Severity.CRITICAL, "vfs_fruit heap overflow RCE", "CWE-787"),
+ CveEntry("samba", "<4.6.1", "CVE-2017-7494", Severity.CRITICAL, "SambaCry — writable share RCE via shared library upload", "CWE-94"),
+
+ # ── Asterisk / FreePBX (new — for SIP probe) ──────────────────────
+ CveEntry("asterisk", "<20.11.0", "CVE-2024-42365", Severity.HIGH, "AMI manager injection via caller ID", "CWE-94"),
+ CveEntry("asterisk", "<18.24.0", "CVE-2023-49786", Severity.HIGH, "PJSIP request smuggling via multipart parser", "CWE-444"),
+]
+
+
+def check_cves(product: str, version: str) -> list:
+ """Match version against CVE database. Returns list of Findings."""
+ findings = []
+ for entry in CVE_DATABASE:
+ if entry.product != product:
+ continue
+ if _matches_constraint(version, entry.constraint):
+ findings.append(Finding(
+ severity=entry.severity,
+ title=f"{entry.cve_id}: {entry.title} ({product} {version})",
+ description=f"{product} {version} is vulnerable to {entry.cve_id}. "
+ "NOTE: Linux distributions backport security fixes without changing "
+ "the upstream version number — this may be a false positive.",
+ evidence=f"Detected version: {version}, affected: {entry.constraint}",
+ remediation=f"Upgrade {product} to a patched version, or verify backport status with the OS vendor.",
+ cwe_id=entry.cwe_id,
+ confidence="tentative",
+ ))
+ return findings
+
+
+def _matches_constraint(version: str, constraint: str) -> bool:
+ """Parse version constraint string and compare.
+
+ Supports: ``<1.4.3``, ``>=2.4.49,<2.4.51``, ``<7.0``.
+ Comma-separated constraints are ANDed.
+ """
+ parts = [c.strip() for c in constraint.split(",")]
+ parsed = _parse_version(version)
+ if parsed is None:
+ return False
+ for part in parts:
+ if not _check_single(parsed, part):
+ return False
+ return True
+
+
+def _parse_version(version: str):
+ """Extract leading numeric version tuple from a string like '1.4.3-beta'."""
+ m = re.match(r"(\d+(?:\.\d+)*)", version.strip())
+ if not m:
+ return None
+ return tuple(int(x) for x in m.group(1).split("."))
+
+
+def _check_single(parsed: tuple, expr: str) -> bool:
+ """Evaluate one comparison like '<1.4.3' or '>=2.4.49'."""
+ m = re.match(r"(>=|<=|>|<|==)(.+)", expr.strip())
+ if not m:
+ return False
+ op, ver_str = m.group(1), m.group(2)
+ target = _parse_version(ver_str)
+ if target is None:
+ return False
+ # Normalize lengths for comparison
+ max_len = max(len(parsed), len(target))
+ a = parsed + (0,) * (max_len - len(parsed))
+ b = target + (0,) * (max_len - len(target))
+ if op == "<":
+ return a < b
+ elif op == "<=":
+ return a <= b
+ elif op == ">":
+ return a > b
+ elif op == ">=":
+ return a >= b
+ elif op == "==":
+ return a == b
+ return False
diff --git a/extensions/business/cybersec/red_mesh/findings.py b/extensions/business/cybersec/red_mesh/findings.py
new file mode 100644
index 00000000..17b08ef8
--- /dev/null
+++ b/extensions/business/cybersec/red_mesh/findings.py
@@ -0,0 +1,48 @@
+"""
+Structured vulnerability findings for RedMesh probes.
+
+Every probe returns a plain dict via ``probe_result()`` so that the
+aggregator pipeline (merge_objects_deep, R1FS serialization) keeps working
+unchanged. The ``Finding`` dataclass and ``Severity`` enum provide
+type-safe construction and JSON-safe serialization.
+"""
+
+from dataclasses import dataclass, asdict
+from enum import Enum
+
+
+class Severity(str, Enum):
+ CRITICAL = "CRITICAL"
+ HIGH = "HIGH"
+ MEDIUM = "MEDIUM"
+ LOW = "LOW"
+ INFO = "INFO"
+
+
+_VULN_SEVERITIES = frozenset({Severity.CRITICAL, Severity.HIGH, Severity.MEDIUM})
+
+
+@dataclass(frozen=True)
+class Finding:
+ severity: Severity
+ title: str
+ description: str
+ evidence: str = ""
+ remediation: str = ""
+ owasp_id: str = "" # e.g. "A07:2021"
+ cwe_id: str = "" # e.g. "CWE-287"
+ confidence: str = "firm" # certain | firm | tentative
+
+
+def probe_result(*, raw_data: dict = None, findings: list = None) -> dict:
+ """Build a probe return dict: JSON-safe, merge_objects_deep-safe, backward-compat."""
+ result = dict(raw_data or {})
+ f_list = findings or []
+ result["findings"] = [{**asdict(f), "severity": f.severity.value} for f in f_list]
+ result["vulnerabilities"] = [f.title for f in f_list if f.severity in _VULN_SEVERITIES]
+ return result
+
+
+def probe_error(target: str, port: int, probe_name: str, exc: Exception) -> None:
+    """Sentinel for failed probes — returns None so the result is not stored; params kept for interface compatibility, no logging performed here."""
+ return None
diff --git a/extensions/business/cybersec/red_mesh/pentester_api_01.py b/extensions/business/cybersec/red_mesh/pentester_api_01.py
index dbba0e31..64ef67fc 100644
--- a/extensions/business/cybersec/red_mesh/pentester_api_01.py
+++ b/extensions/business/cybersec/red_mesh/pentester_api_01.py
@@ -40,6 +40,11 @@
LLM_ANALYSIS_SECURITY_ASSESSMENT,
LLM_ANALYSIS_VULNERABILITY_SUMMARY,
LLM_ANALYSIS_REMEDIATION_PLAN,
+ RISK_SEVERITY_WEIGHTS,
+ RISK_CONFIDENCE_MULTIPLIERS,
+ RISK_SIGMOID_K,
+ RISK_CRED_PENALTY_PER,
+ RISK_CRED_PENALTY_CAP,
)
__VER__ = '0.8.2'
@@ -82,6 +87,13 @@
"LLM_AGENT_API_TIMEOUT": 120, # Timeout in seconds for LLM API calls
"LLM_AUTO_ANALYSIS_TYPE": "security_assessment", # Default analysis type
+ # Security hardening controls
+ "REDACT_CREDENTIALS": True, # Strip passwords from persisted reports
+ "ICS_SAFE_MODE": True, # Halt probing when ICS/SCADA indicators detected
+ "RATE_LIMIT_ENABLED": True, # Enforce minimum probe gap when sand walking disabled
+ "SCANNER_IDENTITY": "probe.redmesh.local", # EHLO domain for SMTP probes
+ "SCANNER_USER_AGENT": "", # HTTP User-Agent (empty = default requests UA)
+
'VALIDATION_RULES': {
**BasePlugin.CONFIG['VALIDATION_RULES'],
},
@@ -124,6 +136,7 @@ def on_init(self):
self.scan_jobs = {} # target -> PentestJob instance
self.completed_jobs_reports = {} # target -> final report dict
self.lst_completed_jobs = [] # List of completed jobs
+ self._audit_log = [] # Structured audit event log
self.__last_checked_jobs = 0
self.__warmupstart = self.time()
self.__warmup_done = False
@@ -362,6 +375,10 @@ def _launch_job(
enabled_features=None,
scan_min_delay=0.0,
scan_max_delay=0.0,
+ ics_safe_mode=True,
+ rate_limit_enabled=True,
+ scanner_identity="probe.redmesh.local",
+ scanner_user_agent="",
):
"""
Launch local worker threads for a job by splitting the port range.
@@ -453,6 +470,10 @@ def _launch_job(
enabled_features=enabled_features,
scan_min_delay=scan_min_delay,
scan_max_delay=scan_max_delay,
+ ics_safe_mode=ics_safe_mode,
+ rate_limit_enabled=rate_limit_enabled,
+ scanner_identity=scanner_identity,
+ scanner_user_agent=scanner_user_agent,
)
batch_job.start()
local_jobs[batch_job.local_worker_id] = batch_job
@@ -548,6 +569,10 @@ def _maybe_launch_jobs(self, nr_local_workers=None):
exceptions = []
scan_min_delay = job_specs.get("scan_min_delay", self.cfg_scan_min_rnd_delay)
scan_max_delay = job_specs.get("scan_max_delay", self.cfg_scan_max_rnd_delay)
+ ics_safe_mode = job_specs.get("ics_safe_mode", self.cfg_ics_safe_mode)
+ rate_limit_enabled = job_specs.get("rate_limit_enabled", self.cfg_rate_limit_enabled)
+ scanner_identity = job_specs.get("scanner_identity", self.cfg_scanner_identity)
+ scanner_user_agent = job_specs.get("scanner_user_agent", self.cfg_scanner_user_agent)
workers_requested = nr_local_workers if nr_local_workers is not None else self.cfg_nr_local_workers
self.P("Using {} local workers for job {}".format(workers_requested, job_id))
try:
@@ -564,6 +589,10 @@ def _maybe_launch_jobs(self, nr_local_workers=None):
enabled_features=enabled_features,
scan_min_delay=scan_min_delay,
scan_max_delay=scan_max_delay,
+ ics_safe_mode=ics_safe_mode,
+ rate_limit_enabled=rate_limit_enabled,
+ scanner_identity=scanner_identity,
+ scanner_user_agent=scanner_user_agent,
)
except ValueError as exc:
self.P(f"Skipping job {job_id}: {exc}", color='r')
@@ -663,13 +692,128 @@ def merge_objects_deep(self, obj_a, obj_b):
merged[key] = value_b
return merged
elif isinstance(obj_a, list) and isinstance(obj_b, list):
- return list(set(obj_a).union(set(obj_b)))
+ try:
+ return list(set(obj_a).union(set(obj_b)))
+ except TypeError:
+ import json as _json
+ seen = set()
+ merged = []
+ for item in obj_a + obj_b:
+ try:
+ key = _json.dumps(item, sort_keys=True, default=str)
+ except (TypeError, ValueError):
+ key = id(item)
+ if key not in seen:
+ seen.add(key)
+ merged.append(item)
+ return merged
elif isinstance(obj_a, set) and isinstance(obj_b, set):
return obj_a.union(obj_b)
else:
return obj_b # Prefer obj_b in case of conflict
+ def _redact_report(self, report):
+ """
+ Redact credentials from a report before persistence.
+
+ Deep-copies the report and masks password values in findings and
+ accepted_credentials lists so that sensitive data is not written
+ to R1FS or CStore.
+
+ Parameters
+ ----------
+ report : dict
+ Aggregated scan report.
+
+ Returns
+ -------
+ dict
+ Redacted copy of the report.
+ """
+ import re as _re
+ from copy import deepcopy
+ redacted = deepcopy(report)
+ service_info = redacted.get("service_info", {})
+ for port_key, methods in service_info.items():
+ if not isinstance(methods, dict):
+ continue
+ for method_key, method_data in methods.items():
+ if not isinstance(method_data, dict):
+ continue
+ # Redact findings evidence
+ for finding in method_data.get("findings", []):
+ if not isinstance(finding, dict):
+ continue
+ evidence = finding.get("evidence", "")
+ if isinstance(evidence, str):
+ evidence = _re.sub(
+ r'(Accepted credential:\s*\S+?):(\S+)',
+ r'\1:***', evidence
+ )
+ evidence = _re.sub(
+ r'(Accepted random creds\s*\S+?):(\S+)',
+ r'\1:***', evidence
+ )
+ finding["evidence"] = evidence
+ # Redact accepted_credentials lists
+ creds = method_data.get("accepted_credentials", [])
+ if isinstance(creds, list):
+ method_data["accepted_credentials"] = [
+ _re.sub(r'^(\S+?):(.+)$', r'\1:***', c) if isinstance(c, str) else c
+ for c in creds
+ ]
+ return redacted
+
+
+ def _log_audit_event(self, event_type, details):
+ """
+ Append a structured audit event to the in-memory log.
+
+ Parameters
+ ----------
+ event_type : str
+ Event category (e.g. ``"scan_launched"``, ``"scan_completed"``).
+ details : dict
+ Arbitrary event metadata.
+
+ Returns
+ -------
+ None
+ """
+ entry = {
+ "timestamp": self.time(),
+ "event": event_type,
+ "node": self.ee_addr,
+ "node_alias": self.ee_id,
+ **details,
+ }
+ self.P(f"[AUDIT] {event_type}: {self.json_dumps(entry)}")
+ self._audit_log.append(entry)
+ # Cap at 1000 entries to prevent memory bloat
+ if len(self._audit_log) > 1000:
+ self._audit_log = self._audit_log[-1000:]
+ return
+
+
+ def _emit_timeline_event(self, job_specs, event_type, label, actor=None, actor_type="system", meta=None):
+ job_specs.setdefault("timeline", []).append({
+ "type": event_type,
+ "label": label,
+ "date": self.time(),
+ "actor": actor or self.ee_id,
+ "actor_type": actor_type,
+ "meta": meta or {},
+ })
+
+ def _get_timeline_date(self, job_specs, event_type):
+ """Get epoch date of first event matching type."""
+ for entry in job_specs.get("timeline", []):
+ if entry["type"] == event_type:
+ return entry["date"]
+ return None
+
+
def _close_job(self, job_id, canceled=False):
"""
Close a local job, aggregate reports, and persist in CStore.
@@ -708,8 +852,11 @@ def _close_job(self, job_id, canceled=False):
# Save full report to R1FS and store only CID in CStore
if report:
+ # Redact credentials before persisting
+ redact = job_specs.get("redact_credentials", True)
+ persist_report = self._redact_report(report) if redact else report
try:
- report_cid = self.r1fs.add_json(report, show_logs=False)
+ report_cid = self.r1fs.add_json(persist_report, show_logs=False)
if report_cid:
worker_entry["report_cid"] = report_cid
worker_entry["result"] = None # No blob in CStore
@@ -741,6 +888,20 @@ def _close_job(self, job_id, canceled=False):
self.json_dumps(job_specs, indent=2)
))
self.chainstore_hset(hkey=self.cfg_instance_id, key=job_id, value=job_specs)
+
+ # Audit: scan completed
+ nr_findings = 0
+ for port_data in report.get("service_info", {}).values():
+ if isinstance(port_data, dict):
+ for method_data in port_data.values():
+ if isinstance(method_data, dict):
+ nr_findings += len(method_data.get("findings", []))
+ self._log_audit_event("scan_completed", {
+ "job_id": job_id,
+ "target": job_specs.get("target"),
+ "canceled": canceled,
+ "nr_findings": nr_findings,
+ })
return
@@ -798,6 +959,106 @@ def _maybe_close_jobs(self):
return
+ def _compute_risk_score(self, aggregated_report):
+ """
+ Compute a 0-100 risk score from an aggregated scan report.
+
+ The score combines four components:
+ A. Finding severity (weighted by confidence)
+ B. Open ports (diminishing returns)
+ C. Attack surface breadth (distinct protocols)
+ D. Default credentials penalty
+
+ Parameters
+ ----------
+ aggregated_report : dict
+ Aggregated report with service_info, web_tests_info, correlation_findings,
+ open_ports, and port_protocols.
+
+ Returns
+ -------
+ dict
+ ``{"score": int, "breakdown": dict}``
+ """
+ import math
+
+ findings_score = 0.0
+ finding_counts = {"CRITICAL": 0, "HIGH": 0, "MEDIUM": 0, "LOW": 0, "INFO": 0}
+ cred_count = 0
+
+ def process_findings(findings_list):
+ nonlocal findings_score, cred_count
+ for finding in findings_list:
+ if not isinstance(finding, dict):
+ continue
+ severity = finding.get("severity", "INFO").upper()
+ confidence = finding.get("confidence", "firm").lower()
+ weight = RISK_SEVERITY_WEIGHTS.get(severity, 0)
+ multiplier = RISK_CONFIDENCE_MULTIPLIERS.get(confidence, 0.5)
+ findings_score += weight * multiplier
+ if severity in finding_counts:
+ finding_counts[severity] += 1
+ title = finding.get("title", "")
+ if isinstance(title, str) and "default credential accepted" in title.lower():
+ cred_count += 1
+
+ # A. Iterate service_info findings
+ service_info = aggregated_report.get("service_info", {})
+ for port_key, probes in service_info.items():
+ if not isinstance(probes, dict):
+ continue
+ for probe_name, probe_data in probes.items():
+ if not isinstance(probe_data, dict):
+ continue
+ process_findings(probe_data.get("findings", []))
+
+ # A. Iterate web_tests_info findings
+ web_tests_info = aggregated_report.get("web_tests_info", {})
+ for port_key, tests in web_tests_info.items():
+ if not isinstance(tests, dict):
+ continue
+ for test_name, test_data in tests.items():
+ if not isinstance(test_data, dict):
+ continue
+ process_findings(test_data.get("findings", []))
+
+ # A. Iterate correlation_findings
+ correlation_findings = aggregated_report.get("correlation_findings", [])
+ if isinstance(correlation_findings, list):
+ process_findings(correlation_findings)
+
+ # B. Open ports — diminishing returns: 15 × (1 - e^(-ports/8))
+ open_ports = aggregated_report.get("open_ports", [])
+ nr_ports = len(open_ports) if isinstance(open_ports, list) else 0
+ open_ports_score = 15.0 * (1.0 - math.exp(-nr_ports / 8.0))
+
+ # C. Attack surface breadth — distinct protocols: 10 × (1 - e^(-protocols/4))
+ port_protocols = aggregated_report.get("port_protocols", {})
+ nr_protocols = len(set(port_protocols.values())) if isinstance(port_protocols, dict) else 0
+ breadth_score = 10.0 * (1.0 - math.exp(-nr_protocols / 4.0))
+
+ # D. Default credentials penalty
+ credentials_penalty = min(cred_count * RISK_CRED_PENALTY_PER, RISK_CRED_PENALTY_CAP)
+
+ # Raw total
+ raw_total = findings_score + open_ports_score + breadth_score + credentials_penalty
+
+ # Normalize to 0-100 via logistic curve
+ score = int(round(100.0 * (2.0 / (1.0 + math.exp(-RISK_SIGMOID_K * raw_total)) - 1.0)))
+ score = max(0, min(100, score))
+
+ return {
+ "score": score,
+ "breakdown": {
+ "findings_score": round(findings_score, 1),
+ "open_ports_score": round(open_ports_score, 1),
+ "breadth_score": round(breadth_score, 1),
+ "credentials_penalty": credentials_penalty,
+ "raw_total": round(raw_total, 1),
+ "finding_counts": finding_counts,
+ },
+ }
+
def _maybe_finalize_pass(self):
"""
Launcher finalizes completed passes and orchestrates continuous monitoring.
@@ -848,23 +1109,39 @@ def _maybe_finalize_pass(self):
# ═══════════════════════════════════════════════════
# STATE: All peers completed current pass
# ═══════════════════════════════════════════════════
+ pass_date_started = self._get_timeline_date(job_specs, "pass_started") or self._get_timeline_date(job_specs, "created")
+ pass_date_completed = self.time()
pass_history.append({
"pass_nr": job_pass,
- "completed_at": self.time(),
+ "date_started": pass_date_started,
+ "date_completed": pass_date_completed,
+ "duration": round(pass_date_completed - pass_date_started, 2) if pass_date_started else None,
"reports": {addr: w.get("report_cid") for addr, w in workers.items()}
})
+ # Compute risk score for this pass
+ aggregated_for_score = self._collect_aggregated_report(workers)
+ if aggregated_for_score:
+ risk_result = self._compute_risk_score(aggregated_for_score)
+ pass_history[-1]["risk_score"] = risk_result["score"]
+ pass_history[-1]["risk_breakdown"] = risk_result["breakdown"]
+ job_specs["risk_score"] = risk_result["score"]
+ self.P(f"Risk score for job {job_id} pass {job_pass}: {risk_result['score']}/100")
+
# Handle SINGLEPASS - set FINALIZED and exit (no scheduling)
if run_mode == "SINGLEPASS":
job_specs["job_status"] = "FINALIZED"
- job_specs["date_updated"] = self.time()
- job_specs["date_finalized"] = self.time()
+ created_at = self._get_timeline_date(job_specs, "created") or self.time()
+ job_specs["duration"] = round(self.time() - created_at, 2)
+ self._emit_timeline_event(job_specs, "scan_completed", "Scan completed")
self.P(f"[SINGLEPASS] Job {job_id} complete. Status set to FINALIZED.")
# Run LLM auto-analysis on aggregated report (launcher only)
if self.cfg_llm_agent_api_enabled:
self._run_aggregated_llm_analysis(job_id, job_specs, workers, pass_nr=job_pass)
+ self._run_quick_summary_analysis(job_id, job_specs, workers, pass_nr=job_pass)
+ self._emit_timeline_event(job_specs, "finalized", "Job finalized")
self.chainstore_hset(hkey=self.cfg_instance_id, key=job_key, value=job_specs)
continue
@@ -873,14 +1150,17 @@ def _maybe_finalize_pass(self):
# Check if soft stop was scheduled
if job_status == "SCHEDULED_FOR_STOP":
job_specs["job_status"] = "STOPPED"
- job_specs["date_updated"] = self.time()
- job_specs["date_finalized"] = self.time()
+ created_at = self._get_timeline_date(job_specs, "created") or self.time()
+ job_specs["duration"] = round(self.time() - created_at, 2)
+ self._emit_timeline_event(job_specs, "scan_completed", f"Scan completed (pass {job_pass})")
self.P(f"[CONTINUOUS] Pass {job_pass} complete for job {job_id}. Status set to STOPPED (soft stop was scheduled)")
# Run LLM auto-analysis on aggregated report (launcher only)
if self.cfg_llm_agent_api_enabled:
self._run_aggregated_llm_analysis(job_id, job_specs, workers, pass_nr=job_pass)
+ self._run_quick_summary_analysis(job_id, job_specs, workers, pass_nr=job_pass)
+ self._emit_timeline_event(job_specs, "stopped", "Job stopped")
self.chainstore_hset(hkey=self.cfg_instance_id, key=job_key, value=job_specs)
continue
# end if
@@ -888,12 +1168,13 @@ def _maybe_finalize_pass(self):
# Run LLM auto-analysis for this pass (launcher only)
if self.cfg_llm_agent_api_enabled:
self._run_aggregated_llm_analysis(job_id, job_specs, workers, pass_nr=job_pass)
+ self._run_quick_summary_analysis(job_id, job_specs, workers, pass_nr=job_pass)
# Schedule next pass
interval = job_specs.get("monitor_interval", self.cfg_monitor_interval)
jitter = random.uniform(0, self.cfg_monitor_jitter)
job_specs["next_pass_at"] = self.time() + interval + jitter
- job_specs["date_updated"] = self.time()
+ self._emit_timeline_event(job_specs, "pass_completed", f"Pass {job_pass} completed")
self.P(f"[CONTINUOUS] Pass {job_pass} complete for job {job_id}. Next pass in {interval}s (+{jitter:.1f}s jitter)")
self.chainstore_hset(hkey=self.cfg_instance_id, key=job_key, value=job_specs)
@@ -909,6 +1190,7 @@ def _maybe_finalize_pass(self):
# ═══════════════════════════════════════════════════
job_specs["job_pass"] = job_pass + 1
job_specs["next_pass_at"] = None
+ self._emit_timeline_event(job_specs, "pass_started", f"Pass {job_pass + 1} started")
for addr in workers:
workers[addr]["finished"] = False
@@ -1079,6 +1361,14 @@ def launch_test(
task_name: str = "",
task_description: str = "",
selected_peers: list[str] = None,
+ redact_credentials: bool = True,
+ ics_safe_mode: bool = True,
+ rate_limit_enabled: bool = True,
+ scanner_identity: str = "",
+ scanner_user_agent: str = "",
+ authorized: bool = False,
+ created_by_name: str = "",
+ created_by_id: str = "",
):
"""
Start a pentest on the specified target.
@@ -1134,6 +1424,11 @@ def launch_test(
# INFO: This method only announces the job to the network. It does not
# execute the job itself - that part is handled by PentestJob
# executed after periodical check from plugin process.
+ if not authorized:
+ raise ValueError(
+ "Scan authorization required. Confirm you are authorized to scan this target."
+ )
+
if excluded_features is None:
excluded_features = self.cfg_excluded_features or []
if not target:
@@ -1247,6 +1542,12 @@ def launch_test(
# end for chainstore_peers
# end if
+ # Resolve scanner identity defaults
+ if not scanner_identity:
+ scanner_identity = self.cfg_scanner_identity
+ if not scanner_user_agent:
+ scanner_user_agent = self.cfg_scanner_user_agent
+
job_id = self.uuid(8)
self.P(f"Launching {job_id=} {target=} with {exceptions=}")
self.P(f"Announcing pentest to workers (instance_id {self.cfg_instance_id})...")
@@ -1258,9 +1559,7 @@ def launch_test(
"end_port" : end_port,
"launcher": self.ee_addr,
"launcher_alias": self.ee_id,
- "date_created": self.time(),
- "date_updated": self.time(),
- "date_finalized": None,
+ "timeline": [],
"workers" : workers,
"distribution_strategy": distribution_strategy,
"port_order": port_order,
@@ -1283,12 +1582,42 @@ def launch_test(
"task_description": task_description,
# Peer selection (defaults to all chainstore_peers if not specified)
"selected_peers": active_peers,
+ # Security hardening options
+ "redact_credentials": redact_credentials,
+ "ics_safe_mode": ics_safe_mode,
+ "rate_limit_enabled": rate_limit_enabled,
+ "scanner_identity": scanner_identity,
+ "scanner_user_agent": scanner_user_agent,
+ "authorized": True,
+ # User identity (forwarded from Navigator UI)
+ "created_by_name": created_by_name or None,
+ "created_by_id": created_by_id or None,
}
+ self._emit_timeline_event(
+ job_specs, "created",
+ f"Job created by {created_by_name}",
+ actor=created_by_name,
+ actor_type="user"
+ )
+ self._emit_timeline_event(job_specs, "started", "Scan started", actor=self.ee_id, actor_type="node")
self.chainstore_hset(
hkey=self.cfg_instance_id,
key=job_id,
value=job_specs
)
+
+ self._log_audit_event("scan_launched", {
+ "job_id": job_id,
+ "target": target,
+ "start_port": start_port,
+ "end_port": end_port,
+ "launcher": self.ee_addr,
+ "enabled_features_count": len(enabled_features),
+ "redact_credentials": redact_credentials,
+ "ics_safe_mode": ics_safe_mode,
+ "rate_limit_enabled": rate_limit_enabled,
+ })
+
all_network_jobs = self.chainstore_hgetall(hkey=self.cfg_instance_id)
report = {}
for other_key, other_spec in all_network_jobs.items():
@@ -1377,6 +1706,9 @@ def list_network_jobs(self):
for job_key, job_spec in raw_network_jobs.items():
normalized_key, normalized_spec = self._normalize_job_record(job_key, job_spec)
if normalized_key and normalized_spec:
+ # Replace heavy pass_history with a lightweight count for listing
+ pass_history = normalized_spec.pop("pass_history", None)
+ normalized_spec["pass_count"] = len(pass_history) if isinstance(pass_history, list) else 0
normalized_jobs[normalized_key] = normalized_spec
return normalized_jobs
@@ -1429,13 +1761,76 @@ def stop_and_delete_job(self, job_id : str):
worker_entry = job_specs.setdefault("workers", {}).setdefault(self.ee_addr, {})
worker_entry["finished"] = True
worker_entry["canceled"] = True
+ self._emit_timeline_event(job_specs, "stopped", "Job stopped and deleted", actor_type="user")
self.chainstore_hset(hkey=self.cfg_instance_id, key=job_id, value=job_specs)
else:
self.chainstore_hset(hkey=self.cfg_instance_id, key=job_id, value=None)
self.P(f"Job {job_id} deleted.")
+ self._log_audit_event("scan_stopped", {"job_id": job_id})
return {"status": "success", "job_id": job_id}
+ @BasePlugin.endpoint
+ def purge_job(self, job_id: str):
+ """
+ Purge a job: delete all R1FS artifacts then tombstone the CStore entry.
+ Job must be finished/canceled — cannot purge a running job.
+
+ Parameters
+ ----------
+ job_id : str
+ Identifier of the job to purge.
+
+ Returns
+ -------
+ dict
+ Status of the purge operation including CID deletion counts.
+ """
+ raw = self.chainstore_hget(hkey=self.cfg_instance_id, key=job_id)
+ if not isinstance(raw, dict):
+ return {"status": "error", "message": f"Job {job_id} not found."}
+
+ _, job_specs = self._normalize_job_record(job_id, raw)
+
+ # Reject if job is still running
+ workers = job_specs.get("workers", {})
+ if any(not w.get("finished") for w in workers.values()):
+ return {"status": "error", "message": "Cannot purge a running job. Stop it first."}
+
+ # Collect all CIDs (deduplicated)
+ cids = set()
+ for addr, w in workers.items():
+ cid = w.get("report_cid")
+ if cid:
+ cids.add(cid)
+
+ for entry in job_specs.get("pass_history", []):
+ for addr, cid in entry.get("reports", {}).items():
+ if cid:
+ cids.add(cid)
+ for key in ("llm_analysis_cid", "quick_summary_cid"):
+ cid = entry.get(key)
+ if cid:
+ cids.add(cid)
+
+ # Delete from R1FS (best-effort)
+ deleted = 0
+ for cid in cids:
+ try:
+ self.r1fs.delete_file(cid, show_logs=False, raise_on_error=False)
+ deleted += 1
+ except Exception as e:
+ self.P(f"Failed to delete CID {cid}: {e}", color='y')
+
+ # Tombstone CStore entry
+ self.chainstore_hset(hkey=self.cfg_instance_id, key=job_id, value=None)
+
+ self.P(f"Purged job {job_id}: {deleted}/{len(cids)} CIDs deleted.")
+ self._log_audit_event("job_purged", {"job_id": job_id, "cids_deleted": deleted, "cids_total": len(cids)})
+
+ return {"status": "success", "job_id": job_id, "cids_deleted": deleted, "cids_total": len(cids)}
+
+
@BasePlugin.endpoint
def get_report(self, cid: str):
"""
@@ -1463,6 +1858,25 @@ def get_report(self, cid: str):
return {"error": str(e), "cid": cid}
+ @BasePlugin.endpoint
+ def get_audit_log(self, limit: int = 100):
+ """
+ Retrieve recent audit events.
+
+ Parameters
+ ----------
+ limit : int, optional
+      Maximum number of entries to return (default 100; zero or negative values return the full log).
+
+ Returns
+ -------
+ dict
+ Audit log entries and total count.
+ """
+ entries = self._audit_log[-limit:] if limit > 0 else self._audit_log
+ return {"audit_log": entries, "total": len(self._audit_log)}
+
+
@BasePlugin.endpoint(method="post")
def stop_monitoring(self, job_id: str, stop_type: str = "SOFT"):
"""
@@ -1495,13 +1909,12 @@ def stop_monitoring(self, job_id: str, stop_type: str = "SOFT"):
if stop_type == "HARD":
job_specs["job_status"] = "STOPPED"
- job_specs["date_updated"] = self.time()
- job_specs["date_finalized"] = self.time()
+ self._emit_timeline_event(job_specs, "stopped", "Job stopped", actor_type="user")
self.P(f"[CONTINUOUS] Hard stop for job {job_id} after {passes_completed} passes")
else:
# SOFT stop - let current pass complete
job_specs["job_status"] = "SCHEDULED_FOR_STOP"
- job_specs["date_updated"] = self.time()
+ self._emit_timeline_event(job_specs, "scheduled_for_stop", "Stop scheduled", actor_type="user")
self.P(f"[CONTINUOUS] Soft stop scheduled for job {job_id} (will stop after current pass)")
self.chainstore_hset(hkey=self.cfg_instance_id, key=job_id, value=job_specs)
@@ -1613,14 +2026,24 @@ def analyze_job(
pass_history[-1]["llm_analysis_cid"] = analysis_cid
else:
# No pass_history yet - create one
+ pass_date_started = self._get_timeline_date(job_specs, "pass_started") or self._get_timeline_date(job_specs, "created")
+ pass_date_completed = self.time()
pass_history.append({
"pass_nr": current_pass,
- "completed_at": self.time(),
+ "date_started": pass_date_started,
+ "date_completed": pass_date_completed,
+ "duration": round(pass_date_completed - pass_date_started, 2) if pass_date_started else None,
"reports": {addr: w.get("report_cid") for addr, w in workers.items()},
"llm_analysis_cid": analysis_cid,
})
job_specs["pass_history"] = pass_history
+ self._emit_timeline_event(
+ job_specs, "llm_analysis",
+ f"Manual LLM analysis completed",
+ actor_type="user",
+ meta={"analysis_cid": analysis_cid, "pass_nr": pass_history[-1].get("pass_nr") if pass_history else current_pass}
+ )
self.chainstore_hset(hkey=self.cfg_instance_id, key=job_id, value=job_specs)
self.P(f"Manual LLM analysis saved for job {job_id}, CID: {analysis_cid}")
except Exception as e:
diff --git a/extensions/business/cybersec/red_mesh/redmesh_llm_agent_api.py b/extensions/business/cybersec/red_mesh/redmesh_llm_agent_api.py
index 27e2752f..e2a43d55 100644
--- a/extensions/business/cybersec/red_mesh/redmesh_llm_agent_api.py
+++ b/extensions/business/cybersec/red_mesh/redmesh_llm_agent_api.py
@@ -49,6 +49,7 @@
LLM_ANALYSIS_SECURITY_ASSESSMENT,
LLM_ANALYSIS_VULNERABILITY_SUMMARY,
LLM_ANALYSIS_REMEDIATION_PLAN,
+ LLM_ANALYSIS_QUICK_SUMMARY,
)
__VER__ = '0.1.0'
@@ -123,6 +124,8 @@
5. Verification steps to confirm remediation
Be practical and provide copy-paste ready solutions where possible.""",
+
+ LLM_ANALYSIS_QUICK_SUMMARY: """You are a cybersecurity expert. Based on the scan results below, write a quick executive summary in exactly 2-4 sentences. Cover: how many ports/services were found, the overall risk posture (critical/high/medium/low), and the single most important finding or action item. Be specific but extremely concise -- this is a dashboard glance summary, not a full report.""",
}
@@ -503,6 +506,7 @@ def analyze_scan(
LLM_ANALYSIS_SECURITY_ASSESSMENT,
LLM_ANALYSIS_VULNERABILITY_SUMMARY,
LLM_ANALYSIS_REMEDIATION_PLAN,
+ LLM_ANALYSIS_QUICK_SUMMARY,
]
if analysis_type not in valid_types:
return {
@@ -535,7 +539,12 @@ def analyze_scan(
# Build and send request
# Use higher max_tokens for analysis by default
- effective_max_tokens = max_tokens if max_tokens is not None else 2048
+ if max_tokens is not None:
+ effective_max_tokens = max_tokens
+ elif analysis_type == LLM_ANALYSIS_QUICK_SUMMARY:
+ effective_max_tokens = 256
+ else:
+ effective_max_tokens = 2048
payload = self._build_deepseek_request(
messages=messages,
diff --git a/extensions/business/cybersec/red_mesh/redmesh_llm_agent_mixin.py b/extensions/business/cybersec/red_mesh/redmesh_llm_agent_mixin.py
index a71fb17a..1085dfa1 100644
--- a/extensions/business/cybersec/red_mesh/redmesh_llm_agent_mixin.py
+++ b/extensions/business/cybersec/red_mesh/redmesh_llm_agent_mixin.py
@@ -282,6 +282,11 @@ def _run_aggregated_llm_analysis(
if entry.get("pass_nr") == pass_nr:
entry["llm_analysis_cid"] = analysis_cid
break
+ self._emit_timeline_event(
+ job_specs, "llm_analysis",
+ f"LLM analysis completed for pass {pass_nr}",
+ meta={"analysis_cid": analysis_cid, "pass_nr": pass_nr}
+ )
self.P(f"LLM analysis for pass {pass_nr} saved, CID: {analysis_cid}")
return analysis_cid
else:
@@ -291,6 +296,101 @@ def _run_aggregated_llm_analysis(
self.P(f"Error saving LLM analysis to R1FS: {e}", color='r')
return None
+ def _run_quick_summary_analysis(
+ self,
+ job_id: str,
+ job_specs: dict,
+ workers: dict,
+ pass_nr: int = None
+ ) -> Optional[str]:
+ """
+ Run a short (2-4 sentence) AI quick summary on the aggregated report.
+
+ Same pattern as _run_aggregated_llm_analysis but uses the quick_summary
+ analysis type with a low token budget.
+
+ Parameters
+ ----------
+ job_id : str
+ Identifier of the job.
+ job_specs : dict
+ Job specification (will be updated with quick_summary_cid).
+ workers : dict
+ Worker entries containing report data.
+ pass_nr : int, optional
+ Pass number for continuous monitoring jobs.
+
+ Returns
+ -------
+ str or None
+ Quick summary CID if successful, None otherwise.
+ """
+ target = job_specs.get("target", "unknown")
+ pass_info = f" (pass {pass_nr})" if pass_nr else ""
+ self.P(f"Running quick summary analysis for job {job_id}{pass_info}, target {target}...")
+
+ # Collect and aggregate reports from all workers
+ aggregated_report = self._collect_aggregated_report(workers)
+
+ if not aggregated_report:
+ self.P(f"No data for quick summary for job {job_id}", color='y')
+ return None
+
+ # Add job metadata to report for context
+ aggregated_report["_job_metadata"] = {
+ "job_id": job_id,
+ "target": target,
+ "num_workers": len(workers),
+ "worker_addresses": list(workers.keys()),
+ "start_port": job_specs.get("start_port"),
+ "end_port": job_specs.get("end_port"),
+ "enabled_features": job_specs.get("enabled_features", []),
+ "run_mode": job_specs.get("run_mode", "SINGLEPASS"),
+ "pass_nr": pass_nr,
+ }
+
+ # Call LLM analysis with quick_summary type
+ analysis_result = self._call_llm_agent_api(
+ endpoint="/analyze_scan",
+ method="POST",
+ payload={
+ "scan_results": aggregated_report,
+ "analysis_type": "quick_summary",
+ "focus_areas": None,
+ }
+ )
+
+ if not analysis_result or "error" in analysis_result:
+ self.P(
+ f"Quick summary failed for job {job_id}: {analysis_result.get('error') if analysis_result else 'No response'}",
+ color='y'
+ )
+ return None
+
+ # Save to R1FS
+ try:
+ summary_cid = self.r1fs.add_json(analysis_result, show_logs=False)
+ if summary_cid:
+ # Store in pass_history
+ pass_history = job_specs.get("pass_history", [])
+ for entry in pass_history:
+ if entry.get("pass_nr") == pass_nr:
+ entry["quick_summary_cid"] = summary_cid
+ break
+ self._emit_timeline_event(
+ job_specs, "llm_analysis",
+ f"Quick summary completed for pass {pass_nr}",
+ meta={"quick_summary_cid": summary_cid, "pass_nr": pass_nr}
+ )
+ self.P(f"Quick summary for pass {pass_nr} saved, CID: {summary_cid}")
+ return summary_cid
+ else:
+ self.P(f"Failed to save quick summary to R1FS for job {job_id}", color='y')
+ return None
+ except Exception as e:
+ self.P(f"Error saving quick summary to R1FS: {e}", color='r')
+ return None
+
def _get_llm_health_status(self) -> dict:
"""
Check health of the LLM Agent API connection.
diff --git a/extensions/business/cybersec/red_mesh/redmesh_utils.py b/extensions/business/cybersec/red_mesh/redmesh_utils.py
index d1d93865..8dcbbb51 100644
--- a/extensions/business/cybersec/red_mesh/redmesh_utils.py
+++ b/extensions/business/cybersec/red_mesh/redmesh_utils.py
@@ -11,7 +11,17 @@
from copy import deepcopy
from .service_mixin import _ServiceInfoMixin
-from .web_mixin import _WebTestsMixin
+from .web_discovery_mixin import _WebDiscoveryMixin
+from .web_hardening_mixin import _WebHardeningMixin
+from .web_api_mixin import _WebApiExposureMixin
+from .web_injection_mixin import _WebInjectionMixin
+from .correlation_mixin import _CorrelationMixin
+from .constants import (
+ PROBE_PROTOCOL_MAP, WEB_PROTOCOLS,
+ WELL_KNOWN_PORTS as _WELL_KNOWN_PORTS,
+ FINGERPRINT_TIMEOUT, FINGERPRINT_MAX_BANNER, FINGERPRINT_HTTP_TIMEOUT,
+ FINGERPRINT_NUDGE_TIMEOUT,
+)
COMMON_PORTS = [
@@ -26,7 +36,11 @@
class PentestLocalWorker(
_ServiceInfoMixin,
- _WebTestsMixin
+ _WebDiscoveryMixin,
+ _WebHardeningMixin,
+ _WebApiExposureMixin,
+ _WebInjectionMixin,
+ _CorrelationMixin,
):
"""
Execute a pentest workflow against a target on a dedicated thread.
@@ -61,6 +75,10 @@ def __init__(
enabled_features=None,
scan_min_delay: float = 0.0,
scan_max_delay: float = 0.0,
+ ics_safe_mode: bool = True,
+ rate_limit_enabled: bool = True,
+ scanner_identity: str = "probe.redmesh.local",
+ scanner_user_agent: str = "",
):
"""
Initialize a pentest worker with target ports and exclusions.
@@ -89,6 +107,14 @@ def __init__(
Minimum random delay (seconds) between operations (Dune sand walking).
scan_max_delay : float, optional
Maximum random delay (seconds) between operations (Dune sand walking).
+ ics_safe_mode : bool, optional
+ Halt probing when ICS/SCADA indicators are detected.
+ rate_limit_enabled : bool, optional
+ Enforce minimum 100ms delay between probes when sand walking is disabled.
+ scanner_identity : str, optional
+ EHLO domain for SMTP probes.
+ scanner_user_agent : str, optional
+ HTTP User-Agent header for web probes.
Raises
------
@@ -113,6 +139,11 @@ def __init__(
self.owner = owner
self.scan_min_delay = scan_min_delay
self.scan_max_delay = scan_max_delay
+ self.ics_safe_mode = ics_safe_mode
+ self._ics_detected = False
+ self.rate_limit_enabled = rate_limit_enabled
+ self.scanner_identity = scanner_identity
+ self.scanner_user_agent = scanner_user_agent
self.P(f"Initializing pentest worker {self.local_worker_id} for target {self.target}...")
# port handling
@@ -147,9 +178,21 @@ def __init__(
"web_tested": False,
"web_tests_info": {},
+ "port_protocols": {},
+ "port_banners": {},
+
"completed_tests": [],
"done": False,
"canceled": False,
+
+ "scan_metadata": {
+ "os_claims": {},
+ "internal_ips": [],
+ "container_ids": [],
+ "timezone_hints": [],
+ "server_versions": {},
+ },
+ "correlation_findings": [],
}
self.__all_features = self._get_all_features()
@@ -200,12 +243,16 @@ def get_worker_specific_result_fields():
return {
"start_port" : min,
"end_port" : max,
- "ports_scanned" : sum,
-
+ "ports_scanned" : sum,
+
"open_ports" : list,
"service_info" : dict,
"web_tests_info" : dict,
"completed_tests" : list,
+ "port_protocols" : dict,
+ "port_banners" : dict,
+ "scan_metadata" : dict,
+ "correlation_findings" : list,
}
@@ -226,11 +273,11 @@ def get_status(self, for_aggregations=False):
completed_tests = self.state.get("completed_tests", [])
open_ports = self.state.get("open_ports", [])
if open_ports:
- # Full work: port scan + all enabled features + 2 completion markers
- max_features = len(self.__enabled_features) + 3
+      # Full work: port scan + fingerprint + all enabled features + 2 completion markers (NOTE(review): execute_job now also appends "correlation_completed" — verify this should not be + 5, else progress can exceed 100%)
+ max_features = len(self.__enabled_features) + 4
else:
- # No open ports: just port scan + 2 completion markers
- max_features = 3
+      # No open ports: port scan + fingerprint + service_info_completed + web_tests_completed (NOTE(review): "correlation_completed" is also appended unconditionally — verify max_features should not be 5)
+ max_features = 4
progress = f"{(len(completed_tests) / max_features) * 100:.1f}%"
dct_status = {
@@ -260,6 +307,12 @@ def get_status(self, for_aggregations=False):
dct_status["completed_tests"] = self.state["completed_tests"]
+ dct_status["port_protocols"] = self.state.get("port_protocols", {})
+ dct_status["port_banners"] = self.state.get("port_banners", {})
+
+ dct_status["scan_metadata"] = self.state.get("scan_metadata", {})
+ dct_status["correlation_findings"] = self.state.get("correlation_findings", [])
+
return dct_status
@@ -335,7 +388,10 @@ def _interruptible_sleep(self):
True if stop was requested (should exit), False otherwise.
"""
if self.scan_max_delay <= 0:
- return False # Delays disabled
+ if self.rate_limit_enabled:
+ time.sleep(0.1) # Minimum 100ms between probes
+ return self.stop_event.is_set()
+ return False # Delays disabled, no rate limit
delay = random.uniform(self.scan_min_delay, self.scan_max_delay)
time.sleep(delay)
# TODO: while elapsed < delay with sleep(0.1) could be used for more granular interruptible sleep
@@ -358,6 +414,10 @@ def execute_job(self):
if not self._check_stopped():
self._scan_ports_step()
+ if not self._check_stopped():
+ self._fingerprint_ports()
+ self.state["completed_tests"].append("fingerprint_completed")
+
if not self._check_stopped():
self._gather_service_info()
self.state["completed_tests"].append("service_info_completed")
@@ -366,6 +426,10 @@ def execute_job(self):
self._run_web_tests()
self.state["completed_tests"].append("web_tests_completed")
+ if not self._check_stopped():
+ self._post_scan_correlate()
+ self.state["completed_tests"].append("correlation_completed")
+
self.state['done'] = True
self.P(f"Job completed. Ports open and checked: {self.state['open_ports']}")
@@ -459,6 +523,240 @@ def _scan_ports_step(self, batch_size=None, batch_nr=1):
return
+ def _fingerprint_ports(self):
+ """
+ Classify each open port by protocol using passive banner grabbing.
+
+ For each open port the method attempts, in order:
+
+ 1. **Passive banner grab** — connect and recv without sending data.
+ 2. **Banner-based classification** — pattern-match known protocol greetings.
+ 3. **Well-known port lookup** — fall back to ``WELL_KNOWN_PORTS``.
+ 4. **Generic nudge probe** — send ``\\r\\n`` to elicit a response from
+ services that wait for client input (honeypots, RPC, custom daemons).
+ 5. **Active HTTP probe** — minimal ``HEAD /`` request for silent HTTP servers.
+ 6. **Default** — mark the port as ``"unknown"``.
+
+ Results are stored in ``state["port_protocols"]`` and
+ ``state["port_banners"]``.
+
+ Returns
+ -------
+ None
+ """
+ open_ports = self.state["open_ports"]
+ if not open_ports:
+ self.P("No open ports to fingerprint.")
+ return
+
+ target = self.target
+ port_protocols = {}
+ port_banners = {}
+
+ self.P(f"Fingerprinting {len(open_ports)} open ports.")
+
+ for port in open_ports:
+ if self.stop_event.is_set():
+ return
+
+ protocol = None
+ banner_text = ""
+
+ # --- 1. Passive banner grab ---
+ try:
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.settimeout(FINGERPRINT_TIMEOUT)
+ sock.connect((target, port))
+ try:
+ raw = sock.recv(FINGERPRINT_MAX_BANNER)
+ except (socket.timeout, OSError):
+ raw = b""
+ sock.close()
+ except Exception:
+ raw = b""
+
+ # --- Sanitize banner ---
+ if raw:
+ banner_text = ''.join(
+ ch if 32 <= ord(ch) < 127 else '.' for ch in raw[:FINGERPRINT_MAX_BANNER].decode("utf-8", errors="replace")
+ )
+
+ # --- 2. Classify banner by content ---
+ if raw:
+ text = raw.decode("utf-8", errors="replace")
+ text_upper = text.upper()
+
+ if text.startswith("SSH-"):
+ protocol = "ssh"
+ elif text.startswith("220"):
+ if "FTP" in text_upper:
+ protocol = "ftp"
+ elif "SMTP" in text_upper or "ESMTP" in text_upper:
+ protocol = "smtp"
+ else:
+ protocol = _WELL_KNOWN_PORTS.get(port, "ftp")
+ elif text.startswith("RFB "):
+ protocol = "vnc"
+ elif len(raw) >= 7 and raw[3:4] == b'\x00' and raw[4:5] == b'\x0a':
+ # MySQL greeting: 3-byte payload len + seq=0x00 + protocol version 0x0a + version string
+ # Validate payload length (bytes 0-2 LE) is sane and version string is printable ASCII.
+ _pkt_len = int.from_bytes(raw[0:3], 'little')
+ if 10 <= _pkt_len <= 512:
+ _ver_end = raw.find(b'\x00', 5)
+ if _ver_end > 5 and all(32 <= b < 127 for b in raw[5:_ver_end]):
+ protocol = "mysql"
+ elif "login:" in text.lower():
+ protocol = "telnet"
+ elif len(raw) >= 3 and raw[0:1] == b'\xff' and raw[1:2] in (b'\xfb', b'\xfc', b'\xfd', b'\xfe'):
+ # Telnet IAC negotiation: 0xFF (IAC) + WILL/WONT/DO/DONT + option byte (RFC 854)
+ protocol = "telnet"
+ elif text.startswith("HTTP/"):
+ protocol = "http"
+ elif text.startswith("+OK"):
+ protocol = "pop3"
+ elif text.startswith("* OK"):
+ protocol = "imap"
+
+ # --- 3. Well-known port lookup ---
+ if protocol is None:
+ protocol = _WELL_KNOWN_PORTS.get(port)
+
+ # --- 4. Generic nudge probe ---
+ # Some services (honeypots, RPC, custom daemons) don't speak first
+ # but will respond to any input. Send a minimal \r\n nudge.
+ if protocol is None:
+ try:
+ nudge_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ nudge_sock.settimeout(FINGERPRINT_NUDGE_TIMEOUT)
+ nudge_sock.connect((target, port))
+ nudge_sock.sendall(b"\r\n")
+ try:
+ nudge_resp = nudge_sock.recv(FINGERPRINT_MAX_BANNER)
+ except (socket.timeout, OSError):
+ nudge_resp = b""
+ nudge_sock.close()
+ except Exception:
+ nudge_resp = b""
+
+ if nudge_resp:
+ nudge_text = nudge_resp.decode("utf-8", errors="replace")
+ if not banner_text:
+ banner_text = ''.join(
+ ch if 32 <= ord(ch) < 127 else '.'
+ for ch in nudge_text[:FINGERPRINT_MAX_BANNER]
+ )
+ if nudge_text.startswith("HTTP/"):
+ protocol = "http"
+          elif "<html" in nudge_text.lower():
+            protocol = "http"
+          elif len(nudge_resp) >= 3 and nudge_resp[0:1] == b'\xff' and nudge_resp[1:2] in (b'\xfb', b'\xfc', b'\xfd', b'\xfe'):
+ # Telnet IAC negotiation: 0xFF (IAC) + WILL/WONT/DO/DONT + option byte (RFC 854)
+ protocol = "telnet"
+
+ # --- 5. Active HTTP probe ---
+ if protocol is None:
+ try:
+ http_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ http_sock.settimeout(FINGERPRINT_HTTP_TIMEOUT)
+ http_sock.connect((target, port))
+ http_sock.sendall(f"HEAD / HTTP/1.0\r\nHost: {target}\r\n\r\n".encode())
+ try:
+ http_resp = http_sock.recv(FINGERPRINT_MAX_BANNER)
+ except (socket.timeout, OSError):
+ http_resp = b""
+ http_sock.close()
+ if http_resp:
+ http_text = http_resp.decode("utf-8", errors="replace")
+ if http_text.startswith("HTTP/"):
+ protocol = "http"
+                    elif "<html" in http_text.lower():
+                        protocol = "http"
+                # Modbus fallback: MBAP-framed Read Device Identification
+                # (unit 1, function 0x2B, MEI type 0x0E, read code 0x01).
+                if protocol is None:
+                    mb_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+                    mb_sock.settimeout(FINGERPRINT_TIMEOUT)
+                    mb_sock.connect((target, port))
+                    mb_sock.sendall(b'\x00\x01\x00\x00\x00\x05\x01\x2b\x0e\x01\x00')
+                    try:
+                        mb_resp = mb_sock.recv(FINGERPRINT_MAX_BANNER)
+                    except (socket.timeout, OSError):
+                        mb_resp = b""
+                    mb_sock.close()
+                    # Valid reply echoes protocol id 0x0000 and function 0x2B
+                    if (len(mb_resp) >= 8
+                            and mb_resp[2:4] == b'\x00\x00'
+                            and mb_resp[7:8] == b'\x2b'):
+                        protocol = "modbus"
+ protocol = "modbus"
+ except Exception:
+ pass
+
+ # --- 6. Default ---
+ if protocol is None:
+ protocol = "unknown"
+
+ port_protocols[port] = protocol
+ port_banners[port] = banner_text
+ self.P(f"Port {port} fingerprinted as '{protocol}'.")
+
+ # Dune sand walking - random delay between fingerprint probes
+ if self._interruptible_sleep():
+ return # Stop was requested during sleep
+
+ self.state["port_protocols"] = port_protocols
+ self.state["port_banners"] = port_banners
+ self.P(f"Fingerprinting complete: {port_protocols}")
+
+
+ def _is_ics_finding(self, probe_result):
+ """
+ Check if a probe result contains ICS/SCADA indicators.
+
+ Parameters
+ ----------
+ probe_result : dict
+ Structured result from a service probe.
+
+ Returns
+ -------
+ bool
+ True if ICS keywords are found in any finding.
+ """
+ if not isinstance(probe_result, dict):
+ return False
+ for finding in probe_result.get("findings", []):
+ title = (finding.get("title") or "").lower()
+ evidence = (finding.get("evidence") or "").lower()
+ combined = title + " " + evidence
+ ics_keywords = [
+ "modbus", "siemens", "simatic", "plc", "scada",
+ "schneider", "allen-bradley", "bacnet", "dnp3",
+ "iec 61850", "iec61850", "profinet", "s7comm",
+ ]
+ if any(kw in combined for kw in ics_keywords):
+ return True
+ return False
+
+
def _gather_service_info(self):
"""
Gather banner or basic information from each newly open port.
@@ -475,20 +773,46 @@ def _gather_service_info(self):
self.P(f"Gathering service info for {len(open_ports)} open ports.")
target = self.target
service_info_methods = [m for m in self.__enabled_features if m.startswith("_service_info_")]
+ port_protocols = self.state.get("port_protocols", {})
aggregated_info = []
for method in service_info_methods:
func = getattr(self, method)
+ target_protocols = PROBE_PROTOCOL_MAP.get(method) # None → run unconditionally
method_info = []
for port in open_ports:
if self.stop_event.is_set():
return
+ # Route probe only to ports matching its target protocol
+ # When port_protocols is empty (fingerprinting didn't run), skip filtering
+ if target_protocols is not None and port_protocols:
+ port_proto = port_protocols.get(port, "unknown")
+ if port_proto not in target_protocols:
+ continue
info = func(target, port)
- if port not in self.state["service_info"]:
- self.state["service_info"][port] = {}
- self.state["service_info"][port][method] = info
if info is not None:
+ if port not in self.state["service_info"]:
+ self.state["service_info"][port] = {}
+ self.state["service_info"][port][method] = info
method_info.append(f"{method}: {port}: {info}")
+ # ICS Safe Mode: halt further probes if ICS detected
+ if self.ics_safe_mode and not self._ics_detected and self._is_ics_finding(info):
+ self._ics_detected = True
+ self.P(f"ICS device detected on {target}:{port} — halting aggressive probes (ICS Safe Mode)")
+ from .findings import Finding, Severity, probe_result as _pr
+ ics_halt = _pr(findings=[Finding(
+ severity=Severity.HIGH,
+ title="ICS device detected — scan halted (ICS Safe Mode)",
+ description=f"Industrial control system indicators found on {target}:{port}. "
+ "Further probing halted to prevent potential disruption.",
+ evidence=f"Triggered by probe {method} on port {port}",
+ remediation="Isolate ICS devices on dedicated OT networks.",
+ cwe_id="CWE-284",
+ confidence="firm",
+ )])
+ self.state["service_info"][port]["_ics_safe_halt"] = ics_halt
+ break # Stop the method loop — no more probes on this target
+
# Dune sand walking - random delay before each service probe
if self._interruptible_sleep():
return # Stop was requested during sleep
@@ -500,6 +824,10 @@ def _gather_service_info(self):
f"Method {method} findings:\n{json.dumps(method_info, indent=2)}"
)
self.state["completed_tests"].append(method)
+
+ # ICS Safe Mode: break outer loop if ICS was detected
+ if self._ics_detected:
+ break
# end for each method
return aggregated_info
@@ -518,15 +846,20 @@ def _run_web_tests(self):
self.P("No open ports to run web tests on.")
return
- ports_to_test = list(open_ports)
+ port_protocols = self.state.get("port_protocols", {})
+ if port_protocols:
+ ports_to_test = [p for p in open_ports if port_protocols.get(p, "unknown") in WEB_PROTOCOLS]
+ else:
+ # Fingerprinting didn't run (e.g., direct test call) — fall back to all ports
+ ports_to_test = list(open_ports)
+ if not ports_to_test:
+ self.P("No HTTP/HTTPS ports detected, skipping web tests.")
+ self.state["web_tested"] = True
+ return
self.P(
f"Running web tests on {len(ports_to_test)} ports."
)
target = self.target
-
- if not ports_to_test:
- self.state["web_tested"] = True
- return
result = []
web_tests_methods = [m for m in self.__enabled_features if m.startswith("_web_test_")]
for method in web_tests_methods:
diff --git a/extensions/business/cybersec/red_mesh/service_mixin.py b/extensions/business/cybersec/red_mesh/service_mixin.py
index 6f225e23..22891003 100644
--- a/extensions/business/cybersec/red_mesh/service_mixin.py
+++ b/extensions/business/cybersec/red_mesh/service_mixin.py
@@ -1,4 +1,5 @@
import random
+import re as _re
import socket
import struct
import ftplib
@@ -6,6 +7,50 @@
import ssl
from datetime import datetime
+import paramiko
+
+from .findings import Finding, Severity, probe_result, probe_error
+from .cve_db import check_cves
+
+# Default credentials commonly found on exposed SSH services.
+# Kept intentionally small — this is a quick check, not a brute-force.
+_SSH_DEFAULT_CREDS = [
+ ("root", "root"),
+ ("root", "toor"),
+ ("root", "password"),
+ ("admin", "admin"),
+ ("admin", "password"),
+ ("user", "user"),
+ ("test", "test"),
+]
+
+# Default credentials for FTP services.
+_FTP_DEFAULT_CREDS = [
+ ("root", "root"),
+ ("admin", "admin"),
+ ("admin", "password"),
+ ("ftp", "ftp"),
+ ("user", "user"),
+ ("test", "test"),
+]
+
+# Default credentials for Telnet services.
+_TELNET_DEFAULT_CREDS = [
+ ("root", "root"),
+ ("root", "toor"),
+ ("root", "password"),
+ ("admin", "admin"),
+ ("admin", "password"),
+ ("user", "user"),
+ ("test", "test"),
+]
+
+_HTTP_SERVER_RE = _re.compile(
+ r'(Apache|nginx)[/ ]+(\d+(?:\.\d+)+)', _re.IGNORECASE,
+)
+_HTTP_PRODUCT_MAP = {'apache': 'apache', 'nginx': 'nginx'}
+
+
class _ServiceInfoMixin:
"""
Network service banner probes feeding RedMesh reports.
@@ -15,10 +60,24 @@ class _ServiceInfoMixin:
that `PentestLocalWorker` threads can run without heavy dependencies while
still surfacing high-signal clues.
"""
-
- def _service_info_80(self, target, port):
+
+ def _emit_metadata(self, category, key_or_item, value=None):
+ """Safely append to scan_metadata sub-dicts without crashing if state is uninitialized."""
+ meta = self.state.get("scan_metadata")
+ if meta is None:
+ return
+ bucket = meta.get(category)
+ if bucket is None:
+ return
+ if isinstance(bucket, dict):
+ bucket[key_or_item] = value
+ elif isinstance(bucket, list):
+ bucket.append(key_or_item)
+
+ def _service_info_http(self, target, port): # default port: 80
"""
- Collect HTTP banner and server metadata for common web ports.
+ Assess HTTP service: server fingerprint, technology detection,
+ dangerous HTTP methods, and page title extraction.
Parameters
----------
@@ -29,25 +88,197 @@ def _service_info_80(self, target, port):
Returns
-------
- str | None
- Banner summary or error message.
+ dict
+ Structured findings.
"""
- info = None
+ import re as _re
+
+ findings = []
+ scheme = "https" if port in (443, 8443) else "http"
+ url = f"{scheme}://{target}" if port in (80, 443) else f"{scheme}://{target}:{port}"
+
+ result = {
+ "banner": None,
+ "server": None,
+ "title": None,
+ "technologies": [],
+ "dangerous_methods": [],
+ }
+
+ # --- 1. GET request — banner, server, title, tech fingerprint ---
try:
- scheme = "https" if port in (443, 8443) else "http"
- url = f"{scheme}://{target}"
- if port not in (80, 443):
- url = f"{scheme}://{target}:{port}"
self.P(f"Fetching {url} for banner...")
- resp = requests.get(url, timeout=3, verify=False)
- info = (f"HTTP {resp.status_code} {resp.reason}; Server: {resp.headers.get('Server')}")
+ ua = getattr(self, 'scanner_user_agent', '')
+ headers = {'User-Agent': ua} if ua else {}
+ resp = requests.get(url, timeout=5, verify=False, allow_redirects=True, headers=headers)
+
+ result["banner"] = f"HTTP {resp.status_code} {resp.reason}"
+ result["server"] = resp.headers.get("Server")
+ if result["server"]:
+ self._emit_metadata("server_versions", port, result["server"])
+ if result["server"]:
+ _m = _HTTP_SERVER_RE.search(result["server"])
+ if _m:
+ _cve_product = _HTTP_PRODUCT_MAP.get(_m.group(1).lower())
+ if _cve_product:
+ findings += check_cves(_cve_product, _m.group(2))
+ powered_by = resp.headers.get("X-Powered-By")
+
+ # Page title
+ title_match = _re.search(
+        r"<title>(.*?)</title>", resp.text[:5000], _re.IGNORECASE | _re.DOTALL
+ )
+ if title_match:
+ result["title"] = title_match.group(1).strip()[:100]
+
+ # Technology fingerprinting
+ body_lower = resp.text[:8000].lower()
+ tech_signatures = {
+ "WordPress": ["wp-content", "wp-includes"],
+ "Joomla": ["com_content", "/media/jui/"],
+ "Drupal": ["drupal.js", "sites/default/files"],
+ "Django": ["csrfmiddlewaretoken"],
+ "PHP": [".php", "phpsessid"],
+ "ASP.NET": ["__viewstate", ".aspx"],
+ "React": ["_next/", "__next_data__", "react"],
+ }
+ techs = []
+ if result["server"]:
+ techs.append(result["server"])
+ if powered_by:
+ techs.append(powered_by)
+ for tech, markers in tech_signatures.items():
+ if any(m in body_lower for m in markers):
+ techs.append(tech)
+ result["technologies"] = techs
+
except Exception as e:
- info = f"HTTP probe failed on {target}:{port}: {e}"
- self.P(info, color='y')
- return info
+ # HTTP library failed (e.g. empty reply, connection reset).
+ # Fall back to raw socket probe — try HTTP/1.0 without Host header
+ # (some servers like nginx drop requests with unrecognized Host values).
+ try:
+ _s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ _s.settimeout(3)
+ _s.connect((target, port))
+ # Use HTTP/1.0 without Host — matches nmap's GetRequest probe
+ _s.send(b"GET / HTTP/1.0\r\n\r\n")
+ _raw = b""
+ while True:
+ chunk = _s.recv(4096)
+ if not chunk:
+ break
+ _raw += chunk
+ if len(_raw) > 16384:
+ break
+ _s.close()
+ _raw_str = _raw.decode("utf-8", errors="ignore")
+ if _raw_str:
+ lines = _raw_str.split("\r\n")
+ result["banner"] = lines[0].strip() if lines else "unknown"
+ for line in lines[1:]:
+ low = line.lower()
+ if low.startswith("server:"):
+ result["server"] = line.split(":", 1)[1].strip()
+ break
+ # Report that the server drops Host-header requests
+ findings.append(Finding(
+ severity=Severity.INFO,
+ title="HTTP service drops requests with Host header",
+ description=f"TCP port {port} returns empty replies for standard HTTP/1.1 "
+ "requests but responds to HTTP/1.0 without a Host header. "
+ "This indicates a server_name mismatch or intentional filtering.",
+ evidence=f"HTTP/1.1 with Host:{target} → empty reply; "
+ f"HTTP/1.0 without Host → {result['banner']}",
+ remediation="Configure a proper default server block or virtual host.",
+ cwe_id="CWE-200",
+ confidence="certain",
+ ))
+ # Check for directory listing in response body
+ body_start = _raw_str.find("\r\n\r\n")
+ if body_start > -1:
+ body = _raw_str[body_start + 4:]
+            if "directory listing" in body.lower() or "<title>index of" in body.lower():
+              findings.append(Finding(
+                severity=Severity.MEDIUM,
+                title="Directory listing exposed",
+                description=f"Port {port} serves an automatic directory index in its response body.",
+                evidence=body[:200],
+                remediation="Disable automatic directory indexing (e.g. Options -Indexes / autoindex off).",
+                cwe_id="CWE-548",
+                confidence="firm",
+              ))
+              title_m = _re.search(r"<title>(.*?)</title>", body[:5000], _re.IGNORECASE | _re.DOTALL)
+ if title_m:
+ result["title"] = title_m.group(1).strip()[:100]
+ else:
+ result["banner"] = "(empty reply)"
+ findings.append(Finding(
+ severity=Severity.INFO,
+ title="HTTP service returns empty reply",
+ description=f"TCP port {port} accepts connections but the server "
+ "closes without sending any HTTP response data.",
+ evidence=f"Raw socket to {target}:{port} — connected OK, received 0 bytes.",
+ remediation="Investigate why the server sends empty replies; "
+ "verify proxy/upstream configuration.",
+ cwe_id="CWE-200",
+ confidence="certain",
+ ))
+ except Exception:
+ return probe_error(target, port, "HTTP", e)
+ return probe_result(raw_data=result, findings=findings)
+
+ # --- 2. Dangerous HTTP methods ---
+ dangerous = []
+ for method in ("TRACE", "PUT", "DELETE"):
+ try:
+ r = requests.request(method, url, timeout=3, verify=False)
+ if r.status_code < 400:
+ dangerous.append(method)
+ except Exception:
+ pass
+
+ result["dangerous_methods"] = dangerous
+ if "TRACE" in dangerous:
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title="HTTP TRACE method enabled (cross-site tracing / XST attack vector).",
+ description="TRACE echoes request bodies back, enabling cross-site tracing attacks.",
+ evidence=f"TRACE {url} returned status < 400.",
+ remediation="Disable the TRACE method in the web server configuration.",
+ owasp_id="A05:2021",
+ cwe_id="CWE-693",
+ confidence="certain",
+ ))
+ if "PUT" in dangerous:
+ findings.append(Finding(
+ severity=Severity.HIGH,
+ title="HTTP PUT method enabled (potential unauthorized file upload).",
+ description="The PUT method allows uploading files to the server.",
+ evidence=f"PUT {url} returned status < 400.",
+ remediation="Disable the PUT method or restrict it to authenticated users.",
+ owasp_id="A01:2021",
+ cwe_id="CWE-749",
+ confidence="certain",
+ ))
+ if "DELETE" in dangerous:
+ findings.append(Finding(
+ severity=Severity.HIGH,
+ title="HTTP DELETE method enabled (potential unauthorized file deletion).",
+ description="The DELETE method allows removing resources from the server.",
+ evidence=f"DELETE {url} returned status < 400.",
+ remediation="Disable the DELETE method or restrict it to authenticated users.",
+ owasp_id="A01:2021",
+ cwe_id="CWE-749",
+ confidence="certain",
+ ))
+
+ return probe_result(raw_data=result, findings=findings)
- def _service_info_8080(self, target, port):
+ def _service_info_http_alt(self, target, port): # default port: 8080
"""
Probe alternate HTTP port 8080 for verbose banners.
@@ -60,30 +291,48 @@ def _service_info_8080(self, target, port):
Returns
-------
- str | None
- Banner text or error message.
+ dict
+ Structured findings.
"""
- info = None
+ # Skip standard HTTP ports — they are covered by _service_info_http.
+ if port in (80, 443):
+ return None
+
+ findings = []
+ raw = {"banner": None, "server": None}
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(2)
sock.connect((target, port))
- msg = "HEAD / HTTP/1.1\r\nHost: {}\r\n\r\n".format(target).encode('utf-8')
+ ua = getattr(self, 'scanner_user_agent', '')
+ ua_header = f"\r\nUser-Agent: {ua}" if ua else ""
+ msg = "HEAD / HTTP/1.1\r\nHost: {}{}\r\n\r\n".format(target, ua_header).encode('utf-8')
sock.send(bytes(msg))
data = sock.recv(1024).decode('utf-8', errors='ignore')
- if data:
- banner = ''.join(ch if 32 <= ord(ch) < 127 else '.' for ch in data)
- info = f"Banner on port {port}: \"{banner.strip()}\""
- else:
- info = "No banner (possibly protocol handshake needed)."
sock.close()
+
+ if data:
+ # Extract status line and Server header instead of dumping raw bytes
+ lines = data.split("\r\n")
+ status_line = lines[0].strip() if lines else "unknown"
+ raw["banner"] = status_line
+ for line in lines[1:]:
+ if line.lower().startswith("server:"):
+ raw["server"] = line.split(":", 1)[1].strip()
+ break
+
+ if raw["server"]:
+ _m = _HTTP_SERVER_RE.search(raw["server"])
+ if _m:
+ _cve_product = _HTTP_PRODUCT_MAP.get(_m.group(1).lower())
+ if _cve_product:
+ findings += check_cves(_cve_product, _m.group(2))
except Exception as e:
- info = f"HTTP-ALT probe failed on {target}:{port}: {e}"
- self.P(info, color='y')
- return info
+ return probe_error(target, port, "HTTP-ALT", e)
+ return probe_result(raw_data=raw, findings=findings)
- def _service_info_443(self, target, port):
+ def _service_info_https(self, target, port): # default port: 443
"""
Collect HTTPS response banner data for TLS services.
@@ -96,26 +345,45 @@ def _service_info_443(self, target, port):
Returns
-------
- str | None
- Banner summary or error message.
+ dict
+ Structured findings.
"""
- info = None
+ findings = []
+ raw = {"banner": None, "server": None}
try:
url = f"https://{target}"
if port != 443:
url = f"https://{target}:{port}"
self.P(f"Fetching {url} for banner...")
- resp = requests.get(url, timeout=3, verify=False)
- info = (f"HTTPS {resp.status_code} {resp.reason}; Server: {resp.headers.get('Server')}")
+ ua = getattr(self, 'scanner_user_agent', '')
+ headers = {'User-Agent': ua} if ua else {}
+ resp = requests.get(url, timeout=3, verify=False, headers=headers)
+ raw["banner"] = f"HTTPS {resp.status_code} {resp.reason}"
+ raw["server"] = resp.headers.get("Server")
+ if raw["server"]:
+ _m = _HTTP_SERVER_RE.search(raw["server"])
+ if _m:
+ _cve_product = _HTTP_PRODUCT_MAP.get(_m.group(1).lower())
+ if _cve_product:
+ findings += check_cves(_cve_product, _m.group(2))
+ findings.append(Finding(
+ severity=Severity.INFO,
+ title=f"HTTPS service detected ({resp.status_code} {resp.reason})",
+ description=f"HTTPS service on {target}:{port}.",
+ evidence=f"Server: {raw['server'] or 'not disclosed'}",
+ confidence="certain",
+ ))
except Exception as e:
- info = f"HTTPS probe failed on {target}:{port}: {e}"
- self.P(info, color='y')
- return info
+ return probe_error(target, port, "HTTPS", e)
+ return probe_result(raw_data=raw, findings=findings)
def _service_info_tls(self, target, port):
"""
- Inspect TLS handshake details and certificate lifetime.
+ Inspect TLS handshake, certificate chain, and cipher strength.
+
+ Uses a two-pass approach: unverified connect (always gets protocol/cipher),
+ then verified connect (detects self-signed / chain issues).
Parameters
----------
@@ -126,40 +394,284 @@ def _service_info_tls(self, target, port):
Returns
-------
- str | None
- TLS version/cipher summary or error message.
+ dict
+ Structured findings with protocol, cipher, cert details.
"""
- info = None
+ findings = []
+ raw = {"protocol": None, "cipher": None, "cert_subject": None, "cert_issuer": None}
+
+ # Pass 1: Unverified — always get protocol/cipher
+ proto, cipher, cert_der = self._tls_unverified_connect(target, port)
+ if proto is None:
+ return probe_error(target, port, "TLS", Exception("unverified connect failed"))
+
+ raw["protocol"], raw["cipher"] = proto, cipher
+ findings += self._tls_check_protocol(proto, cipher)
+
+ # Pass 1b: SAN parsing and signature check from DER cert
+ if cert_der:
+ san_dns, san_ips = self._tls_parse_san_from_der(cert_der)
+ raw["san_dns"] = san_dns
+ raw["san_ips"] = san_ips
+ for ip_str in san_ips:
+ try:
+ import ipaddress as _ipaddress
+ if _ipaddress.ip_address(ip_str).is_private:
+ self._emit_metadata("internal_ips", {"ip": ip_str, "source": f"tls_san:{port}"})
+ except (ValueError, TypeError):
+ pass
+ findings += self._tls_check_signature_algorithm(cert_der)
+ findings += self._tls_check_validity_period(cert_der)
+
+ # Pass 2: Verified — detect self-signed / chain issues
+ findings += self._tls_check_certificate(target, port, raw)
+
+ # Pass 3: Cert content checks (expiry, default CN)
+ findings += self._tls_check_expiry(raw)
+ findings += self._tls_check_default_cn(raw)
+
+ if not findings:
+ findings.append(Finding(Severity.INFO, f"TLS {proto} {cipher}", "TLS configuration adequate."))
+
+ return probe_result(raw_data=raw, findings=findings)
+
+ def _tls_unverified_connect(self, target, port):
+ """Unverified TLS connect to get protocol, cipher, and DER cert."""
try:
- context = ssl.create_default_context()
+ ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
+ ctx.check_hostname = False
+ ctx.verify_mode = ssl.CERT_NONE
with socket.create_connection((target, port), timeout=3) as sock:
- with context.wrap_socket(sock, server_hostname=target) as ssock:
- cert = ssock.getpeercert()
+ with ctx.wrap_socket(sock, server_hostname=target) as ssock:
proto = ssock.version()
- cipher = ssock.cipher()
- expires = cert.get("notAfter")
- info = f"TLS {proto} {cipher[0]}"
- if proto and proto.upper() in ("SSLV3", "SSLV2", "TLSV1", "TLSV1.1"):
- info = f"VULNERABILITY: Obsolete TLS protocol negotiated ({proto}) using {cipher[0]}"
- if expires:
- try:
- exp = datetime.strptime(expires, "%b %d %H:%M:%S %Y %Z")
- days = (exp - datetime.utcnow()).days
- if days <= 30:
- info = f"VULNERABILITY: TLS {proto} {cipher[0]}; certificate expires in {days} days"
- else:
- info = f"TLS {proto} {cipher[0]}; cert exp in {days} days"
- except Exception:
- info = f"TLS {proto} {cipher[0]}; cert expires {expires}"
+ cipher_info = ssock.cipher()
+ cipher_name = cipher_info[0] if cipher_info else "unknown"
+ cert_der = ssock.getpeercert(binary_form=True)
+ return proto, cipher_name, cert_der
except Exception as e:
- info = f"TLS probe failed on {target}:{port}: {e}"
- self.P(info, color='y')
- return info
+ self.P(f"TLS unverified connect failed on {target}:{port}: {e}", color='y')
+ return None, None, None
+
+ def _tls_check_protocol(self, proto, cipher):
+ """Flag obsolete TLS/SSL protocols and weak ciphers."""
+ findings = []
+ if proto and proto.upper() in ("SSLV2", "SSLV3", "TLSV1", "TLSV1.1"):
+ findings.append(Finding(
+ severity=Severity.HIGH,
+ title=f"Obsolete TLS protocol: {proto}",
+ description=f"Server negotiated {proto} with cipher {cipher}. "
+ f"SSLv2/v3 and TLS 1.0/1.1 are deprecated and vulnerable.",
+ evidence=f"protocol={proto}, cipher={cipher}",
+ remediation="Disable SSLv2/v3/TLS 1.0/1.1 and require TLS 1.2+.",
+ owasp_id="A02:2021",
+ cwe_id="CWE-326",
+ confidence="certain",
+ ))
+ if cipher and any(w in cipher.lower() for w in ("rc4", "des", "null", "export")):
+ findings.append(Finding(
+ severity=Severity.HIGH,
+ title=f"Weak TLS cipher: {cipher}",
+ description=f"Cipher {cipher} is considered cryptographically weak.",
+ evidence=f"cipher={cipher}",
+ remediation="Disable weak ciphers (RC4, DES, NULL, EXPORT).",
+ owasp_id="A02:2021",
+ cwe_id="CWE-327",
+ confidence="certain",
+ ))
+ return findings
+
+ def _tls_check_certificate(self, target, port, raw):
+ """Verified TLS pass — detect self-signed, untrusted issuer, hostname mismatch."""
+ findings = []
+ try:
+ ctx = ssl.create_default_context()
+ with socket.create_connection((target, port), timeout=3) as sock:
+ with ctx.wrap_socket(sock, server_hostname=target) as ssock:
+ cert = ssock.getpeercert()
+ subj = dict(x[0] for x in cert.get("subject", ()))
+ issuer = dict(x[0] for x in cert.get("issuer", ()))
+ raw["cert_subject"] = subj.get("commonName")
+ raw["cert_issuer"] = issuer.get("organizationName") or issuer.get("commonName")
+ raw["cert_not_after"] = cert.get("notAfter")
+ except ssl.SSLCertVerificationError as e:
+ err_msg = str(e).lower()
+ if "self-signed" in err_msg or "self signed" in err_msg:
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title="Self-signed TLS certificate",
+ description="The server presents a self-signed certificate that browsers will reject.",
+ evidence=str(e),
+ remediation="Replace with a certificate from a trusted CA.",
+ owasp_id="A02:2021",
+ cwe_id="CWE-295",
+ confidence="certain",
+ ))
+ elif "hostname mismatch" in err_msg:
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title="TLS certificate hostname mismatch",
+ description=f"Certificate CN/SAN does not match {target}.",
+ evidence=str(e),
+ remediation="Ensure the certificate covers the served hostname.",
+ owasp_id="A02:2021",
+ cwe_id="CWE-295",
+ confidence="certain",
+ ))
+ else:
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title="TLS certificate validation failed",
+ description="Certificate chain could not be verified.",
+ evidence=str(e),
+ remediation="Use a certificate from a trusted CA with a valid chain.",
+ owasp_id="A02:2021",
+ cwe_id="CWE-295",
+ confidence="firm",
+ ))
+ except Exception:
+ pass # Non-cert errors (connection reset, etc.) — skip
+ return findings
+
+ def _tls_check_expiry(self, raw):
+ """Check certificate expiry from raw dict."""
+ findings = []
+ expires = raw.get("cert_not_after")
+ if not expires:
+ return findings
+ try:
+ exp = datetime.strptime(expires, "%b %d %H:%M:%S %Y %Z")
+ days = (exp - datetime.utcnow()).days
+ raw["cert_days_remaining"] = days
+ if days < 0:
+ findings.append(Finding(
+ severity=Severity.HIGH,
+ title=f"TLS certificate expired ({-days} days ago)",
+ description="The certificate has already expired.",
+ evidence=f"notAfter={expires}",
+ remediation="Renew the certificate immediately.",
+ owasp_id="A02:2021",
+ cwe_id="CWE-298",
+ confidence="certain",
+ ))
+ elif days <= 30:
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title=f"TLS certificate expiring soon ({days} days)",
+ description=f"Certificate expires in {days} days.",
+ evidence=f"notAfter={expires}",
+ remediation="Renew the certificate before expiry.",
+ owasp_id="A02:2021",
+ cwe_id="CWE-298",
+ confidence="certain",
+ ))
+ except Exception:
+ pass
+ return findings
+
+ def _tls_check_default_cn(self, raw):
+ """Flag placeholder common names."""
+ findings = []
+ cn = raw.get("cert_subject")
+ if not cn:
+ return findings
+ cn_lower = cn.lower()
+ placeholders = ("example.com", "localhost", "internet widgits", "test", "changeme", "my company", "acme", "default")
+ if any(p in cn_lower for p in placeholders) or len(cn.strip()) <= 1:
+ findings.append(Finding(
+ severity=Severity.LOW,
+ title=f"TLS certificate placeholder CN: {cn}",
+ description="Certificate uses a default/placeholder common name.",
+ evidence=f"CN={cn}",
+ remediation="Replace with a certificate bearing the correct hostname.",
+ owasp_id="A02:2021",
+ cwe_id="CWE-295",
+ confidence="firm",
+ ))
+ return findings
+
+ def _tls_parse_san_from_der(self, cert_der):
+ """Parse SAN DNS names and IP addresses from a DER-encoded certificate."""
+ dns_names, ip_addresses = [], []
+ if not cert_der:
+ return dns_names, ip_addresses
+ try:
+ from cryptography import x509
+ cert = x509.load_der_x509_certificate(cert_der)
+ try:
+ san_ext = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName)
+ dns_names = san_ext.value.get_values_for_type(x509.DNSName)
+ ip_addresses = [str(ip) for ip in san_ext.value.get_values_for_type(x509.IPAddress)]
+ except x509.ExtensionNotFound:
+ pass
+ except Exception:
+ pass
+ return dns_names, ip_addresses
+
+ def _tls_check_signature_algorithm(self, cert_der):
+ """Flag SHA-1 or MD5 signature algorithms."""
+ findings = []
+ if not cert_der:
+ return findings
+ try:
+ from cryptography import x509
+ from cryptography.hazmat.primitives import hashes
+ cert = x509.load_der_x509_certificate(cert_der)
+ algo = cert.signature_hash_algorithm
+ if algo and isinstance(algo, (hashes.SHA1, hashes.MD5)):
+ algo_name = algo.name.upper()
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title=f"TLS certificate signed with weak algorithm: {algo_name}",
+ description=f"The certificate uses {algo_name} for its signature, which is cryptographically weak.",
+ evidence=f"signature_algorithm={algo_name}",
+ remediation="Replace with a certificate using SHA-256 or stronger.",
+ owasp_id="A02:2021",
+ cwe_id="CWE-327",
+ confidence="certain",
+ ))
+ except Exception:
+ pass
+ return findings
+
+ def _tls_check_validity_period(self, cert_der):
+ """Flag certificates with a total validity span >5 years (CA/Browser Forum violation)."""
+ findings = []
+ if not cert_der:
+ return findings
+ try:
+ from cryptography import x509
+ cert = x509.load_der_x509_certificate(cert_der)
+ span = cert.not_valid_after_utc - cert.not_valid_before_utc
+ if span.days > 5 * 365:
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title=f"TLS certificate validity span exceeds 5 years ({span.days} days)",
+ description="Certificates valid for more than 5 years violate CA/Browser Forum baseline requirements.",
+ evidence=f"not_before={cert.not_valid_before_utc}, not_after={cert.not_valid_after_utc}, span={span.days}d",
+ remediation="Reissue with a validity period of 398 days or less.",
+ owasp_id="A02:2021",
+ cwe_id="CWE-298",
+ confidence="certain",
+ ))
+ except Exception:
+ pass
+ return findings
+
+
+ def _service_info_ftp(self, target, port): # default port: 21
+ """
+ Assess FTP service security: banner, anonymous access, default creds,
+ server fingerprint, TLS support, write access, and credential validation.
+ Checks performed (in order):
- def _service_info_21(self, target, port):
- """
- Identify FTP banners and anonymous login exposure.
+ 1. Banner grab and SYST/FEAT fingerprint.
+ 2. Anonymous login attempt.
+ 3. Write access test (STOR) after anonymous login.
+ 4. Directory listing and traversal.
+ 5. TLS support check (AUTH TLS).
+ 6. Default credential check.
+ 7. Arbitrary credential acceptance test.
Parameters
----------
@@ -170,29 +682,267 @@ def _service_info_21(self, target, port):
Returns
-------
- str | None
- FTP banner info or vulnerability message.
+ dict
+ Structured findings with banner, vulnerabilities, server_info, etc.
"""
- info = None
+ findings = []
+ result = {
+ "banner": None,
+ "server_type": None,
+ "features": [],
+ "anonymous_access": False,
+ "write_access": False,
+ "tls_supported": False,
+ "accepted_credentials": [],
+ "directory_listing": None,
+ }
+
+ def _ftp_connect(user=None, passwd=None):
+ """Open a fresh FTP connection and optionally login."""
+ ftp = ftplib.FTP(timeout=5)
+ ftp.connect(target, port, timeout=5)
+ if user is not None:
+ ftp.login(user, passwd or "")
+ return ftp
+
+ # --- 1. Banner grab ---
try:
- ftp = ftplib.FTP(timeout=3)
- ftp.connect(target, port, timeout=3)
- banner = ftp.getwelcome()
- info = f"FTP banner: {banner}"
+ ftp = _ftp_connect()
+ result["banner"] = ftp.getwelcome()
+ except Exception as e:
+ return probe_error(target, port, "FTP", e)
+
+ # FTP server version CVE check
+ _ftp_m = _re.search(
+ r'(ProFTPD|vsftpd)[/ ]+(\d+(?:\.\d+)+)',
+ result["banner"], _re.IGNORECASE,
+ )
+ if _ftp_m:
+ _cve_product = {'proftpd': 'proftpd', 'vsftpd': 'vsftpd'}.get(_ftp_m.group(1).lower())
+ if _cve_product:
+ findings += check_cves(_cve_product, _ftp_m.group(2))
+
+ # --- 2. Anonymous login ---
+ try:
+ resp = ftp.login()
+ result["anonymous_access"] = True
+ findings.append(Finding(
+ severity=Severity.HIGH,
+ title="FTP allows anonymous login.",
+ description="The FTP server permits unauthenticated access via anonymous login.",
+ evidence="Anonymous login succeeded.",
+ remediation="Disable anonymous FTP access unless explicitly required.",
+ owasp_id="A07:2021",
+ cwe_id="CWE-287",
+ confidence="certain",
+ ))
+ except Exception:
+ # Anonymous failed — close and move on to credential tests
try:
- ftp.login() # attempt anonymous login
- info = f"VULNERABILITY: FTP allows anonymous login (banner: {banner})"
+ ftp.quit()
except Exception:
- info = f"FTP banner: {banner} | Anonymous login not allowed"
- ftp.quit()
- except Exception as e:
- info = f"FTP probe failed on {target}:{port}: {e}"
- self.P(info, color='y')
- return info
+ pass
+ ftp = None
+
+ # --- 2b. SYST / FEAT (after login — some servers require auth first) ---
+ if ftp:
+ try:
+ syst = ftp.sendcmd("SYST")
+ result["server_type"] = syst
+ except Exception:
+ pass
+
+ try:
+ feat_resp = ftp.sendcmd("FEAT")
+ feats = [
+ line.strip() for line in feat_resp.split("\n")
+ if line.strip() and not line.startswith("211")
+ ]
+ result["features"] = feats
+ except Exception:
+ pass
+
+ # --- 2c. PASV IP leak check ---
+ if ftp and result["anonymous_access"]:
+ try:
+ pasv_resp = ftp.sendcmd("PASV")
+ _pasv_match = _re.search(r'\((\d+),(\d+),(\d+),(\d+),(\d+),(\d+)\)', pasv_resp)
+ if _pasv_match:
+ pasv_ip = f"{_pasv_match.group(1)}.{_pasv_match.group(2)}.{_pasv_match.group(3)}.{_pasv_match.group(4)}"
+ if pasv_ip != target:
+ import ipaddress as _ipaddress
+ try:
+ if _ipaddress.ip_address(pasv_ip).is_private:
+ result["pasv_ip"] = pasv_ip
+ self._emit_metadata("internal_ips", {"ip": pasv_ip, "source": f"ftp_pasv:{port}"})
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title=f"FTP PASV leaks internal IP: {pasv_ip}",
+ description=f"PASV response reveals RFC1918 address {pasv_ip}, different from target {target}.",
+ evidence=f"PASV response: {pasv_resp}",
+ remediation="Configure FTP passive address masquerading to use the public IP.",
+ owasp_id="A05:2021",
+ cwe_id="CWE-200",
+ confidence="certain",
+ ))
+ except (ValueError, TypeError):
+ pass
+ except Exception:
+ pass
+
+ # --- 3. Write access test (only if anonymous login succeeded) ---
+ if ftp and result["anonymous_access"]:
+ import io
+ try:
+ ftp.set_pasv(True)
+ test_data = io.BytesIO(b"RedMesh write access probe")
+ resp = ftp.storbinary("STOR __redmesh_probe.txt", test_data)
+ if resp and resp.startswith("226"):
+ result["write_access"] = True
+ findings.append(Finding(
+ severity=Severity.CRITICAL,
+ title="FTP anonymous write access enabled (file upload possible).",
+ description="Anonymous users can upload files to the FTP server.",
+ evidence="STOR command succeeded with anonymous session.",
+ remediation="Remove write permissions for anonymous FTP users.",
+ owasp_id="A01:2021",
+ cwe_id="CWE-434",
+ confidence="certain",
+ ))
+ try:
+ ftp.delete("__redmesh_probe.txt")
+ except Exception:
+ pass
+ except Exception:
+ pass
+
+ # --- 4. Directory listing and traversal ---
+ if ftp:
+ try:
+ pwd = ftp.pwd()
+ files = []
+ try:
+ ftp.retrlines("LIST", files.append)
+ except Exception:
+ pass
+ if files:
+ result["directory_listing"] = files[:20]
+ except Exception:
+ pass
+
+ # Check if CWD allows directory traversal
+ for test_dir in ["/etc", "/var", ".."]:
+ try:
+ resp = ftp.cwd(test_dir)
+ if resp and (resp.startswith("250") or resp.startswith("200")):
+ findings.append(Finding(
+ severity=Severity.HIGH,
+ title=f"FTP directory traversal: CWD to '{test_dir}' succeeded.",
+ description="The FTP server allows changing to directories outside the intended root.",
+ evidence=f"CWD '{test_dir}' returned: {resp}",
+ remediation="Restrict FTP users to their home directory (chroot).",
+ owasp_id="A01:2021",
+ cwe_id="CWE-22",
+ confidence="certain",
+ ))
+ break
+ except Exception:
+ pass
+ try:
+ ftp.cwd("/")
+ except Exception:
+ pass
+
+ if ftp:
+ try:
+ ftp.quit()
+ except Exception:
+ pass
+
+ # --- 5. TLS support check ---
+ try:
+ ftp_tls = _ftp_connect()
+ resp = ftp_tls.sendcmd("AUTH TLS")
+ if resp.startswith("234"):
+ result["tls_supported"] = True
+ try:
+ ftp_tls.quit()
+ except Exception:
+ pass
+ except Exception:
+ if not result["tls_supported"]:
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title="FTP does not support TLS encryption (cleartext credentials).",
+ description="Credentials and data are transmitted in cleartext over the network.",
+ evidence="AUTH TLS command rejected or not supported.",
+ remediation="Enable FTPS (AUTH TLS) or migrate to SFTP.",
+ owasp_id="A02:2021",
+ cwe_id="CWE-319",
+ confidence="certain",
+ ))
+
+ # --- 6. Default credential check ---
+ for user, passwd in _FTP_DEFAULT_CREDS:
+ try:
+ ftp_cred = _ftp_connect(user, passwd)
+ result["accepted_credentials"].append(f"{user}:{passwd}")
+ findings.append(Finding(
+ severity=Severity.CRITICAL,
+ title=f"FTP default credential accepted: {user}:{passwd}",
+ description="The FTP server accepted a well-known default credential.",
+ evidence=f"Accepted credential: {user}:{passwd}",
+ remediation="Change default passwords and enforce strong credential policies.",
+ owasp_id="A07:2021",
+ cwe_id="CWE-798",
+ confidence="certain",
+ ))
+ try:
+ ftp_cred.quit()
+ except Exception:
+ pass
+ except (ftplib.error_perm, ftplib.error_reply):
+ pass
+ except Exception:
+ pass
+
+ # --- 7. Arbitrary credential acceptance test ---
+ import string as _string
+ ruser = "".join(random.choices(_string.ascii_lowercase, k=8))
+ rpass = "".join(random.choices(_string.ascii_letters + _string.digits, k=12))
+ try:
+ ftp_rand = _ftp_connect(ruser, rpass)
+ findings.append(Finding(
+ severity=Severity.CRITICAL,
+ title="FTP accepts arbitrary credentials",
+ description="Random credentials were accepted, indicating a dangerous misconfiguration or deceptive service.",
+ evidence=f"Accepted random creds {ruser}:{rpass}",
+ remediation="Investigate immediately — authentication is non-functional.",
+ owasp_id="A07:2021",
+ cwe_id="CWE-287",
+ confidence="certain",
+ ))
+ try:
+ ftp_rand.quit()
+ except Exception:
+ pass
+ except (ftplib.error_perm, ftplib.error_reply):
+ pass
+ except Exception:
+ pass
- def _service_info_22(self, target, port):
+ return probe_result(raw_data=result, findings=findings)
+
+ def _service_info_ssh(self, target, port): # default port: 22
"""
- Retrieve the SSH banner to fingerprint implementations.
+ Assess SSH service security: banner, auth methods, and default credentials.
+
+ Checks performed (in order):
+
+ 1. Banner grab — fingerprint server version.
+ 2. Auth method enumeration — identify if password auth is enabled.
+ 3. Default credential check — try a small list of common creds.
+ 4. Arbitrary credential acceptance test.
Parameters
----------
@@ -203,25 +953,260 @@ def _service_info_22(self, target, port):
Returns
-------
- str | None
- SSH banner text or error message.
+ dict
+ Structured findings with banner, auth_methods, and vulnerabilities.
"""
- info = None
+ findings = []
+ result = {
+ "banner": None,
+ "auth_methods": [],
+ }
+
+ # --- 1. Banner grab (raw socket) ---
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- sock.settimeout(2)
+ sock.settimeout(3)
sock.connect((target, port))
- banner = sock.recv(1024).decode('utf-8', errors='ignore')
- info = f"SSH banner: {banner.strip()}"
+ banner = sock.recv(1024).decode("utf-8", errors="ignore").strip()
sock.close()
+ result["banner"] = banner
+ # Emit OS claim from SSH banner (e.g. "SSH-2.0-OpenSSH_8.9p1 Ubuntu")
+ _os_match = _re.search(r'(Ubuntu|Debian|Fedora|CentOS|Alpine|FreeBSD)', banner, _re.IGNORECASE)
+ if _os_match:
+        # NOTE(review): normalized to the (key, dict_payload) shape used by every
+        # other _emit_metadata call site — confirm against the method signature.
+        self._emit_metadata("os_claims", {"os": _os_match.group(1), "source": f"ssh:{port}"})
+ except Exception as e:
+ return probe_error(target, port, "SSH", e)
+
+ # --- 2. Auth method enumeration via paramiko Transport ---
+ try:
+ transport = paramiko.Transport((target, port))
+ transport.connect()
+ try:
+ transport.auth_none("")
+ except paramiko.BadAuthenticationType as e:
+ result["auth_methods"] = list(e.allowed_types)
+ except paramiko.AuthenticationException:
+ result["auth_methods"] = ["unknown"]
+ finally:
+ transport.close()
+ except Exception as e:
+ self.P(f"SSH auth enumeration failed on {target}:{port}: {e}", color='y')
+
+ if "password" in result["auth_methods"]:
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title="SSH password authentication is enabled (prefer key-based auth).",
+ description="The SSH server allows password-based login, which is susceptible to brute-force attacks.",
+ evidence=f"Auth methods: {', '.join(result['auth_methods'])}",
+ remediation="Disable PasswordAuthentication in sshd_config and use key-based auth.",
+ owasp_id="A07:2021",
+ cwe_id="CWE-287",
+ confidence="certain",
+ ))
+
+ # --- 3. Default credential check ---
+ accepted_creds = []
+
+ for username, password in _SSH_DEFAULT_CREDS:
+ try:
+ client = paramiko.SSHClient()
+ client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+ client.connect(
+ target, port=port,
+ username=username, password=password,
+ timeout=3, auth_timeout=3,
+ look_for_keys=False, allow_agent=False,
+ )
+ accepted_creds.append(f"{username}:{password}")
+ client.close()
+ except paramiko.AuthenticationException:
+ continue
+ except Exception:
+ break # connection issue, stop trying
+
+ # --- 4. Arbitrary credential acceptance test ---
+ random_user = f"probe_{random.randint(10000, 99999)}"
+ random_pass = f"rnd_{random.randint(10000, 99999)}"
+ try:
+ client = paramiko.SSHClient()
+ client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
+ client.connect(
+ target, port=port,
+ username=random_user, password=random_pass,
+ timeout=3, auth_timeout=3,
+ look_for_keys=False, allow_agent=False,
+ )
+ findings.append(Finding(
+ severity=Severity.CRITICAL,
+ title="SSH accepts arbitrary credentials",
+ description="Random credentials were accepted, indicating a dangerous misconfiguration or deceptive service.",
+ evidence=f"Accepted random creds {random_user}:{random_pass}",
+ remediation="Investigate immediately — authentication is non-functional.",
+ owasp_id="A07:2021",
+ cwe_id="CWE-287",
+ confidence="certain",
+ ))
+ client.close()
+ except paramiko.AuthenticationException:
+ pass
+ except Exception:
+ pass
+
+ if accepted_creds:
+ result["accepted_credentials"] = accepted_creds
+ for cred in accepted_creds:
+ findings.append(Finding(
+ severity=Severity.CRITICAL,
+ title=f"SSH default credential accepted: {cred}",
+ description=f"The SSH server accepted a well-known default credential.",
+ evidence=f"Accepted credential: {cred}",
+ remediation="Change default passwords immediately and enforce strong credential policies.",
+ owasp_id="A07:2021",
+ cwe_id="CWE-798",
+ confidence="certain",
+ ))
+
+ # --- 5. Cipher/KEX audit ---
+ cipher_findings, weak_labels = self._ssh_check_ciphers(target, port)
+ findings += cipher_findings
+ result["weak_algorithms"] = weak_labels
+
+ # --- 6. CVE check on banner version ---
+ if result["banner"]:
+ ssh_version = self._ssh_extract_version(result["banner"])
+ if ssh_version:
+ result["ssh_version"] = ssh_version
+ findings += check_cves("openssh", ssh_version)
+
+ return probe_result(raw_data=result, findings=findings)
+
+ def _ssh_extract_version(self, banner):
+ """Extract OpenSSH version from banner like 'SSH-2.0-OpenSSH_8.9p1'."""
+ m = _re.search(r'OpenSSH[_\s](\d+\.\d+(?:\.\d+)?)', banner, _re.IGNORECASE)
+ return m.group(1) if m else None
+
+ def _ssh_check_ciphers(self, target, port):
+ """Audit SSH ciphers, KEX, and MACs via paramiko Transport.
+
+ Returns
+ -------
+ tuple[list[Finding], list[str]]
+ (findings, weak_algorithm_labels) — findings for probe_result,
+ labels for the raw-data ``weak_algorithms`` field.
+ """
+ findings = []
+ weak_labels = []
+ _WEAK_CIPHERS = {"3des-cbc", "blowfish-cbc", "arcfour", "arcfour128", "arcfour256",
+ "aes128-cbc", "aes192-cbc", "aes256-cbc", "cast128-cbc"}
+ _WEAK_KEX = {"diffie-hellman-group1-sha1", "diffie-hellman-group14-sha1",
+ "diffie-hellman-group-exchange-sha1"}
+
+ try:
+ transport = paramiko.Transport((target, port))
+ transport.connect()
+ sec_opts = transport.get_security_options()
+
+ ciphers = set(sec_opts.ciphers) if sec_opts.ciphers else set()
+ kex = set(sec_opts.kex) if sec_opts.kex else set()
+ key_types = set(sec_opts.key_types) if sec_opts.key_types else set()
+
+ # RSA key size check — must be done before transport.close()
+ try:
+ remote_key = transport.get_remote_server_key()
+ if remote_key is not None and remote_key.get_name() == "ssh-rsa":
+ key_bits = remote_key.get_bits()
+ if key_bits < 2048:
+ findings.append(Finding(
+ severity=Severity.HIGH,
+ title=f"SSH RSA key is critically weak ({key_bits}-bit)",
+ description=f"The server's RSA host key is only {key_bits}-bit, which is trivially factorable.",
+ evidence=f"RSA key size: {key_bits} bits",
+ remediation="Generate a new RSA key of at least 3072 bits, or switch to Ed25519.",
+ owasp_id="A02:2021",
+ cwe_id="CWE-326",
+ confidence="certain",
+ ))
+ weak_labels.append(f"rsa_key: {key_bits}-bit")
+ elif key_bits < 3072:
+ findings.append(Finding(
+ severity=Severity.LOW,
+ title=f"SSH RSA key below NIST recommendation ({key_bits}-bit)",
+ description=f"The server's RSA host key is {key_bits}-bit. NIST recommends >=3072-bit after 2023.",
+ evidence=f"RSA key size: {key_bits} bits",
+ remediation="Generate a new RSA key of at least 3072 bits, or switch to Ed25519.",
+ owasp_id="A02:2021",
+ cwe_id="CWE-326",
+ confidence="certain",
+ ))
+ weak_labels.append(f"rsa_key: {key_bits}-bit")
+ except Exception:
+ pass
+
+ transport.close()
+
+ # DSA key detection
+ if "ssh-dss" in key_types:
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title="SSH DSA host key offered (ssh-dss)",
+ description="The SSH server offers DSA host keys, which are limited to 1024-bit and considered weak.",
+ evidence=f"Key types: {', '.join(sorted(key_types))}",
+ remediation="Remove DSA host keys and use Ed25519 or RSA (>=3072-bit) instead.",
+ owasp_id="A02:2021",
+ cwe_id="CWE-326",
+ confidence="certain",
+ ))
+ weak_labels.append("key_types: ssh-dss")
+
+ weak_ciphers = ciphers & _WEAK_CIPHERS
+ weak_kex = kex & _WEAK_KEX
+
+ if weak_ciphers:
+ cipher_list = ", ".join(sorted(weak_ciphers))
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title=f"SSH weak ciphers: {cipher_list}",
+ description="The SSH server offers ciphers considered cryptographically weak.",
+ evidence=f"Weak ciphers offered: {cipher_list}",
+ remediation="Disable CBC-mode and RC4 ciphers in sshd_config.",
+ owasp_id="A02:2021",
+ cwe_id="CWE-326",
+ confidence="certain",
+ ))
+ weak_labels.append(f"ciphers: {cipher_list}")
+
+ if weak_kex:
+ kex_list = ", ".join(sorted(weak_kex))
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title=f"SSH weak key exchange: {kex_list}",
+ description="The SSH server offers key-exchange algorithms with known weaknesses.",
+ evidence=f"Weak KEX offered: {kex_list}",
+ remediation="Disable SHA-1 based key exchange algorithms in sshd_config.",
+ owasp_id="A02:2021",
+ cwe_id="CWE-326",
+ confidence="certain",
+ ))
+ weak_labels.append(f"kex: {kex_list}")
+
except Exception as e:
- info = f"SSH probe failed on {target}:{port}: {e}"
- self.P(info, color='y')
- return info
+ self.P(f"SSH cipher audit failed on {target}:{port}: {e}", color='y')
- def _service_info_25(self, target, port):
+ return findings, weak_labels
+
+ def _service_info_smtp(self, target, port): # default port: 25
"""
- Capture SMTP banner data for mail infrastructure mapping.
+ Assess SMTP service security: banner, EHLO features, STARTTLS,
+ authentication methods, open relay, and user enumeration.
+
+ Checks performed (in order):
+
+ 1. Banner grab — fingerprint MTA software and version.
+ 2. EHLO — enumerate server capabilities (SIZE, AUTH, STARTTLS, etc.).
+ 3. STARTTLS support — check for encryption.
+ 4. AUTH methods — detect available authentication mechanisms.
+ 5. Open relay test — attempt MAIL FROM / RCPT TO without auth.
+ 6. VRFY / EXPN — test user enumeration commands.
Parameters
----------
@@ -232,25 +1217,236 @@ def _service_info_25(self, target, port):
Returns
-------
- str | None
- SMTP banner text or error message.
+ dict
+ Structured findings.
"""
- info = None
+ import smtplib
+
+ findings = []
+ result = {
+ "banner": None,
+ "server_hostname": None,
+ "max_message_size": None,
+ "auth_methods": [],
+ }
+
+ # --- 1. Connect and grab banner ---
try:
- sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- sock.settimeout(3)
- sock.connect((target, port))
- banner = sock.recv(1024).decode('utf-8', errors='ignore')
- info = f"SMTP banner: {banner.strip()}"
- sock.close()
+ smtp = smtplib.SMTP(timeout=5)
+ code, msg = smtp.connect(target, port)
+ result["banner"] = f"{code} {msg.decode(errors='replace')}"
except Exception as e:
- info = f"SMTP probe failed on {target}:{port}: {e}"
- self.P(info, color='y')
- return info
+ return probe_error(target, port, "SMTP", e)
+
+ # --- 2. EHLO — server capabilities ---
+ identity = getattr(self, 'scanner_identity', 'probe.redmesh.local')
+ ehlo_features = []
+ try:
+ code, msg = smtp.ehlo(identity)
+ if code == 250:
+ for line in msg.decode(errors="replace").split("\n"):
+ feat = line.strip()
+ if feat:
+ ehlo_features.append(feat)
+ except Exception:
+ # Fallback to HELO
+ try:
+ smtp.helo(identity)
+ except Exception:
+ pass
+
+ # Parse meaningful fields from EHLO response
+ for idx, feat in enumerate(ehlo_features):
+ upper = feat.upper()
+ if idx == 0 and " Hello " in feat:
+ # First line is the server greeting: "hostname Hello client [ip]"
+ result["server_hostname"] = feat.split()[0]
+ if upper.startswith("SIZE "):
+ try:
+ size_bytes = int(feat.split()[1])
+ result["max_message_size"] = f"{size_bytes // (1024*1024)}MB"
+ except (ValueError, IndexError):
+ pass
+ if upper.startswith("AUTH "):
+ result["auth_methods"] = feat.split()[1:]
+
+ # --- 2b. Banner timezone extraction ---
+ banner_text = result["banner"] or ""
+ _tz_match = _re.search(r'([+-]\d{4})\s*$', banner_text)
+ if _tz_match:
+ self._emit_metadata("timezone_hints", {"offset": _tz_match.group(1), "source": f"smtp:{port}"})
+
+ # --- 2c. Banner / hostname information disclosure ---
+ # Extract MTA version from banner (e.g. "Exim 4.97", "Postfix", "Sendmail 8.x")
+ version_match = _re.search(
+ r"(Exim|Postfix|Sendmail|Microsoft ESMTP|hMailServer|Haraka|OpenSMTPD)"
+ r"[\s/]*([0-9][0-9.]*)?",
+ banner_text, _re.IGNORECASE,
+ )
+ if version_match:
+ mta = version_match.group(0).strip()
+ findings.append(Finding(
+ severity=Severity.LOW,
+ title=f"SMTP banner discloses MTA software: {mta} (aids CVE lookup).",
+ description="The SMTP banner reveals the mail transfer agent software and version.",
+ evidence=f"Banner: {banner_text[:120]}",
+ remediation="Remove or genericize the SMTP banner to hide MTA version details.",
+ owasp_id="A05:2021",
+ cwe_id="CWE-200",
+ confidence="certain",
+ ))
+
+ # CVE check on extracted MTA version
+ _smtp_product_map = {'exim': 'exim', 'postfix': 'postfix'}
+ if version_match and version_match.group(2):
+ _cve_product = _smtp_product_map.get(version_match.group(1).lower())
+ if _cve_product:
+ findings += check_cves(_cve_product, version_match.group(2))
+
+ if result["server_hostname"]:
+ # Check if hostname reveals container/internal info
+ hostname = result["server_hostname"]
+ if _re.search(r"[0-9a-f]{12}", hostname):
+ self._emit_metadata("container_ids", {"id": hostname, "source": f"smtp:{port}"})
+ findings.append(Finding(
+ severity=Severity.LOW,
+ title=f"SMTP hostname leaks container ID: {hostname} (infrastructure disclosure).",
+ description="The EHLO response reveals a container ID or internal hostname.",
+ evidence=f"Hostname: {hostname}",
+ remediation="Configure the SMTP server to use a proper FQDN instead of the container ID.",
+ owasp_id="A05:2021",
+ cwe_id="CWE-200",
+ confidence="firm",
+ ))
+ if _re.match(r'^[a-z0-9-]+-[a-z0-9]{8,10}$', hostname):
+ self._emit_metadata("container_ids", {"id": hostname, "source": f"smtp_k8s:{port}"})
+ findings.append(Finding(
+ severity=Severity.LOW,
+ title=f"SMTP hostname matches Kubernetes pod name pattern: {hostname}",
+ description="The EHLO hostname resembles a Kubernetes pod name (deployment-replicaset-podid).",
+ evidence=f"Hostname: {hostname}",
+ remediation="Configure the SMTP server to use a proper FQDN instead of the pod name.",
+ owasp_id="A05:2021",
+ cwe_id="CWE-200",
+ confidence="firm",
+ ))
+ if hostname.endswith('.internal'):
+ self._emit_metadata("container_ids", {"id": hostname, "source": f"smtp_internal:{port}"})
+ findings.append(Finding(
+ severity=Severity.LOW,
+ title=f"SMTP hostname uses cloud-internal DNS suffix: {hostname}",
+ description="The EHLO hostname ends with '.internal', indicating AWS/GCP internal DNS.",
+ evidence=f"Hostname: {hostname}",
+ remediation="Configure the SMTP server to use a public FQDN instead of internal DNS.",
+ owasp_id="A05:2021",
+ cwe_id="CWE-200",
+ confidence="firm",
+ ))
+
+ # --- 3. STARTTLS ---
+ starttls_supported = any("STARTTLS" in f.upper() for f in ehlo_features)
+ if not starttls_supported:
+ try:
+ code, msg = smtp.docmd("STARTTLS")
+ if code == 220:
+ starttls_supported = True
+ except Exception:
+ pass
+
+ if not starttls_supported:
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title="SMTP does not support STARTTLS (credentials sent in cleartext).",
+ description="The SMTP server does not offer STARTTLS, leaving credentials and mail unencrypted.",
+ evidence="STARTTLS not listed in EHLO features and STARTTLS command rejected.",
+ remediation="Enable STARTTLS support on the SMTP server.",
+ owasp_id="A02:2021",
+ cwe_id="CWE-319",
+ confidence="certain",
+ ))
+
+ # --- 4. AUTH without credentials ---
+ if result["auth_methods"]:
+ try:
+ code, msg = smtp.docmd("AUTH LOGIN")
+ if code == 235:
+ findings.append(Finding(
+ severity=Severity.HIGH,
+ title="SMTP AUTH LOGIN accepted without credentials.",
+ description="The SMTP server accepted AUTH LOGIN without providing actual credentials.",
+ evidence=f"AUTH LOGIN returned code {code}.",
+ remediation="Fix AUTH configuration to require valid credentials.",
+ owasp_id="A07:2021",
+ cwe_id="CWE-287",
+ confidence="certain",
+ ))
+ except Exception:
+ pass
+
+ # --- 5. Open relay test ---
+ try:
+ smtp.rset()
+ except Exception:
+ try:
+ smtp.quit()
+ except Exception:
+ pass
+ try:
+ smtp = smtplib.SMTP(target, port, timeout=5)
+ smtp.ehlo(identity)
+ except Exception:
+ smtp = None
- def _service_info_3306(self, target, port):
+ if smtp:
+ try:
+      code_from, _ = smtp.docmd("MAIL FROM:<redmesh-probe@example.com>")
+      if code_from == 250:
+        code_rcpt, _ = smtp.docmd("RCPT TO:<relay-check@external-domain.example>")
+ if code_rcpt == 250:
+ findings.append(Finding(
+ severity=Severity.HIGH,
+ title="SMTP open relay detected (accepts mail to external domains without auth).",
+ description="The SMTP server relays mail to external domains without authentication.",
+ evidence="RCPT TO: accepted (code 250).",
+ remediation="Configure SMTP relay restrictions to require authentication.",
+ owasp_id="A01:2021",
+ cwe_id="CWE-284",
+ confidence="certain",
+ ))
+ smtp.docmd("RSET")
+ except Exception:
+ pass
+
+ # --- 6. VRFY / EXPN ---
+ if smtp:
+ for cmd_name in ("VRFY", "EXPN"):
+ try:
+ code, msg = smtp.docmd(cmd_name, "root")
+ if code in (250, 251, 252):
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title=f"SMTP {cmd_name} command enabled (user enumeration possible).",
+ description=f"The {cmd_name} command can be used to enumerate valid users on the system.",
+ evidence=f"{cmd_name} root returned code {code}.",
+ remediation=f"Disable the {cmd_name} command in the SMTP server configuration.",
+ owasp_id="A01:2021",
+ cwe_id="CWE-203",
+ confidence="certain",
+ ))
+ except Exception:
+ pass
+
+ if smtp:
+ try:
+ smtp.quit()
+ except Exception:
+ pass
+
+ return probe_result(raw_data=result, findings=findings)
+
+ def _service_info_mysql(self, target, port): # default port: 3306
"""
- Perform a lightweight MySQL handshake to expose server version.
+ MySQL handshake probe: extract version, auth plugin, and check CVEs.
Parameters
----------
@@ -261,27 +1457,215 @@ def _service_info_3306(self, target, port):
Returns
-------
- str | None
- MySQL version info or error message.
+ dict
+ Structured findings.
"""
- info = None
+ findings = []
+ raw = {"version": None, "auth_plugin": None}
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(3)
sock.connect((target, port))
- data = sock.recv(128)
- if data and data[0] == 0x0a:
- version = data[1:].split(b'\x00')[0].decode('utf-8', errors='ignore')
- info = f"MySQL handshake version: {version}"
- else:
- info = "MySQL port open (no banner)"
+ data = sock.recv(256)
sock.close()
+
+ if data and len(data) > 4:
+ # MySQL protocol: first byte of payload is protocol version (0x0a = v10)
+ pkt_payload = data[4:] # skip 3-byte length + 1-byte seq
+ if pkt_payload and pkt_payload[0] == 0x0a:
+ version = pkt_payload[1:].split(b'\x00')[0].decode('utf-8', errors='ignore')
+ raw["version"] = version
+
+ # Extract auth plugin name (at end of handshake after capabilities/salt)
+ try:
+ parts = pkt_payload.split(b'\x00')
+ if len(parts) >= 2:
+ last = parts[-2].decode('utf-8', errors='ignore') if parts[-1] == b'' else parts[-1].decode('utf-8', errors='ignore')
+ if 'mysql_native' in last or 'caching_sha2' in last or 'sha256' in last:
+ raw["auth_plugin"] = last
+ except Exception:
+ pass
+
+ findings.append(Finding(
+ severity=Severity.LOW,
+ title=f"MySQL version disclosed: {version}",
+ description=f"MySQL {version} handshake received on {target}:{port}.",
+ evidence=f"version={version}, auth_plugin={raw['auth_plugin']}",
+ remediation="Restrict MySQL to trusted networks; consider disabling version disclosure.",
+ confidence="certain",
+ ))
+
+ # Salt entropy check — extract 20-byte auth scramble from handshake
+ try:
+ import math
+ # After version null-terminated string: 4 bytes thread_id + 8 bytes salt1
+ after_version = pkt_payload[1:].split(b'\x00', 1)[1]
+ if len(after_version) >= 12:
+ salt1 = after_version[4:12] # 8 bytes after thread_id
+ # Salt part 2: after capabilities(2)+charset(1)+status(2)+caps_upper(2)+auth_len(1)+reserved(10)
+ salt2 = b''
+ if len(after_version) >= 31:
+ salt2 = after_version[31:43].rstrip(b'\x00')
+ full_salt = salt1 + salt2
+ if len(full_salt) >= 8:
+ # Shannon entropy
+ byte_counts = {}
+ for b in full_salt:
+ byte_counts[b] = byte_counts.get(b, 0) + 1
+ entropy = 0.0
+ n = len(full_salt)
+ for count in byte_counts.values():
+ p = count / n
+ if p > 0:
+ entropy -= p * math.log2(p)
+ raw["salt_entropy"] = round(entropy, 2)
+ if entropy < 2.0:
+ findings.append(Finding(
+ severity=Severity.HIGH,
+ title=f"MySQL salt entropy critically low ({entropy:.2f} bits)",
+ description="The authentication scramble has abnormally low entropy, "
+ "suggesting a non-standard or deceptive MySQL service.",
+ evidence=f"salt_entropy={entropy:.2f}, salt_hex={full_salt.hex()[:40]}",
+ remediation="Investigate this MySQL instance — authentication randomness is insufficient.",
+ cwe_id="CWE-330",
+ confidence="firm",
+ ))
+ except Exception:
+ pass
+
+ # CVE check
+ findings += check_cves("mysql", version)
+ else:
+ raw["protocol_byte"] = pkt_payload[0] if pkt_payload else None
+ findings.append(Finding(
+ severity=Severity.INFO,
+ title="MySQL port open (non-standard handshake)",
+ description=f"Port {port} responded but protocol byte is not 0x0a.",
+ confidence="tentative",
+ ))
+ else:
+ findings.append(Finding(
+ severity=Severity.INFO,
+ title="MySQL port open (no banner)",
+ description=f"No handshake data received on {target}:{port}.",
+ confidence="tentative",
+ ))
except Exception as e:
- info = f"MySQL probe failed on {target}:{port}: {e}"
- self.P(info, color='y')
- return info
+ return probe_error(target, port, "MySQL", e)
+
+ return probe_result(raw_data=raw, findings=findings)
+
+ def _service_info_mysql_creds(self, target, port): # default port: 3306
+ """
+ MySQL default credential testing (opt-in via active_auth feature group).
+
+ Attempts mysql_native_password auth with a small list of default credentials.
+
+ Parameters
+ ----------
+ target : str
+ Hostname or IP address.
+ port : int
+ Port being probed.
- def _service_info_3389(self, target, port):
+ Returns
+ -------
+ dict
+ Structured findings.
+ """
+ import hashlib
+
+ findings = []
+ raw = {"tested_credentials": 0, "accepted_credentials": []}
+ creds = [("root", ""), ("root", "root"), ("root", "password")]
+
+ for username, password in creds:
+ try:
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.settimeout(3)
+ sock.connect((target, port))
+ data = sock.recv(256)
+
+ if not data or len(data) < 4:
+ sock.close()
+ continue
+
+ pkt_payload = data[4:]
+ if not pkt_payload or pkt_payload[0] != 0x0a:
+ sock.close()
+ continue
+
+ # Extract salt (scramble) from handshake
+ parts = pkt_payload[1:].split(b'\x00', 1)
+ rest = parts[1] if len(parts) > 1 else b''
+ # Salt part 1: bytes 4..11 after capabilities (skip 4 bytes capabilities + 1 byte filler)
+ if len(rest) >= 13:
+ salt1 = rest[5:13]
+ else:
+ sock.close()
+ continue
+ # Salt part 2: after reserved bytes (skip 2+2+1+10 reserved = 15)
+ salt2 = b''
+ if len(rest) >= 28:
+ salt2 = rest[28:40].rstrip(b'\x00')
+ salt = salt1 + salt2
+
+ # mysql_native_password auth response
+ if password:
+ sha1_pass = hashlib.sha1(password.encode()).digest()
+ sha1_sha1 = hashlib.sha1(sha1_pass).digest()
+ sha1_salt_sha1sha1 = hashlib.sha1(salt + sha1_sha1).digest()
+ auth_data = bytes(a ^ b for a, b in zip(sha1_pass, sha1_salt_sha1sha1))
+ else:
+ auth_data = b''
+
+ # Build auth response packet
+        # NOTE(review): the following span was garbled in the patch (text between
+        # '<' and '>' stripped); reconstructed from the MySQL HandshakeResponse41
+        # wire format — verify against the protocol docs before trusting results.
+        client_flags = 0x0200 | 0x8000  # CLIENT_PROTOCOL_41 | CLIENT_SECURE_CONNECTION
+        payload = struct.pack('<IIB23x', client_flags, 1 << 24, 33)  # flags, max packet, utf8 charset
+        payload += username.encode() + b'\x00'
+        payload += bytes([len(auth_data)]) + auth_data
+        pkt = struct.pack('<I', len(payload))[:3] + b'\x01' + payload  # 3-byte length + seq no. 1
+        sock.sendall(pkt)
+        resp = sock.recv(256)
+        sock.close()
+        raw["tested_credentials"] += 1
+        if resp and len(resp) >= 5:
+ resp_type = resp[4]
+ if resp_type == 0x00: # OK packet
+ cred_str = f"{username}:{password}" if password else f"{username}:(empty)"
+ raw["accepted_credentials"].append(cred_str)
+ findings.append(Finding(
+ severity=Severity.CRITICAL,
+ title=f"MySQL default credential accepted: {cred_str}",
+ description=f"MySQL on {target}:{port} accepts {cred_str}.",
+ evidence=f"Auth response OK for {cred_str}",
+ remediation="Change default passwords and restrict access.",
+ owasp_id="A07:2021",
+ cwe_id="CWE-798",
+ confidence="certain",
+ ))
+ except Exception:
+ continue
+
+ if not findings:
+ findings.append(Finding(
+ severity=Severity.INFO,
+ title="MySQL default credentials rejected",
+ description=f"Tested {raw['tested_credentials']} credential pairs, all rejected.",
+ confidence="certain",
+ ))
+
+ return probe_result(raw_data=raw, findings=findings)
+
+ def _service_info_rdp(self, target, port): # default port: 3389
"""
Verify reachability of RDP services without full negotiation.
@@ -294,24 +1678,32 @@ def _service_info_3389(self, target, port):
Returns
-------
- str | None
- RDP reachability summary or error message.
+ dict
+ Structured findings.
"""
- info = None
+ findings = []
+ raw = {"banner": None}
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(2)
sock.connect((target, port))
- info = "RDP service open (no easy banner)."
+ raw["banner"] = "RDP service open"
+ findings.append(Finding(
+ severity=Severity.INFO,
+ title="RDP service detected",
+ description=f"RDP port {port} is open on {target}, no further enumeration performed.",
+ evidence=f"TCP connect to {target}:{port} succeeded.",
+ confidence="certain",
+ ))
sock.close()
except Exception as e:
- info = f"RDP probe failed on {target}:{port}: {e}"
- self.P(info, color='y')
- return info
+ return probe_error(target, port, "RDP", e)
+ return probe_result(raw_data=raw, findings=findings)
- def _service_info_6379(self, target, port):
+ # SAFETY: Read-only commands only. NEVER add CONFIG SET, SLAVEOF, MODULE LOAD, EVAL, DEBUG.
+ def _service_info_redis(self, target, port): # default port: 6379
"""
- Test Redis exposure by issuing a PING command.
+ Deep Redis probe: auth check, version, config readability, data size, client list.
Parameters
----------
@@ -322,32 +1714,240 @@ def _service_info_6379(self, target, port):
Returns
-------
- str | None
- Redis response summary or error message.
+ dict
+ Structured findings.
"""
- info = None
+ findings, raw = [], {"version": None, "os": None, "config_writable": False}
+ sock = self._redis_connect(target, port)
+ if not sock:
+ return probe_error(target, port, "Redis", Exception("connection failed"))
+
+ auth_findings = self._redis_check_auth(sock, raw)
+ if not auth_findings:
+ # NOAUTH response — requires auth, stop here
+ sock.close()
+ return probe_result(
+ raw_data=raw,
+ findings=[Finding(Severity.INFO, "Redis requires authentication", "PING returned NOAUTH.")],
+ )
+
+ findings += auth_findings
+ findings += self._redis_check_info(sock, raw)
+ findings += self._redis_check_config(sock, raw)
+ findings += self._redis_check_data(sock, raw)
+ findings += self._redis_check_clients(sock, raw)
+ findings += self._redis_check_persistence(sock, raw)
+
+ # CVE check
+ if raw["version"]:
+ findings += check_cves("redis", raw["version"])
+
+ sock.close()
+ return probe_result(raw_data=raw, findings=findings)
+
+ def _redis_connect(self, target, port):
+ """Open a TCP socket to Redis."""
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- sock.settimeout(2)
+ sock.settimeout(3)
sock.connect((target, port))
- sock.send(b"PING\r\n")
- data = sock.recv(64).decode('utf-8', errors='ignore')
- if data.startswith("+PONG"):
- info = "VULNERABILITY: Redis responded to PING (no authentication)."
- elif data.upper().startswith("-NOAUTH"):
- info = "Redis requires authentication (NOAUTH)."
- else:
- info = f"Redis response: {data.strip()}"
- sock.close()
+ return sock
except Exception as e:
- info = f"Redis probe failed on {target}:{port}: {e}"
- self.P(info, color='y')
- return info
+ self.P(f"Redis connect failed on {target}:{port}: {e}", color='y')
+ return None
-
- def _service_info_23(self, target, port):
+ def _redis_cmd(self, sock, cmd):
+ """Send an inline Redis command and return the response string."""
+ try:
+ sock.sendall(f"{cmd}\r\n".encode())
+ data = sock.recv(4096).decode('utf-8', errors='ignore')
+ return data
+ except Exception:
+ return ""
+
+ def _redis_check_auth(self, sock, raw):
+ """PING to check if auth is required. Returns findings if no auth, empty list if NOAUTH."""
+ resp = self._redis_cmd(sock, "PING")
+ if resp.startswith("+PONG"):
+ return [Finding(
+ severity=Severity.CRITICAL,
+ title="Redis unauthenticated access",
+ description="Redis responded to PING without authentication.",
+ evidence=f"Response: {resp.strip()[:80]}",
+ remediation="Set a strong password via requirepass in redis.conf.",
+ owasp_id="A07:2021",
+ cwe_id="CWE-287",
+ confidence="certain",
+ )]
+ if "-NOAUTH" in resp.upper():
+ return [] # signal: auth required
+ return [Finding(
+ severity=Severity.LOW,
+ title="Redis unusual PING response",
+ description=f"Unexpected response: {resp.strip()[:80]}",
+ confidence="tentative",
+ )]
+
+ def _redis_check_info(self, sock, raw):
+ """Extract version and OS from INFO server."""
+ findings = []
+ resp = self._redis_cmd(sock, "INFO server")
+ if resp.startswith("-"):
+ return findings
+ uptime_seconds = None
+ for line in resp.split("\r\n"):
+ if line.startswith("redis_version:"):
+ raw["version"] = line.split(":", 1)[1].strip()
+ elif line.startswith("os:"):
+ raw["os"] = line.split(":", 1)[1].strip()
+ elif line.startswith("uptime_in_seconds:"):
+ try:
+ uptime_seconds = int(line.split(":", 1)[1].strip())
+ raw["uptime_seconds"] = uptime_seconds
+ except (ValueError, IndexError):
+ pass
+ if raw["os"]:
+ self._emit_metadata("os_claims", "redis", raw["os"])
+ if raw["version"]:
+ findings.append(Finding(
+ severity=Severity.LOW,
+ title=f"Redis version disclosed: {raw['version']}",
+ description=f"Redis {raw['version']} on {raw['os'] or 'unknown OS'}.",
+ evidence=f"version={raw['version']}, os={raw['os']}",
+ remediation="Restrict INFO command access or rename it.",
+ confidence="certain",
+ ))
+ if uptime_seconds is not None and uptime_seconds < 60:
+ findings.append(Finding(
+ severity=Severity.INFO,
+ title=f"Redis uptime <60s ({uptime_seconds}s) — possible container restart",
+ description="Very low uptime may indicate a recently restarted container or ephemeral instance.",
+ evidence=f"uptime_in_seconds={uptime_seconds}",
+ remediation="Investigate if the service is being automatically restarted.",
+ confidence="tentative",
+ ))
+ return findings
+
+ def _redis_check_config(self, sock, raw):
+ """CONFIG GET dir — if accessible, it's an RCE vector."""
+ findings = []
+ resp = self._redis_cmd(sock, "CONFIG GET dir")
+ if resp.startswith("-"):
+ return findings # blocked, good
+ raw["config_writable"] = True
+ findings.append(Finding(
+ severity=Severity.CRITICAL,
+ title="Redis CONFIG command accessible (RCE vector)",
+ description="CONFIG GET is accessible, allowing attackers to write arbitrary files "
+ "via CONFIG SET dir / CONFIG SET dbfilename + SAVE.",
+ evidence=f"CONFIG GET dir response: {resp.strip()[:120]}",
+ remediation="Rename or disable CONFIG via rename-command in redis.conf.",
+ owasp_id="A05:2021",
+ cwe_id="CWE-94",
+ confidence="certain",
+ ))
+ return findings
+
+ def _redis_check_data(self, sock, raw):
+ """DBSIZE — report if data is present."""
+ findings = []
+ resp = self._redis_cmd(sock, "DBSIZE")
+ if resp.startswith(":"):
+ try:
+ count = int(resp.strip().lstrip(":"))
+ raw["db_size"] = count
+ if count > 0:
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title=f"Redis database contains {count} keys",
+ description="Unauthenticated access to a Redis instance with live data.",
+ evidence=f"DBSIZE={count}",
+ remediation="Enable authentication and restrict network access.",
+ owasp_id="A01:2021",
+ cwe_id="CWE-284",
+ confidence="certain",
+ ))
+ except ValueError:
+ pass
+ return findings
+
+ def _redis_check_clients(self, sock, raw):
+ """CLIENT LIST — extract connected client IPs."""
+ findings = []
+ resp = self._redis_cmd(sock, "CLIENT LIST")
+ if resp.startswith("-"):
+ return findings
+ ips = set()
+ for line in resp.split("\n"):
+ for part in line.split():
+ if part.startswith("addr="):
+ ip_port = part.split("=", 1)[1]
+ ip = ip_port.rsplit(":", 1)[0]
+ ips.add(ip)
+ if ips:
+ raw["connected_clients"] = list(ips)
+ findings.append(Finding(
+ severity=Severity.LOW,
+ title=f"Redis client IPs disclosed ({len(ips)} clients)",
+ description=f"CLIENT LIST reveals connected IPs: {', '.join(sorted(ips)[:5])}",
+ evidence=f"IPs: {', '.join(sorted(ips)[:10])}",
+ remediation="Rename or disable CLIENT command.",
+ confidence="certain",
+ ))
+ return findings
+
+ def _redis_check_persistence(self, sock, raw):
+ """Check INFO persistence for missing or stale RDB saves."""
+ findings = []
+ resp = self._redis_cmd(sock, "INFO persistence")
+ if resp.startswith("-"):
+ return findings
+ import time as _time
+ for line in resp.split("\r\n"):
+      if line.startswith("rdb_last_save_time:"):
+        try:
+          ts = int(line.split(":", 1)[1].strip())
+          if ts == 0:
+            findings.append(Finding(
+              severity=Severity.LOW,
+              title="Redis has never performed an RDB save",
+              description="rdb_last_save_time is 0, meaning no background save has ever been performed. "
+                          "This may indicate a cache-only instance with persistence disabled, or an ephemeral deployment.",
+              evidence="rdb_last_save_time=0",
+              remediation="Verify whether RDB persistence is intentionally disabled; if not, configure BGSAVE.",
+              cwe_id="CWE-345",
+              confidence="tentative",
+            ))
+          elif (_time.time() - ts) > 365 * 86400:
+            age_days = int((_time.time() - ts) / 86400)
+            findings.append(Finding(
+              severity=Severity.LOW,
+              title=f"Redis RDB save is stale ({age_days} days old)",
+              description="The last RDB background save timestamp is over 1 year old. "
+                          "This may indicate disabled persistence, a long-running cache-only instance, or stale data.",
+              evidence=f"rdb_last_save_time={ts}, age={age_days}d",
+              remediation="Verify persistence configuration; stale saves may indicate data loss risk.",
+              cwe_id="CWE-345",
+              confidence="tentative",
+            ))
+        except (ValueError, IndexError):
+          pass
+        break
+ return findings
+
+
+ def _service_info_telnet(self, target, port): # default port: 23
"""
- Fetch Telnet negotiation banner.
+ Assess Telnet service security: banner, negotiation options, default
+ credentials, privilege level, system fingerprint, and credential validation.
+
+ Checks performed (in order):
+
+ 1. Banner grab and IAC option parsing.
+ 2. Default credential check — try common user:pass combos.
+ 3. Privilege escalation check — report if root shell is obtained.
+ 4. System fingerprint — run ``id`` and ``uname -a`` on successful login.
+ 5. Arbitrary credential acceptance test.
Parameters
----------
@@ -358,27 +1958,235 @@ def _service_info_23(self, target, port):
Returns
-------
- str | None
- Telnet banner or error message.
+ dict
+ Structured findings.
"""
- info = None
+ import time as _time
+
+ findings = []
+ result = {
+ "banner": None,
+ "negotiation_options": [],
+ "accepted_credentials": [],
+ "system_info": None,
+ }
+
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title="Telnet service is running (unencrypted remote access).",
+ description="Telnet transmits all data including credentials in cleartext.",
+ evidence=f"Telnet port {port} is open on {target}.",
+ remediation="Replace Telnet with SSH for encrypted remote access.",
+ owasp_id="A02:2021",
+ cwe_id="CWE-319",
+ confidence="certain",
+ ))
+
+ # --- 1. Banner grab + IAC negotiation parsing ---
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- sock.settimeout(2)
+ sock.settimeout(5)
sock.connect((target, port))
- banner = sock.recv(1024).decode('utf-8', errors='ignore')
- if banner:
- info = f"VULNERABILITY: Telnet banner: {banner.strip()}"
- else:
- info = "VULNERABILITY: Telnet open with no banner"
+ raw = sock.recv(2048)
sock.close()
except Exception as e:
- info = f"Telnet probe failed on {target}:{port}: {e}"
- self.P(info, color='y')
- return info
-
+ return probe_error(target, port, "Telnet", e)
+
+ # Parse IAC sequences
+ iac_options = []
+ cmd_names = {251: "WILL", 252: "WONT", 253: "DO", 254: "DONT"}
+ opt_names = {
+ 0: "BINARY", 1: "ECHO", 3: "SGA", 5: "STATUS",
+ 24: "TERMINAL_TYPE", 31: "WINDOW_SIZE", 32: "TERMINAL_SPEED",
+ 33: "REMOTE_FLOW", 34: "LINEMODE", 36: "ENVIRON", 39: "NEW_ENVIRON",
+ }
+ i = 0
+ text_parts = []
+ while i < len(raw):
+ if raw[i] == 0xFF and i + 2 < len(raw):
+ cmd = cmd_names.get(raw[i + 1], f"CMD_{raw[i+1]}")
+ opt = opt_names.get(raw[i + 2], f"OPT_{raw[i+2]}")
+ iac_options.append(f"{cmd} {opt}")
+ i += 3
+ else:
+ if 32 <= raw[i] < 127:
+ text_parts.append(chr(raw[i]))
+ i += 1
+
+ banner_text = "".join(text_parts).strip()
+ if banner_text:
+ result["banner"] = banner_text
+ elif iac_options:
+ result["banner"] = "(IAC negotiation only, no text banner)"
+ else:
+ result["banner"] = "(no banner)"
+ result["negotiation_options"] = iac_options
+
+ # --- 2–4. Default credential check with system fingerprint ---
+ def _try_telnet_login(user, passwd):
+ """Attempt Telnet login, return (success, uid_line, uname_line)."""
+ try:
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ s.settimeout(5)
+ s.connect((target, port))
+
+ # Read until login prompt
+ buf = b""
+ deadline = _time.time() + 5
+ while _time.time() < deadline:
+ try:
+ chunk = s.recv(1024)
+ if not chunk:
+ break
+ buf += chunk
+ if b"login:" in buf.lower() or b"username:" in buf.lower():
+ break
+ except socket.timeout:
+ break
+
+ if b"login:" not in buf.lower() and b"username:" not in buf.lower():
+ s.close()
+ return False, None, None
+
+ s.sendall(user.encode() + b"\n")
+
+ # Read until password prompt
+ buf = b""
+ deadline = _time.time() + 5
+ while _time.time() < deadline:
+ try:
+ chunk = s.recv(1024)
+ if not chunk:
+ break
+ buf += chunk
+ if b"assword:" in buf:
+ break
+ except socket.timeout:
+ break
+
+ if b"assword:" not in buf:
+ s.close()
+ return False, None, None
+
+ s.sendall(passwd.encode() + b"\n")
+ _time.sleep(1.5)
+
+ # Read response
+ resp = b""
+ try:
+ while True:
+ chunk = s.recv(4096)
+ if not chunk:
+ break
+ resp += chunk
+ except socket.timeout:
+ pass
+
+ resp_text = resp.decode("utf-8", errors="replace")
+
+ # Check for login failure indicators
+ fail_indicators = ["incorrect", "failed", "denied", "invalid", "login:"]
+ if any(ind in resp_text.lower() for ind in fail_indicators):
+ s.close()
+ return False, None, None
+
+ # Login succeeded — try to get system info
+ uid_line = None
+ uname_line = None
+ try:
+ s.sendall(b"id\n")
+ _time.sleep(0.5)
+ id_resp = s.recv(2048).decode("utf-8", errors="replace")
+ for line in id_resp.replace("\r\n", "\n").split("\n"):
+ cleaned = line.strip()
+ # Remove ANSI/control sequences
+ import re
+ cleaned = re.sub(r"\x1b\[[0-9;]*[a-zA-Z]", "", cleaned)
+ if "uid=" in cleaned:
+ uid_line = cleaned
+ break
+ except Exception:
+ pass
+
+ try:
+ s.sendall(b"uname -a\n")
+ _time.sleep(0.5)
+ uname_resp = s.recv(2048).decode("utf-8", errors="replace")
+ for line in uname_resp.replace("\r\n", "\n").split("\n"):
+ cleaned = line.strip()
+ import re
+ cleaned = re.sub(r"\x1b\[[0-9;]*[a-zA-Z]", "", cleaned)
+ if "linux" in cleaned.lower() or "unix" in cleaned.lower() or "darwin" in cleaned.lower():
+ uname_line = cleaned
+ break
+ except Exception:
+ pass
+
+ s.close()
+ return True, uid_line, uname_line
- def _service_info_445(self, target, port):
+ except Exception:
+ return False, None, None
+
+ system_info_captured = False
+ for user, passwd in _TELNET_DEFAULT_CREDS:
+ success, uid_line, uname_line = _try_telnet_login(user, passwd)
+ if success:
+ result["accepted_credentials"].append(f"{user}:{passwd}")
+ findings.append(Finding(
+ severity=Severity.CRITICAL,
+ title=f"Telnet default credential accepted: {user}:{passwd}",
+ description="The Telnet server accepted a well-known default credential.",
+ evidence=f"Accepted credential: {user}:{passwd}",
+ remediation="Change default passwords immediately and enforce strong credential policies.",
+ owasp_id="A07:2021",
+ cwe_id="CWE-798",
+ confidence="certain",
+ ))
+ # Check for root access
+ if uid_line and "uid=0" in uid_line:
+ findings.append(Finding(
+ severity=Severity.CRITICAL,
+ title=f"Root shell access via Telnet with {user}:{passwd}.",
+ description="Root-level shell access was obtained over an unencrypted Telnet session.",
+ evidence=f"uid=0 in id output: {uid_line}",
+ remediation="Disable root login via Telnet; use SSH with key-based auth instead.",
+ owasp_id="A04:2021",
+ cwe_id="CWE-250",
+ confidence="certain",
+ ))
+
+ # Capture system info once
+ if not system_info_captured and (uid_line or uname_line):
+ parts = []
+ if uid_line:
+ parts.append(uid_line)
+ if uname_line:
+ parts.append(uname_line)
+ result["system_info"] = " | ".join(parts)
+ system_info_captured = True
+
+ # --- 5. Arbitrary credential acceptance test ---
+ import string as _string
+ ruser = "".join(random.choices(_string.ascii_lowercase, k=8))
+ rpass = "".join(random.choices(_string.ascii_letters + _string.digits, k=12))
+ success, _, _ = _try_telnet_login(ruser, rpass)
+ if success:
+ findings.append(Finding(
+ severity=Severity.CRITICAL,
+ title="Telnet accepts arbitrary credentials",
+ description="Random credentials were accepted, indicating a dangerous misconfiguration or deceptive service.",
+ evidence=f"Accepted random creds {ruser}:{rpass}",
+ remediation="Investigate immediately — authentication is non-functional.",
+ owasp_id="A07:2021",
+ cwe_id="CWE-287",
+ confidence="certain",
+ ))
+
+ return probe_result(raw_data=result, findings=findings)
+
+
+ def _service_info_smb(self, target, port): # default port: 445
"""
Probe SMB services for negotiation responses.
@@ -391,10 +2199,11 @@ def _service_info_445(self, target, port):
Returns
-------
- str | None
- SMB response summary or error message.
+ dict
+ Structured findings.
"""
- info = None
+ findings = []
+ raw = {"banner": None}
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(3)
@@ -403,19 +2212,324 @@ def _service_info_445(self, target, port):
sock.sendall(probe)
data = sock.recv(4)
if data:
- info = "VULNERABILITY: SMB service responded to negotiation probe."
+ raw["banner"] = "SMB negotiation response received"
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title="SMB service responded to negotiation probe",
+ description=f"SMB on {target}:{port} accepts negotiation requests, "
+ "exposing the host to SMB relay and enumeration attacks.",
+ evidence=f"SMB negotiate response: {data.hex()[:24]}",
+ remediation="Restrict SMB access to trusted networks; disable SMBv1.",
+ owasp_id="A01:2021",
+ cwe_id="CWE-284",
+ confidence="certain",
+ ))
else:
- info = "SMB port open but no negotiation response."
+ raw["banner"] = "SMB port open (no response)"
+ findings.append(Finding(
+ severity=Severity.INFO,
+ title="SMB port open but no negotiation response",
+ description=f"Port {port} is open but SMB did not respond to negotiation.",
+ confidence="tentative",
+ ))
sock.close()
except Exception as e:
- info = f"SMB probe failed on {target}:{port}: {e}"
- self.P(info, color='y')
- return info
+ return probe_error(target, port, "SMB", e)
+ return probe_result(raw_data=raw, findings=findings)
+
+
+ # NetBIOS name suffix → human-readable type
+ _NBNS_SUFFIX_TYPES = {
+ 0x00: "Workstation",
+ 0x03: "Messenger (logged-in user)",
+ 0x20: "File Server (SMB sharing)",
+ 0x1C: "Domain Controller",
+ 0x1B: "Domain Master Browser",
+ 0x1E: "Browser Election Service",
+ }
+
+ def _service_info_wins(self, target, port): # ports: 42 (WINS/TCP), 137 (NBNS/UDP)
+ """
+ Probe WINS / NetBIOS Name Service for name enumeration and service detection.
+
+ Port 42 (TCP): WINS replication — sends MS-WINSRA Association Start Request
+ to fingerprint the service and extract NBNS version. Also fires a UDP
+ side-probe to port 137 for NetBIOS name enumeration.
+ Port 137 (UDP): NBNS — sends wildcard node-status query (RFC 1002) to
+ enumerate registered NetBIOS names.
+
+ Parameters
+ ----------
+ target : str
+ Hostname or IP address.
+ port : int
+ Port being probed.
+
+ Returns
+ -------
+ dict
+ Structured findings.
+ """
+ findings = []
+ raw = {"banner": None, "netbios_names": [], "wins_responded": False}
+
+ # -- Build NetBIOS wildcard node-status query (RFC 1002) --
+ tid = struct.pack('>H', random.randint(0, 0xFFFF))
+ # Flags: 0x0010 (recursion desired)
+ # Questions: 1, Answers/Auth/Additional: 0
+ header = tid + struct.pack('>HHHHH', 0x0010, 1, 0, 0, 0)
+ # Encoded wildcard name "*" (first-level NetBIOS encoding)
+ # '*' (0x2A) → half-bytes 0x02, 0x0A → chars 'C','K', padded with 'A' (0x00 half-bytes)
+ qname = b'\x20' + b'CKAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA' + b'\x00'
+ # Type: NBSTAT (0x0021), Class: IN (0x0001)
+ question = struct.pack('>HH', 0x0021, 0x0001)
+ nbns_query = header + qname + question
+
+ def _parse_nbns_response(data):
+ """Parse a NetBIOS node-status response and return list of (name, suffix, flags)."""
+ names = []
+ if len(data) < 14:
+ return names
+ # Verify transaction ID matches
+ if data[:2] != tid:
+ return names
+ ancount = struct.unpack('>H', data[6:8])[0]
+ if ancount == 0:
+ return names
+ # Skip past header (12 bytes) then answer name (compressed pointer or full)
+ idx = 12
+ if idx < len(data) and data[idx] & 0xC0 == 0xC0:
+ idx += 2
+ else:
+ while idx < len(data) and data[idx] != 0:
+ idx += data[idx] + 1
+ idx += 1
+ # Type (2) + Class (2) + TTL (4) + RDLength (2) = 10 bytes
+ if idx + 10 > len(data):
+ return names
+ idx += 10
+ if idx >= len(data):
+ return names
+ num_names = data[idx]
+ idx += 1
+ # Each name entry: 15 bytes name + 1 byte suffix + 2 bytes flags = 18 bytes
+ for _ in range(num_names):
+ if idx + 18 > len(data):
+ break
+ name_bytes = data[idx:idx + 15]
+ suffix = data[idx + 15]
+ flags = struct.unpack('>H', data[idx + 16:idx + 18])[0]
+ name = name_bytes.decode('ascii', errors='ignore').rstrip()
+ names.append((name, suffix, flags))
+ idx += 18
+ return names
+
+ def _udp_nbns_probe(udp_port):
+ """Send UDP NBNS wildcard query, return parsed names or empty list."""
+ sock = None
+ try:
+ sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ sock.settimeout(3)
+ sock.sendto(nbns_query, (target, udp_port))
+ data, _ = sock.recvfrom(1024)
+ return _parse_nbns_response(data)
+ except Exception:
+ return []
+ finally:
+ if sock is not None:
+ sock.close()
+
+ def _add_nbns_findings(names, probe_label):
+ """Populate raw data and findings from enumerated NetBIOS names."""
+ raw["netbios_names"] = [
+ {"name": n, "suffix": f"0x{s:02X}", "type": self._NBNS_SUFFIX_TYPES.get(s, f"Unknown(0x{s:02X})")}
+ for n, s, _f in names
+ ]
+ name_list = "; ".join(
+ f"{n} <{s:02X}> ({self._NBNS_SUFFIX_TYPES.get(s, 'unknown')})"
+ for n, s, _f in names
+ )
+ findings.append(Finding(
+ severity=Severity.HIGH,
+ title="NetBIOS name enumeration successful",
+ description=(
+ f"{probe_label} responded to a wildcard node-status query, "
+ "leaking computer name, domain membership, and potentially logged-in users."
+ ),
+ evidence=f"Names: {name_list[:200]}",
+ remediation="Block UDP port 137 at the firewall; disable NetBIOS over TCP/IP in network adapter settings.",
+ owasp_id="A01:2021",
+ cwe_id="CWE-200",
+ confidence="certain",
+ ))
+ findings.append(Finding(
+ severity=Severity.INFO,
+ title=f"NetBIOS names discovered ({len(names)} entries)",
+ description=f"Enumerated names: {name_list}",
+ evidence=f"Names: {name_list[:300]}",
+ confidence="certain",
+ ))
+
+ try:
+ if port == 137:
+ # -- Direct UDP NBNS probe --
+ names = _udp_nbns_probe(137)
+ if names:
+ raw["banner"] = f"NBNS: {len(names)} name(s) enumerated"
+ _add_nbns_findings(names, f"NBNS on {target}:{port}")
+ else:
+ raw["banner"] = "NBNS port open (no response to wildcard query)"
+ findings.append(Finding(
+ severity=Severity.INFO,
+ title="NBNS port open but no names returned",
+ description=f"UDP port {port} on {target} did not respond to NetBIOS wildcard query.",
+ confidence="tentative",
+ ))
+ else:
+ # -- TCP WINS replication probe (MS-WINSRA Association Start Request) --
+ # Also attempt UDP NBNS side-probe to port 137 for name enumeration
+ names = _udp_nbns_probe(137)
+ if names:
+ _add_nbns_findings(names, f"NBNS side-probe to {target}:137")
+
+ # Build MS-WINSRA Association Start Request per [MS-WINSRA] §2.2.3:
+ # Common Header (16 bytes):
+ # Packet Length: 41 (0x00000029) — excludes this field
+ # Reserved: 0x00007800 (opcode, ignored by spec)
+ # Destination Assoc Handle: 0x00000000 (first message, unknown)
+ # Message Type: 0x00000000 (Association Start Request)
+ # Body (25 bytes):
+ # Sender Assoc Handle: random 4 bytes
+ # NBNS Major Version: 2 (required)
+ # NBNS Minor Version: 5 (Win2k+)
+ # Reserved: 21 zero bytes (pad to 41)
+ sender_ctx = random.randint(1, 0xFFFFFFFF)
+ wrepl_header = struct.pack('>I', 41) # Packet Length
+ wrepl_header += struct.pack('>I', 0x00007800) # Reserved / opcode
+ wrepl_header += struct.pack('>I', 0) # Destination Assoc Handle
+ wrepl_header += struct.pack('>I', 0) # Message Type: Start Request
+ wrepl_body = struct.pack('>I', sender_ctx) # Sender Assoc Handle
+ wrepl_body += struct.pack('>HH', 2, 5) # Major=2, Minor=5
+ wrepl_body += b'\x00' * 21 # Reserved padding
+ wrepl_packet = wrepl_header + wrepl_body
+
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.settimeout(3)
+ sock.connect((target, port))
+ sock.sendall(wrepl_packet)
+
+ # Distinguish three recv outcomes:
+ # data received → parse as WREPL (confirmed WINS)
+ # timeout → connection held open, no reply (likely WINS, non-partner)
+ # empty / closed → server sent FIN immediately (unconfirmed service)
+ data = None
+ recv_timed_out = False
+ try:
+ data = sock.recv(1024)
+ except socket.timeout:
+ recv_timed_out = True
+ finally:
+ sock.close()
+
+ if data and len(data) >= 20:
+ raw["wins_responded"] = True
+ # Parse response: first 4 bytes = Packet Length, next 16 = common header
+ resp_msg_type = struct.unpack('>I', data[12:16])[0] if len(data) >= 16 else None
+ version_info = ""
+ if resp_msg_type == 1 and len(data) >= 24:
+ # Association Start Response — extract version
+ resp_major = struct.unpack('>H', data[20:22])[0] if len(data) >= 22 else None
+ resp_minor = struct.unpack('>H', data[22:24])[0] if len(data) >= 24 else None
+ if resp_major is not None:
+ version_info = f" (NBNS version {resp_major}.{resp_minor})"
+ raw["nbns_version"] = {"major": resp_major, "minor": resp_minor}
+ raw["banner"] = f"WINS replication service{version_info}"
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title="WINS replication service exposed",
+ description=(
+ f"WINS on {target}:{port} responded to a WREPL Association Start Request{version_info}. "
+ "WINS is a legacy name-resolution service vulnerable to spoofing, enumeration, and "
+ "multiple remote code execution flaws (CVE-2004-1080, CVE-2009-1923, CVE-2009-1924). "
+ "It should not be accessible from untrusted networks."
+ ),
+ evidence=f"WREPL response ({len(data)} bytes): {data[:24].hex()}",
+ remediation=(
+ "Decommission WINS or restrict TCP port 42 to trusted replication partners. "
+ "If WINS is required, apply all patches (MS04-045, MS09-039) and set the registry key "
+ "RplOnlyWCnfPnrs=1 to accept replication only from configured partners."
+ ),
+ owasp_id="A01:2021",
+ cwe_id="CWE-284",
+ confidence="certain",
+ ))
+ elif data:
+ # Got some data but not enough for a valid WREPL response
+ raw["wins_responded"] = True
+ raw["banner"] = f"Port {port} responded ({len(data)} bytes, non-WREPL)"
+ findings.append(Finding(
+ severity=Severity.LOW,
+ title=f"Service on port {port} responded but is not standard WINS",
+ description=(
+ f"TCP port {port} on {target} returned data that does not match the "
+ "WINS replication protocol (MS-WINSRA). Another service may be listening."
+ ),
+ evidence=f"Response ({len(data)} bytes): {data[:32].hex()}",
+ confidence="tentative",
+ ))
+ elif recv_timed_out:
+ # Connection accepted AND held open after our WREPL packet, but no
+ # reply — consistent with WINS silently dropping a non-partner request
+ # (RplOnlyWCnfPnrs=1). A non-WINS service would typically RST or FIN.
+ raw["banner"] = "WINS likely (connection held, no WREPL reply)"
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title="WINS replication port open (non-partner rejected)",
+ description=(
+ f"TCP port {port} on {target} accepted a WREPL Association Start Request "
+ "and held the connection open without responding, consistent with a WINS "
+ "server configured to reject non-partner replication (RplOnlyWCnfPnrs=1). "
+ "An exposed WINS port is a legacy attack surface subject to remote code "
+ "execution flaws (CVE-2004-1080, CVE-2009-1923, CVE-2009-1924)."
+ ),
+ evidence="TCP connection accepted and held open; WREPL handshake: no reply after 3 s",
+ remediation=(
+ "Block TCP port 42 at the firewall if WINS replication is not needed. "
+ "If required, restrict to trusted replication partners only."
+ ),
+ owasp_id="A01:2021",
+ cwe_id="CWE-284",
+ confidence="firm",
+ ))
+ else:
+ # recv returned empty — server immediately closed the connection.
+ # Cannot confirm WINS; don't produce a finding. The port scan
+ # already reports the open port; a "service unconfirmed" finding
+ # adds no actionable value to the report.
+ pass
+ except Exception as e:
+ return probe_error(target, port, "WINS/NBNS", e)
+
+ if not findings:
+ # Could not confirm WINS — downgrade the protocol label so the UI
+ # does not display an unverified "WINS" tag from WELL_KNOWN_PORTS.
+ port_protocols = self.state.get("port_protocols")
+ if port_protocols and port_protocols.get(port) in ("wins", "nbns"):
+ port_protocols[port] = "unknown"
+ return None
+ return probe_result(raw_data=raw, findings=findings)
- def _service_info_5900(self, target, port):
+
+ def _service_info_vnc(self, target, port): # default port: 5900
"""
- Read VNC handshake string to assess remote desktop exposure.
+ VNC handshake: read version banner, negotiate security types.
+
+ Security types:
+ 1 (None) → CRITICAL: unauthenticated desktop access
+ 2 (VNC Auth) → MEDIUM: DES-based, max 8-char password
+ 19 (VeNCrypt) → INFO: TLS-secured
+ Other → LOW: unknown auth type
Parameters
----------
@@ -426,27 +2540,97 @@ def _service_info_5900(self, target, port):
Returns
-------
- str | None
- VNC banner summary or error message.
+ dict
+ Structured findings.
"""
- info = None
+ findings = []
+ raw = {"banner": None, "security_types": []}
+
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(3)
sock.connect((target, port))
- banner = sock.recv(12).decode('ascii', errors='ignore')
- if banner:
- info = f"VULNERABILITY: VNC protocol banner: {banner.strip()}"
- else:
- info = "VULNERABILITY: VNC open with no banner"
+
+ # Read server banner (e.g. "RFB 003.008\n")
+ banner = sock.recv(12).decode('ascii', errors='ignore').strip()
+ raw["banner"] = banner
+
+ if not banner.startswith("RFB"):
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title=f"VNC service detected (non-standard banner: {banner[:30]})",
+ description="VNC port open but banner is non-standard.",
+ evidence=f"Banner: {banner}",
+ remediation="Restrict VNC access to trusted networks or use SSH tunneling.",
+ confidence="tentative",
+ ))
+ sock.close()
+ return probe_result(raw_data=raw, findings=findings)
+
+ # Echo version back to negotiate
+ sock.sendall(banner.encode('ascii') + b"\n")
+
+ # Read security type list
+ sec_data = sock.recv(64)
+ sec_types = []
+ if len(sec_data) >= 1:
+ num_types = sec_data[0]
+ if num_types > 0 and len(sec_data) >= 1 + num_types:
+ sec_types = list(sec_data[1:1 + num_types])
+ raw["security_types"] = sec_types
sock.close()
+
+ _VNC_TYPE_NAMES = {1: "None", 2: "VNC Auth", 19: "VeNCrypt", 16: "Tight"}
+ type_labels = [f"{t}({_VNC_TYPE_NAMES.get(t, 'unknown')})" for t in sec_types]
+ raw["security_type_labels"] = type_labels
+
+ if 1 in sec_types:
+ findings.append(Finding(
+ severity=Severity.CRITICAL,
+ title="VNC unauthenticated access (security type None)",
+ description=f"VNC on {target}:{port} allows connections without authentication.",
+ evidence=f"Banner: {banner}, security types: {type_labels}",
+ remediation="Disable security type None and require VNC Auth or VeNCrypt.",
+ owasp_id="A07:2021",
+ cwe_id="CWE-287",
+ confidence="certain",
+ ))
+ if 2 in sec_types:
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title="VNC password auth (DES-based, max 8 chars)",
+ description=f"VNC Auth uses DES encryption with a maximum 8-character password.",
+ evidence=f"Banner: {banner}, security types: {type_labels}",
+ remediation="Use VeNCrypt (TLS) or SSH tunneling instead of plain VNC Auth.",
+ owasp_id="A02:2021",
+ cwe_id="CWE-326",
+ confidence="certain",
+ ))
+ if 19 in sec_types:
+ findings.append(Finding(
+ severity=Severity.INFO,
+ title="VNC VeNCrypt (TLS-secured)",
+ description="VeNCrypt provides TLS-secured VNC connections.",
+ evidence=f"Banner: {banner}, security types: {type_labels}",
+ confidence="certain",
+ ))
+ if not sec_types:
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title=f"VNC service exposed: {banner}",
+ description="VNC protocol banner detected but security types could not be parsed.",
+ evidence=f"Banner: {banner}",
+ remediation="Restrict VNC access to trusted networks.",
+ confidence="firm",
+ ))
+
except Exception as e:
- info = f"VNC probe failed on {target}:{port}: {e}"
- self.P(info, color='y')
- return info
+ return probe_error(target, port, "VNC", e)
+
+ return probe_result(raw_data=raw, findings=findings)
- def _service_info_161(self, target, port):
+ def _service_info_snmp(self, target, port): # default port: 161
"""
Attempt SNMP community string disclosure using 'public'.
@@ -459,10 +2643,11 @@ def _service_info_161(self, target, port):
Returns
-------
- str | None
- SNMP response summary or error message.
+ dict
+ Structured findings.
"""
- info = None
+ findings = []
+ raw = {"banner": None}
sock = None
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
@@ -474,25 +2659,38 @@ def _service_info_161(self, target, port):
data, _ = sock.recvfrom(512)
readable = ''.join(chr(b) if 32 <= b < 127 else '.' for b in data)
if 'public' in readable.lower():
- info = (
- f"VULNERABILITY: SNMP responds to community 'public' on {target}:{port}"
- f" (response: {readable.strip()[:120]})"
- )
+ raw["banner"] = readable.strip()[:120]
+ findings.append(Finding(
+ severity=Severity.HIGH,
+ title="SNMP default community string 'public' accepted",
+ description="SNMP agent responds to the default 'public' community string, "
+ "allowing unauthenticated read access to device configuration and network data.",
+ evidence=f"Response: {readable.strip()[:80]}",
+ remediation="Change the community string from 'public' to a strong value; migrate to SNMPv3.",
+ owasp_id="A07:2021",
+ cwe_id="CWE-798",
+ confidence="certain",
+ ))
else:
- info = f"SNMP response: {readable.strip()[:120]}"
+ raw["banner"] = readable.strip()[:120]
+ findings.append(Finding(
+ severity=Severity.INFO,
+ title="SNMP service responded",
+ description=f"SNMP agent on {target}:{port} responded but did not accept 'public' community.",
+ evidence=f"Response: {readable.strip()[:80]}",
+ confidence="firm",
+ ))
except socket.timeout:
- info = f"SNMP probe timed out on {target}:{port}"
- self.P(info, color='y')
+ return probe_error(target, port, "SNMP", Exception("timed out"))
except Exception as e:
- info = f"SNMP probe failed on {target}:{port}: {e}"
- self.P(info, color='y')
+ return probe_error(target, port, "SNMP", e)
finally:
if sock is not None:
sock.close()
- return info
+ return probe_result(raw_data=raw, findings=findings)
- def _service_info_53(self, target, port):
+ def _service_info_dns(self, target, port): # default port: 53
"""
Query CHAOS TXT version.bind to detect DNS version disclosure.
@@ -505,10 +2703,11 @@ def _service_info_53(self, target, port):
Returns
-------
- str | None
- DNS disclosure summary or error message.
+ dict
+ Structured findings.
"""
- info = None
+ findings = []
+ raw = {"banner": None, "dns_version": None}
sock = None
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
@@ -520,56 +2719,76 @@ def _service_info_53(self, target, port):
packet = header + qname + question
sock.sendto(packet, (target, port))
data, _ = sock.recvfrom(512)
- if len(data) < 12:
- return f"DNS CHAOS response too short on {target}:{port}"
- if struct.unpack('>H', data[:2])[0] != tid:
- return f"DNS CHAOS response transaction mismatch on {target}:{port}"
- ancount = struct.unpack('>H', data[6:8])[0]
- if not ancount:
- return f"DNS CHAOS response missing answers on {target}:{port}"
- idx = 12 + len(qname) + 4
- if idx >= len(data):
- return f"DNS CHAOS response truncated after question on {target}:{port}"
- if data[idx] & 0xc0 == 0xc0:
- idx += 2
- else:
- while idx < len(data) and data[idx] != 0:
- idx += data[idx] + 1
- idx += 1
- idx += 8
- if idx + 2 > len(data):
- return f"DNS CHAOS response missing TXT length on {target}:{port}"
- rdlength = struct.unpack('>H', data[idx:idx+2])[0]
- idx += 2
- if idx >= len(data):
- return f"DNS CHAOS response missing TXT payload on {target}:{port}"
- txt_length = data[idx]
- txt = data[idx+1:idx+1+txt_length].decode('utf-8', errors='ignore')
- if txt:
- info = (
- f"VULNERABILITY: DNS version disclosure '{txt}' via CHAOS TXT on {target}:{port}"
- )
- if info is None:
+
+ # Parse CHAOS TXT response
+ parsed = False
+ if len(data) >= 12 and struct.unpack('>H', data[:2])[0] == tid:
+ ancount = struct.unpack('>H', data[6:8])[0]
+ if ancount:
+ idx = 12 + len(qname) + 4
+ if idx < len(data):
+ if data[idx] & 0xc0 == 0xc0:
+ idx += 2
+ else:
+ while idx < len(data) and data[idx] != 0:
+ idx += data[idx] + 1
+ idx += 1
+ idx += 8
+ if idx + 2 <= len(data):
+ rdlength = struct.unpack('>H', data[idx:idx+2])[0]
+ idx += 2
+ if idx < len(data):
+ txt_length = data[idx]
+ txt = data[idx+1:idx+1+txt_length].decode('utf-8', errors='ignore')
+ if txt:
+ raw["dns_version"] = txt
+ raw["banner"] = f"DNS version: {txt}"
+ findings.append(Finding(
+ severity=Severity.LOW,
+ title=f"DNS version disclosure: {txt}",
+ description=f"CHAOS TXT version.bind query reveals DNS software version.",
+ evidence=f"version.bind TXT: {txt}",
+ remediation="Disable version.bind responses in the DNS server configuration.",
+ owasp_id="A05:2021",
+ cwe_id="CWE-200",
+ confidence="certain",
+ ))
+ parsed = True
+
+ # Fallback: check raw data for version keywords
+ if not parsed:
readable = ''.join(chr(b) if 32 <= b < 127 else '.' for b in data)
if 'bind' in readable.lower() or 'version' in readable.lower():
- info = (
- f"VULNERABILITY: DNS version disclosure via CHAOS TXT on {target}:{port}"
- )
- if info is None:
- info = f"DNS CHAOS TXT query did not disclose version on {target}:{port}"
+ raw["banner"] = readable.strip()[:80]
+ findings.append(Finding(
+ severity=Severity.LOW,
+ title="DNS version disclosure via CHAOS TXT",
+ description=f"CHAOS TXT response on {target}:{port} contains version keywords.",
+ evidence=f"Response contains: {readable.strip()[:80]}",
+ remediation="Disable version.bind responses in the DNS server configuration.",
+ owasp_id="A05:2021",
+ cwe_id="CWE-200",
+ confidence="firm",
+ ))
+ else:
+ raw["banner"] = "DNS service responding"
+ findings.append(Finding(
+ severity=Severity.INFO,
+ title="DNS CHAOS TXT query did not disclose version",
+ description=f"DNS on {target}:{port} responded but did not reveal version.",
+ confidence="firm",
+ ))
except socket.timeout:
- info = f"DNS CHAOS query timed out on {target}:{port}"
- self.P(info, color='y')
+ return probe_error(target, port, "DNS", Exception("CHAOS query timed out"))
except Exception as e:
- info = f"DNS CHAOS query failed on {target}:{port}: {e}"
- self.P(info, color='y')
+ return probe_error(target, port, "DNS", e)
finally:
if sock is not None:
sock.close()
- return info
+ return probe_result(raw_data=raw, findings=findings)
- def _service_info_1433(self, target, port):
+ def _service_info_mssql(self, target, port): # default port: 1433
"""
Send a TDS prelogin probe to expose SQL Server version data.
@@ -582,10 +2801,11 @@ def _service_info_1433(self, target, port):
Returns
-------
- str | None
- MSSQL response summary or error message.
+ dict
+ Structured findings.
"""
- info = None
+ findings = []
+ raw = {"banner": None}
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(3)
@@ -597,20 +2817,33 @@ def _service_info_1433(self, target, port):
data = sock.recv(256)
if data:
readable = ''.join(chr(b) if 32 <= b < 127 else '.' for b in data)
- info = (
- f"VULNERABILITY: MSSQL prelogin succeeded on {target}:{port}"
- f" (response: {readable.strip()[:120]})"
- )
+ raw["banner"] = f"MSSQL prelogin response: {readable.strip()[:80]}"
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title="MSSQL prelogin handshake succeeded",
+ description=f"SQL Server on {target}:{port} responds to TDS prelogin, "
+ "exposing version metadata and confirming the service is reachable.",
+ evidence=f"Prelogin response: {readable.strip()[:80]}",
+ remediation="Restrict SQL Server access to trusted networks; use firewall rules.",
+ owasp_id="A05:2021",
+ cwe_id="CWE-200",
+ confidence="certain",
+ ))
sock.close()
except Exception as e:
- info = f"MSSQL probe failed on {target}:{port}: {e}"
- self.P(info, color='y')
- return info
+ return probe_error(target, port, "MSSQL", e)
+ return probe_result(raw_data=raw, findings=findings)
- def _service_info_5432(self, target, port):
+ def _service_info_postgresql(self, target, port): # default port: 5432
"""
- Probe PostgreSQL for weak authentication methods.
+ Probe PostgreSQL authentication method by parsing the auth response byte.
+
+ Auth codes:
+ 0 = AuthenticationOk (trust auth) → CRITICAL
+ 3 = CleartextPassword → MEDIUM
+ 5 = MD5Password → INFO (adequate, prefer SCRAM)
+ 10 = SASL (SCRAM-SHA-256) → INFO (strong)
Parameters
----------
@@ -621,10 +2854,11 @@ def _service_info_5432(self, target, port):
Returns
-------
- str | None
- PostgreSQL response summary or error message.
+ dict
+ Structured findings.
"""
- info = None
+ findings = []
+ raw = {"auth_type": None}
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(3)
@@ -633,20 +2867,168 @@ def _service_info_5432(self, target, port):
startup = struct.pack('!I', len(payload) + 8) + struct.pack('!I', 196608) + payload
sock.sendall(startup)
data = sock.recv(128)
- if b'AuthenticationCleartextPassword' in data:
- info = (
- f"VULNERABILITY: PostgreSQL requests cleartext passwords on {target}:{port}"
- )
- elif b'AuthenticationOk' in data:
- info = f"PostgreSQL responded with AuthenticationOk on {target}:{port}"
sock.close()
+
+ # Parse auth response: type byte 'R' (0x52), then int32 length, then int32 auth code
+ if len(data) >= 9 and data[0:1] == b'R':
+ auth_code = struct.unpack('!I', data[5:9])[0]
+ raw["auth_type"] = auth_code
+ if auth_code == 0:
+ findings.append(Finding(
+ severity=Severity.CRITICAL,
+ title="PostgreSQL trust authentication (no password)",
+ description=f"PostgreSQL on {target}:{port} accepts connections without any password (auth code 0).",
+ evidence=f"Auth response code: {auth_code}",
+ remediation="Configure pg_hba.conf to require password or SCRAM authentication.",
+ owasp_id="A07:2021",
+ cwe_id="CWE-287",
+ confidence="certain",
+ ))
+ elif auth_code == 3:
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title="PostgreSQL cleartext password authentication",
+ description=f"PostgreSQL on {target}:{port} requests cleartext passwords.",
+ evidence=f"Auth response code: {auth_code}",
+ remediation="Switch to SCRAM-SHA-256 authentication in pg_hba.conf.",
+ owasp_id="A02:2021",
+ cwe_id="CWE-319",
+ confidence="certain",
+ ))
+ elif auth_code == 5:
+ findings.append(Finding(
+ severity=Severity.INFO,
+ title="PostgreSQL MD5 authentication",
+ description="MD5 password auth is adequate but SCRAM-SHA-256 is preferred.",
+ evidence=f"Auth response code: {auth_code}",
+ remediation="Consider upgrading to SCRAM-SHA-256.",
+ confidence="certain",
+ ))
+ elif auth_code == 10:
+ findings.append(Finding(
+ severity=Severity.INFO,
+ title="PostgreSQL SASL/SCRAM authentication",
+ description="Strong authentication (SCRAM-SHA-256) is in use.",
+ evidence=f"Auth response code: {auth_code}",
+ confidence="certain",
+ ))
+ elif b'AuthenticationCleartextPassword' in data:
+ # Fallback: text-based detection for older/non-standard servers
+ raw["auth_type"] = "cleartext_text"
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title="PostgreSQL cleartext password authentication",
+ description=f"PostgreSQL on {target}:{port} requests cleartext passwords.",
+ evidence="Text response contained AuthenticationCleartextPassword",
+ remediation="Switch to SCRAM-SHA-256 authentication.",
+ owasp_id="A02:2021",
+ cwe_id="CWE-319",
+ confidence="firm",
+ ))
+ elif b'AuthenticationOk' in data:
+ raw["auth_type"] = "ok_text"
+ findings.append(Finding(
+ severity=Severity.CRITICAL,
+ title="PostgreSQL trust authentication (no password)",
+ description=f"PostgreSQL on {target}:{port} accepted connection without authentication.",
+ evidence="Text response contained AuthenticationOk",
+ remediation="Configure pg_hba.conf to require password authentication.",
+ owasp_id="A07:2021",
+ cwe_id="CWE-287",
+ confidence="firm",
+ ))
+
+ if not findings:
+ findings.append(Finding(Severity.INFO, "PostgreSQL probe completed", "No auth weakness detected."))
except Exception as e:
- info = f"PostgreSQL probe failed on {target}:{port}: {e}"
- self.P(info, color='y')
- return info
+ return probe_error(target, port, "PostgreSQL", e)
+
+ return probe_result(raw_data=raw, findings=findings)
+
+ def _service_info_postgresql_creds(self, target, port): # default port: 5432
+ """
+ PostgreSQL default credential testing (opt-in via active_auth feature group).
+
+ Attempts cleartext password auth with common defaults.
+
+ Parameters
+ ----------
+ target : str
+ Hostname or IP address.
+ port : int
+ Port being probed.
+ Returns
+ -------
+ dict
+ Structured findings.
+ """
+ findings = []
+ raw = {"tested_credentials": 0, "accepted_credentials": []}
+ creds = [("postgres", ""), ("postgres", "postgres"), ("postgres", "password")]
- def _service_info_11211(self, target, port):
+ for username, password in creds:
+ try:
+ sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+ sock.settimeout(3)
+ sock.connect((target, port))
+ payload = f'user\x00{username}\x00database\x00postgres\x00\x00'.encode()
+ startup = struct.pack('!I', len(payload) + 8) + struct.pack('!I', 196608) + payload
+ sock.sendall(startup)
+ data = sock.recv(128)
+
+ if len(data) >= 9 and data[0:1] == b'R':
+ auth_code = struct.unpack('!I', data[5:9])[0]
+ if auth_code == 0:
+ cred_str = f"{username}:(empty)" if not password else f"{username}:{password}"
+ raw["accepted_credentials"].append(cred_str)
+ findings.append(Finding(
+ severity=Severity.CRITICAL,
+ title=f"PostgreSQL trust auth for {username}",
+ description=f"No password required for user {username}.",
+ evidence=f"Auth code 0 for {cred_str}",
+ remediation="Configure pg_hba.conf to require authentication.",
+ owasp_id="A07:2021",
+ cwe_id="CWE-287",
+ confidence="certain",
+ ))
+ elif auth_code == 3:
+ # Send cleartext password
+ pwd_bytes = password.encode() + b'\x00'
+ pwd_msg = b'p' + struct.pack('!I', len(pwd_bytes) + 4) + pwd_bytes
+ sock.sendall(pwd_msg)
+ resp = sock.recv(128)
+ if resp and resp[0:1] == b'R' and len(resp) >= 9:
+ result_code = struct.unpack('!I', resp[5:9])[0]
+ if result_code == 0:
+ cred_str = f"{username}:{password}" if password else f"{username}:(empty)"
+ raw["accepted_credentials"].append(cred_str)
+ findings.append(Finding(
+ severity=Severity.CRITICAL,
+ title=f"PostgreSQL default credential accepted: {cred_str}",
+ description=f"Cleartext password auth accepted for {cred_str}.",
+ evidence=f"Auth OK for {cred_str}",
+ remediation="Change default passwords.",
+ owasp_id="A07:2021",
+ cwe_id="CWE-798",
+ confidence="certain",
+ ))
+ raw["tested_credentials"] += 1
+ sock.close()
+ except Exception:
+ continue
+
+ if not findings:
+ findings.append(Finding(
+ severity=Severity.INFO,
+ title="PostgreSQL default credentials rejected",
+ description=f"Tested {raw['tested_credentials']} credential pairs.",
+ confidence="certain",
+ ))
+
+ return probe_result(raw_data=raw, findings=findings)
+
+ def _service_info_memcached(self, target, port): # default port: 11211
"""
Issue Memcached stats command to detect unauthenticated access.
@@ -659,30 +3041,66 @@ def _service_info_11211(self, target, port):
Returns
-------
- str | None
- Memcached response summary or error message.
+ dict
+ Structured findings.
"""
- info = None
+ findings = []
+ raw = {"banner": None}
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(2)
sock.connect((target, port))
+
+ # Extract version
+ sock.sendall(b'version\r\n')
+ ver_data = sock.recv(64).decode("utf-8", errors="replace").strip()
+ ver_match = _re.match(r'VERSION\s+(\d+(?:\.\d+)+)', ver_data)
+ if ver_match:
+ raw["version"] = ver_match.group(1)
+ findings.append(Finding(
+ severity=Severity.LOW,
+ title=f"Memcached version disclosed: {raw['version']}",
+ description=f"Memcached on {target}:{port} reveals version via VERSION command.",
+ evidence=f"VERSION {raw['version']}",
+ remediation="Restrict access to memcached to trusted networks.",
+ cwe_id="CWE-200",
+ confidence="certain",
+ ))
+ findings += check_cves("memcached", raw["version"])
+
sock.sendall(b'stats\r\n')
data = sock.recv(128)
if data.startswith(b'STAT'):
- info = (
- f"VULNERABILITY: Memcached stats accessible without auth on {target}:{port}"
- )
+ raw["banner"] = data.decode("utf-8", errors="replace").strip()[:120]
+ findings.append(Finding(
+ severity=Severity.HIGH,
+ title="Memcached stats accessible without authentication",
+ description=f"Memcached on {target}:{port} responds to stats without authentication, "
+ "exposing cache metadata and enabling cache poisoning or data exfiltration.",
+ evidence=f"stats command returned: {raw['banner'][:80]}",
+ remediation="Bind Memcached to localhost or use SASL authentication; restrict network access.",
+ owasp_id="A07:2021",
+ cwe_id="CWE-287",
+ confidence="certain",
+ ))
+ else:
+ raw["banner"] = "Memcached port open"
+ findings.append(Finding(
+ severity=Severity.INFO,
+ title="Memcached port open",
+ description=f"Memcached port {port} is open on {target} but stats command was not accepted.",
+ evidence=f"Response: {data[:60].decode('utf-8', errors='replace')}",
+ confidence="firm",
+ ))
sock.close()
except Exception as e:
- info = f"Memcached probe failed on {target}:{port}: {e}"
- self.P(info, color='y')
- return info
+ return probe_error(target, port, "Memcached", e)
+ return probe_result(raw_data=raw, findings=findings)
- def _service_info_9200(self, target, port):
+ def _service_info_elasticsearch(self, target, port): # default port: 9200
"""
- Detect Elasticsearch/OpenSearch nodes leaking cluster metadata.
+ Deep Elasticsearch probe: cluster info, index listing, node IPs, CVE matching.
Parameters
----------
@@ -693,27 +3111,135 @@ def _service_info_9200(self, target, port):
Returns
-------
- str | None
- Elasticsearch exposure summary or error message.
+ dict
+ Structured findings.
"""
- info = None
- try:
- scheme = "http"
- base_url = f"{scheme}://{target}"
- if port != 80:
- base_url = f"{scheme}://{target}:{port}"
- resp = requests.get(base_url, timeout=3)
- if resp.ok and 'cluster_name' in resp.text:
- info = (
- f"VULNERABILITY: Elasticsearch cluster metadata exposed at {base_url}"
- )
- except Exception as e:
- info = f"Elasticsearch probe failed on {target}:{port}: {e}"
- self.P(info, color='y')
- return info
+ findings, raw = [], {"cluster_name": None, "version": None}
+ base_url = f"http://{target}" if port == 80 else f"http://{target}:{port}"
+
+ # First check if this is actually Elasticsearch (GET / must return JSON with cluster_name or tagline)
+ findings += self._es_check_root(base_url, raw)
+ if not raw["cluster_name"] and not raw.get("tagline"):
+ # Not Elasticsearch — skip further probing to avoid noise on regular HTTP ports
+ return None
+
+ findings += self._es_check_indices(base_url, raw)
+ findings += self._es_check_nodes(base_url, raw)
+ if raw["version"]:
+ findings += check_cves("elasticsearch", raw["version"])
- def _service_info_502(self, target, port):
+ if not findings:
+ findings.append(Finding(Severity.INFO, "Elasticsearch probe clean", "No issues detected."))
+
+ return probe_result(raw_data=raw, findings=findings)
+
+ def _es_check_root(self, base_url, raw):
+ """GET / — extract version, cluster name."""
+ findings = []
+ try:
+ resp = requests.get(base_url, timeout=3)
+ if resp.ok:
+ try:
+ data = resp.json()
+ raw["cluster_name"] = data.get("cluster_name")
+ ver_info = data.get("version", {})
+ raw["version"] = ver_info.get("number") if isinstance(ver_info, dict) else None
+ raw["tagline"] = data.get("tagline")
+ findings.append(Finding(
+ severity=Severity.HIGH,
+ title=f"Elasticsearch cluster metadata exposed",
+ description=f"Cluster '{raw['cluster_name']}' version {raw['version']} accessible without auth.",
+ evidence=f"cluster={raw['cluster_name']}, version={raw['version']}",
+ remediation="Enable X-Pack security or restrict network access.",
+ owasp_id="A01:2021",
+ cwe_id="CWE-284",
+ confidence="certain",
+ ))
+ except Exception:
+ if 'cluster_name' in resp.text:
+ findings.append(Finding(
+ severity=Severity.HIGH,
+ title="Elasticsearch cluster metadata exposed",
+ description=f"Cluster metadata accessible at {base_url}.",
+ evidence=resp.text[:200],
+ remediation="Enable authentication.",
+ owasp_id="A01:2021",
+ cwe_id="CWE-284",
+ confidence="firm",
+ ))
+ except Exception:
+ pass
+ return findings
+
+ def _es_check_indices(self, base_url, raw):
+ """GET /_cat/indices — list accessible indices."""
+ findings = []
+ try:
+ resp = requests.get(f"{base_url}/_cat/indices?v", timeout=3)
+ if resp.ok and resp.text.strip():
+ lines = resp.text.strip().split("\n")
+ index_count = max(0, len(lines) - 1) # subtract header
+ raw["index_count"] = index_count
+ if index_count > 0:
+ findings.append(Finding(
+ severity=Severity.HIGH,
+ title=f"Elasticsearch {index_count} indices accessible",
+ description=f"{index_count} indices listed without authentication.",
+ evidence="\n".join(lines[:6]),
+ remediation="Enable authentication and restrict index access.",
+ owasp_id="A01:2021",
+ cwe_id="CWE-284",
+ confidence="certain",
+ ))
+ except Exception:
+ pass
+ return findings
+
+ def _es_check_nodes(self, base_url, raw):
+ """GET /_nodes — extract transport/publish addresses (IP leak)."""
+ findings = []
+ try:
+ resp = requests.get(f"{base_url}/_nodes", timeout=3)
+ if resp.ok:
+ data = resp.json()
+ nodes = data.get("nodes", {})
+ ips = set()
+ for node in nodes.values():
+ for key in ("transport_address", "publish_address", "host"):
+ val = node.get(key) or ""
+ # Extract IP from "1.2.3.4:9300" style
+ ip = val.rsplit(":", 1)[0] if ":" in val else val
+ if ip and ip not in ("127.0.0.1", "localhost", "0.0.0.0"):
+ ips.add(ip)
+ settings = node.get("settings", {})
+ if isinstance(settings, dict):
+ net = settings.get("network", {})
+ if isinstance(net, dict):
+ for k in ("host", "publish_host"):
+ v = net.get(k)
+ if v and v not in ("127.0.0.1", "localhost", "0.0.0.0"):
+ ips.add(v)
+ if ips:
+ raw["node_ips"] = list(ips)
+ for ip_str in ips:
+          self._emit_metadata("internal_ips", {"ip": ip_str, "source": f"es_nodes:{base_url}"})
+ findings.append(Finding(
+ severity=Severity.MEDIUM,
+ title=f"Elasticsearch node IPs disclosed ({len(ips)})",
+ description=f"Node API exposes internal IPs: {', '.join(sorted(ips)[:5])}",
+ evidence=f"IPs: {', '.join(sorted(ips)[:10])}",
+ remediation="Restrict /_nodes endpoint access.",
+ owasp_id="A01:2021",
+ cwe_id="CWE-200",
+ confidence="certain",
+ ))
+ except Exception:
+ pass
+ return findings
+
+
+ def _service_info_modbus(self, target, port): # default port: 502
"""
Send Modbus device identification request to detect exposed PLCs.
@@ -726,10 +3252,11 @@ def _service_info_502(self, target, port):
Returns
-------
- str | None
- Modbus exposure summary or error message.
+ dict
+ Structured findings.
"""
- info = None
+ findings = []
+ raw = {"banner": None}
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(3)
@@ -739,18 +3266,25 @@ def _service_info_502(self, target, port):
data = sock.recv(256)
if data:
readable = ''.join(chr(b) if 32 <= b < 127 else '.' for b in data)
- info = (
- f"VULNERABILITY: Modbus device responded to identification request on {target}:{port}"
- f" (response: {readable.strip()[:120]})"
- )
+ raw["banner"] = readable.strip()[:120]
+ findings.append(Finding(
+ severity=Severity.CRITICAL,
+ title="Modbus device responded to identification request",
+ description=f"Industrial control system on {target}:{port} is accessible without authentication. "
+ "Modbus has no built-in security — any network access means full device control.",
+ evidence=f"Device ID response: {readable.strip()[:80]}",
+ remediation="Isolate Modbus devices on a dedicated OT network; deploy a Modbus-aware firewall.",
+ owasp_id="A01:2021",
+ cwe_id="CWE-284",
+ confidence="certain",
+ ))
sock.close()
except Exception as e:
- info = f"Modbus probe failed on {target}:{port}: {e}"
- self.P(info, color='y')
- return info
+ return probe_error(target, port, "Modbus", e)
+ return probe_result(raw_data=raw, findings=findings)
- def _service_info_27017(self, target, port):
+ def _service_info_mongodb(self, target, port): # default port: 27017
"""
Attempt MongoDB isMaster handshake to detect unauthenticated access.
@@ -763,10 +3297,11 @@ def _service_info_27017(self, target, port):
Returns
-------
- str | None
- MongoDB exposure summary or error message.
+ dict
+ Structured findings.
"""
- info = None
+ findings = []
+ raw = {"banner": None}
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(3)
@@ -788,21 +3323,56 @@ def _service_info_27017(self, target, port):
sock.sendall(header + message)
data = sock.recv(256)
if b'isMaster' in data or b'ismaster' in data:
- info = (
- f"VULNERABILITY: MongoDB isMaster responded without auth on {target}:{port}"
- )
+ raw["banner"] = "MongoDB isMaster response"
+ findings.append(Finding(
+ severity=Severity.CRITICAL,
+ title="MongoDB unauthenticated access (isMaster responded)",
+ description=f"MongoDB on {target}:{port} accepts commands without authentication, "
+ "allowing full database read/write access.",
+ evidence="isMaster command succeeded without credentials.",
+ remediation="Enable MongoDB authentication (--auth) and bind to localhost or trusted networks.",
+ owasp_id="A07:2021",
+ cwe_id="CWE-287",
+ confidence="certain",
+ ))
sock.close()
except Exception as e:
- info = f"MongoDB probe failed on {target}:{port}: {e}"
- self.P(info, color='y')
- return info
-
-
+ return probe_error(target, port, "MongoDB", e)
+ return probe_result(raw_data=raw, findings=findings)
+
+
+
+ # Product patterns for generic banner version extraction.
+ # Maps regex → CVE DB product name. Each regex must have a named group 'ver'.
+ _GENERIC_BANNER_PATTERNS = [
+    (_re.compile(r'OpenSSH[_\s](?P<ver>\d+\.\d+(?:\.\d+)?)', _re.I), "openssh"),
+    (_re.compile(r'Apache[/ ](?P<ver>\d+\.\d+(?:\.\d+)?)', _re.I), "apache"),
+    (_re.compile(r'nginx[/ ](?P<ver>\d+\.\d+(?:\.\d+)?)', _re.I), "nginx"),
+    (_re.compile(r'Exim\s+(?P<ver>\d+\.\d+(?:\.\d+)?)', _re.I), "exim"),
+    (_re.compile(r'Postfix[/ ]?(?:.*?smtpd)?\s*(?P<ver>\d+\.\d+(?:\.\d+)?)', _re.I), "postfix"),
+    (_re.compile(r'ProFTPD\s+(?P<ver>\d+\.\d+(?:\.\d+)?)', _re.I), "proftpd"),
+    (_re.compile(r'vsftpd\s+(?P<ver>\d+\.\d+(?:\.\d+)?)', _re.I), "vsftpd"),
+    (_re.compile(r'Redis[/ ](?:server\s+)?v?(?P<ver>\d+\.\d+(?:\.\d+)?)', _re.I), "redis"),
+    (_re.compile(r'Samba\s+(?P<ver>\d+\.\d+(?:\.\d+)?)', _re.I), "samba"),
+    (_re.compile(r'Asterisk\s+(?P<ver>\d+\.\d+(?:\.\d+)?)', _re.I), "asterisk"),
+    (_re.compile(r'MySQL[/ ](?P<ver>\d+\.\d+(?:\.\d+)?)', _re.I), "mysql"),
+    (_re.compile(r'PostgreSQL\s+(?P<ver>\d+\.\d+(?:\.\d+)?)', _re.I), "postgresql"),
+    (_re.compile(r'MongoDB\s+(?P<ver>\d+\.\d+(?:\.\d+)?)', _re.I), "mongodb"),
+    (_re.compile(r'Elasticsearch[/ ](?P<ver>\d+\.\d+(?:\.\d+)?)', _re.I), "elasticsearch"),
+    (_re.compile(r'memcached\s+(?P<ver>\d+\.\d+(?:\.\d+)?)', _re.I), "memcached"),
+    (_re.compile(r'TightVNC[/ ](?P<ver>\d+\.\d+(?:\.\d+)?)', _re.I), "tightvnc"),
+ ]
def _service_info_generic(self, target, port):
"""
Attempt a generic TCP banner grab for uncovered ports.
+ Performs three checks on the banner:
+ 1. Version disclosure — flags any product/version string as info leak.
+ 2. CVE matching — runs extracted versions against the CVE database.
+ 3. Unauthenticated data exposure — flags services that send data
+ without any client request (potential auth bypass).
+
Parameters
----------
target : str
@@ -812,23 +3382,51 @@ def _service_info_generic(self, target, port):
Returns
-------
- str | None
- Generic banner text or error message.
+ dict
+ Structured findings.
"""
- info = None
+ findings = []
+ raw = {"banner": None}
try:
- # Generic service: attempt to connect and read a short banner if any
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(2)
sock.connect((target, port))
- data = sock.recv(100).decode('utf-8', errors='ignore')
+ data = sock.recv(256).decode('utf-8', errors='ignore')
if data:
- # Filter non-printable chars for readability
banner = ''.join(ch if 32 <= ord(ch) < 127 else '.' for ch in data)
- info = f"Service banner on port {port}: \"{banner.strip()}\""
+ readable = banner.strip().replace('.', '')
+ if not readable:
+ # Pure binary data with no printable content — nothing useful.
+ sock.close()
+ return None
+ raw["banner"] = banner.strip()
else:
- info = "No banner received (service may require protocol handshake)."
+ sock.close()
+ return None # No banner — nothing useful to report
sock.close()
except Exception as e:
- info = f"Generic banner grab failed on port {port}: {e}"
- return info
+ return probe_error(target, port, "generic", e)
+
+ banner_text = raw["banner"]
+
+ # --- 1. Version extraction + CVE check ---
+ for pattern, product in self._GENERIC_BANNER_PATTERNS:
+ m = pattern.search(banner_text)
+ if m:
+ version = m.group("ver")
+ raw["product"] = product
+ raw["version"] = version
+ findings.append(Finding(
+ severity=Severity.LOW,
+ title=f"Service version disclosed: {product} {version}",
+ description=f"Banner on {target}:{port} reveals {product} {version}. "
+ "Version disclosure aids attackers in targeting known vulnerabilities.",
+ evidence=f"Banner: {banner_text[:80]}",
+ remediation="Suppress or genericize the service banner.",
+ cwe_id="CWE-200",
+ confidence="certain",
+ ))
+ findings += check_cves(product, version)
+ break # First match wins
+
+ return probe_result(raw_data=raw, findings=findings)
diff --git a/extensions/business/cybersec/red_mesh/test_redmesh.py b/extensions/business/cybersec/red_mesh/test_redmesh.py
index 33572bb0..cff52620 100644
--- a/extensions/business/cybersec/red_mesh/test_redmesh.py
+++ b/extensions/business/cybersec/red_mesh/test_redmesh.py
@@ -1,3 +1,4 @@
+import json
import sys
import struct
import unittest
@@ -29,9 +30,9 @@ def P(self, message, **kwargs):
class RedMeshOWASPTests(unittest.TestCase):
-
-
+
+
def setUp(self):
if MANUAL_RUN:
print()
@@ -58,6 +59,44 @@ def _build_worker(self, ports=None, exceptions=None):
worker.stop_event.is_set.return_value = False
return owner, worker
+ def _assert_has_finding(self, result, substring):
+ """Assert that a finding/vulnerability with 'substring' exists in result.
+
+ Handles both legacy string results and new dict results with findings/vulnerabilities.
+ """
+ if isinstance(result, str):
+ self.assertIn(substring, result)
+ return
+
+ if isinstance(result, dict):
+ # Check 'vulnerabilities' list (string titles)
+ vulns = result.get("vulnerabilities", [])
+ for v in vulns:
+ if substring in str(v):
+ return
+
+ # Check 'findings' list (dicts with 'title' and 'description')
+ findings = result.get("findings", [])
+ for f in findings:
+ if isinstance(f, dict):
+ if substring in str(f.get("title", "")) or substring in str(f.get("description", "")):
+ return
+ elif substring in str(f):
+ return
+
+ # Check 'error' key
+ if substring in str(result.get("error", "")):
+ return
+
+ # Fallback: check entire dict as string
+ result_str = json.dumps(result, default=str)
+ if substring in result_str:
+ return
+
+ self.fail(f"Finding '{substring}' not found in result: {json.dumps(result, indent=2, default=str)[:500]}")
+ else:
+ self.fail(f"Unexpected result type {type(result)}: {result}")
+
def test_broken_access_control_detected(self):
owner, worker = self._build_worker()
@@ -75,7 +114,7 @@ def fake_get(url, timeout=2, verify=False):
return resp
with patch(
- "extensions.business.cybersec.red_mesh.web_mixin.requests.get",
+ "extensions.business.cybersec.red_mesh.web_discovery_mixin.requests.get",
side_effect=fake_get,
):
result = worker._web_test_common("example.com", 80)
@@ -87,7 +126,7 @@ def test_cryptographic_failures_cookie_flags(self):
resp.headers = {"Set-Cookie": "sessionid=abc; Path=/"}
resp.status_code = 200
with patch(
- "extensions.business.cybersec.red_mesh.web_mixin.requests.get",
+ "extensions.business.cybersec.red_mesh.web_hardening_mixin.requests.get",
return_value=resp,
):
result = worker._web_test_flags("example.com", 443)
@@ -101,11 +140,11 @@ def test_injection_sql_detected(self):
resp.text = "sql syntax error near line"
resp.status_code = 200
with patch(
- "extensions.business.cybersec.red_mesh.web_mixin.requests.get",
+ "extensions.business.cybersec.red_mesh.web_injection_mixin.requests.get",
return_value=resp,
):
result = worker._web_test_sql_injection("example.com", 80)
- self.assertIn("VULNERABILITY: Potential SQL injection", result)
+ self._assert_has_finding(result, "SQL injection")
def test_insecure_design_path_traversal(self):
owner, worker = self._build_worker()
@@ -113,11 +152,11 @@ def test_insecure_design_path_traversal(self):
resp.text = "root:x:0:0:root:/root:/bin/bash"
resp.status_code = 200
with patch(
- "extensions.business.cybersec.red_mesh.web_mixin.requests.get",
+ "extensions.business.cybersec.red_mesh.web_injection_mixin.requests.get",
return_value=resp,
):
result = worker._web_test_path_traversal("example.com", 80)
- self.assertIn("VULNERABILITY: Path traversal", result)
+ self._assert_has_finding(result, "Path traversal")
def test_security_misconfiguration_missing_headers(self):
owner, worker = self._build_worker()
@@ -125,7 +164,7 @@ def test_security_misconfiguration_missing_headers(self):
resp.headers = {"Server": "Test"}
resp.status_code = 200
with patch(
- "extensions.business.cybersec.red_mesh.web_mixin.requests.get",
+ "extensions.business.cybersec.red_mesh.web_hardening_mixin.requests.get",
return_value=resp,
):
result = worker._web_test_security_headers("example.com", 80)
@@ -134,20 +173,23 @@ def test_security_misconfiguration_missing_headers(self):
def test_vulnerable_component_banner_exposed(self):
owner, worker = self._build_worker(ports=[80])
worker.state["open_ports"] = [80]
+ # Set enabled features to include the probe
+ worker._PentestLocalWorker__enabled_features = ["_service_info_http"]
resp = MagicMock()
resp.status_code = 200
resp.reason = "OK"
resp.headers = {"Server": "Apache/2.2.0"}
+ resp.text = ""
with patch(
- "extensions.business.cybersec.red_mesh.redmesh_utils.dir",
- return_value=["_service_info_80"],
- ), patch(
"extensions.business.cybersec.red_mesh.service_mixin.requests.get",
return_value=resp,
+ ), patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.requests.request",
+ side_effect=Exception("skip methods check"),
):
worker._gather_service_info()
- banner = worker.state["service_info"][80]["_service_info_80"]
- self.assertIn("Apache/2.2.0", banner)
+ banner = worker.state["service_info"][80]["_service_info_http"]
+ self._assert_has_finding(banner, "Apache/2.2.0")
def test_identification_auth_failure_anonymous_ftp(self):
owner, worker = self._build_worker(ports=[21])
@@ -172,8 +214,8 @@ def quit(self):
"extensions.business.cybersec.red_mesh.service_mixin.ftplib.FTP",
return_value=DummyFTP(),
):
- result = worker._service_info_21("example.com", 21)
- self.assertIn("VULNERABILITY: FTP allows anonymous login", result)
+ result = worker._service_info_ftp("example.com", 21)
+ self._assert_has_finding(result, "FTP allows anonymous login")
def test_service_checks_cover_non_standard_ports(self):
owner, worker = self._build_worker(ports=[2121])
@@ -198,8 +240,8 @@ def quit(self):
"extensions.business.cybersec.red_mesh.service_mixin.ftplib.FTP",
return_value=DummyFTP(),
):
- result = worker._service_info_21("example.com", 2121)
- self.assertIn("FTP allows anonymous login", result)
+ result = worker._service_info_ftp("example.com", 2121)
+ self._assert_has_finding(result, "FTP allows anonymous login")
def test_service_info_runs_all_methods_for_each_port(self):
owner, worker = self._build_worker(ports=[1234])
@@ -213,12 +255,9 @@ def fake_service_two(target, port):
setattr(worker, "_service_info_fake_one", fake_service_one)
setattr(worker, "_service_info_fake_two", fake_service_two)
+ worker._PentestLocalWorker__enabled_features = ["_service_info_fake_one", "_service_info_fake_two"]
- with patch(
- "extensions.business.cybersec.red_mesh.redmesh_utils.dir",
- return_value=["_service_info_fake_one", "_service_info_fake_two"],
- ):
- worker._gather_service_info()
+ worker._gather_service_info()
service_snap = worker.state["service_info"][1234]
self.assertEqual(len(service_snap), 2)
@@ -231,7 +270,7 @@ def test_software_data_integrity_secret_leak(self):
resp.text = "BEGIN RSA PRIVATE KEY"
resp.status_code = 200
with patch(
- "extensions.business.cybersec.red_mesh.web_mixin.requests.get",
+ "extensions.business.cybersec.red_mesh.web_discovery_mixin.requests.get",
return_value=resp,
):
result = worker._web_test_homepage("example.com", 80)
@@ -258,6 +297,7 @@ def register(name):
def test_web_tests_include_uncommon_ports(self):
owner, worker = self._build_worker(ports=[9000])
worker.state["open_ports"] = [9000]
+ worker._PentestLocalWorker__enabled_features = ["_web_test_common"]
def fake_get(url, timeout=2, verify=False):
resp = MagicMock()
@@ -267,10 +307,7 @@ def fake_get(url, timeout=2, verify=False):
return resp
with patch(
- "extensions.business.cybersec.red_mesh.redmesh_utils.dir",
- return_value=["_web_test_common"],
- ), patch(
- "extensions.business.cybersec.red_mesh.web_mixin.requests.get",
+ "extensions.business.cybersec.red_mesh.web_discovery_mixin.requests.get",
side_effect=fake_get,
):
worker._run_web_tests()
@@ -290,12 +327,9 @@ def fake_web_two(target, port):
setattr(worker, "_web_test_fake_one", fake_web_one)
setattr(worker, "_web_test_fake_two", fake_web_two)
+ worker._PentestLocalWorker__enabled_features = ["_web_test_fake_one", "_web_test_fake_two"]
- with patch(
- "extensions.business.cybersec.red_mesh.redmesh_utils.dir",
- return_value=["_web_test_fake_one", "_web_test_fake_two"],
- ):
- worker._run_web_tests()
+ worker._run_web_tests()
web_snap = worker.state["web_tests_info"][10000]
self.assertEqual(len(web_snap), 2)
@@ -314,11 +348,11 @@ def test_cross_site_scripting_detection(self):
resp.text = f"Response with {payload} inside"
resp.status_code = 200
with patch(
- "extensions.business.cybersec.red_mesh.web_mixin.requests.get",
+ "extensions.business.cybersec.red_mesh.web_injection_mixin.requests.get",
return_value=resp,
):
result = worker._web_test_xss("example.com", 80)
- self.assertIn("VULNERABILITY: Reflected XSS", result)
+ self._assert_has_finding(result, "XSS")
def test_tls_certificate_expiration_reporting(self):
owner, worker = self._build_worker(ports=[443])
@@ -330,36 +364,123 @@ def __enter__(self):
def __exit__(self, exc_type, exc, tb):
return False
- class DummySSL:
+ class DummySSLUnverified:
def __enter__(self):
return self
def __exit__(self, exc_type, exc, tb):
return False
- def getpeercert(self):
- return {"notAfter": "Dec 31 12:00:00 2030 GMT"}
-
def version(self):
return "TLSv1.3"
def cipher(self):
return ("TLS_AES_256_GCM_SHA384", None, None)
- class DummyContext:
+ def getpeercert(self, binary_form=False):
+ if binary_form:
+ return b"dummy"
+ return {"notAfter": "Dec 31 12:00:00 2030 GMT",
+ "subject": ((("commonName", "example.com"),),),
+ "issuer": ((("organizationName", "Test CA"),),)}
+
+ class DummySSLVerified:
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc, tb):
+ return False
+
+ def getpeercert(self):
+ return {"notAfter": "Dec 31 12:00:00 2030 GMT",
+ "subject": ((("commonName", "example.com"),),),
+ "issuer": ((("organizationName", "Test CA"),),)}
+
+ call_count = [0]
+
+ class DummyContextUnverified:
+ check_hostname = True
+ verify_mode = None
+
+ def wrap_socket(self, sock, server_hostname=None):
+ return DummySSLUnverified()
+
+ class DummyContextVerified:
def wrap_socket(self, sock, server_hostname=None):
- return DummySSL()
+ return DummySSLVerified()
+
+ def mock_ssl_context(protocol=None):
+ return DummyContextUnverified()
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.socket.create_connection",
+ return_value=DummyConn(),
+ ), patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.ssl.SSLContext",
+ return_value=DummyContextUnverified(),
+ ), patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.ssl.create_default_context",
+ return_value=DummyContextVerified(),
+ ):
+ info = worker._service_info_tls("example.com", 443)
+ self.assertIsInstance(info, dict)
+ self.assertIn("findings", info)
+ # Should find TLS info (protocol is TLSv1.3 which is fine)
+ self.assertIn("protocol", info)
+ self.assertEqual(info["protocol"], "TLSv1.3")
+
+ def test_tls_self_signed_detection(self):
+ owner, worker = self._build_worker(ports=[443])
+
+ class DummyConn:
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc, tb):
+ return False
+
+ class DummySSLUnverified:
+ def __enter__(self):
+ return self
+
+ def __exit__(self, exc_type, exc, tb):
+ return False
+
+ def version(self):
+ return "TLSv1.2"
+
+ def cipher(self):
+ return ("AES256-SHA", None, None)
+
+ def getpeercert(self, binary_form=False):
+ return b"dummy" if binary_form else {}
+
+ class DummyContextUnverified:
+ check_hostname = True
+ verify_mode = None
+
+ def wrap_socket(self, sock, server_hostname=None):
+ return DummySSLUnverified()
+
+ class DummyContextVerified:
+ def wrap_socket(self, sock, server_hostname=None):
+ raise ssl.SSLCertVerificationError("self-signed certificate")
+
+ import ssl
with patch(
"extensions.business.cybersec.red_mesh.service_mixin.socket.create_connection",
return_value=DummyConn(),
+ ), patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.ssl.SSLContext",
+ return_value=DummyContextUnverified(),
), patch(
"extensions.business.cybersec.red_mesh.service_mixin.ssl.create_default_context",
- return_value=DummyContext(),
+ return_value=DummyContextVerified(),
):
info = worker._service_info_tls("example.com", 443)
- self.assertIn("TLS", info)
- self.assertIn("exp", info)
+
+ self._assert_has_finding(info, "Self-signed")
def test_port_scan_detects_open_ports(self):
owner, worker = self._build_worker(ports=[80, 81])
@@ -409,8 +530,8 @@ def close(self):
"extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
return_value=DummySocket(),
):
- info = worker._service_info_23("example.com", 23)
- self.assertIn("VULNERABILITY: Telnet", info)
+ info = worker._service_info_telnet("example.com", 23)
+ self._assert_has_finding(info, "Telnet")
def test_service_smb_probe(self):
owner, worker = self._build_worker(ports=[445])
@@ -438,12 +559,52 @@ def close(self):
"extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
return_value=DummySocket(),
):
- info = worker._service_info_445("example.com", 445)
- self.assertIn("VULNERABILITY: SMB", info)
+ info = worker._service_info_smb("example.com", 445)
+ self._assert_has_finding(info, "SMB")
+
+ def test_service_vnc_unauthenticated(self):
+ """VNC with security type None (1) should report CRITICAL."""
+ owner, worker = self._build_worker(ports=[5900])
+
+ recv_calls = [0]
+
+ class DummySocket:
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def settimeout(self, timeout):
+ return None
+
+ def connect(self, addr):
+ return None
+
+ def recv(self, nbytes):
+ recv_calls[0] += 1
+ if recv_calls[0] == 1:
+ return b"RFB 003.008\n"
+ else:
+ # num_types=1, type=1 (None)
+ return bytes([1, 1])
+
+ def sendall(self, data):
+ return None
+
+ def close(self):
+ return None
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
+ return_value=DummySocket(),
+ ):
+ info = worker._service_info_vnc("example.com", 5900)
+ self._assert_has_finding(info, "unauthenticated")
- def test_service_vnc_banner(self):
+ def test_service_vnc_password_auth(self):
+ """VNC with security type 2 (VNC Auth) should report MEDIUM."""
owner, worker = self._build_worker(ports=[5900])
+ recv_calls = [0]
+
class DummySocket:
def __init__(self, *args, **kwargs):
pass
@@ -455,7 +616,14 @@ def connect(self, addr):
return None
def recv(self, nbytes):
- return b"RFB 003.008\n"
+ recv_calls[0] += 1
+ if recv_calls[0] == 1:
+ return b"RFB 003.008\n"
+ else:
+ return bytes([1, 2])
+
+ def sendall(self, data):
+ return None
def close(self):
return None
@@ -464,8 +632,8 @@ def close(self):
"extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
return_value=DummySocket(),
):
- info = worker._service_info_5900("example.com", 5900)
- self.assertIn("VULNERABILITY: VNC", info)
+ info = worker._service_info_vnc("example.com", 5900)
+ self._assert_has_finding(info, "DES-based")
def test_service_snmp_public(self):
owner, worker = self._build_worker(ports=[161])
@@ -491,8 +659,8 @@ def close(self):
"extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
return_value=DummyUDPSocket(),
):
- info = worker._service_info_161("example.com", 161)
- self.assertIn("VULNERABILITY: SNMP", info)
+ info = worker._service_info_snmp("example.com", 161)
+ self._assert_has_finding(info, "SNMP")
def test_service_dns_version_disclosure(self):
owner, worker = self._build_worker(ports=[53])
@@ -527,8 +695,8 @@ def close(self):
"extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
return_value=DummyUDPSocket(),
):
- info = worker._service_info_53("example.com", 53)
- self.assertIn("VULNERABILITY: DNS version disclosure", info)
+ info = worker._service_info_dns("example.com", 53)
+ self._assert_has_finding(info, "DNS version disclosure")
def test_service_memcached_stats(self):
owner, worker = self._build_worker(ports=[11211])
@@ -556,20 +724,26 @@ def close(self):
"extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
return_value=DummySocket(),
):
- info = worker._service_info_11211("example.com", 11211)
- self.assertIn("VULNERABILITY: Memcached", info)
+ info = worker._service_info_memcached("example.com", 11211)
+ self._assert_has_finding(info, "Memcached")
def test_service_elasticsearch_metadata(self):
owner, worker = self._build_worker(ports=[9200])
resp = MagicMock()
resp.ok = True
- resp.text = '{"cluster_name":"example"}'
+ resp.status_code = 200
+ resp.text = '{"cluster_name":"example","version":{"number":"7.10.0"},"tagline":"You Know, for Search"}'
+ resp.json.return_value = {
+ "cluster_name": "example",
+ "version": {"number": "7.10.0"},
+ "tagline": "You Know, for Search",
+ }
with patch(
"extensions.business.cybersec.red_mesh.service_mixin.requests.get",
return_value=resp,
):
- info = worker._service_info_9200("example.com", 9200)
- self.assertIn("VULNERABILITY: Elasticsearch", info)
+ info = worker._service_info_elasticsearch("example.com", 9200)
+ self._assert_has_finding(info, "Elasticsearch")
def test_service_modbus_identification(self):
owner, worker = self._build_worker(ports=[502])
@@ -597,10 +771,46 @@ def close(self):
"extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
return_value=DummySocket(),
):
- info = worker._service_info_502("example.com", 502)
- self.assertIn("VULNERABILITY: Modbus", info)
+ info = worker._service_info_modbus("example.com", 502)
+ self._assert_has_finding(info, "Modbus")
+
+ def test_service_postgres_trust_auth(self):
+ """Auth code 0 (trust) should be CRITICAL."""
+ owner, worker = self._build_worker(ports=[5432])
+
+ class DummySocket:
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def settimeout(self, timeout):
+ return None
+
+ def connect(self, addr):
+ return None
+
+ def sendall(self, data):
+ self.sent = data
+
+ def recv(self, nbytes):
+ # 'R' + int32(8) + int32(0) = AuthenticationOk
+ return b'R' + struct.pack('!II', 8, 0)
+
+ def close(self):
+ return None
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
+ return_value=DummySocket(),
+ ):
+ info = worker._service_info_postgresql("example.com", 5432)
+ self._assert_has_finding(info, "trust authentication")
+ # Verify it's CRITICAL severity
+ for f in info.get("findings", []):
+ if "trust" in f.get("title", "").lower():
+ self.assertEqual(f["severity"], "CRITICAL")
def test_service_postgres_cleartext(self):
+ """Auth code 3 (cleartext) should be MEDIUM."""
owner, worker = self._build_worker(ports=[5432])
class DummySocket:
@@ -617,7 +827,8 @@ def sendall(self, data):
self.sent = data
def recv(self, nbytes):
- return b"AuthenticationCleartextPassword"
+ # 'R' + int32(8) + int32(3) = CleartextPassword
+ return b'R' + struct.pack('!II', 8, 3)
def close(self):
return None
@@ -626,8 +837,11 @@ def close(self):
"extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
return_value=DummySocket(),
):
- info = worker._service_info_5432("example.com", 5432)
- self.assertIn("VULNERABILITY: PostgreSQL", info)
+ info = worker._service_info_postgresql("example.com", 5432)
+ self._assert_has_finding(info, "cleartext")
+ for f in info.get("findings", []):
+ if "cleartext" in f.get("title", "").lower():
+ self.assertEqual(f["severity"], "MEDIUM")
def test_service_mssql_prelogin(self):
owner, worker = self._build_worker(ports=[1433])
@@ -655,8 +869,8 @@ def close(self):
"extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
return_value=DummySocket(),
):
- info = worker._service_info_1433("example.com", 1433)
- self.assertIn("VULNERABILITY: MSSQL", info)
+ info = worker._service_info_mssql("example.com", 1433)
+ self._assert_has_finding(info, "MSSQL")
def test_service_mongo_unauth(self):
owner, worker = self._build_worker(ports=[27017])
@@ -684,8 +898,8 @@ def close(self):
"extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
return_value=DummySocket(),
):
- info = worker._service_info_27017("example.com", 27017)
- self.assertIn("VULNERABILITY: MongoDB", info)
+ info = worker._service_info_mongodb("example.com", 27017)
+ self._assert_has_finding(info, "MongoDB")
def test_web_graphql_introspection(self):
owner, worker = self._build_worker()
@@ -693,7 +907,7 @@ def test_web_graphql_introspection(self):
resp.status_code = 200
resp.text = "{\"data\":{\"__schema\":{}}}"
with patch(
- "extensions.business.cybersec.red_mesh.web_mixin.requests.post",
+ "extensions.business.cybersec.red_mesh.web_api_mixin.requests.post",
return_value=resp,
):
result = worker._web_test_graphql_introspection("example.com", 80)
@@ -708,7 +922,7 @@ def fake_get(url, timeout=3, verify=False, headers=None):
return resp
with patch(
- "extensions.business.cybersec.red_mesh.web_mixin.requests.get",
+ "extensions.business.cybersec.red_mesh.web_api_mixin.requests.get",
side_effect=fake_get,
):
result = worker._web_test_metadata_endpoints("example.com", 80)
@@ -719,7 +933,7 @@ def test_web_api_auth_bypass(self):
resp = MagicMock()
resp.status_code = 200
with patch(
- "extensions.business.cybersec.red_mesh.web_mixin.requests.get",
+ "extensions.business.cybersec.red_mesh.web_api_mixin.requests.get",
return_value=resp,
):
result = worker._web_test_api_auth_bypass("example.com", 80)
@@ -734,7 +948,7 @@ def test_cors_misconfiguration_detection(self):
}
resp.status_code = 200
with patch(
- "extensions.business.cybersec.red_mesh.web_mixin.requests.get",
+ "extensions.business.cybersec.red_mesh.web_hardening_mixin.requests.get",
return_value=resp,
):
result = worker._web_test_cors_misconfiguration("example.com", 80)
@@ -746,7 +960,7 @@ def test_open_redirect_detection(self):
resp.status_code = 302
resp.headers = {"Location": "https://attacker.example"}
with patch(
- "extensions.business.cybersec.red_mesh.web_mixin.requests.get",
+ "extensions.business.cybersec.red_mesh.web_hardening_mixin.requests.get",
return_value=resp,
):
result = worker._web_test_open_redirect("example.com", 80)
@@ -758,18 +972,1373 @@ def test_http_methods_detection(self):
resp.headers = {"Allow": "GET, POST, PUT"}
resp.status_code = 200
with patch(
- "extensions.business.cybersec.red_mesh.web_mixin.requests.options",
+ "extensions.business.cybersec.red_mesh.web_hardening_mixin.requests.options",
return_value=resp,
):
result = worker._web_test_http_methods("example.com", 80)
self.assertIn("VULNERABILITY: Risky HTTP methods", result)
+ # ===== NEW TESTS — findings.py =====
+
+ def test_findings_severity_json_serializable(self):
+ """Severity enum serializes via json.dumps."""
+ from extensions.business.cybersec.red_mesh.findings import Severity
+ self.assertEqual(json.dumps(Severity.CRITICAL), '"CRITICAL"')
+ self.assertEqual(json.dumps(Severity.INFO), '"INFO"')
+
+ def test_findings_dataclass_serializable(self):
+ """Finding serializes via asdict."""
+ from extensions.business.cybersec.red_mesh.findings import Finding, Severity
+ from dataclasses import asdict
+ f = Finding(Severity.HIGH, "Test", "Description", evidence="proof")
+ d = asdict(f)
+ self.assertEqual(d["severity"], "HIGH")
+ self.assertEqual(d["title"], "Test")
+ self.assertEqual(d["evidence"], "proof")
+ # Ensure JSON-serializable
+ json.dumps(d)
+
+ def test_probe_result_structure(self):
+ """probe_result produces dict with both findings and vulnerabilities."""
+ from extensions.business.cybersec.red_mesh.findings import Finding, Severity, probe_result
+ findings = [
+ Finding(Severity.CRITICAL, "Crit vuln", "Critical."),
+ Finding(Severity.LOW, "Low issue", "Low."),
+ Finding(Severity.INFO, "Info note", "Info."),
+ ]
+ result = probe_result(raw_data={"banner": "test"}, findings=findings)
+ self.assertEqual(result["banner"], "test")
+ self.assertEqual(len(result["findings"]), 3)
+ # vulnerabilities only includes CRITICAL/HIGH/MEDIUM
+ self.assertEqual(result["vulnerabilities"], ["Crit vuln"])
+
+ def test_probe_error_structure(self):
+ """probe_error returns None so failed probes are not stored."""
+ from extensions.business.cybersec.red_mesh.findings import probe_error
+ result = probe_error("host", 80, "TestProbe", Exception("oops"))
+ self.assertIsNone(result)
+
+ # ===== NEW TESTS — cve_db.py =====
+
+ def test_cve_matches_constraint_less_than(self):
+ from extensions.business.cybersec.red_mesh.cve_db import _matches_constraint
+ self.assertTrue(_matches_constraint("1.4.1", "<1.4.3"))
+ self.assertFalse(_matches_constraint("1.4.3", "<1.4.3"))
+ self.assertFalse(_matches_constraint("1.4.4", "<1.4.3"))
+
+ def test_cve_matches_constraint_range(self):
+ from extensions.business.cybersec.red_mesh.cve_db import _matches_constraint
+ self.assertTrue(_matches_constraint("5.7.16", ">=5.7,<5.7.20"))
+ self.assertFalse(_matches_constraint("5.7.20", ">=5.7,<5.7.20"))
+ self.assertFalse(_matches_constraint("5.6.99", ">=5.7,<5.7.20"))
+
+ def test_cve_check_elasticsearch(self):
+ from extensions.business.cybersec.red_mesh.cve_db import check_cves
+ findings = check_cves("elasticsearch", "1.4.1")
+ cve_ids = [f.title for f in findings]
+ self.assertTrue(any("CVE-2015-1427" in t for t in cve_ids))
+
+ def test_cve_check_no_match(self):
+ from extensions.business.cybersec.red_mesh.cve_db import check_cves
+ findings = check_cves("elasticsearch", "99.0.0")
+ self.assertEqual(len(findings), 0)
+
+ # ===== NEW TESTS — Redis deep probe =====
+
+ def test_redis_unauthenticated_access(self):
+ owner, worker = self._build_worker(ports=[6379])
+
+ cmd_responses = {
+ "PING": "+PONG\r\n",
+ "INFO server": "$100\r\nredis_version:6.0.5\r\nos:Linux 5.4.0\r\n",
+ "CONFIG GET dir": "*2\r\n$3\r\ndir\r\n$4\r\n/tmp\r\n",
+ "DBSIZE": ":42\r\n",
+ "CLIENT LIST": "id=1 addr=10.0.0.1:12345 fd=5\r\n",
+ }
+
+ class DummySocket:
+ def __init__(self, *args, **kwargs):
+ self._buf = b""
+
+ def settimeout(self, timeout):
+ return None
+
+ def connect(self, addr):
+ return None
+
+ def sendall(self, data):
+ cmd = data.decode().strip()
+ self._buf = cmd_responses.get(cmd, "-ERR\r\n").encode()
+
+ def recv(self, nbytes):
+ data = self._buf
+ self._buf = b""
+ return data
+
+ def close(self):
+ return None
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
+ return_value=DummySocket(),
+ ):
+ info = worker._service_info_redis("example.com", 6379)
+
+ self._assert_has_finding(info, "unauthenticated")
+ self._assert_has_finding(info, "CONFIG")
+ self.assertIsInstance(info, dict)
+ self.assertEqual(info.get("version"), "6.0.5")
+
+ def test_redis_requires_auth(self):
+ owner, worker = self._build_worker(ports=[6379])
+
+ class DummySocket:
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def settimeout(self, timeout):
+ return None
+
+ def connect(self, addr):
+ return None
+
+ def sendall(self, data):
+ pass
+
+ def recv(self, nbytes):
+ return b"-NOAUTH Authentication required\r\n"
+
+ def close(self):
+ return None
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
+ return_value=DummySocket(),
+ ):
+ info = worker._service_info_redis("example.com", 6379)
+
+ self._assert_has_finding(info, "requires authentication")
+
+ # ===== NEW TESTS — MySQL version extraction =====
+
+ def test_mysql_version_extraction(self):
+ owner, worker = self._build_worker(ports=[3306])
+
+ class DummySocket:
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def settimeout(self, timeout):
+ return None
+
+ def connect(self, addr):
+ return None
+
+ def recv(self, nbytes):
+ # MySQL handshake: 3-byte length + seq + protocol(0x0a) + version + null
+ version = b"8.0.28"
+ payload = bytes([0x0a]) + version + b'\x00' + b'\x00' * 50
+ pkt_len = len(payload).to_bytes(3, 'little')
+ return pkt_len + b'\x00' + payload
+
+ def close(self):
+ return None
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
+ return_value=DummySocket(),
+ ):
+ info = worker._service_info_mysql("example.com", 3306)
+
+ self.assertIsInstance(info, dict)
+ self.assertEqual(info.get("version"), "8.0.28")
+ self._assert_has_finding(info, "8.0.28")
+
+ # ===== NEW TESTS — Tech fingerprint =====
+
+ def test_tech_fingerprint(self):
+ owner, worker = self._build_worker()
+ resp = MagicMock()
+ resp.headers = {"Server": "Apache/2.4.52", "X-Powered-By": "PHP/8.1"}
+ resp.text = ''
+ resp.status_code = 200
+ with patch(
+ "extensions.business.cybersec.red_mesh.web_discovery_mixin.requests.get",
+ return_value=resp,
+ ):
+ result = worker._web_test_tech_fingerprint("example.com", 80)
+ self.assertIsInstance(result, dict)
+ self._assert_has_finding(result, "Apache/2.4.52")
+ self._assert_has_finding(result, "PHP/8.1")
+ self._assert_has_finding(result, "WordPress")
+
+ # ===== NEW TESTS — Modbus fingerprint on non-standard port =====
+
+ def test_fingerprint_modbus_on_nonstandard_port(self):
+ """Port 1024 with Modbus response should be fingerprinted as modbus."""
+ owner, worker = self._build_worker(ports=[1024])
+ worker.state["open_ports"] = [1024]
+ worker.target = "10.0.0.1"
+
+ # Build a valid Modbus Read Device ID response:
+ # Transaction ID 0x0001, Protocol ID 0x0000, Length 0x0008, Unit 0x01,
+ # Function 0x2B, MEI type 0x0E, conformity 0x01, more 0x00, obj count 0x00
+ modbus_response = b'\x00\x01\x00\x00\x00\x08\x01\x2b\x0e\x01\x01\x00\x00'
+
+ call_index = [0]
+
+ def fake_socket_factory(*args, **kwargs):
+ mock_sock = MagicMock()
+ mock_sock.recv.return_value = b""
+ # First call: passive banner grab → empty (no banner)
+ # Second call: nudge probe → empty
+ # Third call: HTTP probe → empty
+ # Fourth call: modbus probe → valid response
+ idx = call_index[0]
+ call_index[0] += 1
+ if idx == 3:
+ mock_sock.recv.return_value = modbus_response
+ return mock_sock
+
+ with patch("extensions.business.cybersec.red_mesh.redmesh_utils.socket.socket", side_effect=fake_socket_factory):
+ worker._fingerprint_ports()
+
+ self.assertEqual(worker.state["port_protocols"][1024], "modbus")
+
+ def test_fingerprint_non_modbus_stays_unknown(self):
+ """Port with no recognizable response should remain unknown."""
+ owner, worker = self._build_worker(ports=[1024])
+ worker.state["open_ports"] = [1024]
+ worker.target = "10.0.0.1"
+
+ def fake_socket_factory(*args, **kwargs):
+ mock_sock = MagicMock()
+ mock_sock.recv.return_value = b""
+ return mock_sock
+
+ with patch("extensions.business.cybersec.red_mesh.redmesh_utils.socket.socket", side_effect=fake_socket_factory):
+ worker._fingerprint_ports()
+
+ self.assertEqual(worker.state["port_protocols"][1024], "unknown")
+
+ def test_fingerprint_mysql_false_positive_binary_data(self):
+ """Binary data that happens to have 0x00 at byte 3 and 0x0a at byte 4 must NOT be classified as mysql."""
+ owner, worker = self._build_worker(ports=[37364])
+ worker.state["open_ports"] = [37364]
+ worker.target = "10.0.0.1"
+
+ # Crafted binary blob: byte 3 = 0x00, byte 4 = 0x0a, but byte 5+ is not
+ # a printable version string — this is NOT a MySQL greeting.
+ fake_binary = b'\x07\x02\x03\x00\x0a\x80\xff\x00\x01\x02'
+
+ def fake_socket_factory(*args, **kwargs):
+ mock_sock = MagicMock()
+ mock_sock.recv.return_value = fake_binary
+ return mock_sock
+
+ with patch("extensions.business.cybersec.red_mesh.redmesh_utils.socket.socket", side_effect=fake_socket_factory):
+ worker._fingerprint_ports()
+
+ self.assertNotEqual(worker.state["port_protocols"][37364], "mysql")
+
+ def test_fingerprint_mysql_real_greeting(self):
+ """A genuine MySQL greeting packet should still be fingerprinted as mysql."""
+ owner, worker = self._build_worker(ports=[3306])
+ worker.state["open_ports"] = [3306]
+ worker.target = "10.0.0.1"
+
+ # Real MySQL handshake: 3-byte length + seq=0x00 + protocol=0x0a + "8.0.28\x00" + filler
+ version = b"8.0.28"
+ payload = bytes([0x0a]) + version + b'\x00' + b'\x00' * 50
+ pkt_len = len(payload).to_bytes(3, 'little')
+ mysql_greeting = pkt_len + b'\x00' + payload
+
+ def fake_socket_factory(*args, **kwargs):
+ mock_sock = MagicMock()
+ mock_sock.recv.return_value = mysql_greeting
+ return mock_sock
+
+ with patch("extensions.business.cybersec.red_mesh.redmesh_utils.socket.socket", side_effect=fake_socket_factory):
+ worker._fingerprint_ports()
+
+ self.assertEqual(worker.state["port_protocols"][3306], "mysql")
+
+ def test_fingerprint_telnet_real_iac(self):
+ """Banner starting with a valid IAC WILL sequence should be fingerprinted as telnet."""
+ owner, worker = self._build_worker(ports=[2323])
+ worker.state["open_ports"] = [2323]
+ worker.target = "10.0.0.1"
+
+ # IAC WILL ECHO (0xFF 0xFB 0x01) — valid telnet negotiation per RFC 854
+ telnet_banner = b'\xff\xfb\x01\xff\xfb\x03'
+
+ def fake_socket_factory(*args, **kwargs):
+ mock_sock = MagicMock()
+ mock_sock.recv.return_value = telnet_banner
+ return mock_sock
+
+ with patch("extensions.business.cybersec.red_mesh.redmesh_utils.socket.socket", side_effect=fake_socket_factory):
+ worker._fingerprint_ports()
+
+ self.assertEqual(worker.state["port_protocols"][2323], "telnet")
+
+ def test_fingerprint_telnet_false_positive_0xff(self):
+ """Binary data starting with 0xFF but no valid IAC command must NOT be classified as telnet."""
+ owner, worker = self._build_worker(ports=[8502])
+ worker.state["open_ports"] = [8502]
+ worker.target = "10.0.0.1"
+
+ # 0xFF followed by 0x01 — not a valid IAC command byte (WILL=0xFB, WONT=0xFC, DO=0xFD, DONT=0xFE)
+ fake_binary = b'\xff\x01\x03\x00\x00\x05\x01\x2b'
+
+ def fake_socket_factory(*args, **kwargs):
+ mock_sock = MagicMock()
+ mock_sock.recv.return_value = fake_binary
+ return mock_sock
+
+ with patch("extensions.business.cybersec.red_mesh.redmesh_utils.socket.socket", side_effect=fake_socket_factory):
+ worker._fingerprint_ports()
+
+ self.assertNotEqual(worker.state["port_protocols"][8502], "telnet")
+
+ def test_fingerprint_telnet_login_prompt(self):
+ """A text banner containing 'login:' should still be fingerprinted as telnet."""
+ owner, worker = self._build_worker(ports=[2323])
+ worker.state["open_ports"] = [2323]
+ worker.target = "10.0.0.1"
+
+ login_banner = b'Ubuntu 22.04 LTS\r\nlogin: '
+
+ def fake_socket_factory(*args, **kwargs):
+ mock_sock = MagicMock()
+ mock_sock.recv.return_value = login_banner
+ return mock_sock
+
+ with patch("extensions.business.cybersec.red_mesh.redmesh_utils.socket.socket", side_effect=fake_socket_factory):
+ worker._fingerprint_ports()
+
+ self.assertEqual(worker.state["port_protocols"][2323], "telnet")
+
+ def test_fingerprint_modbus_wrong_function_code(self):
+ """Response with protocol ID 0x0000 but wrong function code must NOT be classified as modbus."""
+ owner, worker = self._build_worker(ports=[1024])
+ worker.state["open_ports"] = [1024]
+ worker.target = "10.0.0.1"
+
+ # Protocol ID 0x0000 at bytes 2-3, but function code at byte 7 is 0x01 (not 0x2B)
+ bad_modbus = b'\x00\x01\x00\x00\x00\x05\x01\x01\x00\x00\x00'
+
+ call_index = [0]
+
+ def fake_socket_factory(*args, **kwargs):
+ mock_sock = MagicMock()
+ mock_sock.recv.return_value = b""
+ idx = call_index[0]
+ call_index[0] += 1
+ if idx == 3: # modbus probe is the 4th socket
+ mock_sock.recv.return_value = bad_modbus
+ return mock_sock
+
+ with patch("extensions.business.cybersec.red_mesh.redmesh_utils.socket.socket", side_effect=fake_socket_factory):
+ worker._fingerprint_ports()
+
+ self.assertNotEqual(worker.state["port_protocols"][1024], "modbus")
+
+ def test_fingerprint_mysql_bad_payload_length(self):
+ """MySQL-like bytes but absurd payload length prefix must NOT be classified as mysql."""
+ owner, worker = self._build_worker(ports=[9999])
+ worker.state["open_ports"] = [9999]
+ worker.target = "10.0.0.1"
+
+ # Payload length = 0x000001 (1 byte) — too small for a real MySQL handshake
+ # seq=0x00, protocol=0x0a, then "5\x00" as a tiny version
+ fake_pkt = b'\x01\x00\x00\x00\x0a5\x00'
+
+ def fake_socket_factory(*args, **kwargs):
+ mock_sock = MagicMock()
+ mock_sock.recv.return_value = fake_pkt
+ return mock_sock
+
+ with patch("extensions.business.cybersec.red_mesh.redmesh_utils.socket.socket", side_effect=fake_socket_factory):
+ worker._fingerprint_ports()
+
+ self.assertNotEqual(worker.state["port_protocols"][9999], "mysql")
+
+ # ===== NEW TESTS — Generic probe vulnerability detection =====
+
+ def test_generic_probe_version_disclosure(self):
+ """Generic probe should flag version disclosure from banner."""
+ owner, worker = self._build_worker(ports=[9999])
+
+ class DummySocket:
+ def __init__(self, *a, **kw): pass
+ def settimeout(self, t): pass
+ def connect(self, addr): pass
+ def recv(self, n): return b"220 mail.example.com ESMTP Exim 4.94.1 ready\r\n"
+ def close(self): pass
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
+ return_value=DummySocket(),
+ ):
+ result = worker._service_info_generic("example.com", 9999)
+
+ self.assertIsInstance(result, dict)
+ self.assertEqual(result.get("product"), "exim")
+ self.assertEqual(result.get("version"), "4.94.1")
+ self._assert_has_finding(result, "version disclosed")
+
+ def test_generic_probe_cve_match(self):
+ """Generic probe should find CVEs from banner version."""
+ owner, worker = self._build_worker(ports=[9999])
+
+ class DummySocket:
+ def __init__(self, *a, **kw): pass
+ def settimeout(self, t): pass
+ def connect(self, addr): pass
+ def recv(self, n): return b"SSH-2.0-OpenSSH_7.4\r\n"
+ def close(self): pass
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
+ return_value=DummySocket(),
+ ):
+ result = worker._service_info_generic("example.com", 9999)
+
+ self.assertIsInstance(result, dict)
+ self.assertEqual(result.get("product"), "openssh")
+ # Relies on this project's cve_db constraint matching 7.4 for CVE-2024-6387 (regreSSHion). NOTE(review): upstream affected range is 8.5p1-9.7p1 (fixed 9.8p1), which excludes 7.4 — confirm the cve_db constraint is intentionally broader.
+ self._assert_has_finding(result, "CVE-2024-6387")
+
+ def test_generic_probe_binary_returns_none(self):
+ """Generic probe should return None for pure binary banners."""
+ owner, worker = self._build_worker(ports=[9999])
+
+ class DummySocket:
+ def __init__(self, *a, **kw): pass
+ def settimeout(self, t): pass
+ def connect(self, addr): pass
+ def recv(self, n): return b'\x00\x01\x00\x00\x00\x05\x01\x03'
+ def close(self): pass
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
+ return_value=DummySocket(),
+ ):
+ result = worker._service_info_generic("example.com", 9999)
+
+ self.assertIsNone(result)
+
+ def test_generic_probe_no_version_no_findings(self):
+ """Generic probe with readable banner but no product match should return no findings."""
+ owner, worker = self._build_worker(ports=[9999])
+
+ class DummySocket:
+ def __init__(self, *a, **kw): pass
+ def settimeout(self, t): pass
+ def connect(self, addr): pass
+ def recv(self, n): return b"Welcome to Custom Service\r\n"
+ def close(self): pass
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
+ return_value=DummySocket(),
+ ):
+ result = worker._service_info_generic("example.com", 9999)
+
+ self.assertIsInstance(result, dict)
+ self.assertIn("Welcome to Custom Service", result.get("banner", ""))
+ self.assertEqual(result["findings"], [])
+
+ # ===== NEW TESTS — VPN endpoint detection =====
+
+ def test_vpn_endpoint_detection(self):
+ owner, worker = self._build_worker()
+
+ def fake_get(url, timeout=3, verify=False, allow_redirects=False):
+ resp = MagicMock()
+ if "/remote/login" in url:
+ resp.status_code = 200
+ resp.text = "Please Login - fortinet FortiGate"
+ resp.headers = {"Set-Cookie": ""}
+ else:
+ resp.status_code = 404
+ resp.text = ""
+ resp.headers = {}
+ return resp
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.web_discovery_mixin.requests.get",
+ side_effect=fake_get,
+ ):
+ result = worker._web_test_vpn_endpoints("example.com", 443)
+ self._assert_has_finding(result, "FortiGate")
+
+
+class TestFindingsModule(unittest.TestCase):
+ """Standalone tests for findings.py module."""
+
+ def test_severity_is_str_enum(self):
+ from extensions.business.cybersec.red_mesh.findings import Severity
+ self.assertIsInstance(Severity.CRITICAL, str)
+ self.assertEqual(Severity.CRITICAL, "CRITICAL")
+
+ def test_finding_is_frozen(self):
+ from extensions.business.cybersec.red_mesh.findings import Finding, Severity
+ f = Finding(Severity.HIGH, "test", "desc")
+ with self.assertRaises(AttributeError):
+ f.title = "modified"
+
+ def test_finding_hashable(self):
+ from extensions.business.cybersec.red_mesh.findings import Finding, Severity
+ f1 = Finding(Severity.HIGH, "test", "desc")
+ f2 = Finding(Severity.HIGH, "test", "desc")
+ self.assertEqual(hash(f1), hash(f2))
+ s = {f1, f2}
+ self.assertEqual(len(s), 1)
+
+
+class TestCveDatabase(unittest.TestCase):
+ """Standalone tests for cve_db.py module."""
+
+ def test_all_entries_valid(self):
+ from extensions.business.cybersec.red_mesh.cve_db import CVE_DATABASE, _matches_constraint
+ for entry in CVE_DATABASE:
+ self.assertTrue(entry.product)
+ self.assertTrue(entry.cve_id.startswith("CVE-"))
+ self.assertTrue(entry.title)
+ # Constraint should be parseable
+ result = _matches_constraint("0.0.1", entry.constraint)
+ self.assertIsInstance(result, bool)
+
+ def test_openssh_regresshion(self):
+ from extensions.business.cybersec.red_mesh.cve_db import check_cves
+ findings = check_cves("openssh", "8.9")
+ cve_ids = [f.title for f in findings]
+ self.assertTrue(any("CVE-2024-6387" in t for t in cve_ids), f"Expected regreSSHion CVE, got: {cve_ids}")
+
+ def test_apache_path_traversal(self):
+ from extensions.business.cybersec.red_mesh.cve_db import check_cves
+ findings = check_cves("apache", "2.4.49")
+ cve_ids = [f.title for f in findings]
+ self.assertTrue(any("CVE-2021-41773" in t for t in cve_ids))
+
+
+class TestCorrelationEngine(unittest.TestCase):
+ """Tests for the cross-service correlation engine."""
+
+ def _build_worker(self, ports=None):
+ if ports is None:
+ ports = [80]
+ owner = DummyOwner()
+ worker = PentestLocalWorker(
+ owner=owner,
+ target="example.com",
+ job_id="job-corr",
+ initiator="init@example",
+ local_id_prefix="C",
+ worker_target_ports=ports,
+ )
+ worker.stop_event = MagicMock()
+ worker.stop_event.is_set.return_value = False
+ return owner, worker
+
+ def test_port_ratio_anomaly(self):
+ """600/1000 open ports should trigger honeypot finding."""
+ _, worker = self._build_worker(ports=list(range(1, 1001)))
+ worker.state["open_ports"] = list(range(1, 601))
+ worker.state["ports_scanned"] = list(range(1, 1001))
+ worker._post_scan_correlate()
+ findings = worker.state["correlation_findings"]
+ self.assertTrue(
+ any("honeypot" in f["title"].lower() and "port" in f["title"].lower() for f in findings),
+ f"Expected port ratio honeypot finding, got: {findings}"
+ )
+
+ def test_port_ratio_normal(self):
+ """5/1000 open ports should NOT trigger honeypot finding."""
+ _, worker = self._build_worker(ports=list(range(1, 1001)))
+ worker.state["open_ports"] = [22, 80, 443, 8080, 8443]
+ worker.state["ports_scanned"] = list(range(1, 1001))
+ worker._post_scan_correlate()
+ findings = worker.state["correlation_findings"]
+ self.assertFalse(
+ any("port" in f["title"].lower() and "honeypot" in f["title"].lower() for f in findings),
+ f"Unexpected port ratio finding: {findings}"
+ )
+
+ def test_os_mismatch(self):
+ """Ubuntu + Darwin should trigger OS mismatch finding."""
+ _, worker = self._build_worker()
+ worker.state["scan_metadata"]["os_claims"] = {
+ "ssh:22": "Ubuntu",
+ "redis:6379": "Darwin 21.6.0",
+ }
+ worker._post_scan_correlate()
+ findings = worker.state["correlation_findings"]
+ self.assertTrue(
+ any("os mismatch" in f["title"].lower() for f in findings),
+ f"Expected OS mismatch finding, got: {findings}"
+ )
+
+ def test_os_consistent(self):
+ """Ubuntu + Debian should NOT trigger OS mismatch (both Linux)."""
+ _, worker = self._build_worker()
+ worker.state["scan_metadata"]["os_claims"] = {
+ "ssh:22": "Ubuntu",
+ "redis:6379": "Linux 5.4.0",
+ }
+ worker._post_scan_correlate()
+ findings = worker.state["correlation_findings"]
+ self.assertFalse(
+ any("os mismatch" in f["title"].lower() for f in findings),
+ f"Unexpected OS mismatch finding: {findings}"
+ )
+
+ def test_infrastructure_leak_multi_subnet(self):
+ """Two /16 subnets should trigger infrastructure leak."""
+ _, worker = self._build_worker()
+ worker.state["scan_metadata"]["internal_ips"] = [
+ {"ip": "10.0.1.5", "source": "es_nodes:9200"},
+ {"ip": "172.17.0.2", "source": "ftp_pasv:21"},
+ ]
+ worker._post_scan_correlate()
+ findings = worker.state["correlation_findings"]
+ self.assertTrue(
+ any("infrastructure leak" in f["title"].lower() or "subnet" in f["title"].lower() for f in findings),
+ f"Expected infrastructure leak finding, got: {findings}"
+ )
+
+ def test_timezone_drift(self):
+ """Two different timezone offsets should trigger drift finding."""
+ _, worker = self._build_worker()
+ worker.state["scan_metadata"]["timezone_hints"] = [
+ {"offset": "+0000", "source": "smtp:25"},
+ {"offset": "-0500", "source": "smtp:587"},
+ ]
+ worker._post_scan_correlate()
+ findings = worker.state["correlation_findings"]
+ self.assertTrue(
+ any("timezone" in f["title"].lower() for f in findings),
+ f"Expected timezone drift finding, got: {findings}"
+ )
+
+ def test_emit_metadata_dict(self):
+ """_emit_metadata should populate os_claims dict correctly."""
+ _, worker = self._build_worker()
+ worker._emit_metadata("os_claims", "ssh:22", "Ubuntu")
+ self.assertEqual(worker.state["scan_metadata"]["os_claims"]["ssh:22"], "Ubuntu")
+
+ def test_emit_metadata_list(self):
+ """_emit_metadata should append to internal_ips list."""
+ _, worker = self._build_worker()
+ entry = {"ip": "10.0.0.1", "source": "test"}
+ worker._emit_metadata("internal_ips", entry)
+ self.assertIn(entry, worker.state["scan_metadata"]["internal_ips"])
+
+ def test_emit_metadata_missing_state(self):
+ """_emit_metadata should be a no-op when scan_metadata is absent."""
+ _, worker = self._build_worker()
+ del worker.state["scan_metadata"]
+ # Should not raise
+ worker._emit_metadata("os_claims", "ssh:22", "Ubuntu")
+
+ def test_mysql_salt_low_entropy(self):
+ """All-same-byte MySQL salt should trigger low entropy finding."""
+ _, worker = self._build_worker(ports=[3306])
+ # Build a MySQL handshake with all-zero salt bytes
+ version = b"5.7.99-fake"
+ # protocol_version(1) + version + null + thread_id(4) + salt1(8) + filler(1)
+ # + caps(2) + charset(1) + status(2) + caps_upper(2) + auth_len(1) + reserved(10) + salt2(12) + null
+ salt1 = b'\x00' * 8
+ salt2 = b'\x00' * 12
+ after_version = b'\x01\x00\x00\x00' + salt1 + b'\x00' # thread_id + salt1 + filler
+ after_version += b'\x00\x00' # caps
+ after_version += b'\x21' # charset
+ after_version += b'\x00\x00' # status
+ after_version += b'\x00\x00' # caps_upper
+ after_version += b'\x15' # auth_len
+ after_version += b'\x00' * 10 # reserved
+ after_version += salt2 + b'\x00'
+ after_version += b'mysql_native_password\x00'
+ payload = bytes([0x0a]) + version + b'\x00' + after_version
+ pkt_len = len(payload).to_bytes(3, 'little')
+ packet = pkt_len + b'\x00' + payload
+
+ class DummySocket:
+ def __init__(self, *args, **kwargs):
+ pass
+ def settimeout(self, timeout):
+ pass
+ def connect(self, addr):
+ pass
+ def recv(self, nbytes):
+ return packet
+ def close(self):
+ pass
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
+ return_value=DummySocket(),
+ ):
+ info = worker._service_info_mysql("example.com", 3306)
+ self.assertIsInstance(info, dict)
+ # Should have a low entropy finding
+ found = any("entropy" in f.get("title", "").lower() for f in info.get("findings", []))
+ self.assertTrue(found, f"Expected low entropy finding, got: {info.get('findings', [])}")
+
+ def test_ftp_pasv_ip_leak(self):
+ """PASV with RFC1918 IP should trigger internal IP leak finding."""
+ _, worker = self._build_worker(ports=[21])
+
+ class DummyFTP:
+ def __init__(self, timeout=3):
+ pass
+ def connect(self, target, port, timeout=3):
+ return None
+ def getwelcome(self):
+ return "220 FTP Ready"
+ def login(self, *args, **kwargs):
+ return None
+ def sendcmd(self, cmd):
+ if cmd == "PASV":
+ return "227 Entering Passive Mode (192,168,1,100,4,1)"
+ if cmd == "SYST":
+ return "215 UNIX"
+ if cmd == "FEAT":
+ return "211 End"
+ if cmd == "AUTH TLS":
+ raise Exception("not supported")
+ return ""
+ def set_pasv(self, val):
+ pass
+ def pwd(self):
+ return "/"
+ def cwd(self, path):
+ raise Exception("denied")
+ def quit(self):
+ pass
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.ftplib.FTP",
+ return_value=DummyFTP(),
+ ):
+ info = worker._service_info_ftp("example.com", 21)
+ self.assertIsInstance(info, dict)
+ found = any("pasv" in f.get("title", "").lower() for f in info.get("findings", []))
+ self.assertTrue(found, f"Expected PASV IP leak finding, got: {info.get('findings', [])}")
+
+ def test_web_200_for_all(self):
+ """200 on random path should trigger catch-all finding."""
+ _, worker = self._build_worker()
+
+ def fake_get(url, timeout=2, verify=False):
+ resp = MagicMock()
+ resp.headers = {}
+ resp.text = ""
+ resp.status_code = 200
+ resp.reason = "OK"
+ return resp
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.web_discovery_mixin.requests.get",
+ side_effect=fake_get,
+ ):
+ result = worker._web_test_common("example.com", 80)
+ self.assertIsInstance(result, dict)
+ found = any("random path" in f.get("title", "").lower() or "200 for" in f.get("title", "").lower()
+ for f in result.get("findings", []))
+ self.assertTrue(found, f"Expected 200-for-all finding, got: {result.get('findings', [])}")
+
+ def test_tls_san_parsing(self):
+ """DER cert with SAN IPs should be correctly extracted."""
+ _, worker = self._build_worker(ports=[443])
+ # Generate a test cert with SANs using cryptography
+ try:
+ from cryptography import x509
+ from cryptography.x509.oid import NameOID
+ from cryptography.hazmat.primitives import hashes, serialization
+ from cryptography.hazmat.primitives.asymmetric import rsa
+ import datetime
+ import ipaddress
+
+ key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
+ subject = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "test.example.com")])
+ cert = (
+ x509.CertificateBuilder()
+ .subject_name(subject)
+ .issuer_name(subject)
+ .public_key(key.public_key())
+ .serial_number(x509.random_serial_number())
+ .not_valid_before(datetime.datetime.utcnow())
+ .not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=365))
+ .add_extension(
+ x509.SubjectAlternativeName([
+ x509.DNSName("test.example.com"),
+ x509.DNSName("www.example.com"),
+ x509.IPAddress(ipaddress.IPv4Address("10.0.0.1")),
+ x509.IPAddress(ipaddress.IPv4Address("192.168.1.1")),
+ ]),
+ critical=False,
+ )
+ .sign(key, hashes.SHA256())
+ )
+ cert_der = cert.public_bytes(serialization.Encoding.DER)
+
+ dns_names, ip_addresses = worker._tls_parse_san_from_der(cert_der)
+ self.assertIn("test.example.com", dns_names)
+ self.assertIn("www.example.com", dns_names)
+ self.assertIn("10.0.0.1", ip_addresses)
+ self.assertIn("192.168.1.1", ip_addresses)
+ except ImportError:
+ self.skipTest("cryptography library not available")
+
+ def test_cve_confidence_tentative(self):
+ """All CVE findings should have tentative confidence."""
+ from extensions.business.cybersec.red_mesh.cve_db import check_cves
+ findings = check_cves("openssh", "8.9")
+ self.assertTrue(len(findings) > 0, "Expected at least one CVE finding")
+ for f in findings:
+ self.assertEqual(f.confidence, "tentative", f"Expected tentative confidence, got {f.confidence} for {f.title}")
+
+ def test_ssh_dsa_key(self):
+ """ssh-dss in key_types should trigger DSA finding."""
+ _, worker = self._build_worker(ports=[22])
+
+ class DummySecOpts:
+ ciphers = ["aes256-ctr"]
+ kex = ["curve25519-sha256"]
+ key_types = ["ssh-rsa", "ssh-dss"]
+
+ class DummyTransport:
+ def __init__(self, *args, **kwargs):
+ pass
+ def connect(self):
+ pass
+ def get_security_options(self):
+ return DummySecOpts()
+ def get_remote_server_key(self):
+ return None
+ def close(self):
+ pass
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.paramiko.Transport",
+ return_value=DummyTransport(),
+ ):
+ findings, weak_labels = worker._ssh_check_ciphers("example.com", 22)
+ found = any("dsa" in f.title.lower() or "ssh-dss" in f.title.lower() for f in findings)
+ self.assertTrue(found, f"Expected DSA key finding, got: {[f.title for f in findings]}")
+
+ def test_execute_job_correlation(self):
+ """execute_job should include correlation_completed in completed_tests."""
+ _, worker = self._build_worker()
+
+ with patch.object(worker, "_scan_ports_step"), \
+ patch.object(worker, "_fingerprint_ports"), \
+ patch.object(worker, "_gather_service_info"), \
+ patch.object(worker, "_run_web_tests"), \
+ patch.object(worker, "_post_scan_correlate"):
+ worker.execute_job()
+
+ self.assertTrue(worker.state["done"])
+ self.assertIn("correlation_completed", worker.state["completed_tests"])
+
+
+class TestScannerEnhancements(unittest.TestCase):
+ """Tests for the 5 partial scanner enhancements (Tier 1)."""
+
+ def _build_worker(self, ports=None):
+ if ports is None:
+ ports = [80]
+ owner = DummyOwner()
+ worker = PentestLocalWorker(
+ owner=owner,
+ target="example.com",
+ job_id="job-enh",
+ initiator="init@example",
+ local_id_prefix="E",
+ worker_target_ports=ports,
+ )
+ worker.stop_event = MagicMock()
+ worker.stop_event.is_set.return_value = False
+ return owner, worker
+
+ # --- Item 1: TLS validity period ---
+
+ def test_tls_validity_period_10yr(self):
+ """Certificate with 10-year validity should flag MEDIUM."""
+ _, worker = self._build_worker(ports=[443])
+ try:
+ from cryptography import x509
+ from cryptography.x509.oid import NameOID
+ from cryptography.hazmat.primitives import hashes, serialization
+ from cryptography.hazmat.primitives.asymmetric import rsa
+ import datetime
+
+ key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
+ subject = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "test.example.com")])
+ cert = (
+ x509.CertificateBuilder()
+ .subject_name(subject)
+ .issuer_name(subject)
+ .public_key(key.public_key())
+ .serial_number(x509.random_serial_number())
+ .not_valid_before(datetime.datetime(2020, 1, 1, tzinfo=datetime.timezone.utc))
+ .not_valid_after(datetime.datetime(2030, 1, 1, tzinfo=datetime.timezone.utc))
+ .sign(key, hashes.SHA256())
+ )
+ cert_der = cert.public_bytes(serialization.Encoding.DER)
+
+ findings = worker._tls_check_validity_period(cert_der)
+ self.assertEqual(len(findings), 1)
+ self.assertEqual(findings[0].severity, "MEDIUM")
+ self.assertIn("validity span", findings[0].title.lower())
+ except ImportError:
+ self.skipTest("cryptography library not available")
+
+ def test_tls_validity_period_1yr(self):
+ """Certificate with 1-year validity should produce no finding."""
+ _, worker = self._build_worker(ports=[443])
+ try:
+ from cryptography import x509
+ from cryptography.x509.oid import NameOID
+ from cryptography.hazmat.primitives import hashes, serialization
+ from cryptography.hazmat.primitives.asymmetric import rsa
+ import datetime
+
+ key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
+ subject = x509.Name([x509.NameAttribute(NameOID.COMMON_NAME, "test.example.com")])
+ cert = (
+ x509.CertificateBuilder()
+ .subject_name(subject)
+ .issuer_name(subject)
+ .public_key(key.public_key())
+ .serial_number(x509.random_serial_number())
+ .not_valid_before(datetime.datetime(2024, 1, 1, tzinfo=datetime.timezone.utc))
+ .not_valid_after(datetime.datetime(2025, 1, 1, tzinfo=datetime.timezone.utc))
+ .sign(key, hashes.SHA256())
+ )
+ cert_der = cert.public_bytes(serialization.Encoding.DER)
+
+ findings = worker._tls_check_validity_period(cert_der)
+ self.assertEqual(len(findings), 0)
+ except ImportError:
+ self.skipTest("cryptography library not available")
+
+ # --- Item 2: Redis stale persistence ---
+
+ def test_redis_persistence_stale(self):
+ """rdb_last_bgsave_time 400 days old should flag LOW."""
+ _, worker = self._build_worker(ports=[6379])
+ import time
+
+ stale_ts = int(time.time()) - 400 * 86400
+
+ cmd_responses = {
+ "PING": "+PONG\r\n",
+ "INFO server": "$50\r\nredis_version:7.0.0\r\nos:Linux\r\n",
+ "CONFIG GET dir": "-ERR\r\n",
+ "DBSIZE": ":0\r\n",
+ "CLIENT LIST": "-ERR\r\n",
+ "INFO persistence": f"$50\r\nrdb_last_bgsave_time:{stale_ts}\r\n",
+ }
+
+ class DummySocket:
+ def __init__(self, *a, **kw):
+ self._buf = b""
+ def settimeout(self, t):
+ return None
+ def connect(self, addr):
+ return None
+ def sendall(self, data):
+ cmd = data.decode().strip()
+ self._buf = cmd_responses.get(cmd, "-ERR\r\n").encode()
+ def recv(self, nbytes):
+ data = self._buf
+ self._buf = b""
+ return data
+ def close(self):
+ return None
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
+ return_value=DummySocket(),
+ ):
+ info = worker._service_info_redis("example.com", 6379)
+
+ found = any("stale" in f.get("title", "").lower() for f in info.get("findings", []))
+ self.assertTrue(found, f"Expected stale persistence finding, got: {info.get('findings', [])}")
+
+ def test_redis_persistence_never_saved(self):
+ """rdb_last_bgsave_time=0 should flag LOW (never saved)."""
+ _, worker = self._build_worker(ports=[6379])
+
+ cmd_responses = {
+ "PING": "+PONG\r\n",
+ "INFO server": "$50\r\nredis_version:7.0.0\r\nos:Linux\r\n",
+ "CONFIG GET dir": "-ERR\r\n",
+ "DBSIZE": ":0\r\n",
+ "CLIENT LIST": "-ERR\r\n",
+ "INFO persistence": "$30\r\nrdb_last_bgsave_time:0\r\n",
+ }
+
+ class DummySocket:
+ def __init__(self, *a, **kw):
+ self._buf = b""
+ def settimeout(self, t):
+ return None
+ def connect(self, addr):
+ return None
+ def sendall(self, data):
+ cmd = data.decode().strip()
+ self._buf = cmd_responses.get(cmd, "-ERR\r\n").encode()
+ def recv(self, nbytes):
+ data = self._buf
+ self._buf = b""
+ return data
+ def close(self):
+ return None
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
+ return_value=DummySocket(),
+ ):
+ info = worker._service_info_redis("example.com", 6379)
+
+ found = any("never" in f.get("title", "").lower() for f in info.get("findings", []))
+ self.assertTrue(found, f"Expected never-saved finding, got: {info.get('findings', [])}")
+
+ # --- Item 3: SSH RSA key size ---
+
+ def test_ssh_rsa_1024_high(self):
+ """1024-bit RSA key should flag HIGH."""
+ _, worker = self._build_worker(ports=[22])
+
+ class DummyKey:
+ def get_name(self):
+ return "ssh-rsa"
+ def get_bits(self):
+ return 1024
+
+ class DummySecOpts:
+ ciphers = ["aes256-ctr"]
+ kex = ["curve25519-sha256"]
+ key_types = ["ssh-rsa"]
+
+ class DummyTransport:
+ def __init__(self, *a, **kw): pass
+ def connect(self): pass
+ def get_security_options(self): return DummySecOpts()
+ def get_remote_server_key(self): return DummyKey()
+ def close(self): pass
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.paramiko.Transport",
+ return_value=DummyTransport(),
+ ):
+ findings, weak_labels = worker._ssh_check_ciphers("example.com", 22)
+ found = any("critically weak" in f.title.lower() and "1024" in f.title for f in findings)
+ self.assertTrue(found, f"Expected HIGH RSA finding, got: {[f.title for f in findings]}")
+ sev = [f.severity for f in findings if "1024" in f.title]
+ self.assertTrue(any(s == "HIGH" or str(s) == "HIGH" or getattr(s, 'value', None) == "HIGH" for s in sev))
+
+ def test_ssh_rsa_2048_low(self):
+ """2048-bit RSA key should flag LOW (below NIST recommendation)."""
+ _, worker = self._build_worker(ports=[22])
+
+ class DummyKey:
+ def get_name(self):
+ return "ssh-rsa"
+ def get_bits(self):
+ return 2048
+
+ class DummySecOpts:
+ ciphers = ["aes256-ctr"]
+ kex = ["curve25519-sha256"]
+ key_types = ["ssh-rsa"]
+
+ class DummyTransport:
+ def __init__(self, *a, **kw): pass
+ def connect(self): pass
+ def get_security_options(self): return DummySecOpts()
+ def get_remote_server_key(self): return DummyKey()
+ def close(self): pass
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.paramiko.Transport",
+ return_value=DummyTransport(),
+ ):
+ findings, weak_labels = worker._ssh_check_ciphers("example.com", 22)
+ found = any("nist" in f.title.lower() and "2048" in f.title for f in findings)
+ self.assertTrue(found, f"Expected LOW RSA finding, got: {[f.title for f in findings]}")
+ sev = [f.severity for f in findings if "2048" in f.title]
+ self.assertTrue(any(s == "LOW" or str(s) == "LOW" or getattr(s, 'value', None) == "LOW" for s in sev))
+
+ def test_ssh_rsa_4096_no_finding(self):
+ """4096-bit RSA key should produce no RSA-related finding."""
+ _, worker = self._build_worker(ports=[22])
+
+ class DummyKey:
+ def get_name(self):
+ return "ssh-rsa"
+ def get_bits(self):
+ return 4096
+
+ class DummySecOpts:
+ ciphers = ["aes256-ctr"]
+ kex = ["curve25519-sha256"]
+ key_types = ["ssh-rsa"]
+
+ class DummyTransport:
+ def __init__(self, *a, **kw): pass
+ def connect(self): pass
+ def get_security_options(self): return DummySecOpts()
+ def get_remote_server_key(self): return DummyKey()
+ def close(self): pass
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.paramiko.Transport",
+ return_value=DummyTransport(),
+ ):
+ findings, weak_labels = worker._ssh_check_ciphers("example.com", 22)
+ rsa_findings = [f for f in findings if "rsa" in f.title.lower()]
+ self.assertEqual(len(rsa_findings), 0, f"Expected no RSA findings, got: {[f.title for f in rsa_findings]}")
+
+ # --- Item 4: SMTP K8s pod name + .internal ---
+
+ def test_smtp_k8s_pod_hostname(self):
+ """K8s-style pod hostname should flag LOW."""
+ _, worker = self._build_worker(ports=[25])
+
+ class DummySMTP:
+ def __init__(self, timeout=5): pass
+ def connect(self, target, port):
+ return (220, b"ESMTP ready")
+ def ehlo(self, identity):
+ return (250, b"nginx-7f4b5c9d-kx9wqmrt Hello client [1.2.3.4]")
+ def docmd(self, cmd):
+ return (500, b"unrecognized")
+ def quit(self): pass
+
+ with patch(
+ "smtplib.SMTP",
+ return_value=DummySMTP(),
+ ):
+ info = worker._service_info_smtp("example.com", 25)
+
+ self.assertIsInstance(info, dict)
+ found = any("kubernetes" in f.get("title", "").lower() or "pod name" in f.get("title", "").lower()
+ for f in info.get("findings", []))
+ self.assertTrue(found, f"Expected K8s pod finding, got: {info.get('findings', [])}")
+
+ def test_smtp_internal_hostname(self):
+ """Hostname ending in .internal should flag LOW."""
+ _, worker = self._build_worker(ports=[25])
+
+ class DummySMTP:
+ def __init__(self, timeout=5): pass
+ def connect(self, target, port):
+ return (220, b"ESMTP ready")
+ def ehlo(self, identity):
+ return (250, b"ip-10-0-1-5.ec2.internal Hello client [1.2.3.4]")
+ def docmd(self, cmd):
+ return (500, b"unrecognized")
+ def quit(self): pass
+
+ with patch(
+ "smtplib.SMTP",
+ return_value=DummySMTP(),
+ ):
+ info = worker._service_info_smtp("example.com", 25)
+
+ self.assertIsInstance(info, dict)
+ found = any(".internal" in f.get("title", "").lower() or "cloud-internal" in f.get("title", "").lower()
+ for f in info.get("findings", []))
+ self.assertTrue(found, f"Expected .internal finding, got: {info.get('findings', [])}")
+
+ # --- Item 5: Web endpoint probe extension ---
+
+ def test_web_xmlrpc_endpoint(self):
+ """xmlrpc.php returning 200 should flag MEDIUM."""
+ _, worker = self._build_worker()
+
+ def fake_get(url, timeout=2, verify=False):
+ resp = MagicMock()
+ resp.headers = {}
+ resp.text = ""
+ if url.endswith("/xmlrpc.php"):
+ resp.status_code = 200
+ else:
+ resp.status_code = 404
+ return resp
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.web_discovery_mixin.requests.get",
+ side_effect=fake_get,
+ ):
+ result = worker._web_test_common("example.com", 80)
+ found = any("xmlrpc" in f.get("title", "").lower() for f in result.get("findings", []))
+ self.assertTrue(found, f"Expected xmlrpc finding, got: {result.get('findings', [])}")
+
+ def test_web_wp_login_endpoint(self):
+ """wp-login.php returning 200 should flag LOW."""
+ _, worker = self._build_worker()
+
+ def fake_get(url, timeout=2, verify=False):
+ resp = MagicMock()
+ resp.headers = {}
+ resp.text = ""
+ if url.endswith("/wp-login.php"):
+ resp.status_code = 200
+ else:
+ resp.status_code = 404
+ return resp
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.web_discovery_mixin.requests.get",
+ side_effect=fake_get,
+ ):
+ result = worker._web_test_common("example.com", 80)
+ found = any("wp-login" in f.get("title", "").lower() for f in result.get("findings", []))
+ self.assertTrue(found, f"Expected wp-login finding, got: {result.get('findings', [])}")
+
+ def test_web_security_txt_endpoint(self):
+ """security.txt returning 200 should produce INFO finding."""
+ _, worker = self._build_worker()
+
+ def fake_get(url, timeout=2, verify=False):
+ resp = MagicMock()
+ resp.headers = {}
+ resp.text = ""
+ if url.endswith("/.well-known/security.txt"):
+ resp.status_code = 200
+ else:
+ resp.status_code = 404
+ return resp
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.web_discovery_mixin.requests.get",
+ side_effect=fake_get,
+ ):
+ result = worker._web_test_common("example.com", 80)
+ found = any("security.txt" in f.get("title", "").lower() or "security policy" in f.get("description", "").lower()
+ for f in result.get("findings", []))
+ self.assertTrue(found, f"Expected security.txt finding, got: {result.get('findings', [])}")
+
+
+ # --- Item 6: HTTP empty reply fallback ---
+
+ def test_http_empty_reply_fallback(self):
+ """HTTP probe should fall back to raw socket when requests.get fails with empty reply."""
+ _, worker = self._build_worker(ports=[81])
+
+ class DummySocket:
+ def __init__(self, chunks):
+ self._chunks = list(chunks)
+ def settimeout(self, t): pass
+ def connect(self, addr): pass
+ def send(self, data): pass
+ def recv(self, n):
+ return self._chunks.pop(0) if self._chunks else b""
+ def close(self): pass
+
+ from requests.exceptions import ConnectionError as ReqConnError
+
+ # Case 1: requests fails, raw socket also gets empty reply
+ with patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.requests.get",
+ side_effect=ReqConnError("RemoteDisconnected"),
+ ), patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
+ return_value=DummySocket([b""]),
+ ):
+ result = worker._service_info_http("10.0.0.1", 81)
+ self.assertIsNotNone(result, "Should return a result, not None")
+ self.assertEqual(result.get("banner"), "(empty reply)")
+ titles = [f["title"] for f in result.get("findings", [])]
+ self.assertTrue(any("empty reply" in t.lower() for t in titles),
+ f"Expected empty reply finding, got: {titles}")
+
+ def test_http_empty_reply_fallback_with_banner(self):
+ """HTTP probe raw socket fallback should capture server banner and detect Host-header drop."""
+ _, worker = self._build_worker(ports=[81])
+
+ class DummySocket:
+ def __init__(self, chunks):
+ self._chunks = list(chunks)
+ def settimeout(self, t): pass
+ def connect(self, addr): pass
+ def send(self, data): pass
+ def recv(self, n):
+ return self._chunks.pop(0) if self._chunks else b""
+ def close(self): pass
+
+ from requests.exceptions import ConnectionError as ReqConnError
+ raw_resp = b"HTTP/1.1 200 OK\r\nServer: nginx/1.24.0\r\n\r\n"
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.requests.get",
+ side_effect=ReqConnError("RemoteDisconnected"),
+ ), patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
+ return_value=DummySocket([raw_resp, b""]),
+ ):
+ result = worker._service_info_http("10.0.0.1", 81)
+ self.assertIsNotNone(result, "Should return a result, not None")
+ self.assertEqual(result.get("banner"), "HTTP/1.1 200 OK")
+ self.assertEqual(result.get("server"), "nginx/1.24.0")
+ titles = [f["title"] for f in result.get("findings", [])]
+ self.assertTrue(any("host header" in t.lower() for t in titles),
+ f"Expected Host-header-drop finding, got: {titles}")
+
+ def test_http_fallback_directory_listing(self):
+ """HTTP probe raw socket fallback should detect directory listing."""
+ _, worker = self._build_worker(ports=[81])
+
+ class DummySocket:
+ def __init__(self, chunks):
+ self._chunks = list(chunks)
+ def settimeout(self, t): pass
+ def connect(self, addr): pass
+ def send(self, data): pass
+ def recv(self, n):
+ return self._chunks.pop(0) if self._chunks else b""
+ def close(self): pass
+
+ from requests.exceptions import ConnectionError as ReqConnError
+ raw_resp = (
+ b"HTTP/1.1 200 OK\r\nServer: nginx\r\n\r\n"
+ b"Directory listing for /"
+ b'../'
+ )
+
+ with patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.requests.get",
+ side_effect=ReqConnError("RemoteDisconnected"),
+ ), patch(
+ "extensions.business.cybersec.red_mesh.service_mixin.socket.socket",
+ return_value=DummySocket([raw_resp, b""]),
+ ):
+ result = worker._service_info_http("10.0.0.1", 81)
+ self.assertIsNotNone(result)
+ titles = [f["title"] for f in result.get("findings", [])]
+ self.assertTrue(any("directory listing" in t.lower() for t in titles),
+ f"Expected directory listing finding, got: {titles}")
+ self.assertEqual(result.get("title"), "Directory listing for /")
+
class VerboseResult(unittest.TextTestResult):
  # Adds a blank separator line after each passing test so the verbose
  # runner output stays readable when many cases run back-to-back.
  def addSuccess(self, test):
    super().addSuccess(test)
-    self.stream.writeln() # emits an extra “\n” after the usual “ok”
+    self.stream.writeln() # emits an extra "\n" after the usual "ok"
if __name__ == "__main__":
  runner = unittest.TextTestRunner(verbosity=2, resultclass=VerboseResult)
-  runner.run(unittest.defaultTestLoader.loadTestsFromTestCase(RedMeshOWASPTests))
+  # Aggregate every test case class in this file into a single suite so
+  # one invocation covers all modules.
+  suite = unittest.TestSuite()
+  suite.addTests(unittest.defaultTestLoader.loadTestsFromTestCase(RedMeshOWASPTests))
+  suite.addTests(unittest.defaultTestLoader.loadTestsFromTestCase(TestFindingsModule))
+  suite.addTests(unittest.defaultTestLoader.loadTestsFromTestCase(TestCveDatabase))
+  suite.addTests(unittest.defaultTestLoader.loadTestsFromTestCase(TestCorrelationEngine))
+  suite.addTests(unittest.defaultTestLoader.loadTestsFromTestCase(TestScannerEnhancements))
+  runner.run(suite)
diff --git a/extensions/business/cybersec/red_mesh/web_api_mixin.py b/extensions/business/cybersec/red_mesh/web_api_mixin.py
new file mode 100644
index 00000000..d14d0e0a
--- /dev/null
+++ b/extensions/business/cybersec/red_mesh/web_api_mixin.py
@@ -0,0 +1,153 @@
+import requests
+
+from .findings import Finding, Severity, probe_result, probe_error
+
+
+class _WebApiExposureMixin:
+ """
+ Detect GraphQL introspection leaks, cloud metadata endpoints,
+ and API auth bypass (OWASP WSTG-APIT).
+ """
+
+ def _web_test_graphql_introspection(self, target, port):
+ """
+ Check if GraphQL introspection is exposed in production endpoints.
+
+ Parameters
+ ----------
+ target : str
+ Hostname or IP address.
+ port : int
+ Web port to probe.
+
+ Returns
+ -------
+ dict
+ Structured findings on GraphQL introspection exposure.
+ """
+ findings_list = []
+ scheme = "https" if port in (443, 8443) else "http"
+ base_url = f"{scheme}://{target}"
+ if port not in (80, 443):
+ base_url = f"{scheme}://{target}:{port}"
+ graphql_url = base_url.rstrip("/") + "/graphql"
+ try:
+ payload = {"query": "{__schema{types{name}}}"}
+ resp = requests.post(graphql_url, json=payload, timeout=5, verify=False)
+ if resp.status_code == 200 and "__schema" in resp.text:
+ findings_list.append(Finding(
+ severity=Severity.MEDIUM,
+ title="GraphQL introspection enabled",
+ description=f"GraphQL endpoint at {graphql_url} exposes the full schema "
+ "via introspection, revealing all types, queries, and mutations.",
+ evidence=f"POST {graphql_url} with __schema query returned 200 with schema data.",
+ remediation="Disable introspection in production (e.g. introspection: false in Apollo Server).",
+ owasp_id="A05:2021",
+ cwe_id="CWE-200",
+ confidence="certain",
+ ))
+ except Exception as e:
+ self.P(f"GraphQL probe failed on {graphql_url}: {e}", color='y')
+ return probe_error(target, port, "graphql", e)
+
+ return probe_result(findings=findings_list)
+
+
+ def _web_test_metadata_endpoints(self, target, port):
+ """
+ Probe cloud metadata paths to detect SSRF-style exposure.
+
+ Parameters
+ ----------
+ target : str
+ Hostname or IP address.
+ port : int
+ Web port to probe.
+
+ Returns
+ -------
+ dict
+ Structured findings on metadata endpoint exposure.
+ """
+ findings_list = []
+ scheme = "https" if port in (443, 8443) else "http"
+ base_url = f"{scheme}://{target}"
+ if port not in (80, 443):
+ base_url = f"{scheme}://{target}:{port}"
+
+ metadata_paths = [
+ ("/latest/meta-data/", "AWS EC2"),
+ ("/metadata/computeMetadata/v1/", "GCP"),
+ ("/computeMetadata/v1/", "GCP (alt)"),
+ ]
+ try:
+ for path, provider in metadata_paths:
+ url = base_url.rstrip("/") + path
+ resp = requests.get(url, timeout=3, verify=False, headers={"Metadata-Flavor": "Google"})
+ if resp.status_code == 200:
+ findings_list.append(Finding(
+ severity=Severity.CRITICAL,
+ title=f"Cloud metadata endpoint exposed ({provider})",
+ description=f"Metadata endpoint at {url} is accessible, potentially leaking "
+ "IAM credentials, instance identity tokens, and cloud configuration.",
+ evidence=f"GET {url} returned 200 OK.",
+ remediation="Block metadata endpoint access from application layer; use IMDSv2 (AWS) or metadata concealment.",
+ owasp_id="A05:2021",
+ cwe_id="CWE-918",
+ confidence="certain",
+ ))
+ except Exception as e:
+ self.P(f"Metadata endpoint probe failed on {base_url}: {e}", color='y')
+ return probe_error(target, port, "metadata", e)
+
+ return probe_result(findings=findings_list)
+
+
+ def _web_test_api_auth_bypass(self, target, port):
+ """
+ Detect APIs that succeed despite invalid Authorization headers.
+
+ Parameters
+ ----------
+ target : str
+ Hostname or IP address.
+ port : int
+ Web port to probe.
+
+ Returns
+ -------
+ dict
+ Structured findings related to auth bypass behavior.
+ """
+ findings_list = []
+ scheme = "https" if port in (443, 8443) else "http"
+ base_url = f"{scheme}://{target}"
+ if port not in (80, 443):
+ base_url = f"{scheme}://{target}:{port}"
+ candidate_paths = ["/api/", "/api/health", "/api/status"]
+ try:
+ for path in candidate_paths:
+ url = base_url.rstrip("/") + path
+ resp = requests.get(
+ url,
+ timeout=3,
+ verify=False,
+ headers={"Authorization": "Bearer invalid-token"},
+ )
+ if resp.status_code in (200, 204):
+ findings_list.append(Finding(
+ severity=Severity.HIGH,
+ title=f"API auth bypass: {path} accepts invalid token",
+ description=f"API endpoint {url} returned success with a fabricated Bearer token, "
+ "indicating missing or broken authentication middleware.",
+ evidence=f"GET {url} with 'Bearer invalid-token' returned {resp.status_code}.",
+ remediation="Validate Bearer tokens in authentication middleware for all API endpoints.",
+ owasp_id="A07:2021",
+ cwe_id="CWE-287",
+ confidence="certain",
+ ))
+ except Exception as e:
+ self.P(f"API auth bypass probe failed on {base_url}: {e}", color='y')
+ return probe_error(target, port, "api_auth", e)
+
+ return probe_result(findings=findings_list)
diff --git a/extensions/business/cybersec/red_mesh/web_discovery_mixin.py b/extensions/business/cybersec/red_mesh/web_discovery_mixin.py
new file mode 100644
index 00000000..e5799668
--- /dev/null
+++ b/extensions/business/cybersec/red_mesh/web_discovery_mixin.py
@@ -0,0 +1,317 @@
+import re as _re
+import uuid as _uuid
+import requests
+
+from .findings import Finding, Severity, probe_result, probe_error
+
+
+class _WebDiscoveryMixin:
+ """
+ Enumerate exposed files, admin panels, and homepage secrets (OWASP WSTG-INFO).
+ """
+
+ def _web_test_common(self, target, port):
+ """
+ Look for exposed common endpoints and weak access controls.
+
+ Parameters
+ ----------
+ target : str
+ Hostname or IP address.
+ port : int
+ Web port to probe.
+
+ Returns
+ -------
+ dict
+ Structured findings from endpoint checks.
+ """
+ findings_list = []
+ scheme = "https" if port in (443, 8443) else "http"
+ base_url = f"{scheme}://{target}"
+ if port not in (80, 443):
+ base_url = f"{scheme}://{target}:{port}"
+
+ # --- Catch-all detection: 200-for-all ---
+ try:
+ canary_path = f"/{_uuid.uuid4().hex}"
+ canary_resp = requests.get(base_url + canary_path, timeout=2, verify=False)
+ if canary_resp.status_code == 200:
+ findings_list.append(Finding(
+ severity=Severity.HIGH,
+ title="Web server returns 200 for random paths",
+ description="A request to a non-existent random UUID path returned HTTP 200, "
+ "suggesting a catch-all rule or severely misconfigured server.",
+ evidence=f"GET {base_url}{canary_path} returned 200.",
+ remediation="Investigate the catch-all behavior; ensure proper 404 responses for unknown paths.",
+ cwe_id="CWE-345",
+ confidence="firm",
+ ))
+ except Exception:
+ pass
+
+ # Severity depends on what the path exposes
+ _PATH_META = {
+ "/.env": (Severity.HIGH, "CWE-538", "A05:2021",
+ "Environment file may contain database passwords, API keys, and secrets."),
+ "/.git/": (Severity.HIGH, "CWE-538", "A01:2021",
+ "Git repository exposed — source code, credentials, and history downloadable."),
+ "/admin": (Severity.MEDIUM, "CWE-200", "A01:2021",
+ "Admin panel accessible — verify authentication is enforced."),
+ "/robots.txt": (Severity.INFO, "", "",
+ "Robots.txt present — may reveal hidden paths."),
+ "/login": (Severity.INFO, "", "",
+ "Login page accessible."),
+ "/xmlrpc.php": (Severity.MEDIUM, "CWE-288", "A01:2021",
+ "WordPress XML-RPC endpoint — brute-force amplification and DDoS vector."),
+ "/wp-login.php": (Severity.LOW, "CWE-200", "A01:2021",
+ "WordPress login page accessible — confirms WordPress deployment."),
+ "/.well-known/security.txt": (Severity.INFO, "", "",
+ "Security policy (RFC 9116) published."),
+ }
+
+ try:
+ for path, (severity, cwe, owasp, desc) in _PATH_META.items():
+ url = base_url + path
+ resp = requests.get(url, timeout=2, verify=False)
+ if resp.status_code == 200:
+ findings_list.append(Finding(
+ severity=severity,
+ title=f"Accessible resource: {path}",
+ description=desc,
+ evidence=f"GET {url} returned 200 OK.",
+ remediation=f"Restrict access to {path} or remove it from production." if severity != Severity.INFO else "",
+ owasp_id=owasp,
+ cwe_id=cwe,
+ confidence="certain",
+ ))
+ except Exception as e:
+ self.P(f"Common endpoint probe failed on {base_url}: {e}", color='y')
+ return probe_error(target, port, "common", e)
+
+ return probe_result(findings=findings_list)
+
+
+ def _web_test_homepage(self, target, port):
+ """
+ Scan landing pages for clear-text secrets or database dumps.
+
+ Parameters
+ ----------
+ target : str
+ Hostname or IP address.
+ port : int
+ Web port to probe.
+
+ Returns
+ -------
+ dict
+ Structured findings from homepage inspection.
+ """
+ findings_list = []
+ scheme = "https" if port in (443, 8443) else "http"
+ base_url = f"{scheme}://{target}"
+ if port not in (80, 443):
+ base_url = f"{scheme}://{target}:{port}"
+
+ _MARKER_META = {
+ "API_KEY": (Severity.CRITICAL, "API key found in page source"),
+ "PASSWORD": (Severity.CRITICAL, "Password string found in page source"),
+ "SECRET": (Severity.HIGH, "Secret string found in page source"),
+ "BEGIN RSA PRIVATE KEY": (Severity.CRITICAL, "RSA private key found in page source"),
+ }
+
+ try:
+ resp_main = requests.get(base_url, timeout=3, verify=False)
+ text = resp_main.text[:10000]
+ for marker, (severity, title) in _MARKER_META.items():
+ if marker in text:
+ findings_list.append(Finding(
+ severity=severity,
+ title=title,
+ description=f"The string '{marker}' was found in the HTML source of {base_url}.",
+ evidence=f"Marker '{marker}' present in first 10KB of response.",
+ remediation="Remove sensitive data from client-facing HTML; use server-side environment variables.",
+ owasp_id="A01:2021",
+ cwe_id="CWE-540",
+ confidence="firm",
+ ))
+ except Exception as e:
+ self.P(f"Homepage probe failed on {base_url}: {e}", color='y')
+ return probe_error(target, port, "homepage", e)
+
+ return probe_result(findings=findings_list)
+
+ def _web_test_tech_fingerprint(self, target, port):
+ """
+ Technology fingerprinting: extract Server header, X-Powered-By,
+ meta generator, and detect tech mismatches.
+
+ Parameters
+ ----------
+ target : str
+ Hostname or IP address.
+ port : int
+ Web port to probe.
+
+ Returns
+ -------
+ dict
+ Structured findings with technology details.
+ """
+ findings_list = []
+ raw = {"server": None, "powered_by": None, "generator": None, "technologies": []}
+ scheme = "https" if port in (443, 8443) else "http"
+ base_url = f"{scheme}://{target}" if port in (80, 443) else f"{scheme}://{target}:{port}"
+
+ try:
+ resp = requests.get(base_url, timeout=4, verify=False)
+
+ # Server header
+ server = resp.headers.get("Server")
+ if server:
+ raw["server"] = server
+ raw["technologies"].append(server)
+ findings_list.append(Finding(
+ severity=Severity.LOW,
+ title=f"Server header disclosed: {server}",
+ description=f"Server header reveals software: {server}.",
+ evidence=f"Server: {server}",
+ remediation="Remove or obfuscate the Server header.",
+ owasp_id="A05:2021",
+ cwe_id="CWE-200",
+ confidence="certain",
+ ))
+
+ # X-Powered-By header
+ powered_by = resp.headers.get("X-Powered-By")
+ if powered_by:
+ raw["powered_by"] = powered_by
+ raw["technologies"].append(powered_by)
+ findings_list.append(Finding(
+ severity=Severity.LOW,
+ title=f"X-Powered-By disclosed: {powered_by}",
+ description=f"X-Powered-By header reveals technology: {powered_by}.",
+ evidence=f"X-Powered-By: {powered_by}",
+ remediation="Remove X-Powered-By header.",
+ owasp_id="A05:2021",
+ cwe_id="CWE-200",
+ confidence="certain",
+ ))
+
+ # Meta generator tag
+ body = resp.text[:10000]
+ gen_match = _re.search(
+ r'',
+ remediation="Remove the generator meta tag.",
+ owasp_id="A05:2021",
+ cwe_id="CWE-200",
+ confidence="certain",
+ ))
+
+ # Tech mismatch detection
+ if raw["generator"] and raw["server"]:
+ gen_lower = raw["generator"].lower()
+ srv_lower = raw["server"].lower()
+ # Flag CMS + unexpected server combo (e.g. MediaWiki on Python/aiohttp)
+ cms_indicators = {"wordpress": "php", "mediawiki": "php",
+ "drupal": "php", "joomla": "php"}
+ for cms, expected_tech in cms_indicators.items():
+ if cms in gen_lower and expected_tech not in srv_lower:
+ findings_list.append(Finding(
+ severity=Severity.MEDIUM,
+ title=f"Technology mismatch: {raw['generator']} on {raw['server']}",
+ description=f"{raw['generator']} typically runs on {expected_tech}, "
+ f"but server is {raw['server']}. Possible reverse proxy or misconfiguration.",
+ evidence=f"Generator={raw['generator']}, Server={raw['server']}",
+ remediation="Verify this is intentional.",
+ confidence="tentative",
+ ))
+ break
+
+ except Exception as e:
+ self.P(f"Tech fingerprint failed on {base_url}: {e}", color='y')
+
+ return probe_result(raw_data=raw, findings=findings_list)
+
+ def _web_test_vpn_endpoints(self, target, port):
+ """
+ Detect VPN management endpoints from major vendors.
+
+ Probes:
+ - Cisco ASA: /+CSCOE+/logon.html + webvpn cookie
+ - FortiGate: /remote/login
+ - Pulse Secure: /dana-na/auth/url_default/welcome.cgi
+ - Palo Alto GP: /global-protect/login.esp
+
+ Parameters
+ ----------
+ target : str
+ Hostname or IP address.
+ port : int
+ Web port to probe.
+
+ Returns
+ -------
+ dict
+ Structured findings.
+ """
+ findings_list = []
+ raw = {"vpn_endpoints": []}
+ scheme = "https" if port in (443, 8443) else "http"
+ base_url = f"{scheme}://{target}" if port in (80, 443) else f"{scheme}://{target}:{port}"
+
+ vpn_checks = [
+ {
+ "path": "/+CSCOE+/logon.html",
+ "product": "Cisco ASA/AnyConnect",
+ "check": lambda resp: resp.status_code == 200 and ("webvpn" in resp.headers.get("Set-Cookie", "").lower() or "webvpn" in resp.text.lower()),
+ },
+ {
+ "path": "/remote/login",
+ "product": "FortiGate SSL VPN",
+ "check": lambda resp: resp.status_code == 200 and ("fortinet" in resp.text.lower() or "fortitoken" in resp.text.lower() or "fgt_lang" in resp.headers.get("Set-Cookie", "").lower()),
+ },
+ {
+ "path": "/dana-na/auth/url_default/welcome.cgi",
+ "product": "Pulse Secure / Ivanti VPN",
+ "check": lambda resp: resp.status_code in (200, 302) and ("pulse" in resp.text.lower() or "dana" in resp.text.lower() or "dsid" in resp.headers.get("Set-Cookie", "").lower()),
+ },
+ {
+ "path": "/global-protect/login.esp",
+ "product": "Palo Alto GlobalProtect",
+ "check": lambda resp: resp.status_code == 200 and ("global-protect" in resp.text.lower() or "panGPBannerContent" in resp.text),
+ },
+ ]
+
+ for entry in vpn_checks:
+ try:
+ url = base_url.rstrip("/") + entry["path"]
+ resp = requests.get(url, timeout=3, verify=False, allow_redirects=False)
+ if entry["check"](resp):
+ raw["vpn_endpoints"].append({"product": entry["product"], "path": entry["path"]})
+ findings_list.append(Finding(
+ severity=Severity.MEDIUM,
+ title=f"VPN endpoint detected: {entry['product']}",
+ description=f"{entry['product']} login page accessible at {url}.",
+ evidence=f"URL: {url}, status={resp.status_code}",
+ remediation="Restrict VPN management portal access; verify patching status.",
+ owasp_id="A05:2021",
+ cwe_id="CWE-200",
+ confidence="firm",
+ ))
+ except Exception:
+ pass
+
+ return probe_result(raw_data=raw, findings=findings_list)
diff --git a/extensions/business/cybersec/red_mesh/web_hardening_mixin.py b/extensions/business/cybersec/red_mesh/web_hardening_mixin.py
new file mode 100644
index 00000000..94b65785
--- /dev/null
+++ b/extensions/business/cybersec/red_mesh/web_hardening_mixin.py
@@ -0,0 +1,302 @@
+import requests
+from urllib.parse import quote
+
+from .findings import Finding, Severity, probe_result, probe_error
+
+
+class _WebHardeningMixin:
+ """
+ Audit cookie flags, security headers, CORS policy, redirect handling,
+ and HTTP methods (OWASP WSTG-CONF).
+ """
+
+ def _web_test_flags(self, target, port):
+ """
+ Check cookies for Secure/HttpOnly/SameSite and directory listing.
+
+ Parameters
+ ----------
+ target : str
+ Hostname or IP address.
+ port : int
+ Web port to probe.
+
+ Returns
+ -------
+ dict
+ Structured findings on cookie flags and directory listing.
+ """
+ findings_list = []
+ scheme = "https" if port in (443, 8443) else "http"
+ base_url = f"{scheme}://{target}"
+ if port not in (80, 443):
+ base_url = f"{scheme}://{target}:{port}"
+
+ try:
+ resp_main = requests.get(base_url, timeout=3, verify=False)
+ # Check cookies for Secure/HttpOnly flags
+ cookies_hdr = resp_main.headers.get("Set-Cookie", "")
+ if cookies_hdr:
+ for cookie in cookies_hdr.split(","):
+ cookie_name = cookie.strip().split("=")[0] if "=" in cookie else cookie.strip()[:30]
+ if "Secure" not in cookie:
+ findings_list.append(Finding(
+ severity=Severity.MEDIUM,
+ title=f"Cookie missing Secure flag: {cookie_name}",
+ description=f"Cookie will be sent over unencrypted HTTP connections.",
+ evidence=f"Set-Cookie: {cookie.strip()[:80]} on {base_url}",
+ remediation="Add the Secure attribute to this cookie.",
+ owasp_id="A05:2021",
+ cwe_id="CWE-614",
+ confidence="certain",
+ ))
+ if "HttpOnly" not in cookie:
+ findings_list.append(Finding(
+ severity=Severity.MEDIUM,
+ title=f"Cookie missing HttpOnly flag: {cookie_name}",
+ description=f"Cookie is accessible to JavaScript, enabling theft via XSS.",
+ evidence=f"Set-Cookie: {cookie.strip()[:80]} on {base_url}",
+ remediation="Add the HttpOnly attribute to this cookie.",
+ owasp_id="A05:2021",
+ cwe_id="CWE-1004",
+ confidence="certain",
+ ))
+ if "SameSite" not in cookie:
+ findings_list.append(Finding(
+ severity=Severity.MEDIUM,
+ title=f"Cookie missing SameSite flag: {cookie_name}",
+ description=f"Cookie may be sent with cross-site requests, enabling CSRF.",
+ evidence=f"Set-Cookie: {cookie.strip()[:80]} on {base_url}",
+ remediation="Add SameSite=Lax or SameSite=Strict to this cookie.",
+ owasp_id="A01:2021",
+ cwe_id="CWE-1275",
+ confidence="certain",
+ ))
+ # Detect directory listing
+ if "Index of /" in resp_main.text:
+ findings_list.append(Finding(
+ severity=Severity.MEDIUM,
+ title="Directory listing exposed",
+ description=f"Directory listing is enabled at {base_url}, revealing file structure.",
+ evidence=f"'Index of /' found in response body.",
+ remediation="Disable directory listing in the web server configuration.",
+ owasp_id="A01:2021",
+ cwe_id="CWE-548",
+ confidence="certain",
+ ))
+ except Exception as e:
+ self.P(f"Cookie/flags probe failed on {base_url}: {e}", color='y')
+ return probe_error(target, port, "flags", e)
+
+ return probe_result(findings=findings_list)
+
+
+ def _web_test_security_headers(self, target, port):
+ """
+ Flag missing HTTP security headers.
+
+ Parameters
+ ----------
+ target : str
+ Hostname or IP address.
+ port : int
+ Web port to probe.
+
+ Returns
+ -------
+ dict
+ Structured findings about security headers presence.
+ """
+ findings_list = []
+ scheme = "https" if port in (443, 8443) else "http"
+ base_url = f"{scheme}://{target}"
+ if port not in (80, 443):
+ base_url = f"{scheme}://{target}:{port}"
+
+ _HEADER_META = {
+ "Content-Security-Policy": (Severity.MEDIUM, "CWE-693", "A05:2021",
+ "Prevents XSS and data injection by controlling resource loading."),
+ "X-Frame-Options": (Severity.MEDIUM, "CWE-1021", "A05:2021",
+ "Prevents clickjacking by controlling iframe embedding."),
+ "X-Content-Type-Options": (Severity.LOW, "CWE-693", "A05:2021",
+ "Prevents MIME-type sniffing attacks."),
+ "Strict-Transport-Security": (Severity.MEDIUM, "CWE-319", "A02:2021",
+ "Enforces HTTPS and prevents protocol downgrade attacks."),
+ "Referrer-Policy": (Severity.LOW, "CWE-200", "A05:2021",
+ "Controls how much referrer information is included with requests."),
+ }
+
+ try:
+ resp_main = requests.get(base_url, timeout=3, verify=False)
+ for header, (severity, cwe, owasp, desc) in _HEADER_META.items():
+ if header not in resp_main.headers:
+ findings_list.append(Finding(
+ severity=severity,
+ title=f"Missing security header: {header}",
+ description=desc,
+ evidence=f"Header {header} absent from {base_url} response.",
+ remediation=f"Add the {header} header to server responses.",
+ owasp_id=owasp,
+ cwe_id=cwe,
+ confidence="certain",
+ ))
+ except Exception as e:
+ self.P(f"Security header probe failed on {base_url}: {e}", color='y')
+ return probe_error(target, port, "security_headers", e)
+
+ return probe_result(findings=findings_list)
+
+
+ def _web_test_cors_misconfiguration(self, target, port):
+ """
+ Detect overly permissive CORS policies.
+
+ Parameters
+ ----------
+ target : str
+ Hostname or IP address.
+ port : int
+ Web port to probe.
+
+ Returns
+ -------
+ dict
+ Structured findings related to CORS policy.
+ """
+ findings_list = []
+ scheme = "https" if port in (443, 8443) else "http"
+ base_url = f"{scheme}://{target}"
+ if port not in (80, 443):
+ base_url = f"{scheme}://{target}:{port}"
+ try:
+ malicious_origin = "https://attacker.example"
+ resp = requests.get(
+ base_url,
+ timeout=3,
+ verify=False,
+ headers={"Origin": malicious_origin}
+ )
+ acao = resp.headers.get("Access-Control-Allow-Origin", "")
+ acac = resp.headers.get("Access-Control-Allow-Credentials", "")
+ if acao == "*" and acac.lower() == "true":
+ findings_list.append(Finding(
+ severity=Severity.CRITICAL,
+ title="CORS allows credentials with wildcard origin",
+ description="Any origin can make credentialed cross-site requests, enabling full account takeover.",
+ evidence=f"Access-Control-Allow-Origin: *, Allow-Credentials: true on {base_url}",
+ remediation="Never combine Access-Control-Allow-Origin: * with Allow-Credentials: true.",
+ owasp_id="A05:2021",
+ cwe_id="CWE-942",
+ confidence="certain",
+ ))
+ elif acao in ("*", malicious_origin):
+ findings_list.append(Finding(
+ severity=Severity.HIGH,
+ title=f"CORS misconfiguration: {acao} allowed",
+ description=f"CORS policy reflects attacker-controlled origins, enabling cross-site data theft.",
+ evidence=f"Access-Control-Allow-Origin: {acao} on {base_url}",
+ remediation="Restrict Access-Control-Allow-Origin to trusted domains only.",
+ owasp_id="A05:2021",
+ cwe_id="CWE-942",
+ confidence="certain",
+ ))
+ except Exception as e:
+ self.P(f"CORS probe failed on {base_url}: {e}", color='y')
+ return probe_error(target, port, "cors", e)
+
+ return probe_result(findings=findings_list)
+
+
+ def _web_test_open_redirect(self, target, port):
+ """
+ Check common redirect parameters for open redirect abuse.
+
+ Parameters
+ ----------
+ target : str
+ Hostname or IP address.
+ port : int
+ Web port to probe.
+
+ Returns
+ -------
+ dict
+ Structured findings about open redirects.
+ """
+ findings_list = []
+ scheme = "https" if port in (443, 8443) else "http"
+ base_url = f"{scheme}://{target}"
+ if port not in (80, 443):
+ base_url = f"{scheme}://{target}:{port}"
+ try:
+ payload = "https://attacker.example"
+ redirect_url = base_url.rstrip("/") + f"/login?next={quote(payload, safe=':/')}"
+ resp = requests.get(
+ redirect_url,
+ timeout=3,
+ verify=False,
+ allow_redirects=False
+ )
+ if 300 <= resp.status_code < 400:
+ location = resp.headers.get("Location", "")
+ if payload in location:
+ findings_list.append(Finding(
+ severity=Severity.MEDIUM,
+ title="Open redirect via next parameter",
+ description="The login endpoint redirects to attacker-controlled URLs via the next parameter.",
+ evidence=f"Location: {location} at {redirect_url}",
+ remediation="Validate redirect targets against an allowlist of trusted domains.",
+ owasp_id="A01:2021",
+ cwe_id="CWE-601",
+ confidence="certain",
+ ))
+ except Exception as e:
+ self.P(f"Open redirect probe failed on {base_url}: {e}", color='y')
+ return probe_error(target, port, "open_redirect", e)
+
+ return probe_result(findings=findings_list)
+
+
+ def _web_test_http_methods(self, target, port):
+ """
+ Surface risky HTTP verbs enabled on the root resource.
+
+ Parameters
+ ----------
+ target : str
+ Hostname or IP address.
+ port : int
+ Web port to probe.
+
+ Returns
+ -------
+ dict
+ Structured findings related to allowed HTTP methods.
+ """
+ findings_list = []
+ scheme = "https" if port in (443, 8443) else "http"
+ base_url = f"{scheme}://{target}"
+ if port not in (80, 443):
+ base_url = f"{scheme}://{target}:{port}"
+ try:
+ resp = requests.options(base_url, timeout=3, verify=False)
+ allow = resp.headers.get("Allow", "")
+ if allow:
+ risky = [method for method in ("PUT", "DELETE", "TRACE", "CONNECT") if method in allow.upper()]
+ if risky:
+ risky_str = ", ".join(risky)
+ findings_list.append(Finding(
+ severity=Severity.MEDIUM,
+ title=f"Risky HTTP methods enabled: {risky_str}",
+ description=f"OPTIONS response lists dangerous methods on {base_url}.",
+ evidence=f"Allow: {allow}",
+ remediation="Disable risky HTTP methods in the web server configuration.",
+ owasp_id="A05:2021",
+ cwe_id="CWE-749",
+ confidence="certain",
+ ))
+ except Exception as e:
+ self.P(f"HTTP methods probe failed on {base_url}: {e}", color='y')
+ return probe_error(target, port, "http_methods", e)
+
+ return probe_result(findings=findings_list)
diff --git a/extensions/business/cybersec/red_mesh/web_injection_mixin.py b/extensions/business/cybersec/red_mesh/web_injection_mixin.py
new file mode 100644
index 00000000..c37ef9b1
--- /dev/null
+++ b/extensions/business/cybersec/red_mesh/web_injection_mixin.py
@@ -0,0 +1,336 @@
+import time
+import requests
+from urllib.parse import quote
+
+from .findings import Finding, Severity, probe_result, probe_error
+
+
+class _InjectionTestBase:
+ """Shared execution engine for injection-style web tests."""
+
+ def _run_injection_test(self, target, port, *, params, payloads, check_fn,
+ finding_factory, max_findings=3):
+ """
+ Iterate params x payloads, call check_fn(response, needle) for each,
+ create findings via finding_factory(param, payload, response, url).
+ """
+ scheme = "https" if port in (443, 8443) else "http"
+ base_url = f"{scheme}://{target}" if port in (80, 443) else f"{scheme}://{target}:{port}"
+ findings = []
+
+ for param in params:
+ if len(findings) >= max_findings:
+ break
+ for payload, needle in payloads:
+ try:
+ url = f"{base_url}?{param}={payload}"
+ resp = requests.get(url, timeout=3, verify=False)
+ if check_fn(resp, needle):
+ findings.append(finding_factory(param, payload, resp, url))
+ break # Found for this param, next param
+ except Exception:
+ pass
+
+ return findings
+
+
+class _WebInjectionMixin(_InjectionTestBase):
+ """
+ Non-destructive probes for path traversal, reflected XSS,
+ and SQL injection (OWASP WSTG-INPV).
+ """
+
+ def _web_test_path_traversal(self, target, port):
+ """
+ Attempt path traversal via URL path and query parameters with encoding variants.
+
+ Parameters
+ ----------
+ target : str
+ Hostname or IP address.
+ port : int
+ Web port to probe.
+
+ Returns
+ -------
+ dict
+ Structured findings.
+ """
+ findings_list = []
+ scheme = "https" if port in (443, 8443) else "http"
+ base_url = f"{scheme}://{target}" if port in (80, 443) else f"{scheme}://{target}:{port}"
+
+ unix_needles = ("root:x:", "root:*:", "daemon:")
+ win_needles = ("[boot loader]", "[operating systems]", "[fonts]")
+
+ # --- 1. Path-based traversal ---
+ path_payloads = [
+ "/../../../../etc/passwd",
+ "/..%2f..%2f..%2f..%2fetc/passwd",
+ "/....//....//....//....//etc/passwd",
+ "/../../../../windows/win.ini",
+ ]
+ for payload_path in path_payloads:
+ if len(findings_list) >= 3:
+ break
+ try:
+ url = base_url.rstrip("/") + payload_path
+ resp = requests.get(url, timeout=2, verify=False)
+ if any(n in resp.text for n in unix_needles):
+ findings_list.append(Finding(
+ severity=Severity.CRITICAL,
+ title=f"Path traversal: /etc/passwd via path",
+ description=f"Server returned /etc/passwd content via path traversal.",
+ evidence=f"URL: {url}, body contains passwd markers",
+ remediation="Sanitize path components; use a web application firewall.",
+ owasp_id="A01:2021",
+ cwe_id="CWE-22",
+ confidence="certain",
+ ))
+ break
+ if any(n in resp.text for n in win_needles):
+ findings_list.append(Finding(
+ severity=Severity.CRITICAL,
+ title=f"Path traversal: win.ini via path",
+ description=f"Server returned Windows system file content.",
+ evidence=f"URL: {url}, body contains win.ini markers",
+ remediation="Sanitize path components.",
+ owasp_id="A01:2021",
+ cwe_id="CWE-22",
+ confidence="certain",
+ ))
+ break
+ except Exception:
+ pass
+
+ # --- 2. Query parameter traversal ---
+ params = ["file", "path", "page", "doc", "template", "include", "name"]
+ payloads_qs = [
+ ("../../../../etc/passwd", unix_needles),
+ ("..%2f..%2f..%2f..%2fetc/passwd", unix_needles),
+ ("..%252f..%252f..%252f..%252fetc/passwd", unix_needles), # double-encoded
+ ("..\\..\\..\\..\\windows\\win.ini", win_needles),
+ ]
+ for param in params:
+ if len(findings_list) >= 3:
+ break
+ for payload, needles in payloads_qs:
+ try:
+ url = f"{base_url}?{param}={payload}"
+ resp = requests.get(url, timeout=2, verify=False)
+ if any(n in resp.text for n in needles):
+ findings_list.append(Finding(
+ severity=Severity.CRITICAL,
+ title=f"Path traversal via ?{param}= parameter",
+ description=f"Parameter '{param}' allows reading system files.",
+ evidence=f"URL: {url}",
+ remediation=f"Validate and sanitize the '{param}' parameter.",
+ owasp_id="A01:2021",
+ cwe_id="CWE-22",
+ confidence="certain",
+ ))
+ break
+ except Exception:
+ pass
+
+ return probe_result(findings=findings_list)
+
+
+ def _web_test_xss(self, target, port):
+ """
+ Probe reflected XSS via URL path injection and query parameters.
+
+ Tests multiple payloads across common parameter names.
+
+ Parameters
+ ----------
+ target : str
+ Hostname or IP address.
+ port : int
+ Web port to probe.
+
+ Returns
+ -------
+ dict
+ Structured findings.
+ """
+ findings_list = []
+ scheme = "https" if port in (443, 8443) else "http"
+ base_url = f"{scheme}://{target}" if port in (80, 443) else f"{scheme}://{target}:{port}"
+
+ xss_payloads = [
+ ('
', 'onerror=alert'),
+ ('