Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
19 changes: 12 additions & 7 deletions agentic/orchestrator.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
VirusTotalToolManager,
ZoomEyeToolManager,
CriminalIpToolManager,
UncoverToolManager,
PhaseAwareToolExecutor,
)
from orchestrator_helpers import (
Expand Down Expand Up @@ -219,13 +220,14 @@ def _build_rotator(main_key: str, tool_name: str) -> KeyRotator:
# OSINT tools — Censys, FOFA, OTX, Netlas, VirusTotal, ZoomEye, CriminalIP
if hasattr(self, '_osint_managers') and self.tool_executor:
_osint_key_map = {
'censys': {'id_field': 'censysApiId', 'secret_field': 'censysApiSecret'},
'censys': {'token_field': 'censysApiToken', 'org_field': 'censysOrgId'},
'fofa': {'key_field': 'fofaApiKey', 'rotation_name': 'fofa'},
'otx': {'key_field': 'otxApiKey', 'rotation_name': 'otx'},
'netlas': {'key_field': 'netlasApiKey', 'rotation_name': 'netlas'},
'virustotal': {'key_field': 'virusTotalApiKey', 'rotation_name': 'virustotal'},
'zoomeye': {'key_field': 'zoomEyeApiKey', 'rotation_name': 'zoomeye'},
'criminalip': {'key_field': 'criminalIpApiKey', 'rotation_name': 'criminalip'},
'uncover': {'key_field': None},
}
for tool_name, key_cfg in _osint_key_map.items():
mgr = self._osint_managers.get(tool_name)
Expand All @@ -235,12 +237,14 @@ def _build_rotator(main_key: str, tool_name: str) -> KeyRotator:
if not enabled:
self.tool_executor.update_osint_tool(tool_name, None)
continue
if tool_name == 'censys':
api_id = user_settings.get(key_cfg['id_field'], '')
api_secret = user_settings.get(key_cfg['secret_field'], '')
if api_id and api_secret and (mgr.api_id != api_id or mgr.api_secret != api_secret):
mgr.api_id = api_id
mgr.api_secret = api_secret
if tool_name == 'uncover':
self.tool_executor.update_osint_tool(tool_name, mgr.get_tool())
elif tool_name == 'censys':
api_token = user_settings.get(key_cfg['token_field'], '')
org_id = user_settings.get(key_cfg['org_field'], '')
if api_token and org_id and (mgr.api_token != api_token or mgr.org_id != org_id):
mgr.api_token = api_token
mgr.org_id = org_id
self.tool_executor.update_osint_tool(tool_name, mgr.get_tool())
logger.info(f"Updated {tool_name} tool with API credentials")
else:
Expand Down Expand Up @@ -321,6 +325,7 @@ async def _setup_tools(self) -> None:
'virustotal': VirusTotalToolManager(),
'zoomeye': ZoomEyeToolManager(),
'criminalip': CriminalIpToolManager(),
'uncover': UncoverToolManager(),
}
osint_tools = {
name: mgr.get_tool()
Expand Down
2 changes: 2 additions & 0 deletions agentic/project_settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -100,6 +100,7 @@
'virustotal': ['informational', 'exploitation'],
'zoomeye': ['informational', 'exploitation'],
'criminalip': ['informational', 'exploitation'],
'uncover': ['informational', 'exploitation'],
},

# Kali Shell Library Installation
Expand Down Expand Up @@ -128,6 +129,7 @@
'VIRUSTOTAL_ENABLED': True,
'ZOOMEYE_ENABLED': True,
'CRIMINALIP_ENABLED': True,
'UNCOVER_ENABLED': True,

# Social Engineering Simulation
'PHISHING_SMTP_CONFIG': '', # Free-text SMTP config for phishing email delivery (optional)
Expand Down
14 changes: 13 additions & 1 deletion agentic/prompts/tool_registry.py
Original file line number Diff line number Diff line change
Expand Up @@ -193,7 +193,7 @@
'**censys** (Internet-wide host/service search)\n'
' - **action="search"** — Search hosts by query (e.g. "services.port=443 AND location.country=US")\n'
' - **action="host"** — Detailed IP info: services, TLS certs, OS, ASN\n'
' - Paid API — requires Censys API ID + Secret'
' - Paid API — requires Censys API Token + Organization ID'
),
},
"fofa": {
Expand Down Expand Up @@ -258,4 +258,16 @@
' - **action="domain_report"** — Risk assessment, technologies, domain intel'
),
},
"uncover": {
"purpose": "Multi-engine internet search (Shodan, Censys, FOFA, ZoomEye, Netlas, etc.)",
"when_to_use": "Search multiple OSINT engines at once for exposed assets, or look up a specific IP across all engines",
"args_format": '"action": "search|ip", "query": "search query", "ip": "1.2.3.4"',
"description": (
'**uncover** (Multi-engine internet search)\n'
' - **action="search"** — Search across all configured engines simultaneously\n'
' - **action="ip"** — Lookup a specific IP across all engines\n'
' - Supports: Shodan, Censys, FOFA, ZoomEye, Netlas, CriminalIP, Quake, Hunter, and more\n'
' - Returns IP, port, hostname, and source engine for each result'
),
},
}
122 changes: 106 additions & 16 deletions agentic/tools.py
Original file line number Diff line number Diff line change
Expand Up @@ -1008,16 +1008,16 @@ def _osint_http_error(tool_name: str, e: 'httpx.HTTPStatusError') -> str:


class CensysToolManager:
"""Censys internet search — host/service discovery via certificate and banner data."""
"""Censys internet search — host/service discovery via Platform API v3."""

API_BASE = "https://search.censys.io/api/v2"
API_BASE = "https://api.platform.censys.io/v3/global"

def __init__(self, api_id: str = '', api_secret: str = ''):
self.api_id = api_id
self.api_secret = api_secret
def __init__(self, api_token: str = '', org_id: str = ''):
self.api_token = api_token
self.org_id = org_id

def get_tool(self) -> Optional[callable]:
if not self.api_id or not self.api_secret:
if not self.api_token or not self.org_id:
logger.warning("Censys API credentials not configured - censys tool unavailable.")
return None
manager = self
Expand All @@ -1039,32 +1039,38 @@ async def censys(action: str, query: str = "", ip: str = "") -> str:
Returns:
Formatted results from the Censys API
"""
auth = (manager.api_id, manager.api_secret)
headers = {
"Authorization": f"Bearer {manager.api_token}",
"Accept": "application/json",
"Content-Type": "application/json",
}
params = {"organization_id": manager.org_id}
try:
async with httpx.AsyncClient(timeout=30.0, auth=auth) as client:
async with httpx.AsyncClient(timeout=30.0, headers=headers, params=params) as client:
if action == "search":
if not query:
return "Error: 'query' required for action='search'"
resp = await client.get(
f"{manager.API_BASE}/hosts/search",
params={"q": query, "per_page": 25},
resp = await client.post(
f"{manager.API_BASE}/search/query",
json={"query": query, "page_size": 25},
)
elif action == "host":
if not ip:
return "Error: 'ip' required for action='host'"
resp = await client.get(f"{manager.API_BASE}/hosts/{ip}")
resp = await client.get(f"{manager.API_BASE}/asset/host/{ip}")
else:
return f"Error: Unknown action '{action}'. Valid: search, host"

resp.raise_for_status()
data = resp.json()

if action == "search":
hits = data.get("result", {}).get("hits", [])
total = data.get("result", {}).get("total", 0)
result = data.get("result", {})
hits = result.get("hits", [])
total = result.get("total", 0)
if not hits:
return f"No Censys results for: {query}"
lines = [f"Censys search: {total} hosts (showing {len(hits)})"]
lines = [f"Censys search: {total} hits (showing {len(hits)})"]
for i, h in enumerate(hits[:25], 1):
ip_addr = h.get("ip", "?")
services = h.get("services", [])
Expand Down Expand Up @@ -1748,7 +1754,17 @@ async def criminalip(action: str, ip: str = "", domain: str = "") -> str:
return "\n".join(lines)

except httpx.HTTPStatusError as e:
return _osint_http_error("Criminal IP", e)
status = e.response.status_code
if status in (401, 403):
return "Criminal IP API error: API key is invalid or expired. Check Global Settings."
if status == 402:
return "Criminal IP API error: Credit/quota exhausted. Check your plan."
body = e.response.text[:300].lower()
if any(kw in body for kw in ("credit", "quota", "exceeded", "insufficient")):
return "Criminal IP API error: Credit/quota exhausted. Check your plan."
if status == 429:
return "Criminal IP API error: Rate limit exceeded. Try again later."
return f"Criminal IP API error: HTTP {status}"
except Exception as e:
logger.error(f"Criminal IP {action} failed: {e}")
return f"Criminal IP error: {str(e)}"
Expand All @@ -1757,6 +1773,80 @@ async def criminalip(action: str, ip: str = "", domain: str = "") -> str:
return criminalip


class UncoverToolManager:
    """ProjectDiscovery Uncover — multi-engine internet search.

    Shells out to the official uncover Docker image and parses its JSON-lines
    output. Unlike the other OSINT managers, uncover reads the per-engine API
    keys from its own provider config, so no key is required here.
    """

    def __init__(self, api_key: str = ''):
        # Kept for interface parity with the other OSINT tool managers
        # (the orchestrator assigns keys/rotators uniformly); uncover itself
        # does not consume these fields.
        self.api_key = api_key
        self.key_rotator = None

    def get_tool(self) -> Optional[callable]:
        manager = self

        @tool
        async def uncover(action: str, query: str = '', ip: str = '') -> str:
            """
            Uncover multi-engine internet search for exposed assets.

            Searches across Shodan, Censys, FOFA, ZoomEye, Netlas, CriminalIP,
            and other engines simultaneously.

            Args:
                action: "search" to search by query, "ip" to lookup a specific IP
                query: Search query (e.g. 'ssl:"Example Inc."', 'hostname:example.com')
                ip: IP address for IP-specific lookup

            Returns:
                Discovered hosts with IP, port, and source engine
            """
            if action == "ip" and ip:
                query = ip
            elif action == "search" and not query:
                return "Error: 'query' is required for search action"
            elif not query:
                return "Error: provide action='search' with query, or action='ip' with ip"

            try:
                import asyncio
                import json as _json
                cmd = ["docker", "run", "--rm",
                       "projectdiscovery/uncover:latest",
                       "-q", query, "-json", "-silent", "-l", "25"]
                # Launch docker asynchronously: the previous implementation used
                # blocking subprocess.run inside this async function, which froze
                # the event loop for up to 60 seconds per search.
                proc = await asyncio.create_subprocess_exec(
                    *cmd,
                    stdout=asyncio.subprocess.PIPE,
                    stderr=asyncio.subprocess.PIPE,
                )
                try:
                    stdout_b, stderr_b = await asyncio.wait_for(
                        proc.communicate(), timeout=60
                    )
                except asyncio.TimeoutError:
                    proc.kill()
                    return "Uncover: search timed out"
                if proc.returncode != 0:
                    return f"Uncover error: {stderr_b.decode(errors='replace')[:200]}"

                lines = []
                for line in stdout_b.decode(errors='replace').strip().split('\n'):
                    if not line.strip():
                        continue
                    try:
                        entry = _json.loads(line)
                        ip_val = entry.get('ip', '')
                        port_val = entry.get('port', '')
                        host_val = entry.get('host', '')
                        source = entry.get('source', '')
                        parts = [f"{ip_val}:{port_val}"]
                        # Only show the hostname when it adds information.
                        if host_val and host_val != ip_val:
                            parts.append(f"host={host_val}")
                        if source:
                            parts.append(f"[{source}]")
                        lines.append(" ".join(parts))
                    except _json.JSONDecodeError:
                        # Skip non-JSON noise (warnings, banners) in the stream.
                        continue

                if not lines:
                    return f"Uncover: no results for '{query}'"
                return f"Uncover results ({len(lines)}):\n" + "\n".join(lines[:25])
            except Exception as e:
                return f"Uncover error: {str(e)}"

        logger.info("Uncover multi-engine search tool configured")
        return uncover


# =============================================================================
# PHASE-AWARE TOOL EXECUTOR
# =============================================================================
Expand Down
91 changes: 91 additions & 0 deletions graph_db/mixins/osint_mixin.py
Original file line number Diff line number Diff line change
Expand Up @@ -2109,4 +2109,95 @@ def update_graph_from_criminalip(self, recon_data: dict, user_id: str, project_i
print(f"[graph-db] update_graph_from_criminalip complete: {stats}")
return stats

def update_graph_from_uncover(self, recon_data: dict, user_id: str, project_id: str) -> dict:
    """Update Neo4j graph with uncover target expansion results.

    Creates Subdomain and IP nodes for newly discovered assets.
    Uses ON CREATE SET to avoid overwriting richer data from other tools.

    Args:
        recon_data: Dict with optional "domain" and an "uncover" sub-dict
            holding "hosts" (list of hostnames), "ips" (list of addresses),
            and "ip_ports" (mapping of ip -> list of port numbers; ports may
            arrive as strings from JSON payloads).
        user_id: Tenant scoping key stamped onto every node.
        project_id: Project scoping key stamped onto every node.

    Returns:
        Stats dict with creation counters and any per-item errors.
    """
    stats = {
        "subdomains_created": 0, "ips_created": 0,
        "relationships_created": 0, "errors": [],
    }
    domain = recon_data.get("domain", "") or ""
    try:
        uncover = recon_data.get("uncover") or {}
        hosts = uncover.get("hosts") or []
        ips = uncover.get("ips") or []
        ip_ports = uncover.get("ip_ports") or {}

        # Nothing discovered — return zeroed stats without opening a session.
        if not hosts and not ips:
            return stats

        with self.driver.session() as session:
            for hostname in hosts:
                if not hostname:
                    continue
                try:
                    session.run(
                        """
                        MERGE (s:Subdomain {name: $name, user_id: $user_id, project_id: $project_id})
                        ON CREATE SET s.discovered_at = datetime(), s.updated_at = datetime(),
                                      s.source = 'uncover', s.status = 'unverified'
                        """,
                        name=hostname, user_id=user_id, project_id=project_id,
                    )
                    stats["subdomains_created"] += 1
                    if domain:
                        session.run(
                            """
                            MATCH (s:Subdomain {name: $name, user_id: $user_id, project_id: $project_id})
                            MATCH (d:Domain {name: $domain, user_id: $user_id, project_id: $project_id})
                            MERGE (s)-[:BELONGS_TO]->(d)
                            MERGE (d)-[:HAS_SUBDOMAIN]->(s)
                            """,
                            name=hostname, domain=domain,
                            user_id=user_id, project_id=project_id,
                        )
                        stats["relationships_created"] += 2
                except Exception as e:
                    stats["errors"].append(f"Uncover subdomain {hostname}: {e}")

            for ip in ips:
                if not ip:
                    continue
                try:
                    session.run(
                        """
                        MERGE (i:IP {address: $address, user_id: $user_id, project_id: $project_id})
                        ON CREATE SET i.updated_at = datetime(), i.uncover_discovered = true
                        SET i.uncover_enriched = true, i.updated_at = datetime()
                        """,
                        address=ip, user_id=user_id, project_id=project_id,
                    )
                    stats["ips_created"] += 1

                    for port_num in ip_ports.get(ip, []):
                        # Ports may be strings (JSON) — coerce before the
                        # range check; the previous `port_num <= 0` raised
                        # TypeError on str and aborted remaining ports.
                        try:
                            port_int = int(port_num)
                        except (TypeError, ValueError):
                            continue
                        if port_int <= 0:
                            continue
                        session.run(
                            """
                            MERGE (p:Port {number: $port, protocol: 'tcp', ip_address: $ip,
                                           user_id: $user_id, project_id: $project_id})
                            ON CREATE SET p.state = 'open', p.source = 'uncover',
                                          p.updated_at = datetime()
                            MERGE (i:IP {address: $ip, user_id: $user_id, project_id: $project_id})
                            MERGE (i)-[:HAS_PORT]->(p)
                            """,
                            port=port_int, ip=ip,
                            user_id=user_id, project_id=project_id,
                        )
                        stats["relationships_created"] += 1
                except Exception as e:
                    stats["errors"].append(f"Uncover IP {ip}: {e}")

    except Exception as e:
        stats["errors"].append(f"update_graph_from_uncover: {e}")

    print(f"[+][graph-db] Uncover Graph Update: "
          f"{stats['subdomains_created']} subdomains, "
          f"{stats['ips_created']} IPs, "
          f"{stats['relationships_created']} relationships")
    print(f"[graph-db] update_graph_from_uncover complete")
    return stats
Loading