Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion keepercommander/discovery_common/__version__.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = '1.1.4'
__version__ = '1.1.10'
93 changes: 91 additions & 2 deletions keepercommander/discovery_common/infrastructure.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,12 @@
from ..keeper_dag import DAG, EdgeType
from ..keeper_dag.exceptions import DAGVertexException
from ..keeper_dag.crypto import urlsafe_str_to_bytes
from ..keeper_dag.types import PamGraphId, PamEndpoints
from ..keeper_dag.types import PamGraphId
from discovery_common.types import DiscoveryObject
import os
import importlib
import time
from typing import Any, Optional, TYPE_CHECKING
from typing import Any, Optional, Dict, List, TYPE_CHECKING

if TYPE_CHECKING:
from ..keeper_dag.vertex import DAGVertex
Expand Down Expand Up @@ -59,6 +60,8 @@ def __init__(self, record: Any, logger: Optional[Any] = None, history_level: int

self.conn = get_connection(logger=logger, **kwargs)

self._cache: Optional[Dict] = None

@property
def dag(self) -> DAG:
if self._dag is None:
Expand Down Expand Up @@ -123,6 +126,12 @@ def close(self):
Clean up resources held by this Infrastructure instance.
Releases the DAG instance and connection to prevent memory leaks.
"""
if self._cache:
for v in self._cache.values():
v["vertex"] = None
v["content"] = None
self._cache.clear()

if self._dag is not None:
self._dag = None
self.conn = None
Expand Down Expand Up @@ -150,6 +159,86 @@ def save(self, delta_graph: Optional[bool] = None):
self._dag.save(delta_graph=delta_graph)
self.logger.debug(f"infrastructure took {time.time()-ts} secs to save")

def cache_objects(self):
    """
    Build an in-memory lookup cache of the discovery graph.

    Walks the graph depth-first from the configuration vertex and stores one
    entry per discovery object, keyed by lower-cased object type + object id.
    Existing entries start with was_found=False / is_new=False so a later
    pass can mark which objects were re-discovered.
    """

    self.logger.debug(f"building id to infrastructure cache")

    self._cache = {}

    def _walk(vertex: DAGVertex, parent_content: Optional[DiscoveryObject] = None):
        # One cache entry per vertex; key mirrors get_cache_info's lookup.
        content = DiscoveryObject.get_discovery_object(vertex)
        cache_key = content.object_type_value.lower() + content.id.lower()
        self._cache[cache_key] = {
            "key": cache_key,
            "uid": vertex.uid,
            "parent_uid": parent_content.uid if parent_content else None,
            "vertex": vertex,
            "content": content,
            "was_found": False,
            "could_login": True,
            "is_new": False,
            "md5": content.md5
        }

        # Recurse into child vertices, passing this content as the parent.
        for child in vertex.has_vertices():
            _walk(child, content)

    if self.has_discovery_data:
        ts = time.time()
        _walk(self.get_configuration, None)
        self.logger.info(f" infrastructure cache build time: {time.time()-ts} seconds")
    else:
        self.logger.info(f" no infrastructure data to cache")

def get_cache_info(self, object_type_value: str, object_id: str) -> Optional[Dict]:
    """
    Return the cached entry for a discovery object, or None if not cached.

    :param object_type_value: Object type; matched case-insensitively.
    :param object_id: Object id; matched case-insensitively.
    :return: The cache entry dict, or None when missing or cache not built.
    """
    # Guard: the cache starts as None until it is built; previously this
    # raised AttributeError instead of reporting "not found".
    if self._cache is None:
        return None
    return self._cache.get(object_type_value.lower() + object_id.lower())

def get_cache_info_by_key(self, key: str) -> Optional[Dict]:
    """
    Return the cached entry for a pre-built cache key, or None if not cached.

    :param key: Cache key (object type + id); matched case-insensitively.
    :return: The cache entry dict, or None when missing or cache not built.
    """
    # Guard: the cache starts as None until it is built; previously this
    # raised AttributeError instead of reporting "not found".
    if self._cache is None:
        return None
    return self._cache.get(key.lower())

def get_missing_cache_list(self, uid: Optional[str] = None) -> List[str]:
    """
    Return cache keys for pre-existing entries that were never marked found.

    :param uid: Optionally restrict the result to entries whose vertex uid
                or parent uid equals this value.
    :return: List of cache keys (insertion order preserved).
    """
    # Guard: an unbuilt cache (None) has nothing missing; previously this
    # raised AttributeError.
    if self._cache is None:
        return []

    not_found_list = []
    for key, entry in self._cache.items():
        # Only entries that existed before this discovery run and were not
        # re-discovered count as "missing".
        if entry["is_new"] or entry["was_found"]:
            continue
        if uid is None or uid == entry["uid"] or uid == entry["parent_uid"]:
            not_found_list.append(key)
    return not_found_list

def add_info_to_cache(self, vertex: DAGVertex, content: DiscoveryObject, parent_vertex: Optional[DAGVertex] = None):
    """
    Insert a newly discovered object into the cache.

    The entry is marked is_new=True / was_found=True so it is excluded from
    the "missing" list produced by get_missing_cache_list.
    """
    # Lazily create the cache if it has not been built yet.
    if self._cache is None:
        self._cache = {}

    cache_key = content.object_type_value.lower() + content.id.lower()
    entry = {
        "key": cache_key,
        "uid": vertex.uid,
        "parent_uid": parent_vertex.uid if parent_vertex else None,
        "vertex": vertex,
        "content": content,
        "was_found": True,
        "could_login": True,
        "is_new": True,
        "md5": content.md5,
    }
    self._cache[cache_key] = entry

def update_cache_info(self, info: Dict):
    """
    Store or replace a cache entry, using the entry's own "key" value.

    :param info: A cache entry dict; must contain a "key" item.
    """
    # Lazily create the cache if it has not been built yet, consistent with
    # add_info_to_cache; previously this raised AttributeError on None.
    if self._cache is None:
        self._cache = {}
    self._cache[info["key"]] = info

def find_content(self, query: Dict, ignore_case: bool = False) -> Optional[DAGVertex]:
    """
    Find the single vertex whose content matches the query.

    Returns None when no vertex matches, and also when more than one
    matches — the caller only wants an unambiguous hit.
    """
    matches = self.dag.search_content(query=query, ignore_case=ignore_case)
    return matches[0] if len(matches) == 1 else None

def to_dot(self, graph_format: str = "svg", show_hex_uid: bool = False,
show_version: bool = True, show_only_active_vertices: bool = False,
show_only_active_edges: bool = False, sync_point: int = None, graph_type: str = "dot"):
Expand Down
22 changes: 4 additions & 18 deletions keepercommander/discovery_common/jobs.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
from .utils import get_connection, make_agent
from .types import JobContent, JobItem, Settings, DiscoveryDelta
from ..keeper_dag import DAG, EdgeType
from ..keeper_dag.types import PamGraphId, PamEndpoints
from ..keeper_dag.types import PamGraphId
import logging
import os
import base64
Expand Down Expand Up @@ -320,26 +320,12 @@ def get_job(self, job_id) -> Optional[JobItem]:
# Get the job item from the job vertex DATA edge.
# Replace the one from the job history if we have it.
try:
job = job_vertex.content_as_object(JobItem)
found_job = job_vertex.content_as_object(JobItem)
if found_job is not None:
job = found_job
except Exception as err:
self.logger.debug(f"could not find job item on job vertex, use job histry entry: {err}")

# If the job delta is None, check to see if it chunked as vertices.
delta_lookup = {}
vertices = job_vertex.has_vertices()
self.logger.debug(f"found {len(vertices)} delta vertices")
for vertex in vertices:
edge = vertex.get_edge(job_vertex, edge_type=EdgeType.KEY)
delta_lookup[int(edge.path)] = vertex

json_value = ""
# Sort numerically increasing and then append their content.
# This will re-assemble the JSON
for key in sorted(delta_lookup):
json_value += delta_lookup[key].content_as_str
if json_value != "":
self.logger.debug(f"delta content length is {len(json_value)}")
job.delta = DiscoveryDelta.model_validate_json(json_value)
else:
self.logger.debug("could not find job vertex")

Expand Down
5 changes: 1 addition & 4 deletions keepercommander/discovery_common/process.py
Original file line number Diff line number Diff line change
Expand Up @@ -1019,7 +1019,7 @@ def _process_level(self,

if admin_uid is not None:
self.logger.debug(" found directory user admin, connect to resource")
# self.record_link.belongs_to(admin_uid, add_content.record_uid, acl=acl)
self.record_link.belongs_to(admin_uid, add_content.record_uid, acl=acl)
should_prompt_for_admin = False
else:
self.logger.debug(" did not find the directory user for the admin, "
Expand Down Expand Up @@ -1562,7 +1562,4 @@ def run(self,
self.infra.save(delta_graph=False)
self.logger.debug("# ####################################################################################")

# Update the user service mapping
self.user_service.run(infra=self.infra)

return bulk_process_results
39 changes: 37 additions & 2 deletions keepercommander/discovery_common/rm_types.py
Original file line number Diff line number Diff line change
Expand Up @@ -137,6 +137,30 @@ class RmAzureGroupAddMeta(RmMetaBase):
group_types: List[str] = []


class RmGcpUserAddMeta(RmMetaBase):
    # Options for a GCP "user add" operation (semantics inferred from
    # field names — confirm against the handler that consumes this model).
    account_enabled: Optional[bool] = True
    display_name: Optional[str] = None
    password_reset_required: Optional[bool] = False
    password_reset_required_with_mfa: Optional[bool] = False
    groups: List[str] = []


class RmGcpGroupAddMeta(RmMetaBase):
    # Options for a GCP "group add" operation.
    group_types: List[str] = []


class RmOktaUserAddMeta(RmMetaBase):
    # Options for an Okta "user add" operation; mirrors the GCP user-add
    # model (semantics inferred from field names — confirm against handler).
    account_enabled: Optional[bool] = True
    display_name: Optional[str] = None
    password_reset_required: Optional[bool] = False
    password_reset_required_with_mfa: Optional[bool] = False
    groups: List[str] = []


class RmOktaGroupAddMeta(RmMetaBase):
    # Options for an Okta "group add" operation.
    group_types: List[str] = []


class RmDomainUserAddMeta(RmMetaBase):
roles: List[str] = []
groups: List[str] = []
Expand Down Expand Up @@ -253,6 +277,10 @@ class RmMongoDbRoleAddMeta(RmMetaBase):
# MACHINE


class RmUserDeleteBaseMeta(RmMetaBase):
    # Shared base for machine "user delete" metadata; OS-specific models
    # (Linux/Windows/macOS) subclass this and may add their own flags.
    remove_home_dir: Optional[bool] = True


class RmLinuxGroupAddMeta(RmMetaBase):
gid: Optional[int] = None
system_group: Optional[bool] = False
Expand Down Expand Up @@ -291,8 +319,7 @@ class RmLinuxUserAddMeta(RmMachineUserAddMeta):
non_system_dir_mode: Optional[str] = None


class RmLinuxUserDeleteMeta(RmMetaBase):
remove_home_dir: Optional[bool] = True
class RmLinuxUserDeleteMeta(RmUserDeleteBaseMeta):
    # Linux user delete: inherits remove_home_dir from the base and adds
    # whether the user's primary group should also be removed.
    remove_user_group: Optional[bool] = True


Expand All @@ -308,6 +335,10 @@ class RmWindowsUserAddMeta(RmMachineUserAddMeta):
groups: List[str] = []


class RmWindowsUserDeleteMeta(RmUserDeleteBaseMeta):
    # Windows user delete: no options beyond the shared base.
    pass


class RmMacOsUserAddMeta(RmMachineUserAddMeta):
display_name: Optional[str] = None
uid: Optional[str] = None
Expand All @@ -325,6 +356,10 @@ class RmMacOsRoleAddMeta(RmMetaBase):
record_name: Optional[str] = None


class RmMacOsUserDeleteMeta(RmUserDeleteBaseMeta):
    # macOS user delete: no options beyond the shared base.
    pass


# DIRECTORY


Expand Down
108 changes: 95 additions & 13 deletions keepercommander/discovery_common/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import datetime
import base64
import json
import hashlib
from keeper_secrets_manager_core.crypto import CryptoUtils
from typing import Any, Union, Optional, List, TYPE_CHECKING

Expand Down Expand Up @@ -524,6 +525,18 @@ class DiscoveryObject(BaseModel):
# Specific information for a record type.
item: Union[DiscoveryConfiguration, DiscoveryUser, DiscoveryMachine, DiscoveryDatabase, DiscoveryDirectory]

@property
def md5(self) -> str:
    """
    MD5 fingerprint of this object's serialized content.

    Volatile fields are excluded so their changes do not alter the digest.
    """
    data = self.model_dump()

    # Don't include these in the MD5
    for ignored in ("missing_since_ts", "access_user"):
        data.pop(ignored, None)

    return hashlib.md5(json.dumps(data).encode('utf-8')).hexdigest()

@property
def record_exists(self):
return self.record_uid is not None
Expand Down Expand Up @@ -603,29 +616,98 @@ class NormalizedRecord(BaseModel):
title: str
fields: List[RecordField] = []
note: Optional[str] = None
record_exists: bool = True

def _field(self,
field_type: Optional[str] = None,
label: Optional[str] = None) -> Optional[RecordField]:
if field_type is None and label is None:
raise ValueError("either field_type or label needs to be set to find field in NormalizedRecord.")

def _field(self, field_type, label) -> Optional[RecordField]:
for field in self.fields:
value = field.value
if value is None or len(value) == 0:
continue
if field.label == field_type and value[0].lower() == label.lower():
if field_type is not None and field_type == field.type:
return field
if label is not None and label == field.label:
return field
return None

def find_user(self, user):
def find_field(self,
               field_type: Optional[str] = None,
               label: Optional[str] = None) -> Optional[RecordField]:
    """Public wrapper for _field: locate a field by type and/or label."""
    return self._field(field_type=field_type, label=label)

def get_value(self,
              field_type: Optional[str] = None,
              label: Optional[str] = None) -> Optional[Any]:
    """Return the first value of the matching field, or None when the
    field is absent or has no values."""
    found = self.find_field(field_type=field_type, label=label)
    if found is None:
        return None
    values = found.value
    if not values:
        return None
    return values[0]

def get_user(self) -> Optional[str]:
    """Return the first value of the "login" field, or None if the field
    is missing or empty. A non-list value is returned as-is."""
    match = self._field(field_type="login")
    if match is None:
        return None
    raw = match.value
    if not isinstance(raw, list):
        return raw
    return raw[0] if raw else None

def get_dn(self) -> Optional[str]:
    """Return the first value of the "distinguishedName" field, or None if
    the field is missing or empty. A non-list value is returned as-is."""
    match = self._field(label="distinguishedName")
    if match is None:
        return None
    raw = match.value
    if not isinstance(raw, list):
        return raw
    return raw[0] if raw else None

def has_user(self, user) -> bool:
    """
    Check whether this record's "login" field matches the given user,
    case-insensitively.

    The domain portion (as split off by utils.split_user_and_domain) is
    removed from the given user before comparing.

    :param user: User name, possibly including a domain part.
    :return: True when the login field's first value matches.
    """
    from .utils import split_user_and_domain

    # Strip any domain portion so e.g. "DOMAIN\\bob" can match "bob".
    user, _ = split_user_and_domain(user)

    field = self._field(field_type="login")
    if field is None:
        return False

    value = field.value
    if isinstance(value, list):
        if len(value) == 0:
            return False
        value = value[0]
    # BUG FIX: lowercase after unpacking a list value too; previously only
    # a bare string value was lowercased, so list-valued login fields were
    # compared case-sensitively against user.lower().
    if isinstance(value, str):
        value = value.lower()

    return user.lower() == value

def has_dn(self, user) -> bool:
    """
    Check whether this record's "distinguishedName" field matches the
    given value, case-insensitively.

    :param user: Distinguished name to compare.
    :return: True when the field's first value matches.
    """
    field = self._field(label="distinguishedName")
    if field is None:
        return False

    value = field.value
    if isinstance(value, list):
        if len(value) == 0:
            return False
        value = value[0]
    # BUG FIX: lowercase after unpacking a list value too; previously only
    # a bare string value was lowercased, so list-valued DN fields were
    # compared case-sensitively against user.lower().
    if isinstance(value, str):
        value = value.lower()

    return user.lower() == value


class PromptResult(BaseModel):
Expand Down
Loading
Loading