diff --git a/.gitignore b/.gitignore
index 7bc716bf0..02c4882b3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,6 +2,7 @@
 .DS_Store
 *.iml
 .cache/
+.cursor/
 venv/
 *.json
 *.pyc
diff --git a/keepercommander/commands/discoveryrotation.py b/keepercommander/commands/discoveryrotation.py
index 11802a5a4..627099419 100644
--- a/keepercommander/commands/discoveryrotation.py
+++ b/keepercommander/commands/discoveryrotation.py
@@ -73,7 +73,7 @@
 from .pam_debug.rotation_setting import PAMDebugRotationSettingsCommand
 from .pam_debug.link import PAMDebugLinkCommand
 from .pam_debug.vertex import PAMDebugVertexCommand
-from .pam_import.edit import PAMProjectCommand
+from .pam_import.commands import PAMProjectCommand
 from .pam_launch.launch import PAMLaunchCommand
 from .pam_service.list import PAMActionServiceListCommand
 from .pam_service.add import PAMActionServiceAddCommand
diff --git a/keepercommander/commands/pam_import/README.md b/keepercommander/commands/pam_import/README.md
index 97db7271b..ed922152a 100644
--- a/keepercommander/commands/pam_import/README.md
+++ b/keepercommander/commands/pam_import/README.md
@@ -1,15 +1,28 @@
-## PAM Import Command
+## PAM Project Import Commands
 
 The PAM Import command helps customers with thousands of managed companies to automate the creation of folders, gateways, machines, users, connections, tunnels and (optionally) rotations.
 
 ### Command line options
-`pam project import --name=project1 --filename=/path/to/import.json --dry-run`
+Initial import:
+`pam project import --name=project1 --filename=/path/to/import.json [--dry-run]`
 
 - `--name`, `-n` → Project name _(overrides `"project":""` from JSON)_
 - `--filename`, `-f` → JSON file to load import data from.
 - `--dry-run`, `-d` → Test import without modifying vault.
 
+Add new PAM resources and users to an existing PAM configuration from an import file. The command validates folders and records, then creates only the new items (matched by title; existing records are skipped). The import JSON format is the same as for the initial import:
+`pam project extend --config=<uid-or-title> --filename=/path/to/import.json [--dry-run]`
+
+- `--config`, `-c` → PAM Configuration record UID or title.
+- `--filename`, `-f` → JSON file to load import data from.
+- `--dry-run`, `-d` → Test import without modifying vault.
+
+> **Notes:**
+> - Use **`--dry-run`** to preview what would be created and to see detailed validation output without changing the vault.
+> - If the command reports errors, run it again with **`--dry-run`** for more detailed error messages.
+
 
 ### JSON format details
 
 Text UI (TUI) elements (a.k.a. JSON keys) match their Web UI counterparts, so you can create the corresponding record type in your web vault to help you visualize all options and possible values.
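For orientation, here is a minimal sketch of an import file that both commands accept. It is illustrative rather than authoritative: the `pam_data`/`resources` wrapper is inferred from references elsewhere in this changeset (e.g. `pam_data.resources[]`), the titles, host, and credentials are hypothetical, and only a small subset of the supported TUI keys is shown; see the JSON format details in the README for the full schema.

```json
{
  "project": "project1",
  "pam_data": {
    "resources": [
      {
        "type": "pamMachine",
        "title": "Web Server 01",
        "host": "192.168.1.10",
        "port": "22",
        "operating_system": "linux",
        "users": [
          {
            "type": "pamUser",
            "login": "admin",
            "password": "ExamplePassword1!",
            "managed": true,
            "rotation_settings": {
              "rotation": "general",
              "enabled": "on",
              "schedule": { "type": "on-demand" },
              "password_complexity": "32,5,5,5,5"
            }
          }
        ]
      }
    ]
  }
}
```

With a file like this, `pam project import --name=project1 --filename=import.json --dry-run` reports what would be created without modifying the vault, and `pam project extend --config=<uid-or-title> --filename=import.json` later adds only the items whose titles do not already exist. Per the parsers below, a `pamUser` without a title is auto-titled (here, "PAM User - admin"), and a `"rotation": "general"` user nested under a machine has its rotation `resource` resolved to that parent machine automatically.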
diff --git a/keepercommander/commands/pam_import/base.py b/keepercommander/commands/pam_import/base.py new file mode 100644 index 000000000..54a903825 --- /dev/null +++ b/keepercommander/commands/pam_import/base.py @@ -0,0 +1,3334 @@ +# _ __ +# | |/ /___ ___ _ __ ___ _ _ ® +# | ' None: + self._initialize() + settings = settings if isinstance(settings, dict) else {} + environment_type = str(environment_type).strip() + if environment_type not in PAM_ENVIRONMENT_TYPES: + environment_type = str(settings.get("environment", "")).strip() + if environment_type not in PAM_ENVIRONMENT_TYPES: + logging.warning("Unrecognized environment type " + f"""{bcolors.WARNING}"{environment_type}"{bcolors.ENDC} """ + f"""must be one of {PAM_ENVIRONMENT_TYPES} - switching to "local" """) + environment_type = "local" + self.environment = environment_type + + # common properties shared across all PAM config types: + self.pam_resources = { + "controllerUid": controller_uid, + "folderUid": folder_uid + # "resourceRef": "" - unused/legacy + } + val = settings.get("title", None) + if isinstance(val, str): self.title = val + + # gateway_name, ksm_app_name used externally during gw creation, use controllerUid here + + choices = ("on", "off", "default") + val = settings.get("connections", None) + if isinstance(val, str) and val in choices: self.connections = val + val = settings.get("rotation", None) + if isinstance(val, str) and val in choices: self.rotation = val + val = settings.get("tunneling", None) + if isinstance(val, str) and val in choices: self.tunneling = val + val = settings.get("remote_browser_isolation", None) + if isinstance(val, str) and val in choices: self.remote_browser_isolation = val + val = settings.get("graphical_session_recording", None) + if isinstance(val, str) and val in choices: self.graphical_session_recording = val + val = settings.get("text_session_recording", None) + if isinstance(val, str) and val in choices: self.text_session_recording = val + val = settings.get("ai_threat_detection", None) + if isinstance(val, str) and val in choices: self.ai_threat_detection = val + val = settings.get("ai_terminate_session_on_detection", None) + if isinstance(val, str) and val in choices: self.ai_terminate_session_on_detection = val + + val = settings.get("port_mapping", None) # multiline + if isinstance(val, str): val = [val] + if (isinstance(val, list) and all(isinstance(x, str) and x != "" for x in val)): + self.port_mapping = val + elif val is not None: + logging.warning("Unrecognized port_mapping values (skipped) - expecting list of strings,"\ + """ ex. 
["2222=ssh", "33060=mysql"]""") + + # {"type": "on-demand"} or {"type": "CRON", "cron": "30 18 * * *", "tz": "America/Chicago" } + val = settings.get("default_rotation_schedule", None) + if isinstance(val, dict): + schedule_type = str(val.get("type", "")).lower() + schedule_type = {"on-demand": "ON_DEMAND", "cron": "CRON"}.get(schedule_type, "") + if schedule_type != "": + if schedule_type == "ON_DEMAND": + self.default_rotation_schedule = { "type": "ON_DEMAND" } + elif schedule_type == "CRON": + cron = str(val.get("cron", "")).strip() + if cron: + self.default_rotation_schedule = { "type": "CRON", "cron": cron } + tz = str(val.get("tz", "")).strip() + if tz: self.default_rotation_schedule["tz"] = tz + else: + logging.warning("Skipped unrecognized CRON settings in default_rotation_schedule") + else: + logging.warning("Skipped unrecognized default_rotation_schedule type") + + self.scripts = PamScriptsObject.load(settings.get("scripts", None)) + self.attachments = PamAttachmentsObject.load(settings.get("attachments", None)) + + # Local Network + if environment_type == "local": + val = settings.get("network_id", None) + if isinstance(val, str): self.network_id = val + val = settings.get("network_cidr", None) + if isinstance(val, str): self.network_cidr = val + elif environment_type == "aws": + val = settings.get("aws_id", None) # required + if isinstance(val, str): self.aws_id = val + val = settings.get("aws_access_key_id", None) + if isinstance(val, str): self.aws_access_key_id = val + val = settings.get("aws_secret_access_key", None) + if isinstance(val, str): self.aws_secret_access_key = val + + val = settings.get("aws_region_names", None) # multiline + if isinstance(val, str): val = [val] + if (isinstance(val, list) and all(isinstance(x, str) and x != "" for x in val)): + self.aws_region_names = val + elif val is not None: + logging.warning("Unrecognized aws_region_names values (skipped) - expecting list of strings") + elif environment_type == "azure": + val = settings.get("az_entra_id", None) # required + if isinstance(val, str): self.az_entra_id = val + val = settings.get("az_client_id", None) # required + if isinstance(val, str): self.az_client_id = val + val = settings.get("az_client_secret", None) # required + if isinstance(val, str): self.az_client_secret = val + val = settings.get("az_subscription_id", None) # required + if isinstance(val, str): self.az_subscription_id = val + val = settings.get("az_tenant_id", None) # required + if isinstance(val, str): self.az_tenant_id = val + val = settings.get("az_resource_groups", None) # multiline + if isinstance(val, str): val = [val] + if (isinstance(val, list) and all(isinstance(x, str) and x != "" for x in val)): + self.az_resource_groups = val + elif val is not None: + logging.warning("Unrecognized az_resource_groups values (skipped) - expecting list of strings") + elif environment_type == "domain": + val = settings.get("dom_domain_id", None) # required + if isinstance(val, str): self.dom_domain_id = val + val = settings.get("dom_hostname", None) # required + if isinstance(val, str): self.dom_hostname = val + val = settings.get("dom_port", None) # required + if isinstance(val, int) and 0 <= val <= 65535: val = str(val) + if isinstance(val, str): self.dom_port = val + val = utils.value_to_boolean(settings.get("dom_use_ssl")) # required, bool + if isinstance(val, bool): self.dom_use_ssl = val + val = utils.value_to_boolean(settings.get("dom_scan_dc_cidr")) # optional, bool + if isinstance(val, bool): self.dom_scan_dc_cidr = val + val = 
settings.get("dom_network_cidr", None) # optional + if isinstance(val, str): self.dom_network_cidr = val + val = settings.get("dom_administrative_credential", None) # required, existing pamUser + if isinstance(val, str): self.dom_administrative_credential = val + # self.admin_credential_ref - will be resolved from dom_administrative_credential (later) + elif environment_type == "gcp": + val = settings.get("gcp_id", None) # required + if isinstance(val, str): self.gcp_id = val + # --service-account-key accepts only JSON.stringify(value) anyways + val = settings.get("gcp_service_account_key", None) # required + if isinstance(val, str): self.gcp_service_account_key = val + val = settings.get("gcp_google_admin_email", None) # required + if isinstance(val, str): self.gcp_google_admin_email = val + val = settings.get("gcp_region_names", None) # required, multiline + if isinstance(val, str): val = [val] + if (isinstance(val, list) and all(isinstance(x, str) and x != "" for x in val)): + self.gcp_region_names = val + elif val is not None: + logging.warning("Unrecognized gcp_region_names values (skipped) - expecting list of strings") + elif environment_type == "oci": + val = settings.get("oci_id", None) # required + if isinstance(val, str): self.oci_id = val + val = settings.get("oci_admin_id", None) # required + if isinstance(val, str): self.oci_admin_id = val + val = settings.get("oci_admin_public_key", None) # required + if isinstance(val, str): self.oci_admin_public_key = val + val = settings.get("oci_admin_private_key", None) # required + if isinstance(val, str): self.oci_admin_private_key = val + val = settings.get("oci_tenancy", None) # required + if isinstance(val, str): self.oci_tenancy = val + val = settings.get("oci_region", None) # required + if isinstance(val, str): self.oci_region = val + + +class PamScriptsObject(): + def __init__(self): + self.scripts: List[PamScriptObject] = [] + + @classmethod + def load(cls, data: Optional[Union[str, list]]) -> PamScriptsObject: + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"Pam Scripts failed to load from: {str(data)[:80]}...") + if not(data and isinstance(data, list)): return obj + + for s in data: + so = PamScriptObject.load(s) + if so.validate(): + obj.scripts.append(so) + else: + logging.warning(f"""Script file not found (skipped): "{str(so.file)}" """) + if not obj.scripts: logging.warning("Skipped empty scripts section") + return obj + + # def to_json(self): pass # File upload will create the JSON format + + +class PamScriptObject(): + def __init__(self): + self.file: str = "" + self.script_command: str = "" + self.additional_credentials: List[str] = [] + self.file_ref: str = "" # fileRef generated by file upload + self.record_refs: List[str] = [] # "recordRef":["uid1","uid2"] from additional_credentials + + def validate(self): + valid = isinstance(self.file, str) + valid = valid and Path(self.file).resolve().exists() + return valid + + @classmethod + def load(cls, data: Union[str, dict]) -> PamScriptObject: + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"PAM script failed to load from: {str(data)[:80]}") + if not isinstance(data, dict): return obj + + # TUI: "script": { "script_command": "pwsh.exe", "file": "path/file.ext", "additional_credentials": ["admin1", "user2"] }, + # JSON: "script": [{"command":"", "fileRef":"path/file.ext", "recordRef": ["uid1", "uid2"]}] + # use file upload to attach to existing record and get 
UIDs + cmd = data.get("script_command", None) + if isinstance(cmd, str) and cmd.strip() != "": obj.script_command = cmd.strip() + file = data.get("file", None) + if isinstance(file, str) and file.strip() != "": obj.file = file.strip() + # before use call validate() which also checks if file exists + + # NB! If script has additional_credentials these must be added later, + # after pamUser creation + acs = data.get("additional_credentials", None) + if isinstance(acs, str): acs = [acs] + if isinstance(acs, list) and acs: obj.additional_credentials = acs + + return obj + + # def to_json(self): pass # File upload will create the JSON format + + +class PamAttachmentsObject(): + def __init__(self): + self.attachments: List[PamAttachmentObject] = [] + # self.file_ref: List[str] # fileRef: [] populated by file upload + + @classmethod + def load(cls, data: Optional[Union[str, list]]) -> PamAttachmentsObject: + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"PAM Attachments failed to load from: {str(data)[:80]}...") + if not(data and isinstance(data, list)): return obj + + for a in data: + if isinstance(a, str): a = { "file": a } + ao = PamAttachmentObject.load(a) + if ao.validate(): + obj.attachments.append(ao) + else: + logging.warning(f"""File attachment not found (skipped): "{str(ao.file)}" """) + if not obj.attachments: logging.warning("Skipped empty file attachments section") + return obj + + # def to_json(self): pass # File upload will create the JSON format + + +class PamAttachmentObject(): + def __init__(self): + self.file: str = "" + self.title: str = "" + self.file_ref: str = "" # fileRef generated by file upload + + def validate(self): + valid = isinstance(self.file, str) + valid = valid and Path(self.file).resolve().exists() + return valid + + @classmethod + def load(cls, data: Union[str, dict]) -> PamAttachmentObject: + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"Failed to load file attachment from: {str(data)[:80]}") + if isinstance(data, str): data = {"file": data} + if not isinstance(data, dict): return obj + + # TUI: "attachments": [{ "file": "path/file.ext", "title": "File1" }] + # TUI: "attachments": ["path/file1", "file2"] - currently / title=filename + # JSON: "fileRef": ["uid1", "uid2"] # file upload generated + # use file upload to attach to existing record and get UIDs + title = data.get("title", None) + if isinstance(title, str) and title.strip() != "": obj.title = title.strip() + file = data.get("file", None) + if isinstance(file, str) and file.strip() != "": obj.file = file.strip() + # before use call validate() which also checks if file exists + + return obj + + # def to_json(self): pass # File upload will create the JSON format + + +class PamRotationScheduleObject(): + def __init__(self): + self.type: str = "" # on-demand|CRON + self.cron: str = "" # ex. 
"cron": "30 18 * * *" + self.tz: str = "" # timezone - default = "Etc/UTC" + # {"type": "on-demand"}|{"type": "CRON", "cron": "30 18 * * *"} + # http://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html#examples + + @classmethod + def load(cls, data: Union[str, dict]) -> PamRotationScheduleObject: + schedule_types = ("on-demand", "cron") + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"Failed to load rotation schedule from: {str(data)[:80]}") + if not isinstance(data, dict): return obj + + type = data.get("type", None) + if type and isinstance(type, str) and type.strip().lower() in schedule_types: + obj.type = type.strip().lower() + elif type: + logging.error(f"""Schedule type "{str(type)[:80]}" is unknown - must be one of {schedule_types}""") + + if obj.type.lower() == "cron": + cron = data.get("cron", None) + if isinstance(cron, str) and cron.strip() != "": obj.cron = cron.strip() + if obj.cron: # validate + try: + parsed_cron = vault.TypedField.import_schedule_field(obj.cron) + except: + parsed_cron = {} + if not (parsed_cron and parsed_cron.get("time", "")): + logging.error(f"Failed to load CRON from: {obj.cron}") + tz = data.get("tz", None) + if isinstance(tz, str) and tz.strip() != "": obj.tz = tz.strip() + + return obj + +class PamRotationParams(): + def __init__(self, configUid: str, profiles: dict): + self.configUid: str = configUid # iam_user|scripts_only=NOOP + self.ownerUid: str = "" # general - pamMachine rec UID + self.ownerTitle: str = "" # general - pamMachine rec title + self.rotation_profiles: dict = profiles or {} + +class PamRotationSettingsObject(): + def __init__(self): + self.rotation: str = "" # general|iam_user|scripts_only=NOOP + self.resource: str = "" # general:MachineTitle, IAM/Scripts:skip - auto/PamConfig + self.enabled: str = "" # on|off|default + self.schedule = None # {"type": "on-demand"}|{"type": "CRON", "cron": "30 18 * * *"} + self.password_complexity: str = "" # "32,5,5,5,5" + self.resourceUid: str = "" # general:machineUID, iam_user,scripts_only:PamConfigUID + + @classmethod + def load(cls, data: Optional[Union[str, dict]], rotation_params: Optional[PamRotationParams] = None) -> PamRotationSettingsObject: + rotation_types = ("general", "iam_user", "scripts_only") + enabled_types = ("on", "off", "default") + rx_complexity = r"^(\d+,\d+,\d+,\d+,\d+)$" + obj = cls() + + # autodetect profile name (and load from rotation_profiles section) + if isinstance(data, str) and rotation_params and isinstance(rotation_params.rotation_profiles, dict): + profile = rotation_params.rotation_profiles.get(data, None) + if profile and isinstance(profile, dict): + data = profile + + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"Failed to load rotation settings from: {str(data)[:80]}") + if not isinstance(data, dict): return obj + + rotation = data.get("rotation", None) + if rotation and isinstance(rotation, str) and rotation.strip().lower() in rotation_types: + obj.rotation = rotation.strip().lower() + elif rotation: + logging.error(f"""Rotation type "{str(rotation)[:80]}" is unknown - must be one of {rotation_types}""") + + # type: iam_user|scripts_only=NOOP - automatically pick up current PAM Config + # type: general - automatically picks owner record (uid by title) + if obj.rotation == "general": + resource = data.get("resource", None) + if isinstance(resource, str) and resource.strip() != "": + obj.resource = resource.strip() + if 
rotation_params and rotation_params.ownerTitle: + if obj.resource and obj.resource.lower() != rotation_params.ownerTitle.lower(): + logging.warning("Rotation record owner must be its parent - replacing " + f"""configured owner "resource":"{obj.resource}" """ + f"""with actual parent "{rotation_params.ownerTitle}" """) + obj.resource = rotation_params.ownerTitle + elif obj.rotation in ("iam_user", "scripts_only"): + if rotation_params and rotation_params.configUid: + obj.resource = rotation_params.configUid + + enabled = data.get("enabled", None) + if enabled and isinstance(enabled, str) and enabled.strip().lower() in enabled_types: + obj.enabled = enabled.strip().lower() + elif enabled: + logging.error(f"""Unknown rotation enablement type "{str(enabled)[:80]}" - must be one of {enabled_types}""") + + obj.schedule = PamRotationScheduleObject.load(data.get("schedule", None) or "") + complexity = data.get("password_complexity", None) + if complexity and isinstance(complexity, str): + if re.fullmatch(rx_complexity, complexity): + obj.password_complexity = complexity.strip() + if complexity and not obj.password_complexity: + logging.error(f"""Invalid password complexity "{str(complexity)[:20]}" - must be in csv format, ex. "32,5,5,5,5" """) + # pwd_complexity_rule_list = {} populated by password_complexity + + return obj + + +class DagOptionValue(Enum): + ON = "on" + OFF = "off" + DEFAULT = "default" + + @classmethod + def map(cls, dag_option: str): + try: return cls(str(dag_option).lower()) + except ValueError: return None + +class DagSettingsObject(): + def __init__(self): + self.pam_resource: Optional[str] = None + self.rotation: Optional[DagOptionValue] = None + self.connections: Optional[DagOptionValue] = None + self.tunneling: Optional[DagOptionValue] = None + self.remote_browser_isolation: Optional[DagOptionValue] = None + self.graphical_session_recording: Optional[DagOptionValue] = None + self.text_session_recording: Optional[DagOptionValue] = None + self.ai_threat_detection: Optional[DagOptionValue] = None + self.ai_terminate_session_on_detection: Optional[DagOptionValue] = None + # NB! 
PAM User has its own rotation_settings: {}, cannot enable con/tun on user anyways + # remote_browser_isolation uses rbi, pam_resource, graphical_session_recording + # rotation uses only pam_resource, rotation + # machine/db/dir uses all + + @classmethod + def load(cls, data: Union[str, dict]): + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"DAG settings failed to load from: {str(data)[:80]}") + if not isinstance(data, dict): return obj + + val = data.get("resource", None) + if isinstance(val, str): obj.pam_resource = val + obj.rotation = DagOptionValue.map(data.get("rotation", None) or "") + obj.connections = DagOptionValue.map(data.get("connections", None) or "") + obj.tunneling = DagOptionValue.map(data.get("tunneling", None) or "") + obj.remote_browser_isolation = DagOptionValue.map(data.get("remote_browser_isolation", None) or "") + obj.graphical_session_recording = DagOptionValue.map(data.get("graphical_session_recording", None) or "") + obj.text_session_recording = DagOptionValue.map(data.get("text_session_recording", None) or "") + obj.ai_threat_detection = DagOptionValue.map(data.get("ai_threat_detection", None) or "") + obj.ai_terminate_session_on_detection = DagOptionValue.map(data.get("ai_terminate_session_on_detection", None) or "") + + return obj + + +class DagJitSettingsObject(): + def __init__(self): + self.create_ephemeral: bool = False + self.elevate: bool = False + self.elevation_method: str = "group" + self.elevation_string: str = "" + self.base_distinguished_name: str = "" + self.ephemeral_account_type: Optional[str] = None # Omit if missing + self.pam_directory_record: Optional[str] = None # Title of pamDirectory from pam_data.resources[], resolved to UID + self.pam_directory_uid: Optional[str] = None # Resolved pamDirectory record UID (set in process_data) + + @classmethod + def validate_enum_value(cls, value: str, allowed_values: List[str], field_name: str) -> Optional[str]: + """Validate value against predefined list. Returns validated value or None if invalid.""" + if not value or value == "": + return None # Empty string not allowed for enum fields + value_lower = value.lower() + allowed_lower = [v.lower() for v in allowed_values] + if value_lower in allowed_lower: + # Return original case from allowed_values + idx = allowed_lower.index(value_lower) + return allowed_values[idx] + logging.warning(f"Invalid {field_name} value '{value}'. Allowed: {allowed_values}. Skipping.") + return None + + @classmethod + def load(cls, data: Union[str, dict]) -> Optional['DagJitSettingsObject']: + """Load JIT settings from JSON. 
Returns None if data is missing/empty.""" + obj = cls() + try: + data = json.loads(data) if isinstance(data, str) else data + except: + logging.error(f"JIT settings failed to load from: {str(data)[:80]}") + return None + + if not isinstance(data, dict): + return None + + # Check if object is empty (no valid fields) + has_valid_fields = False + + # Parse boolean fields with defaults + create_ephemeral = utils.value_to_boolean(data.get("create_ephemeral", None)) + if create_ephemeral is not None: + obj.create_ephemeral = create_ephemeral + has_valid_fields = True + + elevate = utils.value_to_boolean(data.get("elevate", None)) + if elevate is not None: + obj.elevate = elevate + has_valid_fields = True + + # Parse elevation_method with validation (defaults to "group" if missing or invalid) + elevation_method = data.get("elevation_method", None) + if elevation_method is not None: + validated = cls.validate_enum_value(str(elevation_method), ["group", "role"], "elevation_method") + if validated: + obj.elevation_method = validated + has_valid_fields = True + # If validation fails, keep the default "group" - still include in DAG JSON + # If missing, keep the default "group" - still include in DAG JSON + + # Parse string fields + elevation_string = data.get("elevation_string", None) + if elevation_string is not None and str(elevation_string).strip(): + obj.elevation_string = str(elevation_string).strip() + has_valid_fields = True + + base_distinguished_name = data.get("base_distinguished_name", None) + if base_distinguished_name is not None and str(base_distinguished_name).strip(): + obj.base_distinguished_name = str(base_distinguished_name).strip() + has_valid_fields = True + + # Parse ephemeral_account_type with validation (omit if missing) + ephemeral_account_type = data.get("ephemeral_account_type", None) + if ephemeral_account_type is not None: + validated = cls.validate_enum_value( + str(ephemeral_account_type), + ["linux", "mac", "windows", "domain"], + "ephemeral_account_type" + ) + if validated: + obj.ephemeral_account_type = validated + has_valid_fields = True + + # Parse pam_directory_record (title of pamDirectory from pam_data.resources[]; resolved to pam_directory_uid later) + pam_directory_record = data.get("pam_directory_record", None) + if pam_directory_record is not None and str(pam_directory_record).strip(): + obj.pam_directory_record = str(pam_directory_record).strip() + has_valid_fields = True + + # Silently ignore any other unknown fields (permissive parsing) + + # Return None if no valid fields were found (empty object) + return obj if has_valid_fields else None + + def to_dag_dict(self) -> Dict[str, Any]: + """Convert to DAG JSON format (camelCase).""" + result = { + "createEphemeral": self.create_ephemeral, + "elevate": self.elevate, + "elevationMethod": self.elevation_method, # Always included (defaults to "group" if missing/invalid) + "elevationString": self.elevation_string, + "baseDistinguishedName": self.base_distinguished_name + } + # Only include ephemeralAccountType if it was set (omit if missing/invalid) + if self.ephemeral_account_type: + result["ephemeralAccountType"] = self.ephemeral_account_type + return result + + +class DagAiSettingsObject(): + def __init__(self): + self.version: str = "v1.0.0" + self.risk_levels: Dict[str, Dict[str, Any]] = {} + + @classmethod + def _parse_tag_list(cls, items: Any) -> List[str]: + tags: List[str] = [] + if not isinstance(items, list): + return tags + for item in items: + tag = "" + if isinstance(item, str): + tag = 
item.strip() + elif isinstance(item, dict): + tag = str(item.get("tag", "")).strip() + if tag: + tags.append(tag) + return tags + + @classmethod + def load(cls, data: Union[str, dict]) -> Optional['DagAiSettingsObject']: + """Load AI settings from JSON. Returns None if data is missing/empty.""" + obj = cls() + try: + data = json.loads(data) if isinstance(data, str) else data + except: + logging.error(f"AI settings failed to load from: {str(data)[:80]}") + return None + + if not isinstance(data, dict): + return None + + risk_levels = data.get("risk_levels", None) + if not isinstance(risk_levels, dict): + return None + + for level in ["critical", "high", "medium", "low"]: + level_data = risk_levels.get(level, None) + if not isinstance(level_data, dict): + continue + + ai_session_terminate = utils.value_to_boolean(level_data.get("ai_session_terminate", None)) + activities = level_data.get("activities", None) or {} + if not isinstance(activities, dict): + activities = {} + + allow_tags = cls._parse_tag_list(activities.get("allow", [])) + deny_tags = cls._parse_tag_list(activities.get("deny", [])) + + if ai_session_terminate is None and not allow_tags and not deny_tags: + continue + + obj.risk_levels[level] = { + "ai_session_terminate": ai_session_terminate, + "allow": allow_tags, + "deny": deny_tags + } + + return obj if obj.risk_levels else None + + def _build_tag_entries(self, tags: List[str], action: str, user_id: str) -> List[Dict[str, Any]]: + entries: List[Dict[str, Any]] = [] + for tag in tags: + if not tag: + continue + entries.append({ + "tag": tag, + "auditLog": [{ + "date": utils.current_milli_time(), + "userId": user_id, + "action": action + }] + }) + return entries + + def to_dag_dict(self, user_id: str) -> Optional[Dict[str, Any]]: + if not self.risk_levels: + return None + + if not user_id: + logging.warning("AI settings auditLog userId is missing; auditLog will have empty userId.") + user_id = "" + + risk_levels: Dict[str, Any] = {} + for level, data in self.risk_levels.items(): + level_out: Dict[str, Any] = {} + + if data.get("ai_session_terminate") is not None: + level_out["aiSessionTerminate"] = data["ai_session_terminate"] + + tags_out: Dict[str, Any] = {} + allow_entries = self._build_tag_entries(data.get("allow", []), "added_to_allow", user_id) + if allow_entries: + tags_out["allow"] = allow_entries + deny_entries = self._build_tag_entries(data.get("deny", []), "added_to_deny", user_id) + if deny_entries: + tags_out["deny"] = deny_entries + + if tags_out: + level_out["tags"] = tags_out + + if level_out: + risk_levels[level] = level_out + + if not risk_levels: + return None + + return { + "version": self.version, + "riskLevels": risk_levels + } + + +class PamUserObject(): + def __init__(self): + self.folder_path = None # pam extend only + self.uid_imported = None # pam extend only - lookup by 1) uid 2) folder_path/title + self.uid = "" + self.type = "pamUser" + self.title = None + self.notes = None + self.login = None + self.password = None + self.privatePEMKey = None + self.distinguishedName = None + self.connectDatabase = None + self.managed = None + self.oneTimeCode = None + self.attachments = None # fileRef + self.scripts = None # script + self.rotation_settings = None # DAG: rotation settings + + @classmethod + def load(cls, data: Union[str, dict], rotation_params: Optional[PamRotationParams] = None): + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"PAM User failed to load from: {str(data)[:80]}") + if not 
isinstance(data, dict): return obj + + dtype = str(data["type"]) if "type" in data else "pamUser" + if dtype and dtype.lower() != "pamUser".lower(): + logging.warning(f"""PAM User data using wrong type "pamUser" != "{dtype[:80]}" """) + + obj.type = "pamUser" + obj.title = str(data["title"]) if "title" in data else None + obj.notes = str(data["notes"]) if "notes" in data else None + + obj.login = str(data["login"]) if "login" in data else None + obj.password = str(data["password"]) if "password" in data else None + obj.privatePEMKey = str(data["private_pem_key"]) if "private_pem_key" in data else None + obj.distinguishedName = str(data["distinguished_name"]) if "distinguished_name" in data else None + obj.connectDatabase = str(data["connect_database"]) if "connect_database" in data else None + obj.managed = utils.value_to_boolean(data["managed"]) if "managed" in data else None + obj.oneTimeCode = str(data["otp"]) if "otp" in data else None + + obj.attachments = PamAttachmentsObject.load(data.get("attachments", None)) + obj.scripts = PamScriptsObject.load(data.get("scripts", None)) + rso = PamRotationSettingsObject.load(data.get("rotation_settings", None), rotation_params) + if not is_blank_instance(rso): + obj.rotation_settings = rso + + obj.folder_path = str(data["folder_path"]) if "folder_path" in data else None + obj.uid_imported = str(data["uid"]) if "uid" in data else None + + if (obj.title is None or not obj.title.strip()) and obj.login and obj.login.strip(): + obj.title = f"PAM User - {str(obj.login).strip()}" + + obj.validate_record() + + return obj + + def create_record(self, params, folder_uid): + args = { + "force": True, + "folder": folder_uid, + "record_type": self.type + } + if self.uid: args["record_uid"] = self.uid + if self.title: args["title"] = self.title + if self.notes: args["notes"] = self.notes + + fields = [] + if self.login: fields.append(f"f.login={self.login}") + if self.password: fields.append(f"f.password={self.password}") + if self.privatePEMKey: fields.append(f"f.secret.privatePEMKey={self.privatePEMKey}") + if self.distinguishedName: fields.append(f"f.text.distinguishedName={self.distinguishedName}") + if self.connectDatabase: fields.append(f"f.text.connectDatabase={self.connectDatabase}") + + managed = utils.value_to_boolean(self.managed) + if managed is not None: fields.append(f"f.checkbox.managed={str(managed).lower()}") + + if self.oneTimeCode: fields.append(f"f.oneTimeCode={self.oneTimeCode}") + + files = self.attachments.attachments if self.attachments and isinstance(self.attachments, PamAttachmentsObject) else [] + if files and isinstance(files, list): + for x in files: + if x and isinstance(x, PamAttachmentObject) and x.file: + fields.append(f"file=@{x.file}") + + if fields: args["fields"] = fields + uid = RecordEditAddCommand().execute(params, **args) + if uid and isinstance(uid, str): + self.uid = uid + + # after record creation add PAM scripts + if uid and self.scripts and self.scripts.scripts: + add_pam_scripts(params, uid, self.scripts.scripts) + + # DAG: after record creation - self.scripts, self.rotation_settings + return uid + + def validate_record(self): + if not self.login: + logging.warning("PAM User is missing required field `login`") + if not self.rotation_settings: + logging.debug("PAM User is missing rotation settings") + if isinstance(self.rotation_settings, PamRotationSettingsObject): + if (str(self.rotation_settings.rotation).lower() == "general" and + not self.rotation_settings.resource): + logging.warning("PAM User with 
rotation type=general is missing required machine `resource=xxx`") + if self.uid_imported is not None and (not isinstance(self.uid_imported, str) or not RecordV3.is_valid_ref_uid(self.uid_imported)): + logging.error(f"PAM User uid_imported is not a valid UID: {self.uid_imported}") + + +class LoginUserObject(): + def __init__(self): + self.folder_path = None # pam extend only + self.uid_imported = None # pam extend only - lookup by 1) uid 2) folder_path/title + self.uid = "" + self.type = "login" + self.title = None + self.notes = None + self.login = None + self.password = None + self.url = None + self.oneTimeCode = None + self.attachments = None + + @classmethod + def load(cls, data: Union[str, dict]): + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"Record type `login` failed to load from: {str(data)[:80]}") + if not isinstance(data, dict): return obj + + dtype = str(data["type"]) if "type" in data else "login" + if dtype.lower() != "login".lower(): + logging.warning(f"""User data using wrong type "login" != "{dtype[:80]}" """) + + obj.uid = "" + obj.type = "login" + obj.title = str(data["title"]) if "title" in data else None + obj.notes = str(data["notes"]) if "notes" in data else None + + obj.login = str(data["login"]) if "login" in data else None + obj.password = str(data["password"]) if "password" in data else None + obj.url = str(data["url"]) if "url" in data else None + obj.oneTimeCode = str(data["otp"]) if "otp" in data else None + obj.attachments = PamAttachmentsObject.load(data.get("attachments", None)) + + obj.folder_path = str(data["folder_path"]) if "folder_path" in data else None + obj.uid_imported = str(data["uid"]) if "uid" in data else None + + obj.validate_record() + + return obj + + def create_record(self, params, folder_uid): + args = { + "force": True, + "folder": folder_uid, + "record_type": self.type + } + if self.uid: args["record_uid"] = self.uid + if self.title: args["title"] = self.title + if self.notes: args["notes"] = self.notes + + fields = [] + if self.login: fields.append(f"f.login={self.login}") + if self.password: fields.append(f"f.password={self.password}") + if self.url: fields.append(f"f.url={self.url}") + if self.oneTimeCode: fields.append(f"f.oneTimeCode={self.oneTimeCode}") + + files = self.attachments.attachments if self.attachments and isinstance(self.attachments, PamAttachmentsObject) else [] + if files and isinstance(files, list): + for x in files: + if x and isinstance(x, PamAttachmentObject) and x.file: + fields.append(f"file=@{x.file}") + + if fields: args["fields"] = fields + uid = RecordEditAddCommand().execute(params, **args) + if uid and isinstance(uid, str): + self.uid = uid + return uid + + def validate_record(self): + if self.uid_imported is not None and (not isinstance(self.uid_imported, str) or not RecordV3.is_valid_ref_uid(self.uid_imported)): + logging.error(f"Login User uid_imported is not a valid UID: {self.uid_imported}") + +class PamBaseMachineParser(): + def __init__(self): + self.folder_path = None # pam extend only + self.uid_imported = None # pam extend only - lookup by 1) uid 2) folder_path/title + self.type = "" + self.title = None + self.notes = None + self.host = None + self.port = None + self.sslVerification = None + self.providerGroup = None + self.providerRegion = None + self.oneTimeCode = None + self.attachments = None + self.scripts = None + self.pam_settings : Optional[PamSettingsFieldData] = None + + # pamMachine + self.operatingSystem = None + 
self.instanceName = None + self.instanceId = None + # Warning! Unused, split into linked pamUser record + self.login = None + self.password = None + self.privatePEMKey = None + + # pamDatabase + self.useSSL = None + self.databaseId = None + self.databaseType = None # postgresql|postgresql-flexible|mysql|mysql-flexible|mariadb|mariadb-flexible|mssql|oracle|mongodb + + # pamDirectory + self.domainName = None + self.alternativeIPs = None + self.directoryId = None + self.directoryType = None # active_directory|openldap + self.userMatch = None + + @classmethod + def load(cls, record_type: str, data: Union[str, dict]): + pam_machine_types = ("pamMachine", "pamDatabase", "pamDirectory") + pam_db_types = ("postgresql", "postgresql-flexible", "mysql", "mysql-flexible", "mariadb", "mariadb-flexible", "mssql", "oracle", "mongodb") + pam_dir_types = ("active_directory", "openldap") + + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"""Record type "{record_type}" failed to load from: {str(data)[:80]}""") + if not isinstance(data, dict): return obj + + dtype = str(data.get("type", None)) + data_type = next((s for s in pam_machine_types if s.lower() == dtype.lower()), None) + rec_type = next((s for s in pam_machine_types if s.lower() == str(record_type).lower()), None) + if rec_type and data_type and rec_type != data_type: + logging.warning(f"""Expected machine record type "{rec_type}" != data record type "{data_type}" - Parsing as "{rec_type}" """) + if rec_type is None: + msg = f"""Unknown expected record type "{record_type}". """ + if data_type is None: + msg = msg + f"""Unknown data record type "{dtype}" - Parsing it as generic pamMachine.""" + else: + msg = msg + f"""Using data record type "{data_type}".""" + logging.error(f"""{msg} Expected record types "{pam_machine_types}" """) + + obj.type = rec_type or data_type or "pamMachine" + obj.title = str(data["title"]) if "title" in data else None + obj.notes = str(data["notes"]) if "notes" in data else None + obj.host = str(data["host"]) if "host" in data else None + obj.port = str(data["port"]) if "port" in data else None + obj.sslVerification = utils.value_to_boolean(data["ssl_verification"]) if "ssl_verification" in data else None + obj.providerGroup = str(data["provider_group"]) if "provider_group" in data else None + obj.providerRegion = str(data["provider_region"]) if "provider_region" in data else None + obj.oneTimeCode = str(data["otp"]) if "otp" in data else None + obj.attachments = PamAttachmentsObject.load(data.get("attachments", None)) + obj.scripts = PamScriptsObject.load(data.get("scripts", None)) + + psd = data.get("pam_settings", None) + if psd: + obj.pam_settings = PamSettingsFieldData.load(psd) + if not obj.pam_settings: + logging.error(f"""{obj.type}: failed to load PAM Settings from "{str(data)[:80]}" """) + + # pamMachine + obj.operatingSystem = str(data["operating_system"]) if "operating_system" in data else None + obj.instanceName = str(data["instance_name"]) if "instance_name" in data else None + obj.instanceId = str(data["instance_id"]) if "instance_id" in data else None + # Warning! 
Unused, split into linked pamUser record + obj.login = str(data["login"]) if "login" in data else None + obj.password = str(data["password"]) if "password" in data else None + obj.privatePEMKey = str(data["private_pem_key"]) if "private_pem_key" in data else None + + # pamDatabase + obj.useSSL = utils.value_to_boolean(data["use_ssl"]) if "use_ssl" in data else None + obj.databaseId = str(data["database_id"]) if "database_id" in data else None + + dbtype = str(data["database_type"]) if "database_type" in data else None + pamdbt = next((s for s in pam_db_types if s.lower() == str(dbtype).lower()), None) + if dbtype and not pamdbt: + logging.error(f"""Unexpected DB type "{dbtype}" - should be one of the known DB types "{pam_db_types}" """) + pamdbt = dbtype.lower() # use provided db type "as-is" + if not pamdbt and obj.type == "pamDatabase": + logging.debug(f"""pamDatabase - unable to determine DB type: database_type should be one of "{pam_db_types}" """) + obj.databaseType = pamdbt + + # pamDirectory + obj.domainName = str(data["domain_name"]) if "domain_name" in data else None + obj.alternativeIPs = multiline_to_str(parse_multiline(data, "alternative_ips", "Error parsing alternative_ips")) + obj.directoryId = str(data["directory_id"]) if "directory_id" in data else None + obj.userMatch = str(data["user_match"]) if "user_match" in data else None + + dt = str(data["directory_type"]) if "directory_type" in data else None + pamdt = next((s for s in pam_dir_types if s.lower() == str(dt).lower()), None) + if dt and not pamdt: + logging.error(f"""Unexpected Directory type "{dt}" - should be one of "{pam_dir_types}" """) + pamdt = dt.lower() # use provided directory type "as-is" + if not pamdt and obj.type == "pamDirectory": + logging.debug(f"""pamDirectory - unable to determine Directory type: directory_type should be one of "{pam_dir_types}" """) + obj.directoryType = pamdt # active_directory|openldap + + obj.folder_path = str(data["folder_path"]) if "folder_path" in data else None + obj.uid_imported = str(data["uid"]) if "uid" in data else None + + return obj + +class PamMachineObject(): + def __init__(self): + self.folder_path = None # pam extend only + self.uid_imported = None # pam extend only - lookup by 1) uid 2) folder_path/title + self.uid = "" + self.type = "pamMachine" + self.title = None + self.notes = None + self.host = None # pamHostname + self.port = None # pamHostname + self.sslVerification = None + self.operatingSystem = None + self.instanceName = None + self.instanceId = None + self.providerGroup = None + self.providerRegion = None + self.oneTimeCode = None + self.attachments = None # fileRef + self.scripts = None # script + + # Warning! unused - use users[] to link users, rotation scripts etc. 
+ self.login = None + self.password = None + self.privatePEMKey = None + + self.pam_settings : Optional[PamSettingsFieldData] = None + self.users = None # List[PamUserObject] - one is admin(istrative credential) + + self.is_admin_external: bool = False # (True<=>found:pamDirectory#Title.pamUser#Title) + self.administrative_credentials_uid: str = "" # external or internal user UID + + @classmethod + def load(cls, data: Union[str, dict], rotation_params: Optional[PamRotationParams] = None): + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"""Record type "pamMachine" failed to load from: {str(data)[:80]}""") + if not isinstance(data, dict): return obj + + bmp = PamBaseMachineParser.load("pamMachine", data) + + if bmp and bmp.type.lower() != "pamMachine".lower(): + logging.warning(f"""PAM Machine data using wrong type "pamMachine" != "{bmp.type}" """) + + obj.type = "pamMachine" + obj.title = bmp.title + obj.notes = bmp.notes + obj.host = bmp.host + obj.port = bmp.port + obj.sslVerification = bmp.sslVerification + obj.operatingSystem = bmp.operatingSystem + obj.instanceName = bmp.instanceName + obj.instanceId = bmp.instanceId + obj.providerGroup = bmp.providerGroup + obj.providerRegion = bmp.providerRegion + obj.oneTimeCode = bmp.oneTimeCode + obj.attachments = bmp.attachments + obj.scripts = bmp.scripts + obj.pam_settings = bmp.pam_settings + obj.folder_path = bmp.folder_path + obj.uid_imported = bmp.uid_imported + + # Warning! unused - use users[] to link users, rotation scripts etc. + obj.login = bmp.login + obj.password = bmp.password + obj.privatePEMKey = bmp.privatePEMKey + + if (obj.title is None or not obj.title.strip()) and obj.login and obj.login.strip(): + obj.title = f"PAM Machine - {str(obj.login).strip()}" + if rotation_params: + rotation_params.ownerTitle = obj.title or "" + + obj.users = [] + users = data.get("users", None) + if users: + for user in users: + rt = str(user.get("type", "")) if isinstance(user, dict) else "" + if not rt: rt = "pamUser" # pamMachine user list is pamUser recs only + if rt.lower() != "pamUser".lower(): + logging.error(f"""{obj.title}:{obj.type}.users[] Expected record type pamUser, got "{rt}" - skipped.""") + continue + usr = PamUserObject.load(user, rotation_params) + if usr: + obj.users.append(usr) + else: + logging.warning(f"""Warning: PAM Machine "{obj.title}" with empty users section.""") + + obj.validate_record() + + return obj + + def create_record(self, params, folder_uid): + args = { + "force": True, + "folder": folder_uid, + "record_type": self.type + } + if self.uid: args["record_uid"] = self.uid + if self.title: args["title"] = self.title + if self.notes: args["notes"] = self.notes + + fields = [] + hostname = self.host.strip() if isinstance(self.host, str) and self.host.strip() else "" + port = self.port.strip() if isinstance(self.port, str) and self.port.strip() else "" + if hostname or port: + val = json.dumps({"hostName": hostname, "port": port}) + fields.append(f"f.pamHostname=$JSON:{val}") + + sslv = utils.value_to_boolean(self.sslVerification) + if sslv is not None: fields.append(f"checkbox.sslVerification={str(sslv).lower()}") + if self.operatingSystem: fields.append(f"f.text.operatingSystem={self.operatingSystem}") + if self.instanceName: fields.append(f"f.text.instanceName={self.instanceName}") + if self.instanceId: fields.append(f"f.text.instanceId={self.instanceId}") + if self.providerGroup: fields.append(f"f.text.providerGroup={self.providerGroup}") + if 
self.providerRegion: fields.append(f"f.text.providerRegion={self.providerRegion}") + + # Warning! unused - use users[] to link users, rotation scripts etc. + # if self.login: fields.append(f"f.login={self.login}") + # if self.password: fields.append(f"f.password={self.password}") + # if self.privatePEMKey: fields.append(f"f.secret.privatePEMKey={self.privatePEMKey}") + + if self.oneTimeCode: fields.append(f"f.oneTimeCode={self.oneTimeCode}") + + files = self.attachments.attachments if self.attachments and isinstance(self.attachments, PamAttachmentsObject) else [] + if files and isinstance(files, list): + for x in files: + if x and isinstance(x, PamAttachmentObject) and x.file: + fields.append(f"file=@{x.file}") + + # pam_settings port_forward/connection belong to the record + if self.pam_settings and isinstance(self.pam_settings, PamSettingsFieldData): + allowSupplyHost = True if self.pam_settings.allowSupplyHost is True else False + portForward = self.pam_settings.portForward.to_record_dict() if self.pam_settings.portForward else {} + connection = self.pam_settings.connection.to_record_dict() if self.pam_settings.connection else {} + if portForward or connection or allowSupplyHost: + val = json.dumps({"allowSupplyHost": allowSupplyHost, "portForward": portForward or {}, "connection": connection or {}}) + fields.append(f"c.pamSettings=$JSON:{val}") + # switch to f.* once RT definition(s) update w/ pamSettings field + + if fields: args["fields"] = fields + uid = RecordEditAddCommand().execute(params, **args) + if uid and isinstance(uid, str): + self.uid = uid + + # after record creation add PAM scripts + if uid and self.scripts and self.scripts.scripts: + add_pam_scripts(params, uid, self.scripts.scripts) + + # DAG: after record creation - self.scripts, self.pam_settings.options + return uid + + def validate_record(self): + # Warning! unused - use users[] to link users, rotation scripts etc. + if self.login or self.password or self.privatePEMKey: + logging.warning(f"""PAM Machine "{self.title}" detected legacy format - """ + "please create separate pamUser record with login, password, privatePEMKey") + if not (self.host or self.port): + logging.warning(f"""PAM Machine "{self.title}" is missing required field `pamHostname` data (host/port)""") + errmsg = validate_pam_connection(self.pam_settings.connection, "pamMachine") if self.pam_settings else "" + if errmsg: + logging.warning(f"""PAM Machine "{self.title}" has incorrect connection setup: {errmsg}""") + if self.uid_imported is not None and (not isinstance(self.uid_imported, str) or not RecordV3.is_valid_ref_uid(self.uid_imported)): + logging.error(f"PAM Machine uid_imported is not a valid UID: {self.uid_imported}") + +def validate_pam_connection(connection, record_type): + errmsg = "" + if connection: + # Apparently all machine types allow connections using ANY protocol + # ex. 
pamDatabase allowing SSH/RDP or pamMachine allowing proto: mysql + # known_mach_types = [ConnectionSettingsRDP, ConnectionSettingsVNC, ConnectionSettingsTelnet, ConnectionSettingsSSH, ConnectionSettingsKubernetes] + # known_db_types = [ConnectionSettingsSqlServer, ConnectionSettingsPostgreSQL, ConnectionSettingsMySQL] + + known_conn_types = PamSettingsFieldData.pam_connection_classes + [ConnectionSettingsHTTP] + known_mach_types = PamSettingsFieldData.pam_connection_classes + known_db_types = known_mach_types + known_rbi_types = [ConnectionSettingsHTTP] + + # known_conn_proto = [x.protocol.value.lower() for x in known_conn_types] # pylint: disable=E1101 + known_mach_proto = [x.protocol.value.lower() for x in known_mach_types] # pylint: disable=E1101 + known_db_proto = [x.protocol.value.lower() for x in known_db_types] # pylint: disable=E1101 + known_rbi_proto = [x.protocol.value.lower() for x in known_rbi_types] # pylint: disable=E1101 + + rt = str(record_type).lower().strip() + if type(connection) not in known_conn_types: + errmsg = f"""PAM Connection of unknown type "{type(connection).__name__}" """ + elif rt == "pamMachine".lower(): + if type(connection) not in known_mach_types: + errmsg = f"""PAM Connection of type "{type(connection).__name__}" is incompatible with "{record_type}" """ + if (isinstance(getattr(connection, "protocol", ""), ConnectionProtocol) and + connection.protocol.value.lower() not in known_mach_proto): + errmsg = errmsg + f""" Unexpected PAM Machine connection protocol "{connection.protocol.value}" """ + elif rt == "pamDatabase".lower(): + if type(connection) not in known_db_types: + errmsg = f"""PAM Connection of type "{type(connection).__name__}" is incompatible with "{record_type}" """ + if (isinstance(getattr(connection, "protocol", ""), ConnectionProtocol) and + connection.protocol.value.lower() not in known_db_proto): + errmsg = errmsg + f""" Unexpected PAM Database connection protocol "{connection.protocol.value}" """ + elif rt == "pamDirectory".lower(): + if type(connection) not in known_mach_types: + errmsg = f"""PAM Connection of type "{type(connection).__name__}" is incompatible with "{record_type}" """ + if (isinstance(getattr(connection, "protocol", ""), ConnectionProtocol) and + connection.protocol.value.lower() not in known_mach_proto): + errmsg = errmsg + f""" Unexpected PAM Directory connection protocol "{connection.protocol.value}" """ + elif rt == "pamRemoteBrowser".lower(): + if type(connection) not in known_rbi_types: + errmsg = f"""PAM Connection of type "{type(connection).__name__}" is incompatible with "{record_type}" """ + if (isinstance(getattr(connection, "protocol", ""), ConnectionProtocol) and + connection.protocol.value.lower() not in known_rbi_proto): + errmsg = errmsg + f""" Unexpected PAM Remote Browser connection protocol "{connection.protocol.value}" """ + if errmsg: + logging.debug(errmsg) + return errmsg + + +class PamDatabaseObject(): + def __init__(self): + self.folder_path = None # pam extend only + self.uid_imported = None # pam extend only - lookup by 1) uid 2) folder_path/title + self.uid = "" + self.type = "pamDatabase" + self.title = None + self.notes = None + self.host = None # pamHostname + self.port = None # pamHostname + self.useSSL = None + self.databaseId = None + self.databaseType = None + self.providerGroup = None + self.providerRegion = None + self.oneTimeCode = None + self.attachments = None # fileRef + self.scripts = None # script + + self.trafficEncryptionSeed = None + self.pam_settings : 
Optional[PamSettingsFieldData] = None + self.users = None # List[PamUserObject] - one is admin(istrative credential) + + @classmethod + def load(cls, data: Union[str, dict], rotation_params: Optional[PamRotationParams] = None): + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"""Record type "pamDatabase" failed to load from: {str(data)[:80]}""") + if not isinstance(data, dict): return obj + + bmp = PamBaseMachineParser.load("pamDatabase", data) + + if bmp and bmp.type.lower() != "pamDatabase".lower(): + logging.warning(f"""PAM Database data using wrong type "pamDatabase" != "{bmp.type}" """) + + obj.type = "pamDatabase" + obj.title = bmp.title + obj.notes = bmp.notes + obj.host = bmp.host + obj.port = bmp.port + obj.useSSL = bmp.useSSL + obj.databaseId = bmp.databaseId + obj.databaseType = bmp.databaseType + obj.providerGroup = bmp.providerGroup + obj.providerRegion = bmp.providerRegion + obj.oneTimeCode = bmp.oneTimeCode + obj.attachments = bmp.attachments + obj.scripts = bmp.scripts + obj.pam_settings = bmp.pam_settings + obj.folder_path = bmp.folder_path + obj.uid_imported = bmp.uid_imported + + if (obj.title is None or not obj.title.strip()) and obj.databaseId and obj.databaseId.strip(): + obj.title = f"PAM Database - {str(obj.databaseId).strip()}" + if rotation_params: + rotation_params.ownerTitle = obj.title or "" + + obj.users = [] + users = data.get("users", None) + if users: + for user in users: + rt = str(user.get("type", "")) if isinstance(user, dict) else "" + if not rt: rt = "pamUser" # pamDatabase user list is pamUser recs only + if rt.lower() != "pamUser".lower(): + logging.error(f"""{obj.title}:{obj.type}.users[] Expected record type pamUser, got "{rt}" - skipped.""") + continue + usr = PamUserObject.load(user, rotation_params) + if usr: + obj.users.append(usr) + else: + logging.warning(f"""Warning: PAM Database "{obj.title}" with empty users section.""") + + obj.validate_record() + + return obj + + def create_record(self, params, folder_uid): + args = { + "force": True, + "folder": folder_uid, + "record_type": self.type + } + if self.uid: args["record_uid"] = self.uid + if self.title: args["title"] = self.title + if self.notes: args["notes"] = self.notes + + fields = [] + hostname = self.host.strip() if isinstance(self.host, str) and self.host.strip() else "" + port = self.port.strip() if isinstance(self.port, str) and self.port.strip() else "" + if hostname or port: + val = json.dumps({"hostName": hostname, "port": port}) + fields.append(f"f.pamHostname=$JSON:{val}") + + ssl = utils.value_to_boolean(self.useSSL) + if ssl is not None: fields.append(f"f.checkbox.useSSL={str(ssl).lower()}") + if self.databaseId: fields.append(f"f.text.databaseId={self.databaseId}") + if self.databaseType: fields.append(f"f.databaseType={self.databaseType}") + if self.providerGroup: fields.append(f"f.text.providerGroup={self.providerGroup}") + if self.providerRegion: fields.append(f"f.text.providerRegion={self.providerRegion}") + + if self.oneTimeCode: fields.append(f"f.oneTimeCode={self.oneTimeCode}") + + files = self.attachments.attachments if self.attachments and isinstance(self.attachments, PamAttachmentsObject) else [] + if files and isinstance(files, list): + for x in files: + if x and isinstance(x, PamAttachmentObject) and x.file: + fields.append(f"file=@{x.file}") + + # pam_settings port_forward/connection belong to the record + if self.pam_settings and isinstance(self.pam_settings, PamSettingsFieldData): + allowSupplyHost = True 
if self.pam_settings.allowSupplyHost is True else False + portForward = self.pam_settings.portForward.to_record_dict() if self.pam_settings.portForward else {} + connection = self.pam_settings.connection.to_record_dict() if self.pam_settings.connection else {} + if portForward or connection or allowSupplyHost: + val = json.dumps({"allowSupplyHost": allowSupplyHost, "portForward": portForward or {}, "connection": connection or {}}) + fields.append(f"c.pamSettings=$JSON:{val}") + # switch to f.* once RT definition(s) update w/ pamSettings field + + if fields: args["fields"] = fields + uid = RecordEditAddCommand().execute(params, **args) + if uid and isinstance(uid, str): + self.uid = uid + + # after record creation add PAM scripts + if uid and self.scripts and self.scripts.scripts: + add_pam_scripts(params, uid, self.scripts.scripts) + + # DAG: after record creation - self.scripts, self.pam_settings.options + return uid + + def validate_record(self): + if not (self.host or self.port): + logging.warning(f"""PAM Database "{self.title}" is missing required field "pamHostname" data (host/port)""") + errmsg = validate_pam_connection(self.pam_settings.connection, "pamDatabase") if self.pam_settings else "" + if errmsg: + logging.warning(f"""PAM Database "{self.title}" has incorrect connection setup: {errmsg}""") + if self.uid_imported is not None and (not isinstance(self.uid_imported, str) or not RecordV3.is_valid_ref_uid(self.uid_imported)): + logging.error(f"PAM Database uid_imported is not a valid UID: {self.uid_imported}") + +class PamDirectoryObject(): + def __init__(self): + self.folder_path = None # pam extend only + self.uid_imported = None # pam extend only - lookup by 1) uid 2) folder_path/title + self.uid = "" + self.type = "pamDirectory" + self.title = None + self.notes = None + self.host = None # pamHostname + self.port = None # pamHostname + self.useSSL = None + self.domainName = None + self.alternativeIPs = None + self.directoryId = None + self.directoryType = None # " + self.userMatch = None + self.providerGroup = None + self.providerRegion = None + self.oneTimeCode = None + self.attachments = None # fileRef + self.scripts = None # script + + self.pam_settings : Optional[PamSettingsFieldData] = None + self.users = None # List[PamUserObject] - one is admin(istrative credential) + + @classmethod + def load(cls, data: Union[str, dict], rotation_params: Optional[PamRotationParams] = None): + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"""Record type "pamDirectory" failed to load from: {str(data)[:80]}""") + if not isinstance(data, dict): return obj + + bmp = PamBaseMachineParser.load("pamDirectory", data) + + if bmp and bmp.type.lower() != "pamDirectory".lower(): + logging.warning(f"""PAM Directory data using wrong type "pamDirectory" != "{bmp.type}" """) + + obj.type = "pamDirectory" + obj.title = bmp.title + obj.notes = bmp.notes + obj.host = bmp.host + obj.port = bmp.port + obj.useSSL = bmp.useSSL + obj.domainName = bmp.domainName + obj.alternativeIPs = bmp.alternativeIPs + obj.directoryId = bmp.directoryId + obj.directoryType = bmp.directoryType + obj.userMatch = bmp.userMatch + obj.providerGroup = bmp.providerGroup + obj.providerRegion = bmp.providerRegion + obj.oneTimeCode = bmp.oneTimeCode + obj.attachments = bmp.attachments + obj.scripts = bmp.scripts + obj.pam_settings = bmp.pam_settings + obj.folder_path = bmp.folder_path + obj.uid_imported = bmp.uid_imported + + if (obj.title is None or not obj.title.strip()) and 
obj.domainName and obj.domainName.strip(): + obj.title = f"PAM Directory - {str(obj.domainName).strip()}" + if rotation_params: + rotation_params.ownerTitle = obj.title or "" + + obj.users = [] + users = data.get("users", None) + if users: + for user in users: + rt = str(user.get("type", "")) if isinstance(user, dict) else "" + if not rt: rt = "pamUser" # pamDirectory user list is pamUser recs only + if rt.lower() != "pamUser".lower(): + logging.error(f"""{obj.title}:{obj.type}.users[] Expected record type pamUser, got "{rt}" - skipped.""") + continue + usr = PamUserObject.load(user, rotation_params) + if usr: + obj.users.append(usr) + else: + logging.warning(f"""Warning: PAM Directory "{obj.title}" with empty users section.""") + + obj.validate_record() + + return obj + + def create_record(self, params, folder_uid): + args = { + "force": True, + "folder": folder_uid, + "record_type": self.type + } + if self.uid: args["record_uid"] = self.uid + if self.title: args["title"] = self.title + if self.notes: args["notes"] = self.notes + + fields = [] + hostname = self.host.strip() if isinstance(self.host, str) and self.host.strip() else "" + port = self.port.strip() if isinstance(self.port, str) and self.port.strip() else "" + if hostname or port: + val = json.dumps({"hostName": hostname, "port": port}) + fields.append(f"f.pamHostname=$JSON:{val}") + + ssl = utils.value_to_boolean(self.useSSL) + if ssl is not None: fields.append(f"f.checkbox.useSSL={str(ssl).lower()}") + if self.domainName: fields.append(f"f.text.domainName={self.domainName}") + if self.alternativeIPs: fields.append(f"f.multiline.alternativeIPs={self.alternativeIPs}") + if self.directoryId: fields.append(f"f.text.directoryId={self.directoryId}") + if self.directoryType: fields.append(f"f.directoryType={self.directoryType}") + if self.userMatch: fields.append(f"f.text.userMatch={self.userMatch}") + if self.providerGroup: fields.append(f"f.text.providerGroup={self.providerGroup}") + if self.providerRegion: fields.append(f"f.text.providerRegion={self.providerRegion}") + + if self.oneTimeCode: fields.append(f"f.oneTimeCode={self.oneTimeCode}") + + files = self.attachments.attachments if self.attachments and isinstance(self.attachments, PamAttachmentsObject) else [] + if files and isinstance(files, list): + for x in files: + if x and isinstance(x, PamAttachmentObject) and x.file: + fields.append(f"file=@{x.file}") + + # pam_settings port_forward/connection belong to the record + if self.pam_settings and isinstance(self.pam_settings, PamSettingsFieldData): + allowSupplyHost = True if self.pam_settings.allowSupplyHost is True else False + portForward = self.pam_settings.portForward.to_record_dict() if self.pam_settings.portForward else {} + connection = self.pam_settings.connection.to_record_dict() if self.pam_settings.connection else {} + if portForward or connection or allowSupplyHost: + val = json.dumps({"allowSupplyHost": allowSupplyHost, "portForward": portForward or {}, "connection": connection or {}}) + fields.append(f"c.pamSettings=$JSON:{val}") + # switch to f.* once RT definition(s) update w/ pamSettings field + + if fields: args["fields"] = fields + uid = RecordEditAddCommand().execute(params, **args) + if uid and isinstance(uid, str): + self.uid = uid + + # after record creation add PAM scripts + if uid and self.scripts and self.scripts.scripts: + add_pam_scripts(params, uid, self.scripts.scripts) + + # DAG: after record creation - self.scripts, self.pam_settings.options + return uid + + def validate_record(self): + if not 
(self.host or self.port): + logging.warning(f"""PAM Directory "{self.title}" is missing required field `pamHostname` data (host/port)""") + errmsg = validate_pam_connection(self.pam_settings.connection, "pamDirectory") if self.pam_settings else "" + if errmsg: + logging.warning(f"""PAM Directory "{self.title}" has incorrect connection setup: {errmsg}""") + if self.uid_imported is not None and (not isinstance(self.uid_imported, str) or not RecordV3.is_valid_ref_uid(self.uid_imported)): + logging.error(f"PAM Directory uid_imported is not a valid UID: {self.uid_imported}") + +class PamRemoteBrowserObject(): + def __init__(self): + self.folder_path = None # pam extend only + self.uid_imported = None # pam extend only - lookup by 1) uid 2) folder_path/title + self.uid = "" + self.type = "pamRemoteBrowser" + self.title = None + self.notes = None + self.rbiUrl = None + self.oneTimeCode = None + self.attachments = None # fileRef + + self.rbi_settings : Optional[PamRemoteBrowserSettings] = None # ft: pamRemoteBrowserSettings + # Use httpCredentialsUid <- resolved from autofill_credentials (ref rt:Login in pam_data.users[]) + + @classmethod + def load(cls, data: Union[str, dict], rotation_params: Optional[PamRotationParams] = None): + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"""Record type "pamRemoteBrowser" failed to load from: {str(data)[:80]}""") + if not isinstance(data, dict): return obj + + dtype = data.get("type", None) + if dtype and str(dtype).lower() != "pamRemoteBrowser".lower(): + logging.warning(f"""PAM RBI data using wrong type "pamRemoteBrowser" != "{dtype}" """) + + obj.type = "pamRemoteBrowser" + obj.title = str(data["title"]) if "title" in data else None + obj.notes = str(data["notes"]) if "notes" in data else None + obj.rbiUrl = str(data["url"]) if "url" in data else None + obj.oneTimeCode = str(data["otp"]) if "otp" in data else None + obj.attachments = PamAttachmentsObject.load(data.get("attachments", None)) + + psd = data.get("pam_settings", None) + rbi_settings = PamRemoteBrowserSettings.load(psd) + obj.rbi_settings = None if is_empty_instance(rbi_settings) else rbi_settings + if psd and not obj.rbi_settings: + logging.error(f"""{obj.type}: failed to load RBI Settings from "{str(psd)[:80]}" """) + + if (obj.title is None or not obj.title.strip()) and obj.rbiUrl and str(obj.rbiUrl).strip(): + hostname = str(obj.rbiUrl).lower() + hostname = re.sub(r"^\s*https?://", "", hostname, flags=re.IGNORECASE) + hostname = hostname.split("/", 1)[0] + if hostname: + obj.title = f"PAM RBI - {hostname}" + + obj.folder_path = str(data["folder_path"]) if "folder_path" in data else None + obj.uid_imported = str(data["uid"]) if "uid" in data else None + + obj.validate_record() + + return obj + + def create_record(self, params, folder_uid): + args = { + "force": True, + "folder": folder_uid, + "record_type": self.type + } + if self.uid: args["record_uid"] = self.uid + if self.title: args["title"] = self.title + if self.notes: args["notes"] = self.notes + + fields = [] + if self.rbiUrl: fields.append(f"rbiUrl={self.rbiUrl}") + + if self.oneTimeCode: fields.append(f"oneTimeCode={self.oneTimeCode}") + + files = self.attachments.attachments if self.attachments and isinstance(self.attachments, PamAttachmentsObject) else [] + if files and isinstance(files, list): + for x in files: + if x and isinstance(x, PamAttachmentObject) and x.file: + fields.append(f"file=@{x.file}") + + # pam_settings connection belongs to the record + connection = {} + 
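+        # Illustrative only (shape assumed from ConnectionSettingsHTTP.to_record_dict):
+        # the custom field assembled below would look like
+        #   pamRemoteBrowserSettings=$JSON:{"connection": {"protocol": "http", "allowSupplyUser": true}}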
+        if self.rbi_settings and isinstance(self.rbi_settings, PamRemoteBrowserSettings):
+            if self.rbi_settings.connection:
+                connection = self.rbi_settings.connection.to_record_dict()
+            if connection:
+                val = json.dumps({"connection": connection or {}})
+                fields.append(f"pamRemoteBrowserSettings=$JSON:{val}")
+                # switch to f.* once RT definition(s) update w/ pamRemoteBrowserSettings field
+
+        if fields: args["fields"] = fields
+        uid = RecordEditAddCommand().execute(params, **args)
+        if uid and isinstance(uid, str):
+            self.uid = uid
+
+        # DAG: after record creation - self.pam_settings.options
+        return uid
+
+    def validate_record(self):
+        errmsg = validate_pam_connection(self.rbi_settings.connection, "pamRemoteBrowser") if self.rbi_settings else ""
+        if errmsg:
+            logging.warning(f"""PAM RBI "{self.title}" has incorrect connection setup: {errmsg}""")
+        if self.uid_imported is not None and (not isinstance(self.uid_imported, str) or not RecordV3.is_valid_ref_uid(self.uid_imported)):
+            logging.error(f"PAM RBI uid_imported is not a valid UID: {self.uid_imported}")
+
+# PAM Settings field data
+FONT_SIZES = (8, 9, 10, 11, 12, 14, 18, 24, 30, 36, 48, 60, 72, 96)
+class ConnectionProtocol(Enum):
+    RDP = "rdp"
+    VNC = "vnc"
+    TELNET = "telnet"
+    SSH = "ssh"
+    KUBERNETES = "kubernetes"
+    SQLSERVER = "sql-server"
+    POSTGRESQL = "postgresql"
+    MYSQL = "mysql"
+    HTTP = "http"
+
+class RDPSecurity(Enum):
+    ANY = "any"
+    NLA = "nla"
+    TLS = "tls"
+    VMCONNECT = "vmconnect"
+    RDP = "rdp"
+
+    @classmethod
+    def map(cls, rdp_security: str):
+        try: return cls(str(rdp_security).lower())
+        except ValueError: return None
+
+class TerminalThemes(Enum):
+    BLACK_WHITE = "black-white" # Black on white
+    GRAY_BLACK = "gray-black" # Gray on black
+    GREEN_BLACK = "green-black" # Green on black
+    WHITE_BLACK = "white-black" # White on black
+    CUSTOM = "custom" # Not a valid value to send to guac
+    # example custom color scheme:
+    # "colorScheme": "background: rgb:00/3D/FC;\nforeground: rgb:74/1A/1A;\ncolor0: rgb:00/00/00;\ncolor1: rgb:99/3E/3E;\ncolor2: rgb:3E/99/3E;\ncolor3: rgb:99/99/3E;\ncolor4: rgb:3E/3E/99;\ncolor5: rgb:99/3E/99;\ncolor6: rgb:3E/99/99;\ncolor7: rgb:99/99/99;\ncolor8: rgb:3E/3E/3E;\ncolor9: rgb:FF/67/67;\ncolor10: rgb:67/FF/67;\ncolor11: rgb:FF/FF/67;\ncolor12: rgb:67/67/FF;\ncolor13: rgb:FF/67/FF;\ncolor14: rgb:67/FF/FF;\ncolor15: rgb:FF/FF/FF;"
+
+    @classmethod
+    def map(cls, tty_theme: str):
+        try: return cls(str(tty_theme).lower())
+        except ValueError: return None
+
+def parse_multiline(data: dict, key: str, message: str = "") -> Optional[List[str]]:
+    if data and isinstance(data, dict) and key and isinstance(key, str):
+        val = data.get(key, None) # "multiline": ["line1", "line2"]
+        if isinstance(val, str): val = [val] # allow for "multiline": "line1"
+        if val and isinstance(val, list):
+            if any(not isinstance(x, str) or x == "" for x in val):
+                logging.warning(f"{message} - value: {val[:24]}" if (isinstance(message, str) and message != "")
+                                else "Error parsing multiline value (skipped): "
+                                     f"found empty or non-string values - value: {val[:24]}")
+            else:
+                return val
+    return None
+
+def multiline_to_str(lines: Optional[List[str]]) -> Optional[str]:
+    if lines and isinstance(lines, list):
+        return "\n".join(lines)
+    return None
+
+def multiline_stringify(lines: Optional[List[str]]) -> Optional[str]:
+    if lines and isinstance(lines, list):
+        # NB! strip() may remove more quotes esp.
at end of string + val = json.dumps("\n".join(lines)) + if val and val.startswith("\"") and val.endswith("\""): + val = val[1:-1] + return val + return None + +def parse_dag_option(option: Optional[str]) -> Optional[str]: + key = str(option).lower() + if key in ("on", "off", "default"): + return key + return None + +class ClipboardConnectionSettings: + def __init__(self, disableCopy: Optional[bool] = None, disablePaste: Optional[bool] = None): + self.disableCopy = disableCopy + self.disablePaste = disablePaste + + @classmethod + def load(cls, data: Union[str, dict]): + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"Clipboard Connection Settings failed to load from: {str(data)[:80]}") + if not isinstance(data, dict): return obj + + obj.disableCopy = utils.value_to_boolean(data.get("disable_copy", None)) + obj.disablePaste = utils.value_to_boolean(data.get("disable_paste", None)) + return obj + +def clipboard_connection_settings(connection_settings: Union[PamConnectionSettings, ConnectionSettingsHTTP]) -> Optional[ClipboardConnectionSettings]: + if connection_settings and connection_settings.protocol and connection_settings.protocol in ( + ConnectionProtocol.RDP, + ConnectionProtocol.VNC, + ConnectionProtocol.TELNET, + ConnectionProtocol.SSH, + ConnectionProtocol.SQLSERVER, + ConnectionProtocol.MYSQL, + ConnectionProtocol.POSTGRESQL, + ConnectionProtocol.HTTP + ): + disableCopy = getattr(connection_settings, "disableCopy", None) + disablePaste = getattr(connection_settings, "disablePaste", None) + return ClipboardConnectionSettings(disableCopy, disablePaste) + +class SFTPRootDirectorySettings: + def __init__(self, enableSftp: Optional[bool] = None, sftpRootDirectory: Optional[str] = None): + self.enableSftp = enableSftp + self.sftpRootDirectory = sftpRootDirectory + + @classmethod + def load(cls, data: Union[str, dict]): + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"SFTP Root Directory Settings failed to load from: {str(data)[:80]}") + if not isinstance(data, dict): return obj + + obj.enableSftp = utils.value_to_boolean(data.get("enable_sftp", None)) + val = data.get("sftp_root_directory", None) + if isinstance(val, str): obj.sftpRootDirectory = val + return obj + + def to_dict(self): + dict: Dict[str, Any] = {} + if self.enableSftp is not None and isinstance(self.enableSftp, bool): + dict["enableSftp"] = self.enableSftp + if self.sftpRootDirectory and isinstance(self.sftpRootDirectory, str) and self.sftpRootDirectory.strip(): + dict["sftpRootDirectory"] = self.sftpRootDirectory.strip() + + return dict + +class SFTPConnectionSettings(SFTPRootDirectorySettings): + def __init__( + self, + enableSftp: Optional[bool] = None, + sftpRootDirectory: Optional[str] = None, + sftpResource: Optional[List[str]] = None, + sftpUser: Optional[List[str]] = None, + sftpDirectory: Optional[str] = None, + sftpServerAliveInterval: Optional[int] = None + ): + super().__init__(enableSftp, sftpRootDirectory) + self.sftpResource = sftpResource + self.sftpUser = sftpUser + self.sftpDirectory = sftpDirectory + self.sftpServerAliveInterval = sftpServerAliveInterval + self.sftpResourceUid = None # resolve from sftpResource + self.sftpUserUid = None # resolve from sftpUser + + @classmethod + def load(cls, data: Union[str, dict]): + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"SFTP Connection Settings failed to load from: {str(data)[:80]}") + if not 
isinstance(data, dict): return obj + + rds = SFTPRootDirectorySettings.load(data) + if rds: + obj.enableSftp = rds.enableSftp + obj.sftpRootDirectory = rds.sftpRootDirectory + + # which is the resource record (not yet in web UI) + obj.sftpResource = parse_multiline(data, "sftp_resource", "Error parsing sftp_resource") + obj.sftpUser = parse_multiline(data, "sftp_user_credentials", "Error parsing sftp_user_credentials") + val = data.get("sftp_upload_directory", None) + if isinstance(val, str): obj.sftpDirectory = val + val = data.get("sftp_keepalive_interval", None) + if type(val) is int: obj.sftpServerAliveInterval = abs(val) + elif val and str(val).isdecimal(): obj.sftpServerAliveInterval = int(val) + + return obj + + def to_dict(self): + dict: Dict[str, Any] = {} + if self.sftpRootDirectory and isinstance(self.sftpRootDirectory, str) and self.sftpRootDirectory.strip(): + dict["sftpRootDirectory"] = self.sftpRootDirectory.strip() + if self.enableSftp is not None and isinstance(self.enableSftp, bool): + dict["enableSftp"] = self.enableSftp + + # if resolved from sftpResource + if self.sftpResourceUid and isinstance(self.sftpResourceUid, str) and self.sftpResourceUid.strip(): + dict["sftpResourceUid"] = self.sftpResourceUid.strip() + # if resolved from sftpUser + if self.sftpUserUid and isinstance(self.sftpUserUid, str) and self.sftpUserUid.strip(): + dict["sftpUserUid"] = self.sftpUserUid.strip() + + if self.sftpDirectory and isinstance(self.sftpDirectory, str) and self.sftpDirectory.strip(): + dict["sftpDirectory"] = self.sftpDirectory.strip() + if self.sftpServerAliveInterval and type(self.sftpServerAliveInterval) is int and abs(self.sftpServerAliveInterval) > 0: + dict["sftpServerAliveInterval"] = abs(self.sftpServerAliveInterval) + + return dict + +def sftp_enabled(connection_settings: Union[PamConnectionSettings, ConnectionSettingsHTTP]) -> Optional[bool]: + if connection_settings and connection_settings.protocol and connection_settings.protocol in ( + ConnectionProtocol.RDP, + ConnectionProtocol.VNC, + ConnectionProtocol.SSH + ): + sftp = getattr(connection_settings, "sftp", None) + if sftp: + enabled = getattr(sftp, "enableSftp", None) + return enabled + +class TerminalDisplayConnectionSettings: + fontSizes: List[int] = [8,9,10,11,12,14,18,24,30,36,48,60,72,96] + def __init__(self, colorScheme: Optional[str] = None, fontSize: Optional[int] = None): + self.colorScheme = colorScheme + self.fontSize = fontSize + + @classmethod + def load(cls, data: Union[str, dict]): + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"Terminal Display Connection Settings failed to load from: {str(data)[:80]}") + if not isinstance(data, dict): return obj + + val = data.get("color_scheme", None) + if isinstance(val, str): obj.colorScheme = val + val = data.get("font_size", None) + if type(val) is int: obj.fontSize = val + elif val and str(val).isdecimal(): obj.fontSize = int(val) + if obj.fontSize and type(obj.fontSize) is int: + font_size: int = obj.fontSize + closest_number = min(obj.fontSizes, key=lambda x: abs(x - font_size)) + if closest_number != font_size: + logging.error(f"Terminal Display Connection Settings - adjusted invalid font_size from: {obj.fontSize} to: {closest_number}") + obj.fontSize = closest_number + return obj + +class BaseConnectionSettings: + def __init__(self, port: Optional[str] = None, allowSupplyUser: Optional[bool] = None, userRecords: Optional[List[str]] = None, recordingIncludeKeys: Optional[bool] = None): + self.port = 
port # Override port from host + self.allowSupplyUser = allowSupplyUser + self.recordingIncludeKeys = recordingIncludeKeys + self.userRecords = userRecords + self.userRecordUid = None # resolved from userRecords + + @classmethod + def load(cls, data: Union[str, dict]): + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"Base Connection Settings failed to load from: {str(data)[:80]}") + if not isinstance(data, dict): return obj + + val = data.get("port", None) # Override port from host + if isinstance(val, str) or str(val).isdecimal(): obj.port = str(val) + + obj.allowSupplyUser = utils.value_to_boolean(data.get("allow_supply_user", None)) + obj.userRecords = parse_multiline(data, "administrative_credentials", "Error parsing administrative_credentials") + obj.recordingIncludeKeys = utils.value_to_boolean(data.get("recording_include_keys", None)) + return obj + +class ConnectionSettingsRDP(BaseConnectionSettings, ClipboardConnectionSettings): + protocol = ConnectionProtocol.RDP + def __init__( + self, + port: Optional[str] = None, # Override port from host + allowSupplyUser: Optional[bool] = None, + userRecords: Optional[List[str]] = None, + recordingIncludeKeys: Optional[bool] = None, + disableCopy: Optional[bool] = None, + disablePaste: Optional[bool] = None, + security: Optional[RDPSecurity] = None, + disableAuth: Optional[bool] = None, + ignoreCert: Optional[bool] = None, + loadBalanceInfo: Optional[str] = None, + preconnectionId: Optional[str] = None, + preconnectionBlob: Optional[str] = None, + sftp: Optional[SFTPConnectionSettings] = None, + disableAudio: Optional[bool] = None, + resizeMethod: Optional[str] = None, + enableWallpaper: Optional[bool] = None, + enableFullWindowDrag: Optional[bool] = None + ): + BaseConnectionSettings.__init__(self, port, allowSupplyUser, userRecords, recordingIncludeKeys) + ClipboardConnectionSettings.__init__(self, disableCopy, disablePaste) + self.security = security if isinstance(security, RDPSecurity) else None + self.disableAuth = disableAuth + self.ignoreCert = ignoreCert + self.loadBalanceInfo = loadBalanceInfo + self.preconnectionId = preconnectionId + self.preconnectionBlob = preconnectionBlob + self.sftp = sftp if isinstance(sftp, SFTPConnectionSettings) else None + self.disableAudio = disableAudio + self.resizeMethod = resizeMethod # disable_dynamic_resizing ? 
"" : "display-update" + # Performance Properties + self.enableWallpaper = enableWallpaper + self.enableFullWindowDrag = enableFullWindowDrag + + @classmethod + def load(cls, data: Union[str, dict]): + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"Connection Settings RDP failed to load from: {str(data)[:80]}") + if not isinstance(data, dict): return obj + + bcs = BaseConnectionSettings.load(data) + if bcs: + obj.port = bcs.port + obj.allowSupplyUser = bcs.allowSupplyUser + obj.userRecords = bcs.userRecords + obj.recordingIncludeKeys = bcs.recordingIncludeKeys + + ccs = ClipboardConnectionSettings.load(data) + if ccs: + obj.disableCopy = ccs.disableCopy + obj.disablePaste = ccs.disablePaste + + val = data.get("security", None) + if isinstance(val, str): obj.security = RDPSecurity.map(val) + obj.disableAuth = utils.value_to_boolean(data.get("disable_authentication", None)) + obj.ignoreCert = utils.value_to_boolean(data.get("ignore_server_cert", None)) + + val = data.get("load_balance_info", None) + if isinstance(val, str): obj.loadBalanceInfo = val # LoadBalance Info/Cookie + val = data.get("preconnection_id", None) + if isinstance(val, str): obj.preconnectionId = val + val = data.get("preconnection_blob", None) + if isinstance(val, str): obj.preconnectionBlob = val + sftp = data.get("sftp", None) + if isinstance(sftp, dict): obj.sftp = SFTPConnectionSettings.load(sftp) + + obj.disableAudio = utils.value_to_boolean(data.get("disable_audio", None)) + obj.enableWallpaper = utils.value_to_boolean(data.get("enable_wallpaper", None)) + obj.enableFullWindowDrag = utils.value_to_boolean(data.get("enable_full_window_drag", None)) + + # disable_dynamic_resizing ? "" : "display-update" + val = utils.value_to_boolean(data.get("disable_dynamic_resizing", None)) + if val is not True: obj.resizeMethod = "display-update" + + return obj + + def to_record_dict(self): + kvp: Dict[str, Any] = { "protocol": ConnectionProtocol.RDP.value } # pylint: disable=E1101 + + # if resolved (userRecords->userRecordUid) from administrative_credentials (usually after user create) + recs: list = self.userRecordUid if self.userRecordUid and isinstance(self.userRecordUid, list) else [] + uids = [x.strip() for x in recs if isinstance(x, str) and x.strip() != ""] + if uids: + kvp["userRecords"] = uids + + if self.port and isinstance(self.port, str) and self.port.strip(): + kvp["port"] = self.port.strip() + if self.allowSupplyUser is not None and isinstance(self.allowSupplyUser, bool): + kvp["allowSupplyUser"] = self.allowSupplyUser + if self.recordingIncludeKeys is not None and isinstance(self.recordingIncludeKeys, bool): + kvp["recordingIncludeKeys"] = self.recordingIncludeKeys + if self.disableCopy is not None and isinstance(self.disableCopy, bool): + kvp["disableCopy"] = self.disableCopy + if self.disablePaste is not None and isinstance(self.disablePaste, bool): + kvp["disablePaste"] = self.disablePaste + if isinstance(self.security, RDPSecurity): + kvp["security"] = self.security.value.lower() + + if self.disableAuth is not None and isinstance(self.disableAuth, bool): + kvp["disableAuth"] = self.disableAuth + if self.ignoreCert is not None and isinstance(self.ignoreCert, bool): + kvp["ignoreCert"] = self.ignoreCert + + if self.loadBalanceInfo and isinstance(self.loadBalanceInfo, str) and self.loadBalanceInfo.strip(): + kvp["loadBalanceInfo"] = self.loadBalanceInfo.strip() + if self.preconnectionId and isinstance(self.preconnectionId, str) and 
self.preconnectionId.strip(): + kvp["preconnectionId"] = self.preconnectionId.strip() + if self.preconnectionBlob and isinstance(self.preconnectionBlob, str) and self.preconnectionBlob.strip(): + kvp["preconnectionBlob"] = self.preconnectionBlob.strip() + + if self.disableAudio is not None and isinstance(self.disableAudio, bool): + kvp["disableAudio"] = self.disableAudio + if self.enableFullWindowDrag is not None and isinstance(self.enableFullWindowDrag, bool): + kvp["enableFullWindowDrag"] = self.enableFullWindowDrag + if self.enableWallpaper is not None and isinstance(self.enableWallpaper, bool): + kvp["enableWallpaper"] = self.enableWallpaper + + # populated on load - "resizeMethod": disable_dynamic_resizing ? "" : "display-update" + if str(self.resizeMethod) == "display-update": + kvp["resizeMethod"] = self.resizeMethod + + if isinstance(self.sftp, SFTPConnectionSettings): + sftp = self.sftp.to_dict() + if sftp: + kvp["sftp"] = sftp + + return kvp + + def to_record_json(self): + dict = self.to_record_dict() or {} + rec_json = json.dumps(dict) + return rec_json + +# field type: pamRemoteBrowserSettings +class ConnectionSettingsHTTP(BaseConnectionSettings, ClipboardConnectionSettings): + protocol = ConnectionProtocol.HTTP + def __init__( + self, + port: Optional[str] = None, # Override port from host + allowSupplyUser: Optional[bool] = None, + userRecords: Optional[List[str]] = None, + recordingIncludeKeys: Optional[bool] = None, + disableCopy: Optional[bool] = None, + disablePaste: Optional[bool] = None, + allowUrlManipulation: Optional[bool] = None, + allowedUrlPatterns: Optional[str] = None, + allowedResourceUrlPatterns: Optional[str] = None, + httpCredentials: Optional[List[str]] = None, # autofill_credentials: login|pamUser + autofillConfiguration: Optional[str] = None, + ignoreInitialSslCert: Optional[bool] = None + ): + BaseConnectionSettings.__init__(self, port, allowSupplyUser, userRecords, recordingIncludeKeys) + ClipboardConnectionSettings.__init__(self, disableCopy, disablePaste) + self.allowUrlManipulation = allowUrlManipulation + self.allowedUrlPatterns = allowedUrlPatterns + self.allowedResourceUrlPatterns = allowedResourceUrlPatterns + self.httpCredentials = httpCredentials # autofill_credentials: login|pamUser + self.autofillConfiguration = autofillConfiguration + self.ignoreInitialSslCert = ignoreInitialSslCert + self.httpCredentialsUid = None # resolved from httpCredentials + + @classmethod + def load(cls, data: Union[str, dict]): + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"Connection Settings HTTP failed to load from: {str(data)[:80]}") + if not isinstance(data, dict): return obj + + bcs = BaseConnectionSettings.load(data) + if bcs: + # obj.port = bcs.port # not yet in web UI of RBI + obj.allowSupplyUser = bcs.allowSupplyUser + obj.userRecords = bcs.userRecords + obj.recordingIncludeKeys = bcs.recordingIncludeKeys + + ccs = ClipboardConnectionSettings.load(data) + if ccs: + obj.disableCopy = ccs.disableCopy + obj.disablePaste = ccs.disablePaste + + obj.allowUrlManipulation = utils.value_to_boolean(data.get("allow_url_manipulation", None)) + obj.allowedUrlPatterns = multiline_to_str(parse_multiline(data, "allowed_url_patterns", "Error parsing allowed_url_patterns")) + obj.allowedResourceUrlPatterns = multiline_to_str(parse_multiline(data, "allowed_resource_url_patterns", "Error parsing allowed_resource_url_patterns")) + obj.httpCredentials = parse_multiline(data, "autofill_credentials", "Error parsing 
autofill_credentials") + obj.autofillConfiguration = multiline_to_str(parse_multiline(data, "autofill_targets", "Error parsing autofill_targets")) + obj.ignoreInitialSslCert = utils.value_to_boolean(data.get("ignore_server_cert", None)) + + return obj + + def to_record_dict(self): + kvp: Dict[str, Any] = { "protocol": ConnectionProtocol.HTTP.value } # pylint: disable=E1101 + + # if resolved (autofill_credentials->httpCredentialsUid) login|pamUser + recs: list = self.httpCredentialsUid if self.httpCredentialsUid and isinstance(self.httpCredentialsUid, list) else [] + uids = [x.strip() for x in recs if isinstance(x, str) and x.strip() != ""] + if uids: + kvp["httpCredentialsUid"] = uids[0] # single credential + + # port - unused for RBI + # if self.port and isinstance(self.port, str) and self.port.strip(): + # kvp["port"] = self.port.strip() + if self.allowSupplyUser is not None and isinstance(self.allowSupplyUser, bool): + kvp["allowSupplyUser"] = self.allowSupplyUser + if self.recordingIncludeKeys is not None and isinstance(self.recordingIncludeKeys, bool): + kvp["recordingIncludeKeys"] = self.recordingIncludeKeys + if self.disableCopy is not None and isinstance(self.disableCopy, bool): + kvp["disableCopy"] = self.disableCopy + if self.disablePaste is not None and isinstance(self.disablePaste, bool): + kvp["disablePaste"] = self.disablePaste + + if self.allowUrlManipulation is not None and isinstance(self.allowUrlManipulation, bool): + kvp["allowUrlManipulation"] = self.allowUrlManipulation + if self.allowedUrlPatterns and isinstance(self.allowedUrlPatterns, str) and self.allowedUrlPatterns.strip(): + kvp["allowedUrlPatterns"] = self.allowedUrlPatterns.strip() + if self.allowedResourceUrlPatterns and isinstance(self.allowedResourceUrlPatterns, str) and self.allowedResourceUrlPatterns.strip(): + kvp["allowedResourceUrlPatterns"] = self.allowedResourceUrlPatterns.strip() + if self.autofillConfiguration and isinstance(self.autofillConfiguration, str) and self.autofillConfiguration.strip(): + kvp["autofillConfiguration"] = self.autofillConfiguration.strip() + if self.ignoreInitialSslCert is not None and isinstance(self.ignoreInitialSslCert, bool): + kvp["ignoreInitialSslCert"] = self.ignoreInitialSslCert + + return kvp + + def to_record_json(self): + dict = self.to_record_dict() or {} + rec_json = json.dumps(dict) + return rec_json + +class ConnectionSettingsVNC(BaseConnectionSettings, ClipboardConnectionSettings): + protocol = ConnectionProtocol.VNC + def __init__( # pylint: disable=R0917 + self, + port: Optional[str] = None, # Override port from host + allowSupplyUser: Optional[bool] = None, + userRecords: Optional[List[str]] = None, + recordingIncludeKeys: Optional[bool] = None, + disableCopy: Optional[bool] = None, + disablePaste: Optional[bool] = None, + destHost: Optional[str] = None, + destPort: Optional[str] = None, + sftp: Optional[SFTPConnectionSettings] = None + ): + BaseConnectionSettings.__init__(self, port, allowSupplyUser, userRecords, recordingIncludeKeys) + ClipboardConnectionSettings.__init__(self, disableCopy, disablePaste) + self.destHost = destHost + self.destPort = destPort + self.sftp = sftp if isinstance(sftp, SFTPConnectionSettings) else None + + @classmethod + def load(cls, data: Union[str, dict]): + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"Connection Settings VNC failed to load from: {str(data)[:80]}") + if not isinstance(data, dict): return obj + + bcs = BaseConnectionSettings.load(data) + if bcs: + 
obj.port = bcs.port + obj.allowSupplyUser = bcs.allowSupplyUser + obj.userRecords = bcs.userRecords + obj.recordingIncludeKeys = bcs.recordingIncludeKeys + + ccs = ClipboardConnectionSettings.load(data) + if ccs: + obj.disableCopy = ccs.disableCopy + obj.disablePaste = ccs.disablePaste + + val = data.get("destination_host", None) + if isinstance(val, str): obj.destHost = val + val = data.get("destination_port", None) + if isinstance(val, str): obj.destPort = val + + sftp = data.get("sftp", None) + if isinstance(sftp, dict): obj.sftp = SFTPConnectionSettings.load(sftp) + + return obj + + def to_record_dict(self): + kvp: Dict[str, Any] = { "protocol": ConnectionProtocol.VNC.value } # pylint: disable=E1101 + + # if resolved (userRecords->userRecordUid) from administrative_credentials (usually after user create) + recs: list = self.userRecordUid if self.userRecordUid and isinstance(self.userRecordUid, list) else [] + uids = [x.strip() for x in recs if isinstance(x, str) and x.strip() != ""] + if uids: + kvp["userRecords"] = uids + + if self.port and isinstance(self.port, str) and self.port.strip(): + kvp["port"] = self.port.strip() + if self.allowSupplyUser is not None and isinstance(self.allowSupplyUser, bool): + kvp["allowSupplyUser"] = self.allowSupplyUser + if self.recordingIncludeKeys is not None and isinstance(self.recordingIncludeKeys, bool): + kvp["recordingIncludeKeys"] = self.recordingIncludeKeys + if self.disableCopy is not None and isinstance(self.disableCopy, bool): + kvp["disableCopy"] = self.disableCopy + if self.disablePaste is not None and isinstance(self.disablePaste, bool): + kvp["disablePaste"] = self.disablePaste + + if self.destHost and isinstance(self.destHost, str) and self.destHost.strip(): + kvp["destHost"] = self.destHost.strip() + if self.destPort and isinstance(self.destPort, str) and self.destPort.strip(): + kvp["destPort"] = self.destPort.strip() + + if isinstance(self.sftp, SFTPConnectionSettings): + sftp = self.sftp.to_dict() + if sftp: + kvp["sftp"] = sftp + + return kvp + + def to_record_json(self): + dict = self.to_record_dict() or {} + rec_json = json.dumps(dict) + return rec_json + +class ConnectionSettingsTelnet(BaseConnectionSettings, ClipboardConnectionSettings, TerminalDisplayConnectionSettings): + protocol = ConnectionProtocol.TELNET + def __init__( # pylint: disable=R0917 + self, + port: Optional[str] = None, # Override port from host + allowSupplyUser: Optional[bool] = None, + userRecords: Optional[List[str]] = None, + recordingIncludeKeys: Optional[bool] = None, + disableCopy: Optional[bool] = None, + disablePaste: Optional[bool] = None, + colorScheme: Optional[str] = None, + fontSize: Optional[int] = None, + usernameRegex: Optional[str] = None, + passwordRegex: Optional[str] = None, + loginSuccessRegex: Optional[str] = None, + loginFailureRegex: Optional[str] = None + ): + BaseConnectionSettings.__init__(self, port, allowSupplyUser, userRecords, recordingIncludeKeys) + ClipboardConnectionSettings.__init__(self, disableCopy, disablePaste) + TerminalDisplayConnectionSettings.__init__(self, colorScheme, fontSize) + self.usernameRegex = usernameRegex + self.passwordRegex = passwordRegex + self.loginSuccessRegex = loginSuccessRegex + self.loginFailureRegex = loginFailureRegex + + @classmethod + def load(cls, data: Union[str, dict]): + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"Connection Settings Telnet failed to load from: {str(data)[:80]}") + if not isinstance(data, dict): return obj + + 
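+        # Illustrative import JSON for a telnet connection (keys match the lookups
+        # below and in the base parsers; values are example assumptions only):
+        #   {"protocol": "telnet", "port": "23", "color_scheme": "green-black",
+        #    "font_size": 12, "username_regex": "login:", "password_regex": "Password:"}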
bcs = BaseConnectionSettings.load(data) + if bcs: + obj.port = bcs.port + obj.allowSupplyUser = bcs.allowSupplyUser + obj.userRecords = bcs.userRecords + obj.recordingIncludeKeys = bcs.recordingIncludeKeys + + ccs = ClipboardConnectionSettings.load(data) + if ccs: + obj.disableCopy = ccs.disableCopy + obj.disablePaste = ccs.disablePaste + + tcs = TerminalDisplayConnectionSettings.load(data) + if tcs: + obj.colorScheme = tcs.colorScheme + obj.fontSize = tcs.fontSize + + val = data.get("username_regex", None) + if isinstance(val, str): obj.usernameRegex = val + val = data.get("password_regex", None) + if isinstance(val, str): obj.passwordRegex = val + val = data.get("login_success_regex", None) + if isinstance(val, str): obj.loginSuccessRegex = val + val = data.get("login_failure_regex", None) + if isinstance(val, str): obj.loginFailureRegex = val + + return obj + + def to_record_dict(self): + kvp: Dict[str, Any] = { "protocol": ConnectionProtocol.TELNET.value } # pylint: disable=E1101 + + # if resolved (userRecords->userRecordUid) from administrative_credentials (usually after user create) + recs: list = self.userRecordUid if self.userRecordUid and isinstance(self.userRecordUid, list) else [] + uids = [x.strip() for x in recs if isinstance(x, str) and x.strip() != ""] + if uids: + kvp["userRecords"] = uids + + if self.port and isinstance(self.port, str) and self.port.strip(): + kvp["port"] = self.port.strip() + if self.allowSupplyUser is not None and isinstance(self.allowSupplyUser, bool): + kvp["allowSupplyUser"] = self.allowSupplyUser + if self.recordingIncludeKeys is not None and isinstance(self.recordingIncludeKeys, bool): + kvp["recordingIncludeKeys"] = self.recordingIncludeKeys + if self.disableCopy is not None and isinstance(self.disableCopy, bool): + kvp["disableCopy"] = self.disableCopy + if self.disablePaste is not None and isinstance(self.disablePaste, bool): + kvp["disablePaste"] = self.disablePaste + + if self.colorScheme and isinstance(self.colorScheme, str) and self.colorScheme.strip(): + kvp["colorScheme"] = self.colorScheme.strip() + if self.fontSize and type(self.fontSize) is int and self.fontSize > 4: + kvp["fontSize"] = str(self.fontSize) + if self.usernameRegex and isinstance(self.usernameRegex, str) and self.usernameRegex.strip(): + kvp["usernameRegex"] = self.usernameRegex.strip() + if self.passwordRegex and isinstance(self.passwordRegex, str) and self.passwordRegex.strip(): + kvp["passwordRegex"] = self.passwordRegex.strip() + if self.loginSuccessRegex and isinstance(self.loginSuccessRegex, str) and self.loginSuccessRegex.strip(): + kvp["loginSuccessRegex"] = self.loginSuccessRegex.strip() + if self.loginFailureRegex and isinstance(self.loginFailureRegex, str) and self.loginFailureRegex.strip(): + kvp["loginFailureRegex"] = self.loginFailureRegex.strip() + + return kvp + + def to_record_json(self): + dict = self.to_record_dict() or {} + rec_json = json.dumps(dict) + return rec_json + +class ConnectionSettingsSSH(BaseConnectionSettings, ClipboardConnectionSettings, TerminalDisplayConnectionSettings): + protocol = ConnectionProtocol.SSH + def __init__( # pylint: disable=R0917 + self, + port: Optional[str] = None, # Override port from host + allowSupplyUser: Optional[bool] = None, + userRecords: Optional[List[str]] = None, + recordingIncludeKeys: Optional[bool] = None, + disableCopy: Optional[bool] = None, + disablePaste: Optional[bool] = None, + colorScheme: Optional[str] = None, + fontSize: Optional[int] = None, + hostKey: Optional[str] = None, + command: 
Optional[str] = None, + sftp: Optional[SFTPRootDirectorySettings] = None + ): + BaseConnectionSettings.__init__(self, port, allowSupplyUser, userRecords, recordingIncludeKeys) + ClipboardConnectionSettings.__init__(self, disableCopy, disablePaste) + TerminalDisplayConnectionSettings.__init__(self, colorScheme, fontSize) + self.hostKey = hostKey + self.command = command + self.sftp = sftp if isinstance(sftp, SFTPRootDirectorySettings) else None + + @classmethod + def load(cls, data: Union[str, dict]): + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"Connection Settings SSH failed to load from: {str(data)[:80]}") + if not isinstance(data, dict): return obj + + bcs = BaseConnectionSettings.load(data) + if bcs: + obj.port = bcs.port + obj.allowSupplyUser = bcs.allowSupplyUser + obj.userRecords = bcs.userRecords + obj.recordingIncludeKeys = bcs.recordingIncludeKeys + + ccs = ClipboardConnectionSettings.load(data) + if ccs: + obj.disableCopy = ccs.disableCopy + obj.disablePaste = ccs.disablePaste + + tcs = TerminalDisplayConnectionSettings.load(data) + if tcs: + obj.colorScheme = tcs.colorScheme + obj.fontSize = tcs.fontSize + + val = data.get("public_host_key", None) + if isinstance(val, str): obj.hostKey = val + val = data.get("command", None) + if isinstance(val, str): obj.command = val + sftp = data.get("sftp", None) + if isinstance(sftp, dict): obj.sftp = SFTPRootDirectorySettings.load(sftp) + + return obj + + def to_record_dict(self): + kvp: Dict[str, Any] = { "protocol": ConnectionProtocol.SSH.value } # pylint: disable=E1101 + + # if resolved (userRecords->userRecordUid) from administrative_credentials (usually after user create) + recs: list = self.userRecordUid if self.userRecordUid and isinstance(self.userRecordUid, list) else [] + uids = [x.strip() for x in recs if isinstance(x, str) and x.strip() != ""] + if uids: + kvp["userRecords"] = uids + + if self.port and isinstance(self.port, str) and self.port.strip(): + kvp["port"] = self.port.strip() + if self.allowSupplyUser is not None and isinstance(self.allowSupplyUser, bool): + kvp["allowSupplyUser"] = self.allowSupplyUser + if self.recordingIncludeKeys is not None and isinstance(self.recordingIncludeKeys, bool): + kvp["recordingIncludeKeys"] = self.recordingIncludeKeys + if self.disableCopy is not None and isinstance(self.disableCopy, bool): + kvp["disableCopy"] = self.disableCopy + if self.disablePaste is not None and isinstance(self.disablePaste, bool): + kvp["disablePaste"] = self.disablePaste + + if self.colorScheme and isinstance(self.colorScheme, str) and self.colorScheme.strip(): + kvp["colorScheme"] = self.colorScheme.strip() + if self.fontSize and type(self.fontSize) is int and self.fontSize > 4: + kvp["fontSize"] = str(self.fontSize) + if self.hostKey and isinstance(self.hostKey, str) and self.hostKey.strip(): + kvp["hostKey"] = self.hostKey.strip() + if self.command and isinstance(self.command, str) and self.command.strip(): + kvp["command"] = self.command.strip() + + if isinstance(self.sftp, SFTPRootDirectorySettings): + srds = self.sftp.to_dict() + if srds: + kvp["sftp"] = srds + + return kvp + + def to_record_json(self): + dict = self.to_record_dict() or {} + rec_json = json.dumps(dict) + return rec_json + +class ConnectionSettingsKubernetes(BaseConnectionSettings, TerminalDisplayConnectionSettings): + protocol = ConnectionProtocol.KUBERNETES + def __init__( # pylint: disable=R0917 + self, + port: Optional[str] = None, # Override port from host + allowSupplyUser: 
Optional[bool] = None, + userRecords: Optional[List[str]] = None, + recordingIncludeKeys: Optional[bool] = None, + colorScheme: Optional[str] = None, + fontSize: Optional[int] = None, + ignoreCert: Optional[bool] = None, + caCert: Optional[str] = None, + namespace: Optional[str] = None, + pod: Optional[str] = None, + container: Optional[str] = None, + clientCert: Optional[str] = None, + clientKey: Optional[str] = None + ): + BaseConnectionSettings.__init__(self, port, allowSupplyUser, userRecords, recordingIncludeKeys) + TerminalDisplayConnectionSettings.__init__(self, colorScheme, fontSize) + self.ignoreCert = ignoreCert + self.caCert = caCert + self.namespace = namespace + self.pod = pod + self.container = container + self.clientCert = clientCert + self.clientKey = clientKey + + @classmethod + def load(cls, data: Union[str, dict]): + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"Connection Settings K8S failed to load from: {str(data)[:80]}") + if not isinstance(data, dict): return obj + + bcs = BaseConnectionSettings.load(data) + if bcs: + obj.port = bcs.port + obj.allowSupplyUser = bcs.allowSupplyUser + obj.userRecords = bcs.userRecords + obj.recordingIncludeKeys = bcs.recordingIncludeKeys + + tcs = TerminalDisplayConnectionSettings.load(data) + if tcs: + obj.colorScheme = tcs.colorScheme + obj.fontSize = tcs.fontSize + + val = data.get("namespace", None) + if isinstance(val, str): obj.namespace = val + val = data.get("pod_name", None) + if isinstance(val, str): obj.pod = val + val = data.get("container", None) + if isinstance(val, str): obj.container = val + obj.ignoreCert = utils.value_to_boolean(data.get("ignore_server_cert", None)) + obj.caCert = multiline_to_str(parse_multiline(data, "ca_certificate", "Error parsing ca_certificate")) + obj.clientCert = multiline_to_str(parse_multiline(data, "client_certificate", "Error parsing client_certificate")) + obj.clientKey = multiline_to_str(parse_multiline(data, "client_key", "Error parsing client_key")) + + return obj + + def to_record_dict(self): + kvp: Dict[str, Any] = { "protocol": ConnectionProtocol.KUBERNETES.value } # pylint: disable=E1101 + + # if resolved (userRecords->userRecordUid) from administrative_credentials (usually after user create) + recs: list = self.userRecordUid if self.userRecordUid and isinstance(self.userRecordUid, list) else [] + uids = [x.strip() for x in recs if isinstance(x, str) and x.strip() != ""] + if uids: + kvp["userRecords"] = uids + + if self.port and isinstance(self.port, str) and self.port.strip(): + kvp["port"] = self.port.strip() + if self.allowSupplyUser is not None and isinstance(self.allowSupplyUser, bool): + kvp["allowSupplyUser"] = self.allowSupplyUser + if self.recordingIncludeKeys is not None and isinstance(self.recordingIncludeKeys, bool): + kvp["recordingIncludeKeys"] = self.recordingIncludeKeys + if self.colorScheme and isinstance(self.colorScheme, str) and self.colorScheme.strip(): + kvp["colorScheme"] = self.colorScheme.strip() + if self.fontSize and type(self.fontSize) is int and self.fontSize > 4: + kvp["fontSize"] = str(self.fontSize) + if self.namespace and isinstance(self.namespace, str) and self.namespace.strip(): + kvp["namespace"] = self.namespace.strip() + if self.pod and isinstance(self.pod, str) and self.pod.strip(): + kvp["pod"] = self.pod.strip() + + if self.container and isinstance(self.container, str) and self.container.strip(): + kvp["container"] = self.container.strip() + if self.ignoreCert is not None and 
isinstance(self.ignoreCert, bool): + kvp["ignoreCert"] = self.ignoreCert + if self.caCert and isinstance(self.caCert, str) and self.caCert.strip(): + kvp["caCert"] = self.caCert.strip() + if self.clientCert and isinstance(self.clientCert, str) and self.clientCert.strip(): + kvp["clientCert"] = self.clientCert.strip() + if self.clientKey and isinstance(self.clientKey, str) and self.clientKey.strip(): + kvp["clientKey"] = self.clientKey.strip() + + return kvp + + def to_record_json(self): + dict = self.to_record_dict() or {} + rec_json = json.dumps(dict) + return rec_json + +class BaseDatabaseConnectionSettings(BaseConnectionSettings, ClipboardConnectionSettings): + def __init__( # pylint: disable=R0917 + self, + port: Optional[str] = None, # Override port from host + allowSupplyUser: Optional[bool] = None, + userRecords: Optional[List[str]] = None, + recordingIncludeKeys: Optional[bool] = None, + disableCopy: Optional[bool] = None, + disablePaste: Optional[bool] = None, + database: Optional[str] = None, + disableCsvExport: Optional[bool] = None, + disableCsvImport: Optional[bool] = None + ): + BaseConnectionSettings.__init__(self, port, allowSupplyUser, userRecords, recordingIncludeKeys) + ClipboardConnectionSettings.__init__(self, disableCopy, disablePaste) + self.database = database + self.disableCsvExport = disableCsvExport + self.disableCsvImport = disableCsvImport + + @classmethod + def load(cls, data: Union[str, dict]): + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"Database Connection Settings failed to load from: {str(data)[:80]}") + if not isinstance(data, dict): return obj + + bcs = BaseConnectionSettings.load(data) + if bcs: + obj.port = bcs.port + obj.allowSupplyUser = bcs.allowSupplyUser + obj.userRecords = bcs.userRecords + obj.recordingIncludeKeys = bcs.recordingIncludeKeys + + ccs = ClipboardConnectionSettings.load(data) + if ccs: + obj.disableCopy = ccs.disableCopy + obj.disablePaste = ccs.disablePaste + + val = data.get("default_database", None) + if isinstance(val, str): obj.database = val + obj.disableCsvExport = utils.value_to_boolean(data.get("disable_csv_export", None)) + obj.disableCsvImport = utils.value_to_boolean(data.get("disable_csv_import", None)) + + return obj + + def to_record_dict(self): + kvp: Dict[str, Any] = {} + + # if resolved (userRecords->userRecordUid) from administrative_credentials (usually after user create) + recs: list = self.userRecordUid if self.userRecordUid and isinstance(self.userRecordUid, list) else [] + uids = [x.strip() for x in recs if isinstance(x, str) and x.strip() != ""] + if uids: + kvp["userRecords"] = uids + + if self.port and isinstance(self.port, str) and self.port.strip(): + kvp["port"] = self.port.strip() + if self.allowSupplyUser is not None and isinstance(self.allowSupplyUser, bool): + kvp["allowSupplyUser"] = self.allowSupplyUser + if self.recordingIncludeKeys is not None and isinstance(self.recordingIncludeKeys, bool): + kvp["recordingIncludeKeys"] = self.recordingIncludeKeys + if self.disableCopy is not None and isinstance(self.disableCopy, bool): + kvp["disableCopy"] = self.disableCopy + if self.disablePaste is not None and isinstance(self.disablePaste, bool): + kvp["disablePaste"] = self.disablePaste + if self.disableCsvExport is not None and isinstance(self.disableCsvExport, bool): + kvp["disableCsvExport"] = self.disableCsvExport + if self.disableCsvImport is not None and isinstance(self.disableCsvImport, bool): + kvp["disableCsvImport"] = 
self.disableCsvImport + if self.database and isinstance(self.database, str) and self.database.strip(): + kvp["database"] = self.database.strip() + + return kvp + + def to_record_json(self): + dict = self.to_record_dict() or {} + rec_json = json.dumps(dict) + return rec_json + +class ConnectionSettingsSqlServer(BaseDatabaseConnectionSettings): + protocol = ConnectionProtocol.SQLSERVER + def __init__( # pylint: disable=W0246 + self, + port: Optional[str] = None, # Override port from host + allowSupplyUser: Optional[bool] = None, + userRecords: Optional[List[str]] = None, + recordingIncludeKeys: Optional[bool] = None, + disableCopy: Optional[bool] = None, + disablePaste: Optional[bool] = None, + database: Optional[str] = None, + disableCsvExport: Optional[bool] = None, + disableCsvImport: Optional[bool] = None + ): + super().__init__(port, allowSupplyUser, userRecords, recordingIncludeKeys, + disableCopy, disablePaste, database, + disableCsvExport, disableCsvImport) + + @classmethod + def load(cls, data: Union[str, dict]): + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"SQLServer Connection Settings failed to load from: {str(data)[:80]}") + if not isinstance(data, dict): return obj + + bdcs = BaseDatabaseConnectionSettings.load(data) + if bdcs: + obj.port = bdcs.port + obj.allowSupplyUser = bdcs.allowSupplyUser + obj.userRecords = bdcs.userRecords + obj.recordingIncludeKeys = bdcs.recordingIncludeKeys + obj.disableCopy = bdcs.disableCopy + obj.disablePaste = bdcs.disablePaste + obj.database = bdcs.database + obj.disableCsvExport = bdcs.disableCsvExport + obj.disableCsvImport = bdcs.disableCsvImport + + return obj + + def to_record_dict(self): + dict = super().to_record_dict() + dict["protocol"] = ConnectionProtocol.SQLSERVER.value # pylint: disable=E1101 + return dict + +class ConnectionSettingsPostgreSQL(BaseDatabaseConnectionSettings): + protocol = ConnectionProtocol.POSTGRESQL + def __init__( # pylint: disable=W0246,R0917 + self, + port: Optional[str] = None, # Override port from host + allowSupplyUser: Optional[bool] = None, + userRecords: Optional[List[str]] = None, + recordingIncludeKeys: Optional[bool] = None, + disableCopy: Optional[bool] = None, + disablePaste: Optional[bool] = None, + database: Optional[str] = None, + disableCsvExport: Optional[bool] = None, + disableCsvImport: Optional[bool] = None + ): + super().__init__(port, allowSupplyUser, userRecords, recordingIncludeKeys, + disableCopy, disablePaste, database, + disableCsvExport, disableCsvImport) + + @classmethod + def load(cls, data: Union[str, dict]): + obj = cls() + try: data = json.loads(data) if isinstance(data, str) else data + except: logging.error(f"PostgreSQL Connection Settings failed to load from: {str(data)[:80]}") + if not isinstance(data, dict): return obj + + bdcs = BaseDatabaseConnectionSettings.load(data) + if bdcs: + obj.port = bdcs.port + obj.allowSupplyUser = bdcs.allowSupplyUser + obj.userRecords = bdcs.userRecords + obj.recordingIncludeKeys = bdcs.recordingIncludeKeys + obj.disableCopy = bdcs.disableCopy + obj.disablePaste = bdcs.disablePaste + obj.database = bdcs.database + obj.disableCsvExport = bdcs.disableCsvExport + obj.disableCsvImport = bdcs.disableCsvImport + + return obj + + def to_record_dict(self): + dict = super().to_record_dict() + dict["protocol"] = ConnectionProtocol.POSTGRESQL.value # pylint: disable=E1101 + return dict + +class ConnectionSettingsMySQL(BaseDatabaseConnectionSettings): + protocol = ConnectionProtocol.MYSQL + def 
__init__( # pylint: disable=W0246,R0917
+        self,
+        port: Optional[str] = None, # Override port from host
+        allowSupplyUser: Optional[bool] = None,
+        userRecords: Optional[List[str]] = None,
+        recordingIncludeKeys: Optional[bool] = None,
+        disableCopy: Optional[bool] = None,
+        disablePaste: Optional[bool] = None,
+        database: Optional[str] = None,
+        disableCsvExport: Optional[bool] = None,
+        disableCsvImport: Optional[bool] = None
+    ):
+        super().__init__(port, allowSupplyUser, userRecords, recordingIncludeKeys,
+                         disableCopy, disablePaste, database,
+                         disableCsvExport, disableCsvImport)
+
+    @classmethod
+    def load(cls, data: Union[str, dict]):
+        obj = cls()
+        try: data = json.loads(data) if isinstance(data, str) else data
+        except: logging.error(f"MySQL Connection Settings failed to load from: {str(data)[:80]}")
+        if not isinstance(data, dict): return obj
+
+        bdcs = BaseDatabaseConnectionSettings.load(data)
+        if bdcs:
+            obj.port = bdcs.port
+            obj.allowSupplyUser = bdcs.allowSupplyUser
+            obj.userRecords = bdcs.userRecords
+            obj.recordingIncludeKeys = bdcs.recordingIncludeKeys
+            obj.disableCopy = bdcs.disableCopy
+            obj.disablePaste = bdcs.disablePaste
+            obj.database = bdcs.database
+            obj.disableCsvExport = bdcs.disableCsvExport
+            obj.disableCsvImport = bdcs.disableCsvImport
+
+        return obj
+
+    def to_record_dict(self):
+        dict = super().to_record_dict()
+        dict["protocol"] = ConnectionProtocol.MYSQL.value # pylint: disable=E1101
+        return dict
+
+PamConnectionSettings = Optional[
+    Union[
+        ConnectionSettingsRDP,
+        ConnectionSettingsVNC,
+        ConnectionSettingsTelnet,
+        ConnectionSettingsSSH,
+        ConnectionSettingsKubernetes,
+        ConnectionSettingsSqlServer,
+        ConnectionSettingsPostgreSQL,
+        ConnectionSettingsMySQL
+    ]
+]
+
+class PamPortForwardSettings:
+    def __init__(self, port: Optional[str] = None, reusePort: Optional[bool] = None):
+        self.port = port # Override port from host
+        self.reusePort = reusePort # Attempt to reuse the last connected port if available
+
+    @classmethod
+    def load(cls, data: Union[str, dict]):
+        obj = cls()
+        try: data = json.loads(data) if isinstance(data, str) else data
+        except: logging.error(f"Port Forward Settings failed to load from: {str(data)[:80]}")
+        if not isinstance(data, dict): return obj
+
+        val = data.get("port", None)
+        # accept numeric ports too - coerce to str to match the checks in to_record_dict
+        if isinstance(val, str) or str(val).isdecimal(): obj.port = str(val)
+        obj.reusePort = utils.value_to_boolean(data.get("reuse_port", None))
+        return obj
+
+    def to_record_dict(self):
+        dict = {}
+        if self.port and isinstance(self.port, str) and self.port.strip():
+            dict["port"] = self.port.strip()
+        if self.reusePort is not None and isinstance(self.reusePort, bool):
+            dict["reusePort"] = self.reusePort
+        return dict
+
+    def to_record_json(self):
+        dict = self.to_record_dict() or {}
+        rec_json = json.dumps(dict)
+        return rec_json
+
+class PamRemoteBrowserSettings:
+    def __init__(
+        self,
+        options: Optional[DagSettingsObject] = None,
+        connection: Optional[ConnectionSettingsHTTP] = None
+    ):
+        self.options = options
+        self.connection = connection
+
+    @classmethod
+    def load(cls, data: Optional[Union[str, dict]]):
+        obj = cls()
+        try: data = json.loads(data) if isinstance(data, str) else data
+        except: logging.error(f"PAM RBI Settings field failed to load from: {str(data)[:80]}...")
+        if not isinstance(data, dict): return obj
+
+        options = DagSettingsObject.load(data.get("options", {}))
+        if not is_empty_instance(options):
+            obj.options = options
+
+        cdata = data.get("connection", {})
+        # TO DO: if isinstance(cdata, str): lookup_by_name(pam_data.connections)
+        if not isinstance(cdata, dict):
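+            # a valid "connection" value is a JSON object (illustrative, assumed shape):
+            #   {"protocol": "http", "allow_url_manipulation": true, "autofill_credentials": "My Login"}
+            # anything else (e.g. a bare string name) only triggers the warning below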
logging.warning(f"""PAM RBI Settings: Connection must be a JSON object - skipping... "{str(cdata)[:24]}" """) + if cdata and isinstance(cdata, dict): + proto = cdata.get("protocol", "") + if proto and isinstance(proto, str): + if proto.lower() == "http": + conn = ConnectionSettingsHTTP.load(cdata) + if not is_empty_instance(conn): + obj.connection = conn + else: + logging.warning(f"""Connection skipped: unknown protocol "{str(proto)[:24]}" """) + + if not obj.connection and cdata and isinstance(cdata, dict): + logging.error(f"PAM RBI Settings failed to load from: {str(cdata)[:80]}...") + + return obj + +class PamSettingsFieldData: + def __init__( + self, + allowSupplyHost: Optional[bool] = None, + connection: PamConnectionSettings = None, # Optional[PamConnectionSettings] + portForward: Optional[PamPortForwardSettings] = None, + options: Optional[DagSettingsObject] = None, + jit_settings: Optional[DagJitSettingsObject] = None, + ai_settings: Optional[DagAiSettingsObject] = None, + ): + self.allowSupplyHost = allowSupplyHost + self.connection = connection + self.portForward = portForward + self.options = options + self.jit_settings = jit_settings + self.ai_settings = ai_settings + + # PamConnectionSettings excludes ConnectionSettingsHTTP + pam_connection_classes = [ + ConnectionSettingsRDP, + ConnectionSettingsVNC, + ConnectionSettingsTelnet, + ConnectionSettingsSSH, + ConnectionSettingsKubernetes, + ConnectionSettingsSqlServer, + ConnectionSettingsPostgreSQL, + ConnectionSettingsMySQL + ] + + @classmethod + def get_connection_class(cls, cdata: dict): + if cdata and isinstance(cdata, dict): + proto = cdata.get("protocol", "") + if proto and isinstance(proto, str): + proto = proto.lower() + for con in cls.pam_connection_classes: + pr = getattr(con, "protocol", "") + if isinstance(pr, ConnectionProtocol) and pr.value.lower() == proto: # pylint: disable=E1101 + return con.load(cdata) + logging.warning(f"""Connection skipped: unknown protocol "{str(proto)[:24]}" """) + return None + + def is_empty(self): + empty = is_empty_instance(self.options) + empty = empty and is_empty_instance(self.portForward) + empty = empty and is_empty_instance(self.connection, ["protocol"]) + # NB! 
JIT and AI settings are in import json but not in record json (just DAG json)
+        empty = empty and self.jit_settings is None and self.ai_settings is None
+        return empty
+
+    @classmethod
+    def load(cls, data: Union[str, dict]):
+        obj = cls()
+        try: data = json.loads(data) if isinstance(data, str) else data
+        except: logging.error(f"PAM Settings Field failed to load from: {str(data)[:80]}...")
+        if not isinstance(data, dict): return obj
+
+        obj.allowSupplyHost = utils.value_to_boolean(data.get("allow_supply_host", None))
+        options_dict = data.get("options", {}) or {}
+        options = DagSettingsObject.load(options_dict)
+        if not is_empty_instance(options):
+            obj.options = options
+        if isinstance(options_dict, dict):
+            jit_value = options_dict.get("jit_settings", None)
+            if jit_value is not None:
+                jit_settings = DagJitSettingsObject.load(jit_value)
+                if jit_settings:
+                    obj.jit_settings = jit_settings
+            ai_value = options_dict.get("ai_settings", None)
+            if ai_value is not None:
+                ai_settings = DagAiSettingsObject.load(ai_value)
+                if ai_settings:
+                    obj.ai_settings = ai_settings
+
+        portForward = PamPortForwardSettings.load(data.get("port_forward", {}))
+        if not is_empty_instance(portForward):
+            obj.portForward = portForward
+
+        cdata = data.get("connection", {})
+        # TO DO: if isinstance(cdata, str): lookup_by_name(pam_data.connections)
+        if not isinstance(cdata, dict):
+            logging.warning(f"""PAM Settings: Connection must be a JSON object - skipping... "{str(cdata)[:24]}" """)
+        obj.connection = cls.get_connection_class(cdata)
+        if not obj.connection and cdata and isinstance(cdata, dict):
+            logging.error(f"PAM Settings failed to load from: {str(cdata)[:80]}...")
+
+        return obj
+
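+# Illustrative sketch of the import JSON shape PamSettingsFieldData.load() above accepts
+# (key names come from the parsers in this module; the values are made-up examples):
+#   "pam_settings": {
+#       "allow_supply_host": true,
+#       "options": {"rotation": "on", "tunneling": "off",
+#                   "jit_settings": {...}, "ai_settings": {...}},
+#       "port_forward": {"port": "2222", "reuse_port": true},
+#       "connection": {"protocol": "ssh"}
+#   }
+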
+def is_empty_instance(obj, skiplist: Optional[List[str]] = None):
+    """ Checks if all attributes (not on skiplist) are None """
+    if not obj: return True
+    if not isinstance(skiplist, list): skiplist = []
+    for attr, value in vars(obj).items():
+        if not (attr in skiplist or value is None):
+            return False
+    return True
+
+def is_blank_instance(obj, skiplist: Optional[List[str]] = None):
+    """ Checks if all attributes (not on skiplist) are None or empty """
+    if not obj: return True
+    if not isinstance(skiplist, list): skiplist = []
+    for attr, value in vars(obj).items():
+        if not (attr in skiplist or not value):
+            return False
+    return True
+
+def get_sftp_attribute(obj, name: str) -> str:
+    # Get one of pam_settings.connection.sftp.{sftpResource,sftpResourceUid,sftpUser,sftpUserUid}
+    value: str = ""
+    if (name and obj and
+            hasattr(obj, "pam_settings") and
+            hasattr(obj.pam_settings, "connection") and
+            hasattr(obj.pam_settings.connection, "sftp")):
+        if name == "sftpResource" and hasattr(obj.pam_settings.connection.sftp, "sftpResource"):
+            value = obj.pam_settings.connection.sftp.sftpResource or ""
+        elif name == "sftpResourceUid" and hasattr(obj.pam_settings.connection.sftp, "sftpResourceUid"):
+            value = obj.pam_settings.connection.sftp.sftpResourceUid or ""
+        elif name == "sftpUser" and hasattr(obj.pam_settings.connection.sftp, "sftpUser"):
+            value = obj.pam_settings.connection.sftp.sftpUser or ""
+        elif name == "sftpUserUid" and hasattr(obj.pam_settings.connection.sftp, "sftpUserUid"):
+            value = obj.pam_settings.connection.sftp.sftpUserUid or ""
+        else:
+            logging.debug(f"""Unknown sftp attribute "{name}" (skipped)""")
+    value = value[0] if isinstance(value, list) else value
+    value = value if isinstance(value, str) else ""
+    return value
+
+def set_sftp_uid(obj, name: str, uid: str) -> bool:
+    if not (obj and name):
+        return False
+    if not (uid and isinstance(uid, str) and RecordV3.is_valid_ref_uid(uid)):
+        logging.debug(f"""Invalid sftp UID "{uid}" (skipped)""")
+        return False
+    if (hasattr(obj, "pam_settings") and
+            hasattr(obj.pam_settings, "connection") and
+            hasattr(obj.pam_settings.connection, "sftp")):
+        if name == "sftpResourceUid" and hasattr(obj.pam_settings.connection.sftp, "sftpResourceUid"):
+            obj.pam_settings.connection.sftp.sftpResourceUid = uid
+            return True
+        elif name == "sftpUserUid" and hasattr(obj.pam_settings.connection.sftp, "sftpUserUid"):
+            obj.pam_settings.connection.sftp.sftpUserUid = uid
+            return True
+        else:
+            logging.debug(f"""Unknown sftp UID attribute "{name}" (skipped)""")
+    return False
+
+def is_admin_external(mach) -> bool:
+    res = False
+    if (mach and hasattr(mach, "is_admin_external") and mach.is_admin_external is True):
+        res = True
+    return res
+
+def get_admin_credential(obj, uid: bool = False) -> str:
+    # Get one of pam_settings.connection.{userRecords,userRecordUid}
+    value: str = ""
+    if (obj and hasattr(obj, "pam_settings") and
+            hasattr(obj.pam_settings, "connection") and
+            ((uid and hasattr(obj.pam_settings.connection, "userRecordUid")) or
+             (not uid and hasattr(obj.pam_settings.connection, "userRecords")))):
+        if uid and obj.pam_settings.connection.userRecordUid:
+            value = obj.pam_settings.connection.userRecordUid
+        elif not uid and obj.pam_settings.connection.userRecords:
+            value = obj.pam_settings.connection.userRecords
+    value = value[0] if isinstance(value, list) else value
+    value = value if isinstance(value, str) else ""
+    return value
+
+def set_user_record_uid(obj, uid: str, is_external: bool = False) -> bool:
+    if not (uid and isinstance(uid, str) and RecordV3.is_valid_ref_uid(uid)):
+        logging.debug(f"""Invalid userRecordUid "{uid}" (skipped)""")
+        return False
+
+    if (uid and obj and hasattr(obj, "pam_settings") and
+            hasattr(obj.pam_settings, "connection") and
+            hasattr(obj.pam_settings.connection, "userRecordUid")):
+        obj.pam_settings.connection.userRecordUid = uid
+        if is_external is True:
+            if hasattr(obj, "is_admin_external"):
+                obj.is_admin_external = True
+            if hasattr(obj, "administrative_credentials_uid"):
+                obj.administrative_credentials_uid = uid
+        return True
+    else:
+        logging.debug("""Object has no attribute "userRecordUid" (skipped)""")
+        return False
+
+def find_external_user(mach, machines, title: str) -> list:
+    # Local pamMachine could reference pamDirectory AD user as its admin
+    res = []
+    if title and machines and mach.type == "pamMachine":
+        mu = title.split(".", 1)  # machine/user titles
+        mname = mu[0] if len(mu) > 1 else ""
+        uname = mu[1] if len(mu) > 1 else mu[0]
+        for m in machines:
+            if m.type == "pamDirectory" and (not mname or mname == m.title):
+                res.extend(search_machine(m, uname) or [])
+    return res
+
+def find_user(mach, users, title: str) -> list:
+    if not isinstance(mach, list):
+        res = search_machine(mach, title) or search_users(users, title)
+    else:
+        res = search_users(users, title)
+        for m in mach:
+            res = res or search_machine(m, title)
+            if res: break
+    return res or []
+
+def search_users(users, user: str) -> list:
+    res = []
+    if isinstance(users, list):
+        res = [x for x in users if getattr(x, "title", None) == user]
+        res = res or [x for x in users if getattr(x, "login", None) == user]
+    return res
+
+def search_machine(mach, user: str) -> list:
+    if mach and hasattr(mach, "users") and isinstance(mach.users, list):
+        return search_users(mach.users, user)
+    return []
+
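+# Illustrative sketch (uid "abc123" is made up) of the two shapes parse_command_options()
+# below can emit for an object whose options set rotation="on" and tunneling="off":
+#   parse_command_options(obj, enable=True)
+#     -> {"record": "abc123", "enable_rotation": True, "disable_tunneling": True}
+#   parse_command_options(obj, enable=False)
+#     -> {"resource_uid": "abc123", "rotation": True, "tunneling": False}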
+def parse_command_options(obj, enable: bool) -> dict:
+    # Parse command options from DagSettingsObject (pam_resource - skipped/external)
+    args = {}
+    if not obj: return args
+    choices = {"on": True, "off": False}
+    record_key = "record" if enable else "resource_uid"
+    args[record_key] = obj.uid
+    opts = None
+    if isinstance(obj, PamRemoteBrowserObject):
+        opts = obj.rbi_settings.options if obj.rbi_settings and obj.rbi_settings.options else None
+    elif isinstance(obj, PamUserObject):
+        logging.warning("Trying to parse DAG settings from PAM User (skipped)")  # PamUserObject.rotation_settings are different
+    elif not isinstance(obj, LoginUserObject):
+        opts = obj.pam_settings.options if obj.pam_settings and obj.pam_settings.options else None
+    if opts:
+        if enable:  # PAMTunnelEditCommand.execute format enable_rotation=True/disable_rotation=True
+            val = opts.rotation.value if opts.rotation else ""
+            key = "enable_rotation" if val == "on" else "disable_rotation" if val == "off" else None
+            if key is not None: args[key] = True
+            val = opts.connections.value if opts.connections else ""
+            key = "enable_connections" if val == "on" else "disable_connections" if val == "off" else None
+            if key is not None: args[key] = True
+            val = opts.tunneling.value if opts.tunneling else ""
+            key = "enable_tunneling" if val == "on" else "disable_tunneling" if val == "off" else None
+            if key is not None: args[key] = True
+            val = opts.text_session_recording.value if opts.text_session_recording else ""
+            key = "enable_typescript_recording" if val == "on" else "disable_typescript_recording" if val == "off" else None
+            if key is not None:
+                args[key] = True
+                args[key.replace("_typescript_", "_typescripts_")] = True  # legacy compat.
+            val = opts.graphical_session_recording.value if opts.graphical_session_recording else ""
+            key = "enable_connections_recording" if val == "on" else "disable_connections_recording" if val == "off" else None
+            if key is not None: args[key] = True
+            val = opts.remote_browser_isolation.value if opts.remote_browser_isolation else ""
+            key = "enable_remote_browser_isolation" if val == "on" else "disable_remote_browser_isolation" if val == "off" else None
+            if key is not None: args[key] = True
+            # AI and JIT settings don't apply to RBI records
+            if not isinstance(obj, PamRemoteBrowserObject):
+                val = opts.ai_threat_detection.value if opts.ai_threat_detection else ""
+                key = "enable_ai_threat_detection" if val == "on" else "disable_ai_threat_detection" if val == "off" else None
+                if key is not None: args[key] = True
+                val = opts.ai_terminate_session_on_detection.value if opts.ai_terminate_session_on_detection else ""
+                key = "enable_ai_terminate_session_on_detection" if val == "on" else "disable_ai_terminate_session_on_detection" if val == "off" else None
+                if key is not None: args[key] = True
+        else:  # TunnelDAG.set_resource_allowed format rotation=True/False
+            if opts.rotation and opts.rotation.value in ("on", "off"):
+                args["rotation"] = choices[opts.rotation.value]
+            if opts.connections and opts.connections.value in ("on", "off"):
+                args["connections"] = choices[opts.connections.value]
+            if opts.tunneling and opts.tunneling.value in ("on", "off"):
+                args["tunneling"] = choices[opts.tunneling.value]
+            if opts.text_session_recording and opts.text_session_recording.value in ("on", "off"):
+                # args["typescriptrecording"] = choices[opts.text_session_recording.value]
+                args["typescript_recording"] = choices[opts.text_session_recording.value]
+            if opts.graphical_session_recording and opts.graphical_session_recording.value in ("on", "off"):
"off"): + # args["recording"] = choices[opts.graphical_session_recording.value] + args["session_recording"] = choices[opts.graphical_session_recording.value] + if opts.remote_browser_isolation and opts.remote_browser_isolation.value in ("on", "off"): + args["remote_browser_isolation"] = choices[opts.remote_browser_isolation.value] + # AI and JIT settings don't apply to RBI records + if not isinstance(obj, PamRemoteBrowserObject): + if opts.ai_threat_detection and opts.ai_threat_detection.value in ("on", "off"): + args["ai_enabled"] = choices[opts.ai_threat_detection.value] + if opts.ai_terminate_session_on_detection and opts.ai_terminate_session_on_detection.value in ("on", "off"): + args["ai_session_terminate"] = choices[opts.ai_terminate_session_on_detection.value] + + return args + +def resolve_domain_admin(pce, users): + if not(users and isinstance(users, list)): + return + if (pce and hasattr(pce, "dom_administrative_credential") and pce.dom_administrative_credential and + hasattr(pce, "admin_credential_ref")): + dac = pce.dom_administrative_credential + res = {"titles": set(), "logins": set()} + for obj in users: + uid = getattr(obj, "uid", "") or "" + title = getattr(obj, "title", "") or "" + login = getattr(obj, "login", "") or "" + if not uid: # cannot resolve script credential to an empty UID + logging.debug(f"""Unable to resolve domain admin creds from rec without UID - "{title}:{login}" (skipped)""") + continue + if title and title == dac: + res["titles"].add(uid) + elif login and login == dac: + res["logins"].add(uid) + num_unique_uids = len(res["titles"] | res["logins"]) + if num_unique_uids != 1: + logging.debug(f"{num_unique_uids} matches while resolving domain admin creds for '{dac}' ") + if res["titles"]: + pce.admin_credential_ref = next(iter(res["titles"])) + elif res["logins"]: + pce.admin_credential_ref = next(iter(res["logins"])) + if pce.admin_credential_ref: + logging.debug(f"Domain admin credential '{dac}' resolved to '{pce.admin_credential_ref}' ") + +def resolve_script_creds(rec, users, resources): + creds = set() + if (rec and hasattr(rec, "scripts") and rec.scripts and + hasattr(rec.scripts, "scripts") and rec.scripts.scripts): + creds = set(chain.from_iterable( + (x.additional_credentials for x in rec.scripts.scripts if x.additional_credentials)) + ) + if not creds: # nothing to resolve + return + res = {x: {"titles":[], "logins":[]} for x in creds} + for obj in chain(users, resources): + uid = getattr(obj, "uid", "") or "" + title = getattr(obj, "title", "") or "" + login = getattr(obj, "login", "") or "" + if not uid: # cannot resolve script credential to an empty UID + logging.debug(f"""Unable to resolve script creds from rec without UID - "{title}:{login}" (skipped)""") + continue + if title and title in creds: + res[title]["titles"].append(uid) + elif login and login in creds: + res[login]["logins"].append(login) + + # recursive search in machine users + if hasattr(obj, "users") and obj.users and isinstance(obj.users, list): + for usr in obj.users: + uid = getattr(usr, "uid", "") or "" + title = getattr(usr, "title", "") or "" + login = getattr(usr, "login", "") or "" + if not uid: # cannot resolve script credential to an empty UID + logging.debug(f"""Unable to resolve script creds from rec without UID - "{title}:{login}" (skipped)""") + continue + if title and title in creds: + res[title]["titles"].append(uid) + elif login and login in creds: + res[login]["logins"].append(login) + + if logging.getLogger().getEffectiveLevel() <= logging.DEBUG: + for k, v 
+def resolve_script_creds(rec, users, resources):
+    creds = set()
+    if (rec and hasattr(rec, "scripts") and rec.scripts and
+            hasattr(rec.scripts, "scripts") and rec.scripts.scripts):
+        creds = set(chain.from_iterable(
+            (x.additional_credentials for x in rec.scripts.scripts if x.additional_credentials))
+        )
+    if not creds:  # nothing to resolve
+        return
+    res = {x: {"titles": [], "logins": []} for x in creds}
+    for obj in chain(users, resources):
+        uid = getattr(obj, "uid", "") or ""
+        title = getattr(obj, "title", "") or ""
+        login = getattr(obj, "login", "") or ""
+        if not uid:  # cannot resolve script credential to an empty UID
+            logging.debug(f"""Unable to resolve script creds from rec without UID - "{title}:{login}" (skipped)""")
+            continue
+        if title and title in creds:
+            res[title]["titles"].append(uid)
+        elif login and login in creds:
+            res[login]["logins"].append(uid)
+
+        # recursive search in machine users
+        if hasattr(obj, "users") and obj.users and isinstance(obj.users, list):
+            for usr in obj.users:
+                uid = getattr(usr, "uid", "") or ""
+                title = getattr(usr, "title", "") or ""
+                login = getattr(usr, "login", "") or ""
+                if not uid:  # cannot resolve script credential to an empty UID
+                    logging.debug(f"""Unable to resolve script creds from rec without UID - "{title}:{login}" (skipped)""")
+                    continue
+                if title and title in creds:
+                    res[title]["titles"].append(uid)
+                elif login and login in creds:
+                    res[login]["logins"].append(uid)
+
+    if logging.getLogger().getEffectiveLevel() <= logging.DEBUG:
+        for k, v in res.items():
+            tlen = len(v.get("titles", []))
+            llen = len(v.get("logins", []))
+            if tlen+llen != 1:
+                logging.debug(f"{tlen+llen} matches while resolving script creds for {k}")
+
+    for script in (x for x in rec.scripts.scripts if x.additional_credentials):
+        for cred in script.additional_credentials:
+            matches = res.get(cred) or {}
+            match = next(chain(matches.get("titles") or [], matches.get("logins") or []), None)
+            if match:
+                script.record_refs.append(match)
+            else:
+                title = getattr(rec, "title", "") or ""
+                login = getattr(rec, "login", "") or ""
+                logging.warning(f"""Unable to resolve script creds "{cred}" from "{title}:{login}" """)
+        if script.record_refs:
+            script.record_refs = list(set(script.record_refs))
+
+def add_pam_scripts(params, record, scripts):
+    """Add post-rotation script(s) to a rotation record"""
+    if not (isinstance(record, str) and record != ""
+            and isinstance(scripts, list) and len(scripts) > 0):
+        return  # nothing to do - no record or no script(s)
+
+    ruid = record if record in params.record_cache else ""
+    if not ruid:
+        records = list(vault_extensions.find_records(
+            params, search_str=record, record_version=(3, 6),
+            record_type=PAM_ROTATION_TYPES + PAM_CONFIG_TYPES))
+        if len(records) == 0:
+            logging.warning(f"""{bcolors.WARNING}Warning: {bcolors.ENDC} Add rotation script - Record "{record}" not found!""")
+        elif len(records) > 1:
+            logging.warning(f"""{bcolors.WARNING}Warning: {bcolors.ENDC} Add rotation script - Record "{record}" is not unique. Use record UID!""")
+        else:
+            ruid = records[0].record_uid
+    rec = vault.KeeperRecord.load(params, ruid) if ruid else None
+    if rec and isinstance(rec, vault.TypedRecord):
+        if rec.version not in (3, 6):
+            logging.warning(f"""{bcolors.WARNING}Warning: {bcolors.ENDC} Add rotation script - Record "{rec.record_uid}" is not a rotation record (skipped).""")
+            return
+
+        script_field = next((x for x in rec.fields if x.type == "script"), None)
+        if not script_field:
+            script_field = vault.TypedField.new_field("script", [], "rotationScripts")
+            rec.fields.append(script_field)
+        for script in scripts:
+            file_name = script.file
+            full_name = os.path.abspath(os.path.expanduser(file_name))
+            if not os.path.isfile(full_name):
+                logging.warning(f"""{bcolors.WARNING}Warning: {bcolors.ENDC} Add rotation script - File "{file_name}" not found (skipped).""")
+                continue
+            facade = record_facades.FileRefRecordFacade()
+            facade.record = rec
+            pre = set(facade.file_ref)
+            upload_task = attachment.FileUploadTask(full_name)
+            attachment.upload_attachments(params, rec, [upload_task])
+            post = set(facade.file_ref)
+            df = post.difference(pre)
+            if len(df) == 1:
+                file_uid = df.pop()
+                facade.file_ref.remove(file_uid)
+                script_value = {
+                    "fileRef": file_uid,
+                    "recordRef": [],
+                    "command": "",
+                }
+                # command and recordRef are optional
+                if script.script_command:
+                    script_value["command"] = script.script_command
+                if script.record_refs:
+                    for ref in script.record_refs:
+                        script_value["recordRef"].append(ref)
+                        if ref not in params.record_cache:
+                            logging.debug(f"{bcolors.WARNING}Warning: {bcolors.ENDC} "
+                                          "Add rotation script - Additional Credentials Record "
+                                          f""" "{ref}" not found (recordRef added)!""")
+                script_field.value.append(script_value)  # type: ignore
+
+        record_management.update_record(params, rec)
+        api.sync_down(params)
+        params.sync_data = True
diff --git a/keepercommander/commands/pam_import/commands.py b/keepercommander/commands/pam_import/commands.py
new file mode 100644
index 000000000..63ac5f78a
--- /dev/null
+++ 
b/keepercommander/commands/pam_import/commands.py @@ -0,0 +1,20 @@ +# _ __ +# | |/ /___ ___ _ __ ___ _ _ ® +# | ' pam_directory_uid (pamDirectory in pam_data.resources by title) - if (mach.pam_settings and mach.pam_settings.jit_settings and - getattr(mach.pam_settings.jit_settings, "pam_directory_record", None)): - jit = mach.pam_settings.jit_settings + # RBI has rbi_settings only (no pam_settings.jit_settings) + ps = getattr(mach, "pam_settings", None) + jit = getattr(ps, "jit_settings", None) if ps else None + if jit and getattr(jit, "pam_directory_record", None): ref = (jit.pam_directory_record or "").strip() if ref: matches = [x for x in pam_directories if getattr(x, "title", None) == ref] @@ -1683,31 +1629,34 @@ def process_data(self, params, project): tdag.set_resource_allowed(**args) # After setting allowedSettings, save JIT settings if present - # JIT settings don't apply to RBI records (only machine/db/directory) - if mach.pam_settings and mach.pam_settings.jit_settings: - jit_dag_dict = mach.pam_settings.jit_settings.to_dag_dict() + # JIT settings don't apply to RBI records (only machine/db/directory); RBI has rbi_settings, no pam_settings.jit_settings + ps = getattr(mach, "pam_settings", None) + jit = getattr(ps, "jit_settings", None) if ps else None + ai = getattr(ps, "ai_settings", None) if ps else None + if jit: + jit_dag_dict = jit.to_dag_dict() if jit_dag_dict: # Only save if not empty set_resource_jit_settings(params, mach.uid, jit_dag_dict, pam_cfg_uid) # After setting allowedSettings, save AI settings if present # AI settings don't apply to RBI records (only machine/db/directory) - if mach.pam_settings and mach.pam_settings.ai_settings: + if ai: user_id = "" if getattr(params, "account_uid_bytes", None): user_id = utils.base64_url_encode(params.account_uid_bytes) elif getattr(params, "user", ""): user_id = params.user - ai_dag_dict = mach.pam_settings.ai_settings.to_dag_dict(user_id=user_id) + ai_dag_dict = ai.to_dag_dict(user_id=user_id) if ai_dag_dict: # Only save if not empty set_resource_keeper_ai_settings(params, mach.uid, ai_dag_dict, pam_cfg_uid) # Web vault UI visualizer shows only latest and meta is most wanted path. # Note: DAG may take a while to sync in web vault # Dummy update to meta so it is latest among DATA (after jit/ai). - if mach.pam_settings and (mach.pam_settings.jit_settings or mach.pam_settings.ai_settings): + if jit or ai: refresh_meta_to_latest(params, mach.uid, pam_cfg_uid) # Bump LINK to config only when AI is present (AI adds the encryption KEY). 
- if mach.pam_settings and mach.pam_settings.ai_settings: + if ai: refresh_link_to_config_to_latest(params, mach.uid, pam_cfg_uid) # Machine - create its users (if any) @@ -1740,11 +1689,13 @@ def process_data(self, params, project): if resources: print(f"{len(resources)}/{len(resources)}\n") # link machine -> pamDirectory (LINK, path=domain) for jit_settings.pam_directory_uid + # RBI has rbi_settings only (no pam_settings.jit_settings) jit_domain_links_added = False for mach in resources: - if not (mach and mach.pam_settings and mach.pam_settings.jit_settings): + ps = getattr(mach, "pam_settings", None) + jit = getattr(ps, "jit_settings", None) if ps else None + if not (mach and jit): continue - jit = mach.pam_settings.jit_settings dir_uid = getattr(jit, "pam_directory_uid", None) if not dir_uid: continue @@ -1776,3200 +1727,13 @@ def process_data(self, params, project): prf = vault.TypedField.new_field('pamResources', {}) pcrec.fields.append(prf) prf.value = prf.value or [{}] - prf.value[0]["adminCredentialRef"] = pce.admin_credential_ref - record_management.update_record(params, pcrec) - tdag.link_user_to_config_with_options(pce.admin_credential_ref, is_admin='on') + if isinstance(prf.value[0], dict): + prf.value[0]["adminCredentialRef"] = pce.admin_credential_ref + record_management.update_record(params, pcrec) + tdag.link_user_to_config_with_options(pce.admin_credential_ref, is_admin='on') + else: + logging.error(f"Unable to add adminCredentialRef - bad pamResources field in PAM Config {pcuid}") else: logging.debug(f"Unable to resolve domain admin '{pce.dom_administrative_credential}' for PAM Domain configuration.") logging.debug("Done processing project data.") - - -class PamConfigEnvironment(): - def _initialize(self): - self.uid: str = "" # known after creation - self.environment: str = "" # local|aws|azure|domain|gcp|oci - self.title: str = "" - # self.gateway_name: str = "" # used externally, use controllerUid here - # self.ksm_app_name: str = "" # used externally, use controllerUid here - # self.application_folder_uid: str = "" # auto (Users folder) in pam_resources - - # default settings - self.connections: str = "on" - self.rotation: str = "on" - self.tunneling: str = "on" - self.remote_browser_isolation: str = "on" - self.graphical_session_recording: str = "on" - self.text_session_recording: str = "on" - self.ai_threat_detection: str = "off" - self.ai_terminate_session_on_detection: str = "off" - - self.port_mapping: List[str] = [] # ex. ["2222=ssh", "33306=mysql"] for discovery, rotation etc. 
- self.default_rotation_schedule: dict = {} # "type": "On-Demand|CRON" - self.scripts = None # PamScriptsObject - PAM Config scripts run on gateway after every rotation - self.attachments = None # PamAttachmentsObject - - # common settings (shared across all config types) - self.pam_resources = {} # {"folderUid": "", "controllerUid": ""} - "resourceRef": unused/legacy - - # Local environment: pamNetworkConfiguration - self.network_id: str = "" # required, text:networkId prefix for naming resources during discovery - self.network_cidr: str = "" # required, text:networkCIDR network CIDR used for discovery - - # AWS environment: pamAwsConfiguration - self.aws_id: str = "" # required, text:awsId - self.aws_access_key_id: str = "" # required, secret:accessKeyId - self.aws_secret_access_key: str = "" # required, secret:accessSecretKey - self.aws_region_names: List[str] = [] # optional, multiline:regionNames - - # Azure environment: pamAzureConfiguration - self.az_entra_id: str = "" # required, text:azureId - self.az_client_id: str = "" # required, secret:clientId - self.az_client_secret: str = "" # required, secret:clientSecret - self.az_subscription_id: str = "" # required, secret:subscriptionId - self.az_tenant_id: str = "" # required, secret:tenantId - self.az_resource_groups: List[str] = [] # optional, multiline:resourceGroups - - # Domain environment: pamDomainConfiguration - self.dom_domain_id: str = "" # required, text:pamDomainId - self.dom_hostname: str = "" # required, pamHostname: - self.dom_port: str = "" # required, pamHostname: - self.dom_use_ssl: bool = False # required, checkbox:useSSL - self.dom_scan_dc_cidr: bool = False # optional, checkbox:scanDCCIDR - self.dom_network_cidr: str = "" # optional, text:networkCIDR - self.dom_administrative_credential: str = "" # required, existing pamUser: pamResources.value[0][adminCredentialRef] - self.admin_credential_ref: str = "" # UID resolved from dom_administrative_credential by record title - # Domain Administrator User: pamUser record should have an ACL edge to the pamDomainConfiguration record with is_admin = True - # Domain users are the equivalent to cloud users, IAM/Azure users. The parent of the pamUser is the configuration record. - # The user does not belong to a machine, database or directory resource. - - # Google Cloud Platform (GCP) environment: pamGcpConfiguration - self.gcp_id: str = "" # required, text:pamGcpId - self.gcp_service_account_key: str = "" # required, json:pamServiceAccountKey - self.gcp_google_admin_email: str = "" # required, email:pamGoogleAdminEmail - self.gcp_region_names: List[str] = [] # required, multiline:pamGcpRegionName - - # Oracle Cloud Infrastructure (OCI) environment: pamOciConfiguration - # NB! 
OCI settings subject to change: - self.oci_id: str = "" # required, text:pamOciId - self.oci_admin_id: str = "" # required, secret:adminOcid - self.oci_admin_public_key: str = "" # required, secret:adminPublicKey - self.oci_admin_private_key: str = "" # required, secret:adminPrivateKey - self.oci_tenancy: str = "" # required, text:tenancyOci - self.oci_region: str = "" # required, text:regionOci - - def __init__(self, environment_type:str, settings:dict, controller_uid:str, folder_uid:str) -> None: - self._initialize() - settings = settings if isinstance(settings, dict) else {} - environment_type = str(environment_type).strip() - if environment_type not in PAM_ENVIRONMENT_TYPES: - environment_type = str(settings.get("environment", "")).strip() - if environment_type not in PAM_ENVIRONMENT_TYPES: - logging.warning("Unrecognized environment type " - f"""{bcolors.WARNING}"{environment_type}"{bcolors.ENDC} """ - f"""must be one of {PAM_ENVIRONMENT_TYPES} - switching to "local" """) - environment_type = "local" - self.environment = environment_type - - # common properties shared across all PAM config types: - self.pam_resources = { - "controllerUid": controller_uid, - "folderUid": folder_uid - # "resourceRef": "" - unused/legacy - } - val = settings.get("title", None) - if isinstance(val, str): self.title = val - - # gateway_name, ksm_app_name used externally during gw creation, use controllerUid here - - choices = ("on", "off", "default") - val = settings.get("connections", None) - if isinstance(val, str) and val in choices: self.connections = val - val = settings.get("rotation", None) - if isinstance(val, str) and val in choices: self.rotation = val - val = settings.get("tunneling", None) - if isinstance(val, str) and val in choices: self.tunneling = val - val = settings.get("remote_browser_isolation", None) - if isinstance(val, str) and val in choices: self.remote_browser_isolation = val - val = settings.get("graphical_session_recording", None) - if isinstance(val, str) and val in choices: self.graphical_session_recording = val - val = settings.get("text_session_recording", None) - if isinstance(val, str) and val in choices: self.text_session_recording = val - val = settings.get("ai_threat_detection", None) - if isinstance(val, str) and val in choices: self.ai_threat_detection = val - val = settings.get("ai_terminate_session_on_detection", None) - if isinstance(val, str) and val in choices: self.ai_terminate_session_on_detection = val - - val = settings.get("port_mapping", None) # multiline - if isinstance(val, str): val = [val] - if (isinstance(val, list) and all(isinstance(x, str) and x != "" for x in val)): - self.port_mapping = val - elif val is not None: - logging.warning("Unrecognized port_mapping values (skipped) - expecting list of strings,"\ - """ ex. 
["2222=ssh", "33060=mysql"]""") - - # {"type": "on-demand"} or {"type": "CRON", "cron": "30 18 * * *", "tz": "America/Chicago" } - val = settings.get("default_rotation_schedule", None) - if isinstance(val, dict): - schedule_type = str(val.get("type", "")).lower() - schedule_type = {"on-demand": "ON_DEMAND", "cron": "CRON"}.get(schedule_type, "") - if schedule_type != "": - if schedule_type == "ON_DEMAND": - self.default_rotation_schedule = { "type": "ON_DEMAND" } - elif schedule_type == "CRON": - cron = str(val.get("cron", "")).strip() - if cron: - self.default_rotation_schedule = { "type": "CRON", "cron": cron } - tz = str(val.get("tz", "")).strip() - if tz: self.default_rotation_schedule["tz"] = tz - else: - logging.warning("Skipped unrecognized CRON settings in default_rotation_schedule") - else: - logging.warning("Skipped unrecognized default_rotation_schedule type") - - self.scripts = PamScriptsObject.load(settings.get("scripts", None)) - self.attachments = PamAttachmentsObject.load(settings.get("attachments", None)) - - # Local Network - if environment_type == "local": - val = settings.get("network_id", None) - if isinstance(val, str): self.network_id = val - val = settings.get("network_cidr", None) - if isinstance(val, str): self.network_cidr = val - elif environment_type == "aws": - val = settings.get("aws_id", None) # required - if isinstance(val, str): self.aws_id = val - val = settings.get("aws_access_key_id", None) - if isinstance(val, str): self.aws_access_key_id = val - val = settings.get("aws_secret_access_key", None) - if isinstance(val, str): self.aws_secret_access_key = val - - val = settings.get("aws_region_names", None) # multiline - if isinstance(val, str): val = [val] - if (isinstance(val, list) and all(isinstance(x, str) and x != "" for x in val)): - self.aws_region_names = val - elif val is not None: - logging.warning("Unrecognized aws_region_names values (skipped) - expecting list of strings") - elif environment_type == "azure": - val = settings.get("az_entra_id", None) # required - if isinstance(val, str): self.az_entra_id = val - val = settings.get("az_client_id", None) # required - if isinstance(val, str): self.az_client_id = val - val = settings.get("az_client_secret", None) # required - if isinstance(val, str): self.az_client_secret = val - val = settings.get("az_subscription_id", None) # required - if isinstance(val, str): self.az_subscription_id = val - val = settings.get("az_tenant_id", None) # required - if isinstance(val, str): self.az_tenant_id = val - val = settings.get("az_resource_groups", None) # multiline - if isinstance(val, str): val = [val] - if (isinstance(val, list) and all(isinstance(x, str) and x != "" for x in val)): - self.az_resource_groups = val - elif val is not None: - logging.warning("Unrecognized az_resource_groups values (skipped) - expecting list of strings") - elif environment_type == "domain": - val = settings.get("dom_domain_id", None) # required - if isinstance(val, str): self.dom_domain_id = val - val = settings.get("dom_hostname", None) # required - if isinstance(val, str): self.dom_hostname = val - val = settings.get("dom_port", None) # required - if isinstance(val, int) and 0 <= val <= 65535: val = str(val) - if isinstance(val, str): self.dom_port = val - val = utils.value_to_boolean(settings.get("dom_use_ssl")) # required, bool - if isinstance(val, bool): self.dom_use_ssl = val - val = utils.value_to_boolean(settings.get("dom_scan_dc_cidr")) # optional, bool - if isinstance(val, bool): self.dom_scan_dc_cidr = val - val = 
settings.get("dom_network_cidr", None) # optional - if isinstance(val, str): self.dom_network_cidr = val - val = settings.get("dom_administrative_credential", None) # required, existing pamUser - if isinstance(val, str): self.dom_administrative_credential = val - # self.admin_credential_ref - will be resolved from dom_administrative_credential (later) - elif environment_type == "gcp": - val = settings.get("gcp_id", None) # required - if isinstance(val, str): self.gcp_id = val - # --service-account-key accepts only JSON.stringify(value) anyways - val = settings.get("gcp_service_account_key", None) # required - if isinstance(val, str): self.gcp_service_account_key = val - val = settings.get("gcp_google_admin_email", None) # required - if isinstance(val, str): self.gcp_google_admin_email = val - val = settings.get("gcp_region_names", None) # required, multiline - if isinstance(val, str): val = [val] - if (isinstance(val, list) and all(isinstance(x, str) and x != "" for x in val)): - self.gcp_region_names = val - elif val is not None: - logging.warning("Unrecognized gcp_region_names values (skipped) - expecting list of strings") - elif environment_type == "oci": - val = settings.get("oci_id", None) # required - if isinstance(val, str): self.oci_id = val - val = settings.get("oci_admin_id", None) # required - if isinstance(val, str): self.oci_admin_id = val - val = settings.get("oci_admin_public_key", None) # required - if isinstance(val, str): self.oci_admin_public_key = val - val = settings.get("oci_admin_private_key", None) # required - if isinstance(val, str): self.oci_admin_private_key = val - val = settings.get("oci_tenancy", None) # required - if isinstance(val, str): self.oci_tenancy = val - val = settings.get("oci_region", None) # required - if isinstance(val, str): self.oci_region = val - - -class PamScriptsObject(): - def __init__(self): - self.scripts: List[PamScriptObject] = [] - - @classmethod - def load(cls, data: Optional[Union[str, list]]) -> PamScriptsObject: - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"Pam Scripts failed to load from: {str(data)[:80]}...") - if not(data and isinstance(data, list)): return obj - - for s in data: - so = PamScriptObject.load(s) - if so.validate(): - obj.scripts.append(so) - else: - logging.warning(f"""Script file not found (skipped): "{str(so.file)}" """) - if not obj.scripts: logging.warning("Skipped empty scripts section") - return obj - - # def to_json(self): pass # File upload will create the JSON format - - -class PamScriptObject(): - def __init__(self): - self.file: str = "" - self.script_command: str = "" - self.additional_credentials: List[str] = [] - self.file_ref: str = "" # fileRef generated by file upload - self.record_refs: List[str] = [] # "recordRef":["uid1","uid2"] from additional_credentials - - def validate(self): - valid = isinstance(self.file, str) - valid = valid and Path(self.file).resolve().exists() - return valid - - @classmethod - def load(cls, data: Union[str, dict]) -> PamScriptObject: - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"PAM script failed to load from: {str(data)[:80]}") - if not isinstance(data, dict): return obj - - # TUI: "script": { "script_command": "pwsh.exe", "file": "path/file.ext", "additional_credentials": ["admin1", "user2"] }, - # JSON: "script": [{"command":"", "fileRef":"path/file.ext", "recordRef": ["uid1", "uid2"]}] - # use file upload to attach to existing record and get 
UIDs - cmd = data.get("script_command", None) - if isinstance(cmd, str) and cmd.strip() != "": obj.script_command = cmd.strip() - file = data.get("file", None) - if isinstance(file, str) and file.strip() != "": obj.file = file.strip() - # before use call validate() which also checks if file exists - - # NB! If script has additional_credentials these must be added later, - # after pamUser creation - acs = data.get("additional_credentials", None) - if isinstance(acs, str): acs = [acs] - if isinstance(acs, list) and acs: obj.additional_credentials = acs - - return obj - - # def to_json(self): pass # File upload will create the JSON format - - -class PamAttachmentsObject(): - def __init__(self): - self.attachments: List[PamAttachmentObject] = [] - # self.file_ref: List[str] # fileRef: [] populated by file upload - - @classmethod - def load(cls, data: Optional[Union[str, list]]) -> PamAttachmentsObject: - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"PAM Attachments failed to load from: {str(data)[:80]}...") - if not(data and isinstance(data, list)): return obj - - for a in data: - if isinstance(a, str): a = { "file": a } - ao = PamAttachmentObject.load(a) - if ao.validate(): - obj.attachments.append(ao) - else: - logging.warning(f"""File attachment not found (skipped): "{str(ao.file)}" """) - if not obj.attachments: logging.warning("Skipped empty file attachments section") - return obj - - # def to_json(self): pass # File upload will create the JSON format - - -class PamAttachmentObject(): - def __init__(self): - self.file: str = "" - self.title: str = "" - self.file_ref: str = "" # fileRef generated by file upload - - def validate(self): - valid = isinstance(self.file, str) - valid = valid and Path(self.file).resolve().exists() - return valid - - @classmethod - def load(cls, data: Union[str, dict]) -> PamAttachmentObject: - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"Failed to load file attachment from: {str(data)[:80]}") - if isinstance(data, str): data = {"file": data} - if not isinstance(data, dict): return obj - - # TUI: "attachments": [{ "file": "path/file.ext", "title": "File1" }] - # TUI: "attachments": ["path/file1", "file2"] - currently / title=filename - # JSON: "fileRef": ["uid1", "uid2"] # file upload generated - # use file upload to attach to existing record and get UIDs - title = data.get("title", None) - if isinstance(title, str) and title.strip() != "": obj.title = title.strip() - file = data.get("file", None) - if isinstance(file, str) and file.strip() != "": obj.file = file.strip() - # before use call validate() which also checks if file exists - - return obj - - # def to_json(self): pass # File upload will create the JSON format - - -class PamRotationScheduleObject(): - def __init__(self): - self.type: str = "" # on-demand|CRON - self.cron: str = "" # ex. 
"cron": "30 18 * * *" - self.tz: str = "" # timezone - default = "Etc/UTC" - # {"type": "on-demand"}|{"type": "CRON", "cron": "30 18 * * *"} - # http://www.quartz-scheduler.org/documentation/quartz-2.3.0/tutorials/crontrigger.html#examples - - @classmethod - def load(cls, data: Union[str, dict]) -> PamRotationScheduleObject: - schedule_types = ("on-demand", "cron") - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"Failed to load rotation schedule from: {str(data)[:80]}") - if not isinstance(data, dict): return obj - - type = data.get("type", None) - if type and isinstance(type, str) and type.strip().lower() in schedule_types: - obj.type = type.strip().lower() - elif type: - logging.error(f"""Schedule type "{str(type)[:80]}" is unknown - must be one of {schedule_types}""") - - if obj.type.lower() == "cron": - cron = data.get("cron", None) - if isinstance(cron, str) and cron.strip() != "": obj.cron = cron.strip() - if obj.cron: # validate - try: - parsed_cron = vault.TypedField.import_schedule_field(obj.cron) - except: - parsed_cron = {} - if not (parsed_cron and parsed_cron.get("time", "")): - logging.error(f"Failed to load CRON from: {obj.cron}") - tz = data.get("tz", None) - if isinstance(tz, str) and tz.strip() != "": obj.tz = tz.strip() - - return obj - -class PamRotationParams(): - def __init__(self, configUid: str, profiles: dict): - self.configUid: str = configUid # iam_user|scripts_only=NOOP - self.ownerUid: str = "" # general - pamMachine rec UID - self.ownerTitle: str = "" # general - pamMachine rec title - self.rotation_profiles: dict = profiles or {} - -class PamRotationSettingsObject(): - def __init__(self): - self.rotation: str = "" # general|iam_user|scripts_only=NOOP - self.resource: str = "" # general:MachineTitle, IAM/Scripts:skip - auto/PamConfig - self.enabled: str = "" # on|off|default - self.schedule = None # {"type": "on-demand"}|{"type": "CRON", "cron": "30 18 * * *"} - self.password_complexity: str = "" # "32,5,5,5,5" - self.resourceUid: str = "" # general:machineUID, iam_user,scripts_only:PamConfigUID - - @classmethod - def load(cls, data: Optional[Union[str, dict]], rotation_params: Optional[PamRotationParams] = None) -> PamRotationSettingsObject: - rotation_types = ("general", "iam_user", "scripts_only") - enabled_types = ("on", "off", "default") - rx_complexity = r"^(\d+,\d+,\d+,\d+,\d+)$" - obj = cls() - - # autodetect profile name (and load from rotation_profiles section) - if isinstance(data, str) and rotation_params and isinstance(rotation_params.rotation_profiles, dict): - profile = rotation_params.rotation_profiles.get(data, None) - if profile and isinstance(profile, dict): - data = profile - - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"Failed to load rotation settings from: {str(data)[:80]}") - if not isinstance(data, dict): return obj - - rotation = data.get("rotation", None) - if rotation and isinstance(rotation, str) and rotation.strip().lower() in rotation_types: - obj.rotation = rotation.strip().lower() - elif rotation: - logging.error(f"""Rotation type "{str(rotation)[:80]}" is unknown - must be one of {rotation_types}""") - - # type: iam_user|scripts_only=NOOP - automatically pick up current PAM Config - # type: general - automatically picks owner record (uid by title) - if obj.rotation == "general": - resource = data.get("resource", None) - if isinstance(resource, str) and resource.strip() != "": - obj.resource = resource.strip() - if 
rotation_params and rotation_params.ownerTitle: - if obj.resource and obj.resource.lower() != rotation_params.ownerTitle.lower(): - logging.warning("Rotation record owner must be its parent - replacing " - f"""configured owner "resource":"{obj.resource}" """ - f"""with actual parent "{rotation_params.ownerTitle}" """) - obj.resource = rotation_params.ownerTitle - elif obj.rotation in ("iam_user", "scripts_only"): - if rotation_params and rotation_params.configUid: - obj.resource = rotation_params.configUid - - enabled = data.get("enabled", None) - if enabled and isinstance(enabled, str) and enabled.strip().lower() in enabled_types: - obj.enabled = enabled.strip().lower() - elif enabled: - logging.error(f"""Unknown rotation enablement type "{str(enabled)[:80]}" - must be one of {enabled_types}""") - - obj.schedule = PamRotationScheduleObject.load(data.get("schedule", None) or "") - complexity = data.get("password_complexity", None) - if complexity and isinstance(complexity, str): - if re.fullmatch(rx_complexity, complexity): - obj.password_complexity = complexity.strip() - if complexity and not obj.password_complexity: - logging.error(f"""Invalid password complexity "{str(enabled)[:20]}" - must be in csv format, ex. "32,5,5,5,5" """) - # pwd_complexity_rule_list = {} populated by password_complexity - - return obj - - -class DagOptionValue(Enum): - ON = "on" - OFF = "off" - DEFAULT = "default" - - @classmethod - def map(cls, dag_option: str): - try: return cls(str(dag_option).lower()) - except ValueError: return None - -class DagSettingsObject(): - def __init__(self): - self.pam_resource: Optional[str] = None - self.rotation: Optional[DagOptionValue] = None - self.connections: Optional[DagOptionValue] = None - self.tunneling: Optional[DagOptionValue] = None - self.remote_browser_isolation: Optional[DagOptionValue] = None - self.graphical_session_recording: Optional[DagOptionValue] = None - self.text_session_recording: Optional[DagOptionValue] = None - self.ai_threat_detection: Optional[DagOptionValue] = None - self.ai_terminate_session_on_detection: Optional[DagOptionValue] = None - # NB! 
PAM User has its own rotation_settings: {}, cannot enable con/tun on user anyways - # remote_browser_isolation uses rbi, pam_resource, graphical_session_recording - # rotation uses only pam_resource, rotation - # machine/db/dir uses all - - @classmethod - def load(cls, data: Union[str, dict]): - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"DAG settings failed to load from: {str(data)[:80]}") - if not isinstance(data, dict): return obj - - val = data.get("resource", None) - if isinstance(val, str): obj.pam_resource = val - obj.rotation = DagOptionValue.map(data.get("rotation", None) or "") - obj.connections = DagOptionValue.map(data.get("connections", None) or "") - obj.tunneling = DagOptionValue.map(data.get("tunneling", None) or "") - obj.remote_browser_isolation = DagOptionValue.map(data.get("remote_browser_isolation", None) or "") - obj.graphical_session_recording = DagOptionValue.map(data.get("graphical_session_recording", None) or "") - obj.text_session_recording = DagOptionValue.map(data.get("text_session_recording", None) or "") - obj.ai_threat_detection = DagOptionValue.map(data.get("ai_threat_detection", None) or "") - obj.ai_terminate_session_on_detection = DagOptionValue.map(data.get("ai_terminate_session_on_detection", None) or "") - - return obj - - -class DagJitSettingsObject(): - def __init__(self): - self.create_ephemeral: bool = False - self.elevate: bool = False - self.elevation_method: str = "group" - self.elevation_string: str = "" - self.base_distinguished_name: str = "" - self.ephemeral_account_type: Optional[str] = None # Omit if missing - self.pam_directory_record: Optional[str] = None # Title of pamDirectory from pam_data.resources[], resolved to UID - self.pam_directory_uid: Optional[str] = None # Resolved pamDirectory record UID (set in process_data) - - @classmethod - def validate_enum_value(cls, value: str, allowed_values: List[str], field_name: str) -> Optional[str]: - """Validate value against predefined list. Returns validated value or None if invalid.""" - if not value or value == "": - return None # Empty string not allowed for enum fields - value_lower = value.lower() - allowed_lower = [v.lower() for v in allowed_values] - if value_lower in allowed_lower: - # Return original case from allowed_values - idx = allowed_lower.index(value_lower) - return allowed_values[idx] - logging.warning(f"Invalid {field_name} value '{value}'. Allowed: {allowed_values}. Skipping.") - return None - - @classmethod - def load(cls, data: Union[str, dict]) -> Optional['DagJitSettingsObject']: - """Load JIT settings from JSON. 
Returns None if data is missing/empty.""" - obj = cls() - try: - data = json.loads(data) if isinstance(data, str) else data - except: - logging.error(f"JIT settings failed to load from: {str(data)[:80]}") - return None - - if not isinstance(data, dict): - return None - - # Check if object is empty (no valid fields) - has_valid_fields = False - - # Parse boolean fields with defaults - create_ephemeral = utils.value_to_boolean(data.get("create_ephemeral", None)) - if create_ephemeral is not None: - obj.create_ephemeral = create_ephemeral - has_valid_fields = True - - elevate = utils.value_to_boolean(data.get("elevate", None)) - if elevate is not None: - obj.elevate = elevate - has_valid_fields = True - - # Parse elevation_method with validation (defaults to "group" if missing or invalid) - elevation_method = data.get("elevation_method", None) - if elevation_method is not None: - validated = cls.validate_enum_value(str(elevation_method), ["group", "role"], "elevation_method") - if validated: - obj.elevation_method = validated - has_valid_fields = True - # If validation fails, keep default "group" from __init__() - still include in DAG JSON - # If missing, keep default "group" from __init__() - still include in DAG JSON - - # Parse string fields - elevation_string = data.get("elevation_string", None) - if elevation_string is not None and str(elevation_string).strip(): - obj.elevation_string = str(elevation_string).strip() - has_valid_fields = True - - base_distinguished_name = data.get("base_distinguished_name", None) - if base_distinguished_name is not None and str(base_distinguished_name).strip(): - obj.base_distinguished_name = str(base_distinguished_name).strip() - has_valid_fields = True - - # Parse ephemeral_account_type with validation (omit if missing) - ephemeral_account_type = data.get("ephemeral_account_type", None) - if ephemeral_account_type is not None: - validated = cls.validate_enum_value( - str(ephemeral_account_type), - ["linux", "mac", "windows", "domain"], - "ephemeral_account_type" - ) - if validated: - obj.ephemeral_account_type = validated - has_valid_fields = True - - # Parse pam_directory_record (title of pamDirectory from pam_data.resources[]; resolved to pam_directory_uid later) - pam_directory_record = data.get("pam_directory_record", None) - if pam_directory_record is not None and str(pam_directory_record).strip(): - obj.pam_directory_record = str(pam_directory_record).strip() - has_valid_fields = True - - # Silently ignore any other unknown fields (permissive parsing) - - # Return None if no valid fields were found (empty object) - return obj if has_valid_fields else None - - def to_dag_dict(self) -> Dict[str, Any]: - """Convert to DAG JSON format (camelCase).""" - result = { - "createEphemeral": self.create_ephemeral, - "elevate": self.elevate, - "elevationMethod": self.elevation_method, # Always included (defaults to "group" if missing/invalid) - "elevationString": self.elevation_string, - "baseDistinguishedName": self.base_distinguished_name - } - # Only include ephemeralAccountType if it was set (omit if missing/invalid) - if self.ephemeral_account_type: - result["ephemeralAccountType"] = self.ephemeral_account_type - return result - - -class DagAiSettingsObject(): - def __init__(self): - self.version: str = "v1.0.0" - self.risk_levels: Dict[str, Dict[str, Any]] = {} - - @classmethod - def _parse_tag_list(cls, items: Any) -> List[str]: - tags: List[str] = [] - if not isinstance(items, list): - return tags - for item in items: - tag = "" - if isinstance(item, 
str): - tag = item.strip() - elif isinstance(item, dict): - tag = str(item.get("tag", "")).strip() - if tag: - tags.append(tag) - return tags - - @classmethod - def load(cls, data: Union[str, dict]) -> Optional['DagAiSettingsObject']: - """Load AI settings from JSON. Returns None if data is missing/empty.""" - obj = cls() - try: - data = json.loads(data) if isinstance(data, str) else data - except: - logging.error(f"AI settings failed to load from: {str(data)[:80]}") - return None - - if not isinstance(data, dict): - return None - - risk_levels = data.get("risk_levels", None) - if not isinstance(risk_levels, dict): - return None - - for level in ["critical", "high", "medium", "low"]: - level_data = risk_levels.get(level, None) - if not isinstance(level_data, dict): - continue - - ai_session_terminate = utils.value_to_boolean(level_data.get("ai_session_terminate", None)) - activities = level_data.get("activities", None) or {} - if not isinstance(activities, dict): - activities = {} - - allow_tags = cls._parse_tag_list(activities.get("allow", [])) - deny_tags = cls._parse_tag_list(activities.get("deny", [])) - - if ai_session_terminate is None and not allow_tags and not deny_tags: - continue - - obj.risk_levels[level] = { - "ai_session_terminate": ai_session_terminate, - "allow": allow_tags, - "deny": deny_tags - } - - return obj if obj.risk_levels else None - - def _build_tag_entries(self, tags: List[str], action: str, user_id: str) -> List[Dict[str, Any]]: - entries: List[Dict[str, Any]] = [] - for tag in tags: - if not tag: - continue - entries.append({ - "tag": tag, - "auditLog": [{ - "date": utils.current_milli_time(), - "userId": user_id, - "action": action - }] - }) - return entries - - def to_dag_dict(self, user_id: str) -> Optional[Dict[str, Any]]: - if not self.risk_levels: - return None - - if not user_id: - logging.warning("AI settings auditLog userId is missing; auditLog will have empty userId.") - user_id = "" - - risk_levels: Dict[str, Any] = {} - for level, data in self.risk_levels.items(): - level_out: Dict[str, Any] = {} - - if data.get("ai_session_terminate") is not None: - level_out["aiSessionTerminate"] = data["ai_session_terminate"] - - tags_out: Dict[str, Any] = {} - allow_entries = self._build_tag_entries(data.get("allow", []), "added_to_allow", user_id) - if allow_entries: - tags_out["allow"] = allow_entries - deny_entries = self._build_tag_entries(data.get("deny", []), "added_to_deny", user_id) - if deny_entries: - tags_out["deny"] = deny_entries - - if tags_out: - level_out["tags"] = tags_out - - if level_out: - risk_levels[level] = level_out - - if not risk_levels: - return None - - return { - "version": self.version, - "riskLevels": risk_levels - } - - -class PamUserObject(): - def __init__(self): - self.uid = "" - self.type = "pamUser" - self.title = None - self.notes = None - self.login = None - self.password = None - self.privatePEMKey = None - self.distinguishedName = None - self.connectDatabase = None - self.managed = None - self.oneTimeCode = None - self.attachments = None # fileRef - self.scripts = None # script - self.rotation_settings = None # DAG: rotation settings - - @classmethod - def load(cls, data: Union[str, dict], rotation_params: Optional[PamRotationParams] = None): - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"PAM User failed to load from: {str(data)[:80]}") - if not isinstance(data, dict): return obj - - dtype = str(data["type"]) if "type" in data else "pamUser" - if dtype and 
dtype.lower() != "pamUser".lower(): - logging.warning(f"""PAM User data using wrong type "pamUser" != "{dtype[:80]}" """) - - obj.type = "pamUser" - obj.title = str(data["title"]) if "title" in data else None - obj.notes = str(data["notes"]) if "notes" in data else None - - obj.login = str(data["login"]) if "login" in data else None - obj.password = str(data["password"]) if "password" in data else None - obj.privatePEMKey = str(data["private_pem_key"]) if "private_pem_key" in data else None - obj.distinguishedName = str(data["distinguished_name"]) if "distinguished_name" in data else None - obj.connectDatabase = str(data["connect_database"]) if "connect_database" in data else None - obj.managed = utils.value_to_boolean(data["managed"]) if "managed" in data else None - obj.oneTimeCode = str(data["otp"]) if "otp" in data else None - - obj.attachments = PamAttachmentsObject.load(data.get("attachments", None)) - obj.scripts = PamScriptsObject.load(data.get("scripts", None)) - rso = PamRotationSettingsObject.load(data.get("rotation_settings", None), rotation_params) - if not is_blank_instance(rso): - obj.rotation_settings = rso - - if (obj.title is None or not obj.title.strip()) and obj.login and obj.login.strip(): - obj.title = f"PAM User - {str(obj.login).strip()}" - - obj.validate_record() - - return obj - - def create_record(self, params, folder_uid): - args = { - "force": True, - "folder": folder_uid, - "record_type": self.type - } - if self.uid: args["record_uid"] = self.uid - if self.title: args["title"] = self.title - if self.notes: args["notes"] = self.notes - - fields = [] - if self.login: fields.append(f"f.login={self.login}") - if self.password: fields.append(f"f.password={self.password}") - if self.privatePEMKey: fields.append(f"f.secret.privatePEMKey={self.privatePEMKey}") - if self.distinguishedName: fields.append(f"f.text.distinguishedName={self.distinguishedName}") - if self.connectDatabase: fields.append(f"f.text.connectDatabase={self.connectDatabase}") - - managed = utils.value_to_boolean(self.managed) - if managed is not None: fields.append(f"f.checkbox.managed={str(managed).lower()}") - - if self.oneTimeCode: fields.append(f"f.oneTimeCode={self.oneTimeCode}") - - files = self.attachments.attachments if self.attachments and isinstance(self.attachments, PamAttachmentsObject) else [] - if files and isinstance(files, list): - for x in files: - if x and isinstance(x, PamAttachmentObject) and x.file: - fields.append(f"file=@{x.file}") - - if fields: args["fields"] = fields - uid = RecordEditAddCommand().execute(params, **args) - if uid and isinstance(uid, str): - self.uid = uid - - # after record creation add PAM scripts - if uid and self.scripts and self.scripts.scripts: - add_pam_scripts(params, uid, self.scripts.scripts) - - # DAG: after record creation - self.scripts, self.rotation_settings - return uid - - def validate_record(self): - if not self.password: - logging.warning("PAM User is missing required field `login`") - if not self.rotation_settings: - logging.debug("PAM User is missing rotation settings") - if isinstance(self.rotation_settings, PamRotationSettingsObject): - if (str(self.rotation_settings.rotation).lower() == "general" and - not self.rotation_settings.resource): - logging.warning("PAM User with rotation type=general is missing required machine `resource=xxx`") - - -class LoginUserObject(): - def __init__(self): - self.uid = "" - self.type = "login" - self.title = None - self.notes = None - self.login = None - self.password = None - self.url = None - 
-
-class LoginUserObject():
-    def __init__(self):
-        self.uid = ""
-        self.type = "login"
-        self.title = None
-        self.notes = None
-        self.login = None
-        self.password = None
-        self.url = None
-        self.oneTimeCode = None
-        self.attachments = None
-
-    @classmethod
-    def load(cls, data: Union[str, dict]):
-        obj = cls()
-        try: data = json.loads(data) if isinstance(data, str) else data
-        except: logging.error(f"Record type `login` failed to load from: {str(data)[:80]}")
-        if not isinstance(data, dict): return obj
-
-        dtype = str(data["type"]) if "type" in data else "login"
-        if dtype.lower() != "login".lower():
-            logging.warning(f"""User data using wrong type "login" != "{dtype[:80]}" """)
-
-        obj.uid = ""
-        obj.type = "login"
-        obj.title = str(data["title"]) if "title" in data else None
-        obj.notes = str(data["notes"]) if "notes" in data else None
-
-        obj.login = str(data["login"]) if "login" in data else None
-        obj.password = str(data["password"]) if "password" in data else None
-        obj.url = str(data["url"]) if "url" in data else None
-        obj.oneTimeCode = str(data["otp"]) if "otp" in data else None
-        obj.attachments = PamAttachmentsObject.load(data.get("attachments", None))
-
-        return obj
-
-    def create_record(self, params, folder_uid):
-        args = {
-            "force": True,
-            "folder": folder_uid,
-            "record_type": self.type
-        }
-        if self.uid: args["record_uid"] = self.uid
-        if self.title: args["title"] = self.title
-        if self.notes: args["notes"] = self.notes
-
-        fields = []
-        if self.login: fields.append(f"f.login={self.login}")
-        if self.password: fields.append(f"f.password={self.password}")
-        if self.url: fields.append(f"f.url={self.url}")
-        if self.oneTimeCode: fields.append(f"f.oneTimeCode={self.oneTimeCode}")
-
-        files = self.attachments.attachments if self.attachments and isinstance(self.attachments, PamAttachmentsObject) else []
-        if files and isinstance(files, list):
-            for x in files:
-                if x and isinstance(x, PamAttachmentObject) and x.file:
-                    fields.append(f"file=@{x.file}")
-
-        if fields: args["fields"] = fields
-        uid = RecordEditAddCommand().execute(params, **args)
-        if uid and isinstance(uid, str):
-            self.uid = uid
-        return uid
-
-class PamBaseMachineParser():
-    def __init__(self):
-        self.type = ""
-        self.title = None
-        self.notes = None
-        self.host = None
-        self.port = None
-        self.sslVerification = None
-        self.providerGroup = None
-        self.providerRegion = None
-        self.oneTimeCode = None
-        self.attachments = None
-        self.scripts = None
-        self.pam_settings : Optional[PamSettingsFieldData] = None
-
-        # pamMachine
-        self.operatingSystem = None
-        self.instanceName = None
-        self.instanceId = None
-        # Warning!
Unused, split into linked pamUser record - self.login = None - self.password = None - self.privatePEMKey = None - - # pamDatabase - self.useSSL = None - self.databaseId = None - self.databaseType = None # postgresql|postgresql-flexible|mysql|mysql-flexible|mariadb|mariadb-flexible|mssql|oracle|mongodb - - # pamDirectory - self.domainName = None - self.alternativeIPs = None - self.directoryId = None - self.directoryType = None # active_directory|openldap - self.userMatch = None - - @classmethod - def load(cls, record_type: str, data: Union[str, dict]): - pam_machine_types = ("pamMachine", "pamDatabase", "pamDirectory") - pam_db_types = ("postgresql", "postgresql-flexible", "mysql", "mysql-flexible", "mariadb", "mariadb-flexible", "mssql", "oracle", "mongodb") - pam_dir_types = ("active_directory", "openldap") - - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"""Record type "{record_type}" failed to load from: {str(data)[:80]}""") - if not isinstance(data, dict): return obj - - dtype = str(data.get("type", None)) - data_type = next((s for s in pam_machine_types if s.lower() == dtype.lower()), None) - rec_type = next((s for s in pam_machine_types if s.lower() == str(record_type).lower()), None) - if rec_type and data_type and rec_type != data_type: - logging.warning(f"""Expected machine record type "{rec_type}" != data record type "{data_type}" - Parsing as "{rec_type}" """) - if rec_type is None: - msg = f"""Unknown expected record type "{record_type}". """ - if data_type is None: - msg = msg + f"""Unknown data record type "{dtype}" - Parsing it as generic pamMachine.""" - else: - msg = msg + f"""Using data record type "{data_type}".""" - logging.error(f"""{msg} Expected record types "{pam_machine_types}" """) - - obj.type = rec_type or data_type or "pamMachine" - obj.title = str(data["title"]) if "title" in data else None - obj.notes = str(data["notes"]) if "notes" in data else None - obj.host = str(data["host"]) if "host" in data else None - obj.port = str(data["port"]) if "port" in data else None - obj.sslVerification = utils.value_to_boolean(data["ssl_verification"]) if "ssl_verification" in data else None - obj.providerGroup = str(data["provider_group"]) if "provider_group" in data else None - obj.providerRegion = str(data["provider_region"]) if "provider_region" in data else None - obj.oneTimeCode = str(data["otp"]) if "otp" in data else None - obj.attachments = PamAttachmentsObject.load(data.get("attachments", None)) - obj.scripts = PamScriptsObject.load(data.get("scripts", None)) - - psd = data.get("pam_settings", None) - if psd: - obj.pam_settings = PamSettingsFieldData.load(psd) - if not obj.pam_settings: - logging.error(f"""{obj.type}: failed to load PAM Settings from "{str(data)[:80]}" """) - - # pamMachine - obj.operatingSystem = str(data["operating_system"]) if "operating_system" in data else None - obj.instanceName = str(data["instance_name"]) if "instance_name" in data else None - obj.instanceId = str(data["instance_id"]) if "instance_id" in data else None - # Warning! 
Unused, split into linked pamUser record - obj.login = str(data["login"]) if "login" in data else None - obj.password = str(data["password"]) if "password" in data else None - obj.privatePEMKey = str(data["private_pem_key"]) if "private_pem_key" in data else None - - # pamDatabase - obj.useSSL = utils.value_to_boolean(data["use_ssl"]) if "use_ssl" in data else None - obj.databaseId = str(data["database_id"]) if "database_id" in data else None - - dbtype = str(data["database_type"]) if "database_type" in data else None - pamdbt = next((s for s in pam_db_types if s.lower() == str(dbtype).lower()), None) - if dbtype and not pamdbt: - logging.error(f"""Unexpected DB type "{dbtype}" - should be one of the known DB types "{pam_db_types}" """) - pamdbt = dbtype.lower() # use provided db type "as-is" - if not pamdbt and obj.type == "pamDatabase": - logging.debug(f"""pamDatabase - unable to determine DB type: database_type should be one of "{pam_db_types}" """) - obj.databaseType = pamdbt - - # pamDirectory - obj.domainName = str(data["domain_name"]) if "domain_name" in data else None - obj.alternativeIPs = multiline_to_str(parse_multiline(data, "alternative_ips", "Error parsing alternative_ips")) - obj.directoryId = str(data["directory_id"]) if "directory_id" in data else None - obj.userMatch = str(data["user_match"]) if "user_match" in data else None - - dt = str(data["directory_type"]) if "directory_type" in data else None - pamdt = next((s for s in pam_dir_types if s.lower() == str(dt).lower()), None) - if dt and not pamdt: - logging.error(f"""Unexpected Directory type "{dt}" - should be one of "{pam_dir_types}" """) - pamdt = dt.lower() # use provided directory type "as-is" - if not pamdt and obj.type == "pamDirectory": - logging.debug(f"""pamDirectory - unable to determine Directory type: directory_type should be one of "{pam_dir_types}" """) - obj.directoryType = pamdt # active_directory|openldap - - return obj - -class PamMachineObject(): - def __init__(self): - self.uid = "" - self.type = "pamMachine" - self.title = None - self.notes = None - self.host = None # pamHostname - self.port = None # pamHostname - self.sslVerification = None - self.operatingSystem = None - self.instanceName = None - self.instanceId = None - self.providerGroup = None - self.providerRegion = None - self.oneTimeCode = None - self.attachments = None # fileRef - self.scripts = None # script - - # Warning! unused - use users[] to link users, rotation scripts etc. 
- self.login = None - self.password = None - self.privatePEMKey = None - - self.pam_settings : Optional[PamSettingsFieldData] = None - self.users = None # List[PamUserObject] - one is admin(istrative credential) - - self.is_admin_external: bool = False # (True<=>found:pamDirectory#Title.pamUser#Title) - self.administrative_credentials_uid: str = "" # external or internal user UID - - @classmethod - def load(cls, data: Union[str, dict], rotation_params: Optional[PamRotationParams] = None): - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"""Record type "pamMachine" failed to load from: {str(data)[:80]}""") - if not isinstance(data, dict): return obj - - bmp = PamBaseMachineParser.load("pamMachine", data) - - if bmp and bmp.type.lower() != "pamMachine".lower(): - logging.warning(f"""PAM Machine data using wrong type "pamMachine" != "{bmp.type}" """) - - obj.type = "pamMachine" - obj.title = bmp.title - obj.notes = bmp.notes - obj.host = bmp.host - obj.port = bmp.port - obj.sslVerification = bmp.sslVerification - obj.operatingSystem = bmp.operatingSystem - obj.instanceName = bmp.instanceName - obj.instanceId = bmp.instanceId - obj.providerGroup = bmp.providerGroup - obj.providerRegion = bmp.providerRegion - obj.oneTimeCode = bmp.oneTimeCode - obj.attachments = bmp.attachments - obj.scripts = bmp.scripts - obj.pam_settings = bmp.pam_settings - - # Warning! unused - use users[] to link users, rotation scripts etc. - obj.login = bmp.login - obj.password = bmp.password - obj.privatePEMKey = bmp.privatePEMKey - - if (obj.title is None or not obj.title.strip()) and obj.login and obj.login.strip(): - obj.title = f"PAM Machine - {str(obj.login).strip()}" - if rotation_params: - rotation_params.ownerTitle = obj.title or "" - - obj.users = [] - users = data.get("users", None) - if users: - for user in users: - rt = str(user.get("type", "")) if isinstance(user, dict) else "" - if not rt: rt = "pamUser" # pamMachine user list is pamUser recs only - if rt.lower() != "pamUser".lower(): - logging.error(f"""{obj.title}:{obj.type}.users[] Expected record type pamUser, got "{rt}" - skipped.""") - continue - usr = PamUserObject.load(user, rotation_params) - if usr: - obj.users.append(usr) - else: - logging.warning(f"""Warning: PAM Machine "{obj.title}" with empty users section.""") - - obj.validate_record() - - return obj - - def create_record(self, params, folder_uid): - args = { - "force": True, - "folder": folder_uid, - "record_type": self.type - } - if self.uid: args["record_uid"] = self.uid - if self.title: args["title"] = self.title - if self.notes: args["notes"] = self.notes - - fields = [] - hostname = self.host.strip() if isinstance(self.host, str) and self.host.strip() else "" - port = self.port.strip() if isinstance(self.port, str) and self.port.strip() else "" - if hostname or port: - val = json.dumps({"hostName": hostname, "port": port}) - fields.append(f"f.pamHostname=$JSON:{val}") - - sslv = utils.value_to_boolean(self.sslVerification) - if sslv is not None: fields.append(f"checkbox.sslVerification={str(sslv).lower()}") - if self.operatingSystem: fields.append(f"f.text.operatingSystem={self.operatingSystem}") - if self.instanceName: fields.append(f"f.text.instanceName={self.instanceName}") - if self.instanceId: fields.append(f"f.text.instanceId={self.instanceId}") - if self.providerGroup: fields.append(f"f.text.providerGroup={self.providerGroup}") - if self.providerRegion: fields.append(f"f.text.providerRegion={self.providerRegion}") - - # 
Warning! unused - use users[] to link users, rotation scripts etc. - # if self.login: fields.append(f"f.login={self.login}") - # if self.password: fields.append(f"f.password={self.password}") - # if self.privatePEMKey: fields.append(f"f.secret.privatePEMKey={self.privatePEMKey}") - - if self.oneTimeCode: fields.append(f"f.oneTimeCode={self.oneTimeCode}") - - files = self.attachments.attachments if self.attachments and isinstance(self.attachments, PamAttachmentsObject) else [] - if files and isinstance(files, list): - for x in files: - if x and isinstance(x, PamAttachmentObject) and x.file: - fields.append(f"file=@{x.file}") - - # pam_settings port_forward/connection belong to the record - if self.pam_settings and isinstance(self.pam_settings, PamSettingsFieldData): - allowSupplyHost = True if self.pam_settings.allowSupplyHost is True else False - portForward = self.pam_settings.portForward.to_record_dict() if self.pam_settings.portForward else {} - connection = self.pam_settings.connection.to_record_dict() if self.pam_settings.connection else {} - if portForward or connection or allowSupplyHost: - val = json.dumps({"allowSupplyHost": allowSupplyHost, "portForward": portForward or {}, "connection": connection or {}}) - fields.append(f"c.pamSettings=$JSON:{val}") - # switch to f.* once RT definition(s) update w/ pamSettings field - - if fields: args["fields"] = fields - uid = RecordEditAddCommand().execute(params, **args) - if uid and isinstance(uid, str): - self.uid = uid - - # after record creation add PAM scripts - if uid and self.scripts and self.scripts.scripts: - add_pam_scripts(params, uid, self.scripts.scripts) - - # DAG: after record creation - self.scripts, self.pam_settings.options - return uid - - def validate_record(self): - # Warning! unused - use users[] to link users, rotation scripts etc. - if self.login or self.password or self.privatePEMKey: - logging.warning(f"""PAM Machine "{self.title}" detected legacy format - """ - "please create separate pamUser record with login, password, privatePEMKey") - if not (self.host or self.port): - logging.warning(f"""PAM Machine "{self.title}" is missing required field `pamHostname` data (host/port)""") - errmsg = validate_pam_connection(self.pam_settings.connection, "pamMachine") if self.pam_settings else "" - if errmsg: - logging.warning(f"""PAM Machine "{self.title}" has incorrect connection setup: {errmsg}""") - -def validate_pam_connection(connection, record_type): - errmsg = "" - if connection: - # Apparently all machine types allow connections using ANY protocol - # ex. 
pamDatabase allowing SSH/RDP or pamMachine allowing proto: mysql - # known_mach_types = [ConnectionSettingsRDP, ConnectionSettingsVNC, ConnectionSettingsTelnet, ConnectionSettingsSSH, ConnectionSettingsKubernetes] - # known_db_types = [ConnectionSettingsSqlServer, ConnectionSettingsPostgreSQL, ConnectionSettingsMySQL] - - known_conn_types = PamSettingsFieldData.pam_connection_classes + [ConnectionSettingsHTTP] - known_mach_types = PamSettingsFieldData.pam_connection_classes - known_db_types = known_mach_types - known_rbi_types = [ConnectionSettingsHTTP] - - # known_conn_proto = [x.protocol.value.lower() for x in known_conn_types] # pylint: disable=E1101 - known_mach_proto = [x.protocol.value.lower() for x in known_mach_types] # pylint: disable=E1101 - known_db_proto = [x.protocol.value.lower() for x in known_db_types] # pylint: disable=E1101 - known_rbi_proto = [x.protocol.value.lower() for x in known_rbi_types] # pylint: disable=E1101 - - rt = str(record_type).lower().strip() - if type(connection) not in known_conn_types: - errmsg = f"""PAM Connection of unknown type "{type(connection).__name__}" """ - elif rt == "pamMachine".lower(): - if type(connection) not in known_mach_types: - errmsg = f"""PAM Connection of type "{type(connection).__name__}" is incompatible with "{record_type}" """ - if (isinstance(getattr(connection, "protocol", ""), ConnectionProtocol) and - connection.protocol.value.lower() not in known_mach_proto): - errmsg = errmsg + f""" Unexpected PAM Machine connection protocol "{connection.protocol.value}" """ - elif rt == "pamDatabase".lower(): - if type(connection) not in known_db_types: - errmsg = f"""PAM Connection of type "{type(connection).__name__}" is incompatible with "{record_type}" """ - if (isinstance(getattr(connection, "protocol", ""), ConnectionProtocol) and - connection.protocol.value.lower() not in known_db_proto): - errmsg = errmsg + f""" Unexpected PAM Database connection protocol "{connection.protocol.value}" """ - elif rt == "pamDirectory".lower(): - if type(connection) not in known_mach_types: - errmsg = f"""PAM Connection of type "{type(connection).__name__}" is incompatible with "{record_type}" """ - if (isinstance(getattr(connection, "protocol", ""), ConnectionProtocol) and - connection.protocol.value.lower() not in known_mach_proto): - errmsg = errmsg + f""" Unexpected PAM Directory connection protocol "{connection.protocol.value}" """ - elif rt == "pamRemoteBrowser".lower(): - if type(connection) not in known_rbi_types: - errmsg = f"""PAM Connection of type "{type(connection).__name__}" is incompatible with "{record_type}" """ - if (isinstance(getattr(connection, "protocol", ""), ConnectionProtocol) and - connection.protocol.value.lower() not in known_rbi_proto): - errmsg = errmsg + f""" Unexpected PAM Remote Browser connection protocol "{connection.protocol.value}" """ - if errmsg: - logging.debug(errmsg) - return errmsg - - -class PamDatabaseObject(): - def __init__(self): - self.uid = "" - self.type = "pamDatabase" - self.title = None - self.notes = None - self.host = None # pamHostname - self.port = None # pamHostname - self.useSSL = None - self.databaseId = None - self.databaseType = None - self.providerGroup = None - self.providerRegion = None - self.oneTimeCode = None - self.attachments = None # fileRef - self.scripts = None # script - - self.trafficEncryptionSeed = None - self.pam_settings : Optional[PamSettingsFieldData] = None - self.users = None # List[PamUserObject] - one is admin(istrative credential) - - @classmethod - def load(cls, 
data: Union[str, dict], rotation_params: Optional[PamRotationParams] = None): - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"""Record type "pamDatabase" failed to load from: {str(data)[:80]}""") - if not isinstance(data, dict): return obj - - bmp = PamBaseMachineParser.load("pamDatabase", data) - - if bmp and bmp.type.lower() != "pamDatabase".lower(): - logging.warning(f"""PAM Database data using wrong type "pamDatabase" != "{bmp.type}" """) - - obj.type = "pamDatabase" - obj.title = bmp.title - obj.notes = bmp.notes - obj.host = bmp.host - obj.port = bmp.port - obj.useSSL = bmp.useSSL - obj.databaseId = bmp.databaseId - obj.databaseType = bmp.databaseType - obj.providerGroup = bmp.providerGroup - obj.providerRegion = bmp.providerRegion - obj.oneTimeCode = bmp.oneTimeCode - obj.attachments = bmp.attachments - obj.scripts = bmp.scripts - obj.pam_settings = bmp.pam_settings - - if (obj.title is None or not obj.title.strip()) and obj.databaseId and obj.databaseId.strip(): - obj.title = f"PAM Database - {str(obj.databaseId).strip()}" - if rotation_params: - rotation_params.ownerTitle = obj.title or "" - - obj.users = [] - users = data.get("users", None) - if users: - for user in users: - rt = str(user.get("type", "")) if isinstance(user, dict) else "" - if not rt: rt = "pamUser" # pamDatabase user list is pamUser recs only - if rt.lower() != "pamUser".lower(): - logging.error(f"""{obj.title}:{obj.type}.users[] Expected record type pamUser, got "{rt}" - skipped.""") - continue - usr = PamUserObject.load(user, rotation_params) - if usr: - obj.users.append(usr) - else: - logging.warning(f"""Warning: PAM Database "{obj.title}" with empty users section.""") - - obj.validate_record() - - return obj - - def create_record(self, params, folder_uid): - args = { - "force": True, - "folder": folder_uid, - "record_type": self.type - } - if self.uid: args["record_uid"] = self.uid - if self.title: args["title"] = self.title - if self.notes: args["notes"] = self.notes - - fields = [] - hostname = self.host.strip() if isinstance(self.host, str) and self.host.strip() else "" - port = self.port.strip() if isinstance(self.port, str) and self.port.strip() else "" - if hostname or port: - val = json.dumps({"hostName": hostname, "port": port}) - fields.append(f"f.pamHostname=$JSON:{val}") - - ssl = utils.value_to_boolean(self.useSSL) - if ssl is not None: fields.append(f"f.checkbox.useSSL={str(ssl).lower()}") - if self.databaseId: fields.append(f"f.text.databaseId={self.databaseId}") - if self.databaseType: fields.append(f"f.databaseType={self.databaseType}") - if self.providerGroup: fields.append(f"f.text.providerGroup={self.providerGroup}") - if self.providerRegion: fields.append(f"f.text.providerRegion={self.providerRegion}") - - if self.oneTimeCode: fields.append(f"f.oneTimeCode={self.oneTimeCode}") - - files = self.attachments.attachments if self.attachments and isinstance(self.attachments, PamAttachmentsObject) else [] - if files and isinstance(files, list): - for x in files: - if x and isinstance(x, PamAttachmentObject) and x.file: - fields.append(f"file=@{x.file}") - - # pam_settings port_forward/connection belong to the record - if self.pam_settings and isinstance(self.pam_settings, PamSettingsFieldData): - allowSupplyHost = True if self.pam_settings.allowSupplyHost is True else False - portForward = self.pam_settings.portForward.to_record_dict() if self.pam_settings.portForward else {} - connection = self.pam_settings.connection.to_record_dict() if 
self.pam_settings.connection else {}
-            if portForward or connection or allowSupplyHost:
-                val = json.dumps({"allowSupplyHost": allowSupplyHost, "portForward": portForward or {}, "connection": connection or {}})
-                fields.append(f"c.pamSettings=$JSON:{val}")
-                # switch to f.* once RT definition(s) update w/ pamSettings field
-
-        if fields: args["fields"] = fields
-        uid = RecordEditAddCommand().execute(params, **args)
-        if uid and isinstance(uid, str):
-            self.uid = uid
-
-        # after record creation add PAM scripts
-        if uid and self.scripts and self.scripts.scripts:
-            add_pam_scripts(params, uid, self.scripts.scripts)
-
-        # DAG: after record creation - self.scripts, self.pam_settings.options
-        return uid
-
-    def validate_record(self):
-        if not (self.host or self.port):
-            logging.warning(f"""PAM Database "{self.title}" is missing required field `pamHostname` data (host/port)""")
-        errmsg = validate_pam_connection(self.pam_settings.connection, "pamDatabase") if self.pam_settings else ""
-        if errmsg:
-            logging.warning(f"""PAM Database "{self.title}" has incorrect connection setup: {errmsg}""")
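# --- Illustrative sketch (annotation, not part of the original file) ---------
# How create_record() above packs PAM settings into a single custom field:
# the settings dict is JSON-encoded and passed with the "$JSON:" prefix so the
# CLI parses the value as structured data, and "c." marks a custom field until
# the record-type definitions gain a native pamSettings field. The settings
# values and helper names below are hypothetical.
import json

port_forward = {}                                  # placeholder portForward settings
connection = {"protocol": "ssh", "port": "2222"}   # placeholder connection settings

val = json.dumps({"allowSupplyHost": False, "portForward": port_forward, "connection": connection})
print(f"c.pamSettings=$JSON:{val}")
# ------------------------------------------------------------------------------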
-
-class PamDirectoryObject():
-    def __init__(self):
-        self.uid = ""
-        self.type = "pamDirectory"
-        self.title = None
-        self.notes = None
-        self.host = None # pamHostname
-        self.port = None # pamHostname
-        self.useSSL = None
-        self.domainName = None
-        self.alternativeIPs = None
-        self.directoryId = None
-        self.directoryType = None # active_directory|openldap
-        self.userMatch = None
-        self.providerGroup = None
-        self.providerRegion = None
-        self.oneTimeCode = None
-        self.attachments = None # fileRef
-        self.scripts = None # script
-
-        self.pam_settings : Optional[PamSettingsFieldData] = None
-        self.users = None # List[PamUserObject] - one is admin(istrative credential)
-
-    @classmethod
-    def load(cls, data: Union[str, dict], rotation_params: Optional[PamRotationParams] = None):
-        obj = cls()
-        try: data = json.loads(data) if isinstance(data, str) else data
-        except: logging.error(f"""Record type "pamDirectory" failed to load from: {str(data)[:80]}""")
-        if not isinstance(data, dict): return obj
-
-        bmp = PamBaseMachineParser.load("pamDirectory", data)
-
-        if bmp and bmp.type.lower() != "pamDirectory".lower():
-            logging.warning(f"""PAM Directory data using wrong type "pamDirectory" != "{bmp.type}" """)
-
-        obj.type = "pamDirectory"
-        obj.title = bmp.title
-        obj.notes = bmp.notes
-        obj.host = bmp.host
-        obj.port = bmp.port
-        obj.useSSL = bmp.useSSL
-        obj.domainName = bmp.domainName
-        obj.alternativeIPs = bmp.alternativeIPs
-        obj.directoryId = bmp.directoryId
-        obj.directoryType = bmp.directoryType
-        obj.userMatch = bmp.userMatch
-        obj.providerGroup = bmp.providerGroup
-        obj.providerRegion = bmp.providerRegion
-        obj.oneTimeCode = bmp.oneTimeCode
-        obj.attachments = bmp.attachments
-        obj.scripts = bmp.scripts
-        obj.pam_settings = bmp.pam_settings
-
-        if (obj.title is None or not obj.title.strip()) and obj.domainName and obj.domainName.strip():
-            obj.title = f"PAM Directory - {str(obj.domainName).strip()}"
-        if rotation_params:
-            rotation_params.ownerTitle = obj.title or ""
-
-        obj.users = []
-        users = data.get("users", None)
-        if users:
-            for user in users:
-                rt = str(user.get("type", "")) if isinstance(user, dict) else ""
-                if not rt: rt = "pamUser" # pamDirectory user list is pamUser recs only
-                if rt.lower() != "pamUser".lower():
-                    logging.error(f"""{obj.title}:{obj.type}.users[] Expected record type pamUser, got "{rt}" - skipped.""")
-                    continue
-                usr = PamUserObject.load(user, rotation_params)
-                if usr:
-                    obj.users.append(usr)
-        else:
-            logging.warning(f"""Warning: PAM Directory "{obj.title}" with empty users section.""")
-
-        obj.validate_record()
-
-        return obj
-
-    def create_record(self, params, folder_uid):
-        args = {
-            "force": True,
-            "folder": folder_uid,
-            "record_type": self.type
-        }
-        if self.uid: args["record_uid"] = self.uid
-        if self.title: args["title"] = self.title
-        if self.notes: args["notes"] = self.notes
-
-        fields = []
-        hostname = self.host.strip() if isinstance(self.host, str) and self.host.strip() else ""
-        port = self.port.strip() if isinstance(self.port, str) and self.port.strip() else ""
-        if hostname or port:
-            val = json.dumps({"hostName": hostname, "port": port})
-            fields.append(f"f.pamHostname=$JSON:{val}")
-
-        ssl = utils.value_to_boolean(self.useSSL)
-        if ssl is not None: fields.append(f"f.checkbox.useSSL={str(ssl).lower()}")
-        if self.domainName: fields.append(f"f.text.domainName={self.domainName}")
-        if self.alternativeIPs: fields.append(f"f.multiline.alternativeIPs={self.alternativeIPs}")
-        if self.directoryId: fields.append(f"f.text.directoryId={self.directoryId}")
-        if self.directoryType: fields.append(f"f.directoryType={self.directoryType}")
-        if self.userMatch: fields.append(f"f.text.userMatch={self.userMatch}")
-        if self.providerGroup: fields.append(f"f.text.providerGroup={self.providerGroup}")
-        if self.providerRegion: fields.append(f"f.text.providerRegion={self.providerRegion}")
-
-        if self.oneTimeCode: fields.append(f"f.oneTimeCode={self.oneTimeCode}")
-
-        files = self.attachments.attachments if self.attachments and isinstance(self.attachments, PamAttachmentsObject) else []
-        if files and isinstance(files, list):
-            for x in files:
-                if x and isinstance(x, PamAttachmentObject) and x.file:
-                    fields.append(f"file=@{x.file}")
-
-        # pam_settings port_forward/connection belong to the record
-        if self.pam_settings and isinstance(self.pam_settings, PamSettingsFieldData):
-            allowSupplyHost = True if self.pam_settings.allowSupplyHost is True else False
-            portForward = self.pam_settings.portForward.to_record_dict() if self.pam_settings.portForward else {}
-            connection = self.pam_settings.connection.to_record_dict() if self.pam_settings.connection else {}
-            if portForward or connection or allowSupplyHost:
-                val = json.dumps({"allowSupplyHost": allowSupplyHost, "portForward": portForward or {}, "connection": connection or {}})
-                fields.append(f"c.pamSettings=$JSON:{val}")
-                # switch to f.* once RT definition(s) update w/ pamSettings field
-
-        if fields: args["fields"] = fields
-        uid = RecordEditAddCommand().execute(params, **args)
-        if uid and isinstance(uid, str):
-            self.uid = uid
-
-        # after record creation add PAM scripts
-        if uid and self.scripts and self.scripts.scripts:
-            add_pam_scripts(params, uid, self.scripts.scripts)
-
-        # DAG: after record creation - self.scripts, self.pam_settings.options
-        return uid
-
-    def validate_record(self):
-        if not (self.host or self.port):
-            logging.warning(f"""PAM Directory "{self.title}" is missing required field `pamHostname` data (host/port)""")
-        errmsg = validate_pam_connection(self.pam_settings.connection, "pamDirectory") if self.pam_settings else ""
-        if errmsg:
-            logging.warning(f"""PAM Directory "{self.title}" has incorrect connection setup: {errmsg}""")
-
-class PamRemoteBrowserObject():
-    def __init__(self):
-        self.uid = ""
-        self.type = "pamRemoteBrowser"
-        self.title = None
-        self.notes = None
-        self.rbiUrl = None
-        self.oneTimeCode = None
-        self.attachments = None # fileRef
-
-        self.rbi_settings : Optional[PamRemoteBrowserSettings] =
None # ft: pamRemoteBrowserSettings - # Use httpCredentialsUid <- resolved from autofill_credentials (ref rt:Login in pam_data.users[]) - - @classmethod - def load(cls, data: Union[str, dict], rotation_params: Optional[PamRotationParams] = None): - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"""Record type "pamRemoteBrowser" failed to load from: {str(data)[:80]}""") - if not isinstance(data, dict): return obj - - dtype = data.get("type", None) - if dtype and str(dtype).lower() != "pamRemoteBrowser".lower(): - logging.warning(f"""PAM RBI data using wrong type "pamRemoteBrowser" != "{dtype}" """) - - obj.type = "pamRemoteBrowser" - obj.title = str(data["title"]) if "title" in data else None - obj.notes = str(data["notes"]) if "notes" in data else None - obj.rbiUrl = str(data["url"]) if "url" in data else None - obj.oneTimeCode = str(data["otp"]) if "otp" in data else None - obj.attachments = PamAttachmentsObject.load(data.get("attachments", None)) - - psd = data.get("pam_settings", None) - rbi_settings = PamRemoteBrowserSettings.load(psd) - obj.rbi_settings = None if is_empty_instance(rbi_settings) else rbi_settings - if psd and not obj.rbi_settings: - logging.error(f"""{obj.type}: failed to load RBI Settings from "{str(psd)[:80]}" """) - - if (obj.title is None or not obj.title.strip()) and obj.rbiUrl and str(obj.rbiUrl).strip(): - hostname = str(obj.rbiUrl).lower() - hostname = re.sub(r"^\s*https?://", "", hostname, flags=re.IGNORECASE) - hostname = hostname.split("/", 1)[0] - if hostname: - obj.title = f"PAM RBI - {hostname}" - - obj.validate_record() - - return obj - - def create_record(self, params, folder_uid): - args = { - "force": True, - "folder": folder_uid, - "record_type": self.type - } - if self.uid: args["record_uid"] = self.uid - if self.title: args["title"] = self.title - if self.notes: args["notes"] = self.notes - - fields = [] - if self.rbiUrl: fields.append(f"rbiUrl={self.rbiUrl}") - - if self.oneTimeCode: fields.append(f"oneTimeCode={self.oneTimeCode}") - - files = self.attachments.attachments if self.attachments and isinstance(self.attachments, PamAttachmentsObject) else [] - if files and isinstance(files, list): - for x in files: - if x and isinstance(x, PamAttachmentObject) and x.file: - fields.append(f"file=@{x.file}") - - # pam_settings connection belongs to the record - connection = {} - if self.rbi_settings and isinstance(self.rbi_settings, PamRemoteBrowserSettings): - if self.rbi_settings.connection: - connection = self.rbi_settings.connection.to_record_dict() - if connection: - val = json.dumps({"connection": connection or {}}) - fields.append(f"pamRemoteBrowserSettings=$JSON:{val}") - # switch to f.* once RT definition(s) update w/ pamRemoteBrowserSettings field - - if fields: args["fields"] = fields - uid = RecordEditAddCommand().execute(params, **args) - if uid and isinstance(uid, str): - self.uid = uid - - # DAG: after record creation - self.pam_settings.options - return uid - - def validate_record(self): - errmsg = validate_pam_connection(self.rbi_settings.connection, "pamRemoteBrowser") if self.rbi_settings else "" - if errmsg: - logging.warning(f"""PAM RBI "{self.title}" has incorrect connection setup: {errmsg}""") - -# PAM Settings field data -FONT_SIZES = (8, 9, 10, 11, 12, 14, 18, 24, 30, 36, 48, 60, 72, 96) -class ConnectionProtocol(Enum): - RDP = "rdp" - VNC = "vnc" - TELNET = "telnet" - SSH = "ssh" - KUBERNETES = "kubernetes" - SQLSERVER = "sql-server" - POSTGRESQL = "postgresql" - MYSQL = 
"mysql" - HTTP = "http" - -class RDPSecurity(Enum): - ANY = "any" - NLA = "nla" - TLS = "tls" - VMCONNECT = "vmconnect" - RDP = "rdp" - - @classmethod - def map(cls, rdp_security: str): - try: return cls(str(rdp_security).lower()) - except ValueError: return None - -class TerminalThemes(Enum): - BLACK_WHITE = "black-white" # Black on white - GRAY_BLACK = "gray-black" # Gray on black - GREEN_BLACK = "green-black" # Green on black - WHITE_BLACK = "white-black" # White on black - CUSTOM = "custom" # Not a valid value to send to guac - # example custom color scheme: - # "colorScheme": "background: rgb:00/3D/FC;\nforeground: rgb:74/1A/1A;\ncolor0: rgb:00/00/00;\ncolor1: rgb:99/3E/3E;\ncolor2: rgb:3E/99/3E;\ncolor3: rgb:99/99/3E;\ncolor4: rgb:3E/3E/99;\ncolor5: rgb:99/3E/99;\ncolor6: rgb:3E/99/99;\ncolor7: rgb:99/99/99;\ncolor8: rgb:3E/3E/3E;\ncolor9: rgb:FF/67/67;\ncolor10: rgb:67/FF/67;\ncolor11: rgb:FF/FF/67;\ncolor12: rgb:67/67/FF;\ncolor13: rgb:FF/67/FF;\ncolor14: rgb:67/FF/FF;\ncolor15: rgb:FF/FF/FF;" - - @classmethod - def map(cls, tty_theme: str): - try: return cls(str(tty_theme).lower()) - except ValueError: return None - -def parse_multiline(data: dict, key: str, message: str = "") -> Optional[List[str]]: - if data and isinstance(data, dict) and key and isinstance(key, str): - val = data.get(key, None) # "multiline": ["line1" "line2"] - if isinstance(val, str): val = [val] # allow for "multiline": "line1" - if val and isinstance(val, list): - if any(not isinstance(x, str) or x == "" for x in val): - logging.warning(f"{message} - value: {val[:24]}" if (isinstance(message, str) and message != "") - else "Error parsing multiline value (skipped): "\ - f"found empty or non string values - value: {val[:24]}") - else: - return val - return None - -def multiline_to_str(lines: Optional[List[str]]) -> Optional[str]: - if lines and isinstance(lines, list): - return "\n".join(lines) - return None - -def multiline_stringify(lines: Optional[List[str]]) -> Optional[str]: - if lines and isinstance(lines, list): - # nb! strip() may remove more quotes esp. 
at end of string - val = json.dumps("\n".join(lines)) - if val and val.startswith("\"") and val.endswith("\""): - val = val[1:-1] - return val - return None - -def parse_dag_option(option: Optional[str]) -> Optional[str]: - key = str(option).lower() - if key in ("on", "off", "default"): - return key - return None - -class ClipboardConnectionSettings: - def __init__(self, disableCopy: Optional[bool] = None, disablePaste: Optional[bool] = None): - self.disableCopy = disableCopy - self.disablePaste = disablePaste - - @classmethod - def load(cls, data: Union[str, dict]): - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"Clipboard Connection Settings failed to load from: {str(data)[:80]}") - if not isinstance(data, dict): return obj - - obj.disableCopy = utils.value_to_boolean(data.get("disable_copy", None)) - obj.disablePaste = utils.value_to_boolean(data.get("disable_paste", None)) - return obj - -def clipboard_connection_settings(connection_settings: Union[PamConnectionSettings, ConnectionSettingsHTTP]) -> Optional[ClipboardConnectionSettings]: - if connection_settings and connection_settings.protocol and connection_settings.protocol in ( - ConnectionProtocol.RDP, - ConnectionProtocol.VNC, - ConnectionProtocol.TELNET, - ConnectionProtocol.SSH, - ConnectionProtocol.SQLSERVER, - ConnectionProtocol.MYSQL, - ConnectionProtocol.POSTGRESQL, - ConnectionProtocol.HTTP - ): - disableCopy = getattr(connection_settings, "disableCopy", None) - disablePaste = getattr(connection_settings, "disablePaste", None) - return ClipboardConnectionSettings(disableCopy, disablePaste) - -class SFTPRootDirectorySettings: - def __init__(self, enableSftp: Optional[bool] = None, sftpRootDirectory: Optional[str] = None): - self.enableSftp = enableSftp - self.sftpRootDirectory = sftpRootDirectory - - @classmethod - def load(cls, data: Union[str, dict]): - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"SFTP Root Directory Settings failed to load from: {str(data)[:80]}") - if not isinstance(data, dict): return obj - - obj.enableSftp = utils.value_to_boolean(data.get("enable_sftp", None)) - val = data.get("sftp_root_directory", None) - if isinstance(val, str): obj.sftpRootDirectory = val - return obj - - def to_dict(self): - dict: Dict[str, Any] = {} - if self.enableSftp is not None and isinstance(self.enableSftp, bool): - dict["enableSftp"] = self.enableSftp - if self.sftpRootDirectory and isinstance(self.sftpRootDirectory, str) and self.sftpRootDirectory.strip(): - dict["sftpRootDirectory"] = self.sftpRootDirectory.strip() - - return dict - -class SFTPConnectionSettings(SFTPRootDirectorySettings): - def __init__( - self, - enableSftp: Optional[bool] = None, - sftpRootDirectory: Optional[str] = None, - sftpResource: Optional[List[str]] = None, - sftpUser: Optional[List[str]] = None, - sftpDirectory: Optional[str] = None, - sftpServerAliveInterval: Optional[int] = None - ): - super().__init__(enableSftp, sftpRootDirectory) - self.sftpResource = sftpResource - self.sftpUser = sftpUser - self.sftpDirectory = sftpDirectory - self.sftpServerAliveInterval = sftpServerAliveInterval - self.sftpResourceUid = None # resolve from sftpResource - self.sftpUserUid = None # resolve from sftpUser - - @classmethod - def load(cls, data: Union[str, dict]): - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"SFTP Connection Settings failed to load from: {str(data)[:80]}") - if not 
isinstance(data, dict): return obj - - rds = SFTPRootDirectorySettings.load(data) - if rds: - obj.enableSftp = rds.enableSftp - obj.sftpRootDirectory = rds.sftpRootDirectory - - # which is the resource record (not yet in web UI) - obj.sftpResource = parse_multiline(data, "sftp_resource", "Error parsing sftp_resource") - obj.sftpUser = parse_multiline(data, "sftp_user_credentials", "Error parsing sftp_user_credentials") - val = data.get("sftp_upload_directory", None) - if isinstance(val, str): obj.sftpDirectory = val - val = data.get("sftp_keepalive_interval", None) - if type(val) is int: obj.sftpServerAliveInterval = abs(val) - elif val and str(val).isdecimal(): obj.sftpServerAliveInterval = int(val) - - return obj - - def to_dict(self): - dict: Dict[str, Any] = {} - if self.sftpRootDirectory and isinstance(self.sftpRootDirectory, str) and self.sftpRootDirectory.strip(): - dict["sftpRootDirectory"] = self.sftpRootDirectory.strip() - if self.enableSftp is not None and isinstance(self.enableSftp, bool): - dict["enableSftp"] = self.enableSftp - - # if resolved from sftpResource - if self.sftpResourceUid and isinstance(self.sftpResourceUid, str) and self.sftpResourceUid.strip(): - dict["sftpResourceUid"] = self.sftpResourceUid.strip() - # if resolved from sftpUser - if self.sftpUserUid and isinstance(self.sftpUserUid, str) and self.sftpUserUid.strip(): - dict["sftpUserUid"] = self.sftpUserUid.strip() - - if self.sftpDirectory and isinstance(self.sftpDirectory, str) and self.sftpDirectory.strip(): - dict["sftpDirectory"] = self.sftpDirectory.strip() - if self.sftpServerAliveInterval and type(self.sftpServerAliveInterval) is int and abs(self.sftpServerAliveInterval) > 0: - dict["sftpServerAliveInterval"] = abs(self.sftpServerAliveInterval) - - return dict - -def sftp_enabled(connection_settings: Union[PamConnectionSettings, ConnectionSettingsHTTP]) -> Optional[bool]: - if connection_settings and connection_settings.protocol and connection_settings.protocol in ( - ConnectionProtocol.RDP, - ConnectionProtocol.VNC, - ConnectionProtocol.SSH - ): - sftp = getattr(connection_settings, "sftp", None) - if sftp: - enabled = getattr(sftp, "enableSftp", None) - return enabled - -class TerminalDisplayConnectionSettings: - fontSizes: List[int] = [8,9,10,11,12,14,18,24,30,36,48,60,72,96] - def __init__(self, colorScheme: Optional[str] = None, fontSize: Optional[int] = None): - self.colorScheme = colorScheme - self.fontSize = fontSize - - @classmethod - def load(cls, data: Union[str, dict]): - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"Terminal Display Connection Settings failed to load from: {str(data)[:80]}") - if not isinstance(data, dict): return obj - - val = data.get("color_scheme", None) - if isinstance(val, str): obj.colorScheme = val - val = data.get("font_size", None) - if type(val) is int: obj.fontSize = val - elif val and str(val).isdecimal(): obj.fontSize = int(val) - if obj.fontSize and type(obj.fontSize) is int: - font_size: int = obj.fontSize - closest_number = min(obj.fontSizes, key=lambda x: abs(x - font_size)) - if closest_number != font_size: - logging.error(f"Terminal Display Connection Settings - adjusted invalid font_size from: {obj.fontSize} to: {closest_number}") - obj.fontSize = closest_number - return obj - -class BaseConnectionSettings: - def __init__(self, port: Optional[str] = None, allowSupplyUser: Optional[bool] = None, userRecords: Optional[List[str]] = None, recordingIncludeKeys: Optional[bool] = None): - self.port = 
port # Override port from host - self.allowSupplyUser = allowSupplyUser - self.recordingIncludeKeys = recordingIncludeKeys - self.userRecords = userRecords - self.userRecordUid = None # resolved from userRecords - - @classmethod - def load(cls, data: Union[str, dict]): - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"Base Connection Settings failed to load from: {str(data)[:80]}") - if not isinstance(data, dict): return obj - - val = data.get("port", None) # Override port from host - if isinstance(val, str) or str(val).isdecimal(): obj.port = str(val) - - obj.allowSupplyUser = utils.value_to_boolean(data.get("allow_supply_user", None)) - obj.userRecords = parse_multiline(data, "administrative_credentials", "Error parsing administrative_credentials") - obj.recordingIncludeKeys = utils.value_to_boolean(data.get("recording_include_keys", None)) - return obj - -class ConnectionSettingsRDP(BaseConnectionSettings, ClipboardConnectionSettings): - protocol = ConnectionProtocol.RDP - def __init__( - self, - port: Optional[str] = None, # Override port from host - allowSupplyUser: Optional[bool] = None, - userRecords: Optional[List[str]] = None, - recordingIncludeKeys: Optional[bool] = None, - disableCopy: Optional[bool] = None, - disablePaste: Optional[bool] = None, - security: Optional[RDPSecurity] = None, - disableAuth: Optional[bool] = None, - ignoreCert: Optional[bool] = None, - loadBalanceInfo: Optional[str] = None, - preconnectionId: Optional[str] = None, - preconnectionBlob: Optional[str] = None, - sftp: Optional[SFTPConnectionSettings] = None, - disableAudio: Optional[bool] = None, - resizeMethod: Optional[str] = None, - enableWallpaper: Optional[bool] = None, - enableFullWindowDrag: Optional[bool] = None - ): - BaseConnectionSettings.__init__(self, port, allowSupplyUser, userRecords, recordingIncludeKeys) - ClipboardConnectionSettings.__init__(self, disableCopy, disablePaste) - self.security = security if isinstance(security, RDPSecurity) else None - self.disableAuth = disableAuth - self.ignoreCert = ignoreCert - self.loadBalanceInfo = loadBalanceInfo - self.preconnectionId = preconnectionId - self.preconnectionBlob = preconnectionBlob - self.sftp = sftp if isinstance(sftp, SFTPConnectionSettings) else None - self.disableAudio = disableAudio - self.resizeMethod = resizeMethod # disable_dynamic_resizing ? 
"" : "display-update" - # Performance Properties - self.enableWallpaper = enableWallpaper - self.enableFullWindowDrag = enableFullWindowDrag - - @classmethod - def load(cls, data: Union[str, dict]): - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"Connection Settings RDP failed to load from: {str(data)[:80]}") - if not isinstance(data, dict): return obj - - bcs = BaseConnectionSettings.load(data) - if bcs: - obj.port = bcs.port - obj.allowSupplyUser = bcs.allowSupplyUser - obj.userRecords = bcs.userRecords - obj.recordingIncludeKeys = bcs.recordingIncludeKeys - - ccs = ClipboardConnectionSettings.load(data) - if ccs: - obj.disableCopy = ccs.disableCopy - obj.disablePaste = ccs.disablePaste - - val = data.get("security", None) - if isinstance(val, str): obj.security = RDPSecurity.map(val) - obj.disableAuth = utils.value_to_boolean(data.get("disable_authentication", None)) - obj.ignoreCert = utils.value_to_boolean(data.get("ignore_server_cert", None)) - - val = data.get("load_balance_info", None) - if isinstance(val, str): obj.loadBalanceInfo = val # LoadBalance Info/Cookie - val = data.get("preconnection_id", None) - if isinstance(val, str): obj.preconnectionId = val - val = data.get("preconnection_blob", None) - if isinstance(val, str): obj.preconnectionBlob = val - sftp = data.get("sftp", None) - if isinstance(sftp, dict): obj.sftp = SFTPConnectionSettings.load(sftp) - - obj.disableAudio = utils.value_to_boolean(data.get("disable_audio", None)) - obj.enableWallpaper = utils.value_to_boolean(data.get("enable_wallpaper", None)) - obj.enableFullWindowDrag = utils.value_to_boolean(data.get("enable_full_window_drag", None)) - - # disable_dynamic_resizing ? "" : "display-update" - val = utils.value_to_boolean(data.get("disable_dynamic_resizing", None)) - if val is not True: obj.resizeMethod = "display-update" - - return obj - - def to_record_dict(self): - kvp: Dict[str, Any] = { "protocol": ConnectionProtocol.RDP.value } # pylint: disable=E1101 - - # if resolved (userRecords->userRecordUid) from administrative_credentials (usually after user create) - recs: list = self.userRecordUid if self.userRecordUid and isinstance(self.userRecordUid, list) else [] - uids = [x.strip() for x in recs if isinstance(x, str) and x.strip() != ""] - if uids: - kvp["userRecords"] = uids - - if self.port and isinstance(self.port, str) and self.port.strip(): - kvp["port"] = self.port.strip() - if self.allowSupplyUser is not None and isinstance(self.allowSupplyUser, bool): - kvp["allowSupplyUser"] = self.allowSupplyUser - if self.recordingIncludeKeys is not None and isinstance(self.recordingIncludeKeys, bool): - kvp["recordingIncludeKeys"] = self.recordingIncludeKeys - if self.disableCopy is not None and isinstance(self.disableCopy, bool): - kvp["disableCopy"] = self.disableCopy - if self.disablePaste is not None and isinstance(self.disablePaste, bool): - kvp["disablePaste"] = self.disablePaste - if isinstance(self.security, RDPSecurity): - kvp["security"] = self.security.value.lower() - - if self.disableAuth is not None and isinstance(self.disableAuth, bool): - kvp["disableAuth"] = self.disableAuth - if self.ignoreCert is not None and isinstance(self.ignoreCert, bool): - kvp["ignoreCert"] = self.ignoreCert - - if self.loadBalanceInfo and isinstance(self.loadBalanceInfo, str) and self.loadBalanceInfo.strip(): - kvp["loadBalanceInfo"] = self.loadBalanceInfo.strip() - if self.preconnectionId and isinstance(self.preconnectionId, str) and 
self.preconnectionId.strip(): - kvp["preconnectionId"] = self.preconnectionId.strip() - if self.preconnectionBlob and isinstance(self.preconnectionBlob, str) and self.preconnectionBlob.strip(): - kvp["preconnectionBlob"] = self.preconnectionBlob.strip() - - if self.disableAudio is not None and isinstance(self.disableAudio, bool): - kvp["disableAudio"] = self.disableAudio - if self.enableFullWindowDrag is not None and isinstance(self.enableFullWindowDrag, bool): - kvp["enableFullWindowDrag"] = self.enableFullWindowDrag - if self.enableWallpaper is not None and isinstance(self.enableWallpaper, bool): - kvp["enableWallpaper"] = self.enableWallpaper - - # populated on load - "resizeMethod": disable_dynamic_resizing ? "" : "display-update" - if str(self.resizeMethod) == "display-update": - kvp["resizeMethod"] = self.resizeMethod - - if isinstance(self.sftp, SFTPConnectionSettings): - sftp = self.sftp.to_dict() - if sftp: - kvp["sftp"] = sftp - - return kvp - - def to_record_json(self): - dict = self.to_record_dict() or {} - rec_json = json.dumps(dict) - return rec_json - -# field type: pamRemoteBrowserSettings -class ConnectionSettingsHTTP(BaseConnectionSettings, ClipboardConnectionSettings): - protocol = ConnectionProtocol.HTTP - def __init__( - self, - port: Optional[str] = None, # Override port from host - allowSupplyUser: Optional[bool] = None, - userRecords: Optional[List[str]] = None, - recordingIncludeKeys: Optional[bool] = None, - disableCopy: Optional[bool] = None, - disablePaste: Optional[bool] = None, - allowUrlManipulation: Optional[bool] = None, - allowedUrlPatterns: Optional[str] = None, - allowedResourceUrlPatterns: Optional[str] = None, - httpCredentials: Optional[List[str]] = None, # autofill_credentials: login|pamUser - autofillConfiguration: Optional[str] = None, - ignoreInitialSslCert: Optional[bool] = None - ): - BaseConnectionSettings.__init__(self, port, allowSupplyUser, userRecords, recordingIncludeKeys) - ClipboardConnectionSettings.__init__(self, disableCopy, disablePaste) - self.allowUrlManipulation = allowUrlManipulation - self.allowedUrlPatterns = allowedUrlPatterns - self.allowedResourceUrlPatterns = allowedResourceUrlPatterns - self.httpCredentials = httpCredentials # autofill_credentials: login|pamUser - self.autofillConfiguration = autofillConfiguration - self.ignoreInitialSslCert = ignoreInitialSslCert - self.httpCredentialsUid = None # resolved from httpCredentials - - @classmethod - def load(cls, data: Union[str, dict]): - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"Connection Settings HTTP failed to load from: {str(data)[:80]}") - if not isinstance(data, dict): return obj - - bcs = BaseConnectionSettings.load(data) - if bcs: - # obj.port = bcs.port # not yet in web UI of RBI - obj.allowSupplyUser = bcs.allowSupplyUser - obj.userRecords = bcs.userRecords - obj.recordingIncludeKeys = bcs.recordingIncludeKeys - - ccs = ClipboardConnectionSettings.load(data) - if ccs: - obj.disableCopy = ccs.disableCopy - obj.disablePaste = ccs.disablePaste - - obj.allowUrlManipulation = utils.value_to_boolean(data.get("allow_url_manipulation", None)) - obj.allowedUrlPatterns = multiline_to_str(parse_multiline(data, "allowed_url_patterns", "Error parsing allowed_url_patterns")) - obj.allowedResourceUrlPatterns = multiline_to_str(parse_multiline(data, "allowed_resource_url_patterns", "Error parsing allowed_resource_url_patterns")) - obj.httpCredentials = parse_multiline(data, "autofill_credentials", "Error parsing 
autofill_credentials") - obj.autofillConfiguration = multiline_to_str(parse_multiline(data, "autofill_targets", "Error parsing autofill_targets")) - obj.ignoreInitialSslCert = utils.value_to_boolean(data.get("ignore_server_cert", None)) - - return obj - - def to_record_dict(self): - kvp: Dict[str, Any] = { "protocol": ConnectionProtocol.HTTP.value } # pylint: disable=E1101 - - # if resolved (autofill_credentials->httpCredentialsUid) login|pamUser - recs: list = self.httpCredentialsUid if self.httpCredentialsUid and isinstance(self.httpCredentialsUid, list) else [] - uids = [x.strip() for x in recs if isinstance(x, str) and x.strip() != ""] - if uids: - kvp["httpCredentialsUid"] = uids[0] # single credential - - # port - unused for RBI - # if self.port and isinstance(self.port, str) and self.port.strip(): - # kvp["port"] = self.port.strip() - if self.allowSupplyUser is not None and isinstance(self.allowSupplyUser, bool): - kvp["allowSupplyUser"] = self.allowSupplyUser - if self.recordingIncludeKeys is not None and isinstance(self.recordingIncludeKeys, bool): - kvp["recordingIncludeKeys"] = self.recordingIncludeKeys - if self.disableCopy is not None and isinstance(self.disableCopy, bool): - kvp["disableCopy"] = self.disableCopy - if self.disablePaste is not None and isinstance(self.disablePaste, bool): - kvp["disablePaste"] = self.disablePaste - - if self.allowUrlManipulation is not None and isinstance(self.allowUrlManipulation, bool): - kvp["allowUrlManipulation"] = self.allowUrlManipulation - if self.allowedUrlPatterns and isinstance(self.allowedUrlPatterns, str) and self.allowedUrlPatterns.strip(): - kvp["allowedUrlPatterns"] = self.allowedUrlPatterns.strip() - if self.allowedResourceUrlPatterns and isinstance(self.allowedResourceUrlPatterns, str) and self.allowedResourceUrlPatterns.strip(): - kvp["allowedResourceUrlPatterns"] = self.allowedResourceUrlPatterns.strip() - if self.autofillConfiguration and isinstance(self.autofillConfiguration, str) and self.autofillConfiguration.strip(): - kvp["autofillConfiguration"] = self.autofillConfiguration.strip() - if self.ignoreInitialSslCert is not None and isinstance(self.ignoreInitialSslCert, bool): - kvp["ignoreInitialSslCert"] = self.ignoreInitialSslCert - - return kvp - - def to_record_json(self): - dict = self.to_record_dict() or {} - rec_json = json.dumps(dict) - return rec_json - -class ConnectionSettingsVNC(BaseConnectionSettings, ClipboardConnectionSettings): - protocol = ConnectionProtocol.VNC - def __init__( # pylint: disable=R0917 - self, - port: Optional[str] = None, # Override port from host - allowSupplyUser: Optional[bool] = None, - userRecords: Optional[List[str]] = None, - recordingIncludeKeys: Optional[bool] = None, - disableCopy: Optional[bool] = None, - disablePaste: Optional[bool] = None, - destHost: Optional[str] = None, - destPort: Optional[str] = None, - sftp: Optional[SFTPConnectionSettings] = None - ): - BaseConnectionSettings.__init__(self, port, allowSupplyUser, userRecords, recordingIncludeKeys) - ClipboardConnectionSettings.__init__(self, disableCopy, disablePaste) - self.destHost = destHost - self.destPort = destPort - self.sftp = sftp if isinstance(sftp, SFTPConnectionSettings) else None - - @classmethod - def load(cls, data: Union[str, dict]): - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"Connection Settings VNC failed to load from: {str(data)[:80]}") - if not isinstance(data, dict): return obj - - bcs = BaseConnectionSettings.load(data) - if bcs: - 
obj.port = bcs.port - obj.allowSupplyUser = bcs.allowSupplyUser - obj.userRecords = bcs.userRecords - obj.recordingIncludeKeys = bcs.recordingIncludeKeys - - ccs = ClipboardConnectionSettings.load(data) - if ccs: - obj.disableCopy = ccs.disableCopy - obj.disablePaste = ccs.disablePaste - - val = data.get("destination_host", None) - if isinstance(val, str): obj.destHost = val - val = data.get("destination_port", None) - if isinstance(val, str): obj.destPort = val - - sftp = data.get("sftp", None) - if isinstance(sftp, dict): obj.sftp = SFTPConnectionSettings.load(sftp) - - return obj - - def to_record_dict(self): - kvp: Dict[str, Any] = { "protocol": ConnectionProtocol.VNC.value } # pylint: disable=E1101 - - # if resolved (userRecords->userRecordUid) from administrative_credentials (usually after user create) - recs: list = self.userRecordUid if self.userRecordUid and isinstance(self.userRecordUid, list) else [] - uids = [x.strip() for x in recs if isinstance(x, str) and x.strip() != ""] - if uids: - kvp["userRecords"] = uids - - if self.port and isinstance(self.port, str) and self.port.strip(): - kvp["port"] = self.port.strip() - if self.allowSupplyUser is not None and isinstance(self.allowSupplyUser, bool): - kvp["allowSupplyUser"] = self.allowSupplyUser - if self.recordingIncludeKeys is not None and isinstance(self.recordingIncludeKeys, bool): - kvp["recordingIncludeKeys"] = self.recordingIncludeKeys - if self.disableCopy is not None and isinstance(self.disableCopy, bool): - kvp["disableCopy"] = self.disableCopy - if self.disablePaste is not None and isinstance(self.disablePaste, bool): - kvp["disablePaste"] = self.disablePaste - - if self.destHost and isinstance(self.destHost, str) and self.destHost.strip(): - kvp["destHost"] = self.destHost.strip() - if self.destPort and isinstance(self.destPort, str) and self.destPort.strip(): - kvp["destPort"] = self.destPort.strip() - - if isinstance(self.sftp, SFTPConnectionSettings): - sftp = self.sftp.to_dict() - if sftp: - kvp["sftp"] = sftp - - return kvp - - def to_record_json(self): - dict = self.to_record_dict() or {} - rec_json = json.dumps(dict) - return rec_json - -class ConnectionSettingsTelnet(BaseConnectionSettings, ClipboardConnectionSettings, TerminalDisplayConnectionSettings): - protocol = ConnectionProtocol.TELNET - def __init__( # pylint: disable=R0917 - self, - port: Optional[str] = None, # Override port from host - allowSupplyUser: Optional[bool] = None, - userRecords: Optional[List[str]] = None, - recordingIncludeKeys: Optional[bool] = None, - disableCopy: Optional[bool] = None, - disablePaste: Optional[bool] = None, - colorScheme: Optional[str] = None, - fontSize: Optional[int] = None, - usernameRegex: Optional[str] = None, - passwordRegex: Optional[str] = None, - loginSuccessRegex: Optional[str] = None, - loginFailureRegex: Optional[str] = None - ): - BaseConnectionSettings.__init__(self, port, allowSupplyUser, userRecords, recordingIncludeKeys) - ClipboardConnectionSettings.__init__(self, disableCopy, disablePaste) - TerminalDisplayConnectionSettings.__init__(self, colorScheme, fontSize) - self.usernameRegex = usernameRegex - self.passwordRegex = passwordRegex - self.loginSuccessRegex = loginSuccessRegex - self.loginFailureRegex = loginFailureRegex - - @classmethod - def load(cls, data: Union[str, dict]): - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"Connection Settings Telnet failed to load from: {str(data)[:80]}") - if not isinstance(data, dict): return obj - - 
bcs = BaseConnectionSettings.load(data) - if bcs: - obj.port = bcs.port - obj.allowSupplyUser = bcs.allowSupplyUser - obj.userRecords = bcs.userRecords - obj.recordingIncludeKeys = bcs.recordingIncludeKeys - - ccs = ClipboardConnectionSettings.load(data) - if ccs: - obj.disableCopy = ccs.disableCopy - obj.disablePaste = ccs.disablePaste - - tcs = TerminalDisplayConnectionSettings.load(data) - if tcs: - obj.colorScheme = tcs.colorScheme - obj.fontSize = tcs.fontSize - - val = data.get("username_regex", None) - if isinstance(val, str): obj.usernameRegex = val - val = data.get("password_regex", None) - if isinstance(val, str): obj.passwordRegex = val - val = data.get("login_success_regex", None) - if isinstance(val, str): obj.loginSuccessRegex = val - val = data.get("login_failure_regex", None) - if isinstance(val, str): obj.loginFailureRegex = val - - return obj - - def to_record_dict(self): - kvp: Dict[str, Any] = { "protocol": ConnectionProtocol.TELNET.value } # pylint: disable=E1101 - - # if resolved (userRecords->userRecordUid) from administrative_credentials (usually after user create) - recs: list = self.userRecordUid if self.userRecordUid and isinstance(self.userRecordUid, list) else [] - uids = [x.strip() for x in recs if isinstance(x, str) and x.strip() != ""] - if uids: - kvp["userRecords"] = uids - - if self.port and isinstance(self.port, str) and self.port.strip(): - kvp["port"] = self.port.strip() - if self.allowSupplyUser is not None and isinstance(self.allowSupplyUser, bool): - kvp["allowSupplyUser"] = self.allowSupplyUser - if self.recordingIncludeKeys is not None and isinstance(self.recordingIncludeKeys, bool): - kvp["recordingIncludeKeys"] = self.recordingIncludeKeys - if self.disableCopy is not None and isinstance(self.disableCopy, bool): - kvp["disableCopy"] = self.disableCopy - if self.disablePaste is not None and isinstance(self.disablePaste, bool): - kvp["disablePaste"] = self.disablePaste - - if self.colorScheme and isinstance(self.colorScheme, str) and self.colorScheme.strip(): - kvp["colorScheme"] = self.colorScheme.strip() - if self.fontSize and type(self.fontSize) is int and self.fontSize > 4: - kvp["fontSize"] = str(self.fontSize) - if self.usernameRegex and isinstance(self.usernameRegex, str) and self.usernameRegex.strip(): - kvp["usernameRegex"] = self.usernameRegex.strip() - if self.passwordRegex and isinstance(self.passwordRegex, str) and self.passwordRegex.strip(): - kvp["passwordRegex"] = self.passwordRegex.strip() - if self.loginSuccessRegex and isinstance(self.loginSuccessRegex, str) and self.loginSuccessRegex.strip(): - kvp["loginSuccessRegex"] = self.loginSuccessRegex.strip() - if self.loginFailureRegex and isinstance(self.loginFailureRegex, str) and self.loginFailureRegex.strip(): - kvp["loginFailureRegex"] = self.loginFailureRegex.strip() - - return kvp - - def to_record_json(self): - dict = self.to_record_dict() or {} - rec_json = json.dumps(dict) - return rec_json - -class ConnectionSettingsSSH(BaseConnectionSettings, ClipboardConnectionSettings, TerminalDisplayConnectionSettings): - protocol = ConnectionProtocol.SSH - def __init__( # pylint: disable=R0917 - self, - port: Optional[str] = None, # Override port from host - allowSupplyUser: Optional[bool] = None, - userRecords: Optional[List[str]] = None, - recordingIncludeKeys: Optional[bool] = None, - disableCopy: Optional[bool] = None, - disablePaste: Optional[bool] = None, - colorScheme: Optional[str] = None, - fontSize: Optional[int] = None, - hostKey: Optional[str] = None, - command: 
Optional[str] = None, - sftp: Optional[SFTPRootDirectorySettings] = None - ): - BaseConnectionSettings.__init__(self, port, allowSupplyUser, userRecords, recordingIncludeKeys) - ClipboardConnectionSettings.__init__(self, disableCopy, disablePaste) - TerminalDisplayConnectionSettings.__init__(self, colorScheme, fontSize) - self.hostKey = hostKey - self.command = command - self.sftp = sftp if isinstance(sftp, SFTPRootDirectorySettings) else None - - @classmethod - def load(cls, data: Union[str, dict]): - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"Connection Settings SSH failed to load from: {str(data)[:80]}") - if not isinstance(data, dict): return obj - - bcs = BaseConnectionSettings.load(data) - if bcs: - obj.port = bcs.port - obj.allowSupplyUser = bcs.allowSupplyUser - obj.userRecords = bcs.userRecords - obj.recordingIncludeKeys = bcs.recordingIncludeKeys - - ccs = ClipboardConnectionSettings.load(data) - if ccs: - obj.disableCopy = ccs.disableCopy - obj.disablePaste = ccs.disablePaste - - tcs = TerminalDisplayConnectionSettings.load(data) - if tcs: - obj.colorScheme = tcs.colorScheme - obj.fontSize = tcs.fontSize - - val = data.get("public_host_key", None) - if isinstance(val, str): obj.hostKey = val - val = data.get("command", None) - if isinstance(val, str): obj.command = val - sftp = data.get("sftp", None) - if isinstance(sftp, dict): obj.sftp = SFTPRootDirectorySettings.load(sftp) - - return obj - - def to_record_dict(self): - kvp: Dict[str, Any] = { "protocol": ConnectionProtocol.SSH.value } # pylint: disable=E1101 - - # if resolved (userRecords->userRecordUid) from administrative_credentials (usually after user create) - recs: list = self.userRecordUid if self.userRecordUid and isinstance(self.userRecordUid, list) else [] - uids = [x.strip() for x in recs if isinstance(x, str) and x.strip() != ""] - if uids: - kvp["userRecords"] = uids - - if self.port and isinstance(self.port, str) and self.port.strip(): - kvp["port"] = self.port.strip() - if self.allowSupplyUser is not None and isinstance(self.allowSupplyUser, bool): - kvp["allowSupplyUser"] = self.allowSupplyUser - if self.recordingIncludeKeys is not None and isinstance(self.recordingIncludeKeys, bool): - kvp["recordingIncludeKeys"] = self.recordingIncludeKeys - if self.disableCopy is not None and isinstance(self.disableCopy, bool): - kvp["disableCopy"] = self.disableCopy - if self.disablePaste is not None and isinstance(self.disablePaste, bool): - kvp["disablePaste"] = self.disablePaste - - if self.colorScheme and isinstance(self.colorScheme, str) and self.colorScheme.strip(): - kvp["colorScheme"] = self.colorScheme.strip() - if self.fontSize and type(self.fontSize) is int and self.fontSize > 4: - kvp["fontSize"] = str(self.fontSize) - if self.hostKey and isinstance(self.hostKey, str) and self.hostKey.strip(): - kvp["hostKey"] = self.hostKey.strip() - if self.command and isinstance(self.command, str) and self.command.strip(): - kvp["command"] = self.command.strip() - - if isinstance(self.sftp, SFTPRootDirectorySettings): - srds = self.sftp.to_dict() - if srds: - kvp["sftp"] = srds - - return kvp - - def to_record_json(self): - dict = self.to_record_dict() or {} - rec_json = json.dumps(dict) - return rec_json - -class ConnectionSettingsKubernetes(BaseConnectionSettings, TerminalDisplayConnectionSettings): - protocol = ConnectionProtocol.KUBERNETES - def __init__( # pylint: disable=R0917 - self, - port: Optional[str] = None, # Override port from host - allowSupplyUser: 
Optional[bool] = None, - userRecords: Optional[List[str]] = None, - recordingIncludeKeys: Optional[bool] = None, - colorScheme: Optional[str] = None, - fontSize: Optional[int] = None, - ignoreCert: Optional[bool] = None, - caCert: Optional[str] = None, - namespace: Optional[str] = None, - pod: Optional[str] = None, - container: Optional[str] = None, - clientCert: Optional[str] = None, - clientKey: Optional[str] = None - ): - BaseConnectionSettings.__init__(self, port, allowSupplyUser, userRecords, recordingIncludeKeys) - TerminalDisplayConnectionSettings.__init__(self, colorScheme, fontSize) - self.ignoreCert = ignoreCert - self.caCert = caCert - self.namespace = namespace - self.pod = pod - self.container = container - self.clientCert = clientCert - self.clientKey = clientKey - - @classmethod - def load(cls, data: Union[str, dict]): - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"Connection Settings K8S failed to load from: {str(data)[:80]}") - if not isinstance(data, dict): return obj - - bcs = BaseConnectionSettings.load(data) - if bcs: - obj.port = bcs.port - obj.allowSupplyUser = bcs.allowSupplyUser - obj.userRecords = bcs.userRecords - obj.recordingIncludeKeys = bcs.recordingIncludeKeys - - tcs = TerminalDisplayConnectionSettings.load(data) - if tcs: - obj.colorScheme = tcs.colorScheme - obj.fontSize = tcs.fontSize - - val = data.get("namespace", None) - if isinstance(val, str): obj.namespace = val - val = data.get("pod_name", None) - if isinstance(val, str): obj.pod = val - val = data.get("container", None) - if isinstance(val, str): obj.container = val - obj.ignoreCert = utils.value_to_boolean(data.get("ignore_server_cert", None)) - obj.caCert = multiline_to_str(parse_multiline(data, "ca_certificate", "Error parsing ca_certificate")) - obj.clientCert = multiline_to_str(parse_multiline(data, "client_certificate", "Error parsing client_certificate")) - obj.clientKey = multiline_to_str(parse_multiline(data, "client_key", "Error parsing client_key")) - - return obj - - def to_record_dict(self): - kvp: Dict[str, Any] = { "protocol": ConnectionProtocol.KUBERNETES.value } # pylint: disable=E1101 - - # if resolved (userRecords->userRecordUid) from administrative_credentials (usually after user create) - recs: list = self.userRecordUid if self.userRecordUid and isinstance(self.userRecordUid, list) else [] - uids = [x.strip() for x in recs if isinstance(x, str) and x.strip() != ""] - if uids: - kvp["userRecords"] = uids - - if self.port and isinstance(self.port, str) and self.port.strip(): - kvp["port"] = self.port.strip() - if self.allowSupplyUser is not None and isinstance(self.allowSupplyUser, bool): - kvp["allowSupplyUser"] = self.allowSupplyUser - if self.recordingIncludeKeys is not None and isinstance(self.recordingIncludeKeys, bool): - kvp["recordingIncludeKeys"] = self.recordingIncludeKeys - if self.colorScheme and isinstance(self.colorScheme, str) and self.colorScheme.strip(): - kvp["colorScheme"] = self.colorScheme.strip() - if self.fontSize and type(self.fontSize) is int and self.fontSize > 4: - kvp["fontSize"] = str(self.fontSize) - if self.namespace and isinstance(self.namespace, str) and self.namespace.strip(): - kvp["namespace"] = self.namespace.strip() - if self.pod and isinstance(self.pod, str) and self.pod.strip(): - kvp["pod"] = self.pod.strip() - - if self.container and isinstance(self.container, str) and self.container.strip(): - kvp["container"] = self.container.strip() - if self.ignoreCert is not None and 
isinstance(self.ignoreCert, bool): - kvp["ignoreCert"] = self.ignoreCert - if self.caCert and isinstance(self.caCert, str) and self.caCert.strip(): - kvp["caCert"] = self.caCert.strip() - if self.clientCert and isinstance(self.clientCert, str) and self.clientCert.strip(): - kvp["clientCert"] = self.clientCert.strip() - if self.clientKey and isinstance(self.clientKey, str) and self.clientKey.strip(): - kvp["clientKey"] = self.clientKey.strip() - - return kvp - - def to_record_json(self): - dict = self.to_record_dict() or {} - rec_json = json.dumps(dict) - return rec_json - -class BaseDatabaseConnectionSettings(BaseConnectionSettings, ClipboardConnectionSettings): - def __init__( # pylint: disable=R0917 - self, - port: Optional[str] = None, # Override port from host - allowSupplyUser: Optional[bool] = None, - userRecords: Optional[List[str]] = None, - recordingIncludeKeys: Optional[bool] = None, - disableCopy: Optional[bool] = None, - disablePaste: Optional[bool] = None, - database: Optional[str] = None, - disableCsvExport: Optional[bool] = None, - disableCsvImport: Optional[bool] = None - ): - BaseConnectionSettings.__init__(self, port, allowSupplyUser, userRecords, recordingIncludeKeys) - ClipboardConnectionSettings.__init__(self, disableCopy, disablePaste) - self.database = database - self.disableCsvExport = disableCsvExport - self.disableCsvImport = disableCsvImport - - @classmethod - def load(cls, data: Union[str, dict]): - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"Database Connection Settings failed to load from: {str(data)[:80]}") - if not isinstance(data, dict): return obj - - bcs = BaseConnectionSettings.load(data) - if bcs: - obj.port = bcs.port - obj.allowSupplyUser = bcs.allowSupplyUser - obj.userRecords = bcs.userRecords - obj.recordingIncludeKeys = bcs.recordingIncludeKeys - - ccs = ClipboardConnectionSettings.load(data) - if ccs: - obj.disableCopy = ccs.disableCopy - obj.disablePaste = ccs.disablePaste - - val = data.get("default_database", None) - if isinstance(val, str): obj.database = val - obj.disableCsvExport = utils.value_to_boolean(data.get("disable_csv_export", None)) - obj.disableCsvImport = utils.value_to_boolean(data.get("disable_csv_import", None)) - - return obj - - def to_record_dict(self): - kvp: Dict[str, Any] = {} - - # if resolved (userRecords->userRecordUid) from administrative_credentials (usually after user create) - recs: list = self.userRecordUid if self.userRecordUid and isinstance(self.userRecordUid, list) else [] - uids = [x.strip() for x in recs if isinstance(x, str) and x.strip() != ""] - if uids: - kvp["userRecords"] = uids - - if self.port and isinstance(self.port, str) and self.port.strip(): - kvp["port"] = self.port.strip() - if self.allowSupplyUser is not None and isinstance(self.allowSupplyUser, bool): - kvp["allowSupplyUser"] = self.allowSupplyUser - if self.recordingIncludeKeys is not None and isinstance(self.recordingIncludeKeys, bool): - kvp["recordingIncludeKeys"] = self.recordingIncludeKeys - if self.disableCopy is not None and isinstance(self.disableCopy, bool): - kvp["disableCopy"] = self.disableCopy - if self.disablePaste is not None and isinstance(self.disablePaste, bool): - kvp["disablePaste"] = self.disablePaste - if self.disableCsvExport is not None and isinstance(self.disableCsvExport, bool): - kvp["disableCsvExport"] = self.disableCsvExport - if self.disableCsvImport is not None and isinstance(self.disableCsvImport, bool): - kvp["disableCsvImport"] = 
self.disableCsvImport - if self.database and isinstance(self.database, str) and self.database.strip(): - kvp["database"] = self.database.strip() - - return kvp - - def to_record_json(self): - dict = self.to_record_dict() or {} - rec_json = json.dumps(dict) - return rec_json - -class ConnectionSettingsSqlServer(BaseDatabaseConnectionSettings): - protocol = ConnectionProtocol.SQLSERVER - def __init__( # pylint: disable=W0246 - self, - port: Optional[str] = None, # Override port from host - allowSupplyUser: Optional[bool] = None, - userRecords: Optional[List[str]] = None, - recordingIncludeKeys: Optional[bool] = None, - disableCopy: Optional[bool] = None, - disablePaste: Optional[bool] = None, - database: Optional[str] = None, - disableCsvExport: Optional[bool] = None, - disableCsvImport: Optional[bool] = None - ): - super().__init__(port, allowSupplyUser, userRecords, recordingIncludeKeys, - disableCopy, disablePaste, database, - disableCsvExport, disableCsvImport) - - @classmethod - def load(cls, data: Union[str, dict]): - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"SQLServer Connection Settings failed to load from: {str(data)[:80]}") - if not isinstance(data, dict): return obj - - bdcs = BaseDatabaseConnectionSettings.load(data) - if bdcs: - obj.port = bdcs.port - obj.allowSupplyUser = bdcs.allowSupplyUser - obj.userRecords = bdcs.userRecords - obj.recordingIncludeKeys = bdcs.recordingIncludeKeys - obj.disableCopy = bdcs.disableCopy - obj.disablePaste = bdcs.disablePaste - obj.database = bdcs.database - obj.disableCsvExport = bdcs.disableCsvExport - obj.disableCsvImport = bdcs.disableCsvImport - - return obj - - def to_record_dict(self): - dict = super().to_record_dict() - dict["protocol"] = ConnectionProtocol.SQLSERVER.value # pylint: disable=E1101 - return dict - -class ConnectionSettingsPostgreSQL(BaseDatabaseConnectionSettings): - protocol = ConnectionProtocol.POSTGRESQL - def __init__( # pylint: disable=W0246,R0917 - self, - port: Optional[str] = None, # Override port from host - allowSupplyUser: Optional[bool] = None, - userRecords: Optional[List[str]] = None, - recordingIncludeKeys: Optional[bool] = None, - disableCopy: Optional[bool] = None, - disablePaste: Optional[bool] = None, - database: Optional[str] = None, - disableCsvExport: Optional[bool] = None, - disableCsvImport: Optional[bool] = None - ): - super().__init__(port, allowSupplyUser, userRecords, recordingIncludeKeys, - disableCopy, disablePaste, database, - disableCsvExport, disableCsvImport) - - @classmethod - def load(cls, data: Union[str, dict]): - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"PostgreSQL Connection Settings failed to load from: {str(data)[:80]}") - if not isinstance(data, dict): return obj - - bdcs = BaseDatabaseConnectionSettings.load(data) - if bdcs: - obj.port = bdcs.port - obj.allowSupplyUser = bdcs.allowSupplyUser - obj.userRecords = bdcs.userRecords - obj.recordingIncludeKeys = bdcs.recordingIncludeKeys - obj.disableCopy = bdcs.disableCopy - obj.disablePaste = bdcs.disablePaste - obj.database = bdcs.database - obj.disableCsvExport = bdcs.disableCsvExport - obj.disableCsvImport = bdcs.disableCsvImport - - return obj - - def to_record_dict(self): - dict = super().to_record_dict() - dict["protocol"] = ConnectionProtocol.POSTGRESQL.value # pylint: disable=E1101 - return dict - -class ConnectionSettingsMySQL(BaseDatabaseConnectionSettings): - protocol = ConnectionProtocol.MYSQL - def 
__init__( # pylint: disable=W0246,R0917 - self, - port: Optional[str] = None, # Override port from host - allowSupplyUser: Optional[bool] = None, - userRecords: Optional[List[str]] = None, - recordingIncludeKeys: Optional[bool] = None, - disableCopy: Optional[bool] = None, - disablePaste: Optional[bool] = None, - database: Optional[str] = None, - disableCsvExport: Optional[bool] = None, - disableCsvImport: Optional[bool] = None - ): - super().__init__(port, allowSupplyUser, userRecords, recordingIncludeKeys, - disableCopy, disablePaste, database, - disableCsvExport, disableCsvImport) - - @classmethod - def load(cls, data: Union[str, dict]): - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"MySQL Connection Settings failed to load from: {str(data)[:80]}") - if not isinstance(data, dict): return obj - - bdcs = BaseDatabaseConnectionSettings.load(data) - if bdcs: - obj.port = bdcs.port - obj.allowSupplyUser = bdcs.allowSupplyUser - obj.userRecords = bdcs.userRecords - obj.recordingIncludeKeys = bdcs.recordingIncludeKeys - obj.disableCopy = bdcs.disableCopy - obj.disablePaste = bdcs.disablePaste - obj.database = bdcs.database - obj.disableCsvExport = bdcs.disableCsvExport - obj.disableCsvImport = bdcs.disableCsvImport - - return obj - - def to_record_dict(self): - dict = super().to_record_dict() - dict["protocol"] = ConnectionProtocol.MYSQL.value # pylint: disable=E1101 - return dict - -PamConnectionSettings = Optional[ - Union[ - ConnectionSettingsRDP, - ConnectionSettingsVNC, - ConnectionSettingsTelnet, - ConnectionSettingsSSH, - ConnectionSettingsKubernetes, - ConnectionSettingsSqlServer, - ConnectionSettingsPostgreSQL, - ConnectionSettingsMySQL - ] -] - -class PamPortForwardSettings: - def __init__(self, port: Optional[str] = None, reusePort: Optional[bool] = None): - self.port = port # Override Port from host - self.reusePort = reusePort # Attempt to use the last connected port if available - - @classmethod - def load(cls, data: Union[str, dict]): - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"Port Forward Settings failed to load from: {str(data)[:80]}") - if not isinstance(data, dict): return obj - - obj.port = data.get("port", None) - obj.reusePort = utils.value_to_boolean(data.get("reuse_port", None)) - return obj - - def to_record_dict(self): - dict = {} - if self.port and isinstance(self.port, str) and self.port.strip(): - dict["port"] = self.port.strip() - if self.reusePort is not None and isinstance(self.reusePort, bool): - dict["reusePort"] = self.reusePort - return dict - - def to_record_json(self): - dict = self.to_record_dict() or {} - rec_json = json.dumps(dict) - return rec_json - -class PamRemoteBrowserSettings: - def __init__( - self, - options: Optional[DagSettingsObject] = None, - connection: Optional[ConnectionSettingsHTTP] = None - ): - self.options = options - self.connection = connection - - @classmethod - def load(cls, data: Optional[Union[str, dict]]): - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"PAM RBI Settings field failed to load from: {str(data)[:80]}...") - if not isinstance(data, dict): return obj - - options = DagSettingsObject.load(data.get("options", {})) - if not is_empty_instance(options): - obj.options = options - - cdata = data.get("connection", {}) - # TO DO: if isinstance(cdata, str): lookup_by_name(pam_data.connections) - if not isinstance(cdata, dict): - 
logging.warning(f"""PAM RBI Settings: Connection must be a JSON object - skipping... "{str(cdata)[:24]}" """) - if cdata and isinstance(cdata, dict): - proto = cdata.get("protocol", "") - if proto and isinstance(proto, str): - if proto.lower() == "http": - conn = ConnectionSettingsHTTP.load(cdata) - if not is_empty_instance(conn): - obj.connection = conn - else: - logging.warning(f"""Connection skipped: unknown protocol "{str(proto)[:24]}" """) - - if not obj.connection and cdata and isinstance(cdata, dict): - logging.error(f"PAM RBI Settings failed to load from: {str(cdata)[:80]}...") - - return obj - -class PamSettingsFieldData: - def __init__( - self, - allowSupplyHost: Optional[bool] = None, - connection: PamConnectionSettings = None, # Optional[PamConnectionSettings] - portForward: Optional[PamPortForwardSettings] = None, - options: Optional[DagSettingsObject] = None, - jit_settings: Optional[DagJitSettingsObject] = None, - ai_settings: Optional[DagAiSettingsObject] = None, - ): - self.allowSupplyHost = allowSupplyHost - self.connection = connection - self.portForward = portForward - self.options = options - self.jit_settings = jit_settings - self.ai_settings = ai_settings - - # PamConnectionSettings excludes ConnectionSettingsHTTP - pam_connection_classes = [ - ConnectionSettingsRDP, - ConnectionSettingsVNC, - ConnectionSettingsTelnet, - ConnectionSettingsSSH, - ConnectionSettingsKubernetes, - ConnectionSettingsSqlServer, - ConnectionSettingsPostgreSQL, - ConnectionSettingsMySQL - ] - - @classmethod - def get_connection_class(cls, cdata: dict): - if cdata and isinstance(cdata, dict): - proto = cdata.get("protocol", "") - if proto and isinstance(proto, str): - proto = proto.lower() - for con in cls.pam_connection_classes: - pr = getattr(con, "protocol", "") - if isinstance(pr, ConnectionProtocol) and pr.value.lower() == proto: # pylint: disable=E1101 - return con.load(cdata) - logging.warning(f"""Connection skipped: unknown protocol "{str(proto)[:24]}" """) - return None - - def is_empty(self): - empty = is_empty_instance(self.options) - empty = empty and is_empty_instance(self.portForward) - empty = empty and is_empty_instance(self.connection, ["protocol"]) - return empty - - @classmethod - def load(cls, data: Union[str, dict]): - obj = cls() - try: data = json.loads(data) if isinstance(data, str) else data - except: logging.error(f"PAM Settings Field failed to load from: {str(data)[:80]}...") - if not isinstance(data, dict): return obj - - obj.allowSupplyHost = utils.value_to_boolean(data.get("allow_supply_host", None)) - options = DagSettingsObject.load(data.get("options", {})) - if not is_empty_instance(options): - obj.options = options - - # Parse jit_settings from options dict (nested inside options) - options_dict = data.get("options", {}) - if isinstance(options_dict, dict): - jit_value = options_dict.get("jit_settings", None) - if jit_value is not None: - jit_settings = DagJitSettingsObject.load(jit_value) - if jit_settings: - obj.jit_settings = jit_settings - - # Parse ai_settings from options dict (nested inside options) - options_dict = data.get("options", {}) - if isinstance(options_dict, dict): - ai_value = options_dict.get("ai_settings", None) - if ai_value is not None: - ai_settings = DagAiSettingsObject.load(ai_value) - if ai_settings: - obj.ai_settings = ai_settings - - portForward = PamPortForwardSettings.load(data.get("port_forward", {})) - if not is_empty_instance(portForward): - obj.portForward = portForward - - cdata = data.get("connection", {}) - # TO DO: if 
isinstance(cdata, str): lookup_by_name(pam_data.connections) - if not isinstance(cdata, dict): - logging.warning(f"""PAM Settings: Connection must be a JSON object - skipping... "{str(cdata)[:24]}" """) - obj.connection = cls.get_connection_class(cdata) - if not obj.connection and cdata and isinstance(cdata, dict): - logging.error(f"PAM Settings failed to load from: {str(cdata)[:80]}...") - - return obj - -def is_empty_instance(obj, skiplist: Optional[List[str]] = None): - """ Checks if all attributes (not on skiplist) are None """ - if not obj: return True - if not isinstance(skiplist, list): skiplist= [] - for attr, value in vars(obj).items(): - if not (attr in skiplist or value is None): - return False - return True - -def is_blank_instance(obj, skiplist: Optional[List[str]] = None): - """ Checks if all attributes (not on skiplist) are None or empty """ - if not obj: return True - if not isinstance(skiplist, list): skiplist= [] - for attr, value in vars(obj).items(): - if not (attr in skiplist or not value): - return False - return True - -def get_sftp_attribute(obj, name: str) -> str: - # Get one of pam_settings.connection.sftp.{sftpResource,sftpResourceUid,sftpUser,sftpUserUid} - value: str = "" - if (name and obj and - hasattr(obj, "pam_settings") and - hasattr(obj.pam_settings, "connection") and - hasattr(obj.pam_settings.connection, "sftp")): - if name == "sftpResource" and hasattr(obj.pam_settings.connection.sftp, "sftpResource"): - value = obj.pam_settings.connection.sftp.sftpResource or "" - elif name == "sftpResourceUid" and hasattr(obj.pam_settings.connection.sftp, "sftpResourceUid"): - value = obj.pam_settings.connection.sftp.sftpResourceUid or "" - elif name == "sftpUser" and hasattr(obj.pam_settings.connection.sftp, "sftpUser"): - value = obj.pam_settings.connection.sftp.sftpUser or "" - elif name == "sftpUserUid" and hasattr(obj.pam_settings.connection.sftp, "sftpUserUid"): - value = obj.pam_settings.connection.sftp.sftpUserUid or "" - else: - logging.debug(f"""Unknown sftp attribute "{name}" (skipped)""") - value = value[0] if isinstance(value, list) else value - value = value if isinstance(value, str) else "" - return value - -def set_sftp_uid(obj, name: str, uid: str) -> bool: - if not(obj and name): - return False - if not(uid and isinstance(uid, str) and RecordV3.is_valid_ref_uid(uid)): - logging.debug(f"""Invalid sftp UID "{uid}" (skipped)""") - return False - if (hasattr(obj, "pam_settings") and - hasattr(obj.pam_settings, "connection") and - hasattr(obj.pam_settings.connection, "sftp")): - if name == "sftpResourceUid" and hasattr(obj.pam_settings.connection.sftp, "sftpResourceUid"): - obj.pam_settings.connection.sftp.sftpResourceUid = uid - return True - elif name == "sftpUserUid" and hasattr(obj.pam_settings.connection.sftp, "sftpUserUid"): - obj.pam_settings.connection.sftp.sftpUserUid = uid - return True - else: - logging.debug(f"""Unknown sftp UID attribute "{name}" (skipped)""") - return False - -def is_admin_external(mach) -> bool: - res = False - if (mach and hasattr(mach, "is_admin_external") and mach.is_admin_external is True): - res = True - return res - -def get_admin_credential(obj, uid:bool=False) -> str: - # Get one of pam_settings.connection.{userRecords,userRecordUid} - value: str = "" - if (obj and hasattr(obj, "pam_settings") and - hasattr(obj.pam_settings, "connection") and - ((uid and hasattr(obj.pam_settings.connection, "userRecordUid")) or - (not uid and hasattr(obj.pam_settings.connection, "userRecords")))): - if uid and 
obj.pam_settings.connection.userRecordUid: - value = obj.pam_settings.connection.userRecordUid - elif not uid and obj.pam_settings.connection.userRecords: - value = obj.pam_settings.connection.userRecords - value = value[0] if isinstance(value, list) else value - value = value if isinstance(value, str) else "" - return value - -def set_user_record_uid(obj, uid: str, is_external: bool = False) -> bool: - if not (uid and isinstance(uid, str) and RecordV3.is_valid_ref_uid(uid)): - logging.debug(f"""Invalid userRecordUid "{uid}" (skipped)""") - return False - - if (uid and obj and hasattr(obj, "pam_settings") and - hasattr(obj.pam_settings, "connection") and - hasattr(obj.pam_settings.connection, "userRecordUid")): - obj.pam_settings.connection.userRecordUid = uid - if is_external is True: - if hasattr(obj, "is_admin_external"): - obj.is_admin_external = True - if hasattr(obj, "administrative_credentials_uid"): - obj.administrative_credentials_uid = uid - return True - else: - logging.debug("""Object has no attribute "userRecordUid" (skipped)""") - return False - -def find_external_user(mach, machines, title: str) -> list: - # Local pamMachine could reference pamDirectory AD user as its admin - res = [] - if title and machines and mach.type == "pamMachine": - mu = title.split(".", 1) # machine/user titles - mname = mu[0] if len(mu) > 1 else "" - uname = mu[1] if len(mu) > 1 else mu[0] - for m in machines: - if m.type == "pamDirectory" and (not mname or mname == m.title): - res.extend(search_machine(m, uname) or []) - return res - -def find_user(mach, users, title: str) -> list: - if not isinstance(mach, list): - res = search_machine(mach, title) or search_users(users, title) - else: - res = search_users(users, title) - for m in mach: - res = res or search_machine(m, title) - if res: break - return res or [] - -def search_users(users, user: str) -> list: - res = [] - if isinstance(users, list): - res = [x for x in users if getattr(x, "title", None) == user] - res = res or [x for x in users if getattr(x, "login", None) == user] - return res - -def search_machine(mach, user: str) -> list: - if mach and hasattr(mach, "users") and isinstance(mach.users, list): - return search_users(mach.users, user) - return [] - -def parse_command_options(obj, enable: bool) -> dict: - # Parse command options from DagSettingsObject (pam_resource - skipped/external) - args = {} - if not obj: return args - choices = {"on": True, "off": False} - record_key = "record" if enable else "resource_uid" - args[record_key] = obj.uid - opts = None - if isinstance(obj, PamRemoteBrowserObject): - opts = obj.rbi_settings.options if obj.rbi_settings and obj.rbi_settings.options else None - elif isinstance(obj, PamUserObject): - logging.warning("Trying to parse DAG settings from PAM User (skipped)") # PamUserObject.rotation_settings are different - elif not isinstance(obj, LoginUserObject): - opts = obj.pam_settings.options if obj.pam_settings and obj.pam_settings.options else None - if opts: - if enable: # PAMTunnelEditCommand.execute format enable_rotation=True/disable_rotation=True - val = opts.rotation.value if opts.rotation else "" - key = "enable_rotation" if val == "on" else "disable_rotation" if val == "off" else None - if key is not None: args[key] = True - val = opts.connections.value if opts.connections else "" - key = "enable_connections" if val == "on" else "disable_connections" if val == "off" else None - if key is not None: args[key] = True - val = opts.tunneling.value if opts.tunneling else "" - key =
"enable_tunneling" if val == "on" else "disable_tunneling" if val == "off" else None - if key is not None: args[key] = True - val = opts.text_session_recording.value if opts.text_session_recording else "" - key = "enable_typescript_recording" if val == "on" else "disable_typescript_recording" if val == "off" else None - if key is not None: - args[key] = True - args[key.replace("_typescript_", "_typescripts_")] = True # legacy compat. - val = opts.graphical_session_recording.value if opts.graphical_session_recording else "" - key = "enable_connections_recording" if val == "on" else "disable_connections_recording" if val == "off" else None - if key is not None: args[key] = True - val = opts.remote_browser_isolation.value if opts.remote_browser_isolation else "" - key = "enable_remote_browser_isolation" if val == "on" else "disable_remote_browser_isolation" if val == "off" else None - if key is not None: args[key] = True - # AI and JIT settings don't apply to RBI records - if not isinstance(obj, PamRemoteBrowserObject): - val = opts.ai_threat_detection.value if opts.ai_threat_detection else "" - key = "enable_ai_threat_detection" if val == "on" else "disable_ai_threat_detection" if val == "off" else None - if key is not None: args[key] = True - val = opts.ai_terminate_session_on_detection.value if opts.ai_terminate_session_on_detection else "" - key = "enable_ai_terminate_session_on_detection" if val == "on" else "disable_ai_terminate_session_on_detection" if val == "off" else None - if key is not None: args[key] = True - else: # TunnelDAG.set_resource_allowed format rotation=True/False - if opts.rotation and opts.rotation.value in ("on", "off"): - args["rotation"] = choices[opts.rotation.value] - if opts.connections and opts.connections.value in ("on", "off"): - args["connections"] = choices[opts.connections.value] - if opts.tunneling and opts.tunneling.value in ("on", "off"): - args["tunneling"] = choices[opts.tunneling.value] - if opts.text_session_recording and opts.text_session_recording.value in ("on", "off"): - # args["typescriptrecording"] = choices[opts.text_session_recording.value] - args["typescript_recording"] = choices[opts.text_session_recording.value] - if opts.graphical_session_recording and opts.graphical_session_recording.value in ("on", "off"): - # args["recording"] = choices[opts.graphical_session_recording.value] - args["session_recording"] = choices[opts.graphical_session_recording.value] - if opts.remote_browser_isolation and opts.remote_browser_isolation.value in ("on", "off"): - args["remote_browser_isolation"] = choices[opts.remote_browser_isolation.value] - # AI and JIT settings don't apply to RBI records - if not isinstance(obj, PamRemoteBrowserObject): - if opts.ai_threat_detection and opts.ai_threat_detection.value in ("on", "off"): - args["ai_enabled"] = choices[opts.ai_threat_detection.value] - if opts.ai_terminate_session_on_detection and opts.ai_terminate_session_on_detection.value in ("on", "off"): - args["ai_session_terminate"] = choices[opts.ai_terminate_session_on_detection.value] - - return args - -def resolve_domain_admin(pce, users): - if not(users and isinstance(users, list)): - return - if (pce and hasattr(pce, "dom_administrative_credential") and pce.dom_administrative_credential and - hasattr(pce, "admin_credential_ref")): - dac = pce.dom_administrative_credential - res = {"titles": set(), "logins": set()} - for obj in users: - uid = getattr(obj, "uid", "") or "" - title = getattr(obj, "title", "") or "" - login = getattr(obj, "login", "") or "" - 
if not uid: # cannot resolve script credential to an empty UID - logging.debug(f"""Unable to resolve domain admin creds from rec without UID - "{title}:{login}" (skipped)""") - continue - if title and title == dac: - res["titles"].add(uid) - elif login and login == dac: - res["logins"].add(uid) - num_unique_uids = len(res["titles"] | res["logins"]) - if num_unique_uids != 1: - logging.debug(f"{num_unique_uids} matches while resolving domain admin creds for '{dac}' ") - if res["titles"]: - pce.admin_credential_ref = next(iter(res["titles"])) - elif res["logins"]: - pce.admin_credential_ref = next(iter(res["logins"])) - if pce.admin_credential_ref: - logging.debug(f"Domain admin credential '{dac}' resolved to '{pce.admin_credential_ref}' ") - -def resolve_script_creds(rec, users, resources): - creds = set() - if (rec and hasattr(rec, "scripts") and rec.scripts and - hasattr(rec.scripts, "scripts") and rec.scripts.scripts): - creds = set(chain.from_iterable( - (x.additional_credentials for x in rec.scripts.scripts if x.additional_credentials)) - ) - if not creds: # nothing to resolve - return - res = {x: {"titles":[], "logins":[]} for x in creds} - for obj in chain(users, resources): - uid = getattr(obj, "uid", "") or "" - title = getattr(obj, "title", "") or "" - login = getattr(obj, "login", "") or "" - if not uid: # cannot resolve script credential to an empty UID - logging.debug(f"""Unable to resolve script creds from rec without UID - "{title}:{login}" (skipped)""") - continue - if title and title in creds: - res[title]["titles"].append(uid) - elif login and login in creds: - res[login]["logins"].append(uid) - - # recursive search in machine users - if hasattr(obj, "users") and obj.users and isinstance(obj.users, list): - for usr in obj.users: - uid = getattr(usr, "uid", "") or "" - title = getattr(usr, "title", "") or "" - login = getattr(usr, "login", "") or "" - if not uid: # cannot resolve script credential to an empty UID - logging.debug(f"""Unable to resolve script creds from rec without UID - "{title}:{login}" (skipped)""") - continue - if title and title in creds: - res[title]["titles"].append(uid) - elif login and login in creds: - res[login]["logins"].append(uid) - - if logging.getLogger().getEffectiveLevel() <= logging.DEBUG: - for k, v in res.items(): - tlen = len(v.get("titles", [])) - llen = len(v.get("logins", [])) - if tlen+llen != 1: - logging.debug(f"{tlen+llen} matches while resolving script creds for {k}") - - for script in (x for x in rec.scripts.scripts if x.additional_credentials): - for cred in script.additional_credentials: - matches = res.get(cred) or {} - match = next(chain(matches.get("titles") or [], matches.get("logins") or []), None) - if match: - script.record_refs.append(match) - else: - title = getattr(rec, "title", "") or "" - login = getattr(rec, "login", "") or "" - logging.warning(f"""Unable to resolve script creds "{cred}" from "{title}:{login}" """) - if script.record_refs: - script.record_refs = list(set(script.record_refs)) - -def add_pam_scripts(params, record, scripts): - """Add post-rotation script(s) to a rotation record""" - if not (isinstance(record, str) and record != "" - and isinstance(scripts, list) and len(scripts) > 0): - return # nothing to do - no record or no script(s) - - ruid = record if record in params.record_cache else "" - if not ruid: - records = list(vault_extensions.find_records( - params, search_str=record, record_version=(3, 6), - record_type=PAM_ROTATION_TYPES + PAM_CONFIG_TYPES)) - if len(records) == 0: -
logging.warning(f"""{bcolors.WARNING}Warning: {bcolors.ENDC} Add rotation script - Record "{record}" not found!""") - elif len(records) > 1: - logging.warning(f"""{bcolors.WARNING}Warning: {bcolors.ENDC} Add rotation script - Record "{record}" is not unique. Use record UID!""") - else: - ruid = records[0].record_uid - rec = vault.KeeperRecord.load(params, ruid) if ruid else None - if rec and isinstance(rec, vault.TypedRecord): - if rec.version not in (3, 6): - logging.warning(f"""{bcolors.WARNING}Warning: {bcolors.ENDC} Add rotation script - Record "{rec.record_uid}" is not a rotation record (skipped).""") - return - - script_field = next((x for x in rec.fields if x.type == "script"), None) - if not script_field: - script_field = vault.TypedField.new_field("script", [], "rotationScripts") - rec.fields.append(script_field) - for script in scripts: - file_name = script.file - full_name = os.path.abspath(os.path.expanduser(file_name)) - if not os.path.isfile(full_name): - logging.warning(f"""{bcolors.WARNING}Warning: {bcolors.ENDC} Add rotation script - File "{file_name}" not found (skipped).""") - continue - facade = record_facades.FileRefRecordFacade() - facade.record = rec - pre = set(facade.file_ref) - upload_task = attachment.FileUploadTask(full_name) - attachment.upload_attachments(params, rec, [upload_task]) - post = set(facade.file_ref) - df = post.difference(pre) - if len(df) == 1: - file_uid = df.pop() - facade.file_ref.remove(file_uid) - script_value = { - "fileRef": file_uid, - "recordRef": [], - "command": "", - } - # command and recordRef are optional - if script.script_command: - script_value["command"] = script.script_command - if script.record_refs: - for ref in script.record_refs: - script_value["recordRef"].append(ref) - if ref not in params.record_cache: - logging.debug(f"{bcolors.WARNING}Warning: {bcolors.ENDC} " - "Add rotation script - Additional Credentials Record " - f""" "{ref}" not found (recordRef added)!""") - script_field.value.append(script_value) # type: ignore - - record_management.update_record(params, rec) - api.sync_down(params) - params.sync_data = True diff --git a/keepercommander/commands/pam_import/extend.py b/keepercommander/commands/pam_import/extend.py new file mode 100644 index 000000000..87304553d --- /dev/null +++ b/keepercommander/commands/pam_import/extend.py @@ -0,0 +1,1351 @@ +# _ __ +# | |/ /___ ___ _ __ ___ _ _ ® +# | ' list[str]: + """Split folder path using path deilmiter / (escape: / -> //)""" + + # Escape char / confusion: a///b -> [a/]/[b] or [a]/[/b] + # Escape char ` or ^ (since \ is hard to put in strings and JSON) + # ...yet again a``/b -> [a`/b] or [a`]/[b] + + # Note: using / as escape char and path delimiter: / <-> // + placeholder = "\x00" # unlikely to appear in folder names + tmp = path.replace("//", placeholder).rstrip("/") + parts = tmp.split("/") # split on remaining single slashes + res = [part.replace(placeholder, "/") for part in parts] + + # check for bad path (odd number of slashes): a///b or a/////b etc. + if re.search(r"(? 
0 and existing_parts[-1] == root: + possible_parents.append(existing_path) + + if len(possible_parents) > 1: + # Ambiguous: this partial path could belong to multiple locations + bad_paths.append((path, f"Ambiguous: '{root}' appears in multiple locations {possible_parents}")) + processed_paths.add(path) + elif len(possible_parents) == 1: + # This is a dependent path that needs parent to exist first + # Wait for parent to be processed (don't mark as processed yet) + pass + else: + # No possible parents found - truly a bad path + bad_paths.append((path, f"Root folder '{root}' not found in shared folders")) + processed_paths.add(path) + else: + # Check for ambiguous paths (multiple possible locations) + matching_roots = [] + for sf_name in sf_name_map: + if path.startswith(sf_name + "/") or path == sf_name: + matching_roots.append(sf_name) + + if len(matching_roots) > 1: + bad_paths.append((path, f"Ambiguous: maps to multiple roots {matching_roots}")) + else: + good_paths.append((path, parts)) + processed_paths.add(path) + + # Add paths to the corresponding folder trees + for path, parts in good_paths: + if parts and parts[0] in sf_name_map: + shf = sf_name_map[parts[0]] + current_level = shf['folder_tree'] + + # Navigate/create the folder structure + for _, folder_name in enumerate(parts[1:], 1): + if folder_name not in current_level: + current_level[folder_name] = { + 'uid': '', # Empty UID for new folders + 'name': folder_name, + 'subfolders': {} + } + current_level = current_level[folder_name]['subfolders'] + + return good_paths, bad_paths + +def build_tree_recursive(params, folder_uid: str): + """Recursively build tree for a folder and its subfolders""" + tree = {} + folder = params.folder_cache.get(folder_uid) + if not folder: + return tree + + for subfolder_uid in folder.subfolders: + subfolder = params.folder_cache.get(subfolder_uid) + if subfolder: + folder_name = subfolder.name or '' + tree[folder_name] = { + 'uid': subfolder.uid, + 'name': folder_name, + 'subfolders': build_tree_recursive(params, subfolder.uid) + } + + return tree + + +def _collect_path_to_uid_from_tree(path_prefix: str, tree: dict, path_to_uid: dict, only_existing: bool) -> None: + """Walk folder tree and fill path_to_uid. path_prefix e.g. 'gwapp', tree is shf['folder_tree']. + If only_existing, only add when node['uid'] is non-empty.""" + for name, node in (tree or {}).items(): + path = f"{path_prefix}/{name}" if path_prefix else name + uid = (node or {}).get("uid") or "" + if only_existing and not uid: + continue + if uid: + path_to_uid[path] = uid + subfolders = (node or {}).get("subfolders") or {} + if subfolders: + _collect_path_to_uid_from_tree(path, subfolders, path_to_uid, only_existing) + + +def _count_existing_and_new_paths(ksm_shared_folders: list, good_paths: list) -> tuple: + """Return (x_count, y_count, existing_paths_set, new_nodes_list). + existing_paths_set = set of full paths that exist (all segments have uid). 
+ new_nodes_list = list of (full_path, parent_path, segment_name, node_ref) for each node with uid '', sorted by path (parent before child).""" + sf_name_map = {shf["name"]: shf for shf in ksm_shared_folders} + existing_paths = set() + new_nodes_list = [] # (full_path, parent_path, segment_name, node_dict) + + for path, parts in good_paths: + if not parts or parts[0] not in sf_name_map: + continue + root_name = parts[0] + if len(parts) == 1: + existing_paths.add(path) + continue + tree = sf_name_map[root_name].get("folder_tree") or {} + current = tree + prefix = root_name + parent_path = root_name + for i in range(1, len(parts)): + name = parts[i] + path_so_far = f"{prefix}/{name}" if prefix else name + node = current.get(name) if isinstance(current, dict) else None + if not node: + break + uid = node.get("uid") or "" + if uid: + existing_paths.add(path_so_far) + parent_path = path_so_far + else: + new_nodes_list.append((path_so_far, parent_path, name, node)) + parent_path = path_so_far + current = node.get("subfolders") or {} + prefix = path_so_far + + # Dedupe new nodes by path and sort so parent before child + seen = set() + deduped = [] + for item in new_nodes_list: + if item[0] not in seen: + seen.add(item[0]) + deduped.append(item) + deduped.sort(key=lambda x: (x[0].count("/"), x[0])) + x_count = len(existing_paths) + y_count = len(deduped) + return (x_count, y_count, existing_paths, deduped) + + +def _collect_all_folder_uids_under_ksm(ksm_shared_folders: list) -> set: + """Return set of all folder UIDs (shared folder roots + all descendants) under KSM app.""" + out = set() + for shf in ksm_shared_folders: + out.add(shf["uid"]) + tree = shf.get("folder_tree") or {} + + def walk(t): + for name, node in (t or {}).items(): + uid = (node or {}).get("uid") + if uid: + out.add(uid) + walk((node or {}).get("subfolders") or {}) + + walk(tree) + return out + + +def _get_ksm_app_record_uids(params, ksm_shared_folders: list) -> set: + """Return set of all record UIDs in any folder shared to the KSM app.""" + folder_uids = _collect_all_folder_uids_under_ksm(ksm_shared_folders) + record_uids = set() + subfolder_record_cache = getattr(params, "subfolder_record_cache", None) or {} + for fuid in folder_uids: + if fuid in subfolder_record_cache: + record_uids.update(subfolder_record_cache[fuid]) + return record_uids + + +def _get_records_in_folder(params, folder_uid: str): + """Return list of (record_uid, title, record_type) for records in folder_uid. + record_type from record for autodetect (e.g. 
pamUser, pamMachine, login).""" + subfolder_record_cache = getattr(params, "subfolder_record_cache", None) or {} + result = [] + for ruid in subfolder_record_cache.get(folder_uid, []): + try: + rec = vault.KeeperRecord.load(params, ruid) + title = getattr(rec, "title", "") or "" + rtype = "" + if hasattr(rec, "record_type"): + rtype = getattr(rec, "record_type", "") or "" + result.append((ruid, title, rtype)) + except Exception: + pass + return result + + +def _get_all_ksm_app_records(params, ksm_shared_folders: list) -> list: + """Return list of (record_uid, title, record_type) for every record in any folder under KSM app.""" + folder_uids = _collect_all_folder_uids_under_ksm(ksm_shared_folders) + out = [] + for fuid in folder_uids: + out.extend(_get_records_in_folder(params, fuid)) + return out + + +def _folder_uids_under_shf(shf: dict) -> set: + """Return set of folder UIDs under this shared folder (root + all descendants from folder_tree).""" + out = {shf.get("uid")} + tree = shf.get("folder_tree") or {} + + def walk(t): + for name, node in (t or {}).items(): + uid = (node or {}).get("uid") + if uid: + out.add(uid) + walk((node or {}).get("subfolders") or {}) + + walk(tree) + return out + + +def _is_resource_type(obj) -> bool: + """True if object is a PAM resource (machine, database, directory, remote browser).""" + t = (getattr(obj, "type", None) or "").lower() + return t in ("pammachine", "pamdatabase", "pamdirectory", "pamremotebrowser") + + +def _record_identifier(obj, fallback_login: str = "") -> str: + """Return identifier for error messages: uid if present, else title, else login (for users).""" + uid = getattr(obj, "uid_imported", None) or getattr(obj, "uid", None) + if uid and isinstance(uid, str) and RecordV3.is_valid_ref_uid(uid): + return f'uid "{uid}"' + title = getattr(obj, "title", None) or "" + if title: + return f'"{title}"' + login = getattr(obj, "login", None) or fallback_login + return f'login "{login}"' if login else "record" + + +def _has_autogenerated_title(obj) -> bool: + """True if obj has title set by base.py when missing in JSON. base.py uses: + pamUser -> PAM User - {login}; pamMachine -> PAM Machine - {login}; + pamDatabase -> PAM Database - {databaseId}; pamDirectory -> PAM Directory - {domainName}; + pamRemoteBrowser (RBI) -> PAM RBI - {hostname from rbiUrl}.""" + rtype = (getattr(obj, "type", None) or "").lower() + title = (getattr(obj, "title", None) or "").strip() + login = (getattr(obj, "login", None) or "").strip() + if rtype == "pamuser" and login and title == f"PAM User - {login}": + return True + if rtype == "pammachine" and login and title == f"PAM Machine - {login}": + return True + database_id = (getattr(obj, "databaseId", None) or "").strip() + if rtype == "pamdatabase" and database_id and title == f"PAM Database - {database_id}": + return True + domain_name = (getattr(obj, "domainName", None) or "").strip() + if rtype == "pamdirectory" and domain_name and title == f"PAM Directory - {domain_name}": + return True + if rtype == "pamremotebrowser" and getattr(obj, "rbiUrl", None) and title.startswith("PAM RBI - "): + return True + return False + + +def _vault_title_matches_import(vault_title: str, import_title: str) -> bool: + """True if vault record title matches import title verbatim (both already in same form, e.g. 
from base.py).""" + return (vault_title or "").strip() == (import_title or "").strip() + + +class PAMProjectExtendCommand(Command): + parser = argparse.ArgumentParser(prog="pam project extend") + parser.add_argument("--config", "-c", required=True, dest="config", action="store", help="PAM Configuration UID or Title") + parser.add_argument("--filename", "-f", required=True, dest="file_name", action="store", help="File to load import data from.") + parser.add_argument("--dry-run", "-d", required=False, dest="dry_run", action="store_true", default=False, help="Test import without modifying vault.") + + def get_parser(self): + return PAMProjectExtendCommand.parser + + def execute(self, params, **kwargs): + dry_run = kwargs.get("dry_run", False) is True + file_name = str(kwargs.get("file_name") or "") + config_name = str(kwargs.get("config") or "") + + api.sync_down(params) + + configuration = None + if config_name in params.record_cache: + configuration = vault.KeeperRecord.load(params, config_name) + else: + l_name = config_name.casefold() + for c in vault_extensions.find_records(params, record_version=6): + if c.title.casefold() == l_name: + configuration = c + break + + if not (configuration and isinstance(configuration, vault.TypedRecord) and configuration.version == 6): + raise CommandError("pam project extend", f"""PAM configuration not found: "{config_name}" """) + + if not (file_name != "" and os.path.isfile(file_name)): + raise CommandError("pam project extend", f"""PAM Import JSON file not found: "{file_name}" """) + + data = {} + try: + with open(file_name, encoding="utf-8") as f: + data = json.load(f) + except Exception: + data = {} + + pam_data = data.get("pam_data") if isinstance(data, dict) else {} + pam_data = pam_data if isinstance(pam_data, dict) else {} + users = pam_data["users"] if isinstance(pam_data.get("users"), list) else [] + resources = pam_data["resources"] if isinstance(pam_data.get("resources"), list) else [] + if not (resources or users): + raise CommandError("pam project extend", f"""PAM data missing - file "{file_name}" """ + """must be a valid JSON ex. {"pam_data": {"resources": [], "users":[]}} """) + + has_extra_keys = any(key != "pam_data" for key in data) if isinstance(data, dict) else False + if has_extra_keys: + logging.warning(f"{bcolors.WARNING}WARNING: Import JSON contains extra data - " + f"""`extend` command uses only "pam_data": {{ }} {bcolors.ENDC}""") + + if dry_run: + print("[DRY RUN] No changes will be made. 
This is a simulation only.") + + # Find Controller/Gateway/App from PAM Configuration + controller = configuration_controller_get(params, url_safe_str_to_bytes(configuration.record_uid)) + if not (controller and isinstance(controller, pam_pb2.PAMController) and controller.controllerUid): # pylint: disable=no-member + raise CommandError("pam project extend", f"{bcolors.FAIL}" + f"Gateway UID not found for configuration {configuration.record_uid}.") + + ksmapp_uid = None + gateway_uid = utils.base64_url_encode(controller.controllerUid) + all_gateways = gateway_helper.get_all_gateways(params) + found_gateways = list(filter(lambda g: g.controllerUid == controller.controllerUid, all_gateways)) + if found_gateways and found_gateways[0]: + ksmapp_uid = utils.base64_url_encode(found_gateways[0].applicationUid) + if ksmapp_uid is None: + raise CommandError("pam project extend", f"{bcolors.FAIL}" + f"KSM APP UID not found for Gateway {gateway_uid}.") + ksm_app_record = vault.KeeperRecord.load(params, ksmapp_uid) + if not (ksm_app_record and isinstance(ksm_app_record, vault.ApplicationRecord) and ksm_app_record.version == 5): + raise CommandError("pam project extend", f"""PAM KSM Application record not found: "{ksmapp_uid}" """) + + # Find KSM Application shared folders + ksm_shared_folders = self.get_app_shared_folders(params, ksmapp_uid) + if not ksm_shared_folders: + raise CommandError("pam project extend", f""" No shared folders found for KSM Application: "{ksmapp_uid}" """) + + if dry_run: + print(f"[DRY RUN] Will use PAM Configuration: {configuration.record_uid} {configuration.title}") + print(f"[DRY RUN] Will use PAM Gateway: {gateway_uid} {controller.controllerName}") + print(f"[DRY RUN] Will use KSM Application: {ksmapp_uid} {ksm_app_record.title}") + print(f"[DRY RUN] Total shared folders found for the KSM App: {len(ksm_shared_folders)}") + for shf in ksm_shared_folders: + uid, name, permissions = shf.get("uid"), shf.get("name"), shf.get("permissions") + print(f"""[DRY RUN] Found shared folder: {uid} "{name}" ({permissions})""") + + for shf in ksm_shared_folders: + shf["folder_tree"] = build_tree_recursive(params, shf["uid"]) + + project = { + "data": {"pam_data": pam_data}, + "options": {"dry_run": dry_run}, + "ksm_shared_folders": ksm_shared_folders, + "folders": {}, + "pam_config": {"pam_config_uid": configuration.record_uid, "pam_config_object": None}, + "error_count": 0, + } + + self.process_folders(params, project) + self.map_records(params, project) + if project.get("error_count", 0) == 0: + has_new_no_path = False + for o in chain(project.get("mapped_resources", []), project.get("mapped_users", [])): + if getattr(o, "_extend_tag", None) == "new" and not (getattr(o, "folder_path", None) or "").strip(): + has_new_no_path = True + break + if not has_new_no_path: + for mach in project.get("mapped_resources", []): + if hasattr(mach, "users") and isinstance(mach.users, list): + for u in mach.users: + if getattr(u, "_extend_tag", None) == "new" and not (getattr(u, "folder_path", None) or "").strip(): + has_new_no_path = True + break + if has_new_no_path: + break + if has_new_no_path: + self.autodetect_folders(params, project) + + err_count = project.get("error_count", 0) + new_count = project.get("new_record_count", 0) + if err_count > 0: + print(f"{err_count} errors; aborting. 
No changes made to vault.") + print("Use --dry-run option to see detailed error messages.") + return + if new_count == 0: + print("Nothing to update") + return + + path_to_folder_uid = (project.get("folders") or {}).get("path_to_folder_uid") or {} + res_folder_uid = (project.get("folders") or {}).get("resources_folder_uid", "") + usr_folder_uid = (project.get("folders") or {}).get("users_folder_uid", "") + + for o in chain(project.get("mapped_resources", []), project.get("mapped_users", [])): + if getattr(o, "_extend_tag", None) != "new": + continue + fp = (getattr(o, "folder_path", None) or "").strip() + o.resolved_folder_uid = path_to_folder_uid.get(fp) or (res_folder_uid if _is_resource_type(o) else usr_folder_uid) + for mach in project.get("mapped_resources", []): + if hasattr(mach, "users") and isinstance(mach.users, list): + for u in mach.users: + if getattr(u, "_extend_tag", None) != "new": + continue + fp = (getattr(u, "folder_path", None) or "").strip() + u.resolved_folder_uid = path_to_folder_uid.get(fp) or usr_folder_uid + + if dry_run: + print("[DRY RUN COMPLETE] No changes were made. All actions were validated but not executed.") + return + self.process_data(params, project) + + def get_app_shared_folders(self, params, ksm_app_uid: str) -> list[dict]: + ksm_shared_folders = [] + + try: + app_info_list = KSMCommand.get_app_info(params, ksm_app_uid) + if app_info_list and len(app_info_list) > 0: + app_info = app_info_list[0] + shares = [x for x in app_info.shares if x.shareType == APIRequest_pb2.SHARE_TYPE_FOLDER] # pylint: disable=no-member + for share in shares: + folder_uid = utils.base64_url_encode(share.secretUid) + if folder_uid in params.shared_folder_cache: + cached_sf = params.shared_folder_cache[folder_uid] + folder_name = cached_sf.get('name_unencrypted', 'Unknown') + is_editable = share.editable if hasattr(share, 'editable') else False + + ksm_shared_folders.append({ + 'uid': folder_uid, + 'name': folder_name, + 'editable': is_editable, + 'permissions': "Editable" if is_editable else "Read-Only" + }) + except Exception as e: + logging.error(f"Could not retrieve KSM application shares: {e}") + + return ksm_shared_folders + + def process_folders(self, params, project: dict) -> dict: + """Step 1: Parse folder_paths from pam_data, build tree, process paths, optionally create new folders. 
+        data = project.get("data") or {}
+        pam_data = data.get("pam_data") or {}
+        resources = pam_data.get("resources") or []
+        users = pam_data.get("users") or []
+        options = project.get("options") or {}
+        dry_run = options.get("dry_run", False) is True
+        ksm_shared_folders = project.get("ksm_shared_folders") or []
+        folders_out = project.get("folders") or {}
+        project["folders"] = folders_out
+
+        # Collect unique folder_paths from resources, nested machine.users[], and top-level users (raw dicts)
+        folder_paths_set = set()
+        for r in resources:
+            if isinstance(r, dict):
+                if r.get("folder_path"):
+                    folder_paths_set.add(r["folder_path"])
+                for nested in r.get("users") or []:
+                    if isinstance(nested, dict) and nested.get("folder_path"):
+                        folder_paths_set.add(nested["folder_path"])
+        for u in users:
+            if isinstance(u, dict) and u.get("folder_path"):
+                folder_paths_set.add(u["folder_path"])
+        folder_paths = list(folder_paths_set)
+
+        good_paths, bad_paths = process_folder_paths(folder_paths, ksm_shared_folders)
+
+        path_to_folder_uid = {}
+        has_errors = bool(bad_paths)
+        for shf in ksm_shared_folders:
+            name = shf.get("name") or ""
+            if name:
+                path_to_folder_uid[name] = shf["uid"]
+                _collect_path_to_uid_from_tree(
+                    name,
+                    shf.get("folder_tree") or {},
+                    path_to_folder_uid,
+                    only_existing=has_errors,
+                )
+
+        x_count, y_count, existing_paths_set, new_nodes_list = _count_existing_and_new_paths(
+            ksm_shared_folders, good_paths
+        )
+
+        # Pre-generate UIDs for new folders (same as records: known before create). Fills path_to_folder_uid
+        # so dry run and map_records can resolve folder_path for all good paths.
+        for full_path, _parent_path, _name, node in new_nodes_list:
+            if not (node or {}).get("uid"):
+                uid = api.generate_record_uid()
+                node["uid"] = uid
+                path_to_folder_uid[full_path] = uid
+
+        step1_errors = list(bad_paths)
+        if step1_errors:
+            project["error_count"] = project.get("error_count", 0) + len(step1_errors)
+
+        # Folder path printing: dry run always; normal run only if errors or Y > 0
+        print_paths = dry_run or step1_errors or y_count > 0
+        if print_paths:
+            prefix = "[DRY RUN] " if dry_run else ""
+            print(f"{prefix}Processed {len(folder_paths)} folder paths:")
+            print(f"{prefix} - Good paths: {len(good_paths)}")
+            for path, _ in good_paths:
+                tag = "existing" if path in existing_paths_set else "new"
+                if logging.getLogger().getEffectiveLevel() <= logging.DEBUG:
+                    print(f"{prefix} [{tag}] {path}")
+                else:
+                    print(f"{prefix} ✓ {path}")
+            print(f"{prefix} - Bad paths: {len(bad_paths)}")
+            for path, reason in bad_paths:
+                print(f"{prefix} ✗ {path}: {reason}")
+            if step1_errors:
+                print(f"Total: {len(step1_errors)} errors")
+
+        if not dry_run and not step1_errors and new_nodes_list:
+            sf_name_map = {shf["name"]: shf for shf in ksm_shared_folders}
+            for full_path, parent_path, name, node in new_nodes_list:
+                parent_uid = path_to_folder_uid.get(parent_path, "")
+                if not parent_uid and parent_path in sf_name_map:
+                    parent_uid = sf_name_map[parent_path]["uid"]
+                new_uid = self.create_subfolder(params, name, parent_uid, folder_uid=node.get("uid"))
+                node["uid"] = new_uid
+                path_to_folder_uid[full_path] = new_uid
+            api.sync_down(params)
+
+        existing_msg = f"{x_count} existing folders (skipped)" if x_count else "0 existing folders"
+        if dry_run:
+            print(f"[DRY RUN] {existing_msg}, {y_count} new folders to be created")
+        else:
+ print(f"{existing_msg}, {y_count} new folders created") + + if logging.getLogger().getEffectiveLevel() <= logging.DEBUG: + for path, _ in good_paths: + tag = "existing" if path in existing_paths_set else "new" + print(f" [DEBUG] [{tag}] {path}") + + folders_out["path_to_folder_uid"] = path_to_folder_uid + folders_out["good_paths"] = good_paths + folders_out["bad_paths"] = bad_paths + folders_out["folder_stats_x"] = x_count + folders_out["folder_stats_y"] = y_count + return folders_out + + def map_records(self, params, project: dict) -> tuple: + """Step 2: Parse resources/users, tag existing vs new, set obj.uid; collect errors. + Returns (resources, users, step2_errors, new_record_count). Updates project['error_count'].""" + data = project.get("data") or {} + pam_data = data.get("pam_data") or {} + path_to_folder_uid = (project.get("folders") or {}).get("path_to_folder_uid") or {} + ksm_shared_folders = project.get("ksm_shared_folders") or [] + options = project.get("options") or {} + dry_run = options.get("dry_run", False) is True + + rotation_profiles = pam_data.get("rotation_profiles") or {} + if not isinstance(rotation_profiles, dict): + rotation_profiles = {} + pam_cfg_uid = (project.get("pam_config") or {}).get("pam_config_uid", "") + rotation_params = PamRotationParams(configUid=pam_cfg_uid, profiles=rotation_profiles) + + usrs = pam_data.get("users") or [] + rsrs = pam_data.get("resources") or [] + users = [] + resources = [] + + for user in usrs: + rt = str(user.get("type", "")) if isinstance(user, dict) else "" + rt = next((x for x in ("login", "pamUser") if x.lower() == rt.lower()), rt) + if rt not in ("login", "pamUser") and isinstance(user, dict): + pam_keys = ("private_pem_key", "distinguished_name", "connect_database", "managed", "scripts", "rotation_settings") + if user.get("url"): rt = "login" + elif any(k in user for k in pam_keys): rt = "pamUser" + rt = next((x for x in ("login", "pamUser") if x.lower() == rt.lower()), "login") + if rt == "login": + usr = LoginUserObject.load(user) + else: + usr = PamUserObject.load(user) + if usr: + users.append(usr) + + for machine in rsrs: + rt = str(machine.get("type", "")).strip() if isinstance(machine, dict) else "" + if rt.lower() not in (x.lower() for x in PAM_RESOURCES_RECORD_TYPES): + title = str(machine.get("title", "")).strip() if isinstance(machine, dict) else "" + logging.error(f"Incorrect record type \"{rt}\" - should be one of {PAM_RESOURCES_RECORD_TYPES}, \"{title}\" record skipped.") + continue + obj = None + rtl = rt.lower() + if rtl == "pamdatabase": + obj = PamDatabaseObject.load(machine, rotation_params) + elif rtl == "pamdirectory": + obj = PamDirectoryObject.load(machine, rotation_params) + elif rtl == "pammachine": + obj = PamMachineObject.load(machine, rotation_params) + elif rtl == "pamremotebrowser": + obj = PamRemoteBrowserObject.load(machine, rotation_params) + if obj: + resources.append(obj) + + for obj in chain(resources, users): + if not (isinstance(getattr(obj, "uid", None), str) and RecordV3.is_valid_ref_uid(obj.uid)): + obj.uid = utils.generate_uid() + if hasattr(obj, "users") and isinstance(obj.users, list): + for usr in obj.users: + if not (isinstance(getattr(usr, "uid", None), str) and RecordV3.is_valid_ref_uid(usr.uid)): + usr.uid = utils.generate_uid() + + ksm_app_uids = _get_ksm_app_record_uids(params, ksm_shared_folders) + all_ksm_records = _get_all_ksm_app_records(params, ksm_shared_folders) + good_paths = (project.get("folders") or {}).get("good_paths") or [] + good_paths_set = {p for p, _ in 
+        step2_errors = []
+
+        def _scope_key(obj, good_paths_set):
+            # Scope by folder only if path is good (exists or to be created); else "global".
+            # "Global" means: SHF shared to KSM App; for users → autodetected users folder (or single
+            # folder for both); for resources → autodetected resources folder (or same single folder).
+            # 0 or 3+ autodetected folders is an error anyway. Users are never scoped by machine.
+            fp = (getattr(obj, "folder_path", None) or "").strip()
+            if fp and fp in good_paths_set:
+                return fp
+            return "global"
+
+        seen_scope_title = {}  # (scope_key, title) -> list of (ident, machine_suffix) for error message
+        for o in chain(resources, users):
+            scope = _scope_key(o, good_paths_set)
+            title = (getattr(o, "title", None) or "").strip()
+            if title:
+                key = (scope, title)
+                ident = _record_identifier(o)
+                seen_scope_title.setdefault(key, []).append((ident, ""))
+        for mach in resources:
+            if hasattr(mach, "users") and isinstance(mach.users, list):
+                for u in mach.users:
+                    scope = _scope_key(u, good_paths_set)
+                    title = (getattr(u, "title", None) or "").strip()
+                    if title:
+                        key = (scope, title)
+                        ident = _record_identifier(u)
+                        suffix = f' (nested on machine "{getattr(mach, "title", "")}")'
+                        seen_scope_title.setdefault(key, []).append((ident, suffix))
+
+        for (scope, title), idents in seen_scope_title.items():
+            if len(idents) > 1:
+                scope_msg = f"folder {scope}" if scope != "global" else "global"
+                step2_errors.append(
+                    f'ERROR: Duplicate import records with same title "{title}" in same scope ({scope_msg}). '
+                    f'Add explicit "title" in JSON to disambiguate.'
+                )
+
+        def resolve_one(obj, parent_machine=None):
+            ident = _record_identifier(obj)
+            machine_suffix = ""
+            if parent_machine:
+                mt = getattr(parent_machine, "title", None) or ""
+                mu = getattr(parent_machine, "uid", None) or ""
+                machine_suffix = f' user on machine "{mt}"' if mt else f" user on machine <{mu}>"
+
+            uid_imp = getattr(obj, "uid_imported", None)
+            if uid_imp and isinstance(uid_imp, str) and RecordV3.is_valid_ref_uid(uid_imp):
+                if uid_imp not in ksm_app_uids:
+                    step2_errors.append(f'uid "{uid_imp}" not found in KSM app for record {ident}{machine_suffix}')
+                    return
+                obj.uid = uid_imp
+                obj._extend_tag = "existing"
+                return
+
+            folder_path = getattr(obj, "folder_path", None) or ""
+            title = (getattr(obj, "title", None) or "").strip()
+            login = (getattr(obj, "login", None) or "").strip()
+
+            if folder_path:
+                folder_uid = path_to_folder_uid.get(folder_path)
+                if not folder_uid:
+                    if folder_path in good_paths_set:
+                        obj._extend_tag = "new"
+                        return
+                    step2_errors.append(f'folder_path "{folder_path}" could not be resolved for record {ident}{machine_suffix}')
+                    return
+                if not title and not login:
+                    obj._extend_tag = "new"
+                    return
+                recs = _get_records_in_folder(params, folder_uid)
+                matches = [r for r in recs if _vault_title_matches_import(r[1], title)]
+                if len(matches) == 0:
+                    obj._extend_tag = "new"
+                    return
+                if len(matches) == 1:
+                    obj.uid = matches[0][0]
+                    obj._extend_tag = "existing"
+                    return
+                step2_errors.append(f'Multiple matching records for {ident} in folder "{folder_path}"; rename or set a unique title to disambiguate{machine_suffix}')
+                return
+
+            if not title and not login:
+                obj._extend_tag = "new"
+                return
+            matches = [r for r in all_ksm_records if _vault_title_matches_import(r[1], title)]
+            if len(matches) == 0:
+                obj._extend_tag = "new"
+                return
+            if len(matches) == 1:
+                obj.uid = matches[0][0]
+                obj._extend_tag = "existing"
+                return
+            step2_errors.append(f'Multiple matches for record {ident}; add folder_path to disambiguate{machine_suffix}')
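+        # Resolve standalone resources and users first, then users nested under machines,
+        # whose diagnostics also name the parent machine.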
+        for obj in resources:
+            resolve_one(obj, None)
+        for obj in users:
+            resolve_one(obj, None)
+        for mach in resources:
+            if hasattr(mach, "users") and isinstance(mach.users, list):
+                for usr in mach.users:
+                    resolve_one(usr, mach)
+
+        autogenerated_titles = []
+        for o in chain(resources, users):
+            if _has_autogenerated_title(o):
+                autogenerated_titles.append(getattr(o, "title", None) or "")
+        for mach in resources:
+            if hasattr(mach, "users") and isinstance(mach.users, list):
+                for u in mach.users:
+                    if _has_autogenerated_title(u):
+                        autogenerated_titles.append(getattr(u, "title", None) or "")
+        if autogenerated_titles:
+            print(
+                f"{bcolors.WARNING}Warning: {len(autogenerated_titles)} record(s) have autogenerated titles "
+                f"(e.g. PAM User/Machine/Database/Directory/RBI - ). Add \"title\" in import JSON to set an explicit record title.{bcolors.ENDC}"
+            )
+            if logging.getLogger().getEffectiveLevel() <= logging.DEBUG:
+                for t in autogenerated_titles:
+                    print(f" [DEBUG] autogenerated title: {t}")
+
+        machines = [x for x in resources if not isinstance(x, PamRemoteBrowserObject)]
+        pam_directories = [x for x in machines if (getattr(x, "type", "") or "").lower() == "pamdirectory"]
+        for mach in resources:
+            if not mach:
+                continue
+            admin_cred = get_admin_credential(mach)
+            sftp_user = get_sftp_attribute(mach, "sftpUser")
+            sftp_res = get_sftp_attribute(mach, "sftpResource")
+            if sftp_res:
+                ruids = [x for x in machines if getattr(x, "title", None) == sftp_res]
+                ruids = ruids or [x for x in machines if getattr(x, "login", None) == sftp_res]
+                if len(ruids) == 1 and getattr(ruids[0], "uid", ""):
+                    set_sftp_uid(mach, "sftpResourceUid", ruids[0].uid)
+            if sftp_user:
+                ruids = find_user(mach, users, sftp_user) or find_user(machines, users, sftp_user)
+                if len(ruids) == 1 and getattr(ruids[0], "uid", ""):
+                    set_sftp_uid(mach, "sftpUserUid", ruids[0].uid)
+            if admin_cred:
+                ruids = find_user(mach, users, admin_cred)
+                is_external = False
+                if not ruids:
+                    ruids = find_external_user(mach, machines, admin_cred)
+                    is_external = True
+                if len(ruids) == 1 and getattr(ruids[0], "uid", ""):
+                    set_user_record_uid(mach, ruids[0].uid, is_external)
+            if mach.pam_settings and getattr(mach.pam_settings, "jit_settings", None):
+                jit = mach.pam_settings.jit_settings
+                ref = getattr(jit, "pam_directory_record", None) or ""
+                if ref and isinstance(ref, str) and ref.strip():
+                    matches = [x for x in pam_directories if getattr(x, "title", None) == ref.strip()]
+                    if len(matches) == 1:
+                        jit.pam_directory_uid = matches[0].uid
+            resolve_script_creds(mach, users, resources)
+            if hasattr(mach, "users") and isinstance(mach.users, list):
+                for usr in mach.users:
+                    if usr and hasattr(usr, "rotation_settings") and usr.rotation_settings:
+                        rot = getattr(usr.rotation_settings, "rotation", None)
+                        if rot == "general":
+                            usr.rotation_settings.resourceUid = mach.uid
+                        elif rot in ("iam_user", "scripts_only"):
+                            usr.rotation_settings.resourceUid = pam_cfg_uid
+                    resolve_script_creds(usr, users, resources)
+            if hasattr(mach, "rbi_settings") and getattr(mach.rbi_settings, "connection", None):
+                conn = mach.rbi_settings.connection
+                if getattr(conn, "protocol", None) and str(getattr(conn.protocol, "value", "") or "").lower() == "http":
+                    creds = getattr(conn, "httpCredentials", None)
+                    if creds:
+                        cred = str(creds[0]) if isinstance(creds, list) else str(creds)
+                        matches = [x for x in users if getattr(x, "title", None) == cred]
+                        matches = matches or [x for x in users if getattr(x, "login", None) == cred]
+                        if len(matches) == 1 and getattr(matches[0], "uid", ""):
+                            mach.rbi_settings.connection.httpCredentialsUid = [matches[0].uid]
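+        # Top-level users: IAM/scripts-only rotation binds to the PAM configuration,
+        # while "general" rotation references a resource looked up by title, then login.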
getattr(x, "login", None) == cred] + if len(matches) == 1 and getattr(matches[0], "uid", ""): + mach.rbi_settings.connection.httpCredentialsUid = [matches[0].uid] + for usr in users: + if usr and hasattr(usr, "rotation_settings") and usr.rotation_settings: + rot = getattr(usr.rotation_settings, "rotation", None) + if rot in ("iam_user", "scripts_only"): + usr.rotation_settings.resourceUid = pam_cfg_uid + elif rot == "general": + res = getattr(usr.rotation_settings, "resource", "") or "" + if res: + ruids = [x for x in machines if getattr(x, "title", None) == res] + ruids = ruids or [x for x in machines if getattr(x, "login", None) == res] + if ruids: + usr.rotation_settings.resourceUid = ruids[0].uid + resolve_script_creds(usr, users, resources) + + if step2_errors: + project["error_count"] = project.get("error_count", 0) + len(step2_errors) + + x_count = sum(1 for o in chain(resources, users) if getattr(o, "_extend_tag", None) == "existing") + for mach in resources: + if hasattr(mach, "users") and isinstance(mach.users, list): + x_count += sum(1 for u in mach.users if getattr(u, "_extend_tag", None) == "existing") + y_count = 0 + for o in chain(resources, users): + if getattr(o, "_extend_tag", None) == "new": + y_count += 1 + for mach in resources: + if hasattr(mach, "users") and isinstance(mach.users, list): + y_count += sum(1 for u in mach.users if getattr(u, "_extend_tag", None) == "new") + + existing_rec_msg = f"{x_count} existing records (skipped)" if x_count else "0 existing records" + total_line = f"{existing_rec_msg}, {y_count} new records to be created" + for err in step2_errors: + print(f" {err}") + if step2_errors: + print(f"Total: {len(step2_errors)} errors") + + if dry_run: + for o in chain(resources, users): + tag = getattr(o, "_extend_tag", "?") + path = getattr(o, "folder_path", "") or "autodetect" + otype = getattr(o, "type", "") or "" + label = getattr(o, "title", None) or getattr(o, "login", None) or "" + uid_suffix = f"\tuid={getattr(o, 'uid', '')}" if tag == "existing" else "" + print(f" [DRY RUN] [{tag}] folder={path}\trecord={otype}: {label}{uid_suffix}") + for mach in resources: + if hasattr(mach, "users") and isinstance(mach.users, list): + for u in mach.users: + tag = getattr(u, "_extend_tag", "?") + path = getattr(u, "folder_path", "") or "autodetect" + utype = getattr(u, "type", "") or "" + label = getattr(u, "title", None) or getattr(u, "login", None) or "" + uid_suffix = f"\tuid={getattr(u, 'uid', '')}" if tag == "existing" else "" + print(f" [DRY RUN] [{tag}] folder={path}\trecord={utype}: {label} (nested on {getattr(mach, 'title', '')}){uid_suffix}") + print(f"[DRY RUN] {total_line}") + else: + if logging.getLogger().getEffectiveLevel() <= logging.DEBUG: + for o in chain(resources, users): + tag = getattr(o, "_extend_tag", "?") + path = getattr(o, "folder_path", "") or "autodetect" + otype = getattr(o, "type", "") or "" + label = getattr(o, "title", None) or getattr(o, "login", None) or "" + uid_suffix = f"\tuid={getattr(o, 'uid', '')}" if tag == "existing" else "" + print(f" [DEBUG] [{tag}] folder={path}\trecord={otype}: {label}{uid_suffix}") + for mach in resources: + if hasattr(mach, "users") and isinstance(mach.users, list): + for u in mach.users: + tag = getattr(u, "_extend_tag", "?") + path = getattr(u, "folder_path", "") or "autodetect" + utype = getattr(u, "type", "") or "" + label = getattr(u, "title", None) or getattr(u, "login", None) or "" + uid_suffix = f"\tuid={getattr(u, 'uid', '')}" if tag == "existing" else "" + print(f" [DEBUG] [{tag}] 
+            print(total_line)
+
+        project["mapped_resources"] = resources
+        project["mapped_users"] = users
+        project["new_record_count"] = y_count
+        return (resources, users, step2_errors, y_count)
+
+    def autodetect_folders(self, params, project: dict) -> list:
+        """Step 3: Autodetect resources_folder_uid and users_folder_uid when new records have no folder_path.
+        Call only when error_count==0 and there are records with no uid and no folder_path (tagged new).
+        Returns list of step3 errors; updates project['folders'] with resources_folder_uid/users_folder_uid on success."""
+        step3_errors = []
+        folders_out = project.get("folders") or {}
+        ksm_shared_folders = project.get("ksm_shared_folders") or []
+        subfolder_record_cache = getattr(params, "subfolder_record_cache", None) or {}
+
+        new_no_path = []
+        for o in chain(project.get("mapped_resources", []), project.get("mapped_users", [])):
+            if getattr(o, "_extend_tag", None) == "new":
+                if not (getattr(o, "folder_path", None) or "").strip():
+                    new_no_path.append(o)
+        for mach in project.get("mapped_resources", []):
+            if hasattr(mach, "users") and isinstance(mach.users, list):
+                for u in mach.users:
+                    if getattr(u, "_extend_tag", None) == "new" and not (getattr(u, "folder_path", None) or "").strip():
+                        new_no_path.append(u)
+        if not new_no_path:
+            return step3_errors
+
+        shf_list = [(shf["uid"], shf.get("name") or "") for shf in ksm_shared_folders]
+        if len(shf_list) == 1:
+            folders_out["resources_folder_uid"] = shf_list[0][0]
+            folders_out["users_folder_uid"] = shf_list[0][0]
+            print("Warning: Using single shared folder for both resources and users (best practice: separate).")
+            return step3_errors
+
+        if len(shf_list) == 2:
+            names = [n for _, n in shf_list]
+            r_idx = next((i for i, n in enumerate(names) if n.endswith("- Resources")), -1)
+            u_idx = next((i for i, n in enumerate(names) if n.endswith("- Users")), -1)
+            if r_idx >= 0 and u_idx >= 0 and r_idx != u_idx:
+                folders_out["resources_folder_uid"] = shf_list[r_idx][0]
+                folders_out["users_folder_uid"] = shf_list[u_idx][0]
+                return step3_errors
+
+        non_empty = []
+        for shf in ksm_shared_folders:
+            uids = _folder_uids_under_shf(shf)
+            if any(subfolder_record_cache.get(fuid) for fuid in uids):
+                non_empty.append(shf)
+        if len(non_empty) == 0:
+            step3_errors.append("Autodetect: no folders contain records; cannot assign resources/users folders.")
+            project["error_count"] = project.get("error_count", 0) + len(step3_errors)
+            for e in step3_errors:
+                print(f" {e}")
+            print(f"Total: {len(step3_errors)} errors")
+            return step3_errors
+        if len(non_empty) == 1:
+            folders_out["resources_folder_uid"] = non_empty[0]["uid"]
+            folders_out["users_folder_uid"] = non_empty[0]["uid"]
+            print("Warning: Using single non-empty folder for both resources and users.")
+            return step3_errors
+        if len(non_empty) == 2:
+            res_uid = users_uid = None
+            for shf in non_empty:
+                uids = _folder_uids_under_shf(shf)
+                for fuid in uids:
+                    recs = _get_records_in_folder(params, fuid)
+                    if not recs:
+                        continue
+                    for ruid, _title, rtype in recs:
+                        rtype = (rtype or "").lower()
+                        if rtype in ("pamuser", "login"):
+                            users_uid = shf["uid"]
+                            break
+                        if rtype in ("pammachine", "pamdatabase", "pamdirectory", "pamremotebrowser"):
+                            res_uid = shf["uid"]
+                            break
+                    if users_uid is not None or res_uid is not None:
+                        break
+                if users_uid is not None and res_uid is not None:
+                    break
+            if res_uid is not None and users_uid is not None:
+                folders_out["resources_folder_uid"] = res_uid
+                folders_out["users_folder_uid"] = users_uid
+                return step3_errors
+            step3_errors.append("Autodetect: could not determine which folder is resources vs users.")
+        else:
+            step3_errors.append("Autodetect: three or more non-empty folders; add folder_path to disambiguate.")
+        project["error_count"] = project.get("error_count", 0) + len(step3_errors)
+        for e in step3_errors:
+            print(f" {e}")
+        if step3_errors:
+            print(f"Total: {len(step3_errors)} errors")
+        return step3_errors
+
+    def create_subfolder(self, params, folder_name: str, parent_uid: str = "", permissions: Optional[Dict] = None, folder_uid: Optional[str] = None):
+        # folder_uid: if provided, create folder with this UID (same as records with pre-generated uid).
+
+        name = str(folder_name or "").strip()
+        base_folder = params.folder_cache.get(parent_uid, None) or params.root_folder
+
+        shared_folder = bool(permissions)
+        user_folder = not permissions  # uf or sff (split later)
+        if not folder_uid:
+            folder_uid = api.generate_record_uid()
+        request: Dict[str, Any] = {
+            "command": "folder_add",
+            "folder_type": "user_folder",
+            "folder_uid": folder_uid
+        }
+
+        if shared_folder:
+            if base_folder.type in {BaseFolderNode.RootFolderType, BaseFolderNode.UserFolderType}:
+                request["folder_type"] = "shared_folder"
+                for perm in ["manage_users", "manage_records", "can_share", "can_edit"]:
+                    if permissions and permissions.get(perm, False) == True:
+                        request[perm] = True
+            else:
+                raise CommandError("pam", "Shared folders cannot be nested")
+        elif user_folder:
+            if base_folder.type in {BaseFolderNode.SharedFolderType, BaseFolderNode.SharedFolderFolderType}:
+                request["folder_type"] = "shared_folder_folder"
+            else:
+                request["folder_type"] = "user_folder"
+
+        if request.get("folder_type") is None:
+            if base_folder.type in {BaseFolderNode.SharedFolderType, BaseFolderNode.SharedFolderFolderType}:
+                request["folder_type"] = "shared_folder_folder"
+
+        folder_key = os.urandom(32)
+        encryption_key = params.data_key
+        if request["folder_type"] == "shared_folder_folder":
+            sf_uid = base_folder.shared_folder_uid if base_folder.type == BaseFolderNode.SharedFolderFolderType else base_folder.uid
+            sf = params.shared_folder_cache[sf_uid]
+            encryption_key = sf["shared_folder_key_unencrypted"]
+            request["shared_folder_uid"] = sf_uid
+
+        request["key"] = utils.base64_url_encode(crypto.encrypt_aes_v1(folder_key, encryption_key))
+        if base_folder.type not in {BaseFolderNode.RootFolderType, BaseFolderNode.SharedFolderType}:
+            request["parent_uid"] = base_folder.uid
+
+        if request["folder_type"] == "shared_folder":
+            request["name"] = utils.base64_url_encode(crypto.encrypt_aes_v1(name.encode("utf-8"), folder_key))
+        data_dict = {"name": name}
+        data = json.dumps(data_dict)
+        request["data"] = utils.base64_url_encode(crypto.encrypt_aes_v1(data.encode("utf-8"), folder_key))
+
+        api.communicate(params, request)
+        api.sync_down(params)
+        params.environment_variables[LAST_FOLDER_UID] = folder_uid
+        if request["folder_type"] == "shared_folder":
+            params.environment_variables[LAST_SHARED_FOLDER_UID] = folder_uid
+        return folder_uid
+
+    def find_folders(self, params, parent_uid: str, folder: str, is_shared_folder: bool) -> List[BaseFolderNode]:
+        result: List[BaseFolderNode] = []
+        folders = params.folder_cache if params and params.folder_cache else {}
+        if not isinstance(folders, dict):
+            return result
+
+        puid = parent_uid if parent_uid else None  # root folder parent uid is set to None
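+        # Exact, case-sensitive name match under the given parent; the type filter keeps
+        # shared folders and user folders with the same name from shadowing each other.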
+        matches = {k: v for k, v in folders.items() if v.parent_uid == puid and v.name == folder}
+        result = [v for k, v in matches.items() if
+                  (is_shared_folder and v.type == BaseFolderNode.SharedFolderType) or
+                  (not is_shared_folder and v.type == BaseFolderNode.UserFolderType)]
+        return result
+
+    def create_ksm_app(self, params, app_name) -> str:
+        app_record_data = {
+            "title": app_name,
+            "type": "app"
+        }
+
+        data_json = json.dumps(app_record_data)
+        record_key_unencrypted = utils.generate_aes_key()
+        record_key_encrypted = crypto.encrypt_aes_v2(record_key_unencrypted, params.data_key)
+
+        app_record_uid_str = api.generate_record_uid()
+        app_record_uid = utils.base64_url_decode(app_record_uid_str)
+
+        data = data_json.decode("utf-8") if isinstance(data_json, bytes) else data_json
+        data = api.pad_aes_gcm(data)
+
+        rdata = bytes(data, "utf-8")  # type: ignore
+        rdata = crypto.encrypt_aes_v2(rdata, record_key_unencrypted)
+
+        ra = record_pb2.ApplicationAddRequest()  # pylint: disable=E1101
+        ra.app_uid = app_record_uid  # type: ignore
+        ra.record_key = record_key_encrypted  # type: ignore
+        ra.client_modified_time = api.current_milli_time()  # type: ignore
+        ra.data = rdata  # type: ignore
+
+        api.communicate_rest(params, ra, "vault/application_add")
+        api.sync_down(params)
+        return app_record_uid_str
+
+    def create_gateway(
+        self, params, gateway_name, ksm_app, config_init, ott_expire_in_min=5
+    ):
+        token = KSMCommand.add_client(
+            params,
+            app_name_or_uid=ksm_app,
+            count=1,
+            unlock_ip=True,
+            first_access_expire_on=ott_expire_in_min,
+            access_expire_in_min=None,  # None=Never, int = num of min
+            client_name=gateway_name,
+            config_init=config_init,
+            silent=True,
+            client_type=enterprise_pb2.DISCOVERY_AND_ROTATION_CONTROLLER)  # pylint: disable=E1101
+        api.sync_down(params)
+
+        return token
+
+    def verify_users_and_teams(self, params, users_and_teams):
+        api.load_available_teams(params)
+        for item in users_and_teams:
+            name = item.get("name", "")
+            teams = []
+            # do not use params.team_cache:
+            for cached in params.available_team_cache or []:
+                team = api.Team(team_uid=cached.get("team_uid", ""), name=cached.get("team_name", ""))
+                if name == team.team_uid or name.casefold() == team.name.casefold():
+                    teams.append(team)
+            users = []
+            for user in params.enterprise.get("users", []):
+                # if user["node_id"] not in node_scope: continue
+                # skip: node_id, status, lock, tfa_enabled, account_share_expiration
+                usr = {
+                    "id": user.get("enterprise_user_id", "") or "",
+                    "username": user.get("username", "") or "",
+                    "name": user.get("data", {}).get("displayname", "") or ""
+                }
+                if name in usr.values():
+                    users.append(usr)
+
+            teams_users = teams + users
+            num_found = len(teams_users)
+            if num_found == 0:
+                logging.warning(f"""Team/User: {bcolors.WARNING}"{name}"{bcolors.ENDC} - not found (skipped).""")
+            elif num_found > 1:
+                logging.warning(f"""Multiple matches ({num_found}) for team/user: {bcolors.WARNING}"{name}"{bcolors.ENDC} found (skipped).""")
+                if logging.getLogger().getEffectiveLevel() <= logging.DEBUG:
+                    msg = ""
+                    for x in teams_users:
+                        msg += "\n" + (f"team_uid: {x.team_uid}, name: {x.name}" if isinstance(x, api.Team) else str(x))
+                    logging.debug(f"Matches from team/user lookup: {msg}")
+
+    def process_data(self, params, project):
+        """Extend: only create records tagged new; use resolved_folder_uid; for existing machines only add new users."""
+        if project.get("options", {}).get("dry_run", False) is True:
+            return
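+        # Imports are deferred to call time, presumably to avoid a circular import
+        # between command modules (assumption; the module import graph is not shown here).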
+        from ..tunnel_and_connections import PAMTunnelEditCommand
+        from ..discoveryrotation import PAMCreateRecordRotationCommand
+
+        resources = project.get("mapped_resources") or []
+        users = project.get("mapped_users") or []
+        pam_cfg_uid = (project.get("pam_config") or {}).get("pam_config_uid", "")
+        shfres = (project.get("folders") or {}).get("resources_folder_uid", "")
+        shfusr = (project.get("folders") or {}).get("users_folder_uid", "")
+        pce = (project.get("pam_config") or {}).get("pam_config_object")
+
+        print("Started importing data...")
+        encrypted_session_token, encrypted_transmission_key, transmission_key = get_keeper_tokens(params)
+        tdag = TunnelDAG(params, encrypted_session_token, encrypted_transmission_key, pam_cfg_uid, True,
+                         transmission_key=transmission_key)
+        pte = PAMTunnelEditCommand()
+        prc = PAMCreateRecordRotationCommand()
+        pdelta = 10
+
+        new_users = [u for u in users if getattr(u, "_extend_tag", None) == "new"]
+        if new_users:
+            logging.warning(f"Processing external users: {len(new_users)}")
+            for n, user in enumerate(new_users):
+                folder_uid = getattr(user, "resolved_folder_uid", None) or shfusr
+                user.create_record(params, folder_uid)
+                if n % pdelta == 0:
+                    print(f"{n}/{len(new_users)}")
+            print(f"{len(new_users)}/{len(new_users)}\n")
+
+        resources_sorted = sorted(resources, key=lambda r: (getattr(r, "type", "") or "").lower() != "pamdirectory")
+        new_resources = [r for r in resources_sorted if getattr(r, "_extend_tag", None) == "new"]
+        existing_resources = [r for r in resources_sorted if getattr(r, "_extend_tag", None) == "existing"]
+        if new_resources:
+            logging.warning(f"Processing resources: {len(new_resources)}")
+            for n, mach in enumerate(new_resources):
+                if n % pdelta == 0:
+                    print(f"{n}/{len(new_resources)}")
+                folder_uid = getattr(mach, "resolved_folder_uid", None) or shfres
+                admin_uid = get_admin_credential(mach, True)
+                mach.create_record(params, folder_uid)
+                tdag.link_resource_to_config(mach.uid)
+                if isinstance(mach, PamRemoteBrowserObject):
+                    args = parse_command_options(mach, True)
+                    pte.execute(params, config=pam_cfg_uid, silent=True, **args)
+                    args = parse_command_options(mach, False)
+                    if args.get("remote_browser_isolation", False) is True:
+                        args["connections"] = True
+                    tdag.set_resource_allowed(**args)
+                else:
+                    args = parse_command_options(mach, True)
+                    if admin_uid:
+                        args["admin"] = admin_uid
+                    pte.execute(params, config=pam_cfg_uid, silent=True, **args)
+                    if admin_uid and is_admin_external(mach):
+                        tdag.link_user_to_resource(admin_uid, mach.uid, is_admin=True, belongs_to=False)
+                    args = parse_command_options(mach, False)
+                    tdag.set_resource_allowed(**args)
+                mach_users = getattr(mach, "users", []) or []
+                for user in mach_users:
+                    if getattr(user, "_extend_tag", None) != "new":
+                        continue
+                    rs = getattr(user, "rotation_settings", None)
+                    if isinstance(user, PamUserObject) and rs and (getattr(rs, "rotation", "") or "").lower() == "general":
+                        rs.resourceUid = mach.uid
+                    ufolder = getattr(user, "resolved_folder_uid", None) or shfusr
+                    user.create_record(params, ufolder)
+                    if isinstance(user, PamUserObject):
+                        tdag.link_user_to_resource(user.uid, mach.uid, admin_uid == user.uid, True)
+                        if rs:
+                            args = {"force": True, "config": pam_cfg_uid, "record_name": user.uid, "admin": admin_uid, "resource": mach.uid}
+                            enabled = getattr(rs, "enabled", "")
+                            key = {"on": "enable", "off": "disable"}.get(enabled, "")
+                            if key:
+                                args[key] = True
+                            schedule = getattr(rs, "schedule", None)
+                            schedule_type = getattr(schedule, "type", "") if schedule else ""
+                            if schedule_type == "on-demand":
+                                args["on_demand"] = True
+                            elif schedule_type == "cron" and schedule and getattr(schedule, "cron", None):
+                                args["schedule_cron_data"] = rs.schedule.cron
+                            if getattr(rs, "password_complexity", None):
+                                args["pwd_complexity"] = rs.password_complexity
+                            prc.execute(params, silent=True, **args)
+        if new_resources:
+            print(f"{len(new_resources)}/{len(new_resources)}\n")
+
+        for mach in existing_resources:
+            mach_users = getattr(mach, "users", []) or []
+            admin_uid = get_admin_credential(mach, True)
+            for user in mach_users:
+                if getattr(user, "_extend_tag", None) != "new":
+                    continue
+                rs = getattr(user, "rotation_settings", None)
+                if isinstance(user, PamUserObject) and rs and (getattr(rs, "rotation", "") or "").lower() == "general":
+                    rs.resourceUid = mach.uid
+                ufolder = getattr(user, "resolved_folder_uid", None) or shfusr
+                user.create_record(params, ufolder)
+                if isinstance(user, PamUserObject):
+                    tdag.link_user_to_resource(user.uid, mach.uid, admin_uid == user.uid, True)
+                    if rs:
+                        args = {"force": True, "config": pam_cfg_uid, "record_name": user.uid, "admin": admin_uid, "resource": mach.uid}
+                        enabled = getattr(rs, "enabled", "")
+                        key = {"on": "enable", "off": "disable"}.get(enabled, "")
+                        if key:
+                            args[key] = True
+                        schedule = getattr(rs, "schedule", None)
+                        schedule_type = getattr(schedule, "type", "") if schedule else ""
+                        if schedule_type == "on-demand":
+                            args["on_demand"] = True
+                        elif schedule_type == "cron" and schedule and getattr(schedule, "cron", None):
+                            args["schedule_cron_data"] = rs.schedule.cron
+                        if getattr(rs, "password_complexity", None):
+                            args["pwd_complexity"] = rs.password_complexity
+                        prc.execute(params, silent=True, **args)
+
+        if pce and getattr(pce, "scripts", None) and getattr(pce.scripts, "scripts", None):
+            refs = [x for x in pce.scripts.scripts if getattr(x, "record_refs", None)]
+            if refs:
+                api.sync_down(params)
+                add_pam_scripts(params, pam_cfg_uid, refs)
+        logging.debug("Done processing project data.")
+        return
+
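+# Maintainer note: "pam project extend" runs process_folders -> map_records ->
+# autodetect_folders (only when new records lack a folder_path) -> process_data.
+# Only objects tagged _extend_tag == "new" are written to the vault; existing
+# machines are left untouched except for attaching newly added users.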