diff --git a/gooddata-pipelines/gooddata_pipelines/provisioning/entities/users/models/permissions.py b/gooddata-pipelines/gooddata_pipelines/provisioning/entities/users/models/permissions.py index cbc7fee84..7eccc2331 100644 --- a/gooddata-pipelines/gooddata_pipelines/provisioning/entities/users/models/permissions.py +++ b/gooddata-pipelines/gooddata_pipelines/provisioning/entities/users/models/permissions.py @@ -1,103 +1,101 @@ # (C) 2025 GoodData Corporation -from dataclasses import dataclass +from abc import abstractmethod from enum import Enum -from typing import Any, Iterator, TypeAlias +from typing import Any, Iterator, TypeAlias, TypeVar +import attrs from gooddata_sdk.catalog.identifier import CatalogAssigneeIdentifier from gooddata_sdk.catalog.permission.declarative_model.permission import ( CatalogDeclarativeSingleWorkspacePermission, CatalogDeclarativeWorkspacePermissions, ) +from pydantic import BaseModel from gooddata_pipelines.provisioning.utils.exceptions import BaseUserException -# TODO: refactor the full load and incremental load models to reuse as much as possible -# TODO: use pydantic models instead of dataclasses? -# TODO: make the validation logic more readable (as in PermissionIncrementalLoad) - TargetsPermissionDict: TypeAlias = dict[str, dict[str, bool]] +ConstructorType = TypeVar("ConstructorType", bound="ConstructorMixin") -class PermissionType(Enum): +class PermissionType(str, Enum): + # NOTE: Start using StrEnum with Python 3.11 user = "user" user_group = "userGroup" -@dataclass(frozen=True) -class PermissionIncrementalLoad: - permission: str - workspace_id: str - id: str - type: PermissionType - is_active: bool +class ConstructorMixin: + @staticmethod + def _get_id_and_type( + permission: dict[str, Any], + ) -> tuple[str, PermissionType]: + user_id: str | None = permission.get("user_id") + user_group_id: str | None = permission.get("ug_id") + if user_id and user_group_id: + raise ValueError("Only one of user_id or ug_id must be present") + elif user_id: + return user_id, PermissionType.user + elif user_group_id: + return user_group_id, PermissionType.user_group + else: + raise ValueError("Either user_id or ug_id must be present") @classmethod def from_list_of_dicts( - cls, data: list[dict[str, Any]] - ) -> list["PermissionIncrementalLoad"]: - """Creates a list of User objects from list of dicts.""" - id: str + cls: type[ConstructorType], data: list[dict[str, Any]] + ) -> list[ConstructorType]: + """Creates a list of instances from list of dicts.""" + # NOTE: We can use typing.Self for the return type in Python 3.11 permissions = [] for permission in data: - user_id: str | None = permission.get("user_id") - user_group_id: str | None = permission.get("ug_id") - - if user_id is not None: - target_type = PermissionType.user - id = user_id - elif user_group_id is not None: - target_type = PermissionType.user_group - id = user_group_id - - permissions.append( - PermissionIncrementalLoad( - permission=permission["ws_permissions"], - workspace_id=permission["ws_id"], - id=id, - type=target_type, - is_active=str(permission["is_active"]).lower() == "true", - ) - ) + permissions.append(cls.from_dict(permission)) return permissions + @classmethod + @abstractmethod + def from_dict(cls, data: dict[str, Any]) -> Any: + """Construction from a dictionary, to be implemented by subclasses.""" + pass + -@dataclass(frozen=True) -class PermissionFullLoad: +class PermissionIncrementalLoad(BaseModel, ConstructorMixin): permission: str workspace_id: str - id: str - type: PermissionType
+ id_: str + type_: PermissionType + is_active: bool @classmethod - def from_list_of_dicts( - cls, data: list[dict[str, Any]] - ) -> list["PermissionFullLoad"]: - """Creates a list of User objects from list of dicts.""" - permissions = [] - for permission in data: - id = ( - permission["user_id"] - if permission["user_id"] - else permission["ug_id"] - ) + def from_dict(cls, data: dict[str, Any]) -> "PermissionIncrementalLoad": + """Returns an instance of PermissionIncrementalLoad from a dictionary.""" + id_, target_type = cls._get_id_and_type(data) + return cls( + permission=data["ws_permissions"], + workspace_id=data["ws_id"], + id_=id_, + type_=target_type, + is_active=data["is_active"], + ) - if permission["user_id"]: - target_type = PermissionType.user - else: - target_type = PermissionType.user_group - - permissions.append( - PermissionFullLoad( - permission=permission["ws_permissions"], - workspace_id=permission["ws_id"], - id=id, - type=target_type, - ) - ) - return permissions +class PermissionFullLoad(BaseModel, ConstructorMixin): + permission: str + workspace_id: str + id_: str + type_: PermissionType + + @classmethod + def from_dict(cls, data: dict[str, Any]) -> "PermissionFullLoad": + """Returns an instance of PermissionFullLoad from a dictionary.""" + id_, target_type = cls._get_id_and_type(data) + return cls( + permission=data["ws_permissions"], + workspace_id=data["ws_id"], + id_=id_, + type_=target_type, + ) -@dataclass + +@attrs.define class PermissionDeclaration: users: TargetsPermissionDict user_groups: TargetsPermissionDict @@ -192,7 +190,9 @@ def to_sdk_api(self) -> CatalogDeclarativeWorkspacePermissions: permissions=permission_declarations ) - def add_permission(self, permission: PermissionIncrementalLoad) -> None: + def add_incremental_permission( + self, permission: PermissionIncrementalLoad + ) -> None: """ Adds WSPermission object into respective field within the instance. Handles duplicate permissions and different combinations of input @@ -200,15 +200,15 @@ def add_permission(self, permission: PermissionIncrementalLoad) -> None: """ target_dict = ( self.users - if permission.type == PermissionType.user + if permission.type_ == PermissionType.user else self.user_groups ) - if permission.id not in target_dict: - target_dict[permission.id] = {} + if permission.id_ not in target_dict: + target_dict[permission.id_] = {} is_active = permission.is_active - target_permissions = target_dict[permission.id] + target_permissions = target_dict[permission.id_] permission_value = permission.permission if permission_value not in target_permissions: @@ -225,6 +225,27 @@ def add_permission(self, permission: PermissionIncrementalLoad) -> None: ) target_permissions[permission_value] = is_active + def add_full_load_permission(self, permission: PermissionFullLoad) -> None: + """ + Adds WSPermission object into respective field within the instance. + Full load permissions are always treated as active; duplicate + entries are ignored. + """ + target_dict = ( + self.users + if permission.type_ == PermissionType.user + else self.user_groups + ) + + if permission.id_ not in target_dict: + target_dict[permission.id_] = {} + + target_permissions = target_dict[permission.id_] + permission_value = permission.permission + + if permission_value not in target_permissions: + target_permissions[permission_value] = True + def upsert(self, other: "PermissionDeclaration") -> None: """ Modifies the owner object by merging with the other.
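For orientation, the refactor above funnels both load modes through ConstructorMixin, so a source row is parsed the same way in either mode. A minimal usage sketch follows; the field names (ws_id, ws_permissions, user_id, ug_id, is_active) are taken from this diff, while the sample rows themselves are hypothetical:

    from gooddata_pipelines.provisioning.entities.users.models.permissions import (
        PermissionFullLoad,
        PermissionIncrementalLoad,
        PermissionType,
    )

    # Exactly one of user_id / ug_id may be truthy per row; empty strings
    # count as absent, which the tests further down rely on.
    rows = [
        {"ws_id": "ws_1", "ws_permissions": "ANALYZE", "user_id": "user_1", "ug_id": "", "is_active": True},
        {"ws_id": "ws_1", "ws_permissions": "VIEW", "user_id": "", "ug_id": "ug_1", "is_active": False},
    ]

    incremental = PermissionIncrementalLoad.from_list_of_dicts(rows)
    assert incremental[0].type_ is PermissionType.user
    assert incremental[1].type_ is PermissionType.user_group

    # Full-load rows carry no is_active flag.
    full = PermissionFullLoad.from_list_of_dicts(
        [{"ws_id": "ws_1", "ws_permissions": "ANALYZE", "user_id": "user_1"}]
    )

    # Rows with both ids set (or neither) raise ValueError in _get_id_and_type.

A side benefit of the move to pydantic is that field types are now validated at construction time, which the frozen dataclasses never did.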
diff --git a/gooddata-pipelines/gooddata_pipelines/provisioning/entities/users/permissions.py b/gooddata-pipelines/gooddata_pipelines/provisioning/entities/users/permissions.py index fa773a74a..b4fad7db3 100644 --- a/gooddata-pipelines/gooddata_pipelines/provisioning/entities/users/permissions.py +++ b/gooddata-pipelines/gooddata_pipelines/provisioning/entities/users/permissions.py @@ -2,6 +2,8 @@ """Module for provisioning user permissions in GoodData workspaces.""" +from typing import TypeVar + from gooddata_pipelines.api.exceptions import GoodDataApiException from gooddata_pipelines.provisioning.entities.users.models.permissions import ( PermissionDeclaration, @@ -14,6 +16,11 @@ from gooddata_pipelines.provisioning.provisioning import Provisioning from gooddata_pipelines.provisioning.utils.exceptions import BaseUserException +# Type variable for permission models (PermissionIncrementalLoad or PermissionFullLoad) +PermissionModel = TypeVar( + "PermissionModel", PermissionIncrementalLoad, PermissionFullLoad +) + class PermissionProvisioner( Provisioning[PermissionFullLoad, PermissionIncrementalLoad] @@ -72,7 +79,7 @@ def _get_upstream_declarations( @staticmethod def _construct_declarations( - permissions: list[PermissionIncrementalLoad], + permissions: list[PermissionIncrementalLoad] | list[PermissionFullLoad], ) -> WSPermissionsDeclarations: """Constructs workspace permission declarations from the input permissions.""" ws_dict: WSPermissionsDeclarations = {} @@ -82,7 +89,12 @@ def _construct_declarations( if ws_id not in ws_dict: ws_dict[ws_id] = PermissionDeclaration({}, {}) - ws_dict[ws_id].add_permission(permission) + if isinstance(permission, PermissionIncrementalLoad): + ws_dict[ws_id].add_incremental_permission(permission) + elif isinstance(permission, PermissionFullLoad): + ws_dict[ws_id].add_full_load_permission(permission) + else: + raise ValueError(f"Invalid permission type: {type(permission)}") return ws_dict def _check_user_group_exists(self, ug_id: str) -> None: @@ -90,14 +102,14 @@ def _check_user_group_exists(self, ug_id: str) -> None: self._api._sdk.catalog_user.get_user_group(ug_id) def _validate_permission( - self, permission: PermissionIncrementalLoad + self, permission: PermissionFullLoad | PermissionIncrementalLoad ) -> None: """Validates if the permission is correctly defined.""" - if permission.type == PermissionType.user: - self._api.get_user(permission.id, error_message="User not found") + if permission.type_ == PermissionType.user: + self._api.get_user(permission.id_, error_message="User not found") else: self._api.get_user_group( - permission.id, error_message="User group not found" + permission.id_, error_message="User group not found" ) self._api.get_workspace( @@ -105,10 +117,12 @@ def _validate_permission( ) def _filter_invalid_permissions( - self, permissions: list[PermissionIncrementalLoad] - ) -> list[PermissionIncrementalLoad]: + self, + permissions: list[PermissionModel], + ) -> list[PermissionModel]: """Filters out invalid permissions from the input list.""" - valid_permissions: list[PermissionIncrementalLoad] = [] + valid_permissions: list[PermissionModel] = [] + for permission in permissions: try: self._validate_permission(permission) @@ -121,13 +135,15 @@ def _filter_invalid_permissions( valid_permissions.append(permission) return valid_permissions - def _manage_permissions( - self, permissions: list[PermissionIncrementalLoad] - ) -> None: - """Manages permissions for a list of workspaces. 
- Modify upstream workspace declarations for each input workspace and skip non-existent ws_ids + def _provision_incremental_load(self) -> None: + """Provisions permissions for a list of workspaces. + + Modifies existing upstream workspace permission declarations for each + input workspace and skips the rest of the workspaces. """ - valid_permissions = self._filter_invalid_permissions(permissions) + valid_permissions = self._filter_invalid_permissions( + self.source_group_incremental + ) input_declarations = self._construct_declarations(valid_permissions) @@ -145,9 +161,21 @@ def _manage_permissions( self._api.put_declarative_permissions(ws_id, ws_permissions) self.logger.info(f"Updated permissions for workspace {ws_id}") - def _provision_incremental_load(self) -> None: - """Provision permissions based on the source group.""" - self._manage_permissions(self.source_group_incremental) - def _provision_full_load(self) -> None: - raise NotImplementedError("Not implemented yet.") + """Provisions permissions for the selected workspaces. + + Modifies upstream workspace declarations for each input workspace and + skips non-existent workspace ids. Overwrites any existing configuration + of the workspace permissions. + """ + valid_permissions = self._filter_invalid_permissions( + self.source_group_full + ) + + input_declarations = self._construct_declarations(valid_permissions) + + for ws_id, declaration in input_declarations.items(): + ws_permissions = declaration.to_sdk_api() + + self._api.put_declarative_permissions(ws_id, ws_permissions) + self.logger.info(f"Updated permissions for workspace {ws_id}") diff --git a/gooddata-pipelines/gooddata_pipelines/provisioning/provisioning.py b/gooddata-pipelines/gooddata_pipelines/provisioning/provisioning.py index b29fd909b..3c24991e7 100644 --- a/gooddata-pipelines/gooddata_pipelines/provisioning/provisioning.py +++ b/gooddata-pipelines/gooddata_pipelines/provisioning/provisioning.py @@ -36,6 +36,7 @@ def create( cls: Type[TProvisioning], host: str, token: str ) -> TProvisioning: """Creates a provisioner instance using provided host and token.""" + cls._validate_credentials(host, token) return cls(host=host, token=token) @classmethod @@ -48,6 +49,16 @@ def create_from_profile( content = profile_content(profile, profiles_path) return cls(**content) + @staticmethod + def _validate_credentials(host: str, token: str) -> None: + """Validates the credentials.""" + if (not host) and (not token): + raise ValueError("Host and token are required.") + if not host: + raise ValueError("Host is required.") + if not token: + raise ValueError("Token is required.") + @staticmethod def _create_groups( source_id: set[str], panther_id: set[str] @@ -95,13 +106,9 @@ def full_load(self, source_data: list[TFullLoadSourceData]) -> None: try: self._provision_full_load() - self.logger.info("Provisioning completed successfully.") + self.logger.info("Provisioning completed.") except Exception as e: - self.fatal_exception = str(e) - self.logger.error( - f"Provisioning failed. Error: {self.fatal_exception} " - + f"Context: {e.__dict__}" - ) + self._handle_fatal_exception(e) def incremental_load( self, source_data: list[TIncrementalSourceData] ) -> None: @@ -111,22 +118,34 @@ Incremental provisioning is used to modify a subset of the upstream workspaces based on the source data provided.
""" + # TODO: validate the data type of source group at runtime self.source_group_incremental = source_data try: self._provision_incremental_load() - self.logger.info("Provisioning completed successfully.") + self.logger.info("Provisioning completed.") except Exception as e: - self.fatal_exception = str(e) - self.logger.error( - f"Provisioning failed. Error: {self.fatal_exception} " - + f"Context: {e.__dict__}" - ) - - # TODO: implement a sceond provisioning method and name the two differently: - # 1) provision_incremental - will use the is_active logic, such as user provisioning now - # 2) provision_full - full load of the source data, like workspaces now - # Each will have its own implementation and source data model. - # Both use cases are required and need to be supported. - # This will also improve the clarity of the code as now provisioning of each - # entity works differently, leading to confusion. + self._handle_fatal_exception(e) + + def _handle_fatal_exception(self, e: Exception) -> None: + """Handles fatal exceptions during provisioning. + + Logs the exception content. Re-raises the exception if there is no + subscriber to the logger. + """ + self.fatal_exception = str(e) + + if hasattr(e, "__dict__"): + exception_context = f"Context: {e.__dict__}" + else: + exception_context = "" + + exception_message = ( + f"Provisioning failed. Error: {self.fatal_exception}. " + + exception_context + ) + + self.logger.error(exception_message) + + if not self.logger.subscribers: + raise Exception(exception_message) diff --git a/gooddata-pipelines/pyproject.toml b/gooddata-pipelines/pyproject.toml index 891176493..90a6b9914 100644 --- a/gooddata-pipelines/pyproject.toml +++ b/gooddata-pipelines/pyproject.toml @@ -2,7 +2,7 @@ [project] name = "gooddata-pipelines" version = "1.49.0" -description = "" +description = "GoodData Cloud lifecycle automation pipelines" authors = [{ name = "GoodData", email = "support@gooddata.com" }] license = { text = "MIT" } readme = "README.md" @@ -14,7 +14,7 @@ dependencies = [ "gooddata-sdk~=1.49.0", "boto3 (>=1.39.3,<2.0.0)", "boto3-stubs (>=1.39.3,<2.0.0)", - "types-pyyaml (>=6.0.12.20250326,<7.0.0)", + "types-pyyaml (>=6.0.12.20250326,<7.0.0)" ] [tool.mypy] @@ -27,14 +27,12 @@ no_implicit_optional = true exclude = [".venv"] line-length = 80 -[project.optional-dependencies] -dev = [ - "pytest (>=8.3.5,<9.0.0)", - "pytest-mock (>=3.14.0,<4.0.0)", - "ruff (>=0.11.2,<0.12.0)", - "mypy (>=1.16.0,<2.0.0)", - "moto (>=5.1.6,<6.0.0)", -] +[tool.poetry.group.dev.dependencies] +mypy = "^1.17.1" +moto = ">=5.1.6,<6.0.0" +pytest = ">=8.3.5,<9.0.0" +pytest-mock = ">=3.14.0,<4.0.0" +ruff = ">=0.11.2,<0.12.0" [build-system] requires = ["hatchling"] diff --git a/gooddata-pipelines/tests/backup_and_restore/test_backup.py b/gooddata-pipelines/tests/backup_and_restore/test_backup.py index 21992950e..81735a6ca 100644 --- a/gooddata-pipelines/tests/backup_and_restore/test_backup.py +++ b/gooddata-pipelines/tests/backup_and_restore/test_backup.py @@ -25,6 +25,9 @@ LocalStorage, ) from gooddata_pipelines.backup_and_restore.storage.s3_storage import S3Storage +from tests.conftest import TEST_DATA_DIR + +TEST_DATA_SUBDIR = f"{TEST_DATA_DIR}/backup" S3_BACKUP_PATH = "some/s3/backup/path/org_id/" S3_BUCKET = "some-s3-bucket" @@ -117,7 +120,9 @@ def test_get_local_storage(backup_manager): def test_archive_gooddata_layouts_to_zip(backup_manager): with tempfile.TemporaryDirectory() as tmpdir: shutil.copytree( - Path("tests/data/backup/test_exports/services/"), + Path( + 
f"{TEST_DATA_SUBDIR}/test_exports/services/", + ), Path(tmpdir + "/services"), ) backup_manager.archive_gooddata_layouts_to_zip( @@ -196,13 +201,11 @@ def test_store_user_data_filters(backup_manager): }, ] } - user_data_filter_folderlocation = Path( - "tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/user_data_filters" - ) + user_data_filter_folderlocation = f"{TEST_DATA_SUBDIR}/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/user_data_filters" backup_manager.store_user_data_filters( user_data_filters, Path( - "tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5" + f"{TEST_DATA_SUBDIR}/test_exports/services/wsid1/20230713-132759-1_3_1_dev5", ), "wsid1", ) @@ -227,7 +230,9 @@ def test_store_user_data_filters(backup_manager): assert count == 2 shutil.rmtree( - "tests/data/backup/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/user_data_filters" + Path( + f"{TEST_DATA_SUBDIR}/test_exports/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/user_data_filters", + ) ) @@ -235,47 +240,52 @@ def test_local_storage_export(backup_manager): with tempfile.TemporaryDirectory() as tmpdir: org_store_location = Path(tmpdir + "/services") shutil.copytree( - Path("tests/data/backup/test_exports/services/"), org_store_location + Path( + f"{TEST_DATA_SUBDIR}/test_exports/services/", + ), + org_store_location, ) local_storage = backup_manager.get_storage(LOCAL_CONFIG) local_storage.export( folder=tmpdir, org_id="services", - export_folder="tests/data/local_export", + export_folder=f"{TEST_DATA_DIR}/local_export", ) local_export_folder_exist = os.path.isdir( Path( - "tests/data/local_export/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/analytics_model" + f"{TEST_DATA_DIR}/local_export/services/wsid1/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid1/analytics_model" ) ) local_export_folder2_exist = os.path.isdir( Path( - "tests/data/local_export/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/ldm" + f"{TEST_DATA_DIR}/local_export/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/ldm" ) ) local_export_folder3_exist = os.path.isdir( Path( - "tests/data/local_export/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/user_data_filters" + f"{TEST_DATA_DIR}/local_export/services/wsid3/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid3/user_data_filters" ) ) local_export_file_exist = os.path.isfile( Path( - "tests/data/local_export/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/analytics_model/analytical_dashboards/id.yaml" + f"{TEST_DATA_DIR}/local_export/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/analytics_model/analytical_dashboards/id.yaml" ) ) assert local_export_folder_exist assert local_export_folder2_exist assert local_export_folder3_exist assert local_export_file_exist - shutil.rmtree("tests/data/local_export") + shutil.rmtree(f"{TEST_DATA_DIR}/local_export") def test_file_upload(backup_manager, s3, s3_bucket): - backup_manager.storage.export("tests/data/backup/test_exports", "services") + backup_manager.storage.export( + f"{TEST_DATA_SUBDIR}/test_exports", "services" + ) s3.Object( S3_BUCKET, 
"some/s3/backup/path/org_id/services/wsid2/20230713-132759-1_3_1_dev5/gooddata_layouts/services/workspaces/wsid2/analytics_model/filter_contexts/id.yaml", diff --git a/gooddata-pipelines/tests/conftest.py b/gooddata-pipelines/tests/conftest.py index 0274fe829..261146b4d 100644 --- a/gooddata-pipelines/tests/conftest.py +++ b/gooddata-pipelines/tests/conftest.py @@ -1,5 +1,6 @@ # (C) 2025 GoodData Corporation +from pathlib import Path from unittest.mock import Mock import boto3 @@ -8,6 +9,8 @@ from gooddata_pipelines.api import GoodDataApi +TEST_DATA_DIR = str((Path(__file__).parent / "data").absolute()) + @pytest.fixture(scope="function", autouse=True) def mock_aws_services(): diff --git a/gooddata-pipelines/tests/data/provisioning/entities/permissions/existing_upstream_permissions.json b/gooddata-pipelines/tests/data/provisioning/entities/permissions/existing_upstream_permissions.json new file mode 100644 index 000000000..5edaaecf6 --- /dev/null +++ b/gooddata-pipelines/tests/data/provisioning/entities/permissions/existing_upstream_permissions.json @@ -0,0 +1,16 @@ +{ + "child_workspace_id_1": [ + { + "name": "VIEW", + "assignee_id": "user_4", + "assignee_type": "user" + } + ], + "child_workspace_id_2": [ + { + "name": "ANALYZE", + "assignee_id": "user_1", + "assignee_type": "user" + } + ] +} diff --git a/gooddata-pipelines/tests/data/provisioning/entities/permissions/permissions_expected_full_load.json b/gooddata-pipelines/tests/data/provisioning/entities/permissions/permissions_expected_full_load.json new file mode 100644 index 000000000..449d365f7 --- /dev/null +++ b/gooddata-pipelines/tests/data/provisioning/entities/permissions/permissions_expected_full_load.json @@ -0,0 +1,26 @@ +{ + "child_workspace_id_1": [ + { + "name": "ANALYZE", + "assignee_id": "user_1", + "assignee_type": "user" + }, + { + "name": "VIEW", + "assignee_id": "user_2", + "assignee_type": "user" + } + ], + "child_workspace_id_2": [ + { + "name": "MANAGE", + "assignee_id": "user_3", + "assignee_type": "user" + }, + { + "name": "ANALYZE", + "assignee_id": "user_4", + "assignee_type": "user" + } + ] +} diff --git a/gooddata-pipelines/tests/data/provisioning/entities/permissions/permissions_expected_incremental_load.json b/gooddata-pipelines/tests/data/provisioning/entities/permissions/permissions_expected_incremental_load.json new file mode 100644 index 000000000..b1bc385cb --- /dev/null +++ b/gooddata-pipelines/tests/data/provisioning/entities/permissions/permissions_expected_incremental_load.json @@ -0,0 +1,36 @@ +{ + "child_workspace_id_1": [ + { + "name": "ANALYZE", + "assignee_id": "user_1", + "assignee_type": "user" + }, + { + "name": "VIEW", + "assignee_id": "user_2", + "assignee_type": "user" + }, + { + "name": "VIEW", + "assignee_id": "user_4", + "assignee_type": "user" + } + ], + "child_workspace_id_2": [ + { + "name": "ANALYZE", + "assignee_id": "user_1", + "assignee_type": "user" + }, + { + "name": "MANAGE", + "assignee_id": "user_3", + "assignee_type": "user" + }, + { + "name": "ANALYZE", + "assignee_id": "user_4", + "assignee_type": "user" + } + ] +} diff --git a/gooddata-pipelines/tests/data/provisioning/entities/permissions/permissions_input_full_load.json b/gooddata-pipelines/tests/data/provisioning/entities/permissions/permissions_input_full_load.json new file mode 100644 index 000000000..c899e3213 --- /dev/null +++ b/gooddata-pipelines/tests/data/provisioning/entities/permissions/permissions_input_full_load.json @@ -0,0 +1,22 @@ +[ + { + "user_id": "user_1", + "ws_id": "child_workspace_id_1", + 
"ws_permissions": "ANALYZE" + }, + { + "user_id": "user_2", + "ws_id": "child_workspace_id_1", + "ws_permissions": "VIEW" + }, + { + "user_id": "user_3", + "ws_id": "child_workspace_id_2", + "ws_permissions": "MANAGE" + }, + { + "user_id": "user_4", + "ws_id": "child_workspace_id_2", + "ws_permissions": "ANALYZE" + } +] diff --git a/gooddata-pipelines/tests/data/provisioning/entities/permissions/permissions_input_incremental_load.json b/gooddata-pipelines/tests/data/provisioning/entities/permissions/permissions_input_incremental_load.json new file mode 100644 index 000000000..3f1d765de --- /dev/null +++ b/gooddata-pipelines/tests/data/provisioning/entities/permissions/permissions_input_incremental_load.json @@ -0,0 +1,32 @@ +[ + { + "user_id": "user_1", + "ws_id": "child_workspace_id_1", + "ws_permissions": "ANALYZE", + "is_active": true + }, + { + "user_id": "user_2", + "ws_id": "child_workspace_id_1", + "ws_permissions": "VIEW", + "is_active": true + }, + { + "user_id": "user_2", + "ws_id": "child_workspace_id_1", + "ws_permissions": "VIEW", + "is_active": false + }, + { + "user_id": "user_3", + "ws_id": "child_workspace_id_2", + "ws_permissions": "MANAGE", + "is_active": true + }, + { + "user_id": "user_4", + "ws_id": "child_workspace_id_2", + "ws_permissions": "ANALYZE", + "is_active": true + } +] diff --git a/gooddata-pipelines/tests/data/provisioning/entities/users/existing_upstream_users.json b/gooddata-pipelines/tests/data/provisioning/entities/users/existing_upstream_users.json new file mode 100644 index 000000000..1e2c4d463 --- /dev/null +++ b/gooddata-pipelines/tests/data/provisioning/entities/users/existing_upstream_users.json @@ -0,0 +1,26 @@ +[ + { + "user_id": "user_2", + "firstname": "Jane", + "lastname": "Doe", + "email": "jane.doe@example.com", + "authentication_id": "auth_2", + "user_groups": ["group_2", "group_3"] + }, + { + "user_id": "user_3", + "firstname": "Jim", + "lastname": "Stone", + "email": "jim.stone@example.com", + "authentication_id": "auth_3", + "user_groups": ["group_3", "group_4"] + }, + { + "user_id": "user_4", + "firstname": "Jack", + "lastname": "Cliff", + "email": "jack.cliff@example.com", + "authentication_id": "auth_4", + "user_groups": ["group_4", "group_5"] + } +] diff --git a/gooddata-pipelines/tests/data/provisioning/entities/users/users_expected_full_load.json b/gooddata-pipelines/tests/data/provisioning/entities/users/users_expected_full_load.json new file mode 100644 index 000000000..fa32d2748 --- /dev/null +++ b/gooddata-pipelines/tests/data/provisioning/entities/users/users_expected_full_load.json @@ -0,0 +1,21 @@ +{ + "modified_users": [ + { + "user_id": "user_1", + "firstname": "John", + "lastname": "Doe", + "email": "john.doe@example.com", + "authentication_id": "auth_1", + "user_groups": ["group_1", "group_2"] + }, + { + "user_id": "user_3", + "firstname": "Jim", + "lastname": "Rock", + "email": "jim.rock@example.com", + "authentication_id": "auth_3", + "user_groups": ["group_3", "group_4"] + } + ], + "deleted_users": ["user_4"] +} diff --git a/gooddata-pipelines/tests/data/provisioning/entities/users/users_expected_incremental_load.json b/gooddata-pipelines/tests/data/provisioning/entities/users/users_expected_incremental_load.json new file mode 100644 index 000000000..fa32d2748 --- /dev/null +++ b/gooddata-pipelines/tests/data/provisioning/entities/users/users_expected_incremental_load.json @@ -0,0 +1,21 @@ +{ + "modified_users": [ + { + "user_id": "user_1", + "firstname": "John", + "lastname": "Doe", + "email": 
"john.doe@example.com", + "authentication_id": "auth_1", + "user_groups": ["group_1", "group_2"] + }, + { + "user_id": "user_3", + "firstname": "Jim", + "lastname": "Rock", + "email": "jim.rock@example.com", + "authentication_id": "auth_3", + "user_groups": ["group_3", "group_4"] + } + ], + "deleted_users": ["user_4"] +} diff --git a/gooddata-pipelines/tests/data/provisioning/entities/users/users_input_full_load.json b/gooddata-pipelines/tests/data/provisioning/entities/users/users_input_full_load.json new file mode 100644 index 000000000..e92483a72 --- /dev/null +++ b/gooddata-pipelines/tests/data/provisioning/entities/users/users_input_full_load.json @@ -0,0 +1,26 @@ +[ + { + "user_id": "user_1", + "firstname": "John", + "lastname": "Doe", + "email": "john.doe@example.com", + "auth_id": "auth_1", + "user_groups": "group_1,group_2" + }, + { + "user_id": "user_2", + "firstname": "Jane", + "lastname": "Doe", + "email": "jane.doe@example.com", + "auth_id": "auth_2", + "user_groups": "group_2,group_3" + }, + { + "user_id": "user_3", + "firstname": "Jim", + "lastname": "Rock", + "email": "jim.rock@example.com", + "auth_id": "auth_3", + "user_groups": "group_3,group_4" + } +] diff --git a/gooddata-pipelines/tests/data/provisioning/entities/users/users_input_incremental_load.json b/gooddata-pipelines/tests/data/provisioning/entities/users/users_input_incremental_load.json new file mode 100644 index 000000000..670867780 --- /dev/null +++ b/gooddata-pipelines/tests/data/provisioning/entities/users/users_input_incremental_load.json @@ -0,0 +1,38 @@ +[ + { + "user_id": "user_1", + "firstname": "John", + "lastname": "Doe", + "email": "john.doe@example.com", + "auth_id": "auth_1", + "user_groups": "group_1,group_2", + "is_active": true + }, + { + "user_id": "user_2", + "firstname": "Jane", + "lastname": "Doe", + "email": "jane.doe@example.com", + "auth_id": "auth_2", + "user_groups": "group_2,group_3", + "is_active": true + }, + { + "user_id": "user_3", + "firstname": "Jim", + "lastname": "Rock", + "email": "jim.rock@example.com", + "auth_id": "auth_3", + "user_groups": "group_3,group_4", + "is_active": true + }, + { + "user_id": "user_4", + "firstname": "Jack", + "lastname": "Cliff", + "email": "jack.cliff@example.com", + "auth_id": "auth_4", + "user_groups": "group_4,group_5", + "is_active": false + } +] diff --git a/gooddata-pipelines/tests/provisioning/entities/users/test_permissions.py b/gooddata-pipelines/tests/provisioning/entities/users/test_permissions.py index 0752a098d..d24d466ba 100644 --- a/gooddata-pipelines/tests/provisioning/entities/users/test_permissions.py +++ b/gooddata-pipelines/tests/provisioning/entities/users/test_permissions.py @@ -1,5 +1,8 @@ # (C) 2025 GoodData Corporation +import json +from typing import Literal +import pytest from gooddata_api_client.exceptions import ( # type: ignore[import] NotFoundException, ) @@ -8,14 +11,19 @@ CatalogDeclarativeSingleWorkspacePermission, CatalogDeclarativeWorkspacePermissions, ) +from pytest_mock import MockerFixture from gooddata_pipelines.provisioning.entities.users.models.permissions import ( PermissionDeclaration, + PermissionFullLoad, PermissionIncrementalLoad, - PermissionType, ) +from gooddata_pipelines.provisioning.entities.users.permissions import ( + PermissionProvisioner, +) +from tests.conftest import TEST_DATA_DIR -TEST_CSV_PATH = "tests/data/permission_mgmt/input.csv" +TEST_DATA_SUBDIR = f"{TEST_DATA_DIR}/provisioning/entities/permissions" USER_1 = CatalogAssigneeIdentifier(id="user_1", type="user") USER_2 = 
CatalogAssigneeIdentifier(id="user_2", type="user") @@ -67,56 +75,6 @@ "ws_id_2": UPSTREAM_WS_PERMISSION, } -EXPECTED_WS1_PERMISSIONS = CatalogDeclarativeWorkspacePermissions( - permissions=[ - CatalogDeclarativeSingleWorkspacePermission( - name="ANALYZE", assignee=USER_1 - ), - CatalogDeclarativeSingleWorkspacePermission( - name="VIEW", assignee=USER_1 - ), - CatalogDeclarativeSingleWorkspacePermission( - name="ANALYZE", assignee=USER_2 - ), - CatalogDeclarativeSingleWorkspacePermission( - name="MANAGE", assignee=USER_2 - ), - CatalogDeclarativeSingleWorkspacePermission( - name="ANALYZE", assignee=USER_3 - ), - CatalogDeclarativeSingleWorkspacePermission( - name="ANALYZE", assignee=UG_1 - ), - CatalogDeclarativeSingleWorkspacePermission(name="VIEW", assignee=UG_1), - CatalogDeclarativeSingleWorkspacePermission( - name="ANALYZE", assignee=UG_2 - ), - CatalogDeclarativeSingleWorkspacePermission( - name="MANAGE", assignee=UG_2 - ), - CatalogDeclarativeSingleWorkspacePermission( - name="ANALYZE", assignee=UG_3 - ), - ] -) - -EXPECTED_WS2_PERMISSIONS = CatalogDeclarativeWorkspacePermissions( - permissions=[ - CatalogDeclarativeSingleWorkspacePermission( - name="MANAGE", assignee=USER_1 - ), - CatalogDeclarativeSingleWorkspacePermission( - name="MANAGE", assignee=USER_3 - ), - CatalogDeclarativeSingleWorkspacePermission( - name="MANAGE", assignee=UG_1 - ), - CatalogDeclarativeSingleWorkspacePermission( - name="MANAGE", assignee=UG_3 - ), - ] -) - def test_declaration_from_populated_sdk_api_obj(): declaration = PermissionDeclaration.from_sdk_api(UPSTREAM_WS_PERMISSION) @@ -184,71 +142,102 @@ def test_declaration_with_only_inactive_to_sdk_api_obj(): # in subsequent calls and to avoid dict deepcopy overhead. -def test_add_new_active_user_perm(): +def test_add_new_active_user_perm() -> None: declaration = PermissionDeclaration( {"user_1": {"ANALYZE": True, "VIEW": False}}, {"ug_1": {"VIEW": True, "ANALYZE": False}}, ) - permission = PermissionIncrementalLoad( - "MANAGE", "", "user_1", PermissionType.user, True + permission = PermissionIncrementalLoad.from_dict( + { + "ws_id": "", + "ws_permissions": "MANAGE", + "ug_id": "", + "user_id": "user_1", + "is_active": True, + } ) - declaration.add_permission(permission) + declaration.add_incremental_permission(permission) assert declaration.users == { "user_1": {"ANALYZE": True, "VIEW": False, "MANAGE": True} } assert declaration.user_groups == {"ug_1": {"VIEW": True, "ANALYZE": False}} -def test_add_new_inactive_user_perm(): +def test_add_new_inactive_user_perm() -> None: declaration = PermissionDeclaration( {"user_1": {"ANALYZE": True, "VIEW": False}}, {"ug_1": {"VIEW": True, "ANALYZE": False}}, ) - permission = PermissionIncrementalLoad( - "MANAGE", "", "user_1", PermissionType.user, False + permission = PermissionIncrementalLoad.from_dict( + { + "ws_id": "", + "ws_permissions": "MANAGE", + "ug_id": "", + "user_id": "user_1", + "is_active": False, + } ) - declaration.add_permission(permission) + + declaration.add_incremental_permission(permission) assert declaration.users == { "user_1": {"ANALYZE": True, "VIEW": False, "MANAGE": False} } assert declaration.user_groups == {"ug_1": {"VIEW": True, "ANALYZE": False}} -def test_overwrite_inactive_user_perm(): +def test_overwrite_inactive_user_perm() -> None: declaration = PermissionDeclaration( {"user_1": {"ANALYZE": True, "VIEW": False}}, {"ug_1": {"VIEW": True, "ANALYZE": False}}, ) - permission = PermissionIncrementalLoad( - "VIEW", "", "user_1", PermissionType.user, True + permission = 
PermissionIncrementalLoad.from_dict( + { + "ws_id": "", + "ws_permissions": "VIEW", + "ug_id": "", + "user_id": "user_1", + "is_active": True, + } ) - declaration.add_permission(permission) + declaration.add_incremental_permission(permission) assert declaration.users == {"user_1": {"ANALYZE": True, "VIEW": True}} assert declaration.user_groups == {"ug_1": {"VIEW": True, "ANALYZE": False}} -def test_overwrite_active_user_perm(): +def test_overwrite_active_user_perm() -> None: declaration = PermissionDeclaration( {"user_1": {"ANALYZE": True, "VIEW": False}}, {"ug_1": {"VIEW": True, "ANALYZE": False}}, ) - permission = PermissionIncrementalLoad( - "ANALYZE", "", "user_1", PermissionType.user, False + permission = PermissionIncrementalLoad.from_dict( + { + "ws_id": "", + "ws_permissions": "ANALYZE", + "ug_id": "", + "user_id": "user_1", + "is_active": False, + } ) - declaration.add_permission(permission) + declaration.add_incremental_permission(permission) assert declaration.users == {"user_1": {"ANALYZE": True, "VIEW": False}} assert declaration.user_groups == {"ug_1": {"VIEW": True, "ANALYZE": False}} -def test_add_new_user_perm(): +def test_add_new_user_perm() -> None: declaration = PermissionDeclaration( {"user_1": {"ANALYZE": True, "VIEW": False}}, {"ug_1": {"VIEW": True, "ANALYZE": False}}, ) - permission = PermissionIncrementalLoad( - "VIEW", "", "user_2", PermissionType.user, True + permission = PermissionIncrementalLoad.from_dict( + { + "ws_id": "", + "ws_permissions": "VIEW", + "ug_id": "", + "user_id": "user_2", + "is_active": True, + } ) - declaration.add_permission(permission) + declaration.add_incremental_permission(permission) assert declaration.users == { "user_1": {"ANALYZE": True, "VIEW": False}, "user_2": {"VIEW": True}, @@ -256,15 +245,21 @@ def test_add_new_user_perm(): assert declaration.user_groups == {"ug_1": {"VIEW": True, "ANALYZE": False}} -def test_modify_one_of_user_perms(): +def test_modify_one_of_user_perms() -> None: declaration = PermissionDeclaration( {"user_1": {"ANALYZE": True, "VIEW": False}, "user_2": {"VIEW": True}}, {"ug_1": {"VIEW": True, "ANALYZE": False}}, ) - permission = PermissionIncrementalLoad( - "MANAGE", "", "user_1", PermissionType.user, True + permission = PermissionIncrementalLoad.from_dict( + { + "ws_id": "", + "ws_permissions": "MANAGE", + "ug_id": "", + "user_id": "user_1", + "is_active": True, + } ) - declaration.add_permission(permission) + declaration.add_incremental_permission(permission) assert declaration.users == { "user_1": {"ANALYZE": True, "VIEW": False, "MANAGE": True}, "user_2": {"VIEW": True}, @@ -275,71 +270,101 @@ def test_modify_one_of_user_perms(): # Add userGroup permission -def test_add_new_active_ug_perm(): +def test_add_new_active_ug_perm() -> None: declaration = PermissionDeclaration( {"user_1": {"ANALYZE": True, "VIEW": False}}, {"ug_1": {"VIEW": True, "ANALYZE": False}}, ) - permission = PermissionIncrementalLoad( - "MANAGE", "", "ug_1", PermissionType.user_group, True + permission = PermissionIncrementalLoad.from_dict( + { + "ws_id": "", + "ws_permissions": "MANAGE", + "ug_id": "ug_1", + "user_id": "", + "is_active": True, + } ) - declaration.add_permission(permission) + declaration.add_incremental_permission(permission) assert declaration.users == {"user_1": {"ANALYZE": True, "VIEW": False}} assert declaration.user_groups == { "ug_1": {"VIEW": True, "ANALYZE": False, "MANAGE": True} } -def test_add_new_inactive_ug_perm(): +def test_add_new_inactive_ug_perm() -> None: declaration = PermissionDeclaration( 
{"user_1": {"ANALYZE": True, "VIEW": False}}, {"ug_1": {"VIEW": True, "ANALYZE": False}}, ) - permission = PermissionIncrementalLoad( - "MANAGE", "", "ug_1", PermissionType.user_group, False + permission = PermissionIncrementalLoad.from_dict( + { + "ws_id": "", + "ws_permissions": "MANAGE", + "ug_id": "ug_1", + "user_id": "", + "is_active": False, + } ) - declaration.add_permission(permission) + declaration.add_incremental_permission(permission) assert declaration.users == {"user_1": {"ANALYZE": True, "VIEW": False}} assert declaration.user_groups == { "ug_1": {"VIEW": True, "ANALYZE": False, "MANAGE": False} } -def test_overwrite_inactive_ug_perm(): +def test_overwrite_inactive_ug_perm() -> None: declaration = PermissionDeclaration( {"user_1": {"ANALYZE": True, "VIEW": False}}, {"ug_1": {"VIEW": True, "ANALYZE": False}}, ) - permission = PermissionIncrementalLoad( - "ANALYZE", "", "ug_1", PermissionType.user_group, True + permission = PermissionIncrementalLoad.from_dict( + { + "ws_id": "", + "ws_permissions": "ANALYZE", + "ug_id": "ug_1", + "user_id": "", + "is_active": True, + } ) - declaration.add_permission(permission) + declaration.add_incremental_permission(permission) assert declaration.users == {"user_1": {"ANALYZE": True, "VIEW": False}} assert declaration.user_groups == {"ug_1": {"VIEW": True, "ANALYZE": True}} -def test_overwrite_active_ug_perm(): +def test_overwrite_active_ug_perm() -> None: declaration = PermissionDeclaration( {"user_1": {"ANALYZE": True, "VIEW": False}}, {"ug_1": {"VIEW": True, "ANALYZE": False}}, ) - permission = PermissionIncrementalLoad( - "VIEW", "", "ug_1", PermissionType.user_group, False + permission = PermissionIncrementalLoad.from_dict( + { + "ws_id": "", + "ws_permissions": "VIEW", + "ug_id": "ug_1", + "user_id": "", + "is_active": False, + } ) - declaration.add_permission(permission) + declaration.add_incremental_permission(permission) assert declaration.users == {"user_1": {"ANALYZE": True, "VIEW": False}} assert declaration.user_groups == {"ug_1": {"VIEW": True, "ANALYZE": False}} -def test_add_new_ug_perm(): +def test_add_new_ug_perm() -> None: declaration = PermissionDeclaration( {"user_1": {"ANALYZE": True, "VIEW": False}}, {"ug_1": {"VIEW": True, "ANALYZE": False}}, ) - permission = PermissionIncrementalLoad( - "VIEW", "", "ug_2", PermissionType.user_group, True + permission = PermissionIncrementalLoad.from_dict( + { + "ws_id": "", + "ws_permissions": "VIEW", + "ug_id": "ug_2", + "user_id": "", + "is_active": True, + } ) - declaration.add_permission(permission) + declaration.add_incremental_permission(permission) assert declaration.users == {"user_1": {"ANALYZE": True, "VIEW": False}} assert declaration.user_groups == { "ug_1": {"VIEW": True, "ANALYZE": False}, @@ -347,15 +372,21 @@ def test_add_new_ug_perm(): } -def test_modify_one_of_ug_perms(): +def test_modify_one_of_ug_perms() -> None: declaration = PermissionDeclaration( {"user_1": {"ANALYZE": True, "VIEW": False}}, {"ug_1": {"VIEW": True, "ANALYZE": False}, "ug_2": {"VIEW": True}}, ) - permission = PermissionIncrementalLoad( - "MANAGE", "", "ug_1", PermissionType.user_group, True + permission = PermissionIncrementalLoad.from_dict( + { + "ws_id": "", + "ws_permissions": "MANAGE", + "ug_id": "ug_1", + "user_id": "", + "is_active": True, + } ) - declaration.add_permission(permission) + declaration.add_incremental_permission(permission) assert declaration.users == {"user_1": {"ANALYZE": True, "VIEW": False}} assert declaration.user_groups == { "ug_1": {"VIEW": True, "ANALYZE": False, 
"MANAGE": True}, @@ -363,7 +394,7 @@ def test_modify_one_of_ug_perms(): } -def test_upsert(): +def test_upsert() -> None: owner = PermissionDeclaration( {"user_1": {"ANALYZE": True}, "user_2": {"VIEW": True}}, {"ug_1": {"ANALYZE": True}, "ug_2": {"VIEW": True}}, @@ -387,3 +418,132 @@ def mock_upstream_perms(ws_id: str) -> CatalogDeclarativeWorkspacePermissions: if ws_id not in UPSTREAM_WS_PERMISSIONS: raise NotFoundException(404) return UPSTREAM_WS_PERMISSIONS[ws_id] + + +@pytest.fixture +def permission_provisioner(mocker: MockerFixture) -> PermissionProvisioner: + provisioner_instance = PermissionProvisioner.create( + host="https://localhost:3000", token="token" + ) + + # Patch the API + mocker.patch.object(provisioner_instance, "_api", return_value=None) + + return provisioner_instance + + +def parse_expected_permissions( + raw_data: dict, +) -> dict[str, list[CatalogDeclarativeSingleWorkspacePermission]]: + expected_result: dict[ + str, list[CatalogDeclarativeSingleWorkspacePermission] + ] = {} + for workspace_id, workspace_permissions in raw_data.items(): + expected_permissions = [] + for permission in workspace_permissions: + expected_permissions.append( + CatalogDeclarativeSingleWorkspacePermission( + name=permission["name"], + assignee=CatalogAssigneeIdentifier( + id=permission["assignee_id"], + type=permission["assignee_type"], + ), + ) + ) + expected_result[workspace_id] = expected_permissions + return expected_result + + +@pytest.mark.parametrize( + ("source_data_path", "expected_data_path", "load_method"), + [ + ( + "permissions_input_full_load.json", + "permissions_expected_full_load.json", + "full_load", + ), + ( + "permissions_input_incremental_load.json", + "permissions_expected_incremental_load.json", + "incremental_load", + ), + ], +) +def test_permission_provisioner( + source_data_path: str, + expected_data_path: str, + load_method: Literal["incremental_load", "full_load"], + permission_provisioner: PermissionProvisioner, + mocker: MockerFixture, +) -> None: + source_data: list[dict] = [] + incremental_load_data: list[PermissionIncrementalLoad] = [] + full_load_data: list[PermissionFullLoad] = [] + + # Load existing upstream permissions + EXISTING_UPSTREAM_PERMISSIONS_PATH = ( + f"{TEST_DATA_SUBDIR}/existing_upstream_permissions.json" + ) + with open(EXISTING_UPSTREAM_PERMISSIONS_PATH, "r") as f: + raw_existing_upstream_permissions = json.load(f) + + existing_upstream_permissions = parse_expected_permissions( + raw_existing_upstream_permissions + ) + + def mock_get_declarative_permissions( + ws_id: str, + ) -> CatalogDeclarativeWorkspacePermissions: + return CatalogDeclarativeWorkspacePermissions( + permissions=existing_upstream_permissions[ws_id] + ) + + # Patch the get method to return existing upstream permissions + mocker.patch.object( + permission_provisioner._api, + "get_declarative_permissions", + side_effect=mock_get_declarative_permissions, + ) + + # Load source data + with open(f"{TEST_DATA_SUBDIR}/{source_data_path}", "r") as f: + source_data = json.load(f) + + # Load and parse expected data + with open(f"{TEST_DATA_SUBDIR}/{expected_data_path}", "r") as f: + raw_expected_result = json.load(f) + + expected_result = parse_expected_permissions(raw_expected_result) + + # Patch the put method to capture output and compare it with expected result + def compare_permissions( + workspace_id: str, + ws_permissions: CatalogDeclarativeWorkspacePermissions, + ) -> None: + actual_permissions = ws_permissions.permissions + expected_permissions = expected_result[workspace_id] 
+ + assert len(actual_permissions) == len(expected_permissions) + + actual_sorted_permissions = sorted( + actual_permissions, key=lambda x: x.assignee.id + ) + expected_sorted_permissions = sorted( + expected_permissions, key=lambda x: x.assignee.id + ) + assert actual_sorted_permissions == expected_sorted_permissions + + mocker.patch.object( + permission_provisioner._api, + "put_declarative_permissions", + side_effect=compare_permissions, + ) + + if load_method == "incremental_load": + incremental_load_data = PermissionIncrementalLoad.from_list_of_dicts( + source_data + ) + permission_provisioner.incremental_load(incremental_load_data) + else: + full_load_data = PermissionFullLoad.from_list_of_dicts(source_data) + permission_provisioner.full_load(full_load_data) diff --git a/gooddata-pipelines/tests/provisioning/entities/users/test_users.py b/gooddata-pipelines/tests/provisioning/entities/users/test_users.py index 003bfd58a..864a511e8 100644 --- a/gooddata-pipelines/tests/provisioning/entities/users/test_users.py +++ b/gooddata-pipelines/tests/provisioning/entities/users/test_users.py @@ -1,18 +1,31 @@ # (C) 2025 GoodData Corporation - +import json from dataclasses import dataclass -from typing import Any, Optional -from unittest import mock +from typing import Literal, Optional -from gooddata_api_client.exceptions import ( # type: ignore[import] - NotFoundException, +import pytest +from gooddata_api_client.exceptions import NotFoundException # type: ignore +from gooddata_sdk.catalog.user.entity_model.user import ( + CatalogUser, + CatalogUserAttributes, + CatalogUserGroupsData, + CatalogUserRelationships, +) +from gooddata_sdk.catalog.user.entity_model.user_group import ( + CatalogUserGroup, ) -from gooddata_sdk.catalog.user.entity_model.user import CatalogUser -from gooddata_sdk.catalog.user.entity_model.user_group import CatalogUserGroup +from pytest_mock import MockerFixture from gooddata_pipelines.provisioning.entities.users.models.users import ( + UserFullLoad, UserIncrementalLoad, ) +from gooddata_pipelines.provisioning.entities.users.users import ( + UserProvisioner, +) +from tests.conftest import TEST_DATA_DIR + +TEST_DATA_SUBDIR = f"{TEST_DATA_DIR}/provisioning/entities/users" @dataclass @@ -130,74 +143,150 @@ def test_user_obj_to_sdk_no_ugs(): assert excepted == user.to_sdk_obj() -class MockResponse: - def __init__( - self, status_code, json_response: dict[str, Any] = {}, text: str = "" - ): - self.status_code = status_code - self.json_response = json_response - self.text = text - - def json(self): - return self.json_response - - -UPSTREAM_USERS = { - "jozef.mrkva": MockUser( - "jozef.mrkva", "jozef", "mrkva", "jozef.mrkva@test.com", "auth_id_1", [] - ), - "kristian.kalerab": MockUser( - "kristian.kalerab", - "kristian", - "kalerab", - "kristian.kalerab@test.com", - "auth_id_5", - [], - ), - "richard.cvikla": MockUser( - "richard.cvikla", "richard", "cvikla", None, "auth_id_6", [] - ), - "adam.avokado": MockUser("adam.avokado", None, None, None, "auth_id_7", []), -} - -UPSTREAM_UG_ID = "ug_1" -EXPECTED_NEW_UG_OBJ = CatalogUserGroup.init("ug_2", "ug_2") -EXPECTED_GET_IDS = { - "jozef.mrkva", - "kristian.kalerab", - "peter.pertzlen", - "zoltan.zeler", -} -EXPECTED_CREATE_OR_UPDATE_IDS = { - "peter.pertzlen", - "zoltan.zeler", - "kristian.kalerab", -} - - -def prepare_sdk(): - def mock_get_user(user_id): - if user_id not in UPSTREAM_USERS: - raise NotFoundException - return UPSTREAM_USERS[user_id].to_sdk() - - def mock_get_user_group(ug_id): - if ug_id != UPSTREAM_UG_ID: - raise 
NotFoundException - return - - sdk = mock.Mock() - sdk.catalog_user.get_user.side_effect = mock_get_user - sdk.catalog_user.get_user_group.side_effect = mock_get_user_group - return sdk - - -""" -jozef - No change; user exists -bartolomej - no change; user doesnt exist -peter - create (2 ugs); 1 ug exists, 1 doesnt -zoltan - create (1 ug); ug exists -kristian - update -richard - delete (diff fields than in upstream) -adam - delete (same fields as in upstream) -""" +@pytest.fixture +def user_provisioner(mocker: MockerFixture) -> UserProvisioner: + """Mock instance of UserProvisioner.""" + provisioner_instance = UserProvisioner.create( + host="https://localhost:3000", token="token" + ) + + # Patch the API + mocker.patch.object(provisioner_instance, "_api", return_value=None) + + return provisioner_instance + + +def parse_user_data(user_data: list[dict]) -> list[CatalogUser]: + """Parse json user metadata to CatalogUser objects.""" + users: list[CatalogUser] = [] + for user in user_data: + users.append( + CatalogUser( + id=user["user_id"], + attributes=CatalogUserAttributes( + firstname=user["firstname"], + lastname=user["lastname"], + email=user["email"], + authentication_id=user["authentication_id"], + ), + relationships=CatalogUserRelationships( + user_groups=CatalogUserGroupsData( + data=[ + CatalogUserGroup(id=group) + for group in user["user_groups"] + ] + ) + ), + ) + ) + return sorted(users, key=lambda x: x.id) + + +@pytest.mark.parametrize( + ("input_path", "expected_path", "load_method"), + [ + ( + "users_input_full_load.json", + "users_expected_full_load.json", + "full_load", + ), + ( + "users_input_incremental_load.json", + "users_expected_incremental_load.json", + "incremental_load", + ), + ], +) +def test_user_provisioning( + input_path: str, + expected_path: str, + load_method: Literal["full_load", "incremental_load"], + user_provisioner: UserProvisioner, + mocker: MockerFixture, +): + """Test complete user provisioning workflow by checking that the script will + attempt to create, update or delete expected users for given input.""" + + # Load input data + with open(f"{TEST_DATA_SUBDIR}/{input_path}", "r") as f: + input_data = json.load(f) + + # Load expected data + with open(f"{TEST_DATA_SUBDIR}/{expected_path}", "r") as f: + raw_expected_data = json.load(f) + + # Load and patch "existing users" + with open(f"{TEST_DATA_SUBDIR}/existing_upstream_users.json", "r") as f: + raw_upstream_users = json.load(f) + + upstream_users = parse_user_data(raw_upstream_users) + + mocker.patch.object( + user_provisioner._api, + "list_users", + return_value=upstream_users, + ) + + upstream_user_cache = {user.id: user for user in upstream_users} + + def patch_get_user(user_id: str): + if user_id in upstream_user_cache: + return upstream_user_cache[user_id] + raise NotFoundException(f"User {user_id} not found") + + mocker.patch.object( + user_provisioner._api._sdk.catalog_user, + "get_user", + side_effect=patch_get_user, + ) + + # Parse expected data + expected_deleted_users = sorted(raw_expected_data["deleted_users"]) + raw_expected_modified_users = raw_expected_data["modified_users"] + + expected_modified_users = parse_user_data(raw_expected_modified_users) + + # Patch the API methods to store which users were modified or deleted + created_or_updated_users: list[CatalogUser] = [] + deleted_users: list[str] = [] + + def patch_create_or_update_user(user: CatalogUser, *args, **kwargs): + created_or_updated_users.append(user) + + def patch_delete_user(user_id: str, *args, **kwargs): + 
deleted_users.append(user_id) + + mocker.patch.object( + user_provisioner._api, + "create_or_update_user", + side_effect=patch_create_or_update_user, + ) + mocker.patch.object( + user_provisioner._api, + "delete_user", + side_effect=patch_delete_user, + ) + + # Run the provisioning + if load_method == "incremental_load": + incremental_load_data = UserIncrementalLoad.from_list_of_dicts( + input_data + ) + user_provisioner.incremental_load(incremental_load_data) + else: + full_load_data = UserFullLoad.from_list_of_dicts(input_data) + user_provisioner.full_load(full_load_data) + + # Compare list lengths + assert len(created_or_updated_users) == len(expected_modified_users) + assert len(deleted_users) == len(expected_deleted_users) + + # Sort the actual data + created_or_updated_users = sorted( + created_or_updated_users, key=lambda x: x.id + ) + deleted_users = sorted(deleted_users) + + # Compare the actual data + assert deleted_users == expected_deleted_users + assert created_or_updated_users == expected_modified_users diff --git a/gooddata-pipelines/tox.ini b/gooddata-pipelines/tox.ini index 1ded2eb46..d61461c59 100644 --- a/gooddata-pipelines/tox.ini +++ b/gooddata-pipelines/tox.ini @@ -8,8 +8,7 @@ deps = pytest-mock poetry commands = - poetry install - poetry install --extras dev + poetry install --with dev poetry run pytest [testenv:mypy]
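One behavioral change in provisioning.py deserves a callout: failures are no longer only logged. A condensed sketch of the new failure path follows; StubLogger is a hypothetical stand-in (only the subscribers attribute and error() method are assumed from this diff, not the real logger implementation):

    # Hypothetical stand-in for the pipeline logger used in this sketch.
    class StubLogger:
        def __init__(self) -> None:
            self.subscribers: list = []

        def error(self, message: str) -> None:
            for subscriber in self.subscribers:
                subscriber(message)

    def handle_fatal_exception(logger: StubLogger, e: Exception) -> None:
        # Mirrors Provisioning._handle_fatal_exception from the diff above.
        context = f"Context: {e.__dict__}" if hasattr(e, "__dict__") else ""
        message = f"Provisioning failed. Error: {e}. " + context
        logger.error(message)
        if not logger.subscribers:
            raise Exception(message)

    # With no subscriber attached, the failure now surfaces to the caller:
    try:
        handle_fatal_exception(StubLogger(), ValueError("boom"))
    except Exception as err:
        print(err)  # Provisioning failed. Error: boom. Context: {}

Callers that previously relied on full_load() and incremental_load() swallowing errors should now either attach a logger subscriber or wrap the calls in try/except.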