
Commit febbdeb

Merge pull request #1108 from gooddata/aben/aws-credentials
feat: Allow using credentials for AWS session
2 parents 18ed9c5 + 1cd1fab commit febbdeb

File tree

3 files changed: +32 -17 lines changed


gooddata-pipelines/README.md

Lines changed: 9 additions & 12 deletions
````diff
@@ -1,8 +1,8 @@
 # GoodData Pipelines

-A high level library for automating the lifecycle of GoodData Cloud (GDC).
+A high-level library for automating the lifecycle of GoodData Cloud (GDC).

-You can use the package to manage following resoursec in GDC:
+You can use the package to manage following resources in GDC:

 1. Provisioning (create, update, delete)
    - User profiles
@@ -14,7 +14,7 @@ You can use the package to manage following resoursec in GDC:
 1. _[PLANNED]:_ Custom fields management
    - extend the Logical Data Model of a child workspace

-In case you are not interested in incorporating a library in your own program, but would like to use a ready-made script, consider having a look at [GoodData Productivity Tools](https://github.com/gooddata/gooddata-productivity-tools).
+In case you are not interested in incorporating a library in your own program but would like to use a ready-made script, consider having a look at [GoodData Productivity Tools](https://github.com/gooddata/gooddata-productivity-tools).

 ## Provisioning

@@ -31,23 +31,20 @@ import os
 from csv import DictReader
 from pathlib import Path

-# Import the Entity Provisioner class and corresponing model from gooddata_pipelines library
+# Import the Entity Provisioner class and corresponding model from gooddata_pipelines library
 from gooddata_pipelines import UserFullLoad, UserProvisioner
+from gooddata_pipelines.logger.logger import LogObserver

-# Optional: you can set up logging and subscribe it to the Provisioner
-from utils.logger import setup_logging
-
-setup_logging()
+# Optionally, subscribe a standard Python logger to the LogObserver
+import logging
 logger = logging.getLogger(__name__)
+LogObserver().subscribe(logger)

 # Create the Provisioner instance - you can also create the instance from a GDC yaml profile
 provisioner = UserProvisioner(
     host=os.environ["GDC_HOSTNAME"], token=os.environ["GDC_AUTH_TOKEN"]
 )

-# Optional: subscribe to logs
-provisioner.logger.subscribe(logger)
-
 # Load your data from your data source
 source_data_path: Path = Path("path/to/some.csv")
 source_data_reader = DictReader(source_data_path.read_text().splitlines())
@@ -60,4 +57,4 @@ full_load_data: list[UserFullLoad] = UserFullLoad.from_list_of_dicts(
 provisioner.full_load(full_load_data)
 ```

-Ready made scripts covering the basic use cases can be found here in the [GoodData Productivity Tools](https://github.com/gooddata/gooddata-productivity-tools) repository
+Ready-made scripts covering the basic use cases can be found here in the [GoodData Productivity Tools](https://github.com/gooddata/gooddata-productivity-tools) repository
````
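
The README now wires logging through `LogObserver` from the library itself instead of the removed `utils.logger` helper. A minimal, self-contained sketch of that pattern, assuming only the import path shown in the diff (the `basicConfig` call is ordinary standard-library setup, not part of this change):

```python
import logging

from gooddata_pipelines.logger.logger import LogObserver

# Configure an ordinary Python logger; any handlers/formatters work here.
logging.basicConfig(level=logging.INFO, format="%(levelname)s %(name)s: %(message)s")
logger = logging.getLogger(__name__)

# Subscribe the logger so pipeline components emit their messages through it.
LogObserver().subscribe(logger)
```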

gooddata-pipelines/gooddata_pipelines/backup_and_restore/models/storage.py

Lines changed: 4 additions & 1 deletion
```diff
@@ -1,7 +1,7 @@
 # (C) 2025 GoodData Corporation

 from enum import Enum
-from typing import Annotated, TypeAlias
+from typing import Annotated, TypeAlias, Optional

 import yaml
 from pydantic import BaseModel, Field
@@ -22,6 +22,9 @@ class S3StorageConfig(BaseModel):
     backup_path: str
     bucket: str
     profile: str = "default"
+    aws_access_key_id: Optional[str] = None
+    aws_secret_access_key: Optional[str] = None
+    aws_default_region: Optional[str] = None


 class LocalStorageConfig(BaseModel):
```
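
The new optional fields let a backup/restore configuration carry AWS credentials directly instead of relying on a local profile. A minimal sketch of both variants, with placeholder values, assuming `S3StorageConfig` is imported from its module path:

```python
# Sketch only: bucket names, paths, and credentials below are placeholders.
from gooddata_pipelines.backup_and_restore.models.storage import S3StorageConfig

# Explicit credentials: the S3 storage backend builds the boto3 session from
# these values (see s3_storage.py below) and warns if no region is supplied.
with_credentials = S3StorageConfig(
    backup_path="backups/",
    bucket="my-gdc-backups",
    aws_access_key_id="AKIA...",
    aws_secret_access_key="...",
    aws_default_region="eu-west-1",
)

# No credentials: the session falls back to the named AWS profile, as before.
with_profile = S3StorageConfig(
    backup_path="backups/",
    bucket="my-gdc-backups",
    profile="backup",
)
```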

gooddata-pipelines/gooddata_pipelines/backup_and_restore/storage/s3_storage.py

Lines changed: 19 additions & 4 deletions
```diff
@@ -21,18 +21,33 @@ def __init__(self, conf: BackupRestoreConfig):
             raise ValueError("S3 storage config is required")

         self._config = conf.storage
-        self._profile = self._config.profile
-        self._session = self._create_boto_session(self._profile)
+        self._session = self._create_boto_session(self._config)
         self._resource = self._session.resource("s3")
         self._bucket = self._resource.Bucket(self._config.bucket) # type: ignore [missing library stubs]
         suffix = "/" if not self._config.backup_path.endswith("/") else ""
         self._backup_path = self._config.backup_path + suffix

         self._verify_connection()

-    def _create_boto_session(self, profile: str) -> boto3.Session:
+    def _create_boto_session(self, config: S3StorageConfig) -> boto3.Session:
+        if config.aws_access_key_id and config.aws_secret_access_key:
+            if not config.aws_default_region:
+                self.logger.warning(
+                    "No AWS region specified. Defaulting to us-east-1."
+                )
+            try:
+                return boto3.Session(
+                    aws_access_key_id=config.aws_access_key_id,
+                    aws_secret_access_key=config.aws_secret_access_key,
+                    region_name=config.aws_default_region,
+                )
+            except Exception:
+                self.logger.warning(
+                    "Failed to create boto3 session with supplied credentials. Falling back to profile..."
+                )
+
         try:
-            return boto3.Session(profile_name=profile)
+            return boto3.Session(profile_name=config.profile)
         except Exception:
             self.logger.warning(
                 'AWS profile "[default]" not found. Trying other fallback methods...'
```