
Commit 5836acf

feat(gooddata-pipelines): Configurable parallelism of backup
1 parent 9b24663

2 files changed: 14 additions, 2 deletions

gooddata-pipelines/gooddata_pipelines/backup_and_restore/constants.py

2 additions, 1 deletion

@@ -1,3 +1,4 @@
+# (C) 2025 GoodData Corporation
 import datetime
 from dataclasses import dataclass
 
@@ -22,7 +23,7 @@ class DirNames:
 
 @dataclass(frozen=True)
 class ConcurrencyDefaults:
-    MAX_WORKERS = 2
+    MAX_WORKERS = 1
     DEFAULT_BATCH_SIZE = 100
 
 
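The constants change only lowers the built-in default from 2 workers to 1; the configurable knob promised by the commit title is the new max_workers field on BackupRestoreConfig in the second file. As an illustrative sketch only (the backup loop itself is not part of this commit, and backup_workspace / workspace_ids are hypothetical names), a value like this is typically handed to a thread pool:

from concurrent.futures import ThreadPoolExecutor

from gooddata_pipelines.backup_and_restore.constants import ConcurrencyDefaults


def backup_workspace(workspace_id: str) -> None:
    # Hypothetical per-workspace backup task; stands in for the real pipeline step.
    ...


def run_backups(
    workspace_ids: list[str],
    max_workers: int = ConcurrencyDefaults.MAX_WORKERS,
) -> None:
    # Run at most `max_workers` backups concurrently; with the new default of 1
    # the loop is effectively sequential unless the caller raises the limit.
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        list(executor.map(backup_workspace, workspace_ids))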
gooddata-pipelines/gooddata_pipelines/backup_and_restore/models/storage.py

12 additions, 1 deletion

@@ -6,7 +6,10 @@
 import yaml
 from pydantic import BaseModel, Field
 
-from gooddata_pipelines.backup_and_restore.constants import BackupSettings
+from gooddata_pipelines.backup_and_restore.constants import (
+    BackupSettings,
+    ConcurrencyDefaults,
+)
 
 
 class StorageType(Enum):
@@ -83,6 +86,14 @@ class BackupRestoreConfig(BaseModel):
             description="Batch size must be greater than 0",
         ),
     ] = Field(default=BackupSettings.DEFAULT_BATCH_SIZE)
+    max_workers: Annotated[
+        int,
+        Field(
+            gt=0,
+            lt=3,
+            description="Max workers must be greater than 0 and less than 3",
+        ),
+    ] = Field(default=ConcurrencyDefaults.MAX_WORKERS)
 
     @classmethod
     def from_yaml(cls, conf_path: str) -> "BackupRestoreConfig":
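With gt=0 and lt=3, the only values max_workers accepts are 1 and 2, and it defaults to ConcurrencyDefaults.MAX_WORKERS (now 1). A minimal standalone sketch of how pydantic enforces this bound, using a hypothetical DemoConfig that mirrors just this field (the real BackupRestoreConfig may require additional fields such as storage settings):

from typing import Annotated

from pydantic import BaseModel, Field, ValidationError


class DemoConfig(BaseModel):
    # Mirrors the new field: only 1 or 2 workers pass validation, default is 1.
    max_workers: Annotated[
        int,
        Field(
            gt=0,
            lt=3,
            description="Max workers must be greater than 0 and less than 3",
        ),
    ] = Field(default=1)


print(DemoConfig().max_workers)               # 1 (default)
print(DemoConfig(max_workers=2).max_workers)  # 2

try:
    DemoConfig(max_workers=3)
except ValidationError as exc:
    print(exc)  # reports that the input must be less than 3

Assuming from_yaml feeds the parsed YAML mapping straight into the model, the same limit applies to a max_workers key in the config file: 1 or 2 is accepted, anything else is rejected at load time.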
