2 files changed: +14 −2
gooddata-pipelines/gooddata_pipelines/backup_and_restore

gooddata_pipelines/backup_and_restore/constants.py

@@ -1,3 +1,4 @@
+# (C) 2025 GoodData Corporation
 import datetime
 from dataclasses import dataclass
 
@@ -22,7 +23,7 @@ class DirNames:
 
 @dataclass(frozen=True)
 class ConcurrencyDefaults:
-    MAX_WORKERS = 2
+    MAX_WORKERS = 1
     DEFAULT_BATCH_SIZE = 100
 
 
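Lowering MAX_WORKERS from 2 to 1 changes the default fan-out of whatever worker pool consumes ConcurrencyDefaults. The snippet below is only a hedged illustration of how such a limit is commonly wired into a ThreadPoolExecutor; process_batch and the workspace list are hypothetical, and the repository's actual backup loop is not part of this diff.

# Illustration only: how a MAX_WORKERS / DEFAULT_BATCH_SIZE pair is typically
# consumed. process_batch and the workspace list are hypothetical; the real
# backup implementation is not shown in this diff.
from concurrent.futures import ThreadPoolExecutor
from dataclasses import dataclass


@dataclass(frozen=True)
class ConcurrencyDefaults:
    MAX_WORKERS = 1        # new default: effectively serial processing
    DEFAULT_BATCH_SIZE = 100


def process_batch(batch: list[str]) -> int:
    """Hypothetical stand-in for backing up one batch of workspaces."""
    return len(batch)


workspaces = [f"ws_{i}" for i in range(250)]
size = ConcurrencyDefaults.DEFAULT_BATCH_SIZE
batches = [workspaces[i : i + size] for i in range(0, len(workspaces), size)]

with ThreadPoolExecutor(max_workers=ConcurrencyDefaults.MAX_WORKERS) as pool:
    # With MAX_WORKERS = 1 the batches run one at a time.
    results = list(pool.map(process_batch, batches))

print(results)  # [100, 100, 50]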
gooddata_pipelines/backup_and_restore/… (second changed file)

@@ -6,7 +6,10 @@
 import yaml
 from pydantic import BaseModel, Field
 
-from gooddata_pipelines.backup_and_restore.constants import BackupSettings
+from gooddata_pipelines.backup_and_restore.constants import (
+    BackupSettings,
+    ConcurrencyDefaults,
+)
 
 
 class StorageType(Enum):
@@ -83,6 +86,14 @@ class BackupRestoreConfig(BaseModel):
             description="Batch size must be greater than 0",
         ),
     ] = Field(default=BackupSettings.DEFAULT_BATCH_SIZE)
+    max_workers: Annotated[
+        int,
+        Field(
+            gt=0,
+            lt=3,
+            description="Max workers must be greater than 0 and less than 3",
+        ),
+    ] = Field(default=ConcurrencyDefaults.MAX_WORKERS)
 
     @classmethod
     def from_yaml(cls, conf_path: str) -> "BackupRestoreConfig":
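The effect of the new field is that the worker count becomes a validated configuration value rather than a hard-coded constant: pydantic rejects anything outside the open interval (0, 3) and falls back to ConcurrencyDefaults.MAX_WORKERS (now 1) when the key is absent. The snippet below is a standalone sketch of that behaviour; ConcurrencySketch mirrors only the new field and is not an import from the repository.

# Standalone sketch mirroring the new max_workers constraint; illustrative only.
from typing import Annotated

from pydantic import BaseModel, Field, ValidationError


class ConcurrencySketch(BaseModel):
    # Same constraint as in BackupRestoreConfig: strictly between 0 and 3.
    max_workers: Annotated[
        int,
        Field(gt=0, lt=3, description="Max workers must be greater than 0 and less than 3"),
    ] = 1  # stands in for ConcurrencyDefaults.MAX_WORKERS after this change


print(ConcurrencySketch().max_workers)               # 1 -> the new default
print(ConcurrencySketch(max_workers=2).max_workers)  # 2 -> still accepted

try:
    ConcurrencySketch(max_workers=3)                 # lt=3 is exclusive, so this fails
except ValidationError as exc:
    print(exc.errors()[0]["msg"])                    # e.g. "Input should be less than 3"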