2 files changed: +12 −4 lines changed
@@ -453,7 +453,9 @@ class DataConfig:
     augmentation: "AugmentationConfig" = field(default_factory=lambda: AugmentationConfig())

     # CellMap-specific configuration (for CellMap Segmentation Challenge)
-    cellmap: Optional[Dict[str, Any]] = None  # CellMap-specific data config (see tutorials/cellmap_*.yaml)
+    cellmap: Optional[Dict[str, Any]] = (
+        None  # CellMap-specific data config (see tutorials/cellmap_*.yaml)
+    )


 @dataclass
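For context, here is a minimal sketch of how the new optional `cellmap` field might be populated from a YAML config and read back. The stripped-down `DataConfig` below and the keys under `cellmap:` are hypothetical illustrations (requires PyYAML), not the project's actual schema.

```python
# Sketch only: the dataclass and YAML keys are illustrative, not the real schema.
from dataclasses import dataclass
from typing import Any, Dict, Optional

import yaml


@dataclass
class DataConfig:
    # CellMap-specific data config (see tutorials/cellmap_*.yaml); None when unused
    cellmap: Optional[Dict[str, Any]] = None


raw = yaml.safe_load(
    """
cellmap:
  datasets: [jrc_hela-2]      # hypothetical key
  crop_size: [128, 128, 128]  # hypothetical key
"""
)

cfg = DataConfig(cellmap=raw.get("cellmap"))
if cfg.cellmap is not None:
    print(cfg.cellmap["datasets"])  # ['jrc_hela-2']
```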
@@ -195,7 +195,9 @@ def setup_config(args) -> Config:
         print("🔧 Fast-dev-run mode: Overriding config for debugging")
         print(f" - num_gpus: {cfg.system.training.num_gpus} → 1")
         print(f" - num_cpus: {cfg.system.training.num_cpus} → 1")
-        print(f" - num_workers: {cfg.system.training.num_workers} → 0 (avoid multiprocessing in debug mode)")
+        print(
+            f" - num_workers: {cfg.system.training.num_workers} → 0 (avoid multiprocessing in debug mode)"
+        )
         print(
             f" - batch_size: Controlled by PyTorch Lightning (--fast-dev-run={args.fast_dev_run})"
         )
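A minimal sketch of what the fast-dev-run overrides announced above might do. Only the attribute names (`num_gpus`, `num_cpus`, `num_workers`) come from the diff; the `TrainingSystem` stand-in and the helper function are assumptions for illustration.

```python
# Sketch only: simplified stand-in for the training section of the config.
from dataclasses import dataclass


@dataclass
class TrainingSystem:
    num_gpus: int = 4
    num_cpus: int = 16
    num_workers: int = 8


def apply_fast_dev_run(training: TrainingSystem) -> None:
    # Single device, single process: keeps the debug run simple and avoids
    # multiprocessing dataloader workers entirely.
    training.num_gpus = 1
    training.num_cpus = 1
    training.num_workers = 0


training = TrainingSystem()
apply_fast_dev_run(training)
print(training)  # TrainingSystem(num_gpus=1, num_cpus=1, num_workers=0)
```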
@@ -221,10 +223,14 @@ def setup_config(args) -> Config:
     # CPU-only fallback: avoid multiprocessing workers when no CUDA is available
     if not torch.cuda.is_available():
         if cfg.system.training.num_workers > 0:
-            print("🔧 CUDA not available, setting training num_workers=0 to avoid dataloader crashes")
+            print(
+                "🔧 CUDA not available, setting training num_workers=0 to avoid dataloader crashes"
+            )
             cfg.system.training.num_workers = 0
         if cfg.system.inference.num_workers > 0:
-            print("🔧 CUDA not available, setting inference num_workers=0 to avoid dataloader crashes")
+            print(
+                "🔧 CUDA not available, setting inference num_workers=0 to avoid dataloader crashes"
+            )
             cfg.system.inference.num_workers = 0

     # Apply inference-specific overrides if in test/tune mode
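To illustrate why the CPU-only fallback matters: with `num_workers=0`, `torch.utils.data.DataLoader` loads batches in the main process instead of spawning worker processes, which sidesteps the multiprocessing crashes the messages above refer to. The dataset below is a dummy placeholder, not part of the project.

```python
# Sketch only: mirrors the fallback logic with a dummy dataset.
import torch
from torch.utils.data import DataLoader, TensorDataset

# Same rule as the config override: no CUDA → no worker processes.
num_workers = 4 if torch.cuda.is_available() else 0

dataset = TensorDataset(torch.randn(32, 3), torch.randint(0, 2, (32,)))
loader = DataLoader(dataset, batch_size=8, num_workers=num_workers)

for xb, yb in loader:
    pass  # training/inference step would go here
```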