@@ -263,10 +263,10 @@ def test_framework_raises_if_used_with_missing_package():
263263 ValueError, match="No acceleration framework package found."
264264 ):
265265 sft_trainer.train(
266- MODEL_ARGS,
267- DATA_ARGS,
268- TRAIN_ARGS,
269- PEFT_LORA_ARGS,
266+ copy.deepcopy(MODEL_ARGS),
267+ copy.deepcopy(DATA_ARGS),
268+ copy.deepcopy(TRAIN_ARGS),
269+ copy.deepcopy(PEFT_LORA_ARGS),
270270 quantized_lora_config=quantized_lora_config,
271271 )
272272
272272
@@ -320,9 +320,9 @@ def test_framework_raises_due_to_invalid_arguments(
320320 with pytest.raises(exception, match=exception_msg):
321321 sft_trainer.train(
322322 model_args,
323- DATA_ARGS,
323+ copy.deepcopy(DATA_ARGS),
324324 train_args,
325- peft_config,
325+ copy.deepcopy(peft_config),
326326 quantized_lora_config=quantized_lora_config,
327327 )
328328
@@ -379,7 +379,7 @@ def test_framework_initialized_properly_peft(
379379 train_args = copy.deepcopy(TRAIN_ARGS)
380380 train_args.output_dir = tempdir
381381 train_args.save_strategy = "no"
382- train_args.fp16 = True
382+ train_args.bf16 = True
383383 peft_args = copy.deepcopy(PEFT_LORA_ARGS)
384384 peft_args.target_modules = ["q_proj", "k_proj"]
385385
@@ -395,7 +395,7 @@ def test_framework_initialized_properly_peft(
395395 with instantiate_model_patcher():
396396 sft_trainer.train(
397397 model_args,
398- DATA_ARGS,
398+ copy.deepcopy(DATA_ARGS),
399399 train_args,
400400 peft_args,
401401 quantized_lora_config=quantized_lora_config,
@@ -430,7 +430,7 @@ def test_framework_initialized_properly_foak():
430430 train_args = copy.deepcopy(TRAIN_ARGS)
431431 train_args.output_dir = tempdir
432432 train_args.save_strategy = "no"
433- train_args.fp16 = True
433+ train_args.bf16 = True
434434 peft_args = copy.deepcopy(PEFT_LORA_ARGS)
435435 peft_args.target_modules = ["q_proj", "k_proj"]
436436
@@ -465,7 +465,7 @@ def test_framework_initialized_properly_foak():
465465 with instantiate_model_patcher():
466466 sft_trainer.train(
467467 model_args,
468- DATA_ARGS,
468+ copy.deepcopy(DATA_ARGS),
469469 train_args,
470470 peft_args,
471471 quantized_lora_config=quantized_lora_config,
@@ -613,8 +613,8 @@ def test_error_raised_with_paddingfree_and_flash_attn_disabled():
613613 model_args.use_flash_attn = False
614614 sft_trainer.train(
615615 model_args,
616- DATA_ARGS,
617- TRAIN_ARGS,
616+ copy.deepcopy(DATA_ARGS),
617+ copy.deepcopy(TRAIN_ARGS),
618618 attention_and_distributed_packing_config=attention_and_distributed_packing_config,
619619 )
620620
620620
@@ -637,8 +637,8 @@ def test_error_raised_with_multipack_and_paddingfree_disabled():
637637 model_args = copy.deepcopy(MODEL_ARGS)
638638 sft_trainer.train(
639639 model_args,
640- DATA_ARGS,
641- TRAIN_ARGS,
640+ copy.deepcopy(DATA_ARGS),
641+ copy.deepcopy(TRAIN_ARGS),
642642 attention_and_distributed_packing_config=attention_and_distributed_packing_config,
643643 )
644644
644644
@@ -664,7 +664,7 @@ def test_error_raised_with_packing_and_paddingfree_enabled():
664664 train_args.packing = True
665665 sft_trainer.train(
666666 model_args,
667- DATA_ARGS,
667+ copy.deepcopy(DATA_ARGS),
668668 train_args,
669669 attention_and_distributed_packing_config=attention_and_distributed_packing_config,
670670 )
@@ -693,7 +693,6 @@ def test_error_raised_with_fused_lora_enabled_without_quantized_argument():
693693 train_args = copy.deepcopy(TRAIN_ARGS)
694694 train_args.output_dir = tempdir
695695 train_args.save_strategy = "no"
696- train_args.fp16 = True
697696 peft_args = copy.deepcopy(PEFT_LORA_ARGS)
698697 peft_args.target_modules = ["q_proj", "k_proj"]
699698
@@ -713,7 +712,7 @@ def test_error_raised_with_fused_lora_enabled_without_quantized_argument():
713712 with instantiate_model_patcher():
714713 sft_trainer.train(
715714 model_args,
716- DATA_ARGS,
715+ copy.deepcopy(DATA_ARGS),
717716 train_args,
718717 peft_args,
719718 quantized_lora_config=None,
0 commit comments