@@ -373,7 +373,7 @@ class EngineArgs:
     # LoRA fields
     enable_lora: bool = False
     enable_lora_bias: bool = LoRAConfig.bias_enabled
-    enable_activated_lora: bool = LoRAConfig.activated_lora_enabled
+    enable_activated_lora: bool = LoRAConfig.enable_activated_lora
     max_loras: int = LoRAConfig.max_loras
     max_lora_rank: int = LoRAConfig.max_lora_rank
     default_mm_loras: Optional[Dict[str, str]] = \
@@ -796,7 +796,7 @@ def add_cli_args(parser: FlexibleArgumentParser) -> FlexibleArgumentParser:
         lora_group.add_argument("--enable-lora-bias",
                                 **lora_kwargs["bias_enabled"])
         lora_group.add_argument("--enable-activated-lora",
-                                **lora_kwargs["activated_lora_enabled"])
+                                **lora_kwargs["enable_activated_lora"])
         lora_group.add_argument("--max-loras", **lora_kwargs["max_loras"])
         lora_group.add_argument("--max-lora-rank",
                                 **lora_kwargs["max_lora_rank"])
@@ -1372,7 +1372,7 @@ def create_engine_config(

         lora_config = LoRAConfig(
             bias_enabled=self.enable_lora_bias,
-            activated_lora_enabled=self.enable_activated_lora,
+            enable_activated_lora=self.enable_activated_lora,
             max_lora_rank=self.max_lora_rank,
             max_loras=self.max_loras,
             default_mm_loras=self.default_mm_loras,
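For reference, a minimal usage sketch (not part of the diff) of how the renamed field is expected to line up across the dataclass, the CLI flag, and `LoRAConfig`. Only the field and flag names below are taken from the diff; the import path and the `vllm serve` invocation are assumptions based on the usual upstream vLLM layout.

```python
# Minimal sketch, assuming EngineArgs lives at vllm.engine.arg_utils as in
# upstream vLLM; only the field/flag names are confirmed by this patch.
from vllm.engine.arg_utils import EngineArgs

# Dataclass path: the field now mirrors LoRAConfig.enable_activated_lora.
args = EngineArgs(
    enable_lora=True,
    enable_activated_lora=True,  # renamed from activated_lora_enabled
    max_loras=4,
    max_lora_rank=16,
)

# CLI path: the --enable-activated-lora flag added above populates the same
# field, e.g. (hypothetical invocation):
#   vllm serve <model> --enable-lora --enable-activated-lora
```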