fix crash when any lora is loaded

DeepBeepMeep 2025-03-04 21:39:18 +01:00
parent 2be2cc21f3
commit d200a54336
2 changed files with 3 additions and 2 deletions


@@ -350,7 +350,7 @@ def setup_loras(pipe, lora_dir, lora_preselected_preset, split_linear_modules_m
     if len(loras) > 0:
         loras_names = [ Path(lora).stem for lora in loras ]
-        offload.load_loras_into_model(pipe.transformer, loras, activate_all_loras=False, split_linear_modules_map = split_linear_modules_map) #lora_multiplier,
+        offload.load_loras_into_model(pipe["transformer"], loras, activate_all_loras=False, split_linear_modules_map = split_linear_modules_map) #lora_multiplier,
     if len(lora_preselected_preset) > 0:
         if not os.path.isfile(os.path.join(lora_dir, lora_preselected_preset + ".lset")):
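The crash comes from treating pipe as an object when it is in fact a plain dict keyed by component name. A minimal sketch (not from the repo, using a stand-in value) of why attribute access fails and subscript access does not:

    # pipe holds pipeline components under string keys
    pipe = {"transformer": object()}  # stand-in for the real transformer module

    try:
        pipe.transformer  # crashes: dicts expose no attribute access
    except AttributeError as err:
        print(err)  # 'dict' object has no attribute 'transformer'

    print(pipe["transformer"])  # subscript access succeeds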


@@ -16,4 +16,5 @@ gradio>=5.0.0
 numpy>=1.23.5,<2
 einops
 moviepy==1.0.3
 mmgp==3.2.3
+peft==0.14.0
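The requirements diff pins peft==0.14.0, presumably needed by the LoRA loading path this commit fixes. A hedged sketch (not part of the commit) of a startup check that verifies the pinned packages resolve to the expected versions:

    from importlib.metadata import PackageNotFoundError, version

    # pins copied from requirements.txt above
    pins = {"moviepy": "1.0.3", "mmgp": "3.2.3", "peft": "0.14.0"}
    for pkg, pinned in pins.items():
        try:
            installed = version(pkg)
            note = "OK" if installed == pinned else f"expected {pinned}"
            print(f"{pkg} {installed}: {note}")
        except PackageNotFoundError:
            print(f"{pkg}: not installed (pinned to {pinned})")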