fixed LoRA per-phase crash

deepbeepmeep 2025-08-04 12:04:04 +02:00
parent 9c2e892fc9
commit c738462831
4 changed files with 10 additions and 10 deletions

requirements.txt

@@ -17,7 +17,7 @@ gradio==5.23.0
 numpy>=1.23.5,<2
 einops
 moviepy==1.0.3
-mmgp==3.5.5
+mmgp==3.5.6
 peft==0.15.0
 mutagen
 pydantic==2.10.6
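
The only change above is the mmgp pin, 3.5.5 to 3.5.6. It moves in lockstep with the target_mmgp_version constant bumped in wgp.py below, so existing installs need a "pip install -r requirements.txt" (or an explicit mmgp upgrade) to stay in sync.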


@@ -762,11 +762,11 @@ class WanModel(ModelMixin, ConfigMixin):
         offload.shared_state["_chipmunk_layers"] = None
     def preprocess_loras(self, model_type, sd):
-        new_sd = {}
-        for k,v in sd.items():
-            if not k.endswith(".modulation.diff"):
-                new_sd[ k] = v
-        sd = new_sd
+        # new_sd = {}
+        # for k,v in sd.items():
+        #     if not k.endswith(".modulation.diff"):
+        #         new_sd[ k] = v
+        # sd = new_sd
         first = next(iter(sd), None)
         if first == None:
             return sd

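For context, the block commented out above used to strip every LoRA tensor whose key ends in ".modulation.diff" before the state dict reached the loader; after this commit those entries pass through again. A minimal sketch of the disabled behaviour, assuming a plain dict state dict (filter_modulation_diff is a hypothetical name, not a WanGP function):

# Hypothetical sketch of the filtering this commit disables.
def filter_modulation_diff(sd: dict) -> dict:
    # Drop any LoRA tensor whose key ends in ".modulation.diff".
    return {k: v for k, v in sd.items() if not k.endswith(".modulation.diff")}

sd = {"blocks.0.attn.lora_A": 1.0, "blocks.0.modulation.diff": 2.0}
assert list(filter_modulation_diff(sd)) == ["blocks.0.attn.lora_A"]
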

@@ -52,7 +52,7 @@ def parse_loras_multipliers(loras_multipliers, nb_loras, num_inference_steps, me
             phase_mult = mult.split(";")
             shared_phases = len(phase_mult) <=1
             if len(phase_mult) > max_phases:
-                return "", "", f"Loras can not be defined for more than {max_phases} Denoising phases for this model"
+                return "", "", f"Loras can not be defined for more than {max_phases} Denoising phase{'s' if max_phases>1 else ''} for this model"
             for phase_no, mult in enumerate(phase_mult):
                 if phase_no > 0: current_phase = phase2
                 if "," in mult:

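The grammar handled in this hunk: a LoRA's multiplier string is split on ";" into one value per denoising phase (the surrounding lines also accept "," for per-step values inside a phase). The changed line only fixes pluralization when max_phases is 1. A self-contained sketch of the limit check, with split_phases as a hypothetical stand-in for the real parser:

# Hypothetical sketch of the per-phase split and its limit check.
def split_phases(mult: str, max_phases: int) -> list[str]:
    phase_mult = mult.split(";")  # ";" separates per-phase multipliers
    if len(phase_mult) > max_phases:
        raise ValueError(
            f"Loras can not be defined for more than {max_phases} "
            f"Denoising phase{'s' if max_phases > 1 else ''} for this model"
        )
    return phase_mult

assert split_phases("1.2;0.8", max_phases=2) == ["1.2", "0.8"]
# With max_phases=1 the message now reads "... 1 Denoising phase ..."
# instead of the old "... 1 Denoising phases ...".
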
wgp.py

@@ -50,8 +50,8 @@ global_queue_ref = []
 AUTOSAVE_FILENAME = "queue.zip"
 PROMPT_VARS_MAX = 10
-target_mmgp_version = "3.5.5"
-WanGP_version = "7.6"
+target_mmgp_version = "3.5.6"
+WanGP_version = "7.61"
 settings_version = 2.23
 max_source_video_frames = 3000
 prompt_enhancer_image_caption_model, prompt_enhancer_image_caption_processor, prompt_enhancer_llm_model, prompt_enhancer_llm_tokenizer = None, None, None, None
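
target_mmgp_version mirrors the requirements.txt pin above, and WanGP_version moves to 7.61 for this patch release. A minimal sketch, assuming wgp.py uses the constant to verify the installed package at startup (the actual check is not part of this diff):

# Hypothetical startup guard comparing the constant to the installed mmgp.
from importlib.metadata import version

target_mmgp_version = "3.5.6"
installed = version("mmgp")
if installed != target_mmgp_version:
    raise RuntimeError(
        f"mmgp {installed} is installed but this build expects "
        f"{target_mmgp_version}; run pip install -r requirements.txt"
    )
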
@@ -297,7 +297,7 @@ def process_prompt_and_add_tasks(state, model_choice):
     activated_loras = inputs["activated_loras"]
     if len(loras_multipliers) > 0:
-        _, _, errors = parse_loras_multipliers(loras_multipliers, len(activated_loras), num_inference_steps)
+        _, _, errors = parse_loras_multipliers(loras_multipliers, len(activated_loras), num_inference_steps, max_phases= 2 if get_model_family(model_type)=="wan" and model_type not in ["sky_df_1.3B", "sky_df_14B"] else 1)
         if len(errors) > 0:
             gr.Info(f"Error parsing Loras Multipliers: {errors}")
             return
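
This call site is the crash fix proper: the parser now receives the number of denoising phases the current model actually supports instead of relying on its default, so over-long per-phase multiplier lists are rejected during validation rather than failing later. The inline expression, restated as a hedged helper (max_phases_for is hypothetical, get_model_family and the sky_df_* ids come from the hunk, and the stub exists only so the sketch runs):

# Hypothetical restatement of the inline max_phases expression.
def get_model_family(model_type: str) -> str:
    return "wan"  # stub; the real helper is WanGP-internal

def max_phases_for(model_type: str) -> int:
    # Per this hunk: Wan-family models allow two denoising phases,
    # except the sky_df_* (Diffusion Forcing) variants, which allow one.
    if get_model_family(model_type) == "wan" and model_type not in ["sky_df_1.3B", "sky_df_14B"]:
        return 2
    return 1

assert max_phases_for("sky_df_14B") == 1
assert max_phases_for("i2v_14B") == 2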