fixed autodownloads

deepbeepmeep 2025-07-29 09:08:34 +02:00
parent 4226fbece7
commit f603d77138
3 changed files with 7 additions and 4 deletions

@@ -19,7 +19,7 @@
     },
     "num_inference_steps": 10,
     "guidance_scale": 1,
-    "guidance_scale": 2,
+    "guidance2_scale": 1,
     "flow_shift": 2,
     "switch_threshold" : 875
 }

@@ -14,6 +14,6 @@
     },
     "num_inference_steps": 10,
    "guidance_scale": 1,
-    "guidance_scale": 2,
+    "guidance2_scale": 1,
     "flow_shift": 2
 }
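
Both hunks above apply the same fix to two settings JSON files (not named in this view): the old configs declared "guidance_scale" twice, and the second occurrence was evidently meant to be "guidance2_scale" (its value is also reset from 2 to 1). Most JSON parsers accept duplicate keys and silently keep only the last value, so the intended "guidance_scale": 1 was being discarded. A minimal illustration in Python; the reject_duplicates helper is hypothetical, not part of the repo:

import json
from collections import Counter

# Pre-fix behavior: the later duplicate silently wins, so the intended
# "guidance_scale": 1 was overwritten.
print(json.loads('{"guidance_scale": 1, "guidance_scale": 2}'))
# -> {'guidance_scale': 2}

# Hypothetical lint helper that rejects duplicate keys in settings files.
def reject_duplicates(pairs):
    counts = Counter(key for key, _ in pairs)
    dupes = [key for key, n in counts.items() if n > 1]
    if dupes:
        raise ValueError(f"duplicate JSON keys: {dupes}")
    return dict(pairs)

# After the rename the keys are distinct and the check passes.
json.loads('{"guidance_scale": 1, "guidance2_scale": 1}',
           object_pairs_hook=reject_duplicates)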

wgp.py

@@ -2761,14 +2761,17 @@ def load_models(model_type):
     preload = server_config.get("preload_in_VRAM", 0)
     model_file_list = [model_filename]
     model_type_list = [model_type]
+    model_submodel_no_list = [1]
     if model_filename2 != None:
         model_file_list += [model_filename2]
         model_type_list += [model_type]
+        model_submodel_no_list += [2]
     for module_type in modules:
         model_file_list.append(get_model_filename(module_type, transformer_quantization, transformer_dtype, is_module= True))
         model_type_list.append(module_type)
-    for filename, file_model_type in zip(model_file_list, model_type_list):
-        download_models(filename, file_model_type)
+        model_submodel_no_list.append(0)
+    for filename, file_model_type, submodel_no in zip(model_file_list, model_type_list, model_submodel_no_list):
+        download_models(filename, file_model_type, submodel_no)
     VAE_dtype = torch.float16 if server_config.get("vae_precision","16") == "16" else torch.float
     mixed_precision_transformer = server_config.get("mixed_precision","0") == "1"
     transformer_type = None
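
The wgp.py hunk is the autodownload fix itself: download_models now receives a submodel number alongside the filename and model type (1 for the main transformer file, 2 for the optional second file model_filename2, 0 for attached modules), so the downloader knows which slot each file fills. The commit keeps three parallel lists in sync by hand; below is a sketch of the same bookkeeping as a single list of tuples. collect_downloads, get_module_filename, the stub bodies, and the example filenames are hypothetical; download_models and the submodel numbering follow the diff.

# Hypothetical stand-ins so the sketch runs on its own; in wgp.py the real
# helpers are get_model_filename(..., is_module=True) and download_models.
def get_module_filename(module_type):
    return f"{module_type}.safetensors"

def download_models(filename, model_type, submodel_no):
    print(f"downloading {filename} (type={model_type}, submodel={submodel_no})")

def collect_downloads(model_filename, model_filename2, modules, model_type):
    downloads = [(model_filename, model_type, 1)]        # submodel 1: main transformer
    if model_filename2 is not None:                      # submodel 2: optional second file
        downloads.append((model_filename2, model_type, 2))
    for module_type in modules:                          # submodel 0: attached modules
        downloads.append((get_module_filename(module_type), module_type, 0))
    return downloads

# Example invocation with made-up filenames:
for item in collect_downloads("model.safetensors", "model_part2.safetensors", ["vace"], "t2v"):
    download_models(*item)

A tuple per file keeps its three attributes together, so a branch cannot append to one list and forget another.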