We read every piece of feedback, and take your input very seriously.
To see all available qualifiers, see our documentation.
There was an error while loading. Please reload this page.
1 parent e1c5281 · commit 1208863 (Copy full SHA for 1208863)
comfy/lora.py
@@ -245,7 +245,7 @@ def model_lora_keys_unet(model, key_map={}):
245
key_lora = k[len("diffusion_model."):-len(".weight")].replace(".", "_")
246
key_map["lora_unet_{}".format(key_lora)] = k
247
key_map["lora_prior_unet_{}".format(key_lora)] = k #cascade lora: TODO put lora key prefix in the model config
248
- key_map["model.{}".format(k[:-len(".weight")])] = k #generic lora format without any weird key names
+ key_map["{}".format(k[:-len(".weight")])] = k #generic lora format without any weird key names
249
250
diffusers_keys = comfy.utils.unet_to_diffusers(model.model_config.unet_config)
251
for k in diffusers_keys:
0 commit comments