@CapsAdmin
Created December 22, 2023 03:35
Converts ip-adapter-faceid_sd15.bin["ip_adapter"]["*"] to a LoRA that can be loaded in ComfyUI as a normal LoRA. The key order may be wrong.
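Before running the conversion, it can help to check what the checkpoint actually contains. A minimal inspection sketch (not part of the gist), assuming the .bin file is a torch-pickled dict with an "ip_adapter" sub-dict of tensors, as the description above implies:

import torch

# Load the IP-Adapter FaceID checkpoint on the CPU and list the tensors
# under its "ip_adapter" entry, so the shapes can be compared against the
# LoRA key list used by the conversion script below.
state = torch.load("ip-adapter-faceid_sd15.bin", map_location="cpu")
ip_adapter = state["ip_adapter"]
for key, tensor in ip_adapter.items():
    print(key, tuple(tensor.shape))

The gist itself follows.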
import torch
import sys
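# Kohya-style LoRA key names for an SD1.5 checkpoint (text encoder and UNet),
# presumably used to define the order/naming of keys in the converted LoRA.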
all_lora_keys = [
"lora_te_text_model_encoder_layers_0_self_attn_k_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_0_self_attn_k_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_0_self_attn_k_proj.alpha",
"lora_te_text_model_encoder_layers_0_self_attn_v_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_0_self_attn_v_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_0_self_attn_v_proj.alpha",
"lora_te_text_model_encoder_layers_0_self_attn_q_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_0_self_attn_q_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_0_self_attn_q_proj.alpha",
"lora_te_text_model_encoder_layers_0_self_attn_out_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_0_self_attn_out_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_0_self_attn_out_proj.alpha",
"lora_te_text_model_encoder_layers_0_mlp_fc1.lora_down.weight",
"lora_te_text_model_encoder_layers_0_mlp_fc1.lora_up.weight",
"lora_te_text_model_encoder_layers_0_mlp_fc1.alpha",
"lora_te_text_model_encoder_layers_0_mlp_fc2.lora_down.weight",
"lora_te_text_model_encoder_layers_0_mlp_fc2.lora_up.weight",
"lora_te_text_model_encoder_layers_0_mlp_fc2.alpha",
"lora_te_text_model_encoder_layers_1_self_attn_k_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_1_self_attn_k_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_1_self_attn_k_proj.alpha",
"lora_te_text_model_encoder_layers_1_self_attn_v_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_1_self_attn_v_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_1_self_attn_v_proj.alpha",
"lora_te_text_model_encoder_layers_1_self_attn_q_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_1_self_attn_q_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_1_self_attn_q_proj.alpha",
"lora_te_text_model_encoder_layers_1_self_attn_out_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_1_self_attn_out_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_1_self_attn_out_proj.alpha",
"lora_te_text_model_encoder_layers_1_mlp_fc1.lora_down.weight",
"lora_te_text_model_encoder_layers_1_mlp_fc1.lora_up.weight",
"lora_te_text_model_encoder_layers_1_mlp_fc1.alpha",
"lora_te_text_model_encoder_layers_1_mlp_fc2.lora_down.weight",
"lora_te_text_model_encoder_layers_1_mlp_fc2.lora_up.weight",
"lora_te_text_model_encoder_layers_1_mlp_fc2.alpha",
"lora_te_text_model_encoder_layers_2_self_attn_k_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_2_self_attn_k_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_2_self_attn_k_proj.alpha",
"lora_te_text_model_encoder_layers_2_self_attn_v_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_2_self_attn_v_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_2_self_attn_v_proj.alpha",
"lora_te_text_model_encoder_layers_2_self_attn_q_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_2_self_attn_q_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_2_self_attn_q_proj.alpha",
"lora_te_text_model_encoder_layers_2_self_attn_out_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_2_self_attn_out_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_2_self_attn_out_proj.alpha",
"lora_te_text_model_encoder_layers_2_mlp_fc1.lora_down.weight",
"lora_te_text_model_encoder_layers_2_mlp_fc1.lora_up.weight",
"lora_te_text_model_encoder_layers_2_mlp_fc1.alpha",
"lora_te_text_model_encoder_layers_2_mlp_fc2.lora_down.weight",
"lora_te_text_model_encoder_layers_2_mlp_fc2.lora_up.weight",
"lora_te_text_model_encoder_layers_2_mlp_fc2.alpha",
"lora_te_text_model_encoder_layers_3_self_attn_k_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_3_self_attn_k_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_3_self_attn_k_proj.alpha",
"lora_te_text_model_encoder_layers_3_self_attn_v_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_3_self_attn_v_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_3_self_attn_v_proj.alpha",
"lora_te_text_model_encoder_layers_3_self_attn_q_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_3_self_attn_q_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_3_self_attn_q_proj.alpha",
"lora_te_text_model_encoder_layers_3_self_attn_out_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_3_self_attn_out_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_3_self_attn_out_proj.alpha",
"lora_te_text_model_encoder_layers_3_mlp_fc1.lora_down.weight",
"lora_te_text_model_encoder_layers_3_mlp_fc1.lora_up.weight",
"lora_te_text_model_encoder_layers_3_mlp_fc1.alpha",
"lora_te_text_model_encoder_layers_3_mlp_fc2.lora_down.weight",
"lora_te_text_model_encoder_layers_3_mlp_fc2.lora_up.weight",
"lora_te_text_model_encoder_layers_3_mlp_fc2.alpha",
"lora_te_text_model_encoder_layers_4_self_attn_k_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_4_self_attn_k_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_4_self_attn_k_proj.alpha",
"lora_te_text_model_encoder_layers_4_self_attn_v_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_4_self_attn_v_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_4_self_attn_v_proj.alpha",
"lora_te_text_model_encoder_layers_4_self_attn_q_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_4_self_attn_q_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_4_self_attn_q_proj.alpha",
"lora_te_text_model_encoder_layers_4_self_attn_out_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_4_self_attn_out_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_4_self_attn_out_proj.alpha",
"lora_te_text_model_encoder_layers_4_mlp_fc1.lora_down.weight",
"lora_te_text_model_encoder_layers_4_mlp_fc1.lora_up.weight",
"lora_te_text_model_encoder_layers_4_mlp_fc1.alpha",
"lora_te_text_model_encoder_layers_4_mlp_fc2.lora_down.weight",
"lora_te_text_model_encoder_layers_4_mlp_fc2.lora_up.weight",
"lora_te_text_model_encoder_layers_4_mlp_fc2.alpha",
"lora_te_text_model_encoder_layers_5_self_attn_k_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_5_self_attn_k_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_5_self_attn_k_proj.alpha",
"lora_te_text_model_encoder_layers_5_self_attn_v_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_5_self_attn_v_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_5_self_attn_v_proj.alpha",
"lora_te_text_model_encoder_layers_5_self_attn_q_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_5_self_attn_q_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_5_self_attn_q_proj.alpha",
"lora_te_text_model_encoder_layers_5_self_attn_out_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_5_self_attn_out_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_5_self_attn_out_proj.alpha",
"lora_te_text_model_encoder_layers_5_mlp_fc1.lora_down.weight",
"lora_te_text_model_encoder_layers_5_mlp_fc1.lora_up.weight",
"lora_te_text_model_encoder_layers_5_mlp_fc1.alpha",
"lora_te_text_model_encoder_layers_5_mlp_fc2.lora_down.weight",
"lora_te_text_model_encoder_layers_5_mlp_fc2.lora_up.weight",
"lora_te_text_model_encoder_layers_5_mlp_fc2.alpha",
"lora_te_text_model_encoder_layers_6_self_attn_k_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_6_self_attn_k_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_6_self_attn_k_proj.alpha",
"lora_te_text_model_encoder_layers_6_self_attn_v_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_6_self_attn_v_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_6_self_attn_v_proj.alpha",
"lora_te_text_model_encoder_layers_6_self_attn_q_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_6_self_attn_q_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_6_self_attn_q_proj.alpha",
"lora_te_text_model_encoder_layers_6_self_attn_out_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_6_self_attn_out_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_6_self_attn_out_proj.alpha",
"lora_te_text_model_encoder_layers_6_mlp_fc1.lora_down.weight",
"lora_te_text_model_encoder_layers_6_mlp_fc1.lora_up.weight",
"lora_te_text_model_encoder_layers_6_mlp_fc1.alpha",
"lora_te_text_model_encoder_layers_6_mlp_fc2.lora_down.weight",
"lora_te_text_model_encoder_layers_6_mlp_fc2.lora_up.weight",
"lora_te_text_model_encoder_layers_6_mlp_fc2.alpha",
"lora_te_text_model_encoder_layers_7_self_attn_k_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_7_self_attn_k_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_7_self_attn_k_proj.alpha",
"lora_te_text_model_encoder_layers_7_self_attn_v_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_7_self_attn_v_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_7_self_attn_v_proj.alpha",
"lora_te_text_model_encoder_layers_7_self_attn_q_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_7_self_attn_q_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_7_self_attn_q_proj.alpha",
"lora_te_text_model_encoder_layers_7_self_attn_out_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_7_self_attn_out_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_7_self_attn_out_proj.alpha",
"lora_te_text_model_encoder_layers_7_mlp_fc1.lora_down.weight",
"lora_te_text_model_encoder_layers_7_mlp_fc1.lora_up.weight",
"lora_te_text_model_encoder_layers_7_mlp_fc1.alpha",
"lora_te_text_model_encoder_layers_7_mlp_fc2.lora_down.weight",
"lora_te_text_model_encoder_layers_7_mlp_fc2.lora_up.weight",
"lora_te_text_model_encoder_layers_7_mlp_fc2.alpha",
"lora_te_text_model_encoder_layers_8_self_attn_k_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_8_self_attn_k_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_8_self_attn_k_proj.alpha",
"lora_te_text_model_encoder_layers_8_self_attn_v_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_8_self_attn_v_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_8_self_attn_v_proj.alpha",
"lora_te_text_model_encoder_layers_8_self_attn_q_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_8_self_attn_q_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_8_self_attn_q_proj.alpha",
"lora_te_text_model_encoder_layers_8_self_attn_out_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_8_self_attn_out_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_8_self_attn_out_proj.alpha",
"lora_te_text_model_encoder_layers_8_mlp_fc1.lora_down.weight",
"lora_te_text_model_encoder_layers_8_mlp_fc1.lora_up.weight",
"lora_te_text_model_encoder_layers_8_mlp_fc1.alpha",
"lora_te_text_model_encoder_layers_8_mlp_fc2.lora_down.weight",
"lora_te_text_model_encoder_layers_8_mlp_fc2.lora_up.weight",
"lora_te_text_model_encoder_layers_8_mlp_fc2.alpha",
"lora_te_text_model_encoder_layers_9_self_attn_k_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_9_self_attn_k_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_9_self_attn_k_proj.alpha",
"lora_te_text_model_encoder_layers_9_self_attn_v_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_9_self_attn_v_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_9_self_attn_v_proj.alpha",
"lora_te_text_model_encoder_layers_9_self_attn_q_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_9_self_attn_q_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_9_self_attn_q_proj.alpha",
"lora_te_text_model_encoder_layers_9_self_attn_out_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_9_self_attn_out_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_9_self_attn_out_proj.alpha",
"lora_te_text_model_encoder_layers_9_mlp_fc1.lora_down.weight",
"lora_te_text_model_encoder_layers_9_mlp_fc1.lora_up.weight",
"lora_te_text_model_encoder_layers_9_mlp_fc1.alpha",
"lora_te_text_model_encoder_layers_9_mlp_fc2.lora_down.weight",
"lora_te_text_model_encoder_layers_9_mlp_fc2.lora_up.weight",
"lora_te_text_model_encoder_layers_9_mlp_fc2.alpha",
"lora_te_text_model_encoder_layers_10_self_attn_k_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_10_self_attn_k_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_10_self_attn_k_proj.alpha",
"lora_te_text_model_encoder_layers_10_self_attn_v_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_10_self_attn_v_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_10_self_attn_v_proj.alpha",
"lora_te_text_model_encoder_layers_10_self_attn_q_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_10_self_attn_q_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_10_self_attn_q_proj.alpha",
"lora_te_text_model_encoder_layers_10_self_attn_out_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_10_self_attn_out_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_10_self_attn_out_proj.alpha",
"lora_te_text_model_encoder_layers_10_mlp_fc1.lora_down.weight",
"lora_te_text_model_encoder_layers_10_mlp_fc1.lora_up.weight",
"lora_te_text_model_encoder_layers_10_mlp_fc1.alpha",
"lora_te_text_model_encoder_layers_10_mlp_fc2.lora_down.weight",
"lora_te_text_model_encoder_layers_10_mlp_fc2.lora_up.weight",
"lora_te_text_model_encoder_layers_10_mlp_fc2.alpha",
"lora_te_text_model_encoder_layers_11_self_attn_k_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_11_self_attn_k_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_11_self_attn_k_proj.alpha",
"lora_te_text_model_encoder_layers_11_self_attn_v_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_11_self_attn_v_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_11_self_attn_v_proj.alpha",
"lora_te_text_model_encoder_layers_11_self_attn_q_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_11_self_attn_q_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_11_self_attn_q_proj.alpha",
"lora_te_text_model_encoder_layers_11_self_attn_out_proj.lora_down.weight",
"lora_te_text_model_encoder_layers_11_self_attn_out_proj.lora_up.weight",
"lora_te_text_model_encoder_layers_11_self_attn_out_proj.alpha",
"lora_te_text_model_encoder_layers_11_mlp_fc1.lora_down.weight",
"lora_te_text_model_encoder_layers_11_mlp_fc1.lora_up.weight",
"lora_te_text_model_encoder_layers_11_mlp_fc1.alpha",
"lora_te_text_model_encoder_layers_11_mlp_fc2.lora_down.weight",
"lora_te_text_model_encoder_layers_11_mlp_fc2.lora_up.weight",
"lora_te_text_model_encoder_layers_11_mlp_fc2.alpha",
"lora_unet_conv_in.lora_mid.weight",
"lora_unet_conv_in.lora_down.weight",
"lora_unet_conv_in.lora_up.weight",
"lora_unet_conv_in.alpha",
"lora_unet_time_embedding_linear_1.lora_down.weight",
"lora_unet_time_embedding_linear_1.lora_up.weight",
"lora_unet_time_embedding_linear_1.alpha",
"lora_unet_time_embedding_linear_2.lora_down.weight",
"lora_unet_time_embedding_linear_2.lora_up.weight",
"lora_unet_time_embedding_linear_2.alpha",
"lora_unet_down_blocks_0_attentions_0_proj_in.lora_down.weight",
"lora_unet_down_blocks_0_attentions_0_proj_in.lora_up.weight",
"lora_unet_down_blocks_0_attentions_0_proj_in.alpha",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_q.lora_down.weight",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_q.alpha",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_k.lora_down.weight",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_k.alpha",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_v.lora_down.weight",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_v.alpha",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_down.weight",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_down.weight",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_ff_net_2.lora_down.weight",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_ff_net_2.alpha",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_q.lora_down.weight",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_q.alpha",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_k.lora_down.weight",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_k.alpha",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_v.lora_down.weight",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_v.alpha",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_down.weight",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight",
"lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha",
"lora_unet_down_blocks_0_attentions_0_proj_out.lora_down.weight",
"lora_unet_down_blocks_0_attentions_0_proj_out.lora_up.weight",
"lora_unet_down_blocks_0_attentions_0_proj_out.alpha",
"lora_unet_down_blocks_0_attentions_1_proj_in.lora_down.weight",
"lora_unet_down_blocks_0_attentions_1_proj_in.lora_up.weight",
"lora_unet_down_blocks_0_attentions_1_proj_in.alpha",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_q.lora_down.weight",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_q.lora_up.weight",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_q.alpha",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_k.lora_down.weight",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_k.lora_up.weight",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_k.alpha",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_v.lora_down.weight",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_v.lora_up.weight",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_v.alpha",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_down.weight",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_up.weight",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_down.weight",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_up.weight",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_ff_net_2.lora_down.weight",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_ff_net_2.lora_up.weight",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_ff_net_2.alpha",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_q.lora_down.weight",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_q.lora_up.weight",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_q.alpha",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_k.lora_down.weight",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_k.lora_up.weight",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_k.alpha",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_v.lora_down.weight",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_v.lora_up.weight",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_v.alpha",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_down.weight",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_up.weight",
"lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha",
"lora_unet_down_blocks_0_attentions_1_proj_out.lora_down.weight",
"lora_unet_down_blocks_0_attentions_1_proj_out.lora_up.weight",
"lora_unet_down_blocks_0_attentions_1_proj_out.alpha",
"lora_unet_down_blocks_0_resnets_0_conv1.lora_mid.weight",
"lora_unet_down_blocks_0_resnets_0_conv1.lora_down.weight",
"lora_unet_down_blocks_0_resnets_0_conv1.lora_up.weight",
"lora_unet_down_blocks_0_resnets_0_conv1.alpha",
"lora_unet_down_blocks_0_resnets_0_time_emb_proj.lora_down.weight",
"lora_unet_down_blocks_0_resnets_0_time_emb_proj.lora_up.weight",
"lora_unet_down_blocks_0_resnets_0_time_emb_proj.alpha",
"lora_unet_down_blocks_0_resnets_0_conv2.lora_mid.weight",
"lora_unet_down_blocks_0_resnets_0_conv2.lora_down.weight",
"lora_unet_down_blocks_0_resnets_0_conv2.lora_up.weight",
"lora_unet_down_blocks_0_resnets_0_conv2.alpha",
"lora_unet_down_blocks_0_resnets_1_conv1.lora_mid.weight",
"lora_unet_down_blocks_0_resnets_1_conv1.lora_down.weight",
"lora_unet_down_blocks_0_resnets_1_conv1.lora_up.weight",
"lora_unet_down_blocks_0_resnets_1_conv1.alpha",
"lora_unet_down_blocks_0_resnets_1_time_emb_proj.lora_down.weight",
"lora_unet_down_blocks_0_resnets_1_time_emb_proj.lora_up.weight",
"lora_unet_down_blocks_0_resnets_1_time_emb_proj.alpha",
"lora_unet_down_blocks_0_resnets_1_conv2.lora_mid.weight",
"lora_unet_down_blocks_0_resnets_1_conv2.lora_down.weight",
"lora_unet_down_blocks_0_resnets_1_conv2.lora_up.weight",
"lora_unet_down_blocks_0_resnets_1_conv2.alpha",
"lora_unet_down_blocks_0_downsamplers_0_conv.lora_mid.weight",
"lora_unet_down_blocks_0_downsamplers_0_conv.lora_down.weight",
"lora_unet_down_blocks_0_downsamplers_0_conv.lora_up.weight",
"lora_unet_down_blocks_0_downsamplers_0_conv.alpha",
"lora_unet_down_blocks_1_attentions_0_proj_in.lora_down.weight",
"lora_unet_down_blocks_1_attentions_0_proj_in.lora_up.weight",
"lora_unet_down_blocks_1_attentions_0_proj_in.alpha",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_q.lora_down.weight",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_q.alpha",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_k.lora_down.weight",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_k.alpha",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_v.lora_down.weight",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_v.alpha",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_down.weight",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_down.weight",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_ff_net_2.lora_down.weight",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_ff_net_2.alpha",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_q.lora_down.weight",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_q.alpha",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_k.lora_down.weight",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_k.alpha",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_v.lora_down.weight",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_v.alpha",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_down.weight",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight",
"lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha",
"lora_unet_down_blocks_1_attentions_0_proj_out.lora_down.weight",
"lora_unet_down_blocks_1_attentions_0_proj_out.lora_up.weight",
"lora_unet_down_blocks_1_attentions_0_proj_out.alpha",
"lora_unet_down_blocks_1_attentions_1_proj_in.lora_down.weight",
"lora_unet_down_blocks_1_attentions_1_proj_in.lora_up.weight",
"lora_unet_down_blocks_1_attentions_1_proj_in.alpha",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_q.lora_down.weight",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_q.lora_up.weight",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_q.alpha",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_k.lora_down.weight",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_k.lora_up.weight",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_k.alpha",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_v.lora_down.weight",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_v.lora_up.weight",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_v.alpha",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_down.weight",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_up.weight",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_down.weight",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_up.weight",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_ff_net_2.lora_down.weight",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_ff_net_2.lora_up.weight",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_ff_net_2.alpha",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_q.lora_down.weight",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_q.lora_up.weight",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_q.alpha",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_k.lora_down.weight",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_k.lora_up.weight",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_k.alpha",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_v.lora_down.weight",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_v.lora_up.weight",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_v.alpha",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_down.weight",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_up.weight",
"lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha",
"lora_unet_down_blocks_1_attentions_1_proj_out.lora_down.weight",
"lora_unet_down_blocks_1_attentions_1_proj_out.lora_up.weight",
"lora_unet_down_blocks_1_attentions_1_proj_out.alpha",
"lora_unet_down_blocks_1_resnets_0_conv1.lora_mid.weight",
"lora_unet_down_blocks_1_resnets_0_conv1.lora_down.weight",
"lora_unet_down_blocks_1_resnets_0_conv1.lora_up.weight",
"lora_unet_down_blocks_1_resnets_0_conv1.alpha",
"lora_unet_down_blocks_1_resnets_0_time_emb_proj.lora_down.weight",
"lora_unet_down_blocks_1_resnets_0_time_emb_proj.lora_up.weight",
"lora_unet_down_blocks_1_resnets_0_time_emb_proj.alpha",
"lora_unet_down_blocks_1_resnets_0_conv2.lora_mid.weight",
"lora_unet_down_blocks_1_resnets_0_conv2.lora_down.weight",
"lora_unet_down_blocks_1_resnets_0_conv2.lora_up.weight",
"lora_unet_down_blocks_1_resnets_0_conv2.alpha",
"lora_unet_down_blocks_1_resnets_0_conv_shortcut.lora_down.weight",
"lora_unet_down_blocks_1_resnets_0_conv_shortcut.lora_up.weight",
"lora_unet_down_blocks_1_resnets_0_conv_shortcut.alpha",
"lora_unet_down_blocks_1_resnets_1_conv1.lora_mid.weight",
"lora_unet_down_blocks_1_resnets_1_conv1.lora_down.weight",
"lora_unet_down_blocks_1_resnets_1_conv1.lora_up.weight",
"lora_unet_down_blocks_1_resnets_1_conv1.alpha",
"lora_unet_down_blocks_1_resnets_1_time_emb_proj.lora_down.weight",
"lora_unet_down_blocks_1_resnets_1_time_emb_proj.lora_up.weight",
"lora_unet_down_blocks_1_resnets_1_time_emb_proj.alpha",
"lora_unet_down_blocks_1_resnets_1_conv2.lora_mid.weight",
"lora_unet_down_blocks_1_resnets_1_conv2.lora_down.weight",
"lora_unet_down_blocks_1_resnets_1_conv2.lora_up.weight",
"lora_unet_down_blocks_1_resnets_1_conv2.alpha",
"lora_unet_down_blocks_1_downsamplers_0_conv.lora_mid.weight",
"lora_unet_down_blocks_1_downsamplers_0_conv.lora_down.weight",
"lora_unet_down_blocks_1_downsamplers_0_conv.lora_up.weight",
"lora_unet_down_blocks_1_downsamplers_0_conv.alpha",
"lora_unet_down_blocks_2_attentions_0_proj_in.lora_down.weight",
"lora_unet_down_blocks_2_attentions_0_proj_in.lora_up.weight",
"lora_unet_down_blocks_2_attentions_0_proj_in.alpha",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.lora_down.weight",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.alpha",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.lora_down.weight",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.alpha",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.lora_down.weight",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.alpha",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_down.weight",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_down.weight",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.lora_down.weight",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.alpha",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.lora_down.weight",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.alpha",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.lora_down.weight",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.alpha",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.lora_down.weight",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.alpha",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_down.weight",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight",
"lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha",
"lora_unet_down_blocks_2_attentions_0_proj_out.lora_down.weight",
"lora_unet_down_blocks_2_attentions_0_proj_out.lora_up.weight",
"lora_unet_down_blocks_2_attentions_0_proj_out.alpha",
"lora_unet_down_blocks_2_attentions_1_proj_in.lora_down.weight",
"lora_unet_down_blocks_2_attentions_1_proj_in.lora_up.weight",
"lora_unet_down_blocks_2_attentions_1_proj_in.alpha",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.lora_down.weight",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.lora_up.weight",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.alpha",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.lora_down.weight",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.lora_up.weight",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.alpha",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.lora_down.weight",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.lora_up.weight",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.alpha",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_down.weight",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_up.weight",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_down.weight",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_up.weight",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.lora_down.weight",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.lora_up.weight",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.alpha",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.lora_down.weight",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.lora_up.weight",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.alpha",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.lora_down.weight",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.lora_up.weight",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.alpha",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.lora_down.weight",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.lora_up.weight",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.alpha",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_down.weight",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_up.weight",
"lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha",
"lora_unet_down_blocks_2_attentions_1_proj_out.lora_down.weight",
"lora_unet_down_blocks_2_attentions_1_proj_out.lora_up.weight",
"lora_unet_down_blocks_2_attentions_1_proj_out.alpha",
"lora_unet_down_blocks_2_resnets_0_conv1.lora_mid.weight",
"lora_unet_down_blocks_2_resnets_0_conv1.lora_down.weight",
"lora_unet_down_blocks_2_resnets_0_conv1.lora_up.weight",
"lora_unet_down_blocks_2_resnets_0_conv1.alpha",
"lora_unet_down_blocks_2_resnets_0_time_emb_proj.lora_down.weight",
"lora_unet_down_blocks_2_resnets_0_time_emb_proj.lora_up.weight",
"lora_unet_down_blocks_2_resnets_0_time_emb_proj.alpha",
"lora_unet_down_blocks_2_resnets_0_conv2.lora_mid.weight",
"lora_unet_down_blocks_2_resnets_0_conv2.lora_down.weight",
"lora_unet_down_blocks_2_resnets_0_conv2.lora_up.weight",
"lora_unet_down_blocks_2_resnets_0_conv2.alpha",
"lora_unet_down_blocks_2_resnets_0_conv_shortcut.lora_down.weight",
"lora_unet_down_blocks_2_resnets_0_conv_shortcut.lora_up.weight",
"lora_unet_down_blocks_2_resnets_0_conv_shortcut.alpha",
"lora_unet_down_blocks_2_resnets_1_conv1.lora_mid.weight",
"lora_unet_down_blocks_2_resnets_1_conv1.lora_down.weight",
"lora_unet_down_blocks_2_resnets_1_conv1.lora_up.weight",
"lora_unet_down_blocks_2_resnets_1_conv1.alpha",
"lora_unet_down_blocks_2_resnets_1_time_emb_proj.lora_down.weight",
"lora_unet_down_blocks_2_resnets_1_time_emb_proj.lora_up.weight",
"lora_unet_down_blocks_2_resnets_1_time_emb_proj.alpha",
"lora_unet_down_blocks_2_resnets_1_conv2.lora_mid.weight",
"lora_unet_down_blocks_2_resnets_1_conv2.lora_down.weight",
"lora_unet_down_blocks_2_resnets_1_conv2.lora_up.weight",
"lora_unet_down_blocks_2_resnets_1_conv2.alpha",
"lora_unet_down_blocks_2_downsamplers_0_conv.lora_mid.weight",
"lora_unet_down_blocks_2_downsamplers_0_conv.lora_down.weight",
"lora_unet_down_blocks_2_downsamplers_0_conv.lora_up.weight",
"lora_unet_down_blocks_2_downsamplers_0_conv.alpha",
"lora_unet_down_blocks_3_resnets_0_conv1.lora_mid.weight",
"lora_unet_down_blocks_3_resnets_0_conv1.lora_down.weight",
"lora_unet_down_blocks_3_resnets_0_conv1.lora_up.weight",
"lora_unet_down_blocks_3_resnets_0_conv1.alpha",
"lora_unet_down_blocks_3_resnets_0_time_emb_proj.lora_down.weight",
"lora_unet_down_blocks_3_resnets_0_time_emb_proj.lora_up.weight",
"lora_unet_down_blocks_3_resnets_0_time_emb_proj.alpha",
"lora_unet_down_blocks_3_resnets_0_conv2.lora_mid.weight",
"lora_unet_down_blocks_3_resnets_0_conv2.lora_down.weight",
"lora_unet_down_blocks_3_resnets_0_conv2.lora_up.weight",
"lora_unet_down_blocks_3_resnets_0_conv2.alpha",
"lora_unet_down_blocks_3_resnets_1_conv1.lora_mid.weight",
"lora_unet_down_blocks_3_resnets_1_conv1.lora_down.weight",
"lora_unet_down_blocks_3_resnets_1_conv1.lora_up.weight",
"lora_unet_down_blocks_3_resnets_1_conv1.alpha",
"lora_unet_down_blocks_3_resnets_1_time_emb_proj.lora_down.weight",
"lora_unet_down_blocks_3_resnets_1_time_emb_proj.lora_up.weight",
"lora_unet_down_blocks_3_resnets_1_time_emb_proj.alpha",
"lora_unet_down_blocks_3_resnets_1_conv2.lora_mid.weight",
"lora_unet_down_blocks_3_resnets_1_conv2.lora_down.weight",
"lora_unet_down_blocks_3_resnets_1_conv2.lora_up.weight",
"lora_unet_down_blocks_3_resnets_1_conv2.alpha",
"lora_unet_up_blocks_0_resnets_0_conv1.lora_mid.weight",
"lora_unet_up_blocks_0_resnets_0_conv1.lora_down.weight",
"lora_unet_up_blocks_0_resnets_0_conv1.lora_up.weight",
"lora_unet_up_blocks_0_resnets_0_conv1.alpha",
"lora_unet_up_blocks_0_resnets_0_time_emb_proj.lora_down.weight",
"lora_unet_up_blocks_0_resnets_0_time_emb_proj.lora_up.weight",
"lora_unet_up_blocks_0_resnets_0_time_emb_proj.alpha",
"lora_unet_up_blocks_0_resnets_0_conv2.lora_mid.weight",
"lora_unet_up_blocks_0_resnets_0_conv2.lora_down.weight",
"lora_unet_up_blocks_0_resnets_0_conv2.lora_up.weight",
"lora_unet_up_blocks_0_resnets_0_conv2.alpha",
"lora_unet_up_blocks_0_resnets_0_conv_shortcut.lora_down.weight",
"lora_unet_up_blocks_0_resnets_0_conv_shortcut.lora_up.weight",
"lora_unet_up_blocks_0_resnets_0_conv_shortcut.alpha",
"lora_unet_up_blocks_0_resnets_1_conv1.lora_mid.weight",
"lora_unet_up_blocks_0_resnets_1_conv1.lora_down.weight",
"lora_unet_up_blocks_0_resnets_1_conv1.lora_up.weight",
"lora_unet_up_blocks_0_resnets_1_conv1.alpha",
"lora_unet_up_blocks_0_resnets_1_time_emb_proj.lora_down.weight",
"lora_unet_up_blocks_0_resnets_1_time_emb_proj.lora_up.weight",
"lora_unet_up_blocks_0_resnets_1_time_emb_proj.alpha",
"lora_unet_up_blocks_0_resnets_1_conv2.lora_mid.weight",
"lora_unet_up_blocks_0_resnets_1_conv2.lora_down.weight",
"lora_unet_up_blocks_0_resnets_1_conv2.lora_up.weight",
"lora_unet_up_blocks_0_resnets_1_conv2.alpha",
"lora_unet_up_blocks_0_resnets_1_conv_shortcut.lora_down.weight",
"lora_unet_up_blocks_0_resnets_1_conv_shortcut.lora_up.weight",
"lora_unet_up_blocks_0_resnets_1_conv_shortcut.alpha",
"lora_unet_up_blocks_0_resnets_2_conv1.lora_mid.weight",
"lora_unet_up_blocks_0_resnets_2_conv1.lora_down.weight",
"lora_unet_up_blocks_0_resnets_2_conv1.lora_up.weight",
"lora_unet_up_blocks_0_resnets_2_conv1.alpha",
"lora_unet_up_blocks_0_resnets_2_time_emb_proj.lora_down.weight",
"lora_unet_up_blocks_0_resnets_2_time_emb_proj.lora_up.weight",
"lora_unet_up_blocks_0_resnets_2_time_emb_proj.alpha",
"lora_unet_up_blocks_0_resnets_2_conv2.lora_mid.weight",
"lora_unet_up_blocks_0_resnets_2_conv2.lora_down.weight",
"lora_unet_up_blocks_0_resnets_2_conv2.lora_up.weight",
"lora_unet_up_blocks_0_resnets_2_conv2.alpha",
"lora_unet_up_blocks_0_resnets_2_conv_shortcut.lora_down.weight",
"lora_unet_up_blocks_0_resnets_2_conv_shortcut.lora_up.weight",
"lora_unet_up_blocks_0_resnets_2_conv_shortcut.alpha",
"lora_unet_up_blocks_0_upsamplers_0_conv.lora_mid.weight",
"lora_unet_up_blocks_0_upsamplers_0_conv.lora_down.weight",
"lora_unet_up_blocks_0_upsamplers_0_conv.lora_up.weight",
"lora_unet_up_blocks_0_upsamplers_0_conv.alpha",
"lora_unet_up_blocks_1_attentions_0_proj_in.lora_down.weight",
"lora_unet_up_blocks_1_attentions_0_proj_in.lora_up.weight",
"lora_unet_up_blocks_1_attentions_0_proj_in.alpha",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_q.lora_down.weight",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_q.alpha",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_k.lora_down.weight",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_k.alpha",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_v.lora_down.weight",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_v.alpha",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_down.weight",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_down.weight",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_2.lora_down.weight",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_2.alpha",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_q.lora_down.weight",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_q.alpha",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_k.lora_down.weight",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_k.alpha",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_v.lora_down.weight",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_v.alpha",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_down.weight",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight",
"lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha",
"lora_unet_up_blocks_1_attentions_0_proj_out.lora_down.weight",
"lora_unet_up_blocks_1_attentions_0_proj_out.lora_up.weight",
"lora_unet_up_blocks_1_attentions_0_proj_out.alpha",
"lora_unet_up_blocks_1_attentions_1_proj_in.lora_down.weight",
"lora_unet_up_blocks_1_attentions_1_proj_in.lora_up.weight",
"lora_unet_up_blocks_1_attentions_1_proj_in.alpha",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_q.lora_down.weight",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_q.lora_up.weight",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_q.alpha",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_k.lora_down.weight",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_k.lora_up.weight",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_k.alpha",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_v.lora_down.weight",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_v.lora_up.weight",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_v.alpha",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_down.weight",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_up.weight",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_down.weight",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_up.weight",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_ff_net_2.lora_down.weight",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_ff_net_2.lora_up.weight",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_ff_net_2.alpha",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_q.lora_down.weight",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_q.lora_up.weight",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_q.alpha",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_k.lora_down.weight",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_k.lora_up.weight",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_k.alpha",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_v.lora_down.weight",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_v.lora_up.weight",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_v.alpha",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_down.weight",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_up.weight",
"lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha",
"lora_unet_up_blocks_1_attentions_1_proj_out.lora_down.weight",
"lora_unet_up_blocks_1_attentions_1_proj_out.lora_up.weight",
"lora_unet_up_blocks_1_attentions_1_proj_out.alpha",
"lora_unet_up_blocks_1_attentions_2_proj_in.lora_down.weight",
"lora_unet_up_blocks_1_attentions_2_proj_in.lora_up.weight",
"lora_unet_up_blocks_1_attentions_2_proj_in.alpha",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_q.lora_down.weight",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_q.lora_up.weight",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_q.alpha",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_k.lora_down.weight",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_k.lora_up.weight",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_k.alpha",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_v.lora_down.weight",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_v.lora_up.weight",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_v.alpha",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_out_0.lora_down.weight",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_out_0.lora_up.weight",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_out_0.alpha",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_ff_net_0_proj.lora_down.weight",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_ff_net_0_proj.lora_up.weight",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_ff_net_0_proj.alpha",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_ff_net_2.lora_down.weight",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_ff_net_2.lora_up.weight",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_ff_net_2.alpha",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_q.lora_down.weight",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_q.lora_up.weight",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_q.alpha",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_k.lora_down.weight",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_k.lora_up.weight",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_k.alpha",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_v.lora_down.weight",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_v.lora_up.weight",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_v.alpha",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_out_0.lora_down.weight",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_out_0.lora_up.weight",
"lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_out_0.alpha",
"lora_unet_up_blocks_1_attentions_2_proj_out.lora_down.weight",
"lora_unet_up_blocks_1_attentions_2_proj_out.lora_up.weight",
"lora_unet_up_blocks_1_attentions_2_proj_out.alpha",
"lora_unet_up_blocks_1_resnets_0_conv1.lora_mid.weight",
"lora_unet_up_blocks_1_resnets_0_conv1.lora_down.weight",
"lora_unet_up_blocks_1_resnets_0_conv1.lora_up.weight",
"lora_unet_up_blocks_1_resnets_0_conv1.alpha",
"lora_unet_up_blocks_1_resnets_0_time_emb_proj.lora_down.weight",
"lora_unet_up_blocks_1_resnets_0_time_emb_proj.lora_up.weight",
"lora_unet_up_blocks_1_resnets_0_time_emb_proj.alpha",
"lora_unet_up_blocks_1_resnets_0_conv2.lora_mid.weight",
"lora_unet_up_blocks_1_resnets_0_conv2.lora_down.weight",
"lora_unet_up_blocks_1_resnets_0_conv2.lora_up.weight",
"lora_unet_up_blocks_1_resnets_0_conv2.alpha",
"lora_unet_up_blocks_1_resnets_0_conv_shortcut.lora_down.weight",
"lora_unet_up_blocks_1_resnets_0_conv_shortcut.lora_up.weight",
"lora_unet_up_blocks_1_resnets_0_conv_shortcut.alpha",
"lora_unet_up_blocks_1_resnets_1_conv1.lora_mid.weight",
"lora_unet_up_blocks_1_resnets_1_conv1.lora_down.weight",
"lora_unet_up_blocks_1_resnets_1_conv1.lora_up.weight",
"lora_unet_up_blocks_1_resnets_1_conv1.alpha",
"lora_unet_up_blocks_1_resnets_1_time_emb_proj.lora_down.weight",
"lora_unet_up_blocks_1_resnets_1_time_emb_proj.lora_up.weight",
"lora_unet_up_blocks_1_resnets_1_time_emb_proj.alpha",
"lora_unet_up_blocks_1_resnets_1_conv2.lora_mid.weight",
"lora_unet_up_blocks_1_resnets_1_conv2.lora_down.weight",
"lora_unet_up_blocks_1_resnets_1_conv2.lora_up.weight",
"lora_unet_up_blocks_1_resnets_1_conv2.alpha",
"lora_unet_up_blocks_1_resnets_1_conv_shortcut.lora_down.weight",
"lora_unet_up_blocks_1_resnets_1_conv_shortcut.lora_up.weight",
"lora_unet_up_blocks_1_resnets_1_conv_shortcut.alpha",
"lora_unet_up_blocks_1_resnets_2_conv1.lora_mid.weight",
"lora_unet_up_blocks_1_resnets_2_conv1.lora_down.weight",
"lora_unet_up_blocks_1_resnets_2_conv1.lora_up.weight",
"lora_unet_up_blocks_1_resnets_2_conv1.alpha",
"lora_unet_up_blocks_1_resnets_2_time_emb_proj.lora_down.weight",
"lora_unet_up_blocks_1_resnets_2_time_emb_proj.lora_up.weight",
"lora_unet_up_blocks_1_resnets_2_time_emb_proj.alpha",
"lora_unet_up_blocks_1_resnets_2_conv2.lora_mid.weight",
"lora_unet_up_blocks_1_resnets_2_conv2.lora_down.weight",
"lora_unet_up_blocks_1_resnets_2_conv2.lora_up.weight",
"lora_unet_up_blocks_1_resnets_2_conv2.alpha",
"lora_unet_up_blocks_1_resnets_2_conv_shortcut.lora_down.weight",
"lora_unet_up_blocks_1_resnets_2_conv_shortcut.lora_up.weight",
"lora_unet_up_blocks_1_resnets_2_conv_shortcut.alpha",
"lora_unet_up_blocks_1_upsamplers_0_conv.lora_mid.weight",
"lora_unet_up_blocks_1_upsamplers_0_conv.lora_down.weight",
"lora_unet_up_blocks_1_upsamplers_0_conv.lora_up.weight",
"lora_unet_up_blocks_1_upsamplers_0_conv.alpha",
"lora_unet_up_blocks_2_attentions_0_proj_in.lora_down.weight",
"lora_unet_up_blocks_2_attentions_0_proj_in.lora_up.weight",
"lora_unet_up_blocks_2_attentions_0_proj_in.alpha",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.lora_down.weight",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.alpha",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.lora_down.weight",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.alpha",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.lora_down.weight",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.alpha",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_down.weight",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_down.weight",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.lora_down.weight",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.alpha",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.lora_down.weight",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.alpha",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.lora_down.weight",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.alpha",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.lora_down.weight",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.alpha",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_down.weight",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight",
"lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha",
"lora_unet_up_blocks_2_attentions_0_proj_out.lora_down.weight",
"lora_unet_up_blocks_2_attentions_0_proj_out.lora_up.weight",
"lora_unet_up_blocks_2_attentions_0_proj_out.alpha",
"lora_unet_up_blocks_2_attentions_1_proj_in.lora_down.weight",
"lora_unet_up_blocks_2_attentions_1_proj_in.lora_up.weight",
"lora_unet_up_blocks_2_attentions_1_proj_in.alpha",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.lora_down.weight",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.lora_up.weight",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.alpha",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.lora_down.weight",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.lora_up.weight",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.alpha",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.lora_down.weight",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.lora_up.weight",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.alpha",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_down.weight",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_up.weight",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_down.weight",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_up.weight",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.lora_down.weight",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.lora_up.weight",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.alpha",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.lora_down.weight",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.lora_up.weight",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.alpha",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.lora_down.weight",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.lora_up.weight",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.alpha",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.lora_down.weight",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.lora_up.weight",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.alpha",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_down.weight",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_up.weight",
"lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha",
"lora_unet_up_blocks_2_attentions_1_proj_out.lora_down.weight",
"lora_unet_up_blocks_2_attentions_1_proj_out.lora_up.weight",
"lora_unet_up_blocks_2_attentions_1_proj_out.alpha",
"lora_unet_up_blocks_2_attentions_2_proj_in.lora_down.weight",
"lora_unet_up_blocks_2_attentions_2_proj_in.lora_up.weight",
"lora_unet_up_blocks_2_attentions_2_proj_in.alpha",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_q.lora_down.weight",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_q.lora_up.weight",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_q.alpha",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_k.lora_down.weight",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_k.lora_up.weight",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_k.alpha",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_v.lora_down.weight",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_v.lora_up.weight",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_v.alpha",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_out_0.lora_down.weight",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_out_0.lora_up.weight",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_out_0.alpha",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_ff_net_0_proj.lora_down.weight",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_ff_net_0_proj.lora_up.weight",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_ff_net_0_proj.alpha",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_ff_net_2.lora_down.weight",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_ff_net_2.lora_up.weight",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_ff_net_2.alpha",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_q.lora_down.weight",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_q.lora_up.weight",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_q.alpha",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_k.lora_down.weight",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_k.lora_up.weight",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_k.alpha",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_v.lora_down.weight",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_v.lora_up.weight",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_v.alpha",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_out_0.lora_down.weight",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_out_0.lora_up.weight",
"lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_out_0.alpha",
"lora_unet_up_blocks_2_attentions_2_proj_out.lora_down.weight",
"lora_unet_up_blocks_2_attentions_2_proj_out.lora_up.weight",
"lora_unet_up_blocks_2_attentions_2_proj_out.alpha",
"lora_unet_up_blocks_2_resnets_0_conv1.lora_mid.weight",
"lora_unet_up_blocks_2_resnets_0_conv1.lora_down.weight",
"lora_unet_up_blocks_2_resnets_0_conv1.lora_up.weight",
"lora_unet_up_blocks_2_resnets_0_conv1.alpha",
"lora_unet_up_blocks_2_resnets_0_time_emb_proj.lora_down.weight",
"lora_unet_up_blocks_2_resnets_0_time_emb_proj.lora_up.weight",
"lora_unet_up_blocks_2_resnets_0_time_emb_proj.alpha",
"lora_unet_up_blocks_2_resnets_0_conv2.lora_mid.weight",
"lora_unet_up_blocks_2_resnets_0_conv2.lora_down.weight",
"lora_unet_up_blocks_2_resnets_0_conv2.lora_up.weight",
"lora_unet_up_blocks_2_resnets_0_conv2.alpha",
"lora_unet_up_blocks_2_resnets_0_conv_shortcut.lora_down.weight",
"lora_unet_up_blocks_2_resnets_0_conv_shortcut.lora_up.weight",
"lora_unet_up_blocks_2_resnets_0_conv_shortcut.alpha",
"lora_unet_up_blocks_2_resnets_1_conv1.lora_mid.weight",
"lora_unet_up_blocks_2_resnets_1_conv1.lora_down.weight",
"lora_unet_up_blocks_2_resnets_1_conv1.lora_up.weight",
"lora_unet_up_blocks_2_resnets_1_conv1.alpha",
"lora_unet_up_blocks_2_resnets_1_time_emb_proj.lora_down.weight",
"lora_unet_up_blocks_2_resnets_1_time_emb_proj.lora_up.weight",
"lora_unet_up_blocks_2_resnets_1_time_emb_proj.alpha",
"lora_unet_up_blocks_2_resnets_1_conv2.lora_mid.weight",
"lora_unet_up_blocks_2_resnets_1_conv2.lora_down.weight",
"lora_unet_up_blocks_2_resnets_1_conv2.lora_up.weight",
"lora_unet_up_blocks_2_resnets_1_conv2.alpha",
"lora_unet_up_blocks_2_resnets_1_conv_shortcut.lora_down.weight",
"lora_unet_up_blocks_2_resnets_1_conv_shortcut.lora_up.weight",
"lora_unet_up_blocks_2_resnets_1_conv_shortcut.alpha",
"lora_unet_up_blocks_2_resnets_2_conv1.lora_mid.weight",
"lora_unet_up_blocks_2_resnets_2_conv1.lora_down.weight",
"lora_unet_up_blocks_2_resnets_2_conv1.lora_up.weight",
"lora_unet_up_blocks_2_resnets_2_conv1.alpha",
"lora_unet_up_blocks_2_resnets_2_time_emb_proj.lora_down.weight",
"lora_unet_up_blocks_2_resnets_2_time_emb_proj.lora_up.weight",
"lora_unet_up_blocks_2_resnets_2_time_emb_proj.alpha",
"lora_unet_up_blocks_2_resnets_2_conv2.lora_mid.weight",
"lora_unet_up_blocks_2_resnets_2_conv2.lora_down.weight",
"lora_unet_up_blocks_2_resnets_2_conv2.lora_up.weight",
"lora_unet_up_blocks_2_resnets_2_conv2.alpha",
"lora_unet_up_blocks_2_resnets_2_conv_shortcut.lora_down.weight",
"lora_unet_up_blocks_2_resnets_2_conv_shortcut.lora_up.weight",
"lora_unet_up_blocks_2_resnets_2_conv_shortcut.alpha",
"lora_unet_up_blocks_2_upsamplers_0_conv.lora_mid.weight",
"lora_unet_up_blocks_2_upsamplers_0_conv.lora_down.weight",
"lora_unet_up_blocks_2_upsamplers_0_conv.lora_up.weight",
"lora_unet_up_blocks_2_upsamplers_0_conv.alpha",
"lora_unet_up_blocks_3_attentions_0_proj_in.lora_down.weight",
"lora_unet_up_blocks_3_attentions_0_proj_in.lora_up.weight",
"lora_unet_up_blocks_3_attentions_0_proj_in.alpha",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_q.lora_down.weight",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_q.alpha",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_k.lora_down.weight",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_k.alpha",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_v.lora_down.weight",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_v.alpha",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_down.weight",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_down.weight",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_ff_net_2.lora_down.weight",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_ff_net_2.alpha",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_q.lora_down.weight",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_q.alpha",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_k.lora_down.weight",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_k.alpha",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_v.lora_down.weight",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_v.alpha",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_down.weight",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight",
"lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha",
"lora_unet_up_blocks_3_attentions_0_proj_out.lora_down.weight",
"lora_unet_up_blocks_3_attentions_0_proj_out.lora_up.weight",
"lora_unet_up_blocks_3_attentions_0_proj_out.alpha",
"lora_unet_up_blocks_3_attentions_1_proj_in.lora_down.weight",
"lora_unet_up_blocks_3_attentions_1_proj_in.lora_up.weight",
"lora_unet_up_blocks_3_attentions_1_proj_in.alpha",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_q.lora_down.weight",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_q.lora_up.weight",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_q.alpha",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_k.lora_down.weight",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_k.lora_up.weight",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_k.alpha",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_v.lora_down.weight",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_v.lora_up.weight",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_v.alpha",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_down.weight",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_out_0.lora_up.weight",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_down.weight",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_ff_net_0_proj.lora_up.weight",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_ff_net_2.lora_down.weight",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_ff_net_2.lora_up.weight",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_ff_net_2.alpha",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_q.lora_down.weight",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_q.lora_up.weight",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_q.alpha",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_k.lora_down.weight",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_k.lora_up.weight",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_k.alpha",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_v.lora_down.weight",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_v.lora_up.weight",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_v.alpha",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_down.weight",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_out_0.lora_up.weight",
"lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha",
"lora_unet_up_blocks_3_attentions_1_proj_out.lora_down.weight",
"lora_unet_up_blocks_3_attentions_1_proj_out.lora_up.weight",
"lora_unet_up_blocks_3_attentions_1_proj_out.alpha",
"lora_unet_up_blocks_3_attentions_2_proj_in.lora_down.weight",
"lora_unet_up_blocks_3_attentions_2_proj_in.lora_up.weight",
"lora_unet_up_blocks_3_attentions_2_proj_in.alpha",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_q.lora_down.weight",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_q.lora_up.weight",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_q.alpha",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_k.lora_down.weight",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_k.lora_up.weight",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_k.alpha",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_v.lora_down.weight",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_v.lora_up.weight",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_v.alpha",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_out_0.lora_down.weight",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_out_0.lora_up.weight",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_out_0.alpha",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_ff_net_0_proj.lora_down.weight",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_ff_net_0_proj.lora_up.weight",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_ff_net_0_proj.alpha",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_ff_net_2.lora_down.weight",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_ff_net_2.lora_up.weight",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_ff_net_2.alpha",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_q.lora_down.weight",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_q.lora_up.weight",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_q.alpha",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_k.lora_down.weight",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_k.lora_up.weight",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_k.alpha",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_v.lora_down.weight",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_v.lora_up.weight",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_v.alpha",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_out_0.lora_down.weight",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_out_0.lora_up.weight",
"lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_out_0.alpha",
"lora_unet_up_blocks_3_attentions_2_proj_out.lora_down.weight",
"lora_unet_up_blocks_3_attentions_2_proj_out.lora_up.weight",
"lora_unet_up_blocks_3_attentions_2_proj_out.alpha",
"lora_unet_up_blocks_3_resnets_0_conv1.lora_mid.weight",
"lora_unet_up_blocks_3_resnets_0_conv1.lora_down.weight",
"lora_unet_up_blocks_3_resnets_0_conv1.lora_up.weight",
"lora_unet_up_blocks_3_resnets_0_conv1.alpha",
"lora_unet_up_blocks_3_resnets_0_time_emb_proj.lora_down.weight",
"lora_unet_up_blocks_3_resnets_0_time_emb_proj.lora_up.weight",
"lora_unet_up_blocks_3_resnets_0_time_emb_proj.alpha",
"lora_unet_up_blocks_3_resnets_0_conv2.lora_mid.weight",
"lora_unet_up_blocks_3_resnets_0_conv2.lora_down.weight",
"lora_unet_up_blocks_3_resnets_0_conv2.lora_up.weight",
"lora_unet_up_blocks_3_resnets_0_conv2.alpha",
"lora_unet_up_blocks_3_resnets_0_conv_shortcut.lora_down.weight",
"lora_unet_up_blocks_3_resnets_0_conv_shortcut.lora_up.weight",
"lora_unet_up_blocks_3_resnets_0_conv_shortcut.alpha",
"lora_unet_up_blocks_3_resnets_1_conv1.lora_mid.weight",
"lora_unet_up_blocks_3_resnets_1_conv1.lora_down.weight",
"lora_unet_up_blocks_3_resnets_1_conv1.lora_up.weight",
"lora_unet_up_blocks_3_resnets_1_conv1.alpha",
"lora_unet_up_blocks_3_resnets_1_time_emb_proj.lora_down.weight",
"lora_unet_up_blocks_3_resnets_1_time_emb_proj.lora_up.weight",
"lora_unet_up_blocks_3_resnets_1_time_emb_proj.alpha",
"lora_unet_up_blocks_3_resnets_1_conv2.lora_mid.weight",
"lora_unet_up_blocks_3_resnets_1_conv2.lora_down.weight",
"lora_unet_up_blocks_3_resnets_1_conv2.lora_up.weight",
"lora_unet_up_blocks_3_resnets_1_conv2.alpha",
"lora_unet_up_blocks_3_resnets_1_conv_shortcut.lora_down.weight",
"lora_unet_up_blocks_3_resnets_1_conv_shortcut.lora_up.weight",
"lora_unet_up_blocks_3_resnets_1_conv_shortcut.alpha",
"lora_unet_up_blocks_3_resnets_2_conv1.lora_mid.weight",
"lora_unet_up_blocks_3_resnets_2_conv1.lora_down.weight",
"lora_unet_up_blocks_3_resnets_2_conv1.lora_up.weight",
"lora_unet_up_blocks_3_resnets_2_conv1.alpha",
"lora_unet_up_blocks_3_resnets_2_time_emb_proj.lora_down.weight",
"lora_unet_up_blocks_3_resnets_2_time_emb_proj.lora_up.weight",
"lora_unet_up_blocks_3_resnets_2_time_emb_proj.alpha",
"lora_unet_up_blocks_3_resnets_2_conv2.lora_mid.weight",
"lora_unet_up_blocks_3_resnets_2_conv2.lora_down.weight",
"lora_unet_up_blocks_3_resnets_2_conv2.lora_up.weight",
"lora_unet_up_blocks_3_resnets_2_conv2.alpha",
"lora_unet_up_blocks_3_resnets_2_conv_shortcut.lora_down.weight",
"lora_unet_up_blocks_3_resnets_2_conv_shortcut.lora_up.weight",
"lora_unet_up_blocks_3_resnets_2_conv_shortcut.alpha",
"lora_unet_mid_block_attentions_0_proj_in.lora_down.weight",
"lora_unet_mid_block_attentions_0_proj_in.lora_up.weight",
"lora_unet_mid_block_attentions_0_proj_in.alpha",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_q.lora_down.weight",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_q.lora_up.weight",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_q.alpha",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_k.lora_down.weight",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_k.lora_up.weight",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_k.alpha",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_v.lora_down.weight",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_v.lora_up.weight",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_v.alpha",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_down.weight",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_out_0.lora_up.weight",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_down.weight",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_0_proj.lora_up.weight",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_2.lora_down.weight",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_2.lora_up.weight",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_2.alpha",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_q.lora_down.weight",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_q.lora_up.weight",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_q.alpha",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_k.lora_down.weight",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_k.lora_up.weight",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_k.alpha",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_v.lora_down.weight",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_v.lora_up.weight",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_v.alpha",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_down.weight",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_out_0.lora_up.weight",
"lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha",
"lora_unet_mid_block_attentions_0_proj_out.lora_down.weight",
"lora_unet_mid_block_attentions_0_proj_out.lora_up.weight",
"lora_unet_mid_block_attentions_0_proj_out.alpha",
"lora_unet_mid_block_resnets_0_conv1.lora_mid.weight",
"lora_unet_mid_block_resnets_0_conv1.lora_down.weight",
"lora_unet_mid_block_resnets_0_conv1.lora_up.weight",
"lora_unet_mid_block_resnets_0_conv1.alpha",
"lora_unet_mid_block_resnets_0_time_emb_proj.lora_down.weight",
"lora_unet_mid_block_resnets_0_time_emb_proj.lora_up.weight",
"lora_unet_mid_block_resnets_0_time_emb_proj.alpha",
"lora_unet_mid_block_resnets_0_conv2.lora_mid.weight",
"lora_unet_mid_block_resnets_0_conv2.lora_down.weight",
"lora_unet_mid_block_resnets_0_conv2.lora_up.weight",
"lora_unet_mid_block_resnets_0_conv2.alpha",
"lora_unet_mid_block_resnets_1_conv1.lora_mid.weight",
"lora_unet_mid_block_resnets_1_conv1.lora_down.weight",
"lora_unet_mid_block_resnets_1_conv1.lora_up.weight",
"lora_unet_mid_block_resnets_1_conv1.alpha",
"lora_unet_mid_block_resnets_1_time_emb_proj.lora_down.weight",
"lora_unet_mid_block_resnets_1_time_emb_proj.lora_up.weight",
"lora_unet_mid_block_resnets_1_time_emb_proj.alpha",
"lora_unet_mid_block_resnets_1_conv2.lora_mid.weight",
"lora_unet_mid_block_resnets_1_conv2.lora_down.weight",
"lora_unet_mid_block_resnets_1_conv2.lora_up.weight",
"lora_unet_mid_block_resnets_1_conv2.alpha",
"lora_unet_conv_out.lora_mid.weight",
"lora_unet_conv_out.lora_down.weight",
"lora_unet_conv_out.lora_up.weight",
"lora_unet_conv_out.alpha",
]
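# all_lora_keys above enumerates kohya-style LoRA key names ("lora_te_*" / "lora_unet_*"
# with lora_down / lora_up / alpha) for the SD1.5 text encoder and UNet in module-traversal
# order. The conversion below only uses the UNet attention entries and assumes they appear
# in the same order as the numeric indices ("0", "1", ...) used by the IP-Adapter state
# dict; if that assumption does not hold, the weights end up attached to the wrong layers.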
# Build a map from the IP-Adapter's numeric module index to its LoRA keys
# (the "_ip." entries, the image cross-attention projections, are skipped here).
checkpoint = torch.load(sys.argv[1], map_location=torch.device('cpu'))
LORA_OUTPUT = {}
translate = {}
for key in checkpoint["ip_adapter"].keys():
    if "_ip." not in key:
        main_key = key.split(".")[0]
        index = int(main_key)
        if index not in translate:
            translate[index] = []
        translate[index].append(key)
# Walk the UNet attention entries of all_lora_keys in order; every time the attention
# module changes, move on to the next numeric index in the checkpoint and copy the
# matching lora_down / lora_up weights for the q/k/v/out projections.
index = -1
last_key = None
for key in all_lora_keys:
    if "unet" in key and ("_to_q" in key or "_to_k" in key or "_to_v" in key or "_to_out" in key) and "alpha" not in key:
        main_key = key.split(".")[0].replace("_to_q", "").replace("_to_k", "").replace("_to_v", "").replace("_to_out_0", "")
        if main_key != last_key:
            last_key = main_key
            index += 1
        for found_key in translate[index]:
            # Match on the bare "to_q"/"to_k"/"to_v"/"to_out" substrings so adapter keys
            # such as "3.to_q_lora.down.weight" (no leading underscore) are also found.
            for lora_name, adapter_name in (
                ("_to_out_0", "to_out"),
                ("_to_q", "to_q"),
                ("_to_k", "to_k"),
                ("_to_v", "to_v"),
            ):
                if lora_name in key and adapter_name in found_key:
                    if "down.weight" in key and "down.weight" in found_key:
                        LORA_OUTPUT[key] = checkpoint["ip_adapter"][found_key].clone()
                        print(found_key, " >>>> ", key)
                    if "up.weight" in key and "up.weight" in found_key:
                        LORA_OUTPUT[key] = checkpoint["ip_adapter"][found_key].clone()
                        print(found_key, " >>>> ", key)
# Write the collected weights out as a regular LoRA safetensors file.
from safetensors.torch import save_file

save_file(LORA_OUTPUT, "faceid.safetensors")
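# Usage (assumed invocation; the script name is illustrative, the output name is hardcoded above):
#   python convert_faceid_to_lora.py ip-adapter-faceid_sd15.bin
#
# Optional sanity check, a minimal sketch: the numeric indices in the checkpoint should line
# up one-to-one with the UNet attention modules enumerated in all_lora_keys, so a count
# mismatch is a strong hint that the assumed ordering is wrong.
expected_modules = set()
for key in all_lora_keys:
    if "unet" in key and ("_to_q" in key or "_to_k" in key or "_to_v" in key or "_to_out" in key) and "alpha" not in key:
        expected_modules.add(key.split(".")[0].replace("_to_q", "").replace("_to_k", "").replace("_to_v", "").replace("_to_out_0", ""))
print(len(expected_modules), "UNet attention modules enumerated,", len(translate), "indices in the checkpoint")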