Skip to content

Instantly share code, notes, and snippets.

@takuma104
Created June 22, 2023 15:00
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save takuma104/dcf4626fe2b0564d02c6edd4e9fcb616 to your computer and use it in GitHub Desktop.
import torch
import sys
from safetensors.torch import load_file
def collect_alpha_info(state_dict):
    """Collect LoRA alpha metadata from a checkpoint state dict.

    For every key containing ``lora_down``, look up the sibling
    ``<module>.alpha`` scalar and compute the effective scale
    ``alpha / lora_dim``.

    Parameters
    ----------
    state_dict : mapping of str -> tensor
        LoRA weights keyed by kohya-style names
        (``<module>.lora_down.weight``, ``<module>.alpha``, ...).

    Returns
    -------
    list of tuple
        ``(alpha_key, alpha, lora_dim, alpha / lora_dim)`` in
        state-dict iteration order.  Modules without a matching
        ``.alpha`` entry are skipped.
    """
    rows = []
    for key, value in state_dict.items():
        if "lora_down" not in key:
            continue
        # Module name is everything before the first dot; the first
        # dimension of the down-projection weight is taken as the LoRA
        # rank (kohya convention -- presumably holds for conv LoRAs too,
        # TODO confirm).
        module_name = key.split(".")[0]
        lora_dim = value.size()[0]
        alpha_key = module_name + ".alpha"
        if alpha_key in state_dict:
            alpha = state_dict[alpha_key].item()
            rows.append((alpha_key, alpha, lora_dim, alpha / lora_dim))
    return rows


if __name__ == "__main__":
    # Usage: python script.py <lora.safetensors>
    state_dict = load_file(sys.argv[1])
    for row in collect_alpha_info(state_dict):
        # Same space-separated format as the original inline print.
        print(*row)
@takuma104
Copy link
Author

11701_26660_msw_locon-t2.safetensors

lora_te_text_model_encoder_layers_0_mlp_fc1.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_0_mlp_fc2.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_0_self_attn_k_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_0_self_attn_out_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_0_self_attn_q_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_0_self_attn_v_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_10_mlp_fc1.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_10_mlp_fc2.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_10_self_attn_k_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_10_self_attn_out_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_10_self_attn_q_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_10_self_attn_v_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_11_mlp_fc1.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_11_mlp_fc2.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_11_self_attn_k_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_11_self_attn_out_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_11_self_attn_q_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_11_self_attn_v_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_1_mlp_fc1.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_1_mlp_fc2.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_1_self_attn_k_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_1_self_attn_out_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_1_self_attn_q_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_1_self_attn_v_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_2_mlp_fc1.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_2_mlp_fc2.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_2_self_attn_k_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_2_self_attn_out_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_2_self_attn_q_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_2_self_attn_v_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_3_mlp_fc1.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_3_mlp_fc2.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_3_self_attn_k_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_3_self_attn_out_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_3_self_attn_q_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_3_self_attn_v_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_4_mlp_fc1.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_4_mlp_fc2.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_4_self_attn_k_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_4_self_attn_out_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_4_self_attn_q_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_4_self_attn_v_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_5_mlp_fc1.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_5_mlp_fc2.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_5_self_attn_k_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_5_self_attn_out_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_5_self_attn_q_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_5_self_attn_v_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_6_mlp_fc1.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_6_mlp_fc2.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_6_self_attn_k_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_6_self_attn_out_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_6_self_attn_q_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_6_self_attn_v_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_7_mlp_fc1.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_7_mlp_fc2.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_7_self_attn_k_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_7_self_attn_out_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_7_self_attn_q_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_7_self_attn_v_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_8_mlp_fc1.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_8_mlp_fc2.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_8_self_attn_k_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_8_self_attn_out_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_8_self_attn_q_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_8_self_attn_v_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_9_mlp_fc1.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_9_mlp_fc2.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_9_self_attn_k_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_9_self_attn_out_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_9_self_attn_q_proj.alpha 4.0 8 0.5
lora_te_text_model_encoder_layers_9_self_attn_v_proj.alpha 4.0 8 0.5
lora_unet_down_blocks_0_attentions_0_proj_in.alpha 4.0 8 0.5
lora_unet_down_blocks_0_attentions_0_proj_out.alpha 4.0 8 0.5
lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_k.alpha 4.0 8 0.5
lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha 4.0 8 0.5
lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_q.alpha 4.0 8 0.5
lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_v.alpha 4.0 8 0.5
lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_k.alpha 4.0 8 0.5
lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha 4.0 8 0.5
lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_q.alpha 4.0 8 0.5
lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_v.alpha 4.0 8 0.5
lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha 4.0 8 0.5
lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_ff_net_2.alpha 4.0 8 0.5
lora_unet_down_blocks_0_attentions_1_proj_in.alpha 4.0 8 0.5
lora_unet_down_blocks_0_attentions_1_proj_out.alpha 4.0 8 0.5
lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_k.alpha 4.0 8 0.5
lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha 4.0 8 0.5
lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_q.alpha 4.0 8 0.5
lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_v.alpha 4.0 8 0.5
lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_k.alpha 4.0 8 0.5
lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha 4.0 8 0.5
lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_q.alpha 4.0 8 0.5
lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_v.alpha 4.0 8 0.5
lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha 4.0 8 0.5
lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_ff_net_2.alpha 4.0 8 0.5
lora_unet_down_blocks_0_downsamplers_0_conv.alpha 1.0 1 1.0
lora_unet_down_blocks_0_resnets_0_conv1.alpha 1.0 1 1.0
lora_unet_down_blocks_0_resnets_0_conv2.alpha 1.0 1 1.0
lora_unet_down_blocks_0_resnets_0_time_emb_proj.alpha 4.0 8 0.5
lora_unet_down_blocks_0_resnets_1_conv1.alpha 1.0 1 1.0
lora_unet_down_blocks_0_resnets_1_conv2.alpha 1.0 1 1.0
lora_unet_down_blocks_0_resnets_1_time_emb_proj.alpha 4.0 8 0.5
lora_unet_down_blocks_1_attentions_0_proj_in.alpha 4.0 8 0.5
lora_unet_down_blocks_1_attentions_0_proj_out.alpha 4.0 8 0.5
lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_k.alpha 4.0 8 0.5
lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha 4.0 8 0.5
lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_q.alpha 4.0 8 0.5
lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_v.alpha 4.0 8 0.5
lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_k.alpha 4.0 8 0.5
lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha 4.0 8 0.5
lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_q.alpha 4.0 8 0.5
lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_v.alpha 4.0 8 0.5
lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha 4.0 8 0.5
lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_ff_net_2.alpha 4.0 8 0.5
lora_unet_down_blocks_1_attentions_1_proj_in.alpha 4.0 8 0.5
lora_unet_down_blocks_1_attentions_1_proj_out.alpha 4.0 8 0.5
lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_k.alpha 4.0 8 0.5
lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha 4.0 8 0.5
lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_q.alpha 4.0 8 0.5
lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_v.alpha 4.0 8 0.5
lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_k.alpha 4.0 8 0.5
lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha 4.0 8 0.5
lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_q.alpha 4.0 8 0.5
lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_v.alpha 4.0 8 0.5
lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha 4.0 8 0.5
lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_ff_net_2.alpha 4.0 8 0.5
lora_unet_down_blocks_1_downsamplers_0_conv.alpha 1.0 1 1.0
lora_unet_down_blocks_1_resnets_0_conv1.alpha 1.0 1 1.0
lora_unet_down_blocks_1_resnets_0_conv2.alpha 1.0 1 1.0
lora_unet_down_blocks_1_resnets_0_conv_shortcut.alpha 4.0 8 0.5
lora_unet_down_blocks_1_resnets_0_time_emb_proj.alpha 4.0 8 0.5
lora_unet_down_blocks_1_resnets_1_conv1.alpha 1.0 1 1.0
lora_unet_down_blocks_1_resnets_1_conv2.alpha 1.0 1 1.0
lora_unet_down_blocks_1_resnets_1_time_emb_proj.alpha 4.0 8 0.5
lora_unet_down_blocks_2_attentions_0_proj_in.alpha 4.0 8 0.5
lora_unet_down_blocks_2_attentions_0_proj_out.alpha 4.0 8 0.5
lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.alpha 4.0 8 0.5
lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha 4.0 8 0.5
lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.alpha 4.0 8 0.5
lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.alpha 4.0 8 0.5
lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.alpha 4.0 8 0.5
lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha 4.0 8 0.5
lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.alpha 4.0 8 0.5
lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.alpha 4.0 8 0.5
lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha 4.0 8 0.5
lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.alpha 4.0 8 0.5
lora_unet_down_blocks_2_attentions_1_proj_in.alpha 4.0 8 0.5
lora_unet_down_blocks_2_attentions_1_proj_out.alpha 4.0 8 0.5
lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.alpha 4.0 8 0.5
lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha 4.0 8 0.5
lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.alpha 4.0 8 0.5
lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.alpha 4.0 8 0.5
lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.alpha 4.0 8 0.5
lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha 4.0 8 0.5
lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.alpha 4.0 8 0.5
lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.alpha 4.0 8 0.5
lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha 4.0 8 0.5
lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.alpha 4.0 8 0.5
lora_unet_down_blocks_2_downsamplers_0_conv.alpha 1.0 1 1.0
lora_unet_down_blocks_2_resnets_0_conv1.alpha 1.0 1 1.0
lora_unet_down_blocks_2_resnets_0_conv2.alpha 1.0 1 1.0
lora_unet_down_blocks_2_resnets_0_conv_shortcut.alpha 4.0 8 0.5
lora_unet_down_blocks_2_resnets_0_time_emb_proj.alpha 4.0 8 0.5
lora_unet_down_blocks_2_resnets_1_conv1.alpha 1.0 1 1.0
lora_unet_down_blocks_2_resnets_1_conv2.alpha 1.0 1 1.0
lora_unet_down_blocks_2_resnets_1_time_emb_proj.alpha 4.0 8 0.5
lora_unet_down_blocks_3_resnets_0_conv1.alpha 1.0 1 1.0
lora_unet_down_blocks_3_resnets_0_conv2.alpha 1.0 1 1.0
lora_unet_down_blocks_3_resnets_0_time_emb_proj.alpha 4.0 8 0.5
lora_unet_down_blocks_3_resnets_1_conv1.alpha 1.0 1 1.0
lora_unet_down_blocks_3_resnets_1_conv2.alpha 1.0 1 1.0
lora_unet_down_blocks_3_resnets_1_time_emb_proj.alpha 4.0 8 0.5
lora_unet_mid_block_attentions_0_proj_in.alpha 4.0 8 0.5
lora_unet_mid_block_attentions_0_proj_out.alpha 4.0 8 0.5
lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_k.alpha 4.0 8 0.5
lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha 4.0 8 0.5
lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_q.alpha 4.0 8 0.5
lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_v.alpha 4.0 8 0.5
lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_k.alpha 4.0 8 0.5
lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha 4.0 8 0.5
lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_q.alpha 4.0 8 0.5
lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_v.alpha 4.0 8 0.5
lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha 4.0 8 0.5
lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_2.alpha 4.0 8 0.5
lora_unet_mid_block_resnets_0_conv1.alpha 1.0 1 1.0
lora_unet_mid_block_resnets_0_conv2.alpha 1.0 1 1.0
lora_unet_mid_block_resnets_0_time_emb_proj.alpha 4.0 8 0.5
lora_unet_mid_block_resnets_1_conv1.alpha 1.0 1 1.0
lora_unet_mid_block_resnets_1_conv2.alpha 1.0 1 1.0
lora_unet_mid_block_resnets_1_time_emb_proj.alpha 4.0 8 0.5
lora_unet_up_blocks_0_resnets_0_conv1.alpha 1.0 1 1.0
lora_unet_up_blocks_0_resnets_0_conv2.alpha 1.0 1 1.0
lora_unet_up_blocks_0_resnets_0_conv_shortcut.alpha 4.0 8 0.5
lora_unet_up_blocks_0_resnets_0_time_emb_proj.alpha 4.0 8 0.5
lora_unet_up_blocks_0_resnets_1_conv1.alpha 1.0 1 1.0
lora_unet_up_blocks_0_resnets_1_conv2.alpha 1.0 1 1.0
lora_unet_up_blocks_0_resnets_1_conv_shortcut.alpha 4.0 8 0.5
lora_unet_up_blocks_0_resnets_1_time_emb_proj.alpha 4.0 8 0.5
lora_unet_up_blocks_0_resnets_2_conv1.alpha 1.0 1 1.0
lora_unet_up_blocks_0_resnets_2_conv2.alpha 1.0 1 1.0
lora_unet_up_blocks_0_resnets_2_conv_shortcut.alpha 4.0 8 0.5
lora_unet_up_blocks_0_resnets_2_time_emb_proj.alpha 4.0 8 0.5
lora_unet_up_blocks_0_upsamplers_0_conv.alpha 1.0 1 1.0
lora_unet_up_blocks_1_attentions_0_proj_in.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_0_proj_out.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_k.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_q.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_v.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_k.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_q.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_v.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_2.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_1_proj_in.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_1_proj_out.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_k.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_q.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_v.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_k.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_q.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_v.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_ff_net_2.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_2_proj_in.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_2_proj_out.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_k.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_out_0.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_q.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_v.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_k.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_out_0.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_q.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_v.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_ff_net_0_proj.alpha 4.0 8 0.5
lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_ff_net_2.alpha 4.0 8 0.5
lora_unet_up_blocks_1_resnets_0_conv1.alpha 1.0 1 1.0
lora_unet_up_blocks_1_resnets_0_conv2.alpha 1.0 1 1.0
lora_unet_up_blocks_1_resnets_0_conv_shortcut.alpha 4.0 8 0.5
lora_unet_up_blocks_1_resnets_0_time_emb_proj.alpha 4.0 8 0.5
lora_unet_up_blocks_1_resnets_1_conv1.alpha 1.0 1 1.0
lora_unet_up_blocks_1_resnets_1_conv2.alpha 1.0 1 1.0
lora_unet_up_blocks_1_resnets_1_conv_shortcut.alpha 4.0 8 0.5
lora_unet_up_blocks_1_resnets_1_time_emb_proj.alpha 4.0 8 0.5
lora_unet_up_blocks_1_resnets_2_conv1.alpha 1.0 1 1.0
lora_unet_up_blocks_1_resnets_2_conv2.alpha 1.0 1 1.0
lora_unet_up_blocks_1_resnets_2_conv_shortcut.alpha 4.0 8 0.5
lora_unet_up_blocks_1_resnets_2_time_emb_proj.alpha 4.0 8 0.5
lora_unet_up_blocks_1_upsamplers_0_conv.alpha 1.0 1 1.0
lora_unet_up_blocks_2_attentions_0_proj_in.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_0_proj_out.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_1_proj_in.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_1_proj_out.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_2_proj_in.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_2_proj_out.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_k.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_out_0.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_q.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_v.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_k.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_out_0.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_q.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_v.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_ff_net_0_proj.alpha 4.0 8 0.5
lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_ff_net_2.alpha 4.0 8 0.5
lora_unet_up_blocks_2_resnets_0_conv1.alpha 1.0 1 1.0
lora_unet_up_blocks_2_resnets_0_conv2.alpha 1.0 1 1.0
lora_unet_up_blocks_2_resnets_0_conv_shortcut.alpha 4.0 8 0.5
lora_unet_up_blocks_2_resnets_0_time_emb_proj.alpha 4.0 8 0.5
lora_unet_up_blocks_2_resnets_1_conv1.alpha 1.0 1 1.0
lora_unet_up_blocks_2_resnets_1_conv2.alpha 1.0 1 1.0
lora_unet_up_blocks_2_resnets_1_conv_shortcut.alpha 4.0 8 0.5
lora_unet_up_blocks_2_resnets_1_time_emb_proj.alpha 4.0 8 0.5
lora_unet_up_blocks_2_resnets_2_conv1.alpha 1.0 1 1.0
lora_unet_up_blocks_2_resnets_2_conv2.alpha 1.0 1 1.0
lora_unet_up_blocks_2_resnets_2_conv_shortcut.alpha 4.0 8 0.5
lora_unet_up_blocks_2_resnets_2_time_emb_proj.alpha 4.0 8 0.5
lora_unet_up_blocks_2_upsamplers_0_conv.alpha 1.0 1 1.0
lora_unet_up_blocks_3_attentions_0_proj_in.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_0_proj_out.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_k.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_q.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_v.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_k.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_q.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_v.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_ff_net_2.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_1_proj_in.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_1_proj_out.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_k.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_q.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_v.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_k.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_q.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_v.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_ff_net_2.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_2_proj_in.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_2_proj_out.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_k.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_out_0.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_q.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_v.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_k.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_out_0.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_q.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_v.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_ff_net_0_proj.alpha 4.0 8 0.5
lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_ff_net_2.alpha 4.0 8 0.5
lora_unet_up_blocks_3_resnets_0_conv1.alpha 1.0 1 1.0
lora_unet_up_blocks_3_resnets_0_conv2.alpha 1.0 1 1.0
lora_unet_up_blocks_3_resnets_0_conv_shortcut.alpha 4.0 8 0.5
lora_unet_up_blocks_3_resnets_0_time_emb_proj.alpha 4.0 8 0.5
lora_unet_up_blocks_3_resnets_1_conv1.alpha 1.0 1 1.0
lora_unet_up_blocks_3_resnets_1_conv2.alpha 1.0 1 1.0
lora_unet_up_blocks_3_resnets_1_conv_shortcut.alpha 4.0 8 0.5
lora_unet_up_blocks_3_resnets_1_time_emb_proj.alpha 4.0 8 0.5
lora_unet_up_blocks_3_resnets_2_conv1.alpha 1.0 1 1.0
lora_unet_up_blocks_3_resnets_2_conv2.alpha 1.0 1 1.0
lora_unet_up_blocks_3_resnets_2_conv_shortcut.alpha 4.0 8 0.5
lora_unet_up_blocks_3_resnets_2_time_emb_proj.alpha 4.0 8 0.5

@takuma104
Copy link
Author

https://civitai.com/models/15699?modelVersionId=18531

lora_te_text_model_encoder_layers_0_mlp_fc1.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_0_mlp_fc2.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_0_self_attn_k_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_0_self_attn_out_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_0_self_attn_q_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_0_self_attn_v_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_10_mlp_fc1.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_10_mlp_fc2.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_10_self_attn_k_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_10_self_attn_out_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_10_self_attn_q_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_10_self_attn_v_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_11_mlp_fc1.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_11_mlp_fc2.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_11_self_attn_k_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_11_self_attn_out_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_11_self_attn_q_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_11_self_attn_v_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_1_mlp_fc1.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_1_mlp_fc2.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_1_self_attn_k_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_1_self_attn_out_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_1_self_attn_q_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_1_self_attn_v_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_2_mlp_fc1.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_2_mlp_fc2.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_2_self_attn_k_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_2_self_attn_out_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_2_self_attn_q_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_2_self_attn_v_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_3_mlp_fc1.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_3_mlp_fc2.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_3_self_attn_k_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_3_self_attn_out_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_3_self_attn_q_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_3_self_attn_v_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_4_mlp_fc1.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_4_mlp_fc2.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_4_self_attn_k_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_4_self_attn_out_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_4_self_attn_q_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_4_self_attn_v_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_5_mlp_fc1.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_5_mlp_fc2.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_5_self_attn_k_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_5_self_attn_out_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_5_self_attn_q_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_5_self_attn_v_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_6_mlp_fc1.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_6_mlp_fc2.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_6_self_attn_k_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_6_self_attn_out_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_6_self_attn_q_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_6_self_attn_v_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_7_mlp_fc1.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_7_mlp_fc2.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_7_self_attn_k_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_7_self_attn_out_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_7_self_attn_q_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_7_self_attn_v_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_8_mlp_fc1.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_8_mlp_fc2.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_8_self_attn_k_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_8_self_attn_out_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_8_self_attn_q_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_8_self_attn_v_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_9_mlp_fc1.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_9_mlp_fc2.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_9_self_attn_k_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_9_self_attn_out_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_9_self_attn_q_proj.alpha 32.0 64 0.5
lora_te_text_model_encoder_layers_9_self_attn_v_proj.alpha 32.0 64 0.5
lora_unet_down_blocks_0_attentions_0_proj_in.alpha 32.0 64 0.5
lora_unet_down_blocks_0_attentions_0_proj_out.alpha 32.0 64 0.5
lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_k.alpha 32.0 64 0.5
lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha 32.0 64 0.5
lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_q.alpha 32.0 64 0.5
lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn1_to_v.alpha 32.0 64 0.5
lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_k.alpha 32.0 64 0.5
lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha 32.0 64 0.5
lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_q.alpha 32.0 64 0.5
lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_attn2_to_v.alpha 32.0 64 0.5
lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha 32.0 64 0.5
lora_unet_down_blocks_0_attentions_0_transformer_blocks_0_ff_net_2.alpha 32.0 64 0.5
lora_unet_down_blocks_0_attentions_1_proj_in.alpha 32.0 64 0.5
lora_unet_down_blocks_0_attentions_1_proj_out.alpha 32.0 64 0.5
lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_k.alpha 32.0 64 0.5
lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha 32.0 64 0.5
lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_q.alpha 32.0 64 0.5
lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn1_to_v.alpha 32.0 64 0.5
lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_k.alpha 32.0 64 0.5
lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha 32.0 64 0.5
lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_q.alpha 32.0 64 0.5
lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_attn2_to_v.alpha 32.0 64 0.5
lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha 32.0 64 0.5
lora_unet_down_blocks_0_attentions_1_transformer_blocks_0_ff_net_2.alpha 32.0 64 0.5
lora_unet_down_blocks_0_downsamplers_0_conv.alpha 32.0 64 0.5
lora_unet_down_blocks_0_resnets_0_conv1.alpha 32.0 64 0.5
lora_unet_down_blocks_0_resnets_0_conv2.alpha 32.0 64 0.5
lora_unet_down_blocks_0_resnets_0_time_emb_proj.alpha 32.0 64 0.5
lora_unet_down_blocks_0_resnets_1_conv1.alpha 32.0 64 0.5
lora_unet_down_blocks_0_resnets_1_conv2.alpha 32.0 64 0.5
lora_unet_down_blocks_0_resnets_1_time_emb_proj.alpha 32.0 64 0.5
lora_unet_down_blocks_1_attentions_0_proj_in.alpha 32.0 64 0.5
lora_unet_down_blocks_1_attentions_0_proj_out.alpha 32.0 64 0.5
lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_k.alpha 32.0 64 0.5
lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha 32.0 64 0.5
lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_q.alpha 32.0 64 0.5
lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn1_to_v.alpha 32.0 64 0.5
lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_k.alpha 32.0 64 0.5
lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha 32.0 64 0.5
lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_q.alpha 32.0 64 0.5
lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_attn2_to_v.alpha 32.0 64 0.5
lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha 32.0 64 0.5
lora_unet_down_blocks_1_attentions_0_transformer_blocks_0_ff_net_2.alpha 32.0 64 0.5
lora_unet_down_blocks_1_attentions_1_proj_in.alpha 32.0 64 0.5
lora_unet_down_blocks_1_attentions_1_proj_out.alpha 32.0 64 0.5
lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_k.alpha 32.0 64 0.5
lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha 32.0 64 0.5
lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_q.alpha 32.0 64 0.5
lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn1_to_v.alpha 32.0 64 0.5
lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_k.alpha 32.0 64 0.5
lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha 32.0 64 0.5
lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_q.alpha 32.0 64 0.5
lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_attn2_to_v.alpha 32.0 64 0.5
lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha 32.0 64 0.5
lora_unet_down_blocks_1_attentions_1_transformer_blocks_0_ff_net_2.alpha 32.0 64 0.5
lora_unet_down_blocks_1_downsamplers_0_conv.alpha 32.0 64 0.5
lora_unet_down_blocks_1_resnets_0_conv1.alpha 32.0 64 0.5
lora_unet_down_blocks_1_resnets_0_conv2.alpha 32.0 64 0.5
lora_unet_down_blocks_1_resnets_0_conv_shortcut.alpha 32.0 64 0.5
lora_unet_down_blocks_1_resnets_0_time_emb_proj.alpha 32.0 64 0.5
lora_unet_down_blocks_1_resnets_1_conv1.alpha 32.0 64 0.5
lora_unet_down_blocks_1_resnets_1_conv2.alpha 32.0 64 0.5
lora_unet_down_blocks_1_resnets_1_time_emb_proj.alpha 32.0 64 0.5
lora_unet_down_blocks_2_attentions_0_proj_in.alpha 32.0 64 0.5
lora_unet_down_blocks_2_attentions_0_proj_out.alpha 32.0 64 0.5
lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.alpha 32.0 64 0.5
lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha 32.0 64 0.5
lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.alpha 32.0 64 0.5
lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.alpha 32.0 64 0.5
lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.alpha 32.0 64 0.5
lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha 32.0 64 0.5
lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.alpha 32.0 64 0.5
lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.alpha 32.0 64 0.5
lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha 32.0 64 0.5
lora_unet_down_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.alpha 32.0 64 0.5
lora_unet_down_blocks_2_attentions_1_proj_in.alpha 32.0 64 0.5
lora_unet_down_blocks_2_attentions_1_proj_out.alpha 32.0 64 0.5
lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.alpha 32.0 64 0.5
lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha 32.0 64 0.5
lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.alpha 32.0 64 0.5
lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.alpha 32.0 64 0.5
lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.alpha 32.0 64 0.5
lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha 32.0 64 0.5
lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.alpha 32.0 64 0.5
lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.alpha 32.0 64 0.5
lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha 32.0 64 0.5
lora_unet_down_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.alpha 32.0 64 0.5
lora_unet_down_blocks_2_downsamplers_0_conv.alpha 32.0 64 0.5
lora_unet_down_blocks_2_resnets_0_conv1.alpha 32.0 64 0.5
lora_unet_down_blocks_2_resnets_0_conv2.alpha 32.0 64 0.5
lora_unet_down_blocks_2_resnets_0_conv_shortcut.alpha 32.0 64 0.5
lora_unet_down_blocks_2_resnets_0_time_emb_proj.alpha 32.0 64 0.5
lora_unet_down_blocks_2_resnets_1_conv1.alpha 32.0 64 0.5
lora_unet_down_blocks_2_resnets_1_conv2.alpha 32.0 64 0.5
lora_unet_down_blocks_2_resnets_1_time_emb_proj.alpha 32.0 64 0.5
lora_unet_down_blocks_3_resnets_0_conv1.alpha 32.0 64 0.5
lora_unet_down_blocks_3_resnets_0_conv2.alpha 32.0 64 0.5
lora_unet_down_blocks_3_resnets_0_time_emb_proj.alpha 32.0 64 0.5
lora_unet_down_blocks_3_resnets_1_conv1.alpha 32.0 64 0.5
lora_unet_down_blocks_3_resnets_1_conv2.alpha 32.0 64 0.5
lora_unet_down_blocks_3_resnets_1_time_emb_proj.alpha 32.0 64 0.5
lora_unet_mid_block_attentions_0_proj_in.alpha 32.0 64 0.5
lora_unet_mid_block_attentions_0_proj_out.alpha 32.0 64 0.5
lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_k.alpha 32.0 64 0.5
lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha 32.0 64 0.5
lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_q.alpha 32.0 64 0.5
lora_unet_mid_block_attentions_0_transformer_blocks_0_attn1_to_v.alpha 32.0 64 0.5
lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_k.alpha 32.0 64 0.5
lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha 32.0 64 0.5
lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_q.alpha 32.0 64 0.5
lora_unet_mid_block_attentions_0_transformer_blocks_0_attn2_to_v.alpha 32.0 64 0.5
lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha 32.0 64 0.5
lora_unet_mid_block_attentions_0_transformer_blocks_0_ff_net_2.alpha 32.0 64 0.5
lora_unet_mid_block_resnets_0_conv1.alpha 32.0 64 0.5
lora_unet_mid_block_resnets_0_conv2.alpha 32.0 64 0.5
lora_unet_mid_block_resnets_0_time_emb_proj.alpha 32.0 64 0.5
lora_unet_mid_block_resnets_1_conv1.alpha 32.0 64 0.5
lora_unet_mid_block_resnets_1_conv2.alpha 32.0 64 0.5
lora_unet_mid_block_resnets_1_time_emb_proj.alpha 32.0 64 0.5
lora_unet_up_blocks_0_resnets_0_conv1.alpha 32.0 64 0.5
lora_unet_up_blocks_0_resnets_0_conv2.alpha 32.0 64 0.5
lora_unet_up_blocks_0_resnets_0_conv_shortcut.alpha 32.0 64 0.5
lora_unet_up_blocks_0_resnets_0_time_emb_proj.alpha 32.0 64 0.5
lora_unet_up_blocks_0_resnets_1_conv1.alpha 32.0 64 0.5
lora_unet_up_blocks_0_resnets_1_conv2.alpha 32.0 64 0.5
lora_unet_up_blocks_0_resnets_1_conv_shortcut.alpha 32.0 64 0.5
lora_unet_up_blocks_0_resnets_1_time_emb_proj.alpha 32.0 64 0.5
lora_unet_up_blocks_0_resnets_2_conv1.alpha 32.0 64 0.5
lora_unet_up_blocks_0_resnets_2_conv2.alpha 32.0 64 0.5
lora_unet_up_blocks_0_resnets_2_conv_shortcut.alpha 32.0 64 0.5
lora_unet_up_blocks_0_resnets_2_time_emb_proj.alpha 32.0 64 0.5
lora_unet_up_blocks_0_upsamplers_0_conv.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_0_proj_in.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_0_proj_out.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_k.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_q.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn1_to_v.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_k.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_q.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_attn2_to_v.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_0_transformer_blocks_0_ff_net_2.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_1_proj_in.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_1_proj_out.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_k.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_q.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn1_to_v.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_k.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_q.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_attn2_to_v.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_1_transformer_blocks_0_ff_net_2.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_2_proj_in.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_2_proj_out.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_k.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_out_0.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_q.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn1_to_v.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_k.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_out_0.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_q.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_attn2_to_v.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_ff_net_0_proj.alpha 32.0 64 0.5
lora_unet_up_blocks_1_attentions_2_transformer_blocks_0_ff_net_2.alpha 32.0 64 0.5
lora_unet_up_blocks_1_resnets_0_conv1.alpha 32.0 64 0.5
lora_unet_up_blocks_1_resnets_0_conv2.alpha 32.0 64 0.5
lora_unet_up_blocks_1_resnets_0_conv_shortcut.alpha 32.0 64 0.5
lora_unet_up_blocks_1_resnets_0_time_emb_proj.alpha 32.0 64 0.5
lora_unet_up_blocks_1_resnets_1_conv1.alpha 32.0 64 0.5
lora_unet_up_blocks_1_resnets_1_conv2.alpha 32.0 64 0.5
lora_unet_up_blocks_1_resnets_1_conv_shortcut.alpha 32.0 64 0.5
lora_unet_up_blocks_1_resnets_1_time_emb_proj.alpha 32.0 64 0.5
lora_unet_up_blocks_1_resnets_2_conv1.alpha 32.0 64 0.5
lora_unet_up_blocks_1_resnets_2_conv2.alpha 32.0 64 0.5
lora_unet_up_blocks_1_resnets_2_conv_shortcut.alpha 32.0 64 0.5
lora_unet_up_blocks_1_resnets_2_time_emb_proj.alpha 32.0 64 0.5
lora_unet_up_blocks_1_upsamplers_0_conv.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_0_proj_in.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_0_proj_out.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_k.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_q.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn1_to_v.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_k.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_q.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_attn2_to_v.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_0_transformer_blocks_0_ff_net_2.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_1_proj_in.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_1_proj_out.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_k.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_q.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn1_to_v.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_k.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_q.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_attn2_to_v.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_1_transformer_blocks_0_ff_net_2.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_2_proj_in.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_2_proj_out.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_k.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_out_0.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_q.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn1_to_v.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_k.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_out_0.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_q.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_attn2_to_v.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_ff_net_0_proj.alpha 32.0 64 0.5
lora_unet_up_blocks_2_attentions_2_transformer_blocks_0_ff_net_2.alpha 32.0 64 0.5
lora_unet_up_blocks_2_resnets_0_conv1.alpha 32.0 64 0.5
lora_unet_up_blocks_2_resnets_0_conv2.alpha 32.0 64 0.5
lora_unet_up_blocks_2_resnets_0_conv_shortcut.alpha 32.0 64 0.5
lora_unet_up_blocks_2_resnets_0_time_emb_proj.alpha 32.0 64 0.5
lora_unet_up_blocks_2_resnets_1_conv1.alpha 32.0 64 0.5
lora_unet_up_blocks_2_resnets_1_conv2.alpha 32.0 64 0.5
lora_unet_up_blocks_2_resnets_1_conv_shortcut.alpha 32.0 64 0.5
lora_unet_up_blocks_2_resnets_1_time_emb_proj.alpha 32.0 64 0.5
lora_unet_up_blocks_2_resnets_2_conv1.alpha 32.0 64 0.5
lora_unet_up_blocks_2_resnets_2_conv2.alpha 32.0 64 0.5
lora_unet_up_blocks_2_resnets_2_conv_shortcut.alpha 32.0 64 0.5
lora_unet_up_blocks_2_resnets_2_time_emb_proj.alpha 32.0 64 0.5
lora_unet_up_blocks_2_upsamplers_0_conv.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_0_proj_in.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_0_proj_out.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_k.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_out_0.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_q.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn1_to_v.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_k.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_out_0.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_q.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_attn2_to_v.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_ff_net_0_proj.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_0_transformer_blocks_0_ff_net_2.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_1_proj_in.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_1_proj_out.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_k.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_out_0.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_q.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn1_to_v.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_k.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_out_0.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_q.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_attn2_to_v.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_ff_net_0_proj.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_1_transformer_blocks_0_ff_net_2.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_2_proj_in.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_2_proj_out.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_k.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_out_0.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_q.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn1_to_v.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_k.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_out_0.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_q.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_attn2_to_v.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_ff_net_0_proj.alpha 32.0 64 0.5
lora_unet_up_blocks_3_attentions_2_transformer_blocks_0_ff_net_2.alpha 32.0 64 0.5
lora_unet_up_blocks_3_resnets_0_conv1.alpha 32.0 64 0.5
lora_unet_up_blocks_3_resnets_0_conv2.alpha 32.0 64 0.5
lora_unet_up_blocks_3_resnets_0_conv_shortcut.alpha 32.0 64 0.5
lora_unet_up_blocks_3_resnets_0_time_emb_proj.alpha 32.0 64 0.5
lora_unet_up_blocks_3_resnets_1_conv1.alpha 32.0 64 0.5
lora_unet_up_blocks_3_resnets_1_conv2.alpha 32.0 64 0.5
lora_unet_up_blocks_3_resnets_1_conv_shortcut.alpha 32.0 64 0.5
lora_unet_up_blocks_3_resnets_1_time_emb_proj.alpha 32.0 64 0.5
lora_unet_up_blocks_3_resnets_2_conv1.alpha 32.0 64 0.5
lora_unet_up_blocks_3_resnets_2_conv2.alpha 32.0 64 0.5
lora_unet_up_blocks_3_resnets_2_conv_shortcut.alpha 32.0 64 0.5
lora_unet_up_blocks_3_resnets_2_time_emb_proj.alpha 32.0 64 0.5

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment