Skip to content

Commit

Permalink
add alpha
Browse the repository at this point in the history
  • Loading branch information
AbsterZhu committed Mar 18, 2024
1 parent 0739bd9 commit 45be249
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 3 deletions.
4 changes: 2 additions & 2 deletions examples.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -84,11 +84,11 @@
"\n",
"if melo_info[0] == \"base\":\n",
" model = timm.create_model(\"vit_base_patch16_224\", pretrained=True)\n",
" melo = LoRA_ViT_timm(model, r=int(melo_info[3]), alpha=int(melo_info[4]), num_classes=int(melo_info[4]))\n",
" melo = LoRA_ViT_timm(model, r=int(melo_info[3]), alpha=int(melo_info[4]), num_classes=int(melo_info[5]))\n",
" melo.load_lora_parameters(melo_path)\n",
"else:\n",
" model = ViT('B_16_imagenet1k')\n",
" melo = LoRA_ViT(model, r=int(melo_info[3]), alpha=int(melo_info[4]), num_classes=int(melo_info[4]))\n",
" melo = LoRA_ViT(model, r=int(melo_info[3]), alpha=int(melo_info[4]), num_classes=int(melo_info[5]))\n",
" melo.load_lora_parameters(melo_path)"
]
},
Expand Down
2 changes: 1 addition & 1 deletion lora.py
Original file line number Diff line number Diff line change
Expand Up @@ -465,7 +465,7 @@ def __init__(self, vit_model: timm_ViT, lora_files: list, lora_layer=None):
w_b_linear_vs.append(w_b_linear_v)

_in = self.lora_vit.head.in_features
_out = int(melo_info[4])
_out = int(melo_info[5])
self.num_classes.append(_out)
self.fc_loras.append(f.get_tensor(f"fc_{_in}in_{_out}out"))

Expand Down

0 comments on commit 45be249

Please sign in to comment.