
Commit 83ef4e4

feat: add T5 with llama.cpp naming convention support (#654)
1 parent 7dac89a

File tree

1 file changed: +58, -0 lines


model.cpp

Lines changed: 58 additions & 0 deletions
@@ -181,6 +181,64 @@ std::unordered_map<std::string, std::string> pmid_v2_name_map = {
 std::string convert_open_clip_to_hf_clip(const std::string& name) {
     std::string new_name = name;
     std::string prefix;
+    if (contains(new_name, ".enc.")) {
+        // llama.cpp naming convention for T5
+        size_t pos = new_name.find(".enc.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 5, ".encoder.");
+        }
+        pos = new_name.find("blk.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 4, "block.");
+        }
+        pos = new_name.find("output_norm.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 12, "final_layer_norm.");
+        }
+        pos = new_name.find("attn_k.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 7, "layer.0.SelfAttention.k.");
+        }
+        pos = new_name.find("attn_v.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 7, "layer.0.SelfAttention.v.");
+        }
+        pos = new_name.find("attn_o.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 7, "layer.0.SelfAttention.o.");
+        }
+        pos = new_name.find("attn_q.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 7, "layer.0.SelfAttention.q.");
+        }
+        pos = new_name.find("attn_norm.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 10, "layer.0.layer_norm.");
+        }
+        pos = new_name.find("ffn_norm.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 9, "layer.1.layer_norm.");
+        }
+        pos = new_name.find("ffn_up.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 7, "layer.1.DenseReluDense.wi_1.");
+        }
+        pos = new_name.find("ffn_down.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 9, "layer.1.DenseReluDense.wo.");
+        }
+        pos = new_name.find("ffn_gate.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 9, "layer.1.DenseReluDense.wi_0.");
+        }
+        pos = new_name.find("attn_rel_b.");
+        if (pos != std::string::npos) {
+            new_name.replace(pos, 11, "layer.0.SelfAttention.relative_attention_bias.");
+        }
+    } else if (name == "text_encoders.t5xxl.transformer.token_embd.weight") {
+        new_name = "text_encoders.t5xxl.transformer.shared.weight";
+    }
+
     if (starts_with(new_name, "conditioner.embedders.0.open_clip.")) {
         prefix = "cond_stage_model.";
         new_name = new_name.substr(strlen("conditioner.embedders.0.open_clip."));
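
Note: the added branch translates llama.cpp/GGUF-style T5 tensor names into the HuggingFace-style names the rest of the loader expects. The standalone sketch below condenses the same substitution chain into a table-driven helper; convert_t5_name, the rules table, and the sample tensor name are illustrative assumptions, not code from this commit, and the token_embd special case is omitted for brevity.

// Condensed, table-driven sketch of the substitutions introduced above.
// convert_t5_name and the sample name are illustrative, not from the commit.
#include <cstdio>
#include <string>
#include <utility>
#include <vector>

static std::string convert_t5_name(std::string name) {
    if (name.find(".enc.") == std::string::npos) {
        return name;  // not a llama.cpp-style T5 encoder tensor
    }
    // Same (pattern -> replacement) pairs as the diff; each applied at most once.
    static const std::vector<std::pair<std::string, std::string>> rules = {
        {".enc.", ".encoder."},
        {"blk.", "block."},
        {"output_norm.", "final_layer_norm."},
        {"attn_k.", "layer.0.SelfAttention.k."},
        {"attn_v.", "layer.0.SelfAttention.v."},
        {"attn_o.", "layer.0.SelfAttention.o."},
        {"attn_q.", "layer.0.SelfAttention.q."},
        {"attn_norm.", "layer.0.layer_norm."},
        {"ffn_norm.", "layer.1.layer_norm."},
        {"ffn_up.", "layer.1.DenseReluDense.wi_1."},
        {"ffn_down.", "layer.1.DenseReluDense.wo."},
        {"ffn_gate.", "layer.1.DenseReluDense.wi_0."},
        {"attn_rel_b.", "layer.0.SelfAttention.relative_attention_bias."},
    };
    for (const auto& rule : rules) {
        size_t pos = name.find(rule.first);
        if (pos != std::string::npos) {
            name.replace(pos, rule.first.size(), rule.second);
        }
    }
    return name;
}

int main() {
    // Hypothetical llama.cpp-style tensor name built from the patterns above.
    std::string in = "text_encoders.t5xxl.transformer.enc.blk.0.attn_q.weight";
    std::printf("%s\n-> %s\n", in.c_str(), convert_t5_name(in).c_str());
    // prints: text_encoders.t5xxl.transformer.encoder.block.0.layer.0.SelfAttention.q.weight
    return 0;
}

Each pattern fires at most once per name, mirroring the single find/replace per pattern in the diff, and block indices such as blk.0 survive unchanged because only the literal prefixes are rewritten.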
