diff --git a/fastchat/conversation.py b/fastchat/conversation.py
index 94a9b47f3d..9a485b8156 100644
--- a/fastchat/conversation.py
+++ b/fastchat/conversation.py
@@ -940,6 +940,19 @@ def get_conv_template(name: str) -> Conversation:
     )
 )
 
+# Phind template
+register_conv_template(
+    Conversation(
+        name="phind",
+        system_message="### System Prompt\nYou are an intelligent programming assistant.",
+        roles=("### User Message", "### Assistant"),
+        messages=(),
+        offset=0,
+        sep_style=SeparatorStyle.ADD_COLON_SINGLE,
+        sep="\n\n",
+    )
+)
+
 
 if __name__ == "__main__":
     print("Vicuna template:")
diff --git a/fastchat/model/model_adapter.py b/fastchat/model/model_adapter.py
index a90aa61df0..028ac91f13 100644
--- a/fastchat/model/model_adapter.py
+++ b/fastchat/model/model_adapter.py
@@ -1601,6 +1601,16 @@ def get_default_conv_template(self, model_path: str) -> Conversation:
         return get_conv_template("llama-2")
 
 
+class PhindCodeLlamaAdapter(CodeLlamaAdapter):
+    """The model adapter for Phind Code Llama"""
+
+    def match(self, model_path: str):
+        return "phind-codellama-" in model_path.lower()
+
+    def get_default_conv_template(self, model_path: str) -> Conversation:
+        return get_conv_template("phind")
+
+
 # Note: the registration order matters.
 # The one registered earlier has a higher matching priority.
 register_model_adapter(PeftModelAdapter)
@@ -1658,6 +1668,7 @@ def get_default_conv_template(self, model_path: str) -> Conversation:
 register_model_adapter(VigogneChatAdapter)
 register_model_adapter(OpenLLaMaOpenInstructAdapter)
 register_model_adapter(ReaLMAdapter)
+register_model_adapter(PhindCodeLlamaAdapter)
 register_model_adapter(CodeLlamaAdapter)
 
 # After all adapters, try the default base adapter.