[NeuralChat] Support SOLAR-10.7B-Instruct-v1.0 model (#1069)
Support SOLAR-10.7B-Instruct-v1.0 model 
Signed-off-by: lvliang-intel <liang1.lv@intel.com>
lvliang-intel committed Dec 24, 2023
1 parent 37d4007 commit 77fb812
Showing 3 changed files with 73 additions and 0 deletions.
@@ -56,6 +56,7 @@ def optimize(self, model, use_llm_runtime=False):
            or re.search("neural-chat-7b-v2", model_name, re.IGNORECASE)
            or re.search("neural-chat-7b-v3", model_name, re.IGNORECASE)
            or re.search("starcoder", model_name, re.IGNORECASE)
            or re.search("solar", model_name, re.IGNORECASE)
        ):
            from intel_extension_for_transformers.transformers import AutoModelForCausalLM
            optimized_model = AutoModelForCausalLM.from_pretrained(
3 changes: 3 additions & 0 deletions intel_extension_for_transformers/neural_chat/chatbot.py
@@ -87,6 +87,9 @@ def build_chatbot(config: PipelineConfig=None):
    elif "mistral" in config.model_name_or_path.lower():
        from .models.mistral_model import MistralModel
        adapter = MistralModel()
    elif "solar" in config.model_name_or_path.lower():
        from .models.solar_model import SolarModel
        adapter = SolarModel()
    elif "opt" in config.model_name_or_path.lower() or \
        "gpt" in config.model_name_or_path.lower() or \
        "flan-t5" in config.model_name_or_path.lower() or \
69 changes: 69 additions & 0 deletions intel_extension_for_transformers/neural_chat/models/solar_model.py
@@ -0,0 +1,69 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2023 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from .base_model import BaseModel, register_model_adapter
import logging
from fastchat.conversation import get_conv_template, Conversation, register_conv_template, SeparatorStyle

logging.basicConfig(
    format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
    datefmt="%m/%d/%Y %H:%M:%S",
    level=logging.INFO,
)
logger = logging.getLogger(__name__)


# Solar-10.7B Chat Template
# Reference: https://huggingface.co/upstage/SOLAR-10.7B-Instruct-v1.0/blob/main/tokenizer_config.json
register_conv_template(
    Conversation(
        name="solar",
        system_message="",
        roles=("### User", "### Assistant"),
        sep_style=SeparatorStyle.ADD_COLON_SPACE_SINGLE,
        sep="\n\n",
        stop_str="</s>",
    )
)
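# With an empty system message and sep="\n\n", a single-turn prompt rendered from this
# template looks roughly like (illustrative only, not produced by this commit):
#   "### User: <user question>\n\n### Assistant: "
# and generation stops at the "</s>" token.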

class SolarModel(BaseModel):
    def match(self, model_path: str):
        """
        Check if the provided model_path matches the current model.
        Args:
            model_path (str): Path to a model.
        Returns:
            bool: True if the model_path matches, False otherwise.
        """
        return "solar-" in model_path.lower() and "instruct" in model_path.lower()

    def get_default_conv_template(self, model_path: str) -> Conversation:
        """
        Get the default conversation template for the given model path.
        Args:
            model_path (str): Path to the model.
        Returns:
            Conversation: A default conversation template.
        """
        return get_conv_template("solar")

register_model_adapter(SolarModel)
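
For reference, a minimal usage sketch of the new adapter through the public NeuralChat API (a sketch assuming the standard build_chatbot/PipelineConfig entry points; the model identifier is taken from the commit title and is fetched from Hugging Face):

from intel_extension_for_transformers.neural_chat import PipelineConfig, build_chatbot

# "solar" in the model name selects SolarModel in build_chatbot and the
# "solar" conversation template registered above.
config = PipelineConfig(model_name_or_path="upstage/SOLAR-10.7B-Instruct-v1.0")
chatbot = build_chatbot(config)
response = chatbot.predict("Tell me about Intel Xeon Scalable Processors.")
print(response)

Supplying an optimization config in PipelineConfig would, per the first hunk, also route SOLAR weight loading through the extension's AutoModelForCausalLM.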
