
Commit

Use ipex-llm package
chtanch committed Mar 22, 2024
1 parent 10accfd commit 01526f6
Showing 2 changed files with 4 additions and 4 deletions.
6 changes: 3 additions & 3 deletions modules/models.py
@@ -60,7 +60,7 @@ def load_model(model_name, loader=None):
     shared.is_seq2seq = False
     shared.model_name = model_name
     load_func_map = {
-        'IPEX-LLM': bigdl_llm_loader,
+        'IPEX-LLM': ipex_llm_loader,
         'Transformers': huggingface_loader,
         'AutoGPTQ': AutoGPTQ_loader,
         'GPTQ-for-LLaMa': GPTQ_loader,
@@ -321,9 +321,9 @@ def AutoAWQ_loader(model_name):
 
     return model
 
-def bigdl_llm_loader(model_name):
+def ipex_llm_loader(model_name):
 
-    from bigdl.llm.transformers import AutoModelForCausalLM, AutoModel, AutoModelForSeq2SeqLM
+    from ipex_llm.transformers import AutoModelForCausalLM, AutoModel, AutoModelForSeq2SeqLM
 
     path_to_model = Path(f'{shared.args.model_dir}/{model_name}')
 
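The rest of ipex_llm_loader is collapsed in this diff, so only the renamed import is visible. As context, the sketch below shows how a loader built on ipex_llm.transformers typically looks; it is an illustration, not the file's actual body, and the load_in_4bit and trust_remote_code arguments are assumptions based on the ipex-llm documentation rather than on this commit. ipex_llm.transformers mirrors the Hugging Face AutoModel* API, which is why only the import line and the function name need to change.

# Hedged sketch; the real ipex_llm_loader() body is not shown in this diff.
from pathlib import Path

from ipex_llm.transformers import AutoModelForCausalLM


def ipex_llm_loader_sketch(model_name, model_dir='models'):
    # Resolve the local model directory, mirroring the path handling above.
    path_to_model = Path(f'{model_dir}/{model_name}')

    # ipex_llm.transformers is a drop-in for transformers.AutoModelForCausalLM;
    # load_in_4bit enables ipex-llm's low-bit weight quantization (assumed here).
    model = AutoModelForCausalLM.from_pretrained(
        path_to_model,
        load_in_4bit=True,
        trust_remote_code=True,
    )
    return model

Since the commit only swaps bigdl.llm for ipex_llm (3 additions, 3 deletions), the loading logic itself is presumably unchanged.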
2 changes: 1 addition & 1 deletion modules/shared.py
@@ -274,7 +274,7 @@ def fix_loader_name(name):
         return 'QuIP#'
     elif name in ['hqq']:
         return 'HQQ'
-    elif name in ['IPEX-LLM', 'IPEX-LLM', 'bigdl']:
+    elif name in ['IPEX-LLM', 'ipex-llm']:
         return 'IPEX-LLM'


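The shared.py change tightens the alias list that fix_loader_name() maps to the canonical 'IPEX-LLM' loader key, dropping the old 'bigdl' alias and the duplicated entry. Below is a minimal, hypothetical sketch of that normalization pattern (other loader branches omitted); presumably this is what a --loader ipex-llm command-line value is resolved through before the lookup in load_func_map.

# Hypothetical, trimmed-down sketch of the alias normalization; only the
# branch touched by this commit is shown, all other loaders are omitted.
def fix_loader_name_sketch(name):
    if not name:
        return name
    if name in ['IPEX-LLM', 'ipex-llm']:  # 'bigdl' alias dropped by this commit
        return 'IPEX-LLM'
    return name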
