From 55dbb915cc2a95048f56e667b09dfad38d840421 Mon Sep 17 00:00:00 2001
From: Howard Su
Date: Mon, 3 Jul 2023 19:58:58 +0800
Subject: [PATCH] [llama] No need to check file version when loading vocab
 score (#2079)

---
 llama.cpp | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/llama.cpp b/llama.cpp
index a869bbac80304..f48a6ca79bec8 100644
--- a/llama.cpp
+++ b/llama.cpp
@@ -481,9 +481,7 @@ struct llama_file_loader {
             std::string word = file.read_string(len);
 
             float score = 0.0f;
-            if (file_version >= LLAMA_FILE_VERSION_GGMF_V1) {
-                file.read_raw(&score, sizeof(score));
-            }
+            file.read_raw(&score, sizeof(score));
 
             vocab.token_to_id[word] = i;
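
For reference, a minimal sketch of what the vocab-reading loop in llama_file_loader looks like with this patch applied. The surrounding for loop, hparams.n_vocab, and the uint32_t len read via file.read_u32() are assumptions inferred from the hunk context and are not shown in the diff itself:

    // sketch of llama_file_loader's vocab loading after this change (not the verbatim upstream code)
    for (uint32_t i = 0; i < hparams.n_vocab; i++) {
        uint32_t len = file.read_u32();               // token length (assumed llama_file helper)
        std::string word = file.read_string(len);     // token text

        float score = 0.0f;
        file.read_raw(&score, sizeof(score));         // score is now read unconditionally, no file_version check

        vocab.token_to_id[word] = i;                  // map token text to its id
    }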