Skip to content

Commit

Permalink
refactor: rename model type rerank to reranker (#646)
Browse files Browse the repository at this point in the history
  • Loading branch information
sigoden committed Jun 24, 2024
1 parent 2fbb527 commit 2bc9607
Show file tree
Hide file tree
Showing 7 changed files with 33 additions and 32 deletions.
4 changes: 2 additions & 2 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -46,12 +46,12 @@ AIChat offers users a wide and diverse selection of Large Language Models (LLMs)
- **VertexAI-Claude:** Claude-3.5/Claude-3 (paid, vision)
- **Bedrock:** Llama-3/Claude-3.5/Claude-3/Mistral (paid, vision)
- **Mistral** (paid, embedding, function-calling)
- **Cohere:** Command-R/Command-R+ (paid, embedding, rerank, function-calling)
- **Cohere:** Command-R/Command-R+ (paid, embedding, reranker, function-calling)
- **Perplexity:** Llama-3/Mixtral (paid)
- **Cloudflare:** (free, vision, embedding)
- **OpenRouter:** (paid, vision, function-calling)
- **Replicate:** (paid)
- **Ernie:** (paid, embedding, rerank, function-calling)
- **Ernie:** (paid, embedding, reranker, function-calling)
- **Qianwen:** Qwen (paid, vision, embedding, function-calling)
- **Moonshot:** (paid, function-calling)
- **Deepseek:** (paid)
Expand Down
6 changes: 3 additions & 3 deletions config.example.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ agents:

# ---- RAG ----
rag_embedding_model: null # Specifies the embedding model to use
rag_rerank_model: null # Specifies the rerank model to use
rag_reranker_model: null # Specifies the reranker model to use
rag_top_k: 4 # Specifies the number of documents to retrieve
rag_chunk_size: null # Specifies the chunk size
rag_chunk_overlap: null # Specifies the chunk overlap
Expand Down Expand Up @@ -89,8 +89,8 @@ clients:
# max_input_tokens: 2048
# default_chunk_size: 2000
# max_batch_size: 100
# - name: xxxx # Rerank model
# type: rerank
# - name: xxxx # Reranker model
# type: reranker
# max_input_tokens: 2048
# patches:
# <regex>: # The regex to match model names, e.g. '.*' 'gpt-4o' 'gpt-4o|gpt-4-.*'
Expand Down
19 changes: 10 additions & 9 deletions models.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -188,10 +188,10 @@
default_chunk_size: 1000
max_batch_size: 96
- name: rerank-english-v3.0
type: rerank
type: reranker
max_input_tokens: 4096
- name: rerank-multilingual-v3.0
type: rerank
type: reranker
max_input_tokens: 4096

- platform: perplexity
Expand Down Expand Up @@ -529,8 +529,9 @@
default_chunk_size: 2000
max_batch_size: 1
- name: bce_reranker_base
type: rerank
type: reranker
max_input_tokens: 1024
input_price: 0.28

- platform: qianwen
# docs:
Expand Down Expand Up @@ -1188,19 +1189,19 @@
default_chunk_size: 1500
max_batch_size: 100
- name: jina-reranker-v1-base-en
type: rerank
type: reranker
max_input_tokens: 8192
input_price: 0.02
- name: jina-reranker-v1-turbo-en
type: rerank
type: reranker
max_input_tokens: 8192
input_price: 0.02
- name: jina-colbert-v1-en
type: rerank
type: reranker
max_input_tokens: 8192
input_price: 0.02
- name: jina-reranker-v1-base-multilingual
type: rerank
type: reranker
max_input_tokens: 8192
input_price: 0.02

Expand Down Expand Up @@ -1246,10 +1247,10 @@
default_chunk_size: 2000
max_batch_size: 128
- name: rerank-1
type: rerank
type: reranker
max_input_tokens: 8000
input_price: 0.05
- name: rerank-lite-1
type: rerank
type: reranker
max_input_tokens: 4000
input_price: 0.02
4 changes: 2 additions & 2 deletions src/client/common.rs
Original file line number Diff line number Diff line change
Expand Up @@ -154,8 +154,8 @@ macro_rules! register_client {
list_models(config).into_iter().filter(|v| v.model_type() == "embedding").collect()
}

pub fn list_rerank_models(config: &$crate::config::Config) -> Vec<&'static $crate::client::Model> {
list_models(config).into_iter().filter(|v| v.model_type() == "rerank").collect()
pub fn list_reranker_models(config: &$crate::config::Config) -> Vec<&'static $crate::client::Model> {
list_models(config).into_iter().filter(|v| v.model_type() == "reranker").collect()
}
};
}
Expand Down
8 changes: 4 additions & 4 deletions src/client/model.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
use super::{
list_chat_models, list_embedding_models, list_rerank_models,
list_chat_models, list_embedding_models, list_reranker_models,
message::{Message, MessageContent},
EmbeddingsData,
};
Expand Down Expand Up @@ -57,10 +57,10 @@ impl Model {
}
}

pub fn retrieve_rerank(config: &Config, model_id: &str) -> Result<Self> {
match Self::find(&list_rerank_models(config), model_id) {
pub fn retrieve_reranker(config: &Config, model_id: &str) -> Result<Self> {
match Self::find(&list_reranker_models(config), model_id) {
Some(v) => Ok(v),
None => bail!("Invalid rerank model '{model_id}'"),
None => bail!("Invalid reranker model '{model_id}'"),
}
}

Expand Down
6 changes: 3 additions & 3 deletions src/config/input.rs
Original file line number Diff line number Diff line change
Expand Up @@ -177,11 +177,11 @@ impl Input {
config.rag_min_score_keyword_search,
)
};
let rerank = match self.config.read().rag_rerank_model.clone() {
Some(rerank_model_id) => {
let rerank = match self.config.read().rag_reranker_model.clone() {
Some(reranker_model_id) => {
let min_score = self.config.read().rag_min_score_rerank;
let rerank_model =
Model::retrieve_rerank(&self.config.read(), &rerank_model_id)?;
Model::retrieve_reranker(&self.config.read(), &reranker_model_id)?;
let rerank_client = init_client(&self.config, Some(rerank_model))?;
Some((rerank_client, min_score))
}
Expand Down
18 changes: 9 additions & 9 deletions src/config/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ pub use self::role::{Role, RoleLike, CODE_ROLE, EXPLAIN_SHELL_ROLE, SHELL_ROLE};
use self::session::Session;

use crate::client::{
create_client_config, list_chat_models, list_client_types, list_rerank_models, ClientConfig,
create_client_config, list_chat_models, list_client_types, list_reranker_models, ClientConfig,
Model, OPENAI_COMPATIBLE_PLATFORMS,
};
use crate::function::{FunctionDeclaration, Functions, FunctionsFilter, ToolResult};
Expand Down Expand Up @@ -108,7 +108,7 @@ pub struct Config {
pub agents: Vec<AgentConfig>,

pub rag_embedding_model: Option<String>,
pub rag_rerank_model: Option<String>,
pub rag_reranker_model: Option<String>,
pub rag_top_k: usize,
pub rag_chunk_size: Option<usize>,
pub rag_chunk_overlap: Option<usize>,
Expand Down Expand Up @@ -167,7 +167,7 @@ impl Default for Config {
agents: vec![],

rag_embedding_model: None,
rag_rerank_model: None,
rag_reranker_model: None,
rag_top_k: 4,
rag_chunk_size: None,
rag_chunk_overlap: None,
Expand Down Expand Up @@ -478,8 +478,8 @@ impl Config {
("compress_threshold", self.compress_threshold.to_string()),
("function_calling", self.function_calling.to_string()),
(
"rag_rerank_model",
format_option_value(&self.rag_rerank_model),
"rag_reranker_model",
format_option_value(&self.rag_reranker_model),
),
("rag_top_k", self.rag_top_k.to_string()),
("highlight", self.highlight.to_string()),
Expand Down Expand Up @@ -527,8 +527,8 @@ impl Config {
let value = parse_value(value)?;
self.set_top_p(value);
}
"rag_rerank_model" => {
self.rag_rerank_model = if value == "null" {
"rag_reranker_model" => {
self.rag_reranker_model = if value == "null" {
None
} else {
Some(value.to_string())
Expand Down Expand Up @@ -1096,7 +1096,7 @@ impl Config {
"max_output_tokens",
"temperature",
"top_p",
"rag_rerank_model",
"rag_reranker_model",
"rag_top_k",
"function_calling",
"compress_threshold",
Expand All @@ -1117,7 +1117,7 @@ impl Config {
Some(v) => vec![v.to_string()],
None => vec![],
},
"rag_rerank_model" => list_rerank_models(self).iter().map(|v| v.id()).collect(),
"rag_reranker_model" => list_reranker_models(self).iter().map(|v| v.id()).collect(),
"function_calling" => complete_bool(self.function_calling),
"save" => complete_bool(self.save),
"save_session" => {
Expand Down

0 comments on commit 2bc9607

Please sign in to comment.