Skip to content

Commit

Permalink
fix: regression in OpenAI completion #849 (#850)
Browse files Browse the repository at this point in the history
  • Loading branch information
mikbry committed Apr 30, 2024
1 parent 43c21cc commit 4404b6f
Show file tree
Hide file tree
Showing 3 changed files with 8 additions and 3 deletions.
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -3,3 +3,4 @@
**/.DS_Store
**/llama.log
/tmp
.VSCodeCounter
8 changes: 6 additions & 2 deletions webapp/native/src/providers/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -431,7 +431,9 @@ impl ProvidersManager {
return Ok(response?);
}
if llm_provider_type == "openai" || llm_provider_type == "server" {
let response = {
let conversation_id = query.options.conversation_id.clone();
let message_id = query.options.message_id.clone();
let mut response = {
let api = format!("{:}", llm_provider.url);
let secret_key = match llm_provider.key {
Some(k) => { k }
Expand All @@ -446,7 +448,6 @@ impl ProvidersManager {
};
// let model = model;
let query = query.clone();
let conversation_id = query.options.conversation_id.clone();
openai
::call_completion::<R>(
&api,
Expand All @@ -459,6 +460,7 @@ impl ProvidersManager {
Ok(response) => {
let mut response = response.clone();
response.conversation_id = conversation_id.clone();
response.message_id = message_id.clone();
let _ = app
.emit_all("opla-sse", response)
.map_err(|err| err.to_string());
Expand All @@ -473,6 +475,8 @@ impl ProvidersManager {
).await
.map_err(|err| err.to_string())?
};
response.conversation_id = conversation_id.clone();
response.message_id = message_id.clone();
let _ = app.emit_all("opla-sse", response).map_err(|err| err.to_string());
return Ok(());
}
Expand Down
2 changes: 1 addition & 1 deletion webapp/native/src/providers/openai.rs
Original file line number Diff line number Diff line change
Expand Up @@ -188,7 +188,7 @@ impl OpenAIChatCompletion {
fn to_llm_response(&self) -> LlmCompletionResponse {
let mut response = LlmCompletionResponse::new(
self.created,
"success",
"finished",
&self.choices[0].message.content
);
let usage = LlmUsage {
Expand Down

0 comments on commit 4404b6f

Please sign in to comment.