diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/deepseek-chat.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/deepseek-chat.adoc
index e33a8711246..08bd57a8e20 100644
--- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/deepseek-chat.adoc
+++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/deepseek-chat.adoc
@@ -145,13 +145,13 @@ ChatResponse response = chatModel.call(
     new Prompt(
         "Generate the names of 5 famous pirates.",
         OpenAiChatOptions.builder()
-            .withModel("deepseek-chat")
-            .withTemperature(0.4)
+            .model("deepseek-chat")
+            .temperature(0.4)
         .build()
     ));
 ----
 
-TIP: In addition to the model specific https://github.com/spring-projects/spring-ai/blob/main/models/spring-ai-openai/src/main/java/org/springframework/ai/openai/OpenAiChatOptions.java[OpenAiChatOptions] you can use a portable https://github.com/spring-projects/spring-ai/blob/main/spring-ai-core/src/main/java/org/springframework/ai/chat/prompt/ChatOptions.java[ChatOptions] instance, created with the https://github.com/spring-projects/spring-ai/blob/main/spring-ai-core/src/main/java/org/springframework/ai/chat/prompt/ChatOptionsBuilder.java[ChatOptionsBuilder#builder()].
+TIP: In addition to the model specific https://github.com/spring-projects/spring-ai/blob/main/models/spring-ai-openai/src/main/java/org/springframework/ai/openai/OpenAiChatOptions.java[OpenAiChatOptions] you can use a portable https://github.com/spring-projects/spring-ai/blob/main/spring-ai-core/src/main/java/org/springframework/ai/chat/prompt/ChatOptions.java[ChatOptions] instance, created with the https://github.com/spring-projects/spring-ai/blob/main/spring-ai-core/src/main/java/org/springframework/ai/chat/prompt/ChatOptions.java[ChatOptions#builder()].
 
 == Function Calling
diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/groq-chat.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/groq-chat.adoc
index 2f7adef52eb..a5a7a182f6a 100644
--- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/groq-chat.adoc
+++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/groq-chat.adoc
@@ -150,13 +150,13 @@ ChatResponse response = chatModel.call(
     new Prompt(
         "Generate the names of 5 famous pirates.",
         OpenAiChatOptions.builder()
-            .withModel("mixtral-8x7b-32768")
-            .withTemperature(0.4)
+            .model("mixtral-8x7b-32768")
+            .temperature(0.4)
         .build()
     ));
 ----
 
-TIP: In addition to the model specific https://github.com/spring-projects/spring-ai/blob/main/models/spring-ai-openai/src/main/java/org/springframework/ai/openai/OpenAiChatOptions.java[OpenAiChatOptions] you can use a portable https://github.com/spring-projects/spring-ai/blob/main/spring-ai-core/src/main/java/org/springframework/ai/chat/prompt/ChatOptions.java[ChatOptions] instance, created with the https://github.com/spring-projects/spring-ai/blob/main/spring-ai-core/src/main/java/org/springframework/ai/chat/prompt/ChatOptionsBuilder.java[ChatOptionsBuilder#builder()].
+TIP: In addition to the model specific https://github.com/spring-projects/spring-ai/blob/main/models/spring-ai-openai/src/main/java/org/springframework/ai/openai/OpenAiChatOptions.java[OpenAiChatOptions] you can use a portable https://github.com/spring-projects/spring-ai/blob/main/spring-ai-core/src/main/java/org/springframework/ai/chat/prompt/ChatOptions.java[ChatOptions] instance, created with the https://github.com/spring-projects/spring-ai/blob/main/spring-ai-core/src/main/java/org/springframework/ai/chat/prompt/ChatOptions.java[ChatOptions#builder()].
 
 == Function Calling
 
@@ -303,9 +303,9 @@ Next, create a `OpenAiChatModel` and use it for text generations:
 ----
 var openAiApi = new OpenAiApi("https://api.groq.com/openai", System.getenv("GROQ_API_KEY"));
 var openAiChatOptions = OpenAiChatOptions.builder()
-    .withModel("llama3-70b-8192")
-    .withTemperature(0.4)
-    .withMaxTokens(200)
+    .model("llama3-70b-8192")
+    .temperature(0.4)
+    .maxTokens(200)
     .build();
 
 var chatModel = new OpenAiChatModel(this.openAiApi, this.openAiChatOptions);
diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/nvidia-chat.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/nvidia-chat.adoc
index 17bc1204ae4..44ccda71a28 100644
--- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/nvidia-chat.adoc
+++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/nvidia-chat.adoc
@@ -121,13 +121,13 @@ ChatResponse response = chatModel.call(
     new Prompt(
         "Generate the names of 5 famous pirates.",
         OpenAiChatOptions.builder()
-            .withModel("mixtral-8x7b-32768")
-            .withTemperature(0.4)
+            .model("mixtral-8x7b-32768")
+            .temperature(0.4)
         .build()
     ));
 ----
 
-TIP: In addition to the model specific https://github.com/spring-projects/spring-ai/blob/main/models/spring-ai-openai/src/main/java/org/springframework/ai/openai/OpenAiChatOptions.java[OpenAiChatOptions] you can use a portable https://github.com/spring-projects/spring-ai/blob/main/spring-ai-core/src/main/java/org/springframework/ai/chat/prompt/ChatOptions.java[ChatOptions] instance, created with the https://github.com/spring-projects/spring-ai/blob/main/spring-ai-core/src/main/java/org/springframework/ai/chat/prompt/ChatOptionsBuilder.java[ChatOptionsBuilder#builder()].
+TIP: In addition to the model specific https://github.com/spring-projects/spring-ai/blob/main/models/spring-ai-openai/src/main/java/org/springframework/ai/openai/OpenAiChatOptions.java[OpenAiChatOptions] you can use a portable https://github.com/spring-projects/spring-ai/blob/main/spring-ai-core/src/main/java/org/springframework/ai/chat/prompt/ChatOptions.java[ChatOptions] instance, created with the https://github.com/spring-projects/spring-ai/blob/main/spring-ai-core/src/main/java/org/springframework/ai/chat/prompt/ChatOptions.java[ChatOptions#builder()].
 
 == Function Calling
diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/ollama-chat.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/ollama-chat.adoc
index 3d2a03efac5..7e6a0b40a9c 100644
--- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/ollama-chat.adoc
+++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/ollama-chat.adoc
@@ -309,8 +309,8 @@ String jsonSchema = """
 
 Prompt prompt = new Prompt("how can I solve 8x + 7 = -23",
         OllamaOptions.builder()
-            .withModel(OllamaModel.LLAMA3_2.getName())
-            .withFormat(new ObjectMapper().readValue(jsonSchema, Map.class))
+            .model(OllamaModel.LLAMA3_2.getName())
+            .format(new ObjectMapper().readValue(jsonSchema, Map.class))
             .build());
 
 ChatResponse response = this.ollamaChatModel.call(this.prompt);
@@ -340,8 +340,8 @@ var outputConverter = new BeanOutputConverter<>(MathReasoning.class);
 
 Prompt prompt = new Prompt("how can I solve 8x + 7 = -23",
         OllamaOptions.builder()
-            .withModel(OllamaModel.LLAMA3_2.getName())
-            .withFormat(outputConverter.getJsonSchemaMap())
+            .model(OllamaModel.LLAMA3_2.getName())
+            .format(outputConverter.getJsonSchemaMap())
             .build());
 
 ChatResponse response = this.ollamaChatModel.call(this.prompt);
@@ -466,10 +466,14 @@ Next, create an `OllamaChatModel` instance and use it to send requests for text
 ----
 var ollamaApi = new OllamaApi();
 
-var chatModel = new OllamaChatModel(this.ollamaApi,
-            OllamaOptions.create()
-                .model(OllamaOptions.DEFAULT_MODEL)
-                .temperature(0.9));
+var chatModel = OllamaChatModel.builder()
+            .ollamaApi(ollamaApi)
+            .defaultOptions(
+                OllamaOptions.builder()
+                    .model(OllamaModel.MISTRAL)
+                    .temperature(0.9)
+                    .build())
+            .build();
 
 ChatResponse response = this.chatModel.call(
     new Prompt("Generate the names of 5 famous pirates."));
@@ -508,7 +512,7 @@ var request = ChatRequest.builder("orca-mini")
         .content("What is the capital of Bulgaria and what is the size? "
                 + "What is the national anthem?")
         .build()))
-    .options(OllamaOptions.create().temperature(0.9))
+    .options(OllamaOptions.builder().temperature(0.9).build())
     .build();
 
 ChatResponse response = this.ollamaApi.chat(this.request);
@@ -519,7 +523,7 @@ var request2 = ChatRequest.builder("orca-mini")
     .messages(List.of(Message.builder(Role.USER)
         .content("What is the capital of Bulgaria and what is the size? " + "What is the national anthem?")
         .build()))
-    .options(OllamaOptions.create().temperature(0.9).toMap())
+    .options(OllamaOptions.builder().temperature(0.9).build().toMap())
     .build();
 
 Flux<ChatResponse> streamingResponse = this.ollamaApi.streamingChat(this.request2);
diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/perplexity-chat.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/perplexity-chat.adoc
index 1bf3a109d95..ceeafcd507f 100644
--- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/perplexity-chat.adoc
+++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/perplexity-chat.adoc
@@ -147,7 +147,7 @@ ChatResponse response = chatModel.call(
     ));
 ----
 
-TIP: In addition to the model specific https://github.com/spring-projects/spring-ai/blob/main/models/spring-ai-openai/src/main/java/org/springframework/ai/openai/OpenAiChatOptions.java[OpenAiChatOptions] you can use a portable https://github.com/spring-projects/spring-ai/blob/main/spring-ai-core/src/main/java/org/springframework/ai/chat/prompt/ChatOptions.java[ChatOptions] instance, created with the https://github.com/spring-projects/spring-ai/blob/main/spring-ai-core/src/main/java/org/springframework/ai/chat/prompt/ChatOptions.java#L97[ChatOptions#builder()].
+TIP: In addition to the model specific https://github.com/spring-projects/spring-ai/blob/main/models/spring-ai-openai/src/main/java/org/springframework/ai/openai/OpenAiChatOptions.java[OpenAiChatOptions] you can use a portable https://github.com/spring-projects/spring-ai/blob/main/spring-ai-core/src/main/java/org/springframework/ai/chat/prompt/ChatOptions.java[ChatOptions] instance, created with the https://github.com/spring-projects/spring-ai/blob/main/spring-ai-core/src/main/java/org/springframework/ai/chat/prompt/ChatOptions.java[ChatOptions#builder()].
 
 == Function Calling