diff --git a/langchain4j-open-ai/src/main/java/dev/langchain4j/model/openai/OpenAiChatModel.java b/langchain4j-open-ai/src/main/java/dev/langchain4j/model/openai/OpenAiChatModel.java
index ffd6c4923b..39ba3d0cce 100644
--- a/langchain4j-open-ai/src/main/java/dev/langchain4j/model/openai/OpenAiChatModel.java
+++ b/langchain4j-open-ai/src/main/java/dev/langchain4j/model/openai/OpenAiChatModel.java
@@ -43,6 +43,7 @@ public class OpenAiChatModel implements ChatLanguageModel, TokenCountEstimator {
     @Builder
     public OpenAiChatModel(String baseUrl,
                            String apiKey,
+                           String organizationId,
                            String modelName,
                            Double temperature,
                            Double topP,
@@ -67,6 +68,7 @@ public OpenAiChatModel(String baseUrl,
         this.client = OpenAiClient.builder()
                 .openAiApiKey(apiKey)
                 .baseUrl(baseUrl)
+                .organizationId(organizationId)
                 .callTimeout(timeout)
                 .connectTimeout(timeout)
                 .readTimeout(timeout)
diff --git a/langchain4j-open-ai/src/main/java/dev/langchain4j/model/openai/OpenAiEmbeddingModel.java b/langchain4j-open-ai/src/main/java/dev/langchain4j/model/openai/OpenAiEmbeddingModel.java
index 20b9689608..59707a4e9f 100644
--- a/langchain4j-open-ai/src/main/java/dev/langchain4j/model/openai/OpenAiEmbeddingModel.java
+++ b/langchain4j-open-ai/src/main/java/dev/langchain4j/model/openai/OpenAiEmbeddingModel.java
@@ -35,6 +35,7 @@ public class OpenAiEmbeddingModel implements EmbeddingModel, TokenCountEstimator
     @Builder
     public OpenAiEmbeddingModel(String baseUrl,
                                 String apiKey,
+                                String organizationId,
                                 String modelName,
                                 Duration timeout,
                                 Integer maxRetries,
@@ -53,6 +54,7 @@ public OpenAiEmbeddingModel(String baseUrl,
         this.client = OpenAiClient.builder()
                 .openAiApiKey(apiKey)
                 .baseUrl(baseUrl)
+                .organizationId(organizationId)
                 .callTimeout(timeout)
                 .connectTimeout(timeout)
                 .readTimeout(timeout)
diff --git a/langchain4j-open-ai/src/main/java/dev/langchain4j/model/openai/OpenAiLanguageModel.java b/langchain4j-open-ai/src/main/java/dev/langchain4j/model/openai/OpenAiLanguageModel.java
index 41a044c1ab..e2a3fed4ba 100644
--- a/langchain4j-open-ai/src/main/java/dev/langchain4j/model/openai/OpenAiLanguageModel.java
+++ b/langchain4j-open-ai/src/main/java/dev/langchain4j/model/openai/OpenAiLanguageModel.java
@@ -35,6 +35,7 @@ public class OpenAiLanguageModel implements LanguageModel, TokenCountEstimator {
     @Builder
     public OpenAiLanguageModel(String baseUrl,
                                String apiKey,
+                               String organizationId,
                                String modelName,
                                Double temperature,
                                Duration timeout,
@@ -49,6 +50,7 @@ public OpenAiLanguageModel(String baseUrl,
         this.client = OpenAiClient.builder()
                 .baseUrl(getOrDefault(baseUrl, OPENAI_URL))
                 .openAiApiKey(apiKey)
+                .organizationId(organizationId)
                 .callTimeout(timeout)
                 .connectTimeout(timeout)
                 .readTimeout(timeout)
diff --git a/langchain4j-open-ai/src/main/java/dev/langchain4j/model/openai/OpenAiModerationModel.java b/langchain4j-open-ai/src/main/java/dev/langchain4j/model/openai/OpenAiModerationModel.java
index 0422d2a664..17b0f48af2 100644
--- a/langchain4j-open-ai/src/main/java/dev/langchain4j/model/openai/OpenAiModerationModel.java
+++ b/langchain4j-open-ai/src/main/java/dev/langchain4j/model/openai/OpenAiModerationModel.java
@@ -36,6 +36,7 @@ public class OpenAiModerationModel implements ModerationModel {
     @Builder
     public OpenAiModerationModel(String baseUrl,
                                  String apiKey,
+                                 String organizationId,
                                  String modelName,
                                  Duration timeout,
                                  Integer maxRetries,
@@ -53,6 +54,7 @@ public OpenAiModerationModel(String baseUrl,
         this.client = OpenAiClient.builder()
                 .openAiApiKey(apiKey)
                 .baseUrl(baseUrl)
+                .organizationId(organizationId)
                 .callTimeout(timeout)
                 .connectTimeout(timeout)
                 .readTimeout(timeout)
diff --git a/langchain4j-open-ai/src/main/java/dev/langchain4j/model/openai/OpenAiStreamingChatModel.java b/langchain4j-open-ai/src/main/java/dev/langchain4j/model/openai/OpenAiStreamingChatModel.java
index d33d3d9c0f..a576d84587 100644
--- a/langchain4j-open-ai/src/main/java/dev/langchain4j/model/openai/OpenAiStreamingChatModel.java
+++ b/langchain4j-open-ai/src/main/java/dev/langchain4j/model/openai/OpenAiStreamingChatModel.java
@@ -46,6 +46,7 @@ public class OpenAiStreamingChatModel implements StreamingChatLanguageModel, Tok
     @Builder
     public OpenAiStreamingChatModel(String baseUrl,
                                     String apiKey,
+                                    String organizationId,
                                     String modelName,
                                     Double temperature,
                                     Double topP,
@@ -64,6 +65,7 @@ public OpenAiStreamingChatModel(String baseUrl,
         this.client = OpenAiClient.builder()
                 .baseUrl(getOrDefault(baseUrl, OPENAI_URL))
                 .openAiApiKey(apiKey)
+                .organizationId(organizationId)
                 .callTimeout(timeout)
                 .connectTimeout(timeout)
                 .readTimeout(timeout)
diff --git a/langchain4j-open-ai/src/main/java/dev/langchain4j/model/openai/OpenAiStreamingLanguageModel.java b/langchain4j-open-ai/src/main/java/dev/langchain4j/model/openai/OpenAiStreamingLanguageModel.java
index 7c9e56ee78..2bc66a50f1 100644
--- a/langchain4j-open-ai/src/main/java/dev/langchain4j/model/openai/OpenAiStreamingLanguageModel.java
+++ b/langchain4j-open-ai/src/main/java/dev/langchain4j/model/openai/OpenAiStreamingLanguageModel.java
@@ -34,6 +34,7 @@ public class OpenAiStreamingLanguageModel implements StreamingLanguageModel, Tok
     @Builder
     public OpenAiStreamingLanguageModel(String baseUrl,
                                         String apiKey,
+                                        String organizationId,
                                         String modelName,
                                         Double temperature,
                                         Duration timeout,
@@ -47,6 +48,7 @@ public OpenAiStreamingLanguageModel(String baseUrl,
         this.client = OpenAiClient.builder()
                 .baseUrl(getOrDefault(baseUrl, OPENAI_URL))
                 .openAiApiKey(apiKey)
+                .organizationId(organizationId)
                 .callTimeout(timeout)
                 .connectTimeout(timeout)
                 .readTimeout(timeout)
diff --git a/langchain4j-open-ai/src/test/java/dev/langchain4j/model/openai/OpenAiChatModelIT.java b/langchain4j-open-ai/src/test/java/dev/langchain4j/model/openai/OpenAiChatModelIT.java
index c475cc1570..c7ef1960f8 100644
--- a/langchain4j-open-ai/src/test/java/dev/langchain4j/model/openai/OpenAiChatModelIT.java
+++ b/langchain4j-open-ai/src/test/java/dev/langchain4j/model/openai/OpenAiChatModelIT.java
@@ -33,6 +33,7 @@ class OpenAiChatModelIT {
 
     ChatLanguageModel model = OpenAiChatModel.builder()
             .apiKey(System.getenv("OPENAI_API_KEY"))
+            .organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
             .temperature(0.0)
             .logRequests(true)
             .logResponses(true)
@@ -65,6 +66,7 @@ void should_generate_answer_and_return_token_usage_and_finish_reason_length() {
         // given
         ChatLanguageModel model = OpenAiChatModel.builder()
                 .apiKey(System.getenv("OPENAI_API_KEY"))
+                .organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
                 .maxTokens(3)
                 .build();
 
@@ -188,6 +190,7 @@ void should_execute_multiple_tools_in_parallel_then_answer() {
         // given
         ChatLanguageModel model = OpenAiChatModel.builder()
                 .apiKey(System.getenv("OPENAI_API_KEY"))
+                .organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
                 .modelName(GPT_3_5_TURBO_1106.toString()) // supports parallel function calling
                 .temperature(0.0)
                 .build();
diff --git a/langchain4j-open-ai/src/test/java/dev/langchain4j/model/openai/OpenAiLanguageModelIT.java b/langchain4j-open-ai/src/test/java/dev/langchain4j/model/openai/OpenAiLanguageModelIT.java
index 37849f7f17..0b762b1b97 100644
--- a/langchain4j-open-ai/src/test/java/dev/langchain4j/model/openai/OpenAiLanguageModelIT.java
+++ b/langchain4j-open-ai/src/test/java/dev/langchain4j/model/openai/OpenAiLanguageModelIT.java
@@ -12,6 +12,7 @@ class OpenAiLanguageModelIT {
 
     LanguageModel model = OpenAiLanguageModel.builder()
             .apiKey(System.getenv("OPENAI_API_KEY"))
+            .organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
             .logRequests(true)
             .logResponses(true)
             .build();
diff --git a/langchain4j-open-ai/src/test/java/dev/langchain4j/model/openai/OpenAiModerationModelIT.java b/langchain4j-open-ai/src/test/java/dev/langchain4j/model/openai/OpenAiModerationModelIT.java
index 1162128078..b5e982316f 100644
--- a/langchain4j-open-ai/src/test/java/dev/langchain4j/model/openai/OpenAiModerationModelIT.java
+++ b/langchain4j-open-ai/src/test/java/dev/langchain4j/model/openai/OpenAiModerationModelIT.java
@@ -10,6 +10,7 @@ class OpenAiModerationModelIT {
 
     ModerationModel model = OpenAiModerationModel.builder()
             .apiKey(System.getenv("OPENAI_API_KEY"))
+            .organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
             .build();
 
     @Test
diff --git a/langchain4j-open-ai/src/test/java/dev/langchain4j/model/openai/OpenAiStreamingChatModelIT.java b/langchain4j-open-ai/src/test/java/dev/langchain4j/model/openai/OpenAiStreamingChatModelIT.java
index bc85027899..645da8a941 100644
--- a/langchain4j-open-ai/src/test/java/dev/langchain4j/model/openai/OpenAiStreamingChatModelIT.java
+++ b/langchain4j-open-ai/src/test/java/dev/langchain4j/model/openai/OpenAiStreamingChatModelIT.java
@@ -34,6 +34,7 @@ class OpenAiStreamingChatModelIT {
 
     StreamingChatLanguageModel model = OpenAiStreamingChatModel.builder()
             .apiKey(System.getenv("OPENAI_API_KEY"))
+            .organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
             .temperature(0.0)
             .logRequests(true)
             .logResponses(true)
@@ -288,6 +289,7 @@ void should_execute_multiple_tools_in_parallel_then_stream_answer() throws Excep
         // given
         StreamingChatLanguageModel model = OpenAiStreamingChatModel.builder()
                 .apiKey(System.getenv("OPENAI_API_KEY"))
+                .organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
                 .modelName(GPT_3_5_TURBO_1106.toString()) // supports parallel function calling
                 .temperature(0.0)
                 .logRequests(true)
diff --git a/langchain4j-open-ai/src/test/java/dev/langchain4j/model/openai/OpenAiStreamingLanguageModelIT.java b/langchain4j-open-ai/src/test/java/dev/langchain4j/model/openai/OpenAiStreamingLanguageModelIT.java
index 20dd1d507e..31cbd8f720 100644
--- a/langchain4j-open-ai/src/test/java/dev/langchain4j/model/openai/OpenAiStreamingLanguageModelIT.java
+++ b/langchain4j-open-ai/src/test/java/dev/langchain4j/model/openai/OpenAiStreamingLanguageModelIT.java
@@ -18,6 +18,7 @@ class OpenAiStreamingLanguageModelIT {
 
     StreamingLanguageModel model = OpenAiStreamingLanguageModel.builder()
             .apiKey(System.getenv("OPENAI_API_KEY"))
+            .organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
             .logRequests(true)
             .logResponses(true)
             .build();
diff --git a/langchain4j-parent/pom.xml b/langchain4j-parent/pom.xml
index 71eb676d27..a2dbacee88 100644
--- a/langchain4j-parent/pom.xml
+++ b/langchain4j-parent/pom.xml
@@ -17,7 +17,7 @@
         1.8
         1.8
         UTF-8
-        0.11.1
+        0.12.1
         1.0.0-beta.6
         2.9.0
         4.10.0
diff --git a/langchain4j-spring-boot-starter/src/main/java/dev/langchain4j/LangChain4jAutoConfiguration.java b/langchain4j-spring-boot-starter/src/main/java/dev/langchain4j/LangChain4jAutoConfiguration.java
index c0f5dab9c3..abdb0dba52 100644
--- a/langchain4j-spring-boot-starter/src/main/java/dev/langchain4j/LangChain4jAutoConfiguration.java
+++ b/langchain4j-spring-boot-starter/src/main/java/dev/langchain4j/LangChain4jAutoConfiguration.java
@@ -51,6 +51,7 @@ ChatLanguageModel chatLanguageModel(LangChain4jProperties properties) {
         return OpenAiChatModel.builder()
                 .baseUrl(openAi.getBaseUrl())
                 .apiKey(openAi.getApiKey())
+                .organizationId(openAi.getOrganizationId())
                 .modelName(openAi.getModelName())
                 .temperature(openAi.getTemperature())
                 .topP(openAi.getTopP())
@@ -142,6 +143,7 @@ LanguageModel languageModel(LangChain4jProperties properties) {
         }
         return OpenAiLanguageModel.builder()
                 .apiKey(openAi.getApiKey())
+                .organizationId(openAi.getOrganizationId())
                 .modelName(openAi.getModelName())
                 .temperature(openAi.getTemperature())
                 .timeout(openAi.getTimeout())
@@ -229,6 +231,7 @@ EmbeddingModel embeddingModel(LangChain4jProperties properties) {
         }
         return OpenAiEmbeddingModel.builder()
                 .apiKey(openAi.getApiKey())
+                .organizationId(openAi.getOrganizationId())
                 .modelName(openAi.getModelName())
                 .timeout(openAi.getTimeout())
                 .maxRetries(openAi.getMaxRetries())
@@ -306,6 +309,7 @@ ModerationModel moderationModel(LangChain4jProperties properties) {
 
         return OpenAiModerationModel.builder()
                 .apiKey(openAi.getApiKey())
+                .organizationId(openAi.getOrganizationId())
                 .modelName(openAi.getModelName())
                 .timeout(openAi.getTimeout())
                 .maxRetries(openAi.getMaxRetries())
diff --git a/langchain4j-spring-boot-starter/src/main/java/dev/langchain4j/OpenAi.java b/langchain4j-spring-boot-starter/src/main/java/dev/langchain4j/OpenAi.java
index 93251380c0..4a821cea6c 100644
--- a/langchain4j-spring-boot-starter/src/main/java/dev/langchain4j/OpenAi.java
+++ b/langchain4j-spring-boot-starter/src/main/java/dev/langchain4j/OpenAi.java
@@ -6,6 +6,7 @@ class OpenAi {
 
     private String baseUrl;
     private String apiKey;
+    private String organizationId;
     private String modelName;
     private Double temperature;
     private Double topP;
@@ -33,6 +34,14 @@ public void setApiKey(String apiKey) {
         this.apiKey = apiKey;
     }
 
+    public String getOrganizationId() {
+        return organizationId;
+    }
+
+    public void setOrganizationId(String organizationId) {
+        this.organizationId = organizationId;
+    }
+
     public String getModelName() {
         return modelName;
     }
diff --git a/langchain4j/src/test/java/dev/langchain4j/service/AiServicesIT.java b/langchain4j/src/test/java/dev/langchain4j/service/AiServicesIT.java
index 7053c1432f..75abd7cd23 100644
--- a/langchain4j/src/test/java/dev/langchain4j/service/AiServicesIT.java
+++ b/langchain4j/src/test/java/dev/langchain4j/service/AiServicesIT.java
@@ -56,6 +56,7 @@ public class AiServicesIT {
     @Spy
     ChatLanguageModel chatLanguageModel = OpenAiChatModel.builder()
             .apiKey(System.getenv("OPENAI_API_KEY"))
+            .organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
             .temperature(0.0)
             .logRequests(true)
             .logResponses(true)
@@ -67,6 +68,7 @@ public class AiServicesIT {
     @Spy
     ModerationModel moderationModel = OpenAiModerationModel.builder()
             .apiKey(System.getenv("OPENAI_API_KEY"))
+            .organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
             .build();
 
     ToolSpecification calculatorSpecification = ToolSpecification.builder()
@@ -845,6 +847,7 @@ void should_execute_multiple_tools_in_parallel_then_answer() {
 
         ChatLanguageModel chatLanguageModel = spy(OpenAiChatModel.builder()
                 .apiKey(System.getenv("OPENAI_API_KEY"))
+                .organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
                 .modelName(GPT_3_5_TURBO_1106)
                 .temperature(0.0)
                 .logRequests(true)
diff --git a/langchain4j/src/test/java/dev/langchain4j/service/StreamingAiServicesIT.java b/langchain4j/src/test/java/dev/langchain4j/service/StreamingAiServicesIT.java
index 786b6963b0..2de09de175 100644
--- a/langchain4j/src/test/java/dev/langchain4j/service/StreamingAiServicesIT.java
+++ b/langchain4j/src/test/java/dev/langchain4j/service/StreamingAiServicesIT.java
@@ -30,6 +30,7 @@ public class StreamingAiServicesIT {
 
     StreamingChatLanguageModel streamingChatModel = OpenAiStreamingChatModel.builder()
             .apiKey(System.getenv("OPENAI_API_KEY"))
+            .organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
             .temperature(0.0)
             .logRequests(true)
             .logResponses(true)
@@ -298,6 +299,7 @@ void should_execute_multiple_tools_in_parallel_then_answer() throws Exception {
 
         StreamingChatLanguageModel streamingChatModel = OpenAiStreamingChatModel.builder()
                 .apiKey(System.getenv("OPENAI_API_KEY"))
+                .organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
                 .modelName(GPT_3_5_TURBO_1106)
                 .temperature(0.0)
                 .logRequests(true)
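
Usage note (not part of the diff): a minimal sketch of how the new builder option is used, mirroring the integration tests above, which read the organization ID from the OPENAI_ORGANIZATION_ID environment variable. The class name below is illustrative; the organization ID is optional and, when set, is forwarded to the underlying OpenAiClient and sent with each request.

import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;

public class OrganizationIdExample {

    public static void main(String[] args) {

        // Build a chat model that attributes requests to a specific OpenAI organization.
        // organizationId(...) is the option introduced by this change; apiKey is still required.
        ChatLanguageModel model = OpenAiChatModel.builder()
                .apiKey(System.getenv("OPENAI_API_KEY"))
                .organizationId(System.getenv("OPENAI_ORGANIZATION_ID"))
                .build();

        System.out.println(model.generate("Hello!"));
    }
}

For the Spring Boot starter, the new organizationId field on the OpenAi properties class should be reachable through Spring's relaxed binding (e.g. an organization-id key under the existing OpenAI property group), in the same way as the other fields on that class.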