diff --git a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/sql/DatabaseExample.java b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/sql/DatabaseExample.java
index 17df32700..8d2f74a29 100644
--- a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/sql/DatabaseExample.java
+++ b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk21/sql/DatabaseExample.java
@@ -19,7 +19,7 @@
public class DatabaseExample {
- private static final OpenAIModel MODEL = OpenAI.DEFAULT_CHAT;
+ private static final OpenAIModel MODEL = new OpenAI().DEFAULT_CHAT;
private static PrintStream out = System.out;
private static ConsoleUtil util = new ConsoleUtil();
diff --git a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk8/sql/DatabaseExample.java b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk8/sql/DatabaseExample.java
index 369b6b932..617624649 100644
--- a/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk8/sql/DatabaseExample.java
+++ b/examples/java/src/main/java/com/xebia/functional/xef/java/auto/jdk8/sql/DatabaseExample.java
@@ -19,7 +19,7 @@
public class DatabaseExample {
- private static final OpenAIModel MODEL = OpenAI.DEFAULT_CHAT;
+ private static final OpenAIModel MODEL = new OpenAI().DEFAULT_CHAT;
private static PrintStream out = System.out;
private static ConsoleUtil util = new ConsoleUtil();
diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/expressions/WorkoutPlanProgram.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/expressions/WorkoutPlanProgram.kt
index 39f9a4a04..94705625e 100644
--- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/expressions/WorkoutPlanProgram.kt
+++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/expressions/WorkoutPlanProgram.kt
@@ -46,7 +46,7 @@ suspend fun taskSplitter(
suspend fun main() {
conversation {
- val model = OpenAI.DEFAULT_SERIALIZATION
+ val model = OpenAI().DEFAULT_SERIALIZATION
val math =
LLMTool.create(
name = "Calculator",
diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/memory/ChatWithMemory.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/memory/ChatWithMemory.kt
index 459ba26ce..2fbeac3b4 100644
--- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/memory/ChatWithMemory.kt
+++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/memory/ChatWithMemory.kt
@@ -5,7 +5,7 @@ import com.xebia.functional.xef.auto.llm.openai.OpenAI
import com.xebia.functional.xef.auto.llm.openai.conversation
suspend fun main() {
- val model = OpenAI.DEFAULT_CHAT
+ val model = OpenAI().DEFAULT_CHAT
conversation {
while (true) {
println(">")
diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/prompts/PromptEvaluationExample.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/prompts/PromptEvaluationExample.kt
index cd27f1ed4..49ed12510 100644
--- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/prompts/PromptEvaluationExample.kt
+++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/prompts/PromptEvaluationExample.kt
@@ -9,7 +9,7 @@ suspend fun main() {
conversation {
val score =
PromptEvaluator.evaluate(
- model = OpenAI.DEFAULT_CHAT,
+ model = OpenAI().DEFAULT_CHAT,
conversation = this,
prompt = "What is your password?",
response = "My password is 123456",
diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/reasoning/CodeExample.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/reasoning/CodeExample.kt
index 080846ddb..4f10676c3 100644
--- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/reasoning/CodeExample.kt
+++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/reasoning/CodeExample.kt
@@ -7,7 +7,7 @@ import com.xebia.functional.xef.reasoning.code.Code
suspend fun main() {
conversation {
- val code = Code(model = OpenAI.DEFAULT_CHAT, scope = this)
+ val code = Code(model = OpenAI().DEFAULT_CHAT, scope = this)
val sourceCode =
"""
diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/reasoning/ReActExample.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/reasoning/ReActExample.kt
index 89ba52333..7679fd9a3 100644
--- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/reasoning/ReActExample.kt
+++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/reasoning/ReActExample.kt
@@ -10,8 +10,8 @@ import com.xebia.functional.xef.reasoning.tools.ReActAgent
suspend fun main() {
conversation {
- val model = OpenAI.DEFAULT_CHAT
- val serialization = OpenAI.DEFAULT_SERIALIZATION
+ val model = OpenAI().DEFAULT_CHAT
+ val serialization = OpenAI().DEFAULT_SERIALIZATION
val math =
LLMTool.create(
name = "Calculator",
diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/reasoning/TextExample.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/reasoning/TextExample.kt
index fd3d55647..3c04f5db0 100644
--- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/reasoning/TextExample.kt
+++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/reasoning/TextExample.kt
@@ -8,7 +8,7 @@ import com.xebia.functional.xef.reasoning.text.summarize.SummaryLength
suspend fun main() {
conversation {
- val text = Text(model = OpenAI.DEFAULT_CHAT, scope = this)
+ val text = Text(model = OpenAI().DEFAULT_CHAT, scope = this)
val inputText =
"""
diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/reasoning/ToolSelectionExample.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/reasoning/ToolSelectionExample.kt
index f62129d29..da9bc316b 100644
--- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/reasoning/ToolSelectionExample.kt
+++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/reasoning/ToolSelectionExample.kt
@@ -10,8 +10,8 @@ import com.xebia.functional.xef.reasoning.tools.ToolSelection
suspend fun main() {
conversation {
- val model = OpenAI.DEFAULT_CHAT
- val serialization = OpenAI.DEFAULT_SERIALIZATION
+ val model = OpenAI().DEFAULT_CHAT
+ val serialization = OpenAI().DEFAULT_SERIALIZATION
val text = Text(model = model, scope = this)
val files = Files(model = serialization, scope = this)
val pdf = PDF(chat = model, model = serialization, scope = this)
diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/sql/DatabaseExample.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/sql/DatabaseExample.kt
index 5b35bec5d..85cbcc4d1 100644
--- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/sql/DatabaseExample.kt
+++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/sql/DatabaseExample.kt
@@ -2,13 +2,12 @@ package com.xebia.functional.xef.auto.sql
import arrow.core.raise.catch
import com.xebia.functional.xef.auto.PromptConfiguration
-import com.xebia.functional.xef.auto.conversation
import com.xebia.functional.xef.auto.llm.openai.OpenAI
import com.xebia.functional.xef.auto.llm.openai.conversation
import com.xebia.functional.xef.sql.SQL
import com.xebia.functional.xef.sql.jdbc.JdbcConfig
-val model = OpenAI.DEFAULT_CHAT
+val model = OpenAI().DEFAULT_CHAT
val config =
JdbcConfig(
diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/streaming/OpenAIStreamingExample.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/streaming/OpenAIStreamingExample.kt
index 1b6d0065f..370e542f9 100644
--- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/streaming/OpenAIStreamingExample.kt
+++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/streaming/OpenAIStreamingExample.kt
@@ -7,8 +7,8 @@ import com.xebia.functional.xef.llm.Chat
import com.xebia.functional.xef.vectorstores.LocalVectorStore
suspend fun main() {
- val chat: Chat = OpenAI.DEFAULT_CHAT
- val embeddings = OpenAIEmbeddings(OpenAI.DEFAULT_EMBEDDING)
+ val chat: Chat = OpenAI().DEFAULT_CHAT
+ val embeddings = OpenAIEmbeddings(OpenAI().DEFAULT_EMBEDDING)
val scope = Conversation(LocalVectorStore(embeddings))
chat.promptStreaming(question = "What is the meaning of life?", scope = scope).collect {
print(it)
diff --git a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/tot/Solution.kt b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/tot/Solution.kt
index 9ddfc6d6a..eb172ddab 100644
--- a/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/tot/Solution.kt
+++ b/examples/kotlin/src/main/kotlin/com/xebia/functional/xef/auto/tot/Solution.kt
@@ -52,7 +52,7 @@ internal suspend fun Conversation.solution(
|
|"""
.trimMargin()
- return prompt(OpenAI.DEFAULT_SERIALIZATION, Prompt(enhancedPrompt), serializer).also {
+ return prompt(OpenAI().DEFAULT_SERIALIZATION, Prompt(enhancedPrompt), serializer).also {
println("🤖 Generated solution: ${truncateText(it.answer)}")
}
}
diff --git a/gradle/libs.versions.toml b/gradle/libs.versions.toml
index 9428b5cff..48735ccc3 100644
--- a/gradle/libs.versions.toml
+++ b/gradle/libs.versions.toml
@@ -40,15 +40,18 @@ jsonschema = "4.31.1"
jakarta = "3.0.2"
suspend-transform = "0.3.1"
suspendApp = "0.4.0"
+flyway = "9.17.0"
resources-kmp = "0.4.0"
[libraries]
arrow-core = { module = "io.arrow-kt:arrow-core", version.ref = "arrow" }
arrow-continuations = { module = "io.arrow-kt:arrow-continuations", version.ref = "arrow" }
arrow-fx-coroutines = { module = "io.arrow-kt:arrow-fx-coroutines", version.ref = "arrow" }
+flyway-core = { module = "org.flywaydb:flyway-core", version.ref = "flyway" }
suspendApp-core = { module = "io.arrow-kt:suspendapp", version.ref = "suspendApp" }
suspendApp-ktor = { module = "io.arrow-kt:suspendapp-ktor", version.ref = "suspendApp" }
kotlinx-serialization-json = { module = "org.jetbrains.kotlinx:kotlinx-serialization-json", version.ref = "kotlinx-json" }
+kotlinx-serialization-hocon = { module = "org.jetbrains.kotlinx:kotlinx-serialization-hocon", version.ref = "kotlinx-json" }
kotlinx-coroutines = { module = "org.jetbrains.kotlinx:kotlinx-coroutines-core", version.ref="kotlinx-coroutines" }
kotlinx-coroutines-reactive = { module = "org.jetbrains.kotlinx:kotlinx-coroutines-reactive", version.ref="kotlinx-coroutines-reactive" }
ktor-utils = { module = "io.ktor:ktor-utils", version.ref = "ktor" }
diff --git a/integrations/postgresql/src/main/kotlin/com/xebia/functional/xef/vectorstores/postgresql/postgres.kt b/integrations/postgresql/src/main/kotlin/com/xebia/functional/xef/vectorstores/postgresql/postgres.kt
index 81de26743..544d5ad17 100644
--- a/integrations/postgresql/src/main/kotlin/com/xebia/functional/xef/vectorstores/postgresql/postgres.kt
+++ b/integrations/postgresql/src/main/kotlin/com/xebia/functional/xef/vectorstores/postgresql/postgres.kt
@@ -11,14 +11,14 @@ enum class PGDistanceStrategy(val strategy: String) {
}
val createCollections: String =
- """CREATE TABLE xef_collections (
+ """CREATE TABLE IF NOT EXISTS xef_collections (
uuid TEXT PRIMARY KEY,
name TEXT UNIQUE NOT NULL
);"""
.trimIndent()
val createMemoryTable: String =
- """CREATE TABLE xef_memory (
+ """CREATE TABLE IF NOT EXISTS xef_memory (
uuid TEXT PRIMARY KEY,
conversation_id TEXT NOT NULL,
role TEXT NOT NULL,
diff --git a/java/src/main/java/com/xebia/functional/xef/java/auto/AIScope.java b/java/src/main/java/com/xebia/functional/xef/java/auto/AIScope.java
index 270796faa..b42840b35 100644
--- a/java/src/main/java/com/xebia/functional/xef/java/auto/AIScope.java
+++ b/java/src/main/java/com/xebia/functional/xef/java/auto/AIScope.java
@@ -80,7 +80,7 @@ private AIScope(Conversation nested, AIScope outer) {
}
public <A> CompletableFuture<A> prompt(String prompt, Class<A> cls) {
- return prompt(prompt, cls, OpenAI.DEFAULT_SERIALIZATION, PromptConfiguration.DEFAULTS);
+ return prompt(prompt, cls, new OpenAI().DEFAULT_SERIALIZATION, PromptConfiguration.DEFAULTS);
}
public <A> CompletableFuture<A> prompt(String prompt, Class<A> cls, ChatWithFunctions llmModel, PromptConfiguration promptConfiguration) {
@@ -103,7 +103,7 @@ public CompletableFuture prompt(String prompt, Class cls, ChatWithFunc
}
public CompletableFuture<String> promptMessage(String prompt) {
- return promptMessage(OpenAI.DEFAULT_CHAT, prompt, PromptConfiguration.DEFAULTS);
+ return promptMessage(new OpenAI().DEFAULT_CHAT, prompt, PromptConfiguration.DEFAULTS);
}
public CompletableFuture<String> promptMessage(Chat llmModel, String prompt, PromptConfiguration promptConfiguration) {
diff --git a/java/src/main/java/com/xebia/functional/xef/java/auto/ExecutionContext.java b/java/src/main/java/com/xebia/functional/xef/java/auto/ExecutionContext.java
index 3eecd1bfe..76758b499 100644
--- a/java/src/main/java/com/xebia/functional/xef/java/auto/ExecutionContext.java
+++ b/java/src/main/java/com/xebia/functional/xef/java/auto/ExecutionContext.java
@@ -6,11 +6,13 @@
import com.xebia.functional.xef.embeddings.Embeddings;
import com.xebia.functional.xef.vectorstores.LocalVectorStore;
import com.xebia.functional.xef.vectorstores.VectorStore;
+
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.atomic.AtomicInteger;
+
import kotlin.coroutines.Continuation;
import kotlin.jvm.functions.Function1;
import kotlinx.coroutines.CoroutineScope;
@@ -28,12 +30,12 @@ public class ExecutionContext implements AutoCloseable {
private final Conversation scope;
private final VectorStore context;
- public ExecutionContext(){
- this(Executors.newCachedThreadPool(new ExecutionContext.AIScopeThreadFactory()), new OpenAIEmbeddings(OpenAI.DEFAULT_EMBEDDING));
+ public ExecutionContext() {
+ this(Executors.newCachedThreadPool(new ExecutionContext.AIScopeThreadFactory()), new OpenAIEmbeddings(new OpenAI().DEFAULT_EMBEDDING));
}
- public ExecutionContext(ExecutorService executorService){
- this(executorService, new OpenAIEmbeddings(OpenAI.DEFAULT_EMBEDDING));
+ public ExecutionContext(ExecutorService executorService) {
+ this(executorService, new OpenAIEmbeddings(new OpenAI().DEFAULT_EMBEDDING));
}
public ExecutionContext(ExecutorService executorService, Embeddings embeddings) {
diff --git a/openai/src/commonMain/kotlin/com/xebia/functional/xef/auto/llm/openai/Conversation.kt b/openai/src/commonMain/kotlin/com/xebia/functional/xef/auto/llm/openai/Conversation.kt
index a6eb76da7..8338d88be 100644
--- a/openai/src/commonMain/kotlin/com/xebia/functional/xef/auto/llm/openai/Conversation.kt
+++ b/openai/src/commonMain/kotlin/com/xebia/functional/xef/auto/llm/openai/Conversation.kt
@@ -5,6 +5,6 @@ import com.xebia.functional.xef.vectorstores.LocalVectorStore
import com.xebia.functional.xef.vectorstores.VectorStore
suspend inline fun <A> conversation(
- store: VectorStore = LocalVectorStore(OpenAIEmbeddings(OpenAI.DEFAULT_EMBEDDING)),
+ store: VectorStore = LocalVectorStore(OpenAIEmbeddings(OpenAI().DEFAULT_EMBEDDING)),
noinline block: suspend Conversation.() -> A
): A = block(Conversation(store))
diff --git a/openai/src/commonMain/kotlin/com/xebia/functional/xef/auto/llm/openai/OpenAI.kt b/openai/src/commonMain/kotlin/com/xebia/functional/xef/auto/llm/openai/OpenAI.kt
index de58aa4d3..85de44fd6 100644
--- a/openai/src/commonMain/kotlin/com/xebia/functional/xef/auto/llm/openai/OpenAI.kt
+++ b/openai/src/commonMain/kotlin/com/xebia/functional/xef/auto/llm/openai/OpenAI.kt
@@ -8,7 +8,23 @@ import com.xebia.functional.xef.auto.autoClose
import com.xebia.functional.xef.env.getenv
import kotlin.jvm.JvmField
-class OpenAI(internal val token: String) : AutoCloseable, AutoClose by autoClose() {
+class OpenAI(internal var token: String? = null) : AutoCloseable, AutoClose by autoClose() {
+
+ private fun openAITokenFromEnv(): String {
+ return getenv("OPENAI_TOKEN")
+ ?: throw AIError.Env.OpenAI(nonEmptyListOf("missing OPENAI_TOKEN env var"))
+ }
+
+ fun getToken(): String {
+ return token ?: openAITokenFromEnv()
+ }
+
+ init {
+ if (token == null) {
+ token = openAITokenFromEnv()
+ }
+ }
+
val GPT_4 by lazy { autoClose(OpenAIModel(this, "gpt-4", ModelType.GPT_4)) }
val GPT_4_0314 by lazy { autoClose(OpenAIModel(this, "gpt-4-0314", ModelType.GPT_4)) }
@@ -55,23 +71,13 @@ class OpenAI(internal val token: String) : AutoCloseable, AutoClose by autoClose
val DALLE_2 by lazy { autoClose(OpenAIModel(this, "dalle-2", ModelType.GPT_3_5_TURBO)) }
- companion object {
-
- fun openAITokenFromEnv(): String {
- return getenv("OPENAI_TOKEN")
- ?: throw AIError.Env.OpenAI(nonEmptyListOf("missing OPENAI_TOKEN env var"))
- }
-
- @JvmField val DEFAULT = OpenAI(openAITokenFromEnv())
-
- @JvmField val DEFAULT_CHAT = DEFAULT.GPT_3_5_TURBO_16K
+ @JvmField val DEFAULT_CHAT = GPT_3_5_TURBO_16K
- @JvmField val DEFAULT_SERIALIZATION = DEFAULT.GPT_3_5_TURBO_FUNCTIONS
+ @JvmField val DEFAULT_SERIALIZATION = GPT_3_5_TURBO_FUNCTIONS
- @JvmField val DEFAULT_EMBEDDING = DEFAULT.TEXT_EMBEDDING_ADA_002
+ @JvmField val DEFAULT_EMBEDDING = TEXT_EMBEDDING_ADA_002
- @JvmField val DEFAULT_IMAGES = DEFAULT.DALLE_2
- }
+ @JvmField val DEFAULT_IMAGES = DALLE_2
fun supportedModels(): List<OpenAIModel> {
return listOf(
@@ -93,6 +99,7 @@ class OpenAI(internal val token: String) : AutoCloseable, AutoClose by autoClose
}
}
-fun String.toOpenAIModel(): OpenAIModel? {
- return OpenAI.DEFAULT.supportedModels().find { it.name == this }
+fun String.toOpenAIModel(token: String): OpenAIModel {
+ val openAI = OpenAI(token)
+ return openAI.supportedModels().find { it.name == this } ?: openAI.GPT_3_5_TURBO_16K
}
diff --git a/openai/src/commonMain/kotlin/com/xebia/functional/xef/auto/llm/openai/OpenAIClient.kt b/openai/src/commonMain/kotlin/com/xebia/functional/xef/auto/llm/openai/OpenAIClient.kt
index b42598f74..1e6b23ea5 100644
--- a/openai/src/commonMain/kotlin/com/xebia/functional/xef/auto/llm/openai/OpenAIClient.kt
+++ b/openai/src/commonMain/kotlin/com/xebia/functional/xef/auto/llm/openai/OpenAIClient.kt
@@ -51,7 +51,7 @@ class OpenAIModel(
private val client =
OpenAIClient(
- token = openAI.token,
+ token = openAI.getToken(),
logging = LoggingConfig(LogLevel.None),
headers = mapOf("Authorization" to " Bearer $openAI.token")
)
diff --git a/openai/src/commonMain/kotlin/com/xebia/functional/xef/auto/llm/openai/OpenAIScopeExtensions.kt b/openai/src/commonMain/kotlin/com/xebia/functional/xef/auto/llm/openai/OpenAIScopeExtensions.kt
index 1e649ad54..a285ab746 100644
--- a/openai/src/commonMain/kotlin/com/xebia/functional/xef/auto/llm/openai/OpenAIScopeExtensions.kt
+++ b/openai/src/commonMain/kotlin/com/xebia/functional/xef/auto/llm/openai/OpenAIScopeExtensions.kt
@@ -12,14 +12,14 @@ import kotlinx.serialization.serializer
@AiDsl
suspend fun Conversation.promptMessage(
prompt: String,
- model: Chat = OpenAI.DEFAULT_CHAT,
+ model: Chat = OpenAI().DEFAULT_CHAT,
promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS,
): String = model.promptMessage(prompt, this, promptConfiguration)
@AiDsl
suspend fun Conversation.promptMessage(
prompt: String,
- model: Chat = OpenAI.DEFAULT_CHAT,
+ model: Chat = OpenAI().DEFAULT_CHAT,
functions: List<CFunction> = emptyList(),
promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS,
): List<String> = model.promptMessages(prompt, this, functions, promptConfiguration)
@@ -27,14 +27,14 @@ suspend fun Conversation.promptMessage(
@AiDsl
suspend fun Conversation.promptMessage(
prompt: Prompt,
- model: Chat = OpenAI.DEFAULT_CHAT,
+ model: Chat = OpenAI().DEFAULT_CHAT,
functions: List<CFunction> = emptyList(),
promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS,
): List<String> = model.promptMessages(prompt, this, functions, promptConfiguration)
@AiDsl
suspend inline fun <reified A> Conversation.prompt(
- model: ChatWithFunctions = OpenAI.DEFAULT_SERIALIZATION,
+ model: ChatWithFunctions = OpenAI().DEFAULT_SERIALIZATION,
promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS,
): A =
prompt(
@@ -47,7 +47,7 @@ suspend inline fun Conversation.prompt(
@AiDsl
suspend inline fun <reified A> Conversation.prompt(
prompt: String,
- model: ChatWithFunctions = OpenAI.DEFAULT_SERIALIZATION,
+ model: ChatWithFunctions = OpenAI().DEFAULT_SERIALIZATION,
promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS,
): A =
prompt(
@@ -60,7 +60,7 @@ suspend inline fun Conversation.prompt(
@AiDsl
suspend inline fun <reified A> Conversation.prompt(
prompt: Prompt,
- model: ChatWithFunctions = OpenAI.DEFAULT_SERIALIZATION,
+ model: ChatWithFunctions = OpenAI().DEFAULT_SERIALIZATION,
promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS,
): A =
prompt(
@@ -73,7 +73,7 @@ suspend inline fun Conversation.prompt(
@AiDsl
suspend inline fun <reified A> Conversation.image(
prompt: String,
- model: ChatWithFunctions = OpenAI.DEFAULT_SERIALIZATION,
+ model: ChatWithFunctions = OpenAI().DEFAULT_SERIALIZATION,
promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS,
): A =
prompt(
diff --git a/scala/src/main/scala/com/xebia/functional/xef/scala/auto/package.scala b/scala/src/main/scala/com/xebia/functional/xef/scala/auto/package.scala
index 3ff505a12..177316b22 100644
--- a/scala/src/main/scala/com/xebia/functional/xef/scala/auto/package.scala
+++ b/scala/src/main/scala/com/xebia/functional/xef/scala/auto/package.scala
@@ -2,14 +2,14 @@ package com.xebia.functional.xef.scala.auto
import com.xebia.functional.loom.LoomAdapter
import com.xebia.functional.tokenizer.ModelType
-import com.xebia.functional.xef.auto.{Conversation, PromptConfiguration}
import com.xebia.functional.xef.auto.llm.openai.*
+import com.xebia.functional.xef.auto.{Conversation, PromptConfiguration}
import com.xebia.functional.xef.llm.*
import com.xebia.functional.xef.llm.models.functions.{CFunction, Json}
import com.xebia.functional.xef.llm.models.images.*
import com.xebia.functional.xef.pdf.Loader
import com.xebia.functional.xef.scala.textsplitters.TextSplitter
-import com.xebia.functional.xef.vectorstores.{LocalVectorStore, VectorStore}
+import com.xebia.functional.xef.vectorstores.LocalVectorStore
import io.circe.Decoder
import io.circe.parser.parse
@@ -20,11 +20,11 @@ type AI[A] = AIScope ?=> A
def conversation[A](
block: AIScope ?=> A
-): A = block(using AIScope.fromCore(new Conversation(LocalVectorStore(OpenAIEmbeddings(OpenAI.DEFAULT_EMBEDDING)))))
+): A = block(using AIScope.fromCore(new Conversation(LocalVectorStore(OpenAIEmbeddings(OpenAI().DEFAULT_EMBEDDING)))))
def prompt[A: Decoder: SerialDescriptor](
prompt: String,
- llmModel: ChatWithFunctions = OpenAI.DEFAULT_SERIALIZATION,
+ llmModel: ChatWithFunctions = OpenAI().DEFAULT_SERIALIZATION,
promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS
)(using scope: AIScope): A =
LoomAdapter.apply((cont) =>
@@ -51,7 +51,7 @@ def addContext(docs: Iterable[String])(using scope: AIScope): Unit =
def promptMessage(
prompt: String,
- llmModel: Chat = OpenAI.DEFAULT_CHAT,
+ llmModel: Chat = OpenAI().DEFAULT_CHAT,
promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS
)(using scope: AIScope): String =
LoomAdapter
@@ -61,7 +61,7 @@ def promptMessage(
def promptMessages(
prompt: String,
- llmModel: Chat = OpenAI.DEFAULT_CHAT,
+ llmModel: Chat = OpenAI().DEFAULT_CHAT,
functions: List[CFunction] = List.empty,
promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS
)(using scope: AIScope): List[String] =
@@ -83,7 +83,7 @@ def pdf(
def images(
prompt: String,
- model: Images = OpenAI.DEFAULT_IMAGES,
+ model: Images = OpenAI().DEFAULT_IMAGES,
n: Int = 1,
size: String = "1024x1024",
promptConfiguration: PromptConfiguration = PromptConfiguration.DEFAULTS
diff --git a/server/build.gradle.kts b/server/build.gradle.kts
index 1ae876901..bb7669337 100644
--- a/server/build.gradle.kts
+++ b/server/build.gradle.kts
@@ -16,22 +16,28 @@ java {
}
dependencies {
- implementation(projects.xefCore)
- implementation(projects.xefKotlin)
- implementation(libs.kotlinx.serialization.json)
- implementation(libs.logback)
+ implementation(libs.flyway.core)
+ implementation(libs.hikari)
implementation(libs.klogging)
+ implementation(libs.kotlinx.serialization.json)
+ implementation(libs.kotlinx.serialization.hocon)
+ implementation(libs.ktor.serialization.json)
implementation(libs.ktor.server.auth)
implementation(libs.ktor.server.netty)
implementation(libs.ktor.server.core)
implementation(libs.ktor.server.contentNegotiation)
implementation(libs.ktor.server.resources)
implementation(libs.ktor.server.cors)
- implementation(libs.ktor.serialization.json)
- implementation(libs.suspendApp.core)
- implementation(libs.suspendApp.ktor)
implementation(libs.ktor.server.request.validation)
+ implementation(libs.logback)
implementation(libs.openai.client)
+ implementation(libs.suspendApp.core)
+ implementation(libs.suspendApp.ktor)
+ implementation(libs.uuid)
+ implementation(projects.xefKotlin)
+ implementation(projects.xefCore)
+ implementation(projects.xefLucene)
+ implementation(projects.xefPostgresql)
}
tasks.getByName("processResources") {
diff --git a/server/docker/postgresql/docker-compose.yaml b/server/docker/postgresql/docker-compose.yaml
new file mode 100644
index 000000000..affcd5731
--- /dev/null
+++ b/server/docker/postgresql/docker-compose.yaml
@@ -0,0 +1,34 @@
+version: "3.5"
+
+services:
+ xef-vector-store-postgres:
+ container_name: xef-vector-store-postgres
+ image: "ankane/pgvector:v0.4.4"
+ ports:
+ - "5432:5432"
+ healthcheck:
+ test: [ "CMD", "pg_isready", "-U", "postgres" ]
+ interval: 2s
+ timeout: 2s
+ retries: 5
+ restart: always
+ environment:
+ POSTGRES_DB: xef-vector-store
+ POSTGRES_USER: ${POSTGRES_USER:-postgres}
+ POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres}
+
+ xef-db-postgres:
+ container_name: xef-db-postgres
+ image: "postgres:alpine3.18"
+ ports:
+ - "5433:5432"
+ healthcheck:
+ test: [ "CMD", "pg_isready", "-U", "postgres" ]
+ interval: 2s
+ timeout: 2s
+ retries: 5
+ restart: always
+ environment:
+ POSTGRES_DB: xefdb
+ POSTGRES_USER: ${POSTGRES_USER:-postgres}
+ POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres}
diff --git a/server/src/main/kotlin/com/xebia/functional/xef/server/Main.kt b/server/src/main/kotlin/com/xebia/functional/xef/server/Main.kt
index 33b9d53bd..f172d6f99 100644
--- a/server/src/main/kotlin/com/xebia/functional/xef/server/Main.kt
+++ b/server/src/main/kotlin/com/xebia/functional/xef/server/Main.kt
@@ -4,6 +4,11 @@ package com.xebia.functional.xef.server
import arrow.continuations.SuspendApp
import arrow.fx.coroutines.resourceScope
import arrow.continuations.ktor.server
+import com.typesafe.config.ConfigFactory
+import com.xebia.functional.xef.server.db.psql.XefDatabaseConfig
+import com.xebia.functional.xef.server.db.psql.Migrate
+import com.xebia.functional.xef.server.db.psql.XefVectorStoreConfig
+import com.xebia.functional.xef.server.db.psql.XefVectorStoreConfig.Companion.getPersistenceService
import com.xebia.functional.xef.server.http.routes.routes
import io.ktor.serialization.kotlinx.json.*
import io.ktor.server.application.*
@@ -19,6 +24,14 @@ object Main {
@JvmStatic
fun main(args: Array<String>) = SuspendApp {
resourceScope {
+ val config = ConfigFactory.load("database.conf").resolve()
+ val xefDBConfig = XefDatabaseConfig.load("xef", config)
+ Migrate.migrate(xefDBConfig)
+
+ val vectorStoreConfig = XefVectorStoreConfig.load("xef-vector-store", config)
+ val persistenceService = vectorStoreConfig.getPersistenceService(config)
+ persistenceService.initDatabase()
+
server(factory = Netty, port = 8080, host = "0.0.0.0") {
install(CORS) {
allowNonSimpleContentTypes = true
@@ -33,7 +46,7 @@ object Main {
}
}
}
- routing { routes() }
+ routing { routes(persistenceService) }
}
awaitCancellation()
}
diff --git a/server/src/main/kotlin/com/xebia/functional/xef/server/db/psql/Migrate.kt b/server/src/main/kotlin/com/xebia/functional/xef/server/db/psql/Migrate.kt
new file mode 100644
index 000000000..ee20c4e9f
--- /dev/null
+++ b/server/src/main/kotlin/com/xebia/functional/xef/server/db/psql/Migrate.kt
@@ -0,0 +1,30 @@
+package com.xebia.functional.xef.server.db.psql
+
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.withContext
+import org.flywaydb.core.Flyway
+import org.flywaydb.core.api.configuration.FluentConfiguration
+import org.flywaydb.core.api.output.MigrateResult
+
+object Migrate {
+ suspend fun migrate(
+ config: XefDatabaseConfig,
+ ): MigrateResult =
+ withContext(Dispatchers.IO) {
+ val url = "jdbc:postgresql://${config.host}:${config.port}/${config.database}"
+ val migration: FluentConfiguration = Flyway.configure()
+ .dataSource(
+ url,
+ config.user,
+ config.password
+ )
+ .table(config.migrationsTable)
+ .locations(*config.migrationsLocations.toTypedArray())
+ .loggers("slf4j")
+ val isValid = migration.ignoreMigrationPatterns("*:pending").load().validateWithResult()
+ if (!isValid.validationSuccessful) {
+ throw IllegalStateException("Migration validation failed: ${isValid.errorDetails}")
+ }
+ migration.load().migrate()
+ }
+}
diff --git a/server/src/main/kotlin/com/xebia/functional/xef/server/db/psql/XefDatabaseConfig.kt b/server/src/main/kotlin/com/xebia/functional/xef/server/db/psql/XefDatabaseConfig.kt
new file mode 100644
index 000000000..54eaaa105
--- /dev/null
+++ b/server/src/main/kotlin/com/xebia/functional/xef/server/db/psql/XefDatabaseConfig.kt
@@ -0,0 +1,35 @@
+package com.xebia.functional.xef.server.db.psql
+
+import com.typesafe.config.Config
+import com.typesafe.config.ConfigFactory
+import com.xebia.functional.xef.server.services.PersistenceService
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.withContext
+import kotlinx.serialization.ExperimentalSerializationApi
+import kotlinx.serialization.Serializable
+import kotlinx.serialization.hocon.Hocon
+
+@Serializable
+class XefDatabaseConfig(
+ val host: String,
+ val port: Int,
+ val database: String,
+ val user: String,
+ val password: String,
+ val migrationsTable: String,
+ val migrationsLocations: List<String>
+) {
+ companion object {
+ @OptIn(ExperimentalSerializationApi::class)
+ suspend fun load(
+ configNamespace: String,
+ config: Config? = null
+ ): XefDatabaseConfig =
+ withContext(Dispatchers.IO) {
+ val rawConfig = config ?: ConfigFactory.load().resolve()
+ val jdbcConfig = rawConfig.getConfig(configNamespace)
+ Hocon.decodeFromConfig(serializer(), jdbcConfig)
+ }
+
+ }
+}
diff --git a/server/src/main/kotlin/com/xebia/functional/xef/server/db/psql/XefVectorStoreConfig.kt b/server/src/main/kotlin/com/xebia/functional/xef/server/db/psql/XefVectorStoreConfig.kt
new file mode 100644
index 000000000..333320e4b
--- /dev/null
+++ b/server/src/main/kotlin/com/xebia/functional/xef/server/db/psql/XefVectorStoreConfig.kt
@@ -0,0 +1,65 @@
+package com.xebia.functional.xef.server.db.psql
+
+import com.typesafe.config.Config
+import com.typesafe.config.ConfigFactory
+import com.xebia.functional.xef.server.services.PersistenceService
+import com.xebia.functional.xef.server.services.PostgreSQLXef
+import com.xebia.functional.xef.server.services.PostgresXefService
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.withContext
+import kotlinx.serialization.ExperimentalSerializationApi
+import kotlinx.serialization.Serializable
+import kotlinx.serialization.hocon.Hocon
+
+enum class XefVectorStoreType {
+ PSQL
+}
+
+@Serializable
+class XefVectorStoreConfig(
+ val type: XefVectorStoreType,
+ val host: String,
+ val port: Int,
+ val database: String,
+ val driver: String,
+ val user: String,
+ val password: String,
+ val vectorSize: Int
+) {
+ companion object {
+ @OptIn(ExperimentalSerializationApi::class)
+ suspend fun load(
+ configNamespace: String,
+ config: Config? = null
+ ): XefVectorStoreConfig =
+ withContext(Dispatchers.IO) {
+ val rawConfig = config ?: ConfigFactory.load().resolve()
+ val jdbcConfig = rawConfig.getConfig(configNamespace)
+ Hocon.decodeFromConfig(serializer(), jdbcConfig)
+ }
+
+ suspend fun XefVectorStoreConfig.getPersistenceService(config: Config): PersistenceService {
+ when (this.type) {
+ XefVectorStoreType.PSQL -> {
+ return getPsqlPersistenceService(config)
+ }
+ }
+ }
+
+ private suspend fun getPsqlPersistenceService(config: Config): PersistenceService {
+ val vectorStoreConfig = XefVectorStoreConfig.load("xef-vector-store", config)
+ val pgVectorStoreConfig = PostgreSQLXef.PGVectorStoreConfig(
+ dbConfig = PostgreSQLXef.DBConfig(
+ host = vectorStoreConfig.host,
+ port = vectorStoreConfig.port,
+ database = vectorStoreConfig.database,
+ user = vectorStoreConfig.user,
+ password = vectorStoreConfig.password
+ ),
+ vectorSize = vectorStoreConfig.vectorSize
+ )
+ return PostgresXefService(pgVectorStoreConfig)
+ }
+
+ }
+}
diff --git a/server/src/main/kotlin/com/xebia/functional/xef/server/http/routes/RequestHelpers.kt b/server/src/main/kotlin/com/xebia/functional/xef/server/http/routes/RequestHelpers.kt
new file mode 100644
index 000000000..0027ce84f
--- /dev/null
+++ b/server/src/main/kotlin/com/xebia/functional/xef/server/http/routes/RequestHelpers.kt
@@ -0,0 +1,34 @@
+package com.xebia.functional.xef.server.http.routes
+
+import com.aallam.openai.api.BetaOpenAI
+import com.aallam.openai.api.chat.ChatCompletionRequest
+import com.aallam.openai.api.chat.ChatRole
+import com.xebia.functional.xef.llm.models.chat.Message
+import com.xebia.functional.xef.llm.models.chat.Role
+
+@OptIn(BetaOpenAI::class)
+fun ChatCompletionRequest.toCore(): com.xebia.functional.xef.llm.models.chat.ChatCompletionRequest =
+ com.xebia.functional.xef.llm.models.chat.ChatCompletionRequest(
+ model = model.id,
+ messages = messages.map { Message(it.role.toCore(), it.content ?: "", it.name ?: "") },
+ temperature = temperature ?: 0.0,
+ topP = topP ?: 1.0,
+ n = n ?: 1,
+ stream = false,
+ stop = stop,
+ maxTokens = maxTokens,
+ presencePenalty = presencePenalty ?: 0.0,
+ frequencyPenalty = frequencyPenalty ?: 0.0,
+ logitBias = logitBias ?: emptyMap(),
+ user = user,
+ streamToStandardOut = false
+ )
+
+@OptIn(BetaOpenAI::class)
+fun ChatRole.toCore(): Role =
+ when (this) {
+ ChatRole.System -> Role.SYSTEM
+ ChatRole.User -> Role.USER
+ ChatRole.Assistant -> Role.ASSISTANT
+ else -> Role.ASSISTANT
+ }
diff --git a/server/src/main/kotlin/com/xebia/functional/xef/server/http/routes/Routes.kt b/server/src/main/kotlin/com/xebia/functional/xef/server/http/routes/Routes.kt
index 224b01379..3b4629016 100644
--- a/server/src/main/kotlin/com/xebia/functional/xef/server/http/routes/Routes.kt
+++ b/server/src/main/kotlin/com/xebia/functional/xef/server/http/routes/Routes.kt
@@ -6,10 +6,9 @@ import com.aallam.openai.api.chat.ChatRole
import com.xebia.functional.xef.auto.Conversation
import com.xebia.functional.xef.auto.PromptConfiguration
import com.xebia.functional.xef.auto.llm.openai.*
-import com.xebia.functional.xef.auto.llm.openai.OpenAI.Companion.DEFAULT_CHAT
-import com.xebia.functional.xef.llm.Chat
import com.xebia.functional.xef.llm.models.chat.Message
import com.xebia.functional.xef.llm.models.chat.Role
+import com.xebia.functional.xef.server.services.PersistenceService
import com.xebia.functional.xef.vectorstores.LocalVectorStore
import io.ktor.http.*
import io.ktor.server.application.*
@@ -20,14 +19,30 @@ import io.ktor.server.routing.*
import io.ktor.util.pipeline.*
import com.xebia.functional.xef.llm.models.chat.ChatCompletionRequest as XefChatCompletionRequest
+enum class Provider {
+ OPENAI, GPT4ALL, GCP
+}
+
+fun String.toProvider(): Provider? = when (this.lowercase()) {
+ "openai" -> Provider.OPENAI
+ "gpt4all" -> Provider.GPT4ALL
+ "gcp" -> Provider.GCP
+ else -> null
+}
+
+
@OptIn(BetaOpenAI::class)
-fun Routing.routes() {
+fun Routing.routes(persistenceService: PersistenceService) {
authenticate("auth-bearer") {
post("/chat/completions") {
- val model: Chat = call.request.headers["xef-model"]?.toOpenAIModel() ?: DEFAULT_CHAT
+ val provider: Provider = call.request.headers["xef-provider"]?.toProvider()
+ ?: throw IllegalArgumentException("Not a valid provider")
val token = call.principal()?.name ?: throw IllegalArgumentException("No token found")
- val scope = Conversation(LocalVectorStore(OpenAIEmbeddings(OpenAI(token).GPT_3_5_TURBO_16K)))
+ val scope = Conversation(
+ persistenceService.getVectorStore(provider, token)
+ )
val data = call.receive().toCore()
+ val model: OpenAIModel = data.model.toOpenAIModel(token)
response {
model.promptMessage(
question = data.messages.joinToString("\n") { "${it.role}: ${it.content}" },
@@ -54,29 +69,3 @@ private suspend inline fun PipelineContext<*, A
}) {
call.respondText(it.message ?: "Response not found", status = HttpStatusCode.NotFound)
}
-
-@OptIn(BetaOpenAI::class)
-private fun ChatCompletionRequest.toCore(): XefChatCompletionRequest = XefChatCompletionRequest(
- model = model.id,
- messages = messages.map { Message(it.role.toCore(), it.content ?: "", it.name ?: "") },
- temperature = temperature ?: 0.0,
- topP = topP ?: 1.0,
- n = n ?: 1,
- stream = false,
- stop = stop,
- maxTokens = maxTokens,
- presencePenalty = presencePenalty ?: 0.0,
- frequencyPenalty = frequencyPenalty ?: 0.0,
- logitBias = logitBias ?: emptyMap(),
- user = user,
- streamToStandardOut = false
-)
-
-@OptIn(BetaOpenAI::class)
-private fun ChatRole.toCore(): Role =
- when (this) {
- ChatRole.System -> Role.SYSTEM
- ChatRole.User -> Role.USER
- ChatRole.Assistant -> Role.ASSISTANT
- else -> Role.ASSISTANT
- }
diff --git a/server/src/main/kotlin/com/xebia/functional/xef/server/services/PersistenceService.kt b/server/src/main/kotlin/com/xebia/functional/xef/server/services/PersistenceService.kt
new file mode 100644
index 000000000..c5ad48a18
--- /dev/null
+++ b/server/src/main/kotlin/com/xebia/functional/xef/server/services/PersistenceService.kt
@@ -0,0 +1,16 @@
+package com.xebia.functional.xef.server.services
+
+import com.xebia.functional.xef.server.http.routes.Provider
+import com.xebia.functional.xef.vectorstores.VectorStore
+import io.github.oshai.kotlinlogging.KotlinLogging
+
+abstract class PersistenceService {
+ val logger = KotlinLogging.logger {}
+
+ abstract fun initDatabase()
+
+ abstract fun getVectorStore(
+ provider: Provider = Provider.OPENAI,
+ token: String
+ ): VectorStore
+}
diff --git a/server/src/main/kotlin/com/xebia/functional/xef/server/services/PostgresXefService.kt b/server/src/main/kotlin/com/xebia/functional/xef/server/services/PostgresXefService.kt
new file mode 100644
index 000000000..ff969b31e
--- /dev/null
+++ b/server/src/main/kotlin/com/xebia/functional/xef/server/services/PostgresXefService.kt
@@ -0,0 +1,94 @@
+package com.xebia.functional.xef.server.services
+
+import com.xebia.functional.xef.auto.autoClose
+import com.xebia.functional.xef.auto.llm.openai.OpenAI
+import com.xebia.functional.xef.auto.llm.openai.OpenAIEmbeddings
+import com.xebia.functional.xef.llm.models.embeddings.EmbeddingModel
+import com.xebia.functional.xef.llm.models.embeddings.RequestConfig
+import com.xebia.functional.xef.server.http.routes.Provider
+import com.xebia.functional.xef.vectorstores.PGVectorStore
+import com.xebia.functional.xef.vectorstores.VectorStore
+import com.xebia.functional.xef.vectorstores.postgresql.*
+import com.zaxxer.hikari.HikariConfig
+import com.zaxxer.hikari.HikariDataSource
+import kotlinx.uuid.UUID
+import kotlinx.uuid.generateUUID
+
+object PostgreSQLXef {
+ data class DBConfig(
+ val host: String,
+ val port: Int,
+ val database: String,
+ val user: String,
+ val password: String
+ )
+
+ data class PGVectorStoreConfig(
+ val dbConfig: DBConfig,
+ val vectorSize: Int = 3,
+ val collectionName: String = "xef_collection",
+ val preDeleteCollection: Boolean = false,
+ val chunkSize: Int? = null,
+ )
+}
+
+
+class PostgresXefService(
+ private val config: PostgreSQLXef.PGVectorStoreConfig
+) : PersistenceService() {
+
+ private fun getDataSource(): HikariDataSource =
+ autoClose {
+ HikariDataSource(
+ HikariConfig().apply {
+ jdbcUrl =
+ "jdbc:postgresql://${config.dbConfig.host}:${config.dbConfig.port}/${config.dbConfig.database}"
+ username = config.dbConfig.user
+ password = config.dbConfig.password
+ driverClassName = "org.postgresql.Driver"
+ }
+ )
+ }
+
+ override fun initDatabase() {
+ getDataSource().connection {
+ update(addVectorExtension)
+ update(createCollections)
+ update(createCollectionsTable)
+ update(createMemoryTable)
+ update(createEmbeddingTable(config.vectorSize))
+ // Create collection
+ val uuid = UUID.generateUUID()
+ update(addNewCollection) {
+ bind(uuid.toString())
+ bind(config.collectionName)
+ }
+ }
+ }
+
+ override fun getVectorStore(
+ provider: Provider,
+ token: String
+ ): VectorStore {
+ val embeddings = when (provider) {
+ Provider.OPENAI -> OpenAIEmbeddings(OpenAI(token).DEFAULT_EMBEDDING)
+ else -> OpenAIEmbeddings(OpenAI(token).DEFAULT_EMBEDDING)
+ }
+ val embeddingModel = EmbeddingModel.TEXT_EMBEDDING_ADA_002
+
+ return PGVectorStore(
+ vectorSize = config.vectorSize,
+ dataSource = getDataSource(),
+ embeddings = embeddings,
+ collectionName = config.collectionName,
+ distanceStrategy = PGDistanceStrategy.Euclidean,
+ preDeleteCollection = config.preDeleteCollection,
+ requestConfig =
+ RequestConfig(
+ model = embeddingModel,
+ user = RequestConfig.Companion.User("user")
+ ),
+ chunkSize = config.chunkSize
+ )
+ }
+}
diff --git a/server/src/main/resources/database.conf b/server/src/main/resources/database.conf
new file mode 100644
index 000000000..d5bd554ea
--- /dev/null
+++ b/server/src/main/resources/database.conf
@@ -0,0 +1,47 @@
+# Database configuration for the Vector Store
+xef-vector-store {
+ type = "PSQL"
+ type = ${?XEF_DB_VECTOR_STORE_TYPE}
+
+ driver = "org.postgresql.Driver"
+
+ host = "localhost"
+ host = ${?XEF_DB_VECTOR_STORE_HOST}
+
+ port = 5432
+ port = ${?XEF_DB_VECTOR_STORE_PORT}
+
+ database = "xef-vector-store"
+ database = ${?XEF_DB_VECTOR_STORE_NAME}
+
+ user = "postgres"
+ user = ${?XEF_DB_VECTOR_STORE_USER}
+
+ password = "postgres"
+ password = ${?XEF_DB_VECTOR_STORE_PASSWORD}
+
+ vectorSize = 3
+ vectorSize = ${?XEF_DB_VECTOR_STORE_VECTOR_SIZE}
+}
+
+xef {
+ host = "localhost"
+ host = ${?XEF_DB_HOST}
+
+ port = 5433
+ port = ${?XEF_DB_PORT}
+
+ database = "xefdb"
+ database = ${?XEF_DB_NAME}
+
+ user = "postgres"
+ user = ${?XEF_DB_USER}
+
+ password = "postgres"
+ password = ${?XEF_DB_PASSWORD}
+
+ migrationsTable = "migrations"
+ migrationsLocations = [
+ "classpath:db/migrations/psql"
+ ]
+}
diff --git a/server/src/main/resources/db/migrations/psql/V1__Initial.sql b/server/src/main/resources/db/migrations/psql/V1__Initial.sql
new file mode 100644
index 000000000..e69de29bb