Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion graphile/graphile-llm/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,8 @@
"url": "https://github.com/constructive-io/constructive/issues"
},
"dependencies": {
"@agentic-kit/ollama": "^1.0.3"
"@agentic-kit/ollama": "^1.0.3",
"@constructive-io/graphql-env": "workspace:^"
},
"peerDependencies": {
"@dataplan/pg": "1.0.0",
Expand Down
12 changes: 7 additions & 5 deletions graphile/graphile-llm/src/chat.ts
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
*/

import OllamaClient from '@agentic-kit/ollama';
import { getEnvOptions } from '@constructive-io/graphql-env';
import type { ChatConfig, ChatFunction, ChatMessage, ChatOptions, LlmModuleData } from './types';

// ─── Built-in Providers ─────────────────────────────────────────────────────
Expand Down Expand Up @@ -95,21 +96,22 @@ export function buildChatCompleterFromModule(data: LlmModuleData): ChatFunction
}

/**
 * Resolve a chat completer from environment variables via getEnvOptions().
 * This is a fallback for development when no llm_module or defaultChatCompleter is configured.
 *
 * Environment variables (parsed by @constructive-io/graphql-env):
 *   CHAT_PROVIDER - Provider name ('ollama')
 *   CHAT_MODEL    - Model identifier (e.g. 'llama3')
 *   CHAT_BASE_URL - Provider base URL
 *
 * @returns A ChatFunction when a chat provider is configured, otherwise null.
 */
export function buildChatCompleterFromEnv(): ChatFunction | null {
  // Read the parsed LLM settings from the unified env configuration.
  const chat = getEnvOptions().llm?.chat;
  // Without a provider there is no env-based fallback to build.
  if (!chat?.provider) return null;

  return buildChatCompleter({
    provider: chat.provider,
    model: chat.model,
    baseUrl: chat.baseUrl,
  });
}
12 changes: 7 additions & 5 deletions graphile/graphile-llm/src/embedder.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
*/

import OllamaClient from '@agentic-kit/ollama';
import { getEnvOptions } from '@constructive-io/graphql-env';
import type { EmbedderConfig, EmbedderFunction, LlmModuleData } from './types';

// ─── Built-in Providers ─────────────────────────────────────────────────────
Expand Down Expand Up @@ -63,21 +64,22 @@ export function buildEmbedderFromModule(data: LlmModuleData): EmbedderFunction |
}

/**
 * Resolve an embedder from environment variables via getEnvOptions().
 * This is a fallback for development when no llm_module or defaultEmbedder is configured.
 *
 * Environment variables (parsed by @constructive-io/graphql-env):
 *   EMBEDDER_PROVIDER - Provider name ('ollama')
 *   EMBEDDER_MODEL    - Model identifier
 *   EMBEDDER_BASE_URL - Provider base URL
 *
 * @returns An EmbedderFunction when an embedder provider is configured, otherwise null.
 */
export function buildEmbedderFromEnv(): EmbedderFunction | null {
  // Read the parsed LLM settings from the unified env configuration.
  const embedder = getEnvOptions().llm?.embedder;
  // Without a provider there is no env-based fallback to build.
  if (!embedder?.provider) return null;

  return buildEmbedder({
    provider: embedder.provider,
    model: embedder.model,
    baseUrl: embedder.baseUrl,
  });
}
25 changes: 25 additions & 0 deletions graphql/env/src/env.ts
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,13 @@ export const getGraphQLEnvVars = (env: NodeJS.ProcessEnv = process.env): Partial
API_ANON_ROLE,
API_ROLE_NAME,
API_DEFAULT_DATABASE_ID,

EMBEDDER_PROVIDER,
EMBEDDER_MODEL,
EMBEDDER_BASE_URL,
CHAT_PROVIDER,
CHAT_MODEL,
CHAT_BASE_URL,
} = env;

return {
Expand All @@ -51,5 +58,23 @@ export const getGraphQLEnvVars = (env: NodeJS.ProcessEnv = process.env): Partial
...(API_ROLE_NAME && { roleName: API_ROLE_NAME }),
...(API_DEFAULT_DATABASE_ID && { defaultDatabaseId: API_DEFAULT_DATABASE_ID }),
},
...((EMBEDDER_PROVIDER || CHAT_PROVIDER) && {
llm: {
...((EMBEDDER_PROVIDER || EMBEDDER_MODEL || EMBEDDER_BASE_URL) && {
embedder: {
...(EMBEDDER_PROVIDER && { provider: EMBEDDER_PROVIDER }),
...(EMBEDDER_MODEL && { model: EMBEDDER_MODEL }),
...(EMBEDDER_BASE_URL && { baseUrl: EMBEDDER_BASE_URL }),
},
}),
...((CHAT_PROVIDER || CHAT_MODEL || CHAT_BASE_URL) && {
chat: {
...(CHAT_PROVIDER && { provider: CHAT_PROVIDER }),
...(CHAT_MODEL && { model: CHAT_MODEL }),
...(CHAT_BASE_URL && { baseUrl: CHAT_BASE_URL }),
},
}),
},
}),
};
};
3 changes: 3 additions & 0 deletions graphql/types/src/constructive.ts
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ import {
graphileFeatureDefaults,
apiDefaults
} from './graphile';
import { LlmOptions } from './llm';

/**
* GraphQL-specific options for Constructive
Expand Down Expand Up @@ -56,6 +57,8 @@ export interface ConstructiveOptions extends PgpmOptions, ConstructiveGraphQLOpt
migrations?: MigrationOptions;
/** Job system configuration */
jobs?: JobsConfig;
/** LLM provider configuration (embeddings, chat, RAG) */
llm?: LlmOptions;
}

/**
Expand Down
7 changes: 7 additions & 0 deletions graphql/types/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -22,3 +22,10 @@ export {
GraphQLError,
QueryResult
} from './adapter';

// Export LLM types.
// These are type-only symbols: re-export with `export type` so the emit is
// fully erased and the file stays valid under `isolatedModules`.
export type {
  LlmOptions,
  LlmEmbedderOptions,
  LlmChatOptions
} from './llm';
48 changes: 48 additions & 0 deletions graphql/types/src/llm.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
/**
 * Type definitions for LLM provider configuration.
 *
 * Consumed by graphile-llm when resolving embedding and chat-completion
 * providers through the unified environment configuration system
 * (getEnvOptions).
 */

/**
 * Settings describing an embedding provider.
 */
export interface LlmEmbedderOptions {
  /** Name of the provider, e.g. 'ollama' */
  provider?: string;
  /** Identifier of the embedding model, e.g. 'nomic-embed-text' */
  model?: string;
  /** Base URL of the provider endpoint, e.g. 'http://localhost:11434' */
  baseUrl?: string;
}

/**
 * Settings describing a chat-completion provider.
 */
export interface LlmChatOptions {
  /** Name of the provider, e.g. 'ollama' */
  provider?: string;
  /** Identifier of the chat model, e.g. 'llama3' */
  model?: string;
  /** Base URL of the provider endpoint, e.g. 'http://localhost:11434' */
  baseUrl?: string;
}

/**
 * Top-level LLM configuration.
 *
 * Populated from the following environment variables:
 *   EMBEDDER_PROVIDER - Embedding provider name
 *   EMBEDDER_MODEL    - Embedding model identifier
 *   EMBEDDER_BASE_URL - Embedding provider base URL
 *   CHAT_PROVIDER     - Chat completion provider name
 *   CHAT_MODEL        - Chat completion model identifier
 *   CHAT_BASE_URL     - Chat completion provider base URL
 */
export interface LlmOptions {
  /** Embedding provider configuration */
  embedder?: LlmEmbedderOptions;
  /** Chat completion provider configuration */
  chat?: LlmChatOptions;
}
Loading
Loading