Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion examples/01-agent-code-skill/04.1-chat-planner-coder.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
import { Agent, Chat, Component, Model, TAgentMode, TLLMEvent } from '@smythos/sdk';
import chalk from 'chalk';
import * as readline from 'readline';
import { EmitUnit, PluginBase, TokenLoom } from 'tokenloom';
import { EmitUnit, PluginAPI, PluginBase, TokenLoom } from 'tokenloom';

//Show the tasks list and status to the user at every step before performing the tasks, and also give a tasks status summary after tasks.
//When you display the tasks list to a user show it in a concise way with a summary and checkboxes for each task.
Expand Down
18 changes: 15 additions & 3 deletions examples/05-VectorDB-with-agent/01-upsert-and-search.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
import { Agent, Doc, Model, Scope } from '@smythos/sdk';
import { Agent, Doc, Model, Scope, TLLMEvent } from '@smythos/sdk';
import path from 'path';
import { fileURLToPath } from 'url';

Expand All @@ -16,6 +16,7 @@ const pineconeSettings = {
indexName: 'demo-vec',
apiKey: process.env.PINECONE_API_KEY,
embeddings: Model.OpenAI('text-embedding-3-large'),
//you can also use Model.GoogleAI('gemini-embedding-001', { dimensions: 1024 })
};

async function createAgent() {
Expand Down Expand Up @@ -83,14 +84,25 @@ async function indexDataForAgent(agent: Agent) {
await pinecone.insertDoc(parsedDoc.title, parsedDoc, { myEntry: 'My Metadata' });
}

const delay = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));

async function main() {
const agent = await createAgent();
console.log('Indexing data for agent');
await indexDataForAgent(agent);

console.log('Waiting for 5 seconds before prompting the agent ... sometimes the index is not ready immediately');
await delay(5000);

console.log('Prompting the agent');

//this will prompt the agent and use the agent's LLM to determine which skill to use
const promptResult = await agent.prompt('What is bitcoin Proof-of-Work ?');
const promptStream = await agent.prompt('What is bitcoin Proof-of-Work ?').stream();
//the response comes back in natural language
console.log(promptResult);
console.log('\n');
promptStream.on(TLLMEvent.Content, (content) => {
process.stdout.write(content);
});
}

main();
6 changes: 2 additions & 4 deletions examples/06-Storage-no-agent/01-localstorage.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,16 +2,14 @@ import { Storage } from '@smythos/sdk';

// Demo: round-trip a string through storage — write a file, read it back, print it.
async function main() {
// Storage.LocalStorage() — storage client from @smythos/sdk (presumably backed by
// the local filesystem — TODO confirm against SDK docs).
const localStorage = Storage.LocalStorage();

// Persist a small text payload under the key 'test.txt'.
await localStorage.write('test.txt', 'Hello, world!');

// read() resolves to a buffer-like value (it exposes toString(), used below).
const data = await localStorage.read('test.txt');

// Decode the raw bytes into a string for display.
const dataAsString = data.toString();

console.log(dataAsString);


}

main();
main();
121 changes: 52 additions & 69 deletions examples/11-zoom-rtms-integration/types.d.ts
Original file line number Diff line number Diff line change
@@ -1,88 +1,71 @@
// Global type declarations for Node.js environment
declare global {
namespace NodeJS {
interface ProcessEnv {
PORT?: string;
ZOOM_SECRET_TOKEN?: string;
ZOOM_CLIENT_ID?: string;
ZOOM_CLIENT_SECRET?: string;
WEBHOOK_PATH?: string;
OPENAI_API_KEY?: string;
ANTHROPIC_API_KEY?: string;
PINECONE_API_KEY?: string;
PINECONE_INDEX_NAME?: string;
AWS_ACCESS_KEY_ID?: string;
AWS_SECRET_ACCESS_KEY?: string;
AWS_REGION?: string;
AWS_S3_BUCKET?: string;
LOG_LEVEL?: string;
namespace NodeJS {
interface ProcessEnv {
PORT?: string;
ZOOM_SECRET_TOKEN?: string;
ZOOM_CLIENT_ID?: string;
ZOOM_CLIENT_SECRET?: string;
WEBHOOK_PATH?: string;
OPENAI_API_KEY?: string;
ANTHROPIC_API_KEY?: string;
PINECONE_API_KEY?: string;
PINECONE_INDEX_NAME?: string;
AWS_ACCESS_KEY_ID?: string;
AWS_SECRET_ACCESS_KEY?: string;
AWS_REGION?: string;
AWS_S3_BUCKET?: string;
LOG_LEVEL?: string;
}
}
}

var process: NodeJS.Process;
var console: Console;
var Buffer: BufferConstructor;
}

// Module declarations for packages that might not have types
declare module '@smythos/sdk' {
export class Agent {
constructor(config: any);
addSkill(skill: any): void;
prompt(message: string): Promise<string>;
llm: any;
storage: any;
vectordb: any;
}

export class Model {
static OpenAI(model: string): any;
static Anthropic(model: string): any;
}
var process: NodeJS.Process;
var console: Console;
var Buffer: BufferConstructor;
}

declare module 'crypto' {
export function createHmac(algorithm: string, key: string): any;
export function createHmac(algorithm: string, key: string): any;
}

declare module 'ws' {
export default class WebSocket {
constructor(url: string, options?: any);
on(event: string, callback: Function): void;
send(data: string): void;
close(): void;
}
export default class WebSocket {
constructor(url: string, options?: any);
on(event: string, callback: Function): void;
send(data: string): void;
close(): void;
}
}

declare module 'express' {
export interface Request {
body: any;
}
export interface Response {
json(data: any): void;
sendStatus(code: number): void;
}
interface Express {
use(middleware: any): void;
post(path: string, handler: any): void;
get(path: string, handler: any): void;
listen(port: string | number, callback?: () => void): void;
}
interface ExpressStatic {
(): Express;
json(): any;
}
const express: ExpressStatic;
export default express;
export { Request, Response };
export interface Request {
body: any;
}

export interface Response {
json(data: any): void;
sendStatus(code: number): void;
}

interface Express {
use(middleware: any): void;
post(path: string, handler: any): void;
get(path: string, handler: any): void;
listen(port: string | number, callback?: () => void): void;
}

interface ExpressStatic {
(): Express;
json(): any;
}

const express: ExpressStatic;
export default express;
export { Request, Response };
}

declare module 'dotenv' {
export function config(): void;
export function config(): void;
}

export {};
2 changes: 1 addition & 1 deletion packages/core/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@smythos/sre",
"version": "1.5.67",
"version": "1.5.68",
"description": "Smyth Runtime Environment",
"author": "Alaa-eddine KADDOURI",
"license": "MIT",
Expand Down
2 changes: 1 addition & 1 deletion packages/core/src/helpers/Conversation.helper.ts
Original file line number Diff line number Diff line change
Expand Up @@ -282,7 +282,7 @@ export class Conversation extends EventEmitter {
const reqMethods = this._reqMethods;
const toolsConfig = this._toolsConfig;
//deduplicate tools
toolsConfig.tools = toolsConfig.tools.filter((tool, index, self) => self.findIndex((t) => t.name === tool.name) === index);
toolsConfig.tools = toolsConfig.tools.filter((tool, index, self) => self.findIndex((t) => t.function.name === tool.function.name) === index);
const endpoints = this._endpoints;
const baseUrl = this._baseUrl;
const message_id = 'msg_' + randomUUID();
Expand Down
1 change: 1 addition & 0 deletions packages/core/src/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -166,6 +166,7 @@ export * from './subsystems/IO/VectorDB.service/connectors/MilvusVectorDB.class'
export * from './subsystems/IO/VectorDB.service/connectors/PineconeVectorDB.class';
export * from './subsystems/IO/VectorDB.service/connectors/RAMVecrtorDB.class';
export * from './subsystems/IO/VectorDB.service/embed/BaseEmbedding';
export * from './subsystems/IO/VectorDB.service/embed/GoogleEmbedding';
export * from './subsystems/IO/VectorDB.service/embed/index';
export * from './subsystems/IO/VectorDB.service/embed/OpenAIEmbedding';
export * from './subsystems/LLMManager/LLM.service/connectors/Anthropic.class';
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,118 @@
import { GoogleGenAI } from '@google/genai';
import { BaseEmbedding, TEmbeddings } from './BaseEmbedding';
import { AccessCandidate } from '@sre/Security/AccessControl/AccessCandidate.class';
import { getLLMCredentials } from '@sre/LLMManager/LLM.service/LLMCredentials.helper';
import { TLLMCredentials, TLLMModel, BasicCredentials } from '@sre/types/LLM.types';

const DEFAULT_MODEL = 'gemini-embedding-001';

export class GoogleEmbeds extends BaseEmbedding {
    protected client: GoogleGenAI;

    // Keep in sync with Gemini API supported embedding models
    public static models = ['gemini-embedding-001', 'text-embedding-005', 'text-multilingual-embedding-002'];
    public canSpecifyDimensions = true;

    constructor(private settings?: Partial<TEmbeddings>) {
        // Spread `settings` FIRST so the resolved model wins. The previous order
        // ({ model: ..., ...settings }) let an explicit `model: undefined` inside
        // settings clobber the default back to undefined.
        super({ ...settings, model: settings?.model ?? DEFAULT_MODEL });
    }

    /**
     * Embeds a list of texts, batching them by `this.chunkSize` (inherited from
     * BaseEmbedding) and issuing all batch requests in parallel.
     *
     * @param texts - raw input texts; pre-processed via `this.processTexts`
     * @param candidate - access candidate used to resolve API credentials
     * @returns one embedding vector per input text, in input order
     */
    async embedTexts(texts: string[], candidate: AccessCandidate): Promise<number[][]> {
        const batches = this.chunkArr(this.processTexts(texts), this.chunkSize);

        // Fire all batch requests concurrently; order of `batchResponses`
        // matches `batches` because Promise.all preserves input order.
        const batchResponses = await Promise.all(batches.map((batch) => this.embed(batch, candidate)));

        // Flatten batch responses back into a flat, input-ordered list.
        // Indexing by the request batch length guards alignment between
        // request and response (a short response would surface as undefined
        // rather than silently shifting later vectors).
        const embeddings: number[][] = [];
        for (let i = 0; i < batchResponses.length; i += 1) {
            for (let j = 0; j < batches[i].length; j += 1) {
                embeddings.push(batchResponses[i][j]);
            }
        }
        return embeddings;
    }

    /**
     * Embeds a single text.
     *
     * @param text - raw input text
     * @param candidate - access candidate used to resolve API credentials
     * @returns the embedding vector for `text`
     */
    async embedText(text: string, candidate: AccessCandidate): Promise<number[]> {
        const processedText = this.processTexts([text])[0];
        const embeddings = await this.embed([processedText], candidate);
        return embeddings[0];
    }

    /**
     * Core embedding call: resolves an API key (credential system first, then
     * the GOOGLE_AI_API_KEY environment variable), lazily constructs the
     * GoogleGenAI client, and requests embeddings for `texts`.
     *
     * @throws Error when no API key can be resolved, or when the API call fails
     */
    protected async embed(texts: string[], candidate: AccessCandidate): Promise<number[][]> {
        let apiKey: string | undefined;

        // Try to get from credentials first
        try {
            const modelInfo: TLLMModel = {
                provider: 'GoogleAI',
                modelId: this.model,
                // NOTE(review): double assertion bridges the settings credential
                // shape to TLLMCredentials — confirm the shapes actually agree.
                credentials: this.settings?.credentials as unknown as TLLMCredentials,
            };
            const credentials = await getLLMCredentials(candidate, modelInfo);
            apiKey = (credentials as BasicCredentials)?.apiKey;
        } catch (e) {
            // If credential system fails, fall back to environment variable
        }

        // Fall back to environment variable if not found in credentials
        if (!apiKey) {
            apiKey = process.env.GOOGLE_AI_API_KEY;
        }

        if (!apiKey) {
            throw new Error('Please provide an API key for Google AI embeddings via credentials or GOOGLE_AI_API_KEY environment variable');
        }

        // Lazily create the client once; subsequent calls reuse it.
        if (!this.client) {
            this.client = new GoogleGenAI({ apiKey });
        }

        try {
            // Only forward a dimension override when it is a finite number.
            const outputDimensionality = this.dimensions && Number.isFinite(this.dimensions) ? this.dimensions : undefined;

            // Batch request using the new SDK
            const res = await this.client.models.embedContent({
                model: this.model,
                contents: texts,
                ...(outputDimensionality ? { outputDimensionality } : {}),
            });

            // The SDK can return either { embedding } for single or { embeddings } for batch
            const vectors: number[][] = Array.isArray((res as any).embeddings)
                ? (res as any).embeddings.map((e: any) => e.values as number[])
                : [((res as any).embedding?.values as number[]) || []];

            // Enforce dimensions and normalization when requested or when non-3072
            return vectors.map((v) => this.postProcessEmbedding(v, outputDimensionality));
        } catch (e) {
            // Narrow the unknown catch variable (strict mode) before reading .message.
            const message = e instanceof Error ? e.message || String(e) : String(e);
            throw new Error(`Google Embeddings API error: ${message}`);
        }
    }

    /**
     * Forces a vector to `targetDim` (truncate or zero-pad) and L2-normalizes
     * it when the dimension differs from the model default of 3072
     * (normalization for non-default dims is recommended by Google docs).
     */
    private postProcessEmbedding(values: number[], targetDim?: number): number[] {
        let v = Array.isArray(values) ? values.slice() : [];
        if (targetDim && targetDim > 0) {
            if (v.length > targetDim) {
                // SDK ignored smaller dimension: truncate
                v = v.slice(0, targetDim);
            } else if (v.length < targetDim) {
                // SDK returned shorter vector: pad with zeros
                v = v.concat(Array(targetDim - v.length).fill(0));
            }
        }
        // Normalize for non-default 3072 dims (recommended by Google docs)
        const needNormalize = (targetDim && targetDim !== 3072) || (!targetDim && v.length !== 3072);
        if (needNormalize && v.length > 0) {
            const norm = Math.sqrt(v.reduce((acc, x) => acc + x * x, 0));
            if (norm > 0) v = v.map((x) => x / norm);
        }
        return v;
    }
}
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import { OpenAIEmbeds } from './OpenAIEmbedding';
import { GoogleEmbeds } from './GoogleEmbedding';
import { TEmbeddings } from './BaseEmbedding';

// a factory to get the correct embedding provider based on the provider name
Expand All @@ -7,6 +8,10 @@ const supportedProviders = {
embedder: OpenAIEmbeds,
models: OpenAIEmbeds.models,
},
GoogleAI: {
embedder: GoogleEmbeds,
models: GoogleEmbeds.models,
},
} as const;

export type SupportedProviders = keyof typeof supportedProviders;
Expand Down
Loading