Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions .changeset/early-colts-approve.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
'flowtestai-app': minor
---

Add support for Google Gemini function calling in flow generation
2 changes: 2 additions & 0 deletions packages/flowtest-electron/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,9 @@
"@aws-sdk/client-bedrock-runtime": "^3.583.0",
"@aws-sdk/credential-provider-node": "^3.583.0",
"@aws-sdk/types": "^3.577.0",
"@google/generative-ai": "^0.16.0",
"@langchain/community": "^0.2.19",
"@langchain/google-genai": "^0.0.25",
"@smithy/eventstream-codec": "^3.0.0",
"@smithy/protocol-http": "^4.0.0",
"@smithy/signature-v4": "^3.0.0",
Expand Down
6 changes: 6 additions & 0 deletions packages/flowtest-electron/src/ai/flowtestai.js
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
const BedrockClaudeGenerate = require('./models/bedrock_claude');
const GeminiGenerate = require('./models/gemini');
const OpenAIGenerate = require('./models/openai');

class FlowtestAI {
Expand All @@ -13,6 +14,11 @@ class FlowtestAI {
const bedrock_claude = new BedrockClaudeGenerate(model.apiKey);
const functions = await bedrock_claude.filter_functions(available_functions, user_instruction);
return await bedrock_claude.process_user_instruction(functions, user_instruction);
} else if (model.name === 'GEMINI') {
const available_functions = await this.get_available_functions(collection);
const gemini = new GeminiGenerate(model.apiKey);
const functions = await gemini.filter_functions(available_functions, user_instruction);
return await gemini.process_user_instruction(functions, user_instruction);
} else {
throw Error(`Model ${model.name} not supported`);
}
Expand Down
137 changes: 137 additions & 0 deletions packages/flowtest-electron/src/ai/models/gemini.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,137 @@
const { GoogleGenerativeAI } = require('@google/generative-ai');
const { GoogleGenerativeAIEmbeddings } = require('@langchain/google-genai');
const { TaskType } = require('@google/generative-ai');
const { MemoryVectorStore } = require('langchain/vectorstores/memory');

/**
 * Generates flow steps with Google Gemini.
 *
 * Two-stage pipeline:
 *  1. `filter_functions` ranks the collection's available functions against the
 *     user's instruction via embedding similarity search, keeping the top matches.
 *  2. `process_user_instruction` prompts Gemini with the surviving function
 *     definitions and parses the function calls out of the model's reply.
 */
class GeminiGenerate {
  /**
   * @param {string} apiKey - Google Generative AI API key, used both for the
   *   chat model and for the embedding model.
   */
  constructor(apiKey) {
    this.genAI = new GoogleGenerativeAI(apiKey);

    // Embeddings are used only to rank candidate functions by relevance
    // to the user's instruction before prompting the chat model.
    this.embeddings = new GoogleGenerativeAIEmbeddings({
      apiKey,
      model: 'text-embedding-004', // 768 dimensions
      taskType: TaskType.RETRIEVAL_DOCUMENT,
      title: 'Document title',
    });
  }

  /**
   * Selects the functions most relevant to `instruction` by vector similarity.
   *
   * @param {Array<{function: {name: string, description: string, parameters?: object}}>} functions
   *   Candidate function definitions (OpenAI tool-call shape).
   * @param {string} instruction - The user's natural-language request.
   * @returns {Promise<Array>} Subset of `functions`, in similarity order.
   */
  async filter_functions(functions, instruction) {
    // Embed only name + description; `parameters` would add noise to the match.
    const documents = functions.map((f) => {
      const { parameters, ...fDescription } = f.function;
      return JSON.stringify(fDescription);
    });

    const vectorStore = await MemoryVectorStore.fromTexts(documents, [], this.embeddings);

    // Keep the top 10 matches. (OpenAI function calling caps out at 128
    // functions; 10 keeps the Gemini prompt small.)
    const retrievedDocuments = await vectorStore.similaritySearch(instruction, 10);

    const selectedFunctions = [];
    for (const document of retrievedDocuments) {
      // Map each retrieved snippet back to its full function definition
      // (including `parameters`, which was stripped before embedding).
      const pDocument = JSON.parse(document.pageContent);
      const findF = functions.find(
        (f) => f.function.name === pDocument.name && f.function.description === pDocument.description,
      );
      if (findF) {
        selectedFunctions.push(findF);
      }
    }

    return selectedFunctions;
  }

  /**
   * Asks Gemini which of `functions` satisfy `instruction` and returns the
   * parsed function calls.
   *
   * @param {Array} functions - Candidate function definitions (pre-filtered).
   * @param {string} instruction - The user's natural-language request.
   * @returns {Promise<Array<object>|null>} Parsed `{name}` objects, or null when
   *   the model proposed no function calls.
   */
  async process_user_instruction(functions, instruction) {
    //console.log(functions.map((f) => f.function.name));
    // Define the function call format
    const fn = `{"name": "function_name"}`;

    // Prepare the function string for the system prompt
    const fnStr = functions.map((f) => JSON.stringify(f)).join('\n');

    // Define the system prompt. The XML-ish tags give us a reliable anchor to
    // parse function calls back out of free-form model output.
    const systemPrompt = `
You are a helpful assistant with access to the following functions:

${fnStr}

To use these functions respond with, only output function names, ignore arguments needed by those functions:

<multiplefunctions>
<functioncall> ${fn} </functioncall>
<functioncall> ${fn} </functioncall>
...
</multiplefunctions>

Edge cases you must handle:
- If there are multiple functions that can fullfill user request, list them all.
- If there are no functions that match the user request, you will respond politely that you cannot help.
- If the user has not provided all information to execute the function call, choose the best possible set of values. Only, respond with the information requested and nothing else.
- If asked something that cannot be determined with the user's request details, respond that it is not possible to fulfill the request and explain why.
`;

    const model = this.genAI.getGenerativeModel({
      model: 'gemini-1.5-pro-latest',
      systemInstruction: {
        role: 'system',
        parts: [{ text: systemPrompt }],
      },
    });

    const request = {
      contents: [{ role: 'user', parts: [{ text: instruction }] }],
    };

    // Invoke the language model and get the completion.
    // NOTE(review): assumes at least one candidate with text parts is always
    // returned; a safety-blocked response would throw here — confirm desired.
    const completion = await model.generateContent(request);

    const content = completion.response.candidates[0].content.parts[0].text.trim();

    // Extract function calls from the completion
    return this.extractFunctionCalls(content);
  }

  /**
   * Parses `<functioncall>` payloads out of a model completion.
   *
   * @param {string|{content: string}} completion - Raw model text, or an
   *   object carrying it under `.content`.
   * @returns {Array<object>|null} Parsed JSON payloads (invalid JSON entries
   *   are skipped), or null when no function-call tags are present.
   */
  extractFunctionCalls(completion) {
    const content = typeof completion === 'string' ? completion : completion.content;

    // Multiple functions lookup
    const mfnPattern = /<multiplefunctions>(.*?)<\/multiplefunctions>/s;
    const mfnMatch = content.match(mfnPattern);

    // Single function lookup
    const singlePattern = /<functioncall>(.*?)<\/functioncall>/s;
    const singleMatch = content.match(singlePattern);

    if (!mfnMatch && !singleMatch) {
      // No function calls found
      return null;
    }

    const functions = [];
    if (mfnMatch) {
      // Multiple function calls found
      const multiplefn = mfnMatch[1];
      const fnMatches = [...multiplefn.matchAll(/<functioncall>(.*?)<\/functioncall>/gs)];
      for (const fnMatch of fnMatches) {
        // Models sometimes escape quotes inside the tag; strip backslashes
        // before parsing.
        const fnText = fnMatch[1].replace(/\\/g, '');
        try {
          functions.push(JSON.parse(fnText));
        } catch {
          // Ignore invalid JSON
        }
      }
    } else {
      // Single function call found
      const fnText = singleMatch[1].replace(/\\/g, '');
      try {
        functions.push(JSON.parse(fnText));
      } catch {
        // Ignore invalid JSON
      }
    }
    return functions;
  }
}

module.exports = GeminiGenerate;
19 changes: 19 additions & 0 deletions packages/flowtest-electron/tests/utils/flowtest-ai.test.js
Original file line number Diff line number Diff line change
Expand Up @@ -44,4 +44,23 @@ describe('generate', () => {
const nodeNames = result.map((node) => node.name);
expect(nodeNames).toEqual(['addPet', 'getPetById', 'findPetsByStatus']);
}, 60000);

// Integration test: exercises the full Gemini pipeline (embedding-based
// function filtering + function-call generation) against the live Google API.
// NOTE(review): apiKey is blank — this test presumably requires a key to be
// filled in locally and will fail as-is in CI; confirm whether it should be
// skipped when no key is configured.
it('should generate functions using gemini', async () => {
const f = new FlowtestAI();
const USER_INSTRUCTION =
'Add a new pet to the store. \
Then get the created pet. \
Then get pet with status as available.';
//const testYaml = fs.readFileSync('tests/test.yaml', { encoding: 'utf8', flag: 'r' });
// Validate the OpenAPI fixture, then fully resolve $refs so generate()
// receives a self-contained spec.
let api = await SwaggerParser.validate('tests/test.yaml');
console.log('API name: %s, Version: %s', api.info.title, api.info.version);
const resolvedSpec = (await JsonRefs.resolveRefs(api)).resolved;

let result = await f.generate(resolvedSpec, USER_INSTRUCTION, {
name: 'GEMINI',
apiKey: '',
});
// The three instructions above should map to these three spec operations,
// in order. Model output is non-deterministic, hence the generous timeout.
const nodeNames = result.map((node) => node.name);
expect(nodeNames).toEqual(['addPet', 'getPetById', 'findPetsByStatus']);
}, 60000);
});
Loading