Hotfix/timeout llm #64

Merged: 3 commits, Mar 11, 2024
1 change: 1 addition & 0 deletions .gitignore
@@ -1,3 +1,4 @@
node_modules
build
.vscode
tests
2 changes: 1 addition & 1 deletion package.json
@@ -1,7 +1,7 @@
{
"name": "ai-agent-enterprise",
"description": "AI Agent simplifies the implementation and use of generative AI with LangChain",
"version": "0.0.42",
"version": "0.0.43",
"main": "./build/index.js",
"types": "./build/index.d.ts",
"files": [
17 changes: 11 additions & 6 deletions src/agent.ts
@@ -95,11 +95,15 @@ class Agent extends AgentBaseCommand implements IAgent {
}
);

const referenciesDocs = relevantDocs
.map((doc: { metadata: unknown }) => doc.metadata)
.join(', ');

return { relevantDocs, referenciesDocs };
const referenciesObjDocs: any = {};
relevantDocs.map(
(doc: { metadata: any }) => referenciesObjDocs[doc.metadata] = doc.metadata
)

return {
relevantDocs: relevantDocs.map((doc: any) => doc.pageContent).join('\n'),
referenciesDocs: Object.values(referenciesObjDocs),
};
}

async call(args: IInputProps): Promise<void> {
@@ -126,7 +130,8 @@ class Agent extends AgentBaseCommand implements IAgent {

const result = await chain.call({
referencies: referenciesDocs,
input_documents: relevantDocs,
relevant_docs: relevantDocs,
input_documents: [],
query: question,
question: question,
chat_history: chatMessages,
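Note on the agent.ts hunk above: the method now returns the page contents joined into a single newline-separated string (fed to the `{relevant_docs}` prompt variable) and deduplicates the metadata values by using a plain object as a map. A minimal standalone sketch of the same transformation; the `Doc` shape and the function name are hypothetical, not the PR's actual types:

```ts
// Hypothetical sketch of the transformation performed in the hunk above.
type Doc = { pageContent: string; metadata: string };

function collectDocs(relevantDocs: Doc[]) {
  // Deduplicate reference names; a Set does the same job as the object-as-map above.
  const referenciesDocs = Array.from(new Set(relevantDocs.map((doc) => doc.metadata)));

  return {
    // One newline-separated string of page contents for the {relevant_docs} placeholder.
    relevantDocs: relevantDocs.map((doc) => doc.pageContent).join('\n'),
    referenciesDocs,
  };
}
```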
109 changes: 109 additions & 0 deletions src/helpers/http-request.helpers.ts
@@ -0,0 +1,109 @@
class FetchRequestError extends Error {
public code: any;
public originalStack: any;

constructor(originalError: any) {
const detailedError = originalError.cause
? originalError.cause
: originalError;

let { message, code } = detailedError;

if (originalError.name === 'AbortError') {
message = 'Request timeout';
code = 'ETIMEDOUT';
}

super(message);

this.name = detailedError.name;
this.code = code;
this.originalStack = detailedError.stack;
}
}

function tryParseJSON(value: any) {
try {
return JSON.parse(value);
} catch (error) {
return value;
}
};

function configureTimeout({ timeout }: any) {
let timeoutId = null;
const controller = new AbortController();
const { signal } = controller;

if (timeout) {
timeoutId = setTimeout(() => {
controller.abort();
}, timeout);
}

return { signal, timeoutId };
}

async function formatResponse(response: Response, returnStream: any) {
const body = returnStream
? response.body
: tryParseJSON(await response.text());

const responseHeaders = {} as any;

Array.from(response.headers.keys()).forEach((key) => {
responseHeaders[key] = response.headers.get(key);
});

const formattedResponse = {
body,
response: {
body,
headers: responseHeaders,
ok: response.ok,
statusCode: response.status,
statusText: response.statusText,
},
};

return response.ok
? formattedResponse
: Promise.reject(formattedResponse);
}

async function fetchRequest(
options = {} as any,
returnStream = false,
) {
try {
const abortSignal = configureTimeout(options);

const reqHeaders = {
'Content-Type': 'application/json',
...options.headers,
};

const response = await fetch(options.url, {
...options,
body: JSON.stringify(options.body),
headers: reqHeaders,
signal: abortSignal.signal,
});

clearTimeout(abortSignal.timeoutId);

return formatResponse(response, returnStream);
} catch (error) {
const fetchRequestError = new FetchRequestError(error);

console.error({
details: fetchRequestError,
error: fetchRequestError.name,
message: `[COMMONS-FETCH-REQUEST]-ERROR: ${fetchRequestError.name} - ${fetchRequestError.message}`,
});

throw fetchRequestError;
}
};

export default fetchRequest;
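A minimal usage sketch for the new helper; the endpoint, key, and timeout value are hypothetical. The `timeout` option drives the `AbortController` set up in `configureTimeout`, and a timed-out request surfaces as a `FetchRequestError` with code `ETIMEDOUT`:

```ts
// Import path as used from src/services/* (see azure-vector-store.ts below).
import fetchRequest from '../../helpers/http-request.helpers';

async function searchDocuments() {
  // Hypothetical call: POST a JSON body and abort after 5 seconds.
  const { body, response } = await fetchRequest(
    {
      url: 'https://example-search.search.windows.net/indexes/docs/search?api-version=2023-11-01', // hypothetical endpoint
      method: 'POST',
      body: { search: '*' }, // plain object; the helper applies JSON.stringify itself
      headers: { 'api-key': process.env.AZURE_SEARCH_KEY ?? '' },
      timeout: 5000, // milliseconds before the request is aborted
    },
    false, // returnStream = false, so the body is parsed via tryParseJSON
  );

  console.log(response.statusCode);
  return body;
}
```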
5 changes: 3 additions & 2 deletions src/services/chain/index.ts
@@ -67,13 +67,13 @@ class ChainService {
- The question must be a single sentence.\n
- You must remove any punctuation from the question.\n
- You must remove any words that are not relevant to the question.\n
- If you are unable to formulate a question, respond in a friendly manner so the user can rephrase the question.\n\n
- If you are unable to formulate an answer, respond in a friendly manner so the user can rephrase the question.\n\n

USER PROMPT: {user_prompt}\n
--------------------------------------
CHAT HISTORY: {format_chat_messages}\n
--------------------------------------
Context found in documents: {summaries}\n
Context found in documents: {relevant_docs}\n
--------------------------------------
Name of reference files: {referencies}\n
`;
@@ -144,6 +144,7 @@ class ChainService {
inputVariables: [
'query',
'referencies',
'relevant_docs',
'input_documents',
'question',
'chat_history',
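For reference, the template now interpolates `{relevant_docs}` instead of `{summaries}`, and `relevant_docs` was added to `inputVariables`; the same key is supplied by `chain.call` in agent.ts above. A reduced sketch of that relationship using LangChain's `PromptTemplate` (the template text is trimmed for illustration, and whether ChainService builds its prompt exactly this way is not shown in the diff):

```ts
import { PromptTemplate } from 'langchain/prompts';

// Reduced, hypothetical template: each {placeholder} must be listed in
// inputVariables, and the same keys are supplied at call time.
const prompt = new PromptTemplate({
  template:
    'Context found in documents: {relevant_docs}\n' +
    'Name of reference files: {referencies}\n' +
    'QUESTION: {question}\n',
  inputVariables: ['relevant_docs', 'referencies', 'question'],
});

async function preview() {
  return prompt.format({
    relevant_docs: 'page contents joined with newlines',
    referencies: 'file-a.pdf, file-b.pdf',
    question: 'what is the request timeout',
  });
}
```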
1 change: 1 addition & 0 deletions src/services/llm/azure-llm-service.ts
@@ -16,6 +16,7 @@ class AzureLLMService {
return new ChatOpenAI({
temperature: this._chatSettings.temperature,
streaming: true,
maxConcurrency: 10,
azureOpenAIApiDeploymentName: this._llmSettings.model,
azureOpenAIApiVersion: this._llmSettings.apiVersion,
azureOpenAIApiKey: this._llmSettings.apiKey,
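`maxConcurrency` is one of the async-caller options LangChain's model wrappers accept; it caps how many requests to the Azure OpenAI deployment run in parallel, queuing the rest. A standalone sketch with hypothetical settings (in the service above, the real values come from `_chatSettings` and `_llmSettings`):

```ts
import { ChatOpenAI } from 'langchain/chat_models/openai';

// Hypothetical standalone configuration mirroring the change above:
// at most 10 concurrent calls to the Azure OpenAI deployment.
const chat = new ChatOpenAI({
  temperature: 0,
  streaming: true,
  maxConcurrency: 10,
  azureOpenAIApiDeploymentName: 'gpt-35-turbo',      // hypothetical deployment
  azureOpenAIApiVersion: '2023-07-01-preview',       // hypothetical API version
  azureOpenAIApiKey: process.env.AZURE_OPENAI_API_KEY ?? '',
  azureOpenAIApiInstanceName: 'my-openai-instance',  // hypothetical instance name
});
```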
21 changes: 13 additions & 8 deletions src/services/vector-store/azure-vector-store.ts
@@ -4,6 +4,9 @@ import { Embeddings } from 'langchain/embeddings/base';
import { VectorStore } from 'langchain/vectorstores/base';
import { nanoid } from 'nanoid';


import request from '../../helpers/http-request.helpers';

interface AzureSearchConfig {
name: string;
indexes: string[];
@@ -38,6 +41,7 @@ type AzureCogFilter = {
};

type AzureCogRequestObject = {
select: string;
search: string;
facets: string[];
filter: string;
@@ -156,6 +160,7 @@ export class AzureCogSearch<TModel extends Record<string, unknown>> extends Vect
const url = `${this.baseUrl}/${index || this._config.indexes[0]}/docs/search?api-version=${this._config.apiVersion}`;

const searchBody: AzureCogRequestObject = {
select: 'pageContent, metadata',
search: filter?.search || '*',
facets: filter?.facets || [],
filter: filter?.filter || '',
@@ -175,21 +180,21 @@ export class AzureCogSearch<TModel extends Record<string, unknown>> extends Vect

const fetcher = async (url: string, body: any, apiKey: string) => {
const options = {
url,
method: 'POST',
body: JSON.stringify(body),
body,
headers: {
'Content-Type': 'application/json',
'api-key': apiKey,
},
};

const response = await fetch(url, options);
try {
const rs = await request(options, false);

if (!response.ok) {
const err = await response.json();

throw new Error(JSON.stringify(err));
return rs.body;
} catch (error) {
console.error('vector-error', error);
throw error;
}

return await response.json();
};
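With the fetcher routed through the shared helper, the body is passed as a plain object (the helper stringifies it) and a per-request timeout can be threaded through the same options object. A hypothetical variant of the call above with an explicit timeout:

```ts
// Hypothetical variant of the fetcher above with an explicit timeout;
// the shared helper stringifies the body and aborts the request when the
// timeout elapses, surfacing a FetchRequestError with code ETIMEDOUT.
const fetcherWithTimeout = async (url: string, body: any, apiKey: string) => {
  const rs = await request(
    {
      url,
      method: 'POST',
      body, // plain object; request() applies JSON.stringify internally
      headers: {
        'Content-Type': 'application/json',
        'api-key': apiKey,
      },
      timeout: 10_000, // hypothetical 10s budget for the cognitive search call
    },
    false,
  );

  return rs.body;
};
```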