Skip to content

Commit 43fde2f

Browse files
committed
ft-97: filter functions based on user instruction to obey the 128-function limit of OpenAI
1 parent 53a3ac1 commit 43fde2f

File tree

2 files changed

+39
-4
lines changed

2 files changed

+39
-4
lines changed

server/package.json

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -30,10 +30,14 @@
3030
},
3131
"dependencies": {
3232
"@apidevtools/swagger-parser": "^10.1.0",
33+
"@langchain/community": "^0.0.16",
34+
"@langchain/openai": "^0.0.11",
3335
"cors": "^2.8.5",
3436
"dotenv": "^16.3.1",
3537
"express": "^4.17.1",
38+
"hnswlib-node": "^1.4.2",
3639
"json-refs": "^3.0.15",
40+
"langchain": "^0.1.2",
3741
"multer": "^1.4.5-lts.1",
3842
"openai": "^4.14.1",
3943
"reflect-metadata": "^0.1.13",

server/src/flowtest-ai.ts

Lines changed: 35 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,8 @@ import JsonRefs from 'json-refs'
33
import OpenAI from 'openai';
44
import dotenv from 'dotenv';
55
import * as fs from 'fs';
6+
import { HNSWLib } from "@langchain/community/vectorstores/hnswlib";
7+
import { OpenAIEmbeddings } from "@langchain/openai";
68

79
dotenv.config();
810

@@ -20,7 +22,8 @@ const MAX_CALLS = 10
2022
class FlowtestAI {
2123

2224
async generate(collection: string, user_instruction: string): Promise<any[]> {
23-
const functions = await this.get_available_functions(collection);
25+
const available_functions = await this.get_available_functions(collection);
26+
const functions = await this.filter_functions(available_functions, user_instruction);
2427
return await this.process_user_instruction(functions, user_instruction);
2528
}
2629

@@ -73,10 +76,39 @@ class FlowtestAI {
7376
)
7477
})
7578
})
76-
console.log(JSON.stringify(functions));
79+
// console.log(JSON.stringify(functions));
7780
return functions;
7881
}
7982

83+
async filter_functions(functions: any[], instruction: string): Promise<any[]> {
84+
const chunkSize = 32;
85+
const chunks = [];
86+
87+
for (let i = 0; i < functions.length; i += chunkSize) {
88+
const chunk = functions.slice(i, i + chunkSize);
89+
chunks.push(chunk);
90+
}
91+
92+
const documents = chunks.map((chunk) => JSON.stringify(chunk));
93+
94+
const vectorStore = await HNSWLib.fromTexts(
95+
documents,
96+
[],
97+
new OpenAIEmbeddings({
98+
openAIApiKey: process.env.OPENAI_API_KEY
99+
})
100+
);
101+
102+
// 32 x 4 = 128 (max number of functions accepted by OpenAI function calling)
103+
const retrievedDocuments = await vectorStore.similaritySearch(instruction, 4);
104+
var selectedFunctions = [];
105+
retrievedDocuments.forEach((document) => {
106+
selectedFunctions = selectedFunctions.concat(JSON.parse(document.pageContent));
107+
})
108+
109+
return selectedFunctions;
110+
}
111+
80112
async get_openai_response(functions: any[], messages: any[]) {
81113
return await openai.chat.completions.create({
82114
model: "gpt-3.5-turbo-16k-0613",
@@ -105,8 +137,7 @@ class FlowtestAI {
105137
console.log(message["function_call"])
106138
messages.push(message)
107139

108-
// For the sake of this example, we'll simply add a message to simulate success.
109-
// Normally, you'd want to call the function here, and append the results to messages.
140+
// We'll simply add a message to simulate a successful function call.
110141
messages.push(
111142
{
112143
"role": "function",

0 commit comments

Comments
 (0)