@@ -3,6 +3,8 @@ import JsonRefs from 'json-refs'
 import OpenAI from 'openai';
 import dotenv from 'dotenv';
 import * as fs from 'fs';
+import { HNSWLib } from "@langchain/community/vectorstores/hnswlib";
+import { OpenAIEmbeddings } from "@langchain/openai";
 
 dotenv.config();
 
@@ -20,7 +22,8 @@ const MAX_CALLS = 10
 class FlowtestAI {
 
     async generate(collection: string, user_instruction: string): Promise<any[]> {
-        const functions = await this.get_available_functions(collection);
+        const available_functions = await this.get_available_functions(collection);
+        const functions = await this.filter_functions(available_functions, user_instruction);
         return await this.process_user_instruction(functions, user_instruction);
     }
 
@@ -73,10 +76,39 @@ class FlowtestAI {
                 )
             })
         })
-        console.log(JSON.stringify(functions));
+        // console.log(JSON.stringify(functions));
         return functions;
     }
 
+    async filter_functions(functions: any[], instruction: string): Promise<any[]> {
+        const chunkSize = 32;
+        const chunks = [];
+
+        for (let i = 0; i < functions.length; i += chunkSize) {
+            const chunk = functions.slice(i, i + chunkSize);
+            chunks.push(chunk);
+        }
+
+        const documents = chunks.map((chunk) => JSON.stringify(chunk));
+
+        const vectorStore = await HNSWLib.fromTexts(
+            documents,
+            [],
+            new OpenAIEmbeddings({
+                openAIApiKey: process.env.OPENAI_API_KEY
+            })
+        );
+
+        // 32 x 4 = 128 (max no of functions accepted by openAI function calling)
+        const retrievedDocuments = await vectorStore.similaritySearch(instruction, 4);
+        var selectedFunctions = [];
+        retrievedDocuments.forEach((document) => {
+            selectedFunctions = selectedFunctions.concat(JSON.parse(document.pageContent));
+        })
+
+        return selectedFunctions;
+    }
+
     async get_openai_response(functions: any[], messages: any[]) {
         return await openai.chat.completions.create({
             model: "gpt-3.5-turbo-16k-0613",
@@ -105,8 +137,7 @@ class FlowtestAI {
         console.log(message["function_call"])
         messages.push(message)
 
-        // For the sake of this example, we'll simply add a message to simulate success.
-        // Normally, you'd want to call the function here, and append the results to messages.
+        // We'll simply add a message to simulate successful function call.
         messages.push(
             {
                 "role": "function",
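For context, a rough usage sketch of the flow after this change, assuming FlowtestAI is exported from this module, OPENAI_API_KEY is set via .env, and the collection argument is a path to an OpenAPI collection (as the fs and json-refs imports suggest); the spec path and instruction below are hypothetical:

// Usage sketch, not part of the commit. Assumes:
//   - FlowtestAI is exported from this module
//   - OPENAI_API_KEY is set via .env
//   - `collection` is a path to an OpenAPI spec (hypothetical path below)
const ai = new FlowtestAI();

const actions = await ai.generate(
    'specs/petstore.yaml',
    'Create a new pet and then list all available pets'
);

// Inspect whatever process_user_instruction resolved with.
console.log(JSON.stringify(actions, null, 2));

Chunking the available functions into groups of 32 and retrieving the 4 chunks most similar to the instruction keeps the candidate set at or below the 128-function cap noted in the code, while still grounding the selection in the user's request.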