Skip to content

Commit bd15547

Browse files
committed
Add flowtest ai component in main process
1 parent 45a6242 commit bd15547

File tree

4 files changed

+193
-1
lines changed

4 files changed

+193
-1
lines changed

packages/flowtest-electron/package.json

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,9 @@
2222
"electron-store": "^8.1.0",
2323
"fs": "^0.0.1-security",
2424
"json-refs": "^3.0.15",
25+
"langchain": "^0.1.28",
2526
"lodash": "^4.17.21",
27+
"openai": "^4.29.1",
2628
"path": "^0.12.7",
2729
"uuid": "^9.0.1"
2830
}

packages/flowtest-electron/src/ipc/collection.js

Lines changed: 16 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,8 +15,10 @@ const updateFile = require('../utils/filemanager/updatefile');
1515
const deleteFile = require('../utils/filemanager/deletefile');
1616
const { flowDataToReadableData, readableDataToFlowData } = require('../utils/parser');
1717
const readFile = require('../utils/filemanager/readfile');
18+
const FlowtestAI = require('../utils/flowtestai');
1819

1920
const collectionStore = new Collections();
21+
const flowTestAI = new FlowtestAI();
2022

2123
const timeout = 60000;
2224

@@ -85,7 +87,7 @@ const registerRendererEventHandlers = (mainWindow, watcher) => {
8587
id: id,
8688
name: collectionName,
8789
pathname: pathname,
88-
collection: spec,
90+
openapi_spec: resolvedSpec.resolved,
8991
nodes: parsedNodes,
9092
};
9193

@@ -247,6 +249,19 @@ const registerRendererEventHandlers = (mainWindow, watcher) => {
247249
}
248250
}
249251
});
252+
253+
ipcMain.handle('renderer:create-flowtest-ai', async (event, instruction, collectionId) => {
254+
try {
255+
const collection = collectionStore.getAll().find((c) => c.id === collectionId);
256+
if (collection) {
257+
return await flowTestAI.generate(collection.openapi_spec, instruction);
258+
} else {
259+
return Promise.reject(new Error('Collection not found'));
260+
}
261+
} catch (error) {
262+
return Promise.reject(error);
263+
}
264+
});
250265
};
251266

252267
module.exports = registerRendererEventHandlers;
Lines changed: 153 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,153 @@
const OpenAI = require('openai');
const dotenv = require('dotenv');
const { MemoryVectorStore } = require('langchain/vectorstores/memory');
const { OpenAIEmbeddings } = require('@langchain/openai');

// Load OPENAI_API_KEY (and any other settings) from a local .env file.
dotenv.config();

// Shared OpenAI client for all FlowtestAI instances.
// NOTE(review): if OPENAI_API_KEY is unset this is constructed with an
// undefined key and every API call will fail — confirm the desired
// failure mode for packaged (non-dev) builds.
const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
});

// System prompt steering the model toward function_call-based planning.
const SYSTEM_MESSAGE = `You are a helpful assistant. \
Respond to the following prompt by using function_call and then summarize actions. \
Ask for clarification if a user request is ambiguous.`;

// Maximum number of function calls allowed to prevent infinite or lengthy loops
const MAX_CALLS = 10;
18+
19+
class FlowtestAI {
  /**
   * Turn a natural-language instruction into an ordered list of API calls.
   * @param {object} collection - Resolved (deref'd) OpenAPI spec object.
   * @param {string} user_instruction - Natural-language request from the user.
   * @returns {Promise<object[]>} Ordered OpenAI `function_call` objects.
   */
  async generate(collection, user_instruction) {
    const available_functions = await this.get_available_functions(collection);
    const functions = await this.filter_functions(available_functions, user_instruction);
    return await this.process_user_instruction(functions, user_instruction);
  }

  /**
   * Convert every operation in the OpenAPI spec into an OpenAI
   * function-calling descriptor ({ name, description, parameters }).
   * Only `application/json` request bodies are captured.
   * @param {object} collection - Resolved OpenAPI spec; must have `paths`.
   * @returns {Promise<object[]>} One descriptor per path+method operation.
   */
  async get_available_functions(collection) {
    const functions = [];
    // forEach, not map: we iterate purely for side effects (push below).
    Object.entries(collection['paths']).forEach(([path, methods]) => {
      Object.entries(methods).forEach(([method, spec]) => {
        const function_name = spec['operationId'];
        const desc = spec['description'] || spec['summary'] || '';

        const schema = { type: 'object', properties: {} };

        // Optional chaining replaces the original 4-deep if-nest; yields
        // undefined when any level is missing.
        const req_body = spec['requestBody']?.['content']?.['application/json']?.['schema'];
        if (req_body !== undefined) {
          schema['properties']['requestBody'] = req_body;
        }

        const params = spec['parameters'] ? spec['parameters'] : [];
        if (params.length > 0) {
          const param_properties = {};
          for (const param of params) {
            if (param['schema']) {
              param_properties[param['name']] = param['schema'];
            }
          }
          schema['properties']['parameters'] = {
            type: 'object',
            properties: param_properties,
          };
        }

        functions.push({ name: function_name, description: desc, parameters: schema });
      });
    });
    return functions;
  }

  /**
   * Narrow the candidates to at most 128 functions (the OpenAI
   * function-calling limit) via embedding similarity: functions are
   * chunked 32 at a time, each chunk embedded as one document, and the
   * 4 chunks most similar to the instruction are kept (32 x 4 = 128).
   * @param {object[]} functions - All function descriptors.
   * @param {string} instruction - User instruction used as the query.
   * @returns {Promise<object[]>} Selected function descriptors.
   */
  async filter_functions(functions, instruction) {
    const chunkSize = 32;
    const chunks = [];
    for (let i = 0; i < functions.length; i += chunkSize) {
      chunks.push(functions.slice(i, i + chunkSize));
    }

    const documents = chunks.map((chunk) => JSON.stringify(chunk));

    const vectorStore = await MemoryVectorStore.fromTexts(
      documents,
      [],
      new OpenAIEmbeddings({
        openAIApiKey: process.env.OPENAI_API_KEY,
      }),
    );

    // 32 x 4 = 128 (max no of functions accepted by openAI function calling)
    const retrievedDocuments = await vectorStore.similaritySearch(instruction, 4);
    let selectedFunctions = [];
    retrievedDocuments.forEach((document) => {
      selectedFunctions = selectedFunctions.concat(JSON.parse(document.pageContent));
    });

    return selectedFunctions;
  }

  /**
   * Single chat-completion round with function calling enabled.
   * @param {object[]} functions - Function descriptors offered to the model.
   * @param {object[]} messages - Conversation so far.
   * @returns {Promise<object>} Raw OpenAI completion response.
   */
  async get_openai_response(functions, messages) {
    return await openai.chat.completions.create({
      model: 'gpt-3.5-turbo-16k-0613',
      functions: functions,
      function_call: 'auto', // "auto" means the model can pick between generating a message or calling a function.
      temperature: 0,
      messages: messages,
    });
  }

  /**
   * Drive the model in a loop, collecting every function_call it emits.
   * Each call is acknowledged with a simulated 'success' function result
   * so the model can plan the next step; the loop ends when the model
   * replies with a plain message, or after MAX_CALLS rounds.
   * @param {object[]} functions - Candidate function descriptors.
   * @param {string} instruction - User instruction.
   * @returns {Promise<object[]>} Ordered `function_call` objects.
   */
  async process_user_instruction(functions, instruction) {
    const result = [];
    let num_calls = 0;
    const messages = [
      { content: SYSTEM_MESSAGE, role: 'system' },
      { content: instruction, role: 'user' },
    ];

    while (num_calls < MAX_CALLS) {
      const response = await this.get_openai_response(functions, messages);
      const message = response['choices'][0]['message'];

      // Plain text reply means the model is done (or asking to clarify).
      if (!message['function_call']) {
        console.log('Message: ');
        console.log(message['content']);
        break;
      }

      console.log('Function call #: ', num_calls + 1);
      console.log(message['function_call']);
      messages.push(message);

      // We'll simply add a message to simulate successful function call.
      messages.push({
        role: 'function',
        content: 'success',
        name: message['function_call']['name'],
      });
      result.push(message['function_call']);

      num_calls += 1;
    }

    if (num_calls >= MAX_CALLS) {
      console.log('Reached max chained function calls: ', MAX_CALLS);
    }

    return result;
  }
}

module.exports = FlowtestAI;
Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
1+
const fs = require('fs');
2+
const SwaggerParser = require('@apidevtools/swagger-parser');
3+
const JsonRefs = require('json-refs');
4+
const FlowtestAI = require('../../src/utils/flowtestai');
5+
6+
// End-to-end test: validate + deref the petstore spec, then ask the AI
// to plan three chained calls. Hits the real OpenAI API (60s timeout),
// so it requires OPENAI_API_KEY in the environment.
describe('generate', () => {
  it('should generate functions from openapi spec', async () => {
    const f = new FlowtestAI();
    const USER_INSTRUCTION =
      'Instruction: Add a new pet to the store. \
      Then get the created pet. \
      Then get pet with status as available.';
    const api = await SwaggerParser.validate('tests/test.yaml');
    console.log('API name: %s, Version: %s', api.info.title, api.info.version);
    const resolvedSpec = (await JsonRefs.resolveRefs(api)).resolved;

    const result = await f.generate(resolvedSpec, USER_INSTRUCTION);
    const nodeNames = result.map((node) => node.name);
    expect(nodeNames).toEqual(['addPet', 'getPetById', 'findPetsByStatus']);
  }, 60000);
});

0 commit comments

Comments (0)