@@ -3,73 +3,142 @@ import styles from "./styles.module.css";
 
 const EXAMPLES = {
   basic: {
-    title: "Basic RAG Pipeline",
-    code: `const { createRagPipeline } = require('@devilsdev/rag-pipeline-utils');
+    title: "Basic Custom Plugins",
+    code: `const { createRagPipeline, pluginRegistry } = require('@devilsdev/rag-pipeline-utils');
 
-// Initialize pipeline
-const pipeline = createRagPipeline({
-  embedder: {
-    type: 'openai',
-    apiKey: process.env.OPENAI_API_KEY
-  },
-  retriever: {
-    type: 'pinecone',
-    apiKey: process.env.PINECONE_API_KEY,
-    indexName: 'docs'
-  },
-  llm: {
-    type: 'openai',
-    model: 'gpt-3.5-turbo'
+// Define custom embedder plugin
+class MyEmbedder {
+  async embed(text) {
+    // Call your embedding service (OpenAI, Cohere, local model, etc.)
+    const response = await fetch('https://api.openai.com/v1/embeddings', {
+      method: 'POST',
+      headers: {
+        'Authorization': \`Bearer \${process.env.OPENAI_API_KEY}\`,
+        'Content-Type': 'application/json'
+      },
+      body: JSON.stringify({
+        input: text,
+        model: 'text-embedding-3-small'
+      })
+    });
+    const data = await response.json();
+    return data.data[0].embedding;
   }
-});
+}
+
+// Define custom retriever plugin
+class MyRetriever {
+  async retrieve({ query, queryVector, topK }) {
+    // Perform similarity search in your vector DB
+    // This is a simplified example
+    const results = await yourVectorDB.search({
+      vector: queryVector,
+      limit: topK
+    });
+    return results;
+  }
+}
 
-// Ingest documents
-await pipeline.ingest('./documents');
+// Define custom LLM plugin
+class MyLLM {
+  async generate(query, context, options) {
+    // Call your LLM (OpenAI, Anthropic, local model, etc.)
+    const prompt = \`Context: \${JSON.stringify(context)}\\n\\nQuestion: \${query}\`;
+    // Return generated answer
+    return "Answer based on context...";
+  }
+}
 
-// Query the pipeline
-const result = await pipeline.query('How does authentication work?');
-console.log(result.answer);`,
-  },
-  customEmbedder: {
-    title: "Custom Embedder",
-    code: `const { createRagPipeline } = require('@devilsdev/rag-pipeline-utils');
+// Create pipeline with custom plugins
+const pipeline = createRagPipeline({
+  embedder: new MyEmbedder(),
+  retriever: new MyRetriever(),
+  llm: new MyLLM()
+});
 
-// Define custom embedder
-class CustomEmbedder {
+// Use the pipeline
+const result = await pipeline.run({
+  query: 'How does authentication work?',
+  options: { topK: 5 }
+});
+console.log(result);`,
+  },
+  registered: {
+    title: "Using Plugin Registry",
+    code: `const { createRagPipeline, pluginRegistry } = require('@devilsdev/rag-pipeline-utils');
+
+// Define your plugins
+class OpenAIEmbedder {
+  constructor(options) {
+    this.apiKey = options.apiKey;
+    this.model = options.model || 'text-embedding-3-small';
+  }
   async embed(text) {
-    // Your custom embedding logic
-    const embedding = await this.computeEmbedding(text);
+    // Implementation details...
     return embedding;
   }
+}
 
-  async embedBatch(texts) {
-    // Batch processing for efficiency
-    return Promise.all(texts.map(t => this.embed(t)));
+class PineconeRetriever {
+  constructor(options) {
+    this.apiKey = options.apiKey;
+    this.indexName = options.indexName;
+  }
+  async retrieve({ queryVector, topK }) {
+    // Implementation details...
+    return results;
   }
+}
 
-  async computeEmbedding(text) {
-    // Example: Use a local model or API
-    const response = await fetch('http://localhost:8080/embed', {
-      method: 'POST',
-      body: JSON.stringify({ text }),
-      headers: { 'Content-Type': 'application/json' }
-    });
-    return response.json();
+class OpenAILLM {
+  constructor(options) {
+    this.apiKey = options.apiKey;
+    this.model = options.model || 'gpt-3.5-turbo';
+  }
+  async generate(query, context, options) {
+    // Implementation details...
+    return answer;
   }
 }
 
-// Use custom embedder in pipeline
+// Register plugins
+await pluginRegistry.register(
+  'embedder',
+  'openai',
+  new OpenAIEmbedder({ apiKey: process.env.OPENAI_API_KEY })
+);
+
+await pluginRegistry.register(
+  'retriever',
+  'pinecone',
+  new PineconeRetriever({
+    apiKey: process.env.PINECONE_API_KEY,
+    indexName: 'docs'
+  })
+);
+
+await pluginRegistry.register(
+  'llm',
+  'openai',
+  new OpenAILLM({ apiKey: process.env.OPENAI_API_KEY })
+);
+
+// Use registered plugins by name
 const pipeline = createRagPipeline({
-  embedder: new CustomEmbedder(),
-  retriever: myRetriever,
-  llm: myLLM
-});`,
+  registry: pluginRegistry,
+  embedder: 'openai', // String reference to registered plugin
+  retriever: 'pinecone',
+  llm: 'openai'
+});
+
+const result = await pipeline.run({ query: 'Your question here' });`,
   },
   caching: {
-    title: "Caching Strategy",
+    title: "Caching Wrapper Plugin",
     code: `const { createRagPipeline } = require('@devilsdev/rag-pipeline-utils');
 const NodeCache = require('node-cache');
 
+// Wrapper plugin that adds caching to any embedder
 class CachedEmbedder {
   constructor(baseEmbedder) {
     this.baseEmbedder = baseEmbedder;
@@ -84,7 +153,10 @@ class CachedEmbedder {
 
     // Check cache
     const cached = this.cache.get(key);
-    if (cached) return cached;
+    if (cached) {
+      console.log('Cache hit for:', text.substring(0, 50));
+      return cached;
+    }
 
     // Compute and cache
     const embedding = await this.baseEmbedder.embed(text);
@@ -100,59 +172,84 @@ class CachedEmbedder {
   }
 }
 
+// Your base embedder
+class OpenAIEmbedder {
+  async embed(text) {
+    // API call to OpenAI
+    return embedding;
+  }
+}
+
+// Wrap with caching
+const baseEmbedder = new OpenAIEmbedder();
+const cachedEmbedder = new CachedEmbedder(baseEmbedder);
+
 const pipeline = createRagPipeline({
-  embedder: new CachedEmbedder(baseEmbedder),
+  embedder: cachedEmbedder,
   retriever: myRetriever,
   llm: myLLM
 });`,
   },
-  security: {
-    title: "Security & Authentication",
-    code: `const {
-  createRagPipeline,
-  JwtValidator,
-  InputSanitizer,
-  RateLimiter
-} = require('@devilsdev/rag-pipeline-utils');
-
-// Setup security components
-const jwtValidator = new JwtValidator({
-  issuer: 'https://auth.example.com',
-  audience: 'rag-api'
-});
+  dagWorkflow: {
+    title: "DAG-Based Workflow",
+    code: `const { DAGEngine, pluginRegistry } = require('@devilsdev/rag-pipeline-utils');
+
+// Define custom plugins
+class PDFLoader {
+  async load(filePath) {
+    // Load and parse PDF
+    return { content: "...", metadata: {...} };
+  }
+}
+
+class TextChunker {
+  async chunk(document) {
+    // Split into chunks
+    return chunks;
+  }
+}
 
-const sanitizer = new InputSanitizer({
-  maxLength: 2000,
-  blockPatterns: [/ignore.*previous/i]
+// Register plugins
+pluginRegistry.register('loader', 'pdf', new PDFLoader());
+pluginRegistry.register('chunker', 'text', new TextChunker());
+
+// Create DAG workflow
+const dag = new DAGEngine({
+  timeout: 30000,
+  continueOnError: false
 });
 
-const limiter = new RateLimiter({
-  capacity: 100,
-  refillRate: 10
+// Define pipeline steps
+dag.addNode('load', async (input) => {
+  const loader = pluginRegistry.get('loader', 'pdf');
+  return loader.load(input.filePath);
 });
 
-// Protected endpoint
-app.post('/api/query', async (req, res) => {
-  try {
-    // Authenticate
-    const user = await jwtValidator.validate(
-      req.headers.authorization
-    );
+dag.addNode('chunk', async (document) => {
+  const chunker = pluginRegistry.get('chunker', 'text');
+  return chunker.chunk(document);
+});
 
-    // Rate limit
-    await limiter.checkLimit(user.id);
+dag.addNode('embed', async (chunks) => {
+  const embedder = pluginRegistry.get('embedder', 'openai');
+  return await Promise.all(
+    chunks.map(chunk => embedder.embed(chunk.text))
+  );
+});
 
-    // Sanitize input
-    const query = sanitizer.sanitize(req.body.query);
+dag.addNode('store', async (embeddings) => {
+  const retriever = pluginRegistry.get('retriever', 'pinecone');
+  return retriever.upsert(embeddings);
+});
 
-    // Query pipeline
-    const result = await pipeline.query(query);
+// Connect workflow: load -> chunk -> embed -> store
+dag.connect('load', 'chunk');
+dag.connect('chunk', 'embed');
+dag.connect('embed', 'store');
 
-    res.json(result);
-  } catch (error) {
-    res.status(401).json({ error: error.message });
-  }
-});`,
+// Execute
+const results = await dag.execute({ filePath: './document.pdf' });
+console.log('Pipeline completed:', results);`,
   },
 };
 