
Commit 049b1b8

Added simplistic RAG to lab 3
1 parent ff56151 commit 049b1b8


Labs/lab_3_mongodb_vector_search.ipynb

Lines changed: 79 additions & 0 deletions
@@ -51,6 +51,7 @@
 "load_dotenv()\n",
 "CONNECTION_STRING = os.environ.get(\"DB_CONNECTION_STRING\")\n",
 "EMBEDDINGS_DEPLOYMENT_NAME = \"embeddings\"\n",
+"COMPLETIONS_DEPLOYMENT_NAME = \"completions\"\n",
 "AOAI_ENDPOINT = os.environ.get(\"AOAI_ENDPOINT\")\n",
 "AOAI_KEY = os.environ.get(\"AOAI_KEY\")\n",
 "AOAI_API_VERSION = \"2023-05-15\""
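
The hunk above adds a `COMPLETIONS_DEPLOYMENT_NAME` for the chat model next to the existing embeddings deployment. The Azure OpenAI client that consumes these settings (the `ai_client` used by the new RAG cell further down) is created in an earlier notebook cell that this diff does not touch; as rough orientation only, a minimal sketch of that setup with the openai v1.x SDK (the exact cell contents are an assumption, not part of this commit):

```python
# Minimal sketch (assumption, not part of this commit): building the Azure OpenAI
# client that the notebook's embedding and chat completion calls rely on.
import os

from dotenv import load_dotenv
from openai import AzureOpenAI

load_dotenv()
AOAI_ENDPOINT = os.environ.get("AOAI_ENDPOINT")
AOAI_KEY = os.environ.get("AOAI_KEY")
AOAI_API_VERSION = "2023-05-15"
COMPLETIONS_DEPLOYMENT_NAME = "completions"  # deployment name added in this commit

ai_client = AzureOpenAI(
    azure_endpoint=AOAI_ENDPOINT,
    api_key=AOAI_KEY,
    api_version=AOAI_API_VERSION,
)
```

The completions deployment name is what the new cell below passes as `model=` when calling `ai_client.chat.completions.create`.
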
@@ -337,6 +338,84 @@
 "for result in results:\n",
 "    print_product_search_result(result) "
 ]
+},
+{
+"cell_type": "markdown",
+"metadata": {},
+"source": [
+"## Use vector search results in a RAG pattern with Chat GPT-3.5"
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": [
+"# A system prompt describes the responsibilities, instructions, and persona of the AI.\n",
+"system_prompt = \"\"\"\n",
+"You are a helpful, fun and friendly sales assistant for Cosmic Works, a bicycle and bicycle accessories store. \n",
+"Your name is Cosmo.\n",
+"You are designed to answer questions about the products that Cosmic Works sells.\n",
+"\n",
+"Only answer questions related to the information provided in the list of products below that are represented\n",
+"in JSON format.\n",
+"\n",
+"If you are asked a question that is not in the list, respond with \"I don't know.\"\n",
+"\n",
+"List of products:\n",
+"\"\"\""
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": [
+"def rag_with_vector_search(question: str, num_results: int = 3):\n",
+"    \"\"\"\n",
+"    Use the RAG model to generate a prompt using vector search results based on the\n",
+"    incoming question. \n",
+"    \"\"\"\n",
+"    # perform the vector search and build product list\n",
+"    results = vector_search(\"products\", question, num_results=num_results)\n",
+"    product_list = \"\"\n",
+"    for result in results:\n",
+"        if \"contentVector\" in result[\"document\"]:\n",
+"            del result[\"document\"][\"contentVector\"]\n",
+"        product_list += json.dumps(result[\"document\"], indent=4, default=str) + \"\\n\\n\"\n",
+"\n",
+"    # generate prompt for the LLM with vector results\n",
+"    formatted_prompt = system_prompt + product_list\n",
+"\n",
+"    # prepare the LLM request\n",
+"    messages = [\n",
+"        {\"role\": \"system\", \"content\": formatted_prompt},\n",
+"        {\"role\": \"user\", \"content\": question}\n",
+"    ]\n",
+"\n",
+"    completion = ai_client.chat.completions.create(messages=messages, model=COMPLETIONS_DEPLOYMENT_NAME)\n",
+"    return completion.choices[0].message.content"
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": [
+"print(rag_with_vector_search(\"What bikes do you have?\", 5))"
+]
+},
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {},
+"outputs": [],
+"source": [
+"print(rag_with_vector_search(\"What are the names and skus of yellow products?\", 5))"
+]
 }
 ],
 "metadata": {

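The new `rag_with_vector_search` cell leans on a `vector_search(collection_name, query, num_results)` helper defined earlier in the notebook, which this diff does not show. Judging by how the results are consumed (each `result` exposes a `document` sub-object that may carry a `contentVector` field), a hedged sketch of such a helper against Azure Cosmos DB for MongoDB vCore could look like the following; the pipeline, the `cosmic_works` database name, and the `generate_embeddings` helper are assumptions, not contents of this commit:

```python
# Hedged sketch of the helpers the new RAG cell relies on; the pipeline, database name,
# and helper names are assumptions based on the lab's setup, not part of this commit.
# CONNECTION_STRING, ai_client, and EMBEDDINGS_DEPLOYMENT_NAME come from earlier cells.
import pymongo

db_client = pymongo.MongoClient(CONNECTION_STRING)
db = db_client.cosmic_works  # assumed database name


def generate_embeddings(text: str):
    # Embed the query text with the "embeddings" deployment configured above.
    response = ai_client.embeddings.create(input=text, model=EMBEDDINGS_DEPLOYMENT_NAME)
    return response.data[0].embedding


def vector_search(collection_name: str, query: str, num_results: int = 3):
    """Run a cosmosSearch vector query and return scored documents."""
    collection = db[collection_name]
    query_embedding = generate_embeddings(query)
    pipeline = [
        {
            "$search": {
                "cosmosSearch": {
                    "vector": query_embedding,
                    "path": "contentVector",
                    "k": num_results,
                }
            }
        },
        # Surface the similarity score and the full document, matching the
        # result["document"] access pattern in rag_with_vector_search.
        {"$project": {"similarityScore": {"$meta": "searchScore"}, "document": "$$ROOT"}},
    ]
    return list(collection.aggregate(pipeline))
```

With those pieces in place, the two `print(rag_with_vector_search(...))` cells at the end of the diff exercise the full retrieve-then-generate loop: embed the question, pull the top `num_results` products from the `products` collection, append them to the system prompt after "List of products:", and ask the completions deployment to answer only from that context.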