Skip to content

Commit 0a60ef0

Browse files
committed
Merge branch 'feature/chat-with-document' into 'develop'
Feature/chat with document See merge request genaiic-reusable-assets/engagement-artifacts/genaiic-idp-accelerator!242
2 parents d02d4e7 + 8921800 commit 0a60ef0

File tree

3 files changed

+19
-18
lines changed

3 files changed

+19
-18
lines changed

docs/web-ui.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -107,7 +107,7 @@ Your chat history will be saved as you continue your chat but if you leave the d
107107
### How to Use
108108

109109
1. Navigate to a document's detail page and scroll to the bottom
110-
2. In the text area, type in your question and you'll see an answer pop up after the document is analyzed with the model that's configured for summarization
110+
2. In the text area, type in your question and you'll see an answer pop up after the document is analyzed with the Nova Pro model
111111

112112
## Authentication Features
113113

src/lambda/chat_with_document_resolver/index.py

Lines changed: 15 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -52,8 +52,11 @@ def handler(event, context):
5252
full_prompt = "You are an assistant that's responsible for getting details from document text attached here based on questions from the user.\n\n"
5353
full_prompt += "If you don't know the answer, just say that you don't know. Don't try to make up an answer.\n\n"
5454
full_prompt += "Additionally, use the user and assistant responses in the following JSON object to see what's been asked and what the responses were in the past.\n\n"
55-
full_prompt += "The JSON object is: " + json.dumps(history) + ".\n\n"
56-
full_prompt += "The user's question is: " + prompt
55+
# full_prompt += "Your response MUST be in the following JSON format: {'content': [{'text': 'String'}]}.\n\n"
56+
# full_prompt += "You MUST NOT include outside of that JSON format.\n\n"
57+
# full_prompt += "Do NOT include the role or anything else in the response."
58+
full_prompt += "The history JSON object is: " + json.dumps(history) + ".\n\n"
59+
full_prompt += "The user's question is: " + prompt + "\n\n"
5760

5861
# this feature is not enabled until the model can be selected on the chat screen
5962
# selectedModelId = event['arguments']['modelId']
@@ -67,13 +70,14 @@ def handler(event, context):
6770

6871
# Call Bedrock Runtime to get Python code based on the prompt
6972
if (len(objectKey)):
70-
encoded_string = objectKey.encode()
71-
md5_hash = hashlib.md5(encoded_string, usedforsecurity=False)
72-
hex_representation = md5_hash.hexdigest()
73+
# encoded_string = objectKey.encode()
74+
# md5_hash = hashlib.md5(encoded_string)
75+
# hex_representation = md5_hash.hexdigest()
7376

7477
# full text key
7578
fulltext_key = objectKey + '/summary/fulltext.txt'
7679

80+
logger.info(f"Model: {selectedModelId}")
7781
logger.info(f"Output Bucket: {output_bucket}")
7882
logger.info(f"Full Text Key: {fulltext_key}")
7983

@@ -108,6 +112,7 @@ def handler(event, context):
108112

109113
# print('invoking model converse')
110114

115+
selectedModelId = 'us.amazon.nova-pro-v1:0'
111116
response = bedrock_runtime.converse(
112117
modelId=selectedModelId,
113118
messages=message
@@ -121,25 +126,22 @@ def handler(event, context):
121126
# print(f"cacheWriteInputTokens: {token_usage['cacheWriteInputTokens']}")
122127
# print(f"Stop reason: {response['stopReason']}")
123128

124-
output_message = response['output']['message']
125-
126-
model_response_text = ''
127-
for content in output_message['content']:
128-
model_response_text += content['text']
129129

130-
# print output_message
130+
output_message = response['output']['message']
131+
text_content = output_message['content'][0]['text']
131132

132-
chat_response = {"cr" : output_message }
133+
chat_response = {"cr": {"content": [{"text": text_content}]}}
133134
return json.dumps(chat_response)
134135

135136

137+
136138
except ClientError as e:
137139
error_code = e.response['Error']['Code']
138140
error_message = e.response['Error']['Message']
139141
logger.error(f"S3 ClientError: {error_code} - {error_message}")
140142

141143
if error_code == 'NoSuchKey':
142-
raise Exception(f"File not found: {objectKey}")
144+
raise Exception(f"File not found: {fulltext_key}. The chat feature will not work with files that were processed prior to v0.3.11.")
143145
elif error_code == 'NoSuchBucket':
144146
raise Exception(f"Bucket not found: {output_bucket}")
145147
else:

src/ui/src/components/chat-panel/ChatPanel.jsx

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -80,6 +80,7 @@ const ChatPanel = (item) => {
8080
const chatResponse = getChatResponse(objectKey, prompt, jsonChatHistory);
8181

8282
let chatResponseData = {};
83+
let chatItem = {};
8384

8485
chatResponse
8586
.then((r) => {
@@ -93,12 +94,10 @@ const ChatPanel = (item) => {
9394
type: 'msg',
9495
};
9596

96-
const chatItem = {
97+
chatItem = {
9798
ask: prompt,
9899
response: cResponse.cr.content[0].text,
99100
};
100-
101-
setJsonChatHistory((prevChatHistory) => [...prevChatHistory, chatItem]);
102101
}
103102
})
104103
.catch((r) => {
@@ -114,8 +113,8 @@ const ChatPanel = (item) => {
114113
.finally(() => {
115114
// remove loader from the chat queries
116115
setChatQueries((prevChatQueries) => prevChatQueries.filter((data) => data.role !== 'loader'));
117-
118116
setChatQueries((prevChatQueries) => [...prevChatQueries, chatResponseData]);
117+
setJsonChatHistory((prevChatHistory) => [...prevChatHistory, chatItem]);
119118
const maxScrollHeight = document.documentElement.scrollHeight;
120119
window.scrollTo(0, maxScrollHeight);
121120
});

0 commit comments

Comments
 (0)