Skip to content

Commit 5da767f

Browse files
committed
hardcoding the model for chat for now, working to fix the response differences
1 parent db29256 commit 5da767f

File tree

2 files changed

+19
-18
lines changed

2 files changed

+19
-18
lines changed

src/lambda/chat_with_document_resolver/index.py

Lines changed: 16 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -52,8 +52,11 @@ def handler(event, context):
5252
full_prompt = "You are an assistant that's responsible for getting details from document text attached here based on questions from the user.\n\n"
5353
full_prompt += "If you don't know the answer, just say that you don't know. Don't try to make up an answer.\n\n"
5454
full_prompt += "Additionally, use the user and assistant responses in the following JSON object to see what's been asked and what the resposes were in the past.\n\n"
55-
full_prompt += "The JSON object is: " + json.dumps(history) + ".\n\n"
56-
full_prompt += "The user's question is: " + prompt
55+
# full_prompt += "Your response MUST be in the following JSON format: {'content': [{'text': 'String'}]}.\n\n"
56+
# full_prompt += "You MUST NOT include outside of that JSON format.\n\n"
57+
# full_prompt += "Do NOT include the role or anything else in the response."
58+
full_prompt += "The history JSON object is: " + json.dumps(history) + ".\n\n"
59+
full_prompt += "The user's question is: " + prompt + "\n\n"
5760

5861
# this feature is not enabled until the model can be selected on the chat screen
5962
# selectedModelId = event['arguments']['modelId']
@@ -67,13 +70,14 @@ def handler(event, context):
6770

6871
# Call Bedrock Runtime to get Python code based on the prompt
6972
if (len(objectKey)):
70-
encoded_string = objectKey.encode()
71-
md5_hash = hashlib.md5(encoded_string, usedforsecurity=False)
72-
hex_representation = md5_hash.hexdigest()
73+
# encoded_string = objectKey.encode()
74+
# md5_hash = hashlib.md5(encoded_string)
75+
# hex_representation = md5_hash.hexdigest()
7376

7477
# full text key
7578
fulltext_key = objectKey + '/summary/fulltext.txt'
7679

80+
logger.info(f"Model: {selectedModelId}")
7781
logger.info(f"Output Bucket: {output_bucket}")
7882
logger.info(f"Full Text Key: {fulltext_key}")
7983

@@ -108,6 +112,7 @@ def handler(event, context):
108112

109113
# print('invoking model converse')
110114

115+
selectedModelId = 'us.amazon.nova-pro-v1:0'
111116
response = bedrock_runtime.converse(
112117
modelId=selectedModelId,
113118
messages=message
@@ -121,18 +126,15 @@ def handler(event, context):
121126
# print(f"cacheWriteInputTokens: {token_usage['cacheWriteInputTokens']}")
122127
# print(f"Stop reason: {response['stopReason']}")
123128

124-
output_message = response['output']['message']
125-
126-
model_response_text = ''
127-
for content in output_message['content']:
128-
model_response_text += content['text']
129129

130-
# print output_message
130+
output_message = response['output']['message']
131+
text_content = output_message['content'][0]['text']
131132

132-
chat_response = {"cr" : output_message }
133+
chat_response = {"cr": {"content": [{"text": text_content}]}}
133134
return json.dumps(chat_response)
134135

135136

137+
136138
except ClientError as e:
137139
error_code = e.response['Error']['Code']
138140
error_message = e.response['Error']['Message']
@@ -146,7 +148,7 @@ def handler(event, context):
146148
raise Exception(error_message)
147149

148150
except Exception as e:
149-
logger.error(f"Unexpected Error: {str(e)}")
150-
raise Exception(f"Unexpected Error: {str(e)}")
151+
logger.error(f"Unexpected error: {str(e)}")
152+
raise Exception(f"Error fetching file: {str(e)}")
151153

152154
return response_data

src/ui/src/components/chat-panel/ChatPanel.jsx

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -80,6 +80,7 @@ const ChatPanel = (item) => {
8080
const chatResponse = getChatResponse(objectKey, prompt, jsonChatHistory);
8181

8282
let chatResponseData = {};
83+
let chatItem = {};
8384

8485
chatResponse
8586
.then((r) => {
@@ -93,12 +94,10 @@ const ChatPanel = (item) => {
9394
type: 'msg',
9495
};
9596

96-
const chatItem = {
97+
chatItem = {
9798
ask: prompt,
9899
response: cResponse.cr.content[0].text,
99100
};
100-
101-
setJsonChatHistory((prevChatHistory) => [...prevChatHistory, chatItem]);
102101
}
103102
})
104103
.catch((r) => {
@@ -114,8 +113,8 @@ const ChatPanel = (item) => {
114113
.finally(() => {
115114
// remove loader from the chat queries
116115
setChatQueries((prevChatQueries) => prevChatQueries.filter((data) => data.role !== 'loader'));
117-
118116
setChatQueries((prevChatQueries) => [...prevChatQueries, chatResponseData]);
117+
setJsonChatHistory((prevChatHistory) => [...prevChatHistory, chatItem]);
119118
const maxScrollHeight = document.documentElement.scrollHeight;
120119
window.scrollTo(0, maxScrollHeight);
121120
});

0 commit comments

Comments (0)