@@ -45,7 +45,8 @@ import {
   createMessage,
   useAppConfig,
   DEFAULT_TOPIC,
-  ModelType,
+  Model,
+  ModelClient,
 } from "../store";
 
 import {
@@ -92,9 +93,9 @@ import { ChatCommandPrefix, useChatCommand, useCommand } from "../command";
 import { prettyObject } from "../utils/format";
 import { ExportMessageModal } from "./exporter";
 import { MultimodalContent } from "../client/api";
-import { WebLLMContext } from "../client/webllm";
 import { Template, useTemplateStore } from "../store/template";
 import Image from "next/image";
+import { MLCLLMContext, WebLLMContext } from "../context";
 
 export function ScrollDownToast(prop: { show: boolean; onclick: () => void }) {
   return (
@@ -125,7 +126,7 @@ export function SessionConfigModel(props: { onClose: () => void }) {
   };
 
   return (
-    <div className="modal-template">
+    <div className="screen-model-container">
       <Modal
         title={Locale.Context.Edit}
         onClose={() => props.onClose()}
@@ -556,7 +557,7 @@ export function ChatActions(props: {
          onClose={() => setShowModelSelector(false)}
          onSelection={(s) => {
            if (s.length === 0) return;
-            config.selectModel(s[0] as ModelType);
+            config.selectModel(s[0] as Model);
            showToast(s[0]);
          }}
        />
@@ -606,6 +607,12 @@ function _Chat() {
   const [uploading, setUploading] = useState(false);
   const [showEditPromptModal, setShowEditPromptModal] = useState(false);
   const webllm = useContext(WebLLMContext)!;
+  const mlcllm = useContext(MLCLLMContext)!;
+
+  const llm =
+    config.modelClientType === ModelClient.MLCLLM_API ? mlcllm : webllm;
+
+  const models = config.models;
 
   // prompt hints
   const promptStore = usePromptStore();
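Note: the new `llm` binding lets the rest of the component stay agnostic about which backend serves completions. Below is a minimal sketch of that pattern, assuming both contexts expose the same client surface; `ModelClient.MLCLLM_API` appears in the diff, but the enum's string values and the `LLMClient`/`pickClient` names are illustrative assumptions, not code from this commit.

// Illustrative sketch only; names other than ModelClient.MLCLLM_API are assumed.
enum ModelClient {
  WEBLLM = "webllm",
  MLCLLM_API = "mlcllm_api",
}

interface LLMClient {
  chat(prompt: string): Promise<string>; // start a completion
  abort(): void; // cancel the in-flight request
}

function pickClient(
  type: ModelClient,
  webllm: LLMClient,
  mlcllm: LLMClient,
): LLMClient {
  // Mirrors the ternary added above: use the MLC-LLM REST client when the
  // config selects it, otherwise the in-browser WebLLM client.
  return type === ModelClient.MLCLLM_API ? mlcllm : webllm;
}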
@@ -685,7 +692,7 @@ function _Chat() {
 
    if (isStreaming) return;
 
-    chatStore.onUserInput(userInput, webllm, attachImages);
+    chatStore.onUserInput(userInput, llm, attachImages);
    setAttachImages([]);
    localStorage.setItem(LAST_INPUT_KEY, userInput);
    setUserInput("");
@@ -713,7 +720,7 @@ function _Chat() {
 
   // stop response
   const onUserStop = () => {
-    webllm.abort();
+    llm.abort();
    chatStore.stopStreaming();
  };
 
@@ -836,7 +843,7 @@ function _Chat() {
    // resend the message
    const textContent = getMessageTextContent(userMessage);
    const images = getMessageImages(userMessage);
-    chatStore.onUserInput(textContent, webllm, images);
+    chatStore.onUserInput(textContent, llm, images);
    inputRef.current?.focus();
  };
 
@@ -867,7 +874,13 @@ function _Chat() {
            ]
          : [],
    );
-  }, [config.sendPreviewBubble, context, session.messages, userInput]);
+  }, [
+    config.sendPreviewBubble,
+    context,
+    session.messages,
+    session.messages.length,
+    userInput,
+  ]);
 
  const [msgRenderIndex, _setMsgRenderIndex] = useState(
    Math.max(0, renderMessages.length - CHAT_PAGE_SIZE),
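Note: listing `session.messages.length` alongside `session.messages` likely guards against messages being appended to an array whose reference is reused; a reference-equality check on the array alone would not notice the append. A small sketch of the idea (the hook and prop names here are assumptions, not this component's code):

import { useMemo } from "react";

// Illustrative sketch: with only `messages` in the deps, a push onto the same
// array instance would not trigger a recompute; `messages.length` changes on append.
function useRenderMessages(
  messages: { content: string }[],
  previewText: string,
) {
  return useMemo(
    () =>
      previewText ? messages.concat([{ content: previewText }]) : messages,
    [messages, messages.length, previewText],
  );
}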
@@ -1183,10 +1196,9 @@ function _Chat() {
                )}
                {message.role === "assistant" && (
                  <div className={styles["chat-message-role-name"]}>
-                    {config.models.find((m) => m.name === message.model)
-                      ? config.models.find(
-                          (m) => m.name === message.model,
-                        )!.display_name
+                    {models.find((m) => m.name === message.model)
+                      ? models.find((m) => m.name === message.model)!
+                          .display_name
                      : message.model}
                  </div>
                )}
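Note: the rendered ternary above still calls `find` twice on a hit. For reference, the same lookup could be captured by a small helper along these lines (a sketch with assumed types, not code from this commit):

// Illustrative sketch: resolve a model's display name from the configured
// model list, falling back to the raw model id when no entry matches.
interface ModelRecord {
  name: string;
  display_name: string;
}

function displayNameFor(models: ModelRecord[], modelId: string): string {
  const match = models.find((m) => m.name === modelId);
  return match ? match.display_name : modelId;
}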