30 changes: 26 additions & 4 deletions app/client/webllm.ts
@@ -36,21 +36,33 @@ type WebLLMHandler = ServiceWorkerWebLLMHandler | WebWorkerWebLLMHandler;
 export class WebLLMApi implements LLMApi {
   private llmConfig?: LLMConfig;
   private initialized = false;
-  webllm: WebLLMHandler;
+  webllm?: WebLLMHandler;
+  private type: "serviceWorker" | "webWorker";
+  private logLevel: LogLevel;
+  private isEngineInitialized = false;
 
   constructor(
     type: "serviceWorker" | "webWorker",
     logLevel: LogLevel = "WARN",
   ) {
+    this.type = type;
+    this.logLevel = logLevel;
+  }
+
+  private initEngine() {
+    if (this.isEngineInitialized || typeof window === "undefined") {
+      return;
+    }
+
     const engineConfig = {
       appConfig: {
         ...prebuiltAppConfig,
         useIndexedDBCache: this.llmConfig?.cache === "index_db",
       },
-      logLevel,
+      logLevel: this.logLevel,
     };
 
-    if (type === "serviceWorker") {
+    if (this.type === "serviceWorker") {
       log.info("Create ServiceWorkerMLCEngine");
       this.webllm = {
         type: "serviceWorker",
@@ -68,12 +80,17 @@ export class WebLLMApi implements LLMApi {
         ),
       };
     }
+    this.isEngineInitialized = true;
   }
 
   private async initModel(onUpdate?: (message: string, chunk: string) => void) {
     if (!this.llmConfig) {
       throw Error("llmConfig is undefined");
     }
+    this.initEngine();
+    if (!this.webllm) {
+      throw Error("Engine not initialized");
+    }
     this.webllm.engine.setInitProgressCallback((report: InitProgressReport) => {
       onUpdate?.(report.text, report.text);
     });
@@ -153,7 +170,8 @@
   }
 
   async abort() {
-    await this.webllm.engine?.interruptGenerate();
+    this.initEngine();
+    await this.webllm?.engine?.interruptGenerate();
   }
 
   private isDifferentConfig(config: LLMConfig): boolean {
@@ -227,6 +245,10 @@ export class WebLLMApi implements LLMApi {
       extraBody.enable_thinking = this.llmConfig?.enable_thinking ?? false;
     }
 
+    this.initEngine();
+    if (!this.webllm) {
+      throw Error("Engine not initialized");
+    }
     const completion = await this.webllm.engine.chatCompletion({
       stream: stream,
       messages: (newMessages || messages) as ChatCompletionMessageParam[],
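The change above defers engine construction out of the constructor and into a guarded initEngine() step, so WebLLMApi can be instantiated during server-side rendering (where window is undefined) and the engine is created at most once, on first use in the browser. Below is a minimal sketch of that lazy-initialization guard, assuming placeholder Engine and createEngine names that are not the project's actual API:

type Engine = { interruptGenerate(): Promise<void> };

// Placeholder factory standing in for the expensive, browser-only engine setup.
function createEngine(logLevel: string): Engine {
  return {
    interruptGenerate: async () => console.log(`[${logLevel}] generation interrupted`),
  };
}

class LazyEngineHost {
  private engine?: Engine;
  private isEngineInitialized = false;

  // The constructor only records configuration, so constructing the class
  // is safe on the server, where `window` does not exist.
  constructor(private logLevel: string = "WARN") {}

  private initEngine() {
    // Run at most once, and never outside a browser environment.
    if (this.isEngineInitialized || typeof window === "undefined") {
      return;
    }
    this.engine = createEngine(this.logLevel);
    this.isEngineInitialized = true;
  }

  async abort() {
    // Each public entry point re-runs the guard, then uses optional chaining
    // in case initialization was skipped (e.g. on the server).
    this.initEngine();
    await this.engine?.interruptGenerate();
  }
}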
6 changes: 3 additions & 3 deletions app/components/home.tsx
@@ -239,9 +239,9 @@ const useWebLLM = () => {
     }
   }, []);
 
-  if (webllm?.webllm.type === "serviceWorker") {
+  if (webllm?.webllm?.type === "serviceWorker") {
     setInterval(() => {
-      if (webllm) {
+      if (webllm?.webllm) {
        // 10s per heartbeat, dead after 30 seconds of inactivity
        setWebllmAlive(
          !!webllm.webllm.engine &&
@@ -314,7 +314,7 @@ const useLogLevel = (webllm?: WebLLMApi) => {
   useEffect(() => {
     log.setLevel(config.logLevel);
     if (webllm?.webllm?.engine) {
-      webllm.webllm.engine.setLogLevel(config.logLevel);
+      webllm?.webllm?.engine.setLogLevel(config.logLevel);
    }
  }, [config.logLevel, webllm?.webllm?.engine]);
 };
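Because the webllm handler on WebLLMApi is now optional, the consumer-side checks above add optional chaining before touching it. Below is a minimal sketch of the same heartbeat guard, with Handler and markAlive as illustrative placeholders (the liveness condition is truncated in the diff above, so only the engine-presence part is reproduced):

type Handler = { type: "serviceWorker" | "webWorker"; engine?: unknown };

function startHeartbeat(
  getHandler: () => Handler | undefined,
  markAlive: (alive: boolean) => void,
) {
  // Only the service-worker handler needs a keep-alive ping.
  if (getHandler()?.type === "serviceWorker") {
    // 10s per heartbeat, matching the comment in the diff above.
    setInterval(() => {
      const handler = getHandler();
      if (handler) {
        markAlive(!!handler.engine);
      }
    }, 10_000);
  }
}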