Commit c94e258

chore: refactor code base
1 parent ddd4489 commit c94e258

2 files changed: 3 additions & 26 deletions

app/client/api.ts

Lines changed: 0 additions & 1 deletion
@@ -70,6 +70,5 @@ export interface ModelRecord {
 
 export abstract class LLMApi {
   abstract chat(options: ChatOptions): Promise<void>;
-  abstract usage(): Promise<LLMUsage>;
   abstract abort(): Promise<void>;
 }
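
The net effect in api.ts is that usage() is no longer part of the abstract contract, so an implementation only has to supply chat() and abort(). A minimal sketch of a conforming class under the reduced contract; the NoopApi name and the simplified ChatOptions are illustrative stand-ins, not code from this commit:

// Simplified stand-in for the real ChatOptions defined in app/client/api.ts.
interface ChatOptions {
  messages: unknown[];
}

// The contract after this commit: chat() and abort() only; usage() is gone.
abstract class LLMApi {
  abstract chat(options: ChatOptions): Promise<void>;
  abstract abort(): Promise<void>;
}

// Hypothetical no-op implementation showing the reduced surface.
class NoopApi extends LLMApi {
  async chat(_options: ChatOptions): Promise<void> {
    // A real client would run generation here.
  }
  async abort(): Promise<void> {
    // A real client would cancel in-flight generation here.
  }
}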

app/client/webllm.ts

Lines changed: 3 additions & 25 deletions
@@ -69,32 +69,17 @@ export class WebLLMApi implements LLMApi {
     }
   }
 
-  async initModel(onUpdate?: (message: string, chunk: string) => void) {
+  private async initModel(onUpdate?: (message: string, chunk: string) => void) {
     if (!this.llmConfig) {
       throw Error("llmConfig is undefined");
     }
     this.webllm.engine.setInitProgressCallback((report: InitProgressReport) => {
       onUpdate?.(report.text, report.text);
     });
-    if (this.webllm.type === "serviceWorker") {
-      await this.webllm.engine.reload(this.llmConfig.model, this.llmConfig);
-    } else {
-      await this.webllm.engine.reload(this.llmConfig.model, this.llmConfig);
-    }
+    await this.webllm.engine.reload(this.llmConfig.model, this.llmConfig);
     this.initialized = true;
   }
 
-  isConfigChanged(config: LLMConfig) {
-    return (
-      this.llmConfig?.model !== config.model ||
-      this.llmConfig?.cache !== config.cache ||
-      this.llmConfig?.temperature !== config.temperature ||
-      this.llmConfig?.top_p !== config.top_p ||
-      this.llmConfig?.presence_penalty !== config.presence_penalty ||
-      this.llmConfig?.frequency_penalty !== config.frequency_penalty
-    );
-  }
-
   async chat(options: ChatOptions): Promise<void> {
     if (!this.initialized || this.isDifferentConfig(options.config)) {
       this.llmConfig = { ...(this.llmConfig || {}), ...options.config };
@@ -156,14 +141,7 @@ export class WebLLMApi implements LLMApi {
     await this.webllm.engine?.interruptGenerate();
   }
 
-  async usage() {
-    return {
-      used: 0,
-      total: 0,
-    };
-  }
-
-  isDifferentConfig(config: LLMConfig): boolean {
+  private isDifferentConfig(config: LLMConfig): boolean {
     if (!this.llmConfig) {
       return true;
     }
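
With initModel and isDifferentConfig now private, chat() is the only path that triggers (re)initialization: it reloads the engine on first use, or whenever the incoming config differs from the cached one. A self-contained sketch of that lazy-init pattern; the types and the engine call are simplified stand-ins, not the repo's real LLMConfig or WebLLM engine:

// Self-contained sketch of the lazy-(re)initialization flow this commit
// settles on. Config and the reload step are simplified stand-ins.
type Config = { model: string; temperature?: number };

class WebLLMSketch {
  private config?: Config;
  private initialized = false;

  // Public entry point: (re)loads the model only when needed.
  async chat(config: Config): Promise<void> {
    if (!this.initialized || this.isDifferentConfig(config)) {
      this.config = { ...(this.config || {}), ...config };
      await this.initModel();
    }
    // ...generation against the loaded engine would happen here...
  }

  // Private after this commit: callers never reload the engine directly.
  private async initModel(): Promise<void> {
    if (!this.config) {
      throw Error("config is undefined");
    }
    // Stand-in for this.webllm.engine.reload(this.config.model, this.config).
    this.initialized = true;
  }

  // Private after this commit: a pure comparison against the cached config.
  private isDifferentConfig(config: Config): boolean {
    if (!this.config) {
      return true;
    }
    return (
      this.config.model !== config.model ||
      this.config.temperature !== config.temperature
    );
  }
}

Collapsing the serviceWorker/non-serviceWorker branch in initModel is behavior-preserving: as the diff shows, both arms of the removed if/else called engine.reload with identical arguments.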
