Skip to content

Commit d35d12d

Browse files
tnfssc and jacoblee93 authored
community[minor]: upgraded @mlc/web-llm dependency and updated it's ChatModel (#5637)
* chore(community/webllm): upgraded @mlc/web-llm dependency and updated it's ChatModel * Format --------- Co-authored-by: jacoblee93 <jacoblee93@gmail.com>
1 parent f2a087d commit d35d12d

File tree

3 files changed

+27
-28
lines changed

3 files changed

+27
-28
lines changed

‎libs/langchain-community/package.json‎

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,7 @@
8686
"@langchain/scripts": "~0.0.14",
8787
"@layerup/layerup-security": "^1.5.12",
8888
"@mendable/firecrawl-js": "^0.0.13",
89-
"@mlc-ai/web-llm": "^0.2.35",
89+
"@mlc-ai/web-llm": "^0.2.40",
9090
"@mozilla/readability": "^0.4.4",
9191
"@neondatabase/serverless": "^0.9.1",
9292
"@notionhq/client": "^2.2.10",
@@ -245,7 +245,7 @@
245245
"@huggingface/inference": "^2.6.4",
246246
"@layerup/layerup-security": "^1.5.12",
247247
"@mendable/firecrawl-js": "^0.0.13",
248-
"@mlc-ai/web-llm": "^0.2.35",
248+
"@mlc-ai/web-llm": "^0.2.40",
249249
"@mozilla/readability": "*",
250250
"@neondatabase/serverless": "*",
251251
"@notionhq/client": "^2.2.10",

‎libs/langchain-community/src/chat_models/webllm.ts‎

Lines changed: 10 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -43,7 +43,7 @@ export interface WebLLMCallOptions extends BaseLanguageModelCallOptions {}
4343
export class ChatWebLLM extends SimpleChatModel<WebLLMCallOptions> {
4444
static inputs: WebLLMInputs;
4545

46-
protected engine: webllm.EngineInterface;
46+
protected engine: webllm.MLCEngine;
4747

4848
appConfig?: webllm.AppConfig;
4949

@@ -63,40 +63,33 @@ export class ChatWebLLM extends SimpleChatModel<WebLLMCallOptions> {
6363
this.chatOptions = inputs.chatOptions;
6464
this.model = inputs.model;
6565
this.temperature = inputs.temperature;
66+
this.engine = new webllm.MLCEngine();
6667
}
6768

6869
_llmType() {
6970
return "web-llm";
7071
}
7172

7273
async initialize(progressCallback?: webllm.InitProgressCallback) {
73-
this.engine = new webllm.Engine();
7474
if (progressCallback !== undefined) {
7575
this.engine.setInitProgressCallback(progressCallback);
7676
}
7777
await this.reload(this.model, this.chatOptions, this.appConfig);
78-
this.engine.setInitProgressCallback(() => {});
7978
}
8079

8180
async reload(
8281
modelId: string,
8382
newAppConfig?: webllm.AppConfig,
8483
newChatOpts?: webllm.ChatOptions
8584
) {
86-
if (this.engine !== undefined) {
87-
await this.engine.reload(modelId, newAppConfig, newChatOpts);
88-
} else {
89-
throw new Error("Initialize model before reloading.");
90-
}
85+
await this.engine.reload(modelId, newChatOpts, newAppConfig);
9186
}
9287

9388
async *_streamResponseChunks(
9489
messages: BaseMessage[],
9590
options: this["ParsedCallOptions"],
9691
runManager?: CallbackManagerForLLMRun
9792
): AsyncGenerator<ChatGenerationChunk> {
98-
await this.initialize();
99-
10093
const messagesInput: ChatCompletionMessageParam[] = messages.map(
10194
(message) => {
10295
if (typeof message.content !== "string") {
@@ -124,15 +117,12 @@ export class ChatWebLLM extends SimpleChatModel<WebLLMCallOptions> {
124117
}
125118
);
126119

127-
const stream = this.engine.chatCompletionAsyncChunkGenerator(
128-
{
129-
stream: true,
130-
messages: messagesInput,
131-
stop: options.stop,
132-
logprobs: true,
133-
},
134-
{}
135-
);
120+
const stream = await this.engine.chat.completions.create({
121+
stream: true,
122+
messages: messagesInput,
123+
stop: options.stop,
124+
logprobs: true,
125+
});
136126
for await (const chunk of stream) {
137127
// Last chunk has undefined content
138128
const text = chunk.choices[0].delta.content ?? "";
@@ -146,7 +136,7 @@ export class ChatWebLLM extends SimpleChatModel<WebLLMCallOptions> {
146136
},
147137
}),
148138
});
149-
await runManager?.handleLLMNewToken(text ?? "");
139+
await runManager?.handleLLMNewToken(text);
150140
}
151141
}
152142

‎yarn.lock‎

Lines changed: 15 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -9100,7 +9100,7 @@ __metadata:
91009100
"@langchain/scripts": ~0.0.14
91019101
"@layerup/layerup-security": ^1.5.12
91029102
"@mendable/firecrawl-js": ^0.0.13
9103-
"@mlc-ai/web-llm": ^0.2.35
9103+
"@mlc-ai/web-llm": ^0.2.40
91049104
"@mozilla/readability": ^0.4.4
91059105
"@neondatabase/serverless": ^0.9.1
91069106
"@notionhq/client": ^2.2.10
@@ -9267,7 +9267,7 @@ __metadata:
92679267
"@huggingface/inference": ^2.6.4
92689268
"@layerup/layerup-security": ^1.5.12
92699269
"@mendable/firecrawl-js": ^0.0.13
9270-
"@mlc-ai/web-llm": ^0.2.35
9270+
"@mlc-ai/web-llm": ^0.2.40
92719271
"@mozilla/readability": "*"
92729272
"@neondatabase/serverless": "*"
92739273
"@notionhq/client": ^2.2.10
@@ -10478,10 +10478,12 @@ __metadata:
1047810478
languageName: node
1047910479
linkType: hard
1048010480

10481-
"@mlc-ai/web-llm@npm:^0.2.35":
10482-
version: 0.2.35
10483-
resolution: "@mlc-ai/web-llm@npm:0.2.35"
10484-
checksum: 03c1d1847340f88474e1eeed7a91cc09e29299a1216e378385ffe5479c203d39a8656d98c9187864322453a91f046b874d7073662ab04033527079d9bb29bee3
10481+
"@mlc-ai/web-llm@npm:^0.2.40":
10482+
version: 0.2.40
10483+
resolution: "@mlc-ai/web-llm@npm:0.2.40"
10484+
dependencies:
10485+
loglevel: ^1.9.1
10486+
checksum: 44d46178f7b7f899893ee8096fd4188b8c343589a10428c52f87b1b7e708f7a94b2b6315c8a6f8075f14d6d92aebfd8afc7f6d049a2ef60f8b8dc950b98a82e2
1048510487
languageName: node
1048610488
linkType: hard
1048710489

@@ -28464,6 +28466,13 @@ __metadata:
2846428466
languageName: node
2846528467
linkType: hard
2846628468

28469+
"loglevel@npm:^1.9.1":
28470+
version: 1.9.1
28471+
resolution: "loglevel@npm:1.9.1"
28472+
checksum: e1c8586108c4d566122e91f8a79c8df728920e3a714875affa5120566761a24077ec8ec9e5fc388b022e39fc411ec6e090cde1b5775871241b045139771eeb06
28473+
languageName: node
28474+
linkType: hard
28475+
2846728476
"long@npm:*, long@npm:^5.2.1, long@npm:~5.2.3":
2846828477
version: 5.2.3
2846928478
resolution: "long@npm:5.2.3"

0 commit comments

Comments (0)