Commit 1ed0287

chore (ai): stable sendStart/sendFinish options (#6499)
1 parent 655cf3c commit 1ed0287

File tree

5 files changed: +23 -28 lines changed

‎.changeset/rare-foxes-build.md

Lines changed: 5 additions & 0 deletions

```diff
@@ -0,0 +1,5 @@
+---
+'ai': major
+---
+
+chore (ai): stable sendStart/sendFinish options
```
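Since the changeset marks this as a major release of `ai`, callers must drop the `experimental_` prefix when opting out of the start/finish events. A minimal before/after sketch; the model and prompt are illustrative, not part of this commit:

```ts
import { openai } from '@ai-sdk/openai';
import { streamText } from 'ai';

const result = streamText({
  model: openai('gpt-4o'), // illustrative model choice
  prompt: 'Hello!',
});

// Before this commit:
//   result.toUIMessageStream({ experimental_sendStart: false });
// After this commit the options are stable:
const uiMessageStream = result.toUIMessageStream({
  sendStart: false, // suppress the message start event
  sendFinish: true, // keep the finish event (true is the default)
});
```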

‎examples/next-openai/app/api/use-chat-custom-sources/route.ts

Lines changed: 1 addition & 5 deletions

```diff
@@ -27,11 +27,7 @@ export async function POST(req: Request) {
         messages: convertToModelMessages(messages),
       });
 
-      writer.merge(
-        result.toUIMessageStream({
-          experimental_sendStart: false,
-        }),
-      );
+      writer.merge(result.toUIMessageStream({ sendStart: false }));
     },
     originalMessages: messages,
     onFinish: options => {
```

‎packages/ai/core/generate-text/stream-text-result.ts

Lines changed: 2 additions & 2 deletions

```diff
@@ -82,7 +82,7 @@ export type UIMessageStreamOptions = {
    * that send additional data.
    * Default to true.
    */
-  experimental_sendFinish?: boolean;
+  sendFinish?: boolean;
 
   /**
    * Send the message start event to the client.
@@ -95,7 +95,7 @@ export type UIMessageStreamOptions = {
    * streamText calls that send additional data to prevent
    * the message start event from being sent multiple times.
    */
-  experimental_sendStart?: boolean;
+  sendStart?: boolean;
 
   /**
    * Process an error, e.g. to log it. Default to `() => 'An error occurred.'`.
```
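The JSDoc above describes the intended use: when several `streamText` results are merged into a single UI message, only the first should emit `start` and only the last should emit `finish`. A sketch of that pattern, assuming the `createUIMessageStream` helper used in the example route above; models, prompts, and the sequencing via `await first.text` are illustrative:

```ts
import { openai } from '@ai-sdk/openai';
import { createUIMessageStream, streamText } from 'ai';

const stream = createUIMessageStream({
  execute: async ({ writer }) => {
    const first = streamText({ model: openai('gpt-4o'), prompt: 'Step 1' });
    // First stream: emits `start`, holds back `finish`.
    writer.merge(first.toUIMessageStream({ sendFinish: false }));
    await first.text; // wait for step 1 to complete before starting step 2

    const second = streamText({ model: openai('gpt-4o'), prompt: 'Step 2' });
    // Second stream: skips the duplicate `start`, emits the final `finish`.
    writer.merge(second.toUIMessageStream({ sendStart: false }));
  },
});
```

The client then sees a single `start`/`finish` pair wrapping both generations.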

‎packages/ai/core/generate-text/stream-text.test.ts

Lines changed: 3 additions & 9 deletions

```diff
@@ -999,9 +999,7 @@ describe('streamText', () => {
       ...defaultSettings(),
     });
 
-    result.pipeUIMessageStreamToResponse(mockResponse, {
-      experimental_sendFinish: false,
-    });
+    result.pipeUIMessageStreamToResponse(mockResponse, { sendFinish: false });
 
     await mockResponse.waitForEnd();
 
@@ -1447,9 +1445,7 @@ describe('streamText', () => {
       ...defaultSettings(),
     });
 
-    const uiMessageStream = result.toUIMessageStream({
-      experimental_sendFinish: false,
-    });
+    const uiMessageStream = result.toUIMessageStream({ sendFinish: false });
 
     expect(
       await convertReadableStreamToArray(uiMessageStream),
@@ -1472,9 +1468,7 @@ describe('streamText', () => {
       ...defaultSettings(),
     });
 
-    const uiMessageStream = result.toUIMessageStream({
-      experimental_sendStart: false,
-    });
+    const uiMessageStream = result.toUIMessageStream({ sendStart: false });
 
     expect(
       await convertReadableStreamToArray(uiMessageStream),
```
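For the piped variant exercised in the first hunk, the call shape is unchanged apart from the option name. A short Node-style usage sketch; the `node:http` wiring is illustrative:

```ts
import { openai } from '@ai-sdk/openai';
import { streamText } from 'ai';
import { createServer } from 'node:http';

createServer((req, res) => {
  const result = streamText({
    model: openai('gpt-4o'), // illustrative model choice
    prompt: 'Hi!',
  });
  // Stable option name after this commit (was experimental_sendFinish):
  result.pipeUIMessageStreamToResponse(res, { sendFinish: false });
}).listen(3000);
```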

‎packages/ai/core/generate-text/stream-text.ts

Lines changed: 12 additions & 12 deletions

```diff
@@ -1411,8 +1411,8 @@ However, the LLM results are expected to be small enough to not cause issues.
       messageMetadata,
       sendReasoning = false,
       sendSources = false,
-      experimental_sendStart = true,
-      experimental_sendFinish = true,
+      sendStart = true,
+      sendFinish = true,
       onError = () => 'An error occurred.', // mask error messages for safety by default
     }: UIMessageStreamOptions = {}): ReadableStream<UIMessageStreamPart> {
       const lastMessage = originalMessages[originalMessages.length - 1];
@@ -1539,7 +1539,7 @@ However, the LLM results are expected to be small enough to not cause issues.
           }
 
           case 'start': {
-            if (experimental_sendStart) {
+            if (sendStart) {
               const metadata = messageMetadata?.({ part });
               controller.enqueue({
                 type: 'start',
@@ -1551,7 +1551,7 @@ However, the LLM results are expected to be small enough to not cause issues.
           }
 
           case 'finish': {
-            if (experimental_sendFinish) {
+            if (sendFinish) {
               const metadata = messageMetadata?.({ part });
               controller.enqueue({
                 type: 'finish',
@@ -1587,8 +1587,8 @@ However, the LLM results are expected to be small enough to not cause issues.
       messageMetadata,
       sendReasoning,
       sendSources,
-      experimental_sendFinish,
-      experimental_sendStart,
+      sendFinish,
+      sendStart,
       onError,
       ...init
     }: ResponseInit & UIMessageStreamOptions = {},
@@ -1602,8 +1602,8 @@ However, the LLM results are expected to be small enough to not cause issues.
         messageMetadata,
         sendReasoning,
         sendSources,
-        experimental_sendFinish,
-        experimental_sendStart,
+        sendFinish,
+        sendStart,
         onError,
       }),
       ...init,
@@ -1625,8 +1625,8 @@ However, the LLM results are expected to be small enough to not cause issues.
       messageMetadata,
       sendReasoning,
       sendSources,
-      experimental_sendFinish,
-      experimental_sendStart,
+      sendFinish,
+      sendStart,
       onError,
       ...init
     }: ResponseInit & UIMessageStreamOptions = {}): Response {
@@ -1638,8 +1638,8 @@ However, the LLM results are expected to be small enough to not cause issues.
        messageMetadata,
        sendReasoning,
        sendSources,
-        experimental_sendFinish,
-        experimental_sendStart,
+        sendFinish,
+        sendStart,
        onError,
      }),
      ...init,
```
