Skip to content

Commit c040e2f

Browse files
authored
fix (ui): inject generated response message id (#7198)
## Background

It was not possible to override the generated message id in the UI message stream.

## Summary

Add a `generateMessageId` property to `toUIMessageStream` on the stream text result.

## Related Issues

Fixes #7178
1 parent 42fd756 commit c040e2f

File tree

5 files changed

+102
-27
lines changed

5 files changed

+102
-27
lines changed

‎.changeset/giant-ravens-reflect.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
'ai': patch
3+
---
4+
5+
fix (ui): inject generated response message id

‎packages/ai/src/generate-text/stream-text-result.ts

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { ReasoningPart } from '@ai-sdk/provider-utils';
1+
import { IdGenerator, ReasoningPart } from '@ai-sdk/provider-utils';
22
import { ServerResponse } from 'node:http';
33
import { InferUIMessageChunk } from '../../src/ui-message-stream/ui-message-chunks';
44
import { UIMessageStreamResponseInit } from '../../src/ui-message-stream/ui-message-stream-response-init';
@@ -29,6 +29,14 @@ export type UIMessageStreamOptions<UI_MESSAGE extends UIMessage> = {
2929
*/
3030
originalMessages?: UI_MESSAGE[];
3131

32+
/**
33+
* Generate a message ID for the response message.
34+
*
35+
* If not provided, no message ID will be set for the response message (unless
36+
* the original messages are provided and the last message is an assistant message).
37+
*/
38+
generateMessageId?: IdGenerator;
39+
3240
onFinish?: (options: {
3341
/**
3442
* The updates list of UI messages.

‎packages/ai/src/generate-text/stream-text.test.ts

Lines changed: 56 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2648,7 +2648,7 @@ describe('streamText', () => {
26482648
`);
26492649
});
26502650

2651-
it('should not generate a new message id when onFinish is provided', async () => {
2651+
it('should not generate a new message id when onFinish is provided and generateMessageId is not provided', async () => {
26522652
const result = streamText({
26532653
model: createTestModel(),
26542654
...defaultSettings(),
@@ -2702,6 +2702,61 @@ describe('streamText', () => {
27022702
]
27032703
`);
27042704
});
2705+
2706+
it('should generate a new message id when generateMessageId is provided', async () => {
2707+
const result = streamText({
2708+
model: createTestModel(),
2709+
...defaultSettings(),
2710+
});
2711+
2712+
const uiMessageStream = result.toUIMessageStream({
2713+
generateMessageId: mockId({ prefix: 'id' }),
2714+
});
2715+
2716+
expect(await convertReadableStreamToArray(uiMessageStream))
2717+
.toMatchInlineSnapshot(`
2718+
[
2719+
{
2720+
"messageId": "id-0",
2721+
"messageMetadata": undefined,
2722+
"type": "start",
2723+
},
2724+
{
2725+
"type": "start-step",
2726+
},
2727+
{
2728+
"id": "1",
2729+
"type": "text-start",
2730+
},
2731+
{
2732+
"delta": "Hello",
2733+
"id": "1",
2734+
"type": "text-delta",
2735+
},
2736+
{
2737+
"delta": ", ",
2738+
"id": "1",
2739+
"type": "text-delta",
2740+
},
2741+
{
2742+
"delta": "world!",
2743+
"id": "1",
2744+
"type": "text-delta",
2745+
},
2746+
{
2747+
"id": "1",
2748+
"type": "text-end",
2749+
},
2750+
{
2751+
"type": "finish-step",
2752+
},
2753+
{
2754+
"messageMetadata": undefined,
2755+
"type": "finish",
2756+
},
2757+
]
2758+
`);
2759+
});
27052760
});
27062761

27072762
describe('result.toUIMessageStreamResponse', () => {

‎packages/ai/src/generate-text/stream-text.ts

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1573,6 +1573,7 @@ However, the LLM results are expected to be small enough to not cause issues.
15731573

15741574
toUIMessageStream<UI_MESSAGE extends UIMessage>({
15751575
originalMessages,
1576+
generateMessageId,
15761577
onFinish,
15771578
messageMetadata,
15781579
sendReasoning = true,
@@ -1806,7 +1807,7 @@ However, the LLM results are expected to be small enough to not cause issues.
18061807

18071808
return handleUIMessageStreamFinish<UI_MESSAGE>({
18081809
stream: baseStream,
1809-
messageId: responseMessageId,
1810+
messageId: responseMessageId ?? generateMessageId?.(),
18101811
originalMessages,
18111812
onFinish,
18121813
onError,
@@ -1817,6 +1818,7 @@ However, the LLM results are expected to be small enough to not cause issues.
18171818
response: ServerResponse,
18181819
{
18191820
originalMessages,
1821+
generateMessageId,
18201822
onFinish,
18211823
messageMetadata,
18221824
sendReasoning,
@@ -1831,6 +1833,7 @@ However, the LLM results are expected to be small enough to not cause issues.
18311833
response,
18321834
stream: this.toUIMessageStream({
18331835
originalMessages,
1836+
generateMessageId,
18341837
onFinish,
18351838
messageMetadata,
18361839
sendReasoning,
@@ -1853,6 +1856,7 @@ However, the LLM results are expected to be small enough to not cause issues.
18531856

18541857
toUIMessageStreamResponse<UI_MESSAGE extends UIMessage>({
18551858
originalMessages,
1859+
generateMessageId,
18561860
onFinish,
18571861
messageMetadata,
18581862
sendReasoning,
@@ -1866,6 +1870,7 @@ However, the LLM results are expected to be small enough to not cause issues.
18661870
return createUIMessageStreamResponse({
18671871
stream: this.toUIMessageStream({
18681872
originalMessages,
1873+
generateMessageId,
18691874
onFinish,
18701875
messageMetadata,
18711876
sendReasoning,

‎packages/ai/src/ui-message-stream/handle-ui-message-stream-finish.ts

Lines changed: 26 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -48,10 +48,6 @@ export function handleUIMessageStreamFinish<UI_MESSAGE extends UIMessage>({
4848
responseMessage: UI_MESSAGE;
4949
}) => void;
5050
}): ReadableStream<InferUIMessageChunk<UI_MESSAGE>> {
51-
if (onFinish == null) {
52-
return stream;
53-
}
54-
5551
// last message is only relevant for assistant messages
5652
let lastMessage: UI_MESSAGE | undefined =
5753
originalMessages?.[originalMessages.length - 1];
@@ -62,6 +58,31 @@ export function handleUIMessageStreamFinish<UI_MESSAGE extends UIMessage>({
6258
messageId = lastMessage.id;
6359
}
6460

61+
const idInjectedStream = stream.pipeThrough(
62+
new TransformStream<
63+
InferUIMessageChunk<UI_MESSAGE>,
64+
InferUIMessageChunk<UI_MESSAGE>
65+
>({
66+
transform(chunk, controller) {
67+
// when there is no messageId in the start chunk,
68+
// but the user checked for persistence,
69+
// inject the messageId into the chunk
70+
if (chunk.type === 'start') {
71+
const startChunk = chunk as UIMessageChunk & { type: 'start' };
72+
if (startChunk.messageId == null && messageId != null) {
73+
startChunk.messageId = messageId;
74+
}
75+
}
76+
77+
controller.enqueue(chunk);
78+
},
79+
}),
80+
);
81+
82+
if (onFinish == null) {
83+
return idInjectedStream;
84+
}
85+
6586
const state = createStreamingUIMessageState<UI_MESSAGE>({
6687
lastMessage: lastMessage
6788
? (structuredClone(lastMessage) as UI_MESSAGE)
@@ -79,26 +100,7 @@ export function handleUIMessageStreamFinish<UI_MESSAGE extends UIMessage>({
79100
};
80101

81102
return processUIMessageStream<UI_MESSAGE>({
82-
stream: stream.pipeThrough(
83-
new TransformStream<
84-
InferUIMessageChunk<UI_MESSAGE>,
85-
InferUIMessageChunk<UI_MESSAGE>
86-
>({
87-
transform(chunk, controller) {
88-
// when there is no messageId in the start chunk,
89-
// but the user checked for persistence,
90-
// inject the messageId into the chunk
91-
if (chunk.type === 'start') {
92-
const startChunk = chunk as UIMessageChunk & { type: 'start' };
93-
if (startChunk.messageId == null && messageId != null) {
94-
startChunk.messageId = messageId;
95-
}
96-
}
97-
98-
controller.enqueue(chunk);
99-
},
100-
}),
101-
),
103+
stream: idInjectedStream,
102104
runUpdateMessageJob,
103105
onError,
104106
}).pipeThrough(

0 commit comments

Comments (0)