
Commit e244a78

chore (ai): remove StreamData and mergeStreams (#6150)
## Background

`StreamData` has been deprecated and should be removed in AI SDK 5.

## Summary

* remove `StreamData`
* remove `mergeStreams` helper function
* change `StreamTextResult.toDataStream` response type
* rename `DataStreamString` to `DataStreamText`

## Future Work

Rework the langchain and llamaindex streaming integrations.
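For reference, a minimal user-side migration sketch (not part of this commit; it assumes the existing `createDataStreamResponse` / `DataStreamWriter` API and an arbitrary provider): values that were previously appended via `StreamData` are written through a `DataStreamWriter`, and the `streamText` result is merged into the same data stream.

```ts
// Hypothetical user-side migration sketch (not part of this commit).
//
// Before (removed here):
//   const data = new StreamData();
//   data.append('stream-data-value');
//   data.close();
//   return result.toDataStreamResponse({ data });
//
// After: write custom values through a DataStreamWriter instead.
import { streamText, createDataStreamResponse } from 'ai';
import { openai } from '@ai-sdk/openai'; // any provider; chosen only for illustration

export async function POST(req: Request) {
  const { prompt } = await req.json();

  return createDataStreamResponse({
    execute: dataStream => {
      dataStream.writeData('stream-data-value'); // replaces StreamData#append

      const result = streamText({ model: openai('gpt-4o'), prompt });
      result.mergeIntoDataStream(dataStream);
    },
  });
}
```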
1 parent 1757264 commit e244a78

File tree

15 files changed: +52 -589 lines changed

.changeset/two-roses-think.md

Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
+---
+'ai': major
+---
+
+chore (ai): remove StreamData and mergeStreams

packages/ai/core/generate-text/__snapshots__/stream-text.test.ts.snap

Lines changed: 0 additions & 57 deletions
@@ -4123,25 +4123,6 @@ exports[`streamText > result.pipeDataStreamToResponse > should support custom er
]
`;

-exports[`streamText > result.pipeDataStreamToResponse > should support merging with existing stream data 1`] = `
-[
-  "2:["stream-data-value"]
-",
-  "f:{"messageId":"msg-0"}
-",
-  "0:"Hello"
-",
-  "0:", "
-",
-  "0:"world!"
-",
-  "e:{"finishReason":"stop","usage":{"inputTokens":3,"outputTokens":10,"totalTokens":13},"isContinued":false}
-",
-  "d:{"finishReason":"stop","usage":{"inputTokens":3,"outputTokens":10,"totalTokens":13}}
-",
-]
-`;
-
exports[`streamText > result.pipeDataStreamToResponse > should suppress usage information when sendUsage is false 1`] = `
[
  "f:{"messageId":"msg-0"}
@@ -4793,25 +4774,6 @@ exports[`streamText > result.toDataStream > should support custom error messages
]
`;

-exports[`streamText > result.toDataStream > should support merging with existing stream data 1`] = `
-[
-  "2:["stream-data-value"]
-",
-  "f:{"messageId":"msg-0"}
-",
-  "0:"Hello"
-",
-  "0:", "
-",
-  "0:"world!"
-",
-  "e:{"finishReason":"stop","usage":{"inputTokens":3,"outputTokens":10,"totalTokens":13},"isContinued":false}
-",
-  "d:{"finishReason":"stop","usage":{"inputTokens":3,"outputTokens":10,"totalTokens":13}}
-",
-]
-`;
-
exports[`streamText > result.toDataStream > should suppress usage information when sendUsage is false 1`] = `
[
  "f:{"messageId":"msg-0"}
@@ -4885,25 +4847,6 @@ exports[`streamText > result.toDataStreamResponse > should support custom error
]
`;

-exports[`streamText > result.toDataStreamResponse > should support merging with existing stream data 1`] = `
-[
-  "2:["stream-data-value"]
-",
-  "f:{"messageId":"msg-0"}
-",
-  "0:"Hello"
-",
-  "0:", "
-",
-  "0:"world!"
-",
-  "e:{"finishReason":"stop","usage":{"inputTokens":3,"outputTokens":10,"totalTokens":13},"isContinued":false}
-",
-  "d:{"finishReason":"stop","usage":{"inputTokens":3,"outputTokens":10,"totalTokens":13}}
-",
-]
-`;
-
exports[`streamText > result.toDataStreamResponse > should suppress usage information when sendUsage is false 1`] = `
[
  "f:{"messageId":"msg-0"}

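The deleted snapshots above are data stream text protocol lines of the form `<type>:<JSON>`: `0:` text deltas, `2:` the custom `StreamData` values that this commit removes, `f:` message start (carrying the `messageId`), `e:` step finish, and `d:` message finish. A rough parsing sketch for orientation only (the package keeps its own implementation in `data-stream-parts`; the helper below is made up):

```ts
// Illustrative parser for the "<type>:<json>" line format seen in the snapshots
// above. Not the package's parser; just a sketch of the shape of the protocol.
type ParsedPart = { code: string; value: unknown };

function parseDataStreamLine(line: string): ParsedPart {
  const separatorIndex = line.indexOf(':');
  if (separatorIndex === -1) {
    throw new Error(`Invalid data stream line: ${line}`);
  }
  const code = line.slice(0, separatorIndex); // e.g. '0', '2', 'f', 'e', 'd'
  const value = JSON.parse(line.slice(separatorIndex + 1));
  return { code, value };
}

// Example: the removed StreamData value from the deleted snapshots.
parseDataStreamLine('2:["stream-data-value"]'); // { code: '2', value: ['stream-data-value'] }
```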
packages/ai/core/generate-text/stream-text-result.ts

Lines changed: 3 additions & 6 deletions
@@ -1,6 +1,6 @@
import { ServerResponse } from 'node:http';
-import { StreamData } from '../../streams/stream-data';
import { DataStreamWriter } from '../data-stream/data-stream-writer';
+import { ReasoningPart } from '../prompt/content-part';
import {
  CallWarning,
  FinishReason,
@@ -11,12 +11,12 @@ import { Source } from '../types/language-model';
import { LanguageModelResponseMetadata } from '../types/language-model-response-metadata';
import { LanguageModelUsage } from '../types/usage';
import { AsyncIterableStream } from '../util/async-iterable-stream';
+import { DataStreamText } from '../util/data-stream-parts';
import { GeneratedFile } from './generated-file';
import { ResponseMessage, StepResult } from './step-result';
import { ToolCallUnion } from './tool-call';
import { ToolResultUnion } from './tool-result';
import { ToolSet } from './tool-set';
-import { ReasoningPart } from '../prompt/content-part';

export type DataStreamOptions = {
  /**
@@ -215,10 +215,9 @@ If an error occurs, it is passed to the optional `onError` callback.
   */
  toDataStream(
    options?: {
-      data?: StreamData;
      getErrorMessage?: (error: unknown) => string;
    } & DataStreamOptions,
-  ): ReadableStream<Uint8Array>;
+  ): ReadableStream<DataStreamText>;

  /**
   * Merges the result as a data stream into another data stream.
@@ -247,7 +246,6 @@ If an error occurs, it is passed to the optional `onError` callback.
  pipeDataStreamToResponse(
    response: ServerResponse,
    options?: ResponseInit & {
-      data?: StreamData;
      getErrorMessage?: (error: unknown) => string;
    } & DataStreamOptions,
  ): void;
@@ -278,7 +276,6 @@ If an error occurs, it is passed to the optional `onError` callback.
   */
  toDataStreamResponse(
    options?: ResponseInit & {
-      data?: StreamData;
      getErrorMessage?: (error: unknown) => string;
    } & DataStreamOptions,
  ): Response;
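With this change, `toDataStream()` yields already-formatted text chunks (`ReadableStream<DataStreamText>`) instead of bytes, so callers that need a byte stream must encode explicitly. A caller-side sketch under that assumption (the handler, provider, and model name are hypothetical, not from this commit):

```ts
// Hypothetical caller sketch: toDataStream() now yields text chunks
// (DataStreamText) rather than Uint8Array, so encode when bytes are needed.
import { streamText } from 'ai';
import { openai } from '@ai-sdk/openai'; // provider chosen only for illustration

async function handler(prompt: string): Promise<Response> {
  const result = streamText({ model: openai('gpt-4o'), prompt });

  // string chunks in, bytes out
  const body = result.toDataStream().pipeThrough(new TextEncoderStream());

  return new Response(body, {
    headers: {
      'Content-Type': 'text/plain; charset=utf-8',
      'X-Vercel-AI-Data-Stream': 'v1', // header the tests above assert on
    },
  });
}
```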

packages/ai/core/generate-text/stream-text.test.ts

Lines changed: 12 additions & 120 deletions
@@ -15,7 +15,6 @@ import {
} from '@ai-sdk/provider-utils/test';
import assert from 'node:assert';
import { z } from 'zod';
-import { StreamData } from '../../streams/stream-data';
import { createDataStream } from '../data-stream/create-data-stream';
import { MockLanguageModelV2 } from '../test/mock-language-model-v2';
import { createMockServerResponse } from '../test/mock-server-response';
@@ -873,34 +872,6 @@ describe('streamText', () => {
      expect(mockResponse.getDecodedChunks()).toMatchSnapshot();
    });

-    it('should support merging with existing stream data', async () => {
-      const mockResponse = createMockServerResponse();
-
-      const result = streamText({
-        model: createTestModel(),
-        prompt: 'test-input',
-        experimental_generateMessageId: mockId({ prefix: 'msg' }),
-      });
-
-      const streamData = new StreamData();
-      streamData.append('stream-data-value');
-      streamData.close();
-
-      result.pipeDataStreamToResponse(mockResponse, {
-        data: streamData,
-      });
-
-      await mockResponse.waitForEnd();
-
-      expect(mockResponse.statusCode).toBe(200);
-      expect(mockResponse.headers).toEqual({
-        'Content-Type': 'text/plain; charset=utf-8',
-        'X-Vercel-AI-Data-Stream': 'v1',
-      });
-
-      expect(mockResponse.getDecodedChunks()).toMatchSnapshot();
-    });
-
    it('should mask error messages by default', async () => {
      const mockResponse = createMockServerResponse();

@@ -1099,30 +1070,7 @@ describe('streamText', () => {

      const dataStream = result.toDataStream();

-      expect(
-        await convertReadableStreamToArray(
-          dataStream.pipeThrough(new TextDecoderStream()),
-        ),
-      ).toMatchSnapshot();
-    });
-
-    it('should support merging with existing stream data', async () => {
-      const result = streamText({
-        model: createTestModel(),
-        ...defaultSettings(),
-      });
-
-      const streamData = new StreamData();
-      streamData.append('stream-data-value');
-      streamData.close();
-
-      const dataStream = result.toDataStream({ data: streamData });
-
-      expect(
-        await convertReadableStreamToArray(
-          dataStream.pipeThrough(new TextDecoderStream()),
-        ),
-      ).toMatchSnapshot();
+      expect(await convertReadableStreamToArray(dataStream)).toMatchSnapshot();
    });

    it('should send tool call and tool result stream parts', async () => {
@@ -1167,9 +1115,7 @@ describe('streamText', () => {
      });

      expect(
-        await convertReadableStreamToArray(
-          result.toDataStream().pipeThrough(new TextDecoderStream()),
-        ),
+        await convertReadableStreamToArray(result.toDataStream()),
      ).toMatchSnapshot();
    });

@@ -1216,9 +1162,7 @@ describe('streamText', () => {
      });

      expect(
-        await convertReadableStreamToArray(
-          result.toDataStream().pipeThrough(new TextDecoderStream()),
-        ),
+        await convertReadableStreamToArray(result.toDataStream()),
      ).toMatchSnapshot();
    });

@@ -1234,11 +1178,7 @@ describe('streamText', () => {

      const dataStream = result.toDataStream();

-      expect(
-        await convertReadableStreamToArray(
-          dataStream.pipeThrough(new TextDecoderStream()),
-        ),
-      ).toMatchSnapshot();
+      expect(await convertReadableStreamToArray(dataStream)).toMatchSnapshot();
    });

    it('should support custom error messages', async () => {
@@ -1255,11 +1195,7 @@ describe('streamText', () => {
        getErrorMessage: error => `custom error message: ${error}`,
      });

-      expect(
-        await convertReadableStreamToArray(
-          dataStream.pipeThrough(new TextDecoderStream()),
-        ),
-      ).toMatchSnapshot();
+      expect(await convertReadableStreamToArray(dataStream)).toMatchSnapshot();
    });

    it('should suppress usage information when sendUsage is false', async () => {
@@ -1279,11 +1215,7 @@ describe('streamText', () => {

      const dataStream = result.toDataStream({ sendUsage: false });

-      expect(
-        await convertReadableStreamToArray(
-          dataStream.pipeThrough(new TextDecoderStream()),
-        ),
-      ).toMatchSnapshot();
+      expect(await convertReadableStreamToArray(dataStream)).toMatchSnapshot();
    });

    it('should omit message finish event (d:) when sendFinish is false', async () => {
@@ -1305,11 +1237,7 @@ describe('streamText', () => {
        experimental_sendFinish: false,
      });

-      expect(
-        await convertReadableStreamToArray(
-          dataStream.pipeThrough(new TextDecoderStream()),
-        ),
-      ).toMatchSnapshot();
+      expect(await convertReadableStreamToArray(dataStream)).toMatchSnapshot();
    });

    it('should send reasoning content when sendReasoning is true', async () => {
@@ -1320,11 +1248,7 @@ describe('streamText', () => {

      const dataStream = result.toDataStream({ sendReasoning: true });

-      expect(
-        await convertReadableStreamToArray(
-          dataStream.pipeThrough(new TextDecoderStream()),
-        ),
-      ).toMatchSnapshot();
+      expect(await convertReadableStreamToArray(dataStream)).toMatchSnapshot();
    });

    it('should send source content when sendSources is true', async () => {
@@ -1335,11 +1259,7 @@ describe('streamText', () => {

      const dataStream = result.toDataStream({ sendSources: true });

-      expect(
-        await convertReadableStreamToArray(
-          dataStream.pipeThrough(new TextDecoderStream()),
-        ),
-      ).toMatchSnapshot();
+      expect(await convertReadableStreamToArray(dataStream)).toMatchSnapshot();
    });

    it('should send file content', async () => {
@@ -1350,11 +1270,7 @@ describe('streamText', () => {

      const dataStream = result.toDataStream();

-      expect(
-        await convertReadableStreamToArray(
-          dataStream.pipeThrough(new TextDecoderStream()),
-        ),
-      ).toMatchSnapshot();
+      expect(await convertReadableStreamToArray(dataStream)).toMatchSnapshot();
    });
  });

@@ -1404,26 +1320,6 @@ describe('streamText', () => {
      expect(await convertResponseStreamToArray(response)).toMatchSnapshot();
    });

-    it('should support merging with existing stream data', async () => {
-      const result = streamText({
-        model: createTestModel(),
-        prompt: 'test-input',
-        experimental_generateMessageId: mockId({ prefix: 'msg' }),
-      });
-
-      const streamData = new StreamData();
-      streamData.append('stream-data-value');
-      streamData.close();
-
-      const response = result.toDataStreamResponse({ data: streamData });
-
-      expect(response.status).toStrictEqual(200);
-      expect(response.headers.get('Content-Type')).toStrictEqual(
-        'text/plain; charset=utf-8',
-      );
-      expect(await convertResponseStreamToArray(response)).toMatchSnapshot();
-    });
-
    it('should mask error messages by default', async () => {
      const result = streamText({
        model: createTestModel({
@@ -1654,9 +1550,7 @@ describe('streamText', () => {
      expect({
        textStream: await convertAsyncIterableToArray(result.textStream),
        fullStream: await convertAsyncIterableToArray(result.fullStream),
-        dataStream: await convertReadableStreamToArray(
-          result.toDataStream().pipeThrough(new TextDecoderStream()),
-        ),
+        dataStream: await convertReadableStreamToArray(result.toDataStream()),
      }).toMatchSnapshot();
    });
  });
@@ -2905,9 +2799,7 @@ describe('streamText', () => {
      const dataStream = result.toDataStream();

      expect(
-        await convertReadableStreamToArray(
-          dataStream.pipeThrough(new TextDecoderStream()),
-        ),
+        await convertReadableStreamToArray(dataStream),
      ).toMatchSnapshot();
    });

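If equivalent coverage for merging custom values is wanted later, a hypothetical replacement test (not part of this commit) could go through `createDataStream`, which this file still imports, instead of the removed `StreamData`:

```ts
// Hypothetical replacement test sketch (not part of this commit): custom
// values are merged via createDataStream / DataStreamWriter instead of the
// removed StreamData helper.
it('should support merging custom data via createDataStream', async () => {
  const result = streamText({
    model: createTestModel(),
    prompt: 'test-input',
    experimental_generateMessageId: mockId({ prefix: 'msg' }),
  });

  const dataStream = createDataStream({
    execute: writer => {
      writer.writeData('stream-data-value'); // replaces StreamData#append
      result.mergeIntoDataStream(writer);
    },
  });

  expect(await convertReadableStreamToArray(dataStream)).toMatchSnapshot();
});
```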
0 commit comments
