Skip to content

Commit 14cb3be

Browse files
authored
chore(providers/llamaindex): extract to separate package (#5934)
1 parent c9fbc57 commit 14cb3be

File tree

18 files changed

+202
-25
lines changed

18 files changed

+202
-25
lines changed

‎.changeset/famous-fans-provide.md

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
'ai': patch
3+
---
4+
5+
chore(providers/llamaindex): extract to separate package

‎content/docs/07-reference/04-stream-helpers/16-llamaindex-adapter.mdx

Lines changed: 10 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,11 @@
11
---
2-
title: LlamaIndexAdapter
3-
description: API Reference for LlamaIndexAdapter.
2+
title: @ai-sdk/llamaindex Adapter
3+
description: API Reference for the LlamaIndex Adapter.
44
---
55

6-
# `LlamaIndexAdapter`
6+
# `@ai-sdk/llamaindex`
77

8-
The `LlamaIndexAdapter` module provides helper functions to transform LlamaIndex output streams into data streams and data stream responses.
8+
The `@ai-sdk/llamaindex` package provides helper functions to transform LlamaIndex output streams into data streams and data stream responses.
99
See the [LlamaIndex Adapter documentation](/providers/adapters/llamaindex) for more information.
1010

1111
It supports:
@@ -15,7 +15,10 @@ It supports:
1515

1616
## Import
1717

18-
<Snippet text={`import { LlamaIndexAdapter } from "ai"`} prompt={false} />
18+
<Snippet
19+
text={`import { toDataStreamResponse } from "@ai-sdk/llamaindex"`}
20+
prompt={false}
21+
/>
1922

2023
## API Signature
2124

@@ -49,7 +52,7 @@ It supports:
4952

5053
```tsx filename="app/api/completion/route.ts" highlight="15"
5154
import { OpenAI, SimpleChatEngine } from 'llamaindex';
52-
import { LlamaIndexAdapter } from 'ai';
55+
import { toDataStreamResponse } from '@ai-sdk/llamaindex';
5356

5457
export async function POST(req: Request) {
5558
const { prompt } = await req.json();
@@ -62,6 +65,6 @@ export async function POST(req: Request) {
6265
stream: true,
6366
});
6467

65-
return LlamaIndexAdapter.toDataStreamResponse(stream);
68+
return toDataStreamResponse(stream);
6669
}
6770
```

‎content/docs/07-reference/04-stream-helpers/index.mdx

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -81,12 +81,6 @@ collapsed: true
8181
"Transforms the response from LangChain's language models into a readable stream.",
8282
href: '/docs/reference/stream-helpers/langchain-stream',
8383
},
84-
{
85-
title: 'LlamaIndexAdapter',
86-
description:
87-
"Transforms the response from LlamaIndex's streams into data streams.",
88-
href: '/docs/reference/stream-helpers/llamaindex-adapter',
89-
},
9084
{
9185
title: 'MistralStream',
9286
description:

‎content/providers/04-adapters/02-llamaindex.mdx

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -11,11 +11,11 @@ description: Learn how to use LlamaIndex with the AI SDK.
1111

1212
Here is a basic example that uses both AI SDK and LlamaIndex together with the [Next.js](https://nextjs.org/docs) App Router.
1313

14-
The AI SDK [`LlamaIndexAdapter`](/docs/reference/stream-helpers/llamaindex-adapter) uses the stream result from calling the `chat` method on a [LlamaIndex ChatEngine](https://ts.llamaindex.ai/modules/chat_engine) or the `query` method on a [LlamaIndex QueryEngine](https://ts.llamaindex.ai/modules/query_engines) to pipe text to the client.
14+
The AI SDK [`@ai-sdk/llamaindex` package](/docs/reference/stream-helpers/llamaindex-adapter) uses the stream result from calling the `chat` method on a [LlamaIndex ChatEngine](https://ts.llamaindex.ai/modules/chat_engine) or the `query` method on a [LlamaIndex QueryEngine](https://ts.llamaindex.ai/modules/query_engines) to pipe text to the client.
1515

1616
```tsx filename="app/api/completion/route.ts" highlight="17"
1717
import { OpenAI, SimpleChatEngine } from 'llamaindex';
18-
import { LlamaIndexAdapter } from 'ai';
18+
import { toDataStreamResponse } from '@ai-sdk/llamaindex';
1919

2020
export const maxDuration = 60;
2121

@@ -30,7 +30,7 @@ export async function POST(req: Request) {
3030
stream: true,
3131
});
3232

33-
return LlamaIndexAdapter.toDataStreamResponse(stream);
33+
return toDataStreamResponse(stream);
3434
}
3535
```
3636

‎packages/ai/streams/index.ts

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,3 @@
11
export * from '../core/index';
22
export * from '../errors/index';
3-
4-
export * as LlamaIndexAdapter from './llamaindex-adapter';
53
export * from './stream-data';

‎packages/llamaindex/CHANGELOG.md

Whitespace-only changes.

‎packages/llamaindex/README.md

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
# AI SDK - LlamaIndex Adapter
2+
3+
This package contains a LlamaIndex adapter for the AI SDK.

‎packages/llamaindex/package.json

Lines changed: 61 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,61 @@
1+
{
2+
"name": "@ai-sdk/llamaindex",
3+
"version": "1.0.0-canary.0",
4+
"license": "Apache-2.0",
5+
"sideEffects": false,
6+
"main": "./dist/index.js",
7+
"module": "./dist/index.mjs",
8+
"types": "./dist/index.d.ts",
9+
"files": [
10+
"dist/**/*",
11+
"CHANGELOG.md"
12+
],
13+
"scripts": {
14+
"build": "pnpm clean && tsup --tsconfig tsconfig.build.json",
15+
"build:watch": "pnpm clean && tsup --watch",
16+
"clean": "rm -rf dist *.tsbuildinfo",
17+
"lint": "eslint \"./**/*.ts*\"",
18+
"type-check": "tsc --build",
19+
"prettier-check": "prettier --check \"./**/*.ts*\"",
20+
"test": "pnpm test:node && pnpm test:edge",
21+
"test:update": "pnpm test:node -u",
22+
"test:watch": "vitest --config vitest.node.config.js",
23+
"test:edge": "vitest --config vitest.edge.config.js --run",
24+
"test:node": "vitest --config vitest.node.config.js --run"
25+
},
26+
"exports": {
27+
"./package.json": "./package.json",
28+
".": {
29+
"types": "./dist/index.d.ts",
30+
"import": "./dist/index.mjs",
31+
"require": "./dist/index.js"
32+
}
33+
},
34+
"dependencies": {
35+
"ai": "workspace:*",
36+
"@ai-sdk/provider-utils": "workspace:*"
37+
},
38+
"devDependencies": {
39+
"@types/node": "20.17.24",
40+
"@vercel/ai-tsconfig": "workspace:*",
41+
"tsup": "^8",
42+
"typescript": "5.8.3"
43+
},
44+
"engines": {
45+
"node": ">=18"
46+
},
47+
"publishConfig": {
48+
"access": "public"
49+
},
50+
"homepage": "https://sdk.vercel.ai/docs",
51+
"repository": {
52+
"type": "git",
53+
"url": "git+https://github.com/vercel/ai.git"
54+
},
55+
"bugs": {
56+
"url": "https://github.com/vercel/ai/issues"
57+
},
58+
"keywords": [
59+
"ai"
60+
]
61+
}

‎packages/llamaindex/src/index.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
export * from './llamaindex-adapter';

‎packages/ai/streams/llamaindex-adapter.test.ts renamed to ‎packages/llamaindex/src/llamaindex-adapter.test.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ import {
88
toDataStream,
99
toDataStreamResponse,
1010
} from './llamaindex-adapter';
11-
import { createDataStream } from '../core';
11+
import { createDataStream } from 'ai';
1212

1313
describe('toDataStream', () => {
1414
it('should convert AsyncIterable<EngineResponse>', async () => {

0 commit comments

Comments
 (0)