Skip to content

Commit e1cbf8a

Browse files
authored
chore: extract rsc to separate package (#5542)
1 parent 968d173 commit e1cbf8a

File tree

106 files changed

+495
-433
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

106 files changed

+495
-433
lines changed

‎.changeset/afraid-moles-cross.md

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
---
2+
'@ai-sdk/rsc': major
3+
'ai': major
4+
---
5+
6+
chore(@ai-sdk/rsc): extract to separate package

‎content/cookbook/20-rsc/20-stream-text.mdx

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -20,14 +20,14 @@ Text generation can sometimes take a long time to complete, especially when you'
2020

2121
## Client
2222

23-
Let's create a simple React component that will call the `generate` function when a button is clicked. The `generate` function will call the `streamText` function, which will then generate text based on the input prompt. To consume the stream of text in the client, we will use the `readStreamableValue` function from the `ai/rsc` module.
23+
Let's create a simple React component that will call the `generate` function when a button is clicked. The `generate` function will call the `streamText` function, which will then generate text based on the input prompt. To consume the stream of text in the client, we will use the `readStreamableValue` function from the `@ai-sdk/rsc` module.
2424

2525
```tsx filename="app/page.tsx"
2626
'use client';
2727

2828
import { useState } from 'react';
2929
import { generate } from './actions';
30-
import { readStreamableValue } from 'ai/rsc';
30+
import { readStreamableValue } from '@ai-sdk/rsc';
3131

3232
// Allow streaming responses up to 30 seconds
3333
export const maxDuration = 30;
@@ -66,7 +66,7 @@ Using DevTools, we can see the text generation being streamed to the client in r
6666

6767
import { streamText } from 'ai';
6868
import { openai } from '@ai-sdk/openai';
69-
import { createStreamableValue } from 'ai/rsc';
69+
import { createStreamableValue } from '@ai-sdk/rsc';
7070

7171
export async function generate(input: string) {
7272
const stream = createStreamableValue('');

‎content/cookbook/20-rsc/21-stream-text-with-chat-prompt.mdx

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -32,7 +32,7 @@ Let's create a simple conversation between a user and a model, and place a butto
3232

3333
import { useState } from 'react';
3434
import { Message, continueConversation } from './actions';
35-
import { readStreamableValue } from 'ai/rsc';
35+
import { readStreamableValue } from '@ai-sdk/rsc';
3636

3737
// Allow streaming responses up to 30 seconds
3838
export const maxDuration = 30;
@@ -95,7 +95,7 @@ Now, let's implement the `continueConversation` function that will insert the us
9595

9696
import { streamText } from 'ai';
9797
import { openai } from '@ai-sdk/openai';
98-
import { createStreamableValue } from 'ai/rsc';
98+
import { createStreamableValue } from '@ai-sdk/rsc';
9999

100100
export interface Message {
101101
role: 'user' | 'assistant';

‎content/cookbook/20-rsc/40-stream-object.mdx

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@ Let's create a simple React component that will call the `getNotifications` func
5151

5252
import { useState } from 'react';
5353
import { generate } from './actions';
54-
import { readStreamableValue } from 'ai/rsc';
54+
import { readStreamableValue } from '@ai-sdk/rsc';
5555

5656
// Allow streaming responses up to 30 seconds
5757
export const maxDuration = 30;
@@ -92,7 +92,7 @@ Now let's implement the `getNotifications` function. We'll use the `generateObje
9292

9393
import { streamObject } from 'ai';
9494
import { openai } from '@ai-sdk/openai';
95-
import { createStreamableValue } from 'ai/rsc';
95+
import { createStreamableValue } from '@ai-sdk/rsc';
9696
import { z } from 'zod';
9797

9898
export async function generate(input: string) {

‎content/cookbook/20-rsc/60-save-messages-to-database.mdx

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@ export default function RootLayout({
4141

4242
import { useState } from 'react';
4343
import { ClientMessage } from './actions';
44-
import { useActions, useUIState } from 'ai/rsc';
44+
import { useActions, useUIState } from '@ai-sdk/rsc';
4545
import { generateId } from 'ai';
4646

4747
// Allow streaming responses up to 30 seconds
@@ -100,7 +100,7 @@ We will use the callback function to listen to state changes and save the conver
100100
```tsx filename='app/actions.tsx'
101101
'use server';
102102

103-
import { getAIState, getMutableAIState, streamUI } from 'ai/rsc';
103+
import { getAIState, getMutableAIState, streamUI } from '@ai-sdk/rsc';
104104
import { openai } from '@ai-sdk/openai';
105105
import { ReactNode } from 'react';
106106
import { z } from 'zod';
@@ -176,7 +176,7 @@ export async function continueConversation(
176176
```
177177

178178
```ts filename='app/ai.ts'
179-
import { createAI } from 'ai/rsc';
179+
import { createAI } from '@ai-sdk/rsc';
180180
import { ServerMessage, ClientMessage, continueConversation } from './actions';
181181

182182
export const AI = createAI<ServerMessage[], ClientMessage[]>({

‎content/cookbook/20-rsc/61-restore-messages-from-database.mdx

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,7 @@ export default function RootLayout({
3939

4040
import { useState, useEffect } from 'react';
4141
import { ClientMessage } from './actions';
42-
import { useActions, useUIState } from 'ai/rsc';
42+
import { useActions, useUIState } from '@ai-sdk/rsc';
4343
import { generateId } from 'ai';
4444

4545
export default function Home() {
@@ -97,7 +97,7 @@ export default function Home() {
9797
The server-side implementation handles the restoration of messages and their transformation into the appropriate format for display.
9898

9999
```tsx filename='app/ai.ts'
100-
import { createAI } from 'ai/rsc';
100+
import { createAI } from '@ai-sdk/rsc';
101101
import { ServerMessage, ClientMessage, continueConversation } from './actions';
102102
import { Stock } from '@ai-studio/components/stock';
103103
import { generateId } from 'ai';
@@ -126,7 +126,7 @@ export const AI = createAI<ServerMessage[], ClientMessage[]>({
126126
```tsx filename='app/actions.tsx'
127127
'use server';
128128

129-
import { getAIState } from 'ai/rsc';
129+
import { getAIState } from '@ai-sdk/rsc';
130130

131131
export interface ServerMessage {
132132
role: 'user' | 'assistant' | 'function';

‎content/cookbook/20-rsc/90-render-visual-interface-in-chat.mdx

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ When we define multiple functions in [`tools`](/docs/reference/ai-sdk-core/gener
1717

1818
import { useState } from 'react';
1919
import { ClientMessage } from './actions';
20-
import { useActions, useUIState } from 'ai/rsc';
20+
import { useActions, useUIState } from '@ai-sdk/rsc';
2121
import { generateId } from 'ai';
2222

2323
// Allow streaming responses up to 30 seconds
@@ -112,7 +112,7 @@ export async function Flight({ flightNumber }) {
112112
```tsx filename='app/actions.tsx'
113113
'use server';
114114

115-
import { getMutableAIState, streamUI } from 'ai/rsc';
115+
import { getMutableAIState, streamUI } from '@ai-sdk/rsc';
116116
import { openai } from '@ai-sdk/openai';
117117
import { ReactNode } from 'react';
118118
import { z } from 'zod';
@@ -206,7 +206,7 @@ export async function continueConversation(
206206
```
207207

208208
```typescript filename='app/ai.ts'
209-
import { createAI } from 'ai/rsc';
209+
import { createAI } from '@ai-sdk/rsc';
210210
import { ServerMessage, ClientMessage, continueConversation } from './actions';
211211

212212
export const AI = createAI<ServerMessage[], ClientMessage[]>({

‎content/cookbook/20-rsc/91-stream-updates-to-visual-interfaces.mdx

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ In our previous example we've been streaming react components from the server to
1515

1616
import { useState } from 'react';
1717
import { ClientMessage } from './actions';
18-
import { useActions, useUIState } from 'ai/rsc';
18+
import { useActions, useUIState } from '@ai-sdk/rsc';
1919
import { generateId } from 'ai';
2020

2121
// Allow streaming responses up to 30 seconds
@@ -72,7 +72,7 @@ export default function Home() {
7272
```tsx filename='app/actions.tsx'
7373
'use server';
7474

75-
import { getMutableAIState, streamUI } from 'ai/rsc';
75+
import { getMutableAIState, streamUI } from '@ai-sdk/rsc';
7676
import { openai } from '@ai-sdk/openai';
7777
import { ReactNode } from 'react';
7878
import { z } from 'zod';
@@ -137,7 +137,7 @@ export async function continueConversation(
137137
```
138138

139139
```typescript filename='app/ai.ts'
140-
import { createAI } from 'ai/rsc';
140+
import { createAI } from '@ai-sdk/rsc';
141141
import { ServerMessage, ClientMessage, continueConversation } from './actions';
142142

143143
export const AI = createAI<ServerMessage[], ClientMessage[]>({

‎content/cookbook/20-rsc/92-stream-ui-record-token-usage.mdx

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ It is called when the stream is finished.
1919

2020
import { useState } from 'react';
2121
import { ClientMessage } from './actions';
22-
import { useActions, useUIState } from 'ai/rsc';
22+
import { useActions, useUIState } from '@ai-sdk/rsc';
2323
import { generateId } from 'ai';
2424

2525
// Allow streaming responses up to 30 seconds
@@ -76,7 +76,7 @@ export default function Home() {
7676
```tsx filename='app/actions.tsx' highlight={"57-63"}
7777
'use server';
7878

79-
import { createAI, getMutableAIState, streamUI } from 'ai/rsc';
79+
import { createAI, getMutableAIState, streamUI } from '@ai-sdk/rsc';
8080
import { openai } from '@ai-sdk/openai';
8181
import { ReactNode } from 'react';
8282
import { z } from 'zod';
@@ -148,7 +148,7 @@ export async function continueConversation(
148148
```
149149

150150
```typescript filename='app/ai.ts'
151-
import { createAI } from 'ai/rsc';
151+
import { createAI } from '@ai-sdk/rsc';
152152
import { ServerMessage, ClientMessage, continueConversation } from './actions';
153153

154154
export const AI = createAI<ServerMessage[], ClientMessage[]>({

‎content/docs/02-guides/21-llama-3_1.mdx

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -254,7 +254,7 @@ First, create a Server Action.
254254
```tsx filename="app/actions.tsx"
255255
'use server';
256256

257-
import { streamUI } from 'ai/rsc';
257+
import { streamUI } from '@ai-sdk/rsc';
258258
import { deepinfra } from '@ai-sdk/deepinfra';
259259
import { z } from 'zod';
260260

0 commit comments

Comments (0)